[med-svn] [mypy] 01/06: New upstream version 0.560

Andreas Tille tille at debian.org
Fri Dec 22 08:23:40 UTC 2017


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository mypy.

commit 64fcc72c3fe6cbb66cda226425448c41b820c560
Author: Andreas Tille <tille at debian.org>
Date:   Fri Dec 22 09:13:35 2017 +0100

    New upstream version 0.560
---
 LICENSE                                            | 227 +++++++++
 MANIFEST.in                                        |   1 +
 PKG-INFO                                           |   3 +-
 README.md                                          |   4 +-
 docs/source/builtin_types.rst                      |   2 +
 docs/source/cheat_sheet.rst                        |   3 +
 docs/source/cheat_sheet_py3.rst                    |   3 +
 docs/source/class_basics.rst                       | 285 ++++++++++-
 docs/source/command_line.rst                       |  74 ++-
 docs/source/common_issues.rst                      |  26 +-
 docs/source/config_file.rst                        |  38 +-
 docs/source/faq.rst                                |  13 +-
 docs/source/generics.rst                           |  42 +-
 docs/source/getting_started.rst                    |   2 +-
 docs/source/kinds_of_types.rst                     |   8 +-
 docs/source/revision_history.rst                   |  22 +
 extensions/LICENSE                                 |  27 +
 extensions/MANIFEST.in                             |   1 +
 extensions/setup.py                                |   3 +
 mypy.egg-info/PKG-INFO                             |   3 +-
 mypy.egg-info/SOURCES.txt                          |  49 +-
 mypy.egg-info/entry_points.txt                     |   1 +
 mypy.egg-info/requires.txt                         |   1 +
 mypy/build.py                                      | 470 +++++++++++++----
 mypy/checker.py                                    |  91 ++--
 mypy/checkexpr.py                                  |  49 +-
 mypy/checkmember.py                                |   6 +-
 mypy/constraints.py                                |   8 +-
 mypy/dmypy.py                                      | 341 +++++++++++++
 mypy/dmypy_server.py                               | 289 +++++++++++
 mypy/dmypy_util.py                                 |  27 +
 mypy/errors.py                                     |  19 +-
 mypy/fastparse.py                                  |  36 +-
 mypy/fastparse2.py                                 |  11 +-
 mypy/main.py                                       | 116 +++--
 mypy/meet.py                                       |  53 +-
 mypy/messages.py                                   |  95 ++--
 mypy/myunit/__main__.py                            |   9 -
 mypy/nodes.py                                      |  54 +-
 mypy/options.py                                    |  28 +-
 mypy/parse.py                                      |   3 +
 mypy/report.py                                     |  33 +-
 mypy/semanal.py                                    | 294 ++++++-----
 mypy/semanal_pass1.py                              |  77 ++-
 mypy/semanal_pass3.py                              |  73 ++-
 mypy/server/astdiff.py                             | 332 +++++++++---
 mypy/server/astmerge.py                            |  79 ++-
 mypy/server/aststrip.py                            |  66 ++-
 mypy/server/deps.py                                | 523 ++++++++++++++++---
 mypy/server/target.py                              |  13 +-
 mypy/server/update.py                              | 565 ++++++++++++++++++---
 mypy/solve.py                                      |   1 +
 mypy/stats.py                                      | 108 +---
 mypy/strconv.py                                    |  10 +-
 mypy/stubgen.py                                    |  14 +-
 mypy/test/data.py                                  |  72 ++-
 mypy/test/testdeps.py                              |  42 +-
 mypy/test/testdiff.py                              |  10 +-
 mypy/test/testdmypy.py                             | 311 ++++++++++++
 mypy/test/testfinegrained.py                       |  65 +--
 mypy/test/testinfer.py                             |   2 +-
 mypy/test/testmerge.py                             | 101 ++--
 mypy/test/testparse.py                             |   5 +-
 mypy/test/testreports.py                           |   2 +-
 mypy/test/testsolve.py                             |   2 +-
 mypy/test/testsubtypes.py                          |   2 +-
 mypy/test/testtypes.py                             |  14 +-
 mypy/{ => test}/typefixture.py                     |   4 +-
 mypy/traverser.py                                  |   4 +
 mypy/treetransform.py                              |  13 +-
 mypy/tvar_scope.py                                 |  12 +-
 mypy/typeanal.py                                   |  41 +-
 mypy/types.py                                      |  86 ++--
 mypy/util.py                                       |   5 +
 mypy/version.py                                    |   2 +-
 mypy_self_check.ini                                |  13 +-
 runtests.py                                        |   1 +
 scripts/dmypy                                      |  20 +
 scripts/finegrained.py                             | 100 ++++
 setup.py                                           |  13 +-
 test-data/unit/check-abstract.test                 |  15 +
 test-data/unit/check-class-namedtuple.test         |   2 +-
 test-data/unit/check-classes.test                  |  66 ++-
 test-data/unit/check-expressions.test              |  27 +-
 test-data/unit/check-fastparse.test                |  73 ++-
 test-data/unit/check-flags.test                    | 110 ++--
 test-data/unit/check-functions.test                |  92 +++-
 test-data/unit/check-generics.test                 |  29 +-
 test-data/unit/check-incremental.test              | 201 +++++++-
 test-data/unit/check-inference-context.test        |   8 +
 test-data/unit/check-inference.test                |  51 +-
 test-data/unit/check-isinstance.test               |  30 ++
 test-data/unit/check-kwargs.test                   |  24 +-
 test-data/unit/check-namedtuple.test               |  23 +-
 test-data/unit/check-optional.test                 |  31 ++
 test-data/unit/check-overloading.test              |  80 +++
 test-data/unit/check-tuples.test                   |  27 +
 test-data/unit/check-typevar-values.test           |  15 +
 test-data/unit/check-unions.test                   |   6 +-
 test-data/unit/check-unreachable-code.test         |  32 +-
 test-data/unit/check-warnings.test                 |  12 +
 test-data/unit/cmdline.test                        | 130 +----
 test-data/unit/deps-classes.test                   |  21 +
 test-data/unit/deps-expressions.test               | 435 ++++++++++++++++
 test-data/unit/deps-generics.test                  | 142 ++++++
 test-data/unit/deps-statements.test                | 551 ++++++++++++++++++++
 test-data/unit/deps-types.test                     | 151 ++++++
 test-data/unit/deps.test                           | 269 +++++++++-
 test-data/unit/diff.test                           | 192 ++++++-
 test-data/unit/fine-grained-blockers.test          | 368 ++++++++++++++
 test-data/unit/fine-grained-cycles.test            | 218 ++++++++
 test-data/unit/fine-grained-modules.test           | 534 +++++++++++++++++++
 test-data/unit/fine-grained.test                   | 126 +++++
 test-data/unit/fixtures/async_await.pyi            |   3 +-
 test-data/unit/fixtures/tuple.pyi                  |   1 +
 test-data/unit/fixtures/typing-full.pyi            |   3 +-
 test-data/unit/lib-stub/blocker.pyi                |   2 +
 test-data/unit/lib-stub/blocker2.pyi               |   2 +
 test-data/unit/lib-stub/broken.pyi                 |   2 +
 test-data/unit/merge.test                          | 431 +++++++++++++++-
 test-data/unit/parse-errors.test                   |   1 +
 test-data/unit/pythoneval.test                     |  16 +
 test-data/unit/semanal-errors.test                 |   4 +-
 test-data/unit/semanal-symtable.test               |  34 ++
 typeshed/stdlib/2/SocketServer.pyi                 |   3 +-
 typeshed/stdlib/2/__builtin__.pyi                  |  98 ++--
 typeshed/stdlib/2/array.pyi                        |  56 --
 typeshed/stdlib/2/builtins.pyi                     |  98 ++--
 typeshed/stdlib/2/commands.pyi                     |  15 +-
 typeshed/stdlib/2/decimal.pyi                      |   4 +-
 typeshed/stdlib/2/shelve.pyi                       |  10 +-
 typeshed/stdlib/2/typing.pyi                       |  48 +-
 typeshed/stdlib/2/unittest.pyi                     |   4 +
 typeshed/stdlib/2/urllib2.pyi                      |   4 +-
 typeshed/stdlib/2/wsgiref/types.pyi                |   5 +-
 typeshed/stdlib/{3 => 2and3}/array.pyi             |  22 +-
 typeshed/stdlib/2and3/asynchat.pyi                 |   2 +-
 typeshed/stdlib/2and3/asyncore.pyi                 |   4 +-
 typeshed/stdlib/2and3/bz2.pyi                      |  48 +-
 typeshed/stdlib/2and3/calendar.pyi                 |   4 +-
 typeshed/stdlib/2and3/distutils/cmd.pyi            |  27 +-
 typeshed/stdlib/2and3/distutils/core.pyi           |   2 +-
 typeshed/stdlib/2and3/distutils/log.pyi            |  28 +
 typeshed/stdlib/2and3/fractions.pyi                |  14 +-
 typeshed/stdlib/2and3/imaplib.pyi                  | 133 +++++
 typeshed/stdlib/2and3/logging/__init__.pyi         |   2 +-
 typeshed/stdlib/2and3/numbers.pyi                  |   2 +-
 typeshed/stdlib/2and3/pdb.pyi                      |  10 +-
 typeshed/stdlib/2and3/pkgutil.pyi                  |   4 +-
 typeshed/stdlib/2and3/plistlib.pyi                 |  21 +-
 typeshed/stdlib/2and3/py_compile.pyi               |   2 +-
 typeshed/stdlib/2and3/threading.pyi                |   6 +-
 typeshed/stdlib/2and3/traceback.pyi                |   4 +-
 typeshed/stdlib/2and3/xml/sax/__init__.pyi         |   4 +-
 typeshed/stdlib/3.4/asyncio/futures.pyi            |   4 +-
 typeshed/stdlib/3.4/asyncio/locks.pyi              |   4 +-
 typeshed/stdlib/3.4/asyncio/protocols.pyi          |   4 +-
 typeshed/stdlib/3.4/asyncio/tasks.pyi              |  12 +-
 typeshed/stdlib/3.4/enum.pyi                       |   2 +-
 typeshed/stdlib/3/_importlib_modulespec.pyi        |   6 +-
 typeshed/stdlib/3/builtins.pyi                     | 180 +++----
 typeshed/stdlib/3/collections/__init__.pyi         |  13 +-
 typeshed/stdlib/3/concurrent/futures/_base.pyi     |   7 +-
 typeshed/stdlib/3/configparser.pyi                 |  12 +-
 typeshed/stdlib/3/curses/__init__.pyi              |   3 +
 typeshed/stdlib/3/datetime.pyi                     |   4 +-
 typeshed/stdlib/3/email/message.pyi                |  16 +-
 typeshed/stdlib/3/getpass.pyi                      |   2 +-
 typeshed/stdlib/3/gzip.pyi                         |  70 +--
 typeshed/stdlib/3/hashlib.pyi                      |  38 +-
 typeshed/stdlib/3/http/__init__.pyi                |   7 +-
 typeshed/stdlib/3/importlib/__init__.pyi           |   8 +-
 typeshed/stdlib/3/importlib/abc.pyi                |   6 +-
 typeshed/stdlib/3/importlib/machinery.pyi          |   6 +-
 typeshed/stdlib/3/importlib/util.pyi               |  14 +-
 typeshed/stdlib/3/inspect.pyi                      |  14 +-
 typeshed/stdlib/3/io.pyi                           |  11 +-
 typeshed/stdlib/3/json/decoder.pyi                 |  12 +-
 typeshed/stdlib/3/json/encoder.pyi                 |   4 +-
 typeshed/stdlib/3/multiprocessing/__init__.pyi     |  39 +-
 typeshed/stdlib/3/multiprocessing/connection.pyi   |  39 ++
 typeshed/stdlib/3/multiprocessing/context.pyi      |  41 +-
 typeshed/stdlib/3/multiprocessing/pool.pyi         |  30 +-
 typeshed/stdlib/3/multiprocessing/synchronize.pyi  |  64 +++
 typeshed/stdlib/3/shelve.pyi                       |  10 +-
 typeshed/stdlib/3/socketserver.pyi                 |   3 +-
 typeshed/stdlib/3/sre_constants.pyi                |  74 +++
 typeshed/stdlib/3/subprocess.pyi                   |  57 ++-
 typeshed/stdlib/3/sys.pyi                          |   2 +-
 typeshed/stdlib/3/types.pyi                        |   4 +
 typeshed/stdlib/3/typing.pyi                       |  80 ++-
 typeshed/stdlib/3/unittest/mock.pyi                |  35 +-
 typeshed/stdlib/3/urllib/request.pyi               |   4 +-
 typeshed/stdlib/3/wsgiref/types.pyi                |   7 +-
 typeshed/tests/pytype_test.py                      |  85 +++-
 typeshed/third_party/2/croniter.pyi                |  23 -
 typeshed/third_party/2/dateutil/parser.pyi         |  40 +-
 typeshed/third_party/2/dateutil/relativedelta.pyi  |  20 +-
 typeshed/third_party/2/dateutil/tz/__init__.pyi    |   2 +-
 typeshed/third_party/2/dateutil/tz/_common.pyi     |   2 +-
 typeshed/third_party/2/dateutil/tz/tz.pyi          |  14 +-
 typeshed/third_party/2/enum.pyi                    |  20 +-
 .../2/google/protobuf/message_factory.pyi          |   2 +-
 typeshed/third_party/2/itsdangerous.pyi            |  52 +-
 .../2/selenium/webdriver/remote/webdriver.pyi      |   4 +-
 .../2/selenium/webdriver/remote/webelement.pyi     |   2 +-
 typeshed/third_party/2/six/__init__.pyi            |   1 +
 typeshed/third_party/2/werkzeug/wrappers.pyi       |  14 +-
 .../third_party/2and3/atomicwrites/__init__.pyi    |  11 +-
 typeshed/third_party/2and3/boto/s3/__init__.pyi    |   2 +-
 .../third_party/2and3/characteristic/__init__.pyi  |  26 +-
 typeshed/third_party/2and3/click/core.pyi          | 130 ++---
 typeshed/third_party/2and3/click/decorators.pyi    | 252 +++++----
 typeshed/third_party/2and3/click/exceptions.pyi    |  32 +-
 typeshed/third_party/2and3/click/formatting.pyi    |  22 +-
 typeshed/third_party/2and3/click/globals.pyi       |   4 +-
 typeshed/third_party/2and3/click/parser.pyi        |  22 +-
 typeshed/third_party/2and3/click/termui.pyi        | 118 ++---
 typeshed/third_party/2and3/click/types.pyi         |  72 +--
 typeshed/third_party/2and3/click/utils.pyi         |  39 +-
 typeshed/third_party/2and3/croniter.pyi            |  41 ++
 typeshed/third_party/2and3/markupsafe/__init__.pyi |  18 +-
 typeshed/third_party/2and3/pymysql/connections.pyi |   2 +-
 typeshed/third_party/2and3/pymysql/cursors.pyi     |   2 +-
 typeshed/third_party/2and3/requests/__init__.pyi   |   1 +
 typeshed/third_party/2and3/requests/adapters.pyi   |   8 +-
 typeshed/third_party/2and3/requests/api.pyi        |   2 +-
 typeshed/third_party/2and3/requests/models.pyi     |   3 +
 typeshed/third_party/2and3/requests/sessions.pyi   |  13 +-
 .../third_party/{2 => 2and3}/thrift/Thrift.pyi     |   0
 .../third_party/{2 => 2and3}/thrift/__init__.pyi   |   0
 .../thrift/protocol/TBinaryProtocol.pyi            |   0
 .../{2 => 2and3}/thrift/protocol/TProtocol.pyi     |   0
 .../{2 => 2and3}/thrift/protocol/__init__.pyi      |   0
 .../{2 => 2and3}/thrift/transport/TSocket.pyi      |   0
 .../{2 => 2and3}/thrift/transport/TTransport.pyi   |   0
 .../{2 => 2and3}/thrift/transport/__init__.pyi     |   0
 typeshed/third_party/2and3/yaml/__init__.pyi       |  11 +-
 typeshed/third_party/3/dateutil/parser.pyi         |  61 +--
 typeshed/third_party/3/dateutil/relativedelta.pyi  |  73 +--
 typeshed/third_party/3/dateutil/tz/__init__.pyi    |  13 +-
 typeshed/third_party/3/dateutil/tz/tz.pyi          |  14 +-
 typeshed/third_party/3/enum.pyi                    |   2 +-
 typeshed/third_party/3/itsdangerous.pyi            |  54 +-
 typeshed/third_party/3/lxml/__init__.pyi           |   0
 typeshed/third_party/3/lxml/etree.pyi              | 134 -----
 typeshed/third_party/3/lxml/objectify.pyi          |  13 -
 typeshed/third_party/3/six/__init__.pyi            |   9 +-
 typeshed/third_party/3/werkzeug/wrappers.pyi       |  14 +-
 249 files changed, 10557 insertions(+), 2837 deletions(-)

diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..afddd48
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,227 @@
+Mypy is licensed under the terms of the MIT license, reproduced below.
+
+= = = = =
+
+The MIT License
+
+Copyright (c) 2015-2016 Jukka Lehtosalo and contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
+= = = = =
+
+Portions of mypy are licensed under different licenses.  The files
+under stdlib-samples are licensed under the PSF 2 License, reproduced below.
+
+= = = = =
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python
+alone or in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions.  Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee.  This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party.  As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee.  Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement.  This Agreement together with
+Python 1.6.1 may be located on the Internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013.  This
+Agreement may also be obtained from a proxy server on the Internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee.  This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+        ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands.  All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+= = = = =
\ No newline at end of file
diff --git a/MANIFEST.in b/MANIFEST.in
index f184bf5..30211b0 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -4,3 +4,4 @@ recursive-include extensions *
 recursive-include docs *
 include runtests.py
 include mypy_self_check.ini
+include LICENSE
diff --git a/PKG-INFO b/PKG-INFO
index c85de6a..a40251e 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: mypy
-Version: 0.540
+Version: 0.560
 Summary: Optional static typing for Python
 Home-page: http://www.mypy-lang.org/
 Author: Jukka Lehtosalo
@@ -24,7 +24,6 @@ Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: POSIX
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
diff --git a/README.md b/README.md
index 18c726c..8750f09 100644
--- a/README.md
+++ b/README.md
@@ -61,7 +61,7 @@ See 'Development status' below.
 Requirements
 ------------
 
-You need Python 3.3 or later to run mypy.  You can have multiple Python
+You need Python 3.4 or later to run mypy.  You can have multiple Python
 versions (2.x and 3.x) installed on the same system without problems.
 
 In Ubuntu, Mint and Debian you can install Python 3 like this:
@@ -105,7 +105,7 @@ IDE & Linter Integrations
 Mypy can be integrated into popular IDEs:
 
 * Vim: [vim-mypy](https://github.com/Integralist/vim-mypy)
-* Emacs: using [Flycheck](https://github.com/flycheck/) and [Flycheck-mypy](https://github.com/lbolla/emacs-flycheck-mypy/issues)
+* Emacs: using [Flycheck](https://github.com/flycheck/) and [Flycheck-mypy](https://github.com/lbolla/emacs-flycheck-mypy)
 * Sublime Text: [SublimeLinter-contrib-mypy](https://github.com/fredcallaway/SublimeLinter-contrib-mypy)
 * Atom: [linter-mypy](https://atom.io/packages/linter-mypy)
 * PyCharm: PyCharm integrates [its own implementation of PEP 484](https://www.jetbrains.com/help/pycharm/2017.1/type-hinting-in-pycharm.html).
diff --git a/docs/source/builtin_types.rst b/docs/source/builtin_types.rst
index 4426df7..e41c5db 100644
--- a/docs/source/builtin_types.rst
+++ b/docs/source/builtin_types.rst
@@ -13,6 +13,8 @@ Type                Description
 ``bytes``           8-bit string
 ``object``          an arbitrary object (``object`` is the common base class)
 ``List[str]``       list of ``str`` objects
+``Tuple[int, int]`` tuple of two ``int``s (``Tuple[()]`` is the empty tuple)
+``Tuple[int, ...]`` tuple of an arbitrary number of ``int`` objects
 ``Dict[str, int]``  dictionary from ``str`` keys to ``int`` values
 ``Iterable[int]``   iterable object containing ints
 ``Sequence[bool]``  sequence of booleans
diff --git a/docs/source/cheat_sheet.rst b/docs/source/cheat_sheet.rst
index a1cdb98..590820c 100644
--- a/docs/source/cheat_sheet.rst
+++ b/docs/source/cheat_sheet.rst
@@ -33,6 +33,9 @@ Built-in types
    x = [1] # type: List[int]
    x = set([6, 7]) # type: Set[int]
 
+   # Empty Tuple types are a bit special
+   x = ()  # type: Tuple[()]
+
    # For mappings, we need the types of both keys and values.
    x = dict(field=2.0) # type: Dict[str, float]
 
diff --git a/docs/source/cheat_sheet_py3.rst b/docs/source/cheat_sheet_py3.rst
index 09eab13..8628025 100644
--- a/docs/source/cheat_sheet_py3.rst
+++ b/docs/source/cheat_sheet_py3.rst
@@ -34,6 +34,9 @@ Built-in types
    x = [1]  # type: List[int]
    x = {6, 7}  # type: Set[int]
 
+   # Empty Tuple types are a bit special
+   x = ()  # type: Tuple[()]
+
    # For mappings, we need the types of both keys and values.
    x = {'field': 2.0}  # type: Dict[str, float]
 
diff --git a/docs/source/class_basics.rst b/docs/source/class_basics.rst
index 3dc116c..b3027c1 100644
--- a/docs/source/class_basics.rst
+++ b/docs/source/class_basics.rst
@@ -117,16 +117,15 @@ annotations have no effect at runtime:
 Abstract base classes and multiple inheritance
 **********************************************
 
-Mypy uses Python abstract base classes for protocol types. There are
-several built-in abstract base classes types (for example,
-``Sequence``, ``Iterable`` and ``Iterator``). You can define abstract
-base classes using the ``abc.ABCMeta`` metaclass and the
-``abc.abstractmethod`` function decorator.
+Mypy supports Python abstract base classes (ABCs). Abstract classes
+have at least one abstract method or property that must be implemented
+by a subclass. You can define abstract base classes using the
+``abc.ABCMeta`` metaclass, and the ``abc.abstractmethod`` and
+``abc.abstractproperty`` function decorators. Example:
 
 .. code-block:: python
 
    from abc import ABCMeta, abstractmethod
-   import typing
 
    class A(metaclass=ABCMeta):
        @abstractmethod
@@ -140,32 +139,31 @@ base classes using the ``abc.ABCMeta`` metaclass and the
        def bar(self) -> str:
            return 'x'
 
-   a = A() # Error: A is abstract
-   b = B() # OK
+   a = A()  # Error: A is abstract
+   b = B()  # OK
 
-Unlike most Python code, abstract base classes are likely to play a
-significant role in many complex mypy programs.
+Note that mypy performs checking for unimplemented abstract methods
+even if you omit the ``ABCMeta`` metaclass. This can be useful if the
+metaclass would cause runtime metaclass conflicts.
 
 A class can inherit any number of classes, both abstract and
 concrete. As with normal overrides, a dynamically typed method can
-implement a statically typed abstract method defined in an abstract
-base class.
+implement a statically typed method defined in any base class,
+including an abstract method defined in an abstract base class.
+
+You can implement an abstract property using either a normal
+property or an instance variable.
 
 .. _protocol-types:
 
 Protocols and structural subtyping
 **********************************
 
-.. note::
-
-   Structural subtyping is experimental. Some things may not
-   work as expected. Mypy may pass unsafe code or it can reject
-   valid code.
-
 Mypy supports two ways of deciding whether two classes are compatible
 as types: nominal subtyping and structural subtyping. *Nominal*
 subtyping is strictly based on the class hierarchy. If class ``D``
-inherits class ``C``, it's also a subtype of ``C``. This form of
+inherits class ``C``, it's also a subtype of ``C``, and instances of
+``D`` can be used when ``C`` instances are expected. This form of
 subtyping is used by default in mypy, since it's easy to understand
 and produces clear and concise error messages, and since it matches
 how the native ``isinstance()`` check works -- based on class
@@ -174,16 +172,239 @@ a structural subtype of class ``C`` if the former has all attributes
 and methods of the latter, and with compatible types.
 
 Structural subtyping can be seen as a static equivalent of duck
-typing, which is well known to Python programmers. Mypy provides an
-opt-in support for structural subtyping via protocol classes described
+typing, which is well known to Python programmers. Mypy provides
+support for structural subtyping via protocol classes described
 below.  See `PEP 544 <https://www.python.org/dev/peps/pep-0544/>`_ for
 the detailed specification of protocols and structural subtyping in
 Python.
 
+.. _predefined_protocols:
+
+Predefined protocols
+********************
+
+The ``typing`` module defines various protocol classes that correspond
+to common Python protocols, such as ``Iterable[T]``.  If a class
+defines a suitable ``__iter__`` method, mypy understands that it
+implements the iterable protocol and is compatible with ``Iterable[T]``.
+For example, ``IntList`` below is iterable, over ``int`` values:
+
+.. code-block:: python
+
+   from typing import Iterator, Iterable, Optional
+
+   class IntList:
+       def __init__(self, value: int, next: Optional['IntList']) -> None:
+           self.value = value
+           self.next = next
+
+       def __iter__(self) -> Iterator[int]:
+           current = self
+           while current:
+               yield current.value
+               current = current.next
+
+   def print_numbered(items: Iterable[int]) -> None:
+       for n, x in enumerate(items):
+           print(n + 1, x)
+
+   x = IntList(3, IntList(5, None))
+   print_numbered(x)  # OK
+   print_numbered([4, 5])  # Also OK
+
+The subsections below introduce all built-in protocols defined in
+``typing`` and the signatures of the corresponding methods you need to define
+to implement each protocol (the signatures can be left out, as always, but mypy
+won't type check unannotated methods).
+
+Iteration protocols
+...................
+
+The iteration protocols are useful in many contexts. For example, they allow
+iteration of objects in for loops.
+
+``Iterable[T]``
+---------------
+
+The :ref:`example above <predefined_protocols>` has a simple implementation of an
+``__iter__`` method.
+
+.. code-block:: python
+
+   def __iter__(self) -> Iterator[T]
+
+``Iterator[T]``
+---------------
+
+.. code-block:: python
+
+   def __next__(self) -> T
+   def __iter__(self) -> Iterator[T]
+
+Collection protocols
+....................
+
+Many of these are implemented by built-in container types such as
+``list`` and ``dict``, and these are also useful for user-defined
+collection objects.
+
+``Sized``
+---------
+
+This is a type for objects that support ``len(x)``.
+
+.. code-block:: python
+
+   def __len__(self) -> int
+
+``Container[T]``
+----------------
+
+This is a type for objects that support the ``in`` operator.
+
+.. code-block:: python
+
+   def __contains__(self, x: object) -> bool
+
+``Collection[T]``
+-----------------
+
+.. code-block:: python
+
+   def __len__(self) -> int
+   def __iter__(self) -> Iterator[T]
+   def __contains__(self, x: object) -> bool
+
+One-off protocols
+.................
+
+These protocols are typically only useful with a single standard
+library function or class.
+
+``Reversible[T]``
+-----------------
+
+This is a type for objects that support ``reversed(x)``.
+
+.. code-block:: python
+
+   def __reversed__(self) -> Iterator[T]
+
+``SupportsAbs[T]``
+------------------
+
+This is a type for objects that support ``abs(x)``. ``T`` is the type of
+value returned by ``abs(x)``.
+
+.. code-block:: python
+
+   def __abs__(self) -> T
+
+``SupportsBytes``
+-----------------
+
+This is a type for objects that support ``bytes(x)``.
+
+.. code-block:: python
+
+   def __bytes__(self) -> bytes
+
+``SupportsComplex``
+-------------------
+
+This is a type for objects that support ``complex(x)``.
+
+.. code-block:: python
+
+   def __complex__(self) -> complex
+
+``SupportsFloat``
+-----------------
+
+This is a type for objects that support ``float(x)``.
+
+.. code-block:: python
+
+   def __float__(self) -> float
+
+``SupportsInt``
+---------------
+
+This is a type for objects that support ``int(x)``.
+
+.. code-block:: python
+
+   def __int__(self) -> int
+
+``SupportsRound[T]``
+--------------------
+
+This is a type for objects that support ``round(x)``.
+
+.. code-block:: python
+
+   def __round__(self) -> T
+
+Async protocols
+...............
+
+These protocols can be useful in async code.
+
+``Awaitable[T]``
+----------------
+
+.. code-block:: python
+
+   def __await__(self) -> Generator[Any, None, T]
+
+``AsyncIterable[T]``
+--------------------
+
+.. code-block:: python
+
+   def __aiter__(self) -> AsyncIterator[T]
+
+``AsyncIterator[T]``
+--------------------
+
+.. code-block:: python
+
+   def __anext__(self) -> Awaitable[T]
+   def __aiter__(self) -> AsyncIterator[T]
+
+Context manager protocols
+.........................
+
+There are two protocols for context managers -- one for regular context
+managers and one for async ones. These allow defining objects that can
+be used in ``with`` and ``async with`` statements.
+
+``ContextManager[T]``
+---------------------
+
+.. code-block:: python
+
+   def __enter__(self) -> T
+   def __exit__(self,
+                exc_type: Optional[Type[BaseException]],
+                exc_value: Optional[BaseException],
+                traceback: Optional[TracebackType]) -> Optional[bool]
+
+``AsyncContextManager[T]``
+--------------------------
+
+.. code-block:: python
+
+   def __aenter__(self) -> Awaitable[T]
+   def __aexit__(self,
+                 exc_type: Optional[Type[BaseException]],
+                 exc_value: Optional[BaseException],
+                 traceback: Optional[TracebackType]) -> Awaitable[Optional[bool]]
+
 Simple user-defined protocols
 *****************************
 
-You can define a protocol class by inheriting the special
+You can define your own protocol class by inheriting the special
 ``typing_extensions.Protocol`` class:
 
 .. code-block:: python
@@ -216,12 +437,10 @@ similarly compatible with the protocol, as they support ``close()``.
    The ``Protocol`` base class is currently provided in the ``typing_extensions``
    package. Once structural subtyping is mature and
    `PEP 544 <https://www.python.org/dev/peps/pep-0544/>`_ has been accepted,
-   ``Protocol`` will be included in the ``typing`` module. Several library
-   types such as ``typing.Sized`` and ``typing.Iterable`` will also be changed
-   into protocols. They are currently treated as regular ABCs by mypy.
+   ``Protocol`` will be included in the ``typing`` module.
 
-Defining subprotocols
-*********************
+Defining subprotocols and subclassing protocols
+***********************************************
 
 You can also define subprotocols. Existing protocols can be extended
 and merged using multiple inheritance. Example:
@@ -249,7 +468,7 @@ and merged using multiple inheritance. Example:
 
 Note that inheriting from an existing protocol does not automatically
 turn the subclass into a protocol -- it just creates a regular
-(non-protocol) ABC that implements the given protocol (or
+(non-protocol) class or ABC that implements the given protocol (or
 protocols). The ``typing_extensions.Protocol`` base class must always
 be explicitly present if you are defining a protocol:
 
@@ -267,6 +486,13 @@ be explicitly present if you are defining a protocol:
    # Error: nominal subtyping used by default
    x: NewProtocol = Concrete()  # Error!
 
+You can also include default implementations of methods in
+protocols. If you explicitly subclass these protocols you can inherit
+these default implementations. Explicitly including a protocol as a
+base class is also a way of documenting that your class implements a
+particular protocol, and it forces mypy to verify that your class
+implementation is actually compatible with the protocol.
+
 .. note::
 
    You can use Python 3.6 variable annotations (`PEP 526
@@ -326,6 +552,9 @@ adds support for basic runtime structural checks:
    if isinstance(mug, Portable):
       use(mug.handles)  # Works statically and at runtime
 
+``isinstance()`` also works with the :ref:`predefined protocols <predefined_protocols>`
+in ``typing`` such as ``Iterable``.
+
 .. note::
    ``isinstance()`` with protocols is not completely safe at runtime.
    For example, signatures of methods are not checked. The runtime
diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst
index a8ef653..99f7d5b 100644
--- a/docs/source/command_line.rst
+++ b/docs/source/command_line.rst
@@ -11,7 +11,7 @@ flag (or its long form ``--help``)::
   usage: mypy [-h] [-v] [-V] [--python-version x.y] [--platform PLATFORM] [-2]
               [--ignore-missing-imports]
               [--follow-imports {normal,silent,skip,error}]
-              [--disallow-any {unimported, expr, unannotated, decorated, explicit, generics}]
+              [--disallow-any-{unimported,expr,decorated,explicit,generics}]
               [--disallow-untyped-calls] [--disallow-untyped-defs]
               [--check-untyped-defs] [--disallow-subclassing-any]
               [--warn-incomplete-stub] [--warn-redundant-casts]
@@ -28,7 +28,7 @@ flag (or its long form ``--help``)::
               [--shadow-file SOURCE_FILE SHADOW_FILE] [--any-exprs-report DIR]
               [--cobertura-xml-report DIR] [--html-report DIR]
               [--linecount-report DIR] [--linecoverage-report DIR]
-              [--memory-xml-report DIR] [--old-html-report DIR]
+              [--memory-xml-report DIR]
               [--txt-report DIR] [--xml-report DIR] [--xslt-html-report DIR]
               [--xslt-txt-report DIR] [-m MODULE] [-c PROGRAM_TEXT] [-p PACKAGE]
               [files [files ...]]
@@ -176,7 +176,7 @@ same directory on the search path, only the stub file is used.
 in the earlier directory is used.)
 
 NOTE: These rules are relevant to the following section too:
-the ``--follow-imports`` flag described below is applied _after_ the
+the ``--follow-imports`` flag described below is applied *after* the
 above algorithm has determined which package, stub or module to use.
 
 .. _follow-imports:
@@ -249,6 +249,38 @@ directory.  The four possible values are:
     main.py:1: note: Import of 'submodule' ignored
     main.py:1: note: (Using --follow-imports=error, module not passed on command line)
 
+.. _disallow-any:
+
+Disallow Any Flags
+******************
+
+The ``--disallow-any`` family of flags disallows various types of ``Any`` in a module.
+The following options are available:
+
+- ``--disallow-any-unimported`` disallows usage of types that come from unfollowed imports
+  (such types become aliases for ``Any``). Unfollowed imports occur either
+  when the imported module does not exist or when ``--follow-imports=skip``
+  is set.
+
+- ``--disallow-any-expr`` disallows all expressions in the module that have type ``Any``.
+  If an expression of type ``Any`` appears anywhere in the module
+  mypy will output an error unless the expression is immediately
+  used as an argument to ``cast`` or assigned to a variable with an
+  explicit type annotation. In addition, declaring a variable of type ``Any``
+  or casting to type ``Any`` is not allowed. Note that calling functions
+  that take parameters of type ``Any`` is still allowed.
+
+- ``--disallow-any-decorated`` disallows functions that have ``Any`` in their signature
+  after decorator transformation.
+
+- ``--disallow-any-explicit`` disallows explicit ``Any`` in type positions such as type
+  annotations and generic type parameters.
+
+- ``--disallow-any-generics`` disallows usage of generic types that do not specify explicit
+  type parameters. Moreover, built-in collections (such as ``list`` and
+  ``dict``) become disallowed as you should use their aliases from the typing
+  module (such as ``List[int]`` and ``Dict[str, str]``).
+
 
 Additional command line flags
 *****************************
@@ -277,42 +309,6 @@ Here are some more useful flags:
   re-check your code without ``--strict-optional`` to ensure new type errors
   are not introduced.
 
-.. _disallow-any:
-
-- ``--disallow-any`` disallows various types of ``Any`` in a module.
-  The option takes a comma-separated list of the following values:
-  ``unimported``, ``unannotated``, ``expr``, ``decorated``, ``explicit``,
-  ``generics``.
-
-  ``unimported`` disallows usage of types that come from unfollowed imports
-  (such types become aliases for ``Any``). Unfollowed imports occur either
-  when the imported module does not exist or when ``--follow-imports=skip``
-  is set.
-
-  ``unannotated`` disallows function definitions that are not fully
-  typed (i.e. that are missing an explicit type annotation for any
-  of the parameters or the return type). ``unannotated`` option is
-  interchangeable with ``--disallow-untyped-defs``.
-
-  ``expr`` disallows all expressions in the module that have type ``Any``.
-  If an expression of type ``Any`` appears anywhere in the module
-  mypy will output an error unless the expression is immediately
-  used as an argument to ``cast`` or assigned to a variable with an
-  explicit type annotation. In addition, declaring a variable of type ``Any``
-  or casting to type ``Any`` is not allowed. Note that calling functions
-  that take parameters of type ``Any`` is still allowed.
-
-  ``decorated`` disallows functions that have ``Any`` in their signature
-  after decorator transformation.
-
-  ``explicit`` disallows explicit ``Any`` in type positions such as type
-  annotations and generic type parameters.
-
-  ``generics`` disallows usage of generic types that do not specify explicit
-  type parameters. Moreover, built-in collections (such as ``list`` and
-  ``dict``) become disallowed as you should use their aliases from the typing
-  module (such as ``List[int]`` and ``Dict[str, str]``).
-
 - ``--disallow-untyped-defs`` reports an error whenever it encounters
   a function definition without type annotations.
 
diff --git a/docs/source/common_issues.rst b/docs/source/common_issues.rst
index e31d9a7..7141b14 100644
--- a/docs/source/common_issues.rst
+++ b/docs/source/common_issues.rst
@@ -14,7 +14,7 @@ Can't install mypy using pip
 
 If installation fails, you've probably hit one of these issues:
 
-* Mypy needs Python 3.3 or later to run.
+* Mypy needs Python 3.4 or later to run.
 * You may have to run pip like this:
   ``python3 -m pip install mypy``.
 
@@ -472,3 +472,27 @@ Here's the above example modified to use ``MYPY``:
 
    def listify(arg: 'bar.BarClass') -> 'List[bar.BarClass]':
        return [arg]
+
+
+.. _silencing-linters:
+
+Silencing linters
+-----------------
+
+In some cases, linters will complain about unused imports or code. In
+these cases, you can silence them with a comment after type comments, or on
+the same line as the import:
+
+.. code-block:: python
+
+   # to silence complaints about unused imports
+   from typing import List  # noqa
+   a = None  # type: List[int]
+
+
+To silence the linter on the same line as a type comment
+put the linter comment *after* the type comment:
+
+.. code-block:: python
+
+    a = some_complex_thing()  # type: ignore  # noqa
diff --git a/docs/source/config_file.rst b/docs/source/config_file.rst
index 9f5c832..a6b6457 100644
--- a/docs/source/config_file.rst
+++ b/docs/source/config_file.rst
@@ -142,13 +142,13 @@ overridden by the pattern sections matching the module name.
   ``error``.  For explanations see the discussion for the
   :ref:`--follow-imports <follow-imports>` command line flag.  Note
   that if pattern matching is used, the pattern should match the name
-  of the _imported_ module, not the module containing the import
+  of the *imported* module, not the module containing the import
   statement.
 
 - ``ignore_missing_imports`` (Boolean, default False) suppress error
   messages about imports that cannot be resolved.  Note that if
   pattern matching is used, the pattern should match the name of the
-  _imported_ module, not the module containing the import statement.
+  *imported* module, not the module containing the import statement.
 
 - ``silent_imports`` (Boolean, deprecated) equivalent to
   ``follow_imports=skip`` plus ``ignore_missing_imports=True``.
@@ -156,11 +156,20 @@ overridden by the pattern sections matching the module name.
 - ``almost_silent`` (Boolean, deprecated) equivalent to
   ``follow_imports=skip``.
 
-- ``disallow_any`` (Comma-separated list, default empty) is an option to
-  disallow various types of ``Any`` in a module. The flag takes a
-  comma-separated list of the following arguments: ``unimported``,
-  ``unannotated``, ``expr``, ``decorated``, ``explicit``, ``generics``.
-  For explanations see the discussion for the :ref:`--disallow-any <disallow-any>` option.
+- ``disallow_any_unimported`` (Boolean, default false) disallows usage of types that come
+  from unfollowed imports (such types become aliases for ``Any``).
+
+- ``disallow_any_expr`` (Boolean, default false) disallows all expressions in the module
+  that have type ``Any``.
+
+- ``disallow_any_decorated`` (Boolean, default false) disallows functions that have ``Any``
+  in their signature after decorator transformation.
+
+- ``disallow_any_explicit`` (Boolean, default false) disallows explicit ``Any`` in type
+  positions such as type annotations and generic type parameters.
+
+- ``disallow_any_generics`` (Boolean, default false) disallows usage of generic types that
+  do not specify explicit type parameters.
 
 - ``disallow_subclassing_any`` (Boolean, default False) disallows
   subclassing a value of type ``Any``.  See
@@ -200,8 +209,8 @@ overridden by the pattern sections matching the module name.
 - ``no_implicit_optional`` (Boolean, default false) changes the treatment of
   arguments with a default value of None by not implicitly making their type Optional
 
-Example
-*******
+Examples
+********
 
 You might put this in your ``mypy.ini`` file at the root of your repo:
 
@@ -218,6 +227,17 @@ for all mypy runs in this tree, and also selectively turns on the
 package.  This issues an error for function definitions without
 type annotations in that subdirectory only.
 
+If you would like to ignore specific imports, instead of ignoring all missing
+imports with ``--ignore-missing-imports``, use a section of the configuration
+file per module such as the following to ignore missing imports from
+``lib_module``:
+
+.. code-block:: text
+
+    [mypy-lib_module]
+    ignore_missing_imports = True
+
+
 .. note::
 
    Configuration flags are liable to change between releases.
diff --git a/docs/source/faq.rst b/docs/source/faq.rst
index b131e3f..ff1b801 100644
--- a/docs/source/faq.rst
+++ b/docs/source/faq.rst
@@ -108,10 +108,11 @@ Mypy provides support for both `nominal subtyping
 <https://en.wikipedia.org/wiki/Nominative_type_system>`_ and
 `structural subtyping
 <https://en.wikipedia.org/wiki/Structural_type_system>`_.
-Support for structural subtyping is considered experimental.
 Some argue that structural subtyping is better suited for languages with duck
 typing such as Python. Mypy however primarily uses nominal subtyping,
-leaving structural subtyping opt-in. Here are some reasons why:
+leaving structural subtyping mostly opt-in (except for built-in protocols
+such as ``Iterable`` that always support structural subtyping). Here are some
+reasons why:
 
 1. It is easy to generate short and informative error messages when
    using a nominal type system. This is especially important when
@@ -119,12 +120,12 @@ leaving structural subtyping opt-in. Here are some reasons why:
 
 2. Python provides built-in support for nominal ``isinstance()`` tests and
    they are widely used in programs. Only limited support for structural
-   ``isinstance()`` exists for ABCs in ``collections.abc`` and ``typing``
-   standard library modules.
+   ``isinstance()`` is available, and it's less type safe than
+   nominal type tests.
 
-3. Many programmers are already familiar with nominal subtyping and it
+3. Many programmers are already familiar with static, nominal subtyping and it
    has been successfully used in languages such as Java, C++ and
-   C#. Only few languages use structural subtyping.
+   C#. Fewer languages use structural subtyping.
 
 However, structural subtyping can also be useful. For example, a "public API"
 may be more flexible if it is typed with protocols. Also, using protocol types
diff --git a/docs/source/generics.rst b/docs/source/generics.rst
index 2ea9b42..00e80e9 100644
--- a/docs/source/generics.rst
+++ b/docs/source/generics.rst
@@ -112,21 +112,27 @@ non-generic. For example:
 
 .. code-block:: python
 
-   from typing import Generic, TypeVar, Iterable
+   from typing import Generic, TypeVar, Mapping, Iterator, Dict
 
-   T = TypeVar('T')
+   KT = TypeVar('KT')
+   VT = TypeVar('VT')
 
-   class Stream(Iterable[T]):  # This is a generic subclass of Iterable
-       def __iter__(self) -> Iterator[T]:
+   class MyMap(Mapping[KT, VT]):  # This is a generic subclass of Mapping
+       def __getitem__(self, k: KT) -> VT:
+           ...  # Implementations omitted
+       def __iter__(self) -> Iterator[KT]:
+           ...
+       def __len__(self) -> int:
            ...
 
-   input: Stream[int]  # Okay
+   items: MyMap[str, int]  # Okay
 
-   class Codes(Iterable[int]):  # This is a non-generic subclass of Iterable
-       def __iter__(self) -> Iterator[int]:
-           ...
+   class StrDict(Dict[str, str]):  # This is a non-generic subclass of Dict
+       def __str__(self) -> str:
+           return 'StrDict({})'.format(super().__str__())
 
-   output: Codes[int]  # Error! Codes is not generic
+   data: StrDict[int, int]  # Error! StrDict is not generic
+   data2: StrDict  # OK
 
    class Receiver(Generic[T]):
        def accept(self, value: T) -> None:
@@ -137,15 +143,15 @@ non-generic. For example:
 
 .. note::
 
-    You have to add an explicit ``Iterable`` (or ``Iterator``) base class
-    if you want mypy to consider a user-defined class as iterable (and
-    ``Sequence`` for sequences, etc.). This is because mypy doesn't support
-    *structural subtyping* and just having an ``__iter__`` method defined is
-    not sufficient to make mypy treat a class as iterable.
+    You have to add an explicit ``Mapping`` base class
+    if you want mypy to consider a user-defined class as a mapping (and
+    ``Sequence`` for sequences, etc.). This is because mypy doesn't use
+    *structural subtyping* for these ABCs, unlike simpler protocols
+    like ``Iterable``, which use :ref:`structural subtyping <protocol-types>`.
 
 ``Generic[...]`` can be omitted from bases if there are
-other base classes that include type variables, such as ``Iterable[T]`` in
-the above example. If you include ``Generic[...]`` in bases, then
+other base classes that include type variables, such as ``Mapping[KT, VT]``
+in the above example. If you include ``Generic[...]`` in bases, then
 it should list all type variables present in other bases (or more,
 if needed). The order of type variables is defined by the following
 rules:
@@ -549,7 +555,9 @@ problem. This is also the reason for the ``cast()`` call in the
 Generic protocols
 *****************
 
-Mypy supports generic protocols (see also :ref:`protocol-types`). Generic
+Mypy supports generic protocols (see also :ref:`protocol-types`). Several
+:ref:`predefined protocols <predefined_protocols>` are generic, such as
+``Iterable[T]``, and you can define additional generic protocols. Generic
 protocols mostly follow the normal rules for generic classes. Example:
 
 .. code-block:: python
diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst
index 310cd1a..3df67d3 100644
--- a/docs/source/getting_started.rst
+++ b/docs/source/getting_started.rst
@@ -6,7 +6,7 @@ Getting started
 Installation
 ************
 
-Mypy requires Python 3.3 or later.  Once you've `installed Python 3 <https://www.python.org/downloads/>`_,
+Mypy requires Python 3.4 or later.  Once you've `installed Python 3 <https://www.python.org/downloads/>`_,
 you can install mypy with:
 
 .. code-block:: text
diff --git a/docs/source/kinds_of_types.rst b/docs/source/kinds_of_types.rst
index 1580a1b..d55926a 100644
--- a/docs/source/kinds_of_types.rst
+++ b/docs/source/kinds_of_types.rst
@@ -1166,8 +1166,8 @@ TypedDict
 
 .. note::
 
-   TypedDict is not yet an officially supported feature.  It may not work reliably,
-   and details of TypedDict may change in future mypy releases.
+   TypedDict is an officially supported feature, but it is still experimental.
+
 
 Python programs often use dictionaries with string keys to represent objects.
 Here is a typical example:
@@ -1307,7 +1307,9 @@ Class-based syntax
 ------------------
 
 Python 3.6 supports an alternative, class-based syntax to define a
-TypedDict:
+TypedDict. This means that your code must be checked as if it were
+Python 3.6 (using the ``--python-version`` flag on the command line,
+for example). Simply running mypy on Python 3.6 is insufficient.
 
 .. code-block:: python
 
diff --git a/docs/source/revision_history.rst b/docs/source/revision_history.rst
index fbfd676..60ee878 100644
--- a/docs/source/revision_history.rst
+++ b/docs/source/revision_history.rst
@@ -3,6 +3,28 @@ Revision history
 
 List of major changes:
 
+- December 2017
+    * Publish ``mypy`` version 0.560 on PyPI.
+
+      * Various types in ``typing`` that used to be ABCs
+        :ref:`are now protocols <predefined_protocols>`
+        and support :ref:`structural subtyping <protocol-types>`.
+
+      * Explain how to :ref:`silence invalid complaints <silencing-linters>`
+        by linters about unused imports due to type comments.
+
+- November 2017
+    * Publish ``mypy`` version 0.550 on PyPI.
+
+      * Running mypy now requires Python 3.4 or higher.
+        However Python 3.3 is still valid for the target
+        of the analysis (i.e. the ``--python-version`` flag).
+
+      * Split ``--disallow-any`` flag into
+        :ref:`separate boolean flags <disallow-any>`.
+
+      * The ``--old-html-report`` flag was removed.
+
 - October 2017
     * Publish ``mypy`` version 0.540 on PyPI.
 
diff --git a/extensions/LICENSE b/extensions/LICENSE
new file mode 100644
index 0000000..bdb7786
--- /dev/null
+++ b/extensions/LICENSE
@@ -0,0 +1,27 @@
+Mypy extensions are licensed under the terms of the MIT license, reproduced below.
+
+= = = = =
+
+The MIT License
+
+Copyright (c) 2016-2017 Jukka Lehtosalo and contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
+= = = = =
diff --git a/extensions/MANIFEST.in b/extensions/MANIFEST.in
new file mode 100644
index 0000000..1aba38f
--- /dev/null
+++ b/extensions/MANIFEST.in
@@ -0,0 +1 @@
+include LICENSE
diff --git a/extensions/setup.py b/extensions/setup.py
index 0ec4ba1..b4918f4 100644
--- a/extensions/setup.py
+++ b/extensions/setup.py
@@ -40,4 +40,7 @@ setup(
     platforms=['POSIX'],
     py_modules=['mypy_extensions'],
     classifiers=classifiers,
+    install_requires=[
+        'typing >= 3.5.3; python_version < "3.5"',
+    ],
 )
diff --git a/mypy.egg-info/PKG-INFO b/mypy.egg-info/PKG-INFO
index c85de6a..a40251e 100644
--- a/mypy.egg-info/PKG-INFO
+++ b/mypy.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: mypy
-Version: 0.540
+Version: 0.560
 Summary: Optional static typing for Python
 Home-page: http://www.mypy-lang.org/
 Author: Jukka Lehtosalo
@@ -24,7 +24,6 @@ Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: POSIX
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
diff --git a/mypy.egg-info/SOURCES.txt b/mypy.egg-info/SOURCES.txt
index 4dc9bb0..b432a3e 100644
--- a/mypy.egg-info/SOURCES.txt
+++ b/mypy.egg-info/SOURCES.txt
@@ -1,3 +1,4 @@
+LICENSE
 MANIFEST.in
 README.md
 mypy_self_check.ini
@@ -33,6 +34,8 @@ docs/source/python36.rst
 docs/source/revision_history.rst
 docs/source/supported_python_features.rst
 docs/source/type_inference_and_annotations.rst
+extensions/LICENSE
+extensions/MANIFEST.in
 extensions/README.md
 extensions/mypy_extensions.py
 extensions/setup.cfg
@@ -49,6 +52,9 @@ mypy/checkmember.py
 mypy/checkstrformat.py
 mypy/constraints.py
 mypy/defaults.py
+mypy/dmypy.py
+mypy/dmypy_server.py
+mypy/dmypy_util.py
 mypy/erasetype.py
 mypy/errors.py
 mypy/expandtype.py
@@ -88,7 +94,6 @@ mypy/traverser.py
 mypy/treetransform.py
 mypy/tvar_scope.py
 mypy/typeanal.py
-mypy/typefixture.py
 mypy/types.py
 mypy/typevars.py
 mypy/util.py
@@ -122,6 +127,7 @@ mypy/test/testcheck.py
 mypy/test/testcmdline.py
 mypy/test/testdeps.py
 mypy/test/testdiff.py
+mypy/test/testdmypy.py
 mypy/test/testextensions.py
 mypy/test/testfinegrained.py
 mypy/test/testgraph.py
@@ -138,9 +144,12 @@ mypy/test/testsubtypes.py
 mypy/test/testtransform.py
 mypy/test/testtypegen.py
 mypy/test/testtypes.py
+mypy/test/typefixture.py
 mypy/test/update.py
+scripts/dmypy
 scripts/dumpmodule.py
 scripts/find_type.py
+scripts/finegrained.py
 scripts/mypy
 scripts/mypy.bat
 scripts/myunit
@@ -258,8 +267,16 @@ test-data/unit/check-unsupported.test
 test-data/unit/check-varargs.test
 test-data/unit/check-warnings.test
 test-data/unit/cmdline.test
+test-data/unit/deps-classes.test
+test-data/unit/deps-expressions.test
+test-data/unit/deps-generics.test
+test-data/unit/deps-statements.test
+test-data/unit/deps-types.test
 test-data/unit/deps.test
 test-data/unit/diff.test
+test-data/unit/fine-grained-blockers.test
+test-data/unit/fine-grained-cycles.test
+test-data/unit/fine-grained-modules.test
 test-data/unit/fine-grained.test
 test-data/unit/merge.test
 test-data/unit/parse-errors.test
@@ -323,6 +340,9 @@ test-data/unit/fixtures/typing-full.pyi
 test-data/unit/fixtures/union.pyi
 test-data/unit/lib-stub/__builtin__.pyi
 test-data/unit/lib-stub/abc.pyi
+test-data/unit/lib-stub/blocker.pyi
+test-data/unit/lib-stub/blocker2.pyi
+test-data/unit/lib-stub/broken.pyi
 test-data/unit/lib-stub/builtins.pyi
 test-data/unit/lib-stub/collections.pyi
 test-data/unit/lib-stub/contextlib.pyi
@@ -370,7 +390,6 @@ typeshed/stdlib/2/_symtable.pyi
 typeshed/stdlib/2/_threading_local.pyi
 typeshed/stdlib/2/_warnings.pyi
 typeshed/stdlib/2/abc.pyi
-typeshed/stdlib/2/array.pyi
 typeshed/stdlib/2/ast.pyi
 typeshed/stdlib/2/atexit.pyi
 typeshed/stdlib/2/builtins.pyi
@@ -492,6 +511,7 @@ typeshed/stdlib/2and3/_random.pyi
 typeshed/stdlib/2and3/_weakref.pyi
 typeshed/stdlib/2and3/_weakrefset.pyi
 typeshed/stdlib/2and3/argparse.pyi
+typeshed/stdlib/2and3/array.pyi
 typeshed/stdlib/2and3/asynchat.pyi
 typeshed/stdlib/2and3/asyncore.pyi
 typeshed/stdlib/2and3/base64.pyi
@@ -524,6 +544,7 @@ typeshed/stdlib/2and3/fractions.pyi
 typeshed/stdlib/2and3/ftplib.pyi
 typeshed/stdlib/2and3/grp.pyi
 typeshed/stdlib/2and3/hmac.pyi
+typeshed/stdlib/2and3/imaplib.pyi
 typeshed/stdlib/2and3/keyword.pyi
 typeshed/stdlib/2and3/linecache.pyi
 typeshed/stdlib/2and3/locale.pyi
@@ -669,7 +690,6 @@ typeshed/stdlib/3/_thread.pyi
 typeshed/stdlib/3/_threading_local.pyi
 typeshed/stdlib/3/_warnings.pyi
 typeshed/stdlib/3/abc.pyi
-typeshed/stdlib/3/array.pyi
 typeshed/stdlib/3/ast.pyi
 typeshed/stdlib/3/atexit.pyi
 typeshed/stdlib/3/builtins.pyi
@@ -798,10 +818,12 @@ typeshed/stdlib/3/json/__init__.pyi
 typeshed/stdlib/3/json/decoder.pyi
 typeshed/stdlib/3/json/encoder.pyi
 typeshed/stdlib/3/multiprocessing/__init__.pyi
+typeshed/stdlib/3/multiprocessing/connection.pyi
 typeshed/stdlib/3/multiprocessing/context.pyi
 typeshed/stdlib/3/multiprocessing/managers.pyi
 typeshed/stdlib/3/multiprocessing/pool.pyi
 typeshed/stdlib/3/multiprocessing/process.pyi
+typeshed/stdlib/3/multiprocessing/synchronize.pyi
 typeshed/stdlib/3/os/__init__.pyi
 typeshed/stdlib/3/os/path.pyi
 typeshed/stdlib/3/sqlite3/__init__.pyi
@@ -823,7 +845,6 @@ typeshed/stdlib/3/wsgiref/validate.pyi
 typeshed/tests/mypy_selftest.py
 typeshed/tests/mypy_test.py
 typeshed/tests/pytype_test.py
-typeshed/third_party/2/croniter.pyi
 typeshed/third_party/2/enum.pyi
 typeshed/third_party/2/gflags.pyi
 typeshed/third_party/2/itsdangerous.pyi
@@ -892,14 +913,6 @@ typeshed/third_party/2/six/moves/urllib/parse.pyi
 typeshed/third_party/2/six/moves/urllib/request.pyi
 typeshed/third_party/2/six/moves/urllib/response.pyi
 typeshed/third_party/2/six/moves/urllib/robotparser.pyi
-typeshed/third_party/2/thrift/Thrift.pyi
-typeshed/third_party/2/thrift/__init__.pyi
-typeshed/third_party/2/thrift/protocol/TBinaryProtocol.pyi
-typeshed/third_party/2/thrift/protocol/TProtocol.pyi
-typeshed/third_party/2/thrift/protocol/__init__.pyi
-typeshed/third_party/2/thrift/transport/TSocket.pyi
-typeshed/third_party/2/thrift/transport/TTransport.pyi
-typeshed/third_party/2/thrift/transport/__init__.pyi
 typeshed/third_party/2/tornado/__init__.pyi
 typeshed/third_party/2/tornado/concurrent.pyi
 typeshed/third_party/2/tornado/gen.pyi
@@ -954,6 +967,7 @@ typeshed/third_party/2/werkzeug/debug/repr.pyi
 typeshed/third_party/2/werkzeug/debug/tbtools.pyi
 typeshed/third_party/2and3/backports_abc.pyi
 typeshed/third_party/2and3/certifi.pyi
+typeshed/third_party/2and3/croniter.pyi
 typeshed/third_party/2and3/emoji.pyi
 typeshed/third_party/2and3/mypy_extensions.pyi
 typeshed/third_party/2and3/singledispatch.pyi
@@ -1153,6 +1167,14 @@ typeshed/third_party/2and3/requests/packages/urllib3/util/retry.pyi
 typeshed/third_party/2and3/requests/packages/urllib3/util/ssl_.pyi
 typeshed/third_party/2and3/requests/packages/urllib3/util/timeout.pyi
 typeshed/third_party/2and3/requests/packages/urllib3/util/url.pyi
+typeshed/third_party/2and3/thrift/Thrift.pyi
+typeshed/third_party/2and3/thrift/__init__.pyi
+typeshed/third_party/2and3/thrift/protocol/TBinaryProtocol.pyi
+typeshed/third_party/2and3/thrift/protocol/TProtocol.pyi
+typeshed/third_party/2and3/thrift/protocol/__init__.pyi
+typeshed/third_party/2and3/thrift/transport/TSocket.pyi
+typeshed/third_party/2and3/thrift/transport/TTransport.pyi
+typeshed/third_party/2and3/thrift/transport/__init__.pyi
 typeshed/third_party/2and3/yaml/__init__.pyi
 typeshed/third_party/2and3/yaml/composer.pyi
 typeshed/third_party/2and3/yaml/constructor.pyi
@@ -1192,9 +1214,6 @@ typeshed/third_party/3/jwt/contrib/__init__.pyi
 typeshed/third_party/3/jwt/contrib/algorithms/__init__.pyi
 typeshed/third_party/3/jwt/contrib/algorithms/py_ecdsa.pyi
 typeshed/third_party/3/jwt/contrib/algorithms/pycrypto.pyi
-typeshed/third_party/3/lxml/__init__.pyi
-typeshed/third_party/3/lxml/etree.pyi
-typeshed/third_party/3/lxml/objectify.pyi
 typeshed/third_party/3/six/__init__.pyi
 typeshed/third_party/3/six/moves/__init__.pyi
 typeshed/third_party/3/six/moves/urllib/__init__.pyi
diff --git a/mypy.egg-info/entry_points.txt b/mypy.egg-info/entry_points.txt
index b385ace..7431f67 100644
--- a/mypy.egg-info/entry_points.txt
+++ b/mypy.egg-info/entry_points.txt
@@ -1,4 +1,5 @@
 [console_scripts]
+dmypy = mypy.dmypy:main
 mypy = mypy.__main__:console_entry
 stubgen = mypy.stubgen:main
 
diff --git a/mypy.egg-info/requires.txt b/mypy.egg-info/requires.txt
index 39548ec..39d9773 100644
--- a/mypy.egg-info/requires.txt
+++ b/mypy.egg-info/requires.txt
@@ -1,4 +1,5 @@
 typed-ast<1.2.0,>=1.1.0
+psutil<5.5.0,>=5.4.0
 
 [:python_version < "3.5"]
 typing>=3.5.3
diff --git a/mypy/build.py b/mypy/build.py
index 891fc3c..aa8830d 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -13,24 +13,27 @@ The function build() is the main interface to this module.
 import binascii
 import collections
 import contextlib
+from distutils.sysconfig import get_python_lib
+import gc
 import hashlib
 import json
 import os.path
 import re
 import site
+import stat
 import sys
 import time
 from os.path import dirname, basename
 import errno
 
-from typing import (AbstractSet, Dict, Iterable, Iterator, List, cast, Any,
-                    NamedTuple, Optional, Set, Tuple, Union, Callable)
+from typing import (AbstractSet, Any, cast, Dict, Iterable, Iterator, List,
+                    Mapping, NamedTuple, Optional, Set, Tuple, Union, Callable)
 # Can't use TYPE_CHECKING because it's not in the Python 3.5.1 stdlib
 MYPY = False
 if MYPY:
     from typing import Deque
 
-from mypy.nodes import (MypyFile, Node, ImportBase, Import, ImportFrom, ImportAll)
+from mypy.nodes import (MODULE_REF, MypyFile, Node, ImportBase, Import, ImportFrom, ImportAll)
 from mypy.semanal_pass1 import SemanticAnalyzerPass1
 from mypy.semanal import SemanticAnalyzerPass2
 from mypy.semanal_pass3 import SemanticAnalyzerPass3
@@ -51,10 +54,6 @@ from mypy.plugin import Plugin, DefaultPlugin, ChainedPlugin
 from mypy.defaults import PYTHON3_VERSION_MIN
 
 
-# We need to know the location of this file to load data, but
-# until Python 3.4, __file__ is relative.
-__file__ = os.path.realpath(__file__)
-
 PYTHON_EXTENSIONS = ['.pyi', '.py']
 
 
@@ -80,7 +79,7 @@ class BuildResult:
         self.manager = manager
         self.graph = graph
         self.files = manager.modules
-        self.types = manager.all_types
+        self.types = manager.all_types  # Non-empty for tests only or if dumping deps
         self.errors = manager.errors.messages()
 
 
@@ -91,6 +90,11 @@ class BuildSource:
         self.module = module or '__main__'
         self.text = text
 
+    def __repr__(self) -> str:
+        return '<BuildSource path=%r module=%r has_text=%s>' % (self.path,
+                                                                self.module,
+                                                                self.text is not None)
+
 
 class BuildSourceSet:
     """Efficiently test a file's membership in the set of build sources."""
@@ -119,10 +123,17 @@ class BuildSourceSet:
             return False
 
 
+# A dict containing saved cache data from a previous run.  This will
+# be updated in place with newly computed cache data.  See dmypy.py.
+SavedCache = Dict[str, Tuple['CacheMeta', MypyFile, Dict[Expression, Type]]]
+
+
 def build(sources: List[BuildSource],
           options: Options,
           alt_lib_path: Optional[str] = None,
-          bin_dir: Optional[str] = None) -> BuildResult:
+          bin_dir: Optional[str] = None,
+          saved_cache: Optional[SavedCache] = None,
+          ) -> BuildResult:
     """Analyze a program.
 
     A single call to build performs parsing, semantic analysis and optionally
@@ -138,7 +149,10 @@ def build(sources: List[BuildSource],
         (takes precedence over other directories)
       bin_dir: directory containing the mypy script, used for finding data
         directories; if omitted, use '.' as the data directory
+      saved_cache: optional dict with saved cache state for dmypy (read-write!)
     """
+    # This seems the most reasonable place to tune garbage collection.
+    gc.set_threshold(50000)
 
     data_dir = default_data_dir(bin_dir)
 
@@ -167,6 +181,8 @@ def build(sources: List[BuildSource],
         # multiple builds, there could be a mix of files/modules, so its easier
         # to just define the semantics that we always add the current director
         # to the lib_path
+        # TODO: Don't do this in some cases; for motivation see
+        # https://github.com/python/mypy/issues/4195#issuecomment-341915031
         lib_path.insert(0, os.getcwd())
 
     # Prepend a config-defined mypy path.
@@ -195,16 +211,16 @@ def build(sources: List[BuildSource],
                            options=options,
                            version_id=__version__,
                            plugin=plugin,
-                           errors=errors)
+                           errors=errors,
+                           saved_cache=saved_cache)
 
     try:
         graph = dispatch(sources, manager)
         return BuildResult(manager, graph)
     finally:
-        manager.log("Build finished in %.3f seconds with %d modules, %d types, and %d errors" %
+        manager.log("Build finished in %.3f seconds with %d modules, and %d errors" %
                     (time.time() - manager.start_time,
                      len(manager.modules),
-                     len(manager.all_types),
                      manager.errors.num_messages()))
         # Finish the HTML or XML reports even if CompileError was raised.
         reports.finish()
@@ -218,7 +234,12 @@ def default_data_dir(bin_dir: Optional[str]) -> str:
     """
     if not bin_dir:
         if os.name == 'nt':
-            prefixes = [os.path.join(sys.prefix, 'Lib'), os.path.join(site.getuserbase(), 'lib')]
+            prefixes = [os.path.join(sys.prefix, 'Lib')]
+            try:
+                prefixes.append(os.path.join(site.getuserbase(), 'lib'))
+            except AttributeError:
+                # getuserbase is not available in virtualenvs
+                prefixes.append(os.path.join(get_python_lib(), 'lib'))
             for parent in prefixes:
                     data_dir = os.path.join(parent, 'mypy')
                     if os.path.exists(data_dir):
@@ -320,6 +341,7 @@ def default_lib_path(data_dir: str,
 CacheMeta = NamedTuple('CacheMeta',
                        [('id', str),
                         ('path', str),
+                        ('memory_only', bool),  # no corresponding json files (fine-grained only)
                         ('mtime', int),
                         ('size', int),
                         ('hash', str),
@@ -339,6 +361,28 @@ CacheMeta = NamedTuple('CacheMeta',
 # silent mode or simply not found.
 
 
+def cache_meta_from_dict(meta: Dict[str, Any], data_json: str) -> CacheMeta:
+    sentinel = None  # type: Any  # Values to be validated by the caller
+    return CacheMeta(
+        meta.get('id', sentinel),
+        meta.get('path', sentinel),
+        meta.get('memory_only', False),
+        int(meta['mtime']) if 'mtime' in meta else sentinel,
+        meta.get('size', sentinel),
+        meta.get('hash', sentinel),
+        meta.get('dependencies', []),
+        int(meta['data_mtime']) if 'data_mtime' in meta else sentinel,
+        data_json,
+        meta.get('suppressed', []),
+        meta.get('child_modules', []),
+        meta.get('options'),
+        meta.get('dep_prios', []),
+        meta.get('interface_hash', ''),
+        meta.get('version_id', sentinel),
+        meta.get('ignore_all', True),
+    )
+
+
 # Priorities used for imports.  (Here, top-level includes inside a class.)
 # These are used to determine a more predictable order in which the
 # nodes in an import cycle are processed.
@@ -452,8 +496,6 @@ def find_config_file_line_number(path: str, section: str, setting_name: str) ->
     return -1
 
 
-# TODO: Get rid of all_types.  It's not used except for one log message.
-#       Maybe we could instead publish a map from module ID to its type_map.
 class BuildManager:
     """This class holds shared state for building a mypy program.
 
@@ -469,13 +511,16 @@ class BuildManager:
                        Semantic analyzer, pass 2
       semantic_analyzer_pass3:
                        Semantic analyzer, pass 3
-      all_types:       Map {Expression: Type} collected from all modules
+      all_types:       Map {Expression: Type} collected from all modules (tests only)
       options:         Build options
       missing_modules: Set of modules that could not be imported encountered so far
       stale_modules:   Set of modules that needed to be rechecked (only used by tests)
       version_id:      The current mypy version (based on commit id when possible)
       plugin:          Active mypy plugin(s)
       errors:          Used for reporting all errors
+      saved_cache:     Dict with saved cache state for dmypy and fine-grained incremental mode
+                       (read-write!)
+      stats:           Dict with various instrumentation numbers
     """
 
     def __init__(self, data_dir: str,
@@ -486,7 +531,9 @@ class BuildManager:
                  options: Options,
                  version_id: str,
                  plugin: Plugin,
-                 errors: Errors) -> None:
+                 errors: Errors,
+                 saved_cache: Optional[SavedCache] = None,
+                 ) -> None:
         self.start_time = time.time()
         self.data_dir = data_dir
         self.errors = errors
@@ -501,14 +548,15 @@ class BuildManager:
         self.plugin = plugin
         self.semantic_analyzer = SemanticAnalyzerPass2(self.modules, self.missing_modules,
                                                   lib_path, self.errors, self.plugin)
-        self.modules = self.semantic_analyzer.modules
         self.semantic_analyzer_pass3 = SemanticAnalyzerPass3(self.modules, self.errors,
                                                              self.semantic_analyzer)
-        self.all_types = {}  # type: Dict[Expression, Type]
+        self.all_types = {}  # type: Dict[Expression, Type]  # Used by tests only
         self.indirection_detector = TypeIndirectionVisitor()
         self.stale_modules = set()  # type: Set[str]
         self.rechecked_modules = set()  # type: Set[str]
         self.plugin = plugin
+        self.saved_cache = saved_cache if saved_cache is not None else {}  # type: SavedCache
+        self.stats = {}  # type: Dict[str, Any]  # Values are ints or floats
 
     def maybe_swap_for_shadow_path(self, path: str) -> str:
         if (self.options.shadow_file and
@@ -590,8 +638,11 @@ class BuildManager:
         Raise CompileError if there is a parse error.
         """
         num_errs = self.errors.num_messages()
-        tree = parse(source, path, self.errors, options=self.options)
+        tree = parse(source, path, id, self.errors, options=self.options)
         tree._fullname = id
+        self.add_stats(files_parsed=1,
+                       modules_parsed=int(not tree.is_stub),
+                       stubs_parsed=int(tree.is_stub))
 
         if self.errors.num_messages() != num_errs:
             self.log("Bailing due to parse errors")
@@ -600,19 +651,24 @@ class BuildManager:
         self.errors.set_file_ignored_lines(path, tree.ignored_lines, ignore_errors)
         return tree
 
-    def module_not_found(self, path: str, line: int, id: str) -> None:
-        self.errors.set_file(path, id)
+    def module_not_found(self, path: str, source: str, line: int, target: str) -> None:
+        self.errors.set_file(path, source)
         stub_msg = "(Stub files are from https://github.com/python/typeshed)"
-        if ((self.options.python_version[0] == 2 and moduleinfo.is_py2_std_lib_module(id)) or
-                (self.options.python_version[0] >= 3 and moduleinfo.is_py3_std_lib_module(id))):
+        if target == 'builtins':
+            self.errors.report(line, 0, "Cannot find 'builtins' module. Typeshed appears broken!",
+                               blocker=True)
+            self.errors.raise_error()
+        elif ((self.options.python_version[0] == 2 and moduleinfo.is_py2_std_lib_module(target))
+              or (self.options.python_version[0] >= 3
+                  and moduleinfo.is_py3_std_lib_module(target))):
             self.errors.report(
-                line, 0, "No library stub file for standard library module '{}'".format(id))
+                line, 0, "No library stub file for standard library module '{}'".format(target))
             self.errors.report(line, 0, stub_msg, severity='note', only_once=True)
-        elif moduleinfo.is_third_party_module(id):
-            self.errors.report(line, 0, "No library stub file for module '{}'".format(id))
+        elif moduleinfo.is_third_party_module(target):
+            self.errors.report(line, 0, "No library stub file for module '{}'".format(target))
             self.errors.report(line, 0, stub_msg, severity='note', only_once=True)
         else:
-            self.errors.report(line, 0, "Cannot find module named '{}'".format(id))
+            self.errors.report(line, 0, "Cannot find module named '{}'".format(target))
             self.errors.report(line, 0, '(Perhaps setting MYPYPATH '
                                'or using the "--ignore-missing-imports" flag would help)',
                                severity='note', only_once=True)
@@ -626,7 +682,10 @@ class BuildManager:
 
     def log(self, *message: str) -> None:
         if self.options.verbosity >= 1:
-            print('LOG: ', *message, file=sys.stderr)
+            if message:
+                print('LOG: ', *message, file=sys.stderr)
+            else:
+                print(file=sys.stderr)
             sys.stderr.flush()
 
     def trace(self, *message: str) -> None:
@@ -634,6 +693,16 @@ class BuildManager:
             print('TRACE:', *message, file=sys.stderr)
             sys.stderr.flush()
 
+    def add_stats(self, **kwds: Any) -> None:
+        for key, value in kwds.items():
+            if key in self.stats:
+                self.stats[key] += value
+            else:
+                self.stats[key] = value
+
+    def stats_summary(self) -> Mapping[str, object]:
+        return self.stats
+
 
 def remove_cwd_prefix_from_path(p: str) -> str:
     """Remove current working directory prefix from p, if present.
@@ -679,11 +748,19 @@ find_module_dir_cache = {}  # type: Dict[Tuple[str, Tuple[str, ...]], List[str]]
 # gives us the case-correct filename on Windows and Mac.
 find_module_listdir_cache = {}  # type: Dict[str, Optional[List[str]]]
 
+# Cache for is_file()
+find_module_is_file_cache = {}  # type: Dict[str, bool]
+
+# Cache for isdir(join(head, tail))
+find_module_isdir_cache = {}  # type: Dict[Tuple[str, str], bool]
+
 
 def find_module_clear_caches() -> None:
     find_module_cache.clear()
     find_module_dir_cache.clear()
     find_module_listdir_cache.clear()
+    find_module_is_file_cache.clear()
+    find_module_isdir_cache.clear()
 
 
 def list_dir(path: str) -> Optional[List[str]]:
@@ -691,13 +768,13 @@ def list_dir(path: str) -> Optional[List[str]]:
 
     Returns None if the path doesn't exist or isn't a directory.
     """
-    if path in find_module_listdir_cache:
-        return find_module_listdir_cache[path]
-    try:
-        res = os.listdir(path)  # type: Optional[List[str]]
-    except OSError:
-        res = None
-    find_module_listdir_cache[path] = res
+    res = find_module_listdir_cache.get(path)
+    if res is None:
+        try:
+            res = os.listdir(path)
+        except OSError:
+            res = None
+        find_module_listdir_cache[path] = res
     return res
 
 
@@ -708,15 +785,16 @@ def is_file(path: str) -> bool:
     False if the case of the path's last component does not exactly
     match the case found in the filesystem.
     """
-    head, tail = os.path.split(path)
-    if not tail:
-        return False
-    names = list_dir(head)
-    if not names:
-        return False
-    if tail not in names:
-        return False
-    return os.path.isfile(path)
+    res = find_module_is_file_cache.get(path)
+    if res is None:
+        head, tail = os.path.split(path)
+        if not tail:
+            res = False
+        else:
+            names = list_dir(head)
+            res = names is not None and tail in names and os.path.isfile(path)
+        find_module_is_file_cache[path] = res
+    return res
 
 
 def find_module(id: str, lib_path_arg: Iterable[str]) -> Optional[str]:
@@ -734,8 +812,12 @@ def find_module(id: str, lib_path_arg: Iterable[str]) -> Optional[str]:
             dirs = []
             for pathitem in lib_path:
                 # e.g., '/usr/lib/python3.4/foo/bar'
-                dir = os.path.normpath(os.path.join(pathitem, dir_chain))
-                if os.path.isdir(dir):
+                isdir = find_module_isdir_cache.get((pathitem, dir_chain))
+                if isdir is None:
+                    dir = os.path.normpath(os.path.join(pathitem, dir_chain))
+                    isdir = os.path.isdir(dir)
+                    find_module_isdir_cache[pathitem, dir_chain] = isdir
+                if isdir:
                     dirs.append(dir)
             find_module_dir_cache[dir_chain, lib_path] = dirs
         candidate_base_dirs = find_module_dir_cache[dir_chain, lib_path]
@@ -804,7 +886,7 @@ def verify_module(id: str, path: str) -> bool:
         path = dirname(path)
     for i in range(id.count('.')):
         path = dirname(path)
-        if not any(os.path.isfile(os.path.join(path, '__init__{}'.format(extension)))
+        if not any(is_file(os.path.join(path, '__init__{}'.format(extension)))
                    for extension in PYTHON_EXTENSIONS):
             return False
     return True
@@ -880,38 +962,30 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
       A CacheMeta instance if the cache data was found and appears
       valid; otherwise None.
     """
+    saved_cache = manager.saved_cache
+    if id in saved_cache:
+        m, t, types = saved_cache[id]
+        manager.add_stats(reused_metas=1)
+        manager.trace("Reusing saved metadata for %s" % id)
+        # Note: it could still be skipped if the mtime/size/hash mismatches.
+        return m
+
     # TODO: May need to take more build options into account
     meta_json, data_json = get_cache_names(id, path, manager)
     manager.trace('Looking for {} at {}'.format(id, meta_json))
-    if not os.path.exists(meta_json):
+    try:
+        with open(meta_json, 'r') as f:
+            meta_str = f.read()
+            manager.trace('Meta {} {}'.format(id, meta_str.rstrip()))
+            meta = json.loads(meta_str)  # TODO: Errors
+    except IOError:
         manager.log('Could not load cache for {}: could not find {}'.format(id, meta_json))
         return None
-    with open(meta_json, 'r') as f:
-        meta_str = f.read()
-        manager.trace('Meta {} {}'.format(id, meta_str.rstrip()))
-        meta = json.loads(meta_str)  # TODO: Errors
     if not isinstance(meta, dict):
         manager.log('Could not load cache for {}: meta cache is not a dict: {}'
                     .format(id, repr(meta)))
         return None
-    sentinel = None  # type: Any  # the values will be post-validated below
-    m = CacheMeta(
-        meta.get('id', sentinel),
-        meta.get('path', sentinel),
-        int(meta['mtime']) if 'mtime' in meta else sentinel,
-        meta.get('size', sentinel),
-        meta.get('hash', sentinel),
-        meta.get('dependencies', []),
-        int(meta['data_mtime']) if 'data_mtime' in meta else sentinel,
-        data_json,
-        meta.get('suppressed', []),
-        meta.get('child_modules', []),
-        meta.get('options'),
-        meta.get('dep_prios', []),
-        meta.get('interface_hash', ''),
-        meta.get('version_id', sentinel),
-        meta.get('ignore_all', True),
-    )
+    m = cache_meta_from_dict(meta, data_json)
     # Don't check for path match, that is dealt with in validate_meta().
     if (m.id != id or
             m.mtime is None or m.size is None or
@@ -948,6 +1022,7 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
                                   .format(key, cached_options.get(key), current_options.get(key)))
         return None
 
+    manager.add_stats(fresh_metas=1)
     return m
 
 
@@ -971,7 +1046,7 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
                   ignore_all: bool, manager: BuildManager) -> Optional[CacheMeta]:
     '''Checks whether the cached AST of this module can be used.
 
-    Return:
+    Returns:
       None, if the cached AST is unusable.
       Original meta, if mtime/size matched.
       Meta with mtime updated to match source file, if hash/size matched but mtime/path didn't.
@@ -989,6 +1064,12 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
         manager.log('Metadata abandoned for {}: errors were previously ignored'.format(id))
         return None
 
+    if meta.memory_only:
+        # Special case for fine-grained incremental mode when the JSON file is missing but
+        # we want to cache the module anyway.
+        manager.log('Memory-only metadata for {}'.format(id))
+        return meta
+
     assert path is not None, "Internal error: meta was provided without a path"
     # Check data_json; assume if its mtime matches it's good.
     # TODO: stat() errors
@@ -1000,6 +1081,9 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
     # TODO: Share stat() outcome with find_module()
     path = os.path.abspath(path)
     st = manager.get_stat(path)  # TODO: Errors
+    if not stat.S_ISREG(st.st_mode):
+        manager.log('Metadata abandoned for {}: file {} does not exist'.format(id, path))
+        return None
     size = st.st_size
     if size != meta.size:
         manager.log('Metadata abandoned for {}: file {} has different size'.format(id, path))
@@ -1044,7 +1128,7 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
             return meta
 
     # It's a match on (id, path, size, hash, mtime).
-    manager.log('Metadata fresh for {}: file {}'.format(id, path))
+    manager.trace('Metadata fresh for {}: file {}'.format(id, path))
     return meta
 
 
@@ -1059,7 +1143,7 @@ def write_cache(id: str, path: str, tree: MypyFile,
                 dependencies: List[str], suppressed: List[str],
                 child_modules: List[str], dep_prios: List[int],
                 old_interface_hash: str, source_hash: str,
-                ignore_all: bool, manager: BuildManager) -> str:
+                ignore_all: bool, manager: BuildManager) -> Tuple[str, Optional[CacheMeta]]:
     """Write cache files for a module.
 
     Note that this mypy's behavior is still correct when any given
@@ -1076,8 +1160,10 @@ def write_cache(id: str, path: str, tree: MypyFile,
       old_interface_hash: the hash from the previous version of the data cache file
       manager: the build manager (for pyversion, log/trace)
 
-    Return:
-      The new interface hash based on the serialized tree
+    Returns:
+      A tuple containing the interface hash and CacheMeta
+      corresponding to the metadata that was written (the latter may
+      be None if the cache could not be written).
     """
     # Obtain file paths
     path = os.path.abspath(path)
@@ -1110,7 +1196,7 @@ def write_cache(id: str, path: str, tree: MypyFile,
             except OSError:
                 pass
         # Still return the interface hash we computed.
-        return interface_hash
+        return interface_hash, None
 
     # Write data cache file, if applicable
     if old_interface_hash == interface_hash:
@@ -1132,7 +1218,7 @@ def write_cache(id: str, path: str, tree: MypyFile,
             # data_mtime field won't match the data file's mtime.
             # Both have the effect of slowing down the next run a
             # little bit due to an out-of-date cache file.
-            return interface_hash
+            return interface_hash, None
         data_mtime = getmtime(data_json)
 
     mtime = int(st.st_mtime)
@@ -1166,7 +1252,7 @@ def write_cache(id: str, path: str, tree: MypyFile,
         # The next run will simply find the cache entry out of date.
         manager.log("Error writing meta JSON file {}".format(meta_json))
 
-    return interface_hash
+    return interface_hash, cache_meta_from_dict(meta, data_json)
 
 
 def delete_cache(id: str, path: str, manager: BuildManager) -> None:
@@ -1179,6 +1265,8 @@ def delete_cache(id: str, path: str, manager: BuildManager) -> None:
     path = os.path.abspath(path)
     meta_json, data_json = get_cache_names(id, path, manager)
     manager.log('Deleting {} {} {} {}'.format(id, path, meta_json, data_json))
+    if id in manager.saved_cache:
+        del manager.saved_cache[id]
 
     for filename in [data_json, meta_json]:
         try:
@@ -1351,6 +1439,7 @@ class State:
     meta = None  # type: Optional[CacheMeta]
     data = None  # type: Optional[str]
     tree = None  # type: Optional[MypyFile]
+    is_from_saved_cache = False  # True if the tree came from the in-memory cache
     dependencies = None  # type: List[str]
     suppressed = None  # type: List[str]  # Suppressed/missing dependencies
     priorities = None  # type: Dict[str, int]
@@ -1385,6 +1474,13 @@ class State:
     # Whether to ignore all errors
     ignore_all = False
 
+    # Whether the module has an error or any of its dependencies have one.
+    transitive_error = False
+
+    # Type checker used for checking this file.  Use type_checker() for
+    # access and to construct this on demand.
+    _type_checker = None  # type: Optional[TypeChecker]
+
     def __init__(self,
                  id: Optional[str],
                  path: Optional[str],
@@ -1408,6 +1504,7 @@ class State:
             self.import_context = []
         self.id = id or '__main__'
         self.options = manager.options.clone_for_module(self.id)
+        self._type_checker = None
         if not path and source is None:
             assert id is not None
             file_id = id
@@ -1455,7 +1552,8 @@ class State:
                     if not self.options.ignore_missing_imports:
                         save_import_context = manager.errors.import_context()
                         manager.errors.set_import_context(caller_state.import_context)
-                        manager.module_not_found(caller_state.xpath, caller_line, id)
+                        manager.module_not_found(caller_state.xpath, caller_state.id,
+                                                 caller_line, id)
                         manager.errors.set_import_context(save_import_context)
                     manager.missing_modules.add(id)
                     raise ModuleNotFound
@@ -1559,7 +1657,6 @@ class State:
 
     def mark_interface_stale(self, *, on_errors: bool = False) -> None:
         """Marks this module as having a stale public interface, and discards the cache data."""
-        self.meta = None
         self.externally_same = False
         if not on_errors:
             self.manager.stale_modules.add(self.id)
@@ -1593,6 +1690,7 @@ class State:
         # TODO: Assert data file wasn't changed.
         self.tree = MypyFile.deserialize(data)
         self.manager.modules[self.id] = self.tree
+        self.manager.add_stats(fresh_trees=1)
 
     def fix_cross_refs(self) -> None:
         assert self.tree is not None, "Internal error: method must be called on parsed file only"
@@ -1660,6 +1758,11 @@ class State:
     # Methods for processing modules from source code.
 
     def parse_file(self) -> None:
+        """Parse file and run first pass of semantic analysis.
+
+        Everything done here is local to the file. Don't depend on imported
+        modules in any way. Also record module dependencies based on imports.
+        """
         if self.tree is not None:
             # The file was already parsed (in __init__()).
             return
@@ -1767,20 +1870,27 @@ class State:
             patch_func()
 
     def type_check_first_pass(self) -> None:
-        assert self.tree is not None, "Internal error: method must be called on parsed file only"
-        manager = self.manager
         if self.options.semantic_analysis_only:
             return
         with self.wrap_context():
-            self.type_checker = TypeChecker(manager.errors, manager.modules, self.options,
-                                            self.tree, self.xpath, manager.plugin)
-            self.type_checker.check_first_pass()
+            self.type_checker().check_first_pass()
+
+    def type_checker(self) -> TypeChecker:
+        if not self._type_checker:
+            assert self.tree is not None, "Internal error: must be called on parsed file only"
+            manager = self.manager
+            self._type_checker = TypeChecker(manager.errors, manager.modules, self.options,
+                                             self.tree, self.xpath, manager.plugin)
+        return self._type_checker
+
+    def type_map(self) -> Dict[Expression, Type]:
+        return self.type_checker().type_map
 
     def type_check_second_pass(self) -> bool:
         if self.options.semantic_analysis_only:
             return False
         with self.wrap_context():
-            return self.type_checker.check_second_pass()
+            return self.type_checker().check_second_pass()
 
     def finish_passes(self) -> None:
         assert self.tree is not None, "Internal error: method must be called on parsed file only"
@@ -1788,16 +1898,18 @@ class State:
         if self.options.semantic_analysis_only:
             return
         with self.wrap_context():
-            manager.all_types.update(self.type_checker.type_map)
+            # Some tests want to look at the set of all types.
+            if manager.options.use_builtins_fixtures or manager.options.dump_deps:
+                manager.all_types.update(self.type_map())
 
             if self.options.incremental:
-                self._patch_indirect_dependencies(self.type_checker.module_refs,
-                                                  self.type_checker.type_map)
+                self._patch_indirect_dependencies(self.type_checker().module_refs,
+                                                  self.type_map())
 
             if self.options.dump_inference_stats:
                 dump_type_stats(self.tree, self.xpath, inferred=True,
-                                typemap=self.type_checker.type_map)
-            manager.report_file(self.tree, self.type_checker.type_map, self.options)
+                                typemap=self.type_map())
+            manager.report_file(self.tree, self.type_map(), self.options)
 
     def _patch_indirect_dependencies(self,
                                      module_refs: Set[str],
@@ -1835,13 +1947,14 @@ class State:
         if self.manager.options.quick_and_dirty:
             is_errors = self.manager.errors.is_errors_for_file(self.path)
         else:
-            is_errors = self.manager.errors.is_errors()
+            is_errors = self.transitive_error
         if is_errors:
             delete_cache(self.id, self.path, self.manager)
+            self.meta = None
             self.mark_interface_stale(on_errors=True)
             return
-        dep_prios = [self.priorities.get(dep, PRI_HIGH) for dep in self.dependencies]
-        new_interface_hash = write_cache(
+        dep_prios = self.dependency_priorities()
+        new_interface_hash, self.meta = write_cache(
             self.id, self.path, self.tree,
             list(self.dependencies), list(self.suppressed), list(self.child_modules),
             dep_prios, self.interface_hash, self.source_hash, self.ignore_all,
@@ -1853,14 +1966,31 @@ class State:
             self.mark_interface_stale()
             self.interface_hash = new_interface_hash
 
+    def dependency_priorities(self) -> List[int]:
+        return [self.priorities.get(dep, PRI_HIGH) for dep in self.dependencies]
+
 
 def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph:
+    set_orig = set(manager.saved_cache)
+    manager.log()
     manager.log("Mypy version %s" % __version__)
+    t0 = time.time()
     graph = load_graph(sources, manager)
+    t1 = time.time()
+    manager.add_stats(graph_size=len(graph),
+                      stubs_found=sum(g.path is not None and g.path.endswith('.pyi')
+                                      for g in graph.values()),
+                      graph_load_time=(t1 - t0),
+                      fm_cache_size=len(find_module_cache),
+                      fm_dir_cache_size=len(find_module_dir_cache),
+                      fm_listdir_cache_size=len(find_module_listdir_cache),
+                      fm_is_file_cache_size=len(find_module_is_file_cache),
+                      fm_isdir_cache_size=len(find_module_isdir_cache),
+                      )
     if not graph:
         print("Nothing to do?!")
         return graph
-    manager.log("Loaded graph with %d nodes" % len(graph))
+    manager.log("Loaded graph with %d nodes (%.3f sec)" % (len(graph), t1 - t0))
     if manager.options.dump_graph:
         dump_graph(graph)
         return graph
@@ -1868,9 +1998,33 @@ def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph:
     if manager.options.warn_unused_ignores:
         # TODO: This could also be a per-module option.
         manager.errors.generate_unused_ignore_notes()
+    updated = preserve_cache(graph)
+    set_updated = set(updated)
+    manager.saved_cache.clear()
+    manager.saved_cache.update(updated)
+    set_final = set(manager.saved_cache)
+    # These keys have numbers in them to force a sort order.
+    manager.add_stats(saved_cache_1orig=len(set_orig),
+                      saved_cache_2updated=len(set_updated & set_orig),
+                      saved_cache_3added=len(set_final - set_orig),
+                      saved_cache_4removed=len(set_orig - set_final),
+                      saved_cache_5final=len(set_final))
+    if manager.options.dump_deps:
+        # This speeds up startup a little when not using the daemon mode.
+        from mypy.server.deps import dump_all_dependencies
+        dump_all_dependencies(manager.modules, manager.all_types, manager.options.python_version)
     return graph
 
 
+def preserve_cache(graph: Graph) -> SavedCache:
+    saved_cache = {}
+    for id, state in graph.items():
+        assert state.id == id
+        if state.meta is not None and state.tree is not None:
+            saved_cache[id] = (state.meta, state.tree, state.type_map())
+    return saved_cache
+
+
 class NodeInfo:
     """Some info about a node in the graph of SCCs."""
 
@@ -1928,7 +2082,11 @@ def dump_graph(graph: Graph) -> None:
 
 
 def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
-    """Given some source files, load the full dependency graph."""
+    """Given some source files, load the full dependency graph.
+
+    As this may need to parse files, this can raise CompileError in case
+    there are syntax errors.
+    """
     graph = {}  # type: Graph
     # The deque is used to implement breadth-first traversal.
     # TODO: Consider whether to go depth-first instead.  This may
@@ -1953,7 +2111,19 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
     while new:
         st = new.popleft()
         assert st.ancestors is not None
-        for dep in st.ancestors + st.dependencies + st.suppressed:
+        # Strip out indirect dependencies.  These will be dealt with
+        # when they show up as direct dependencies, and there's a
+        # scenario where they hurt:
+        # - Suppose A imports B and B imports C.
+        # - Suppose on the next round:
+        #   - C is deleted;
+        #   - B is updated to remove the dependency on C;
+        #   - A is unchanged.
+        # - In this case A's cached *direct* dependencies are still valid
+        #   (since direct dependencies reflect the imports found in the source)
+        #   but A's cached *indirect* dependency on C is wrong.
+        dependencies = [dep for dep in st.dependencies if st.priorities.get(dep) != PRI_INDIRECT]
+        for dep in st.ancestors + dependencies + st.suppressed:
             # We don't want to recheck imports marked with '# type: ignore'
             # so we ignore any suppressed module not explicitly re-included
             # from the command line.
@@ -2035,7 +2205,7 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
         for id in scc:
             deps.update(graph[id].dependencies)
         deps -= ascc
-        stale_deps = {id for id in deps if not graph[id].is_interface_fresh()}
+        stale_deps = {id for id in deps if id in graph and not graph[id].is_interface_fresh()}
         if not manager.options.quick_and_dirty:
             fresh = fresh and not stale_deps
         undeps = set()
@@ -2090,13 +2260,20 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
         else:
             fresh_msg = "stale due to deps (%s)" % " ".join(sorted(stale_deps))
 
+        # Initialize transitive_error for all SCC members from union
+        # of transitive_error of dependencies.
+        if any(graph[dep].transitive_error for dep in deps if dep in graph):
+            for id in scc:
+                graph[id].transitive_error = True
+
         scc_str = " ".join(scc)
         if fresh:
-            manager.log("Queuing %s SCC (%s)" % (fresh_msg, scc_str))
-            fresh_scc_queue.append(scc)
+            if not maybe_reuse_in_memory_tree(graph, scc, manager):
+                manager.trace("Queuing %s SCC (%s)" % (fresh_msg, scc_str))
+                fresh_scc_queue.append(scc)
         else:
             if len(fresh_scc_queue) > 0:
-                manager.log("Processing the last {} queued SCCs".format(len(fresh_scc_queue)))
+                manager.log("Processing {} queued fresh SCCs".format(len(fresh_scc_queue)))
                 # Defer processing fresh SCCs until we actually run into a stale SCC
                 # and need the earlier modules to be loaded.
                 #
@@ -2104,10 +2281,15 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
                 # single fresh SCC. This is intentional -- we don't need those modules
                 # loaded if there are no more stale SCCs to be rechecked.
                 #
+                # Also note we shouldn't have to worry about transitive_error here,
+                # since modules with transitive errors aren't written to the cache,
+                # and if any dependencies were changed, this SCC would be stale.
+                # (Also, in quick_and_dirty mode we don't care about transitive errors.)
+                #
                 # TODO: see if it's possible to determine if we need to process only a
                 # _subset_ of the past SCCs instead of having to process them all.
                 for prev_scc in fresh_scc_queue:
-                    process_fresh_scc(graph, prev_scc)
+                    process_fresh_scc(graph, prev_scc, manager)
                 fresh_scc_queue = []
             size = len(scc)
             if size == 1:
@@ -2117,8 +2299,11 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
             process_stale_scc(graph, scc, manager)
 
     sccs_left = len(fresh_scc_queue)
+    nodes_left = sum(len(scc) for scc in fresh_scc_queue)
+    manager.add_stats(sccs_left=sccs_left, nodes_left=nodes_left)
     if sccs_left:
-        manager.log("{} fresh SCCs left in queue (and will remain unprocessed)".format(sccs_left))
+        manager.log("{} fresh SCCs ({} nodes) left in queue (and will remain unprocessed)"
+                    .format(sccs_left, nodes_left))
         manager.trace(str(fresh_scc_queue))
     else:
         manager.log("No fresh SCCs left in queue")
@@ -2171,8 +2356,13 @@ def order_ascc(graph: Graph, ascc: AbstractSet[str], pri_max: int = PRI_ALL) ->
     return [s for ss in sccs for s in order_ascc(graph, ss, pri_max)]
 
 
-def process_fresh_scc(graph: Graph, scc: List[str]) -> None:
-    """Process the modules in one SCC from their cached data."""
+def process_fresh_scc(graph: Graph, scc: List[str], manager: BuildManager) -> None:
+    """Process the modules in one SCC from their cached data.
+
+    This involves loading the tree from JSON and then doing various cleanups.
+
+    If the tree is loaded from memory ('saved_cache') it's even quicker.
+    """
     for id in scc:
         graph[id].load_tree()
     for id in scc:
@@ -2183,6 +2373,71 @@ def process_fresh_scc(graph: Graph, scc: List[str]) -> None:
         graph[id].patch_dependency_parents()
 
 
+def maybe_reuse_in_memory_tree(graph: Graph, scc: List[str], manager: BuildManager) -> bool:
+    """Set the trees for the given SCC from the in-memory cache, if all valid.
+
+    If any saved tree for this SCC is invalid, set the trees for all
+    SCC members to None and mark as not-from-cache.
+    """
+    if not can_reuse_in_memory_tree(graph, scc, manager):
+        for id in scc:
+            manager.add_stats(cleared_trees=1)
+            manager.trace("Clearing tree %s" % id)
+            st = graph[id]
+            st.tree = None
+            st.is_from_saved_cache = False
+            if id in manager.modules:
+                del manager.modules[id]
+        return False
+    trees = {id: manager.saved_cache[id][1] for id in scc}
+    for id, tree in trees.items():
+        manager.add_stats(reused_trees=1)
+        manager.trace("Reusing saved tree %s" % id)
+        st = graph[id]
+        st.tree = tree
+        st.is_from_saved_cache = True
+        manager.modules[id] = tree
+        # Delete any submodules from the module that aren't
+        # dependencies of the module; they will be re-added once
+        # imported.  It's possible that the parent module is reused
+        # but a submodule isn't; we don't want to accidentally link
+        # into the old submodule's tree.  See also
+        # patch_dependency_parents() above.  The exception for subname
+        # in st.dependencies handles the case where 'import m'
+        # guarantees that some submodule of m is also available
+        # (e.g. 'os.path'); in those cases the submodule is an
+        # explicit dependency of the parent.
+        for name in list(tree.names):
+            sym = tree.names[name]
+            subname = id + '.' + name
+            if (sym.kind == MODULE_REF
+                    and sym.node is not None
+                    and sym.node.fullname() == subname
+                    and subname not in st.dependencies):
+                manager.trace("Purging %s" % subname)
+                del tree.names[name]
+    return True
+
+
+def can_reuse_in_memory_tree(graph: Graph, scc: List[str], manager: BuildManager) -> bool:
+    """Check whether the given SCC can safely reuse the trees from saved_cache.
+
+    Assumes the SCC is already considered fresh.
+    """
+    saved_cache = manager.saved_cache
+    # Check that all nodes are available for loading from memory.
+    if all(id in saved_cache for id in scc):
+        # Check that all dependencies were loaded from memory.
+        # If not, some dependency was reparsed but the interface hash
+        # wasn't changed -- in that case we can't reuse the tree.
+        # TODO: Pass deps in from process_graph(), via maybe_reuse_in_memory_tree()?
+        deps = set(dep for id in scc for dep in graph[id].dependencies if dep in graph)
+        deps -= set(scc)  # Subtract the SCC itself (else nothing will be safe)
+        if all(graph[dep].is_from_saved_cache for dep in deps):
+            return True
+    return False
+
+
 def process_stale_scc(graph: Graph, scc: List[str], manager: BuildManager) -> None:
     """Process the modules in one SCC from source code.
 
@@ -2224,6 +2479,9 @@ def process_stale_scc(graph: Graph, scc: List[str], manager: BuildManager) -> No
         for id in stale:
             if graph[id].type_check_second_pass():
                 more = True
+    if any(manager.errors.is_errors_for_file(graph[id].xpath) for id in stale):
+        for id in stale:
+            graph[id].transitive_error = True
     for id in stale:
         graph[id].finish_passes()
         graph[id].write_cache()
diff --git a/mypy/checker.py b/mypy/checker.py
index d58fb24..aeb2b1d 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -260,9 +260,12 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         # TODO: Handle __all__
 
     def handle_cannot_determine_type(self, name: str, context: Context) -> None:
-        node = self.scope.top_function()
-        if self.pass_num < LAST_PASS and isinstance(node, (FuncDef, LambdaExpr)):
-            # Don't report an error yet. Just defer.
+        node = self.scope.top_non_lambda_function()
+        if self.pass_num < LAST_PASS and isinstance(node, FuncDef):
+            # Don't report an error yet. Just defer. Note that we don't defer
+            # lambdas because they are coupled to the surrounding function
+            # through the binder and the inferred type of the lambda, so it
+            # would get messy.
             if self.errors.type_name:
                 type_name = self.errors.type_name[-1]
             else:
@@ -541,17 +544,22 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                 if not is_same_type(new_type, self.function_type(defn.original_def)):
                     self.msg.incompatible_conditional_function_def(defn)
             else:
-                # Function definition overrides a variable initialized via assignment.
+                # Function definition overrides a variable initialized via assignment or a
+                # decorated function.
                 orig_type = defn.original_def.type
                 if orig_type is None:
                     # XXX This can be None, as happens in
                     # test_testcheck_TypeCheckSuite.testRedefinedFunctionInTryWithElse
-                    self.msg.note("Internal mypy error checking function redefinition.", defn)
+                    self.msg.note("Internal mypy error checking function redefinition", defn)
                     return
                 if isinstance(orig_type, PartialType):
                     if orig_type.type is None:
                         # Ah this is a partial type. Give it the type of the function.
-                        var = defn.original_def
+                        orig_def = defn.original_def
+                        if isinstance(orig_def, Decorator):
+                            var = orig_def.var
+                        else:
+                            var = orig_def
                         partial_types = self.find_partial_types(var)
                         if partial_types is not None:
                             var.type = new_type
@@ -585,7 +593,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             with self.enter_partial_types():
                 typ = self.function_type(defn)
                 if type_override:
-                    typ = type_override
+                    typ = type_override.copy_modified(line=typ.line, column=typ.column)
                 if isinstance(typ, CallableType):
                     with self.enter_attribute_inference_context():
                         self.check_func_def(defn, typ, name)
@@ -624,7 +632,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                                   item)
 
                     self.check_for_missing_annotations(fdef)
-                    if 'unimported' in self.options.disallow_any:
+                    if self.options.disallow_any_unimported:
                         if fdef.type and isinstance(fdef.type, CallableType):
                             ret_type = fdef.type.ret_type
                             if has_any_from_unimported_type(ret_type):
@@ -638,7 +646,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
 
                 if name:  # Special method names
                     if name in nodes.reverse_op_method_set:
-                        self.check_reverse_op_method(item, typ, name)
+                        self.check_reverse_op_method(item, typ, name, defn)
                     elif name in ('__getattr__', '__getattribute__'):
                         self.check_getattr_method(typ, defn, name)
                     elif name == '__setattr__':
@@ -722,7 +730,10 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                             arg_type.variance == COVARIANT and
                             defn.name() not in ('__init__', '__new__')
                         ):
-                            self.fail(messages.FUNCTION_PARAMETER_CANNOT_BE_COVARIANT, arg_type)
+                            ctx = arg_type  # type: Context
+                            if ctx.line < 0:
+                                ctx = typ
+                            self.fail(messages.FUNCTION_PARAMETER_CANNOT_BE_COVARIANT, ctx)
                     if typ.arg_kinds[i] == nodes.ARG_STAR:
                         # builtins.tuple[T] is typing.Tuple[T, ...]
                         arg_type = self.named_generic_type('builtins.tuple',
@@ -789,7 +800,11 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             if fdef.type is None and self.options.disallow_untyped_defs:
                 self.fail(messages.FUNCTION_TYPE_EXPECTED, fdef)
             elif isinstance(fdef.type, CallableType):
-                if is_unannotated_any(fdef.type.ret_type):
+                ret_type = fdef.type.ret_type
+                if is_unannotated_any(ret_type):
+                    self.fail(messages.RETURN_TYPE_EXPECTED, fdef)
+                elif (fdef.is_coroutine and isinstance(ret_type, Instance) and
+                      is_unannotated_any(ret_type.args[0])):
                     self.fail(messages.RETURN_TYPE_EXPECTED, fdef)
                 if any(is_unannotated_any(t) for t in fdef.type.arg_types):
                     self.fail(messages.ARGUMENT_TYPE_EXPECTED, fdef)
@@ -813,13 +828,24 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                  isinstance(stmt.expr, EllipsisExpr)))
 
     def check_reverse_op_method(self, defn: FuncItem, typ: CallableType,
-                                method: str) -> None:
+                                method: str, context: Context) -> None:
         """Check a reverse operator method such as __radd__."""
 
         # This used to check for some very obscure scenario.  It now
         # just decides whether it's worth calling
         # check_overlapping_op_methods().
 
+        # First check for a valid signature
+        method_type = CallableType([AnyType(TypeOfAny.special_form),
+                                    AnyType(TypeOfAny.special_form)],
+                                   [nodes.ARG_POS, nodes.ARG_POS],
+                                   [None, None],
+                                   AnyType(TypeOfAny.special_form),
+                                   self.named_type('builtins.function'))
+        if not is_subtype(typ, method_type):
+            self.msg.invalid_signature(typ, context)
+            return
+
         if method in ('__eq__', '__ne__'):
             # These are defined for all objects => can't cause trouble.
             return
@@ -832,9 +858,8 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         if isinstance(ret_type, Instance):
             if ret_type.type.fullname() == 'builtins.object':
                 return
-        # Plausibly the method could have too few arguments, which would result
-        # in an error elsewhere.
-        if len(typ.arg_types) <= 2:
+
+        if len(typ.arg_types) == 2:
             # TODO check self argument kind
 
             # Check for the issue described above.
@@ -1312,7 +1337,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         self.check_assignment(s.lvalues[-1], s.rvalue, s.type is None, s.new_syntax)
 
         if (s.type is not None and
-                'unimported' in self.options.disallow_any and
+                self.options.disallow_any_unimported and
                 has_any_from_unimported_type(s.type)):
             if isinstance(s.lvalues[-1], TupleExpr):
                 # This is a multiple assignment. Instead of figuring out which type is problematic,
@@ -1360,8 +1385,9 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                         partial_types = self.find_partial_types(var)
                         if partial_types is not None:
                             if not self.current_node_deferred:
-                                var.type = UnionType.make_simplified_union(
+                                inferred_type = UnionType.make_simplified_union(
                                     [rvalue_type, NoneTyp()])
+                                self.set_inferred_type(var, lvalue, inferred_type)
                             else:
                                 var.type = None
                             del partial_types[var]
@@ -1814,9 +1840,9 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
     def check_lvalue(self, lvalue: Lvalue) -> Tuple[Optional[Type],
                                                     Optional[IndexExpr],
                                                     Optional[Var]]:
-        lvalue_type = None  # type: Optional[Type]
-        index_lvalue = None  # type: Optional[IndexExpr]
-        inferred = None  # type: Optional[Var]
+        lvalue_type = None
+        index_lvalue = None
+        inferred = None
 
         if self.is_definition(lvalue):
             if isinstance(lvalue, NameExpr):
@@ -1848,7 +1874,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
 
     def is_definition(self, s: Lvalue) -> bool:
         if isinstance(s, NameExpr):
-            if s.is_def:
+            if s.is_inferred_def:
                 return True
             # If the node type is not defined, this must the first assignment
             # that we process => this is a definition, even though the semantic
@@ -1859,7 +1885,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             if isinstance(node, Var):
                 return node.type is None
         elif isinstance(s, MemberExpr):
-            return s.is_def
+            return s.is_inferred_def
         return False
 
     def infer_variable_type(self, name: Var, lvalue: Lvalue,
@@ -2112,7 +2138,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                 if isinstance(typ, AnyType):
                     # (Unless you asked to be warned in that case, and the
                     # function is not declared to return Any)
-                    if (self.options.warn_return_any and
+                    if (self.options.warn_return_any and not self.current_node_deferred and
                             not is_proper_subtype(AnyType(TypeOfAny.special_form), return_type)):
                         self.msg.incorrectly_returning_any(return_type, s)
                     return
@@ -2306,7 +2332,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                             if var:
                                 # To support local variables, we make this a definition line,
                                 # causing assignment to set the variable's type.
-                                var.is_def = True
+                                var.is_inferred_def = True
                                 # We also temporarily set current_node_deferred to False to
                                 # make sure the inference happens.
                                 # TODO: Use a better solution, e.g. a
@@ -2389,6 +2415,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             item_type = self.analyze_async_iterable_item_type(s.expr)
         else:
             item_type = self.analyze_iterable_item_type(s.expr)
+        s.inferred_item_type = item_type
         self.analyze_index_variables(s.index, item_type, s.index_type is None, s)
         self.accept_loop(s.body, s.else_body)
 
@@ -2524,7 +2551,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         self.accept(s.body)
 
     def check_untyped_after_decorator(self, typ: Type, func: FuncDef) -> None:
-        if 'decorated' not in self.options.disallow_any or self.is_stub:
+        if not self.options.disallow_any_decorated or self.is_stub:
             return
 
         if mypy.checkexpr.has_any_type(typ):
@@ -2781,12 +2808,10 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
     def function_type(self, func: FuncBase) -> FunctionLike:
         return function_type(func, self.named_type('builtins.function'))
 
-    # TODO: These next two functions should refer to TypeMap below
-    def find_isinstance_check(self, n: Expression) -> Tuple[Optional[Dict[Expression, Type]],
-                                                            Optional[Dict[Expression, Type]]]:
+    def find_isinstance_check(self, n: Expression) -> 'Tuple[TypeMap, TypeMap]':
         return find_isinstance_check(n, self.type_map)
 
-    def push_type_map(self, type_map: Optional[Dict[Expression, Type]]) -> None:
+    def push_type_map(self, type_map: 'TypeMap') -> None:
         if type_map is None:
             self.binder.unreachable()
         else:
@@ -3213,6 +3238,8 @@ def get_isinstance_type(expr: Expression,
         elif isinstance(typ, Instance) and typ.type.fullname() == 'builtins.type':
             object_type = Instance(typ.type.mro[-1], [])
             types.append(TypeRange(object_type, is_upper_bound=True))
+        elif isinstance(typ, AnyType):
+            types.append(TypeRange(typ, is_upper_bound=False))
         else:  # we didn't see an actual type, but rather a variable whose value is unknown to us
             return None
     if not types:
@@ -3444,6 +3471,12 @@ class Scope:
                 return e
         return None
 
+    def top_non_lambda_function(self) -> Optional[FuncItem]:
+        for e in reversed(self.stack):
+            if isinstance(e, FuncItem) and not isinstance(e, LambdaExpr):
+                return e
+        return None
+
     def active_class(self) -> Optional[TypeInfo]:
         if isinstance(self.stack[-1], TypeInfo):
             return self.stack[-1]
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 9098891..a428980 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -271,7 +271,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                 self.check_runtime_protocol_test(e)
             if e.callee.fullname == 'builtins.issubclass':
                 self.check_protocol_issubclass(e)
-        if isinstance(ret_type, UninhabitedType):
+        if isinstance(ret_type, UninhabitedType) and not ret_type.ambiguous:
             self.chk.binder.unreachable()
         if not allow_none_return and isinstance(ret_type, NoneTyp):
             self.chk.msg.does_not_return_value(callee_type, e)
@@ -543,7 +543,8 @@ class ExpressionChecker(ExpressionVisitor[Type]):
 
             if (callee.is_type_obj() and callee.type_object().is_abstract
                     # Exceptions for Type[...] and classmethod first argument
-                    and not callee.from_type_type and not callee.is_classmethod_class):
+                    and not callee.from_type_type and not callee.is_classmethod_class
+                    and not callee.type_object().fallback_to_any):
                 type = callee.type_object()
                 self.msg.cannot_instantiate_abstract_class(
                     callee.type_object().name(), type.abstract_attributes,
@@ -721,8 +722,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
 
         Returns the inferred types of *actual arguments*.
         """
-        dummy = None  # type: Any
-        res = [dummy] * len(args)  # type: List[Type]
+        res = [None] * len(args)  # type: List[Optional[Type]]
 
         for i, actuals in enumerate(formal_to_actual):
             for ai in actuals:
@@ -733,7 +733,8 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         for i, t in enumerate(res):
             if not t:
                 res[i] = self.accept(args[i])
-        return res
+        assert all(tp is not None for tp in res)
+        return cast(List[Type], res)
 
     def infer_function_type_arguments_using_context(
             self, callable: CallableType, error_context: Context) -> CallableType:
@@ -1394,7 +1395,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             method_type = None  # type: Optional[mypy.types.Type]
 
             if operator == 'in' or operator == 'not in':
-                right_type = self.accept(right)  # TODO only evaluate if needed
+                right_type = self.accept(right)  # always validate the right operand
 
                 # Keep track of whether we get type check errors (these won't be reported, they
                 # are just to verify whether something is valid typing wise).
@@ -1428,6 +1429,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                                                     allow_reverse=True)
 
             elif operator == 'is' or operator == 'is not':
+                self.accept(right)  # validate the right operand
                 sub_result = self.bool_type()
                 method_type = None
             else:
@@ -1654,20 +1656,9 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         op = e.op
         if op == 'not':
             result = self.bool_type()  # type: Type
-        elif op == '-':
-            method_type = self.analyze_external_member_access('__neg__',
-                                                              operand_type, e)
-            result, method_type = self.check_call(method_type, [], [], e)
-            e.method_type = method_type
-        elif op == '+':
-            method_type = self.analyze_external_member_access('__pos__',
-                                                              operand_type, e)
-            result, method_type = self.check_call(method_type, [], [], e)
-            e.method_type = method_type
         else:
-            assert op == '~', "unhandled unary operator"
-            method_type = self.analyze_external_member_access('__invert__',
-                                                              operand_type, e)
+            method = nodes.unary_op_methods[op]
+            method_type = self.analyze_external_member_access(method, operand_type, e)
             result, method_type = self.check_call(method_type, [], [], e)
             e.method_type = method_type
         return result
@@ -1786,7 +1777,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         options = self.chk.options
         if options.warn_redundant_casts and is_same_type(source_type, target_type):
             self.msg.redundant_cast(target_type, expr)
-        if 'unimported' in options.disallow_any and has_any_from_unimported_type(target_type):
+        if options.disallow_any_unimported and has_any_from_unimported_type(target_type):
             self.msg.unimported_type_becomes_any("Target type of cast", target_type, expr)
         check_for_explicit_any(target_type, self.chk.options, self.chk.is_typeshed_stub, self.msg,
                                context=expr)
@@ -1872,7 +1863,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         # Used for list and set expressions, as well as for tuples
         # containing star expressions that don't refer to a
         # Tuple. (Note: "lst" stands for list-set-tuple. :-)
-        tvdef = TypeVarDef('T', -1, [], self.object_type())
+        tvdef = TypeVarDef('T', 'T', -1, [], self.object_type())
         tv = TypeVarType(tvdef)
         constructor = CallableType(
             [tv],
@@ -1973,8 +1964,8 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             else:
                 args.append(TupleExpr([key, value]))
         # Define type variables (used in constructors below).
-        ktdef = TypeVarDef('KT', -1, [], self.object_type())
-        vtdef = TypeVarDef('VT', -2, [], self.object_type())
+        ktdef = TypeVarDef('KT', 'KT', -1, [], self.object_type())
+        vtdef = TypeVarDef('VT', 'VT', -2, [], self.object_type())
         kt = TypeVarType(ktdef)
         vt = TypeVarType(vtdef)
         rv = None
@@ -2241,7 +2232,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
 
             # Infer the type of the list comprehension by using a synthetic generic
             # callable type.
-            tvdef = TypeVarDef('T', -1, [], self.object_type())
+            tvdef = TypeVarDef('T', 'T', -1, [], self.object_type())
             tv = TypeVarType(tvdef)
             constructor = CallableType(
                 [tv],
@@ -2261,8 +2252,8 @@ class ExpressionChecker(ExpressionVisitor[Type]):
 
             # Infer the type of the list comprehension by using a synthetic generic
             # callable type.
-            ktdef = TypeVarDef('KT', -1, [], self.object_type())
-            vtdef = TypeVarDef('VT', -2, [], self.object_type())
+            ktdef = TypeVarDef('KT', 'KT', -1, [], self.object_type())
+            vtdef = TypeVarDef('VT', 'VT', -2, [], self.object_type())
             kt = TypeVarType(ktdef)
             vt = TypeVarType(vtdef)
             constructor = CallableType(
@@ -2379,7 +2370,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         assert typ is not None
         self.chk.store_type(node, typ)
 
-        if ('expr' in self.chk.options.disallow_any and
+        if (self.chk.options.disallow_any_expr and
                 not always_allow_any and
                 not self.chk.is_stub and
                 self.chk.in_checked_function() and
@@ -2558,7 +2549,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
     def visit_namedtuple_expr(self, e: NamedTupleExpr) -> Type:
         tuple_type = e.info.tuple_type
         if tuple_type:
-            if ('unimported' in self.chk.options.disallow_any and
+            if (self.chk.options.disallow_any_unimported and
                     has_any_from_unimported_type(tuple_type)):
                 self.msg.unimported_type_becomes_any("NamedTuple type", tuple_type, e)
             check_for_explicit_any(tuple_type, self.chk.options, self.chk.is_typeshed_stub,
@@ -2648,7 +2639,7 @@ def is_async_def(t: Type) -> bool:
 def map_actuals_to_formals(caller_kinds: List[int],
                            caller_names: Optional[Sequence[Optional[str]]],
                            callee_kinds: List[int],
-                           callee_names: List[Optional[str]],
+                           callee_names: Sequence[Optional[str]],
                            caller_arg_type: Callable[[int],
                                                      Type]) -> List[List[int]]:
     """Calculate mapping between actual (caller) args and formals.
diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index db583ed..af2d142 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -191,6 +191,8 @@ def analyze_member_access(name: str,
         elif isinstance(typ.item, TypeVarType):
             if isinstance(typ.item.upper_bound, Instance):
                 item = typ.item.upper_bound
+        elif isinstance(typ.item, TupleType):
+            item = typ.item.fallback
         elif isinstance(typ.item, FunctionLike) and typ.item.is_type_obj():
             item = typ.item.fallback
         elif isinstance(typ.item, TypeType):
@@ -491,7 +493,7 @@ def add_class_tvars(t: Type, itype: Instance, is_classmethod: bool,
     info = itype.type  # type: TypeInfo
     if isinstance(t, CallableType):
         # TODO: Should we propagate type variable values?
-        tvars = [TypeVarDef(n, i + 1, [], builtin_type('builtins.object'), tv.variance)
+        tvars = [TypeVarDef(n, n, i + 1, [], builtin_type('builtins.object'), tv.variance)
                  for (i, n), tv in zip(enumerate(info.type_vars), info.defn.type_vars)]
         if is_classmethod:
             t = bind_self(t, original_type, is_classmethod=True)
@@ -582,7 +584,7 @@ def class_callable(init_type: CallableType, info: TypeInfo, type_type: Instance,
     callable_type = init_type.copy_modified(
         ret_type=fill_typevars(info), fallback=type_type, name=None, variables=variables,
         special_sig=special_sig)
-    c = callable_type.with_name('"{}"'.format(info.name()))
+    c = callable_type.with_name(info.name())
     return c
 
 
diff --git a/mypy/constraints.py b/mypy/constraints.py
index 0a79483..92a1f35 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -373,12 +373,8 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
                 cb = infer_constraints(template.args[0], item, SUPERTYPE_OF)
                 res.extend(cb)
             return res
-        elif (isinstance(actual, TupleType) and template.type.is_protocol and
-              self.direction == SUPERTYPE_OF):
-            if mypy.subtypes.is_subtype(actual.fallback, erase_typevars(template)):
-                res.extend(infer_constraints(template, actual.fallback, self.direction))
-                return res
-            return []
+        elif isinstance(actual, TupleType) and self.direction == SUPERTYPE_OF:
+            return infer_constraints(template, actual.fallback, self.direction)
         else:
             return []
 
diff --git a/mypy/dmypy.py b/mypy/dmypy.py
new file mode 100644
index 0000000..f3e8640
--- /dev/null
+++ b/mypy/dmypy.py
@@ -0,0 +1,341 @@
+"""Client for mypy daemon mode.
+
+Highly experimental!  Only supports UNIX-like systems.
+
+This manages a daemon process which keeps useful state in memory
+rather than having to read it back from disk on each run.
+"""
+
+import argparse
+import json
+import os
+import signal
+import socket
+import sys
+import time
+
+from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Tuple, TypeVar
+
+from mypy.dmypy_util import STATUS_FILE, receive
+
+# Argument parser.  Subparsers are tied to action functions by the
+# @action(subparse) decorator.
+
+parser = argparse.ArgumentParser(description="Client for mypy daemon mode",
+                                 fromfile_prefix_chars='@')
+parser.set_defaults(action=None)
+subparsers = parser.add_subparsers()
+
+start_parser = subparsers.add_parser('start', help="Start daemon")
+start_parser.add_argument('--log-file', metavar='FILE', type=str,
+                          help="Direct daemon stdout/stderr to FILE")
+start_parser.add_argument('flags', metavar='FLAG', nargs='*', type=str,
+                          help="Regular mypy flags (precede with --)")
+
+status_parser = subparsers.add_parser('status', help="Show daemon status")
+
+stop_parser = subparsers.add_parser('stop', help="Stop daemon (asks it politely to go away)")
+
+kill_parser = subparsers.add_parser('kill', help="Kill daemon (kills the process)")
+
+restart_parser = subparsers.add_parser('restart',
+    help="Restart daemon (stop or kill followed by start)")
+restart_parser.add_argument('--log-file', metavar='FILE', type=str,
+                            help="Direct daemon stdout/stderr to FILE")
+restart_parser.add_argument('flags', metavar='FLAG', nargs='*', type=str,
+                            help="Regular mypy flags (precede with --)")
+
+check_parser = subparsers.add_parser('check', help="Check some files (requires running daemon)")
+check_parser.add_argument('-q', '--quiet', action='store_true',
+                          help="Suppress instrumentation stats")
+check_parser.add_argument('files', metavar='FILE', nargs='+', help="File (or directory) to check")
+
+recheck_parser = subparsers.add_parser('recheck',
+    help="Check the same files as the most recent check run (requires running daemon)")
+recheck_parser.add_argument('-q', '--quiet', action='store_true',
+                            help="Suppress instrumentation stats")
+
+hang_parser = subparsers.add_parser('hang', help="Hang for 100 seconds")
+
+daemon_parser = subparsers.add_parser('daemon', help="Run daemon in foreground")
+daemon_parser.add_argument('flags', metavar='FLAG', nargs='*', type=str,
+                           help="Regular mypy flags (precede with --)")
+
+help_parser = subparsers.add_parser('help')
+
+
+def main() -> None:
+    """Parse arguments and dispatch to an action (the code below reads top-down)."""
+    args = parser.parse_args()
+    if not args.action:
+        parser.print_usage()
+    else:
+        args.action(args)
+
+
+ActionFunction = Callable[[argparse.Namespace], None]
+
+
+def action(subparser: argparse.ArgumentParser) -> Callable[[ActionFunction], None]:
+    """Decorator to tie an action function to a subparser."""
+    def register(func: ActionFunction) -> None:
+        subparser.set_defaults(action=func)
+    return register
+
+
+# Action functions (run in client from command line).
+# TODO: Use a separate exception instead of SystemExit to indicate failures.
+
+ at action(start_parser)
+def do_start(args: argparse.Namespace) -> None:
+    """Start daemon (it must not already be running).
+
+    This is where mypy flags are set.  Setting flags is a bit awkward;
+    you have to use e.g.:
+
+      dmypy start -- --strict
+
+    since we don't want to duplicate mypy's huge list of flags.
+    """
+    try:
+        pid, sockname = get_status()
+    except SystemExit as err:
+        # Lazy import so this import doesn't slow down other commands.
+        from mypy.dmypy_server import daemonize, Server
+        if daemonize(Server(args.flags).serve, args.log_file):
+            sys.exit(1)
+        wait_for_server()
+    else:
+        sys.exit("Daemon is still alive")
+
+
+ at action(status_parser)
+def do_status(args: argparse.Namespace) -> None:
+    """Print daemon status.
+
+    This verifies that it is responsive to requests.
+    """
+    status = read_status()
+    show_stats(status)
+    check_status(status)
+    try:
+        response = request('status')
+    except Exception as err:
+        print("Daemon is stuck; consider %s kill" % sys.argv[0])
+        raise
+    else:
+        show_stats(response)
+
+
+ at action(stop_parser)
+def do_stop(args: argparse.Namespace) -> None:
+    """Stop daemon politely (via a request)."""
+    try:
+        response = request('stop')
+    except Exception as err:
+        sys.exit("Daemon is stuck; consider %s kill" % sys.argv[0])
+    else:
+        if response:
+            print("Stop response:", response)
+        else:
+            print("Daemon stopped")
+
+
+ at action(kill_parser)
+def do_kill(args: argparse.Namespace) -> None:
+    """Kill daemon rudely (by killing the process)."""
+    pid, sockname = get_status()
+    try:
+        os.kill(pid, signal.SIGKILL)
+    except os.error as err:
+        sys.exit(str(err))
+    else:
+        print("Daemon killed")
+
+
+ at action(restart_parser)
+def do_restart(args: argparse.Namespace) -> None:
+    """Restart daemon.
+
+    We first try to stop it politely if it's running.  This also sets
+    mypy flags (and has the same issues as start).
+    """
+    try:
+        response = request('stop')
+    except SystemExit:
+        pass
+    else:
+        if response:
+            sys.exit("Status: %s" % str(response))
+        else:
+            print("Daemon stopped")
+    # Lazy import so this import doesn't slow down other commands.
+    from mypy.dmypy_server import daemonize, Server
+    if daemonize(Server(args.flags).serve, args.log_file):
+        sys.exit(1)
+    wait_for_server()
+
+
+def wait_for_server(timeout: float = 5.0) -> None:
+    """Wait until the server is up.
+
+    Exit if it doesn't happen within the timeout.
+    """
+    endtime = time.time() + timeout
+    while time.time() < endtime:
+        try:
+            data = read_status()
+        except SystemExit:
+            # If the file isn't there yet, retry later.
+            time.sleep(0.1)
+            continue
+        # If the file's content is bogus or the process is dead, fail.
+        pid, sockname = check_status(data)
+        print("Daemon started")
+        return
+    sys.exit("Timed out waiting for daemon to start")
+
+
+ at action(check_parser)
+def do_check(args: argparse.Namespace) -> None:
+    """Ask the daemon to check a list of files."""
+    t0 = time.time()
+    response = request('check', files=args.files)
+    t1 = time.time()
+    response['roundtrip_time'] = t1 - t0
+    check_output(response, args.quiet)
+
+
+ at action(recheck_parser)
+def do_recheck(args: argparse.Namespace) -> None:
+    """Ask the daemon to check the same list of files it checked most recently.
+
+    This doesn't work across daemon restarts.
+    """
+    t0 = time.time()
+    response = request('recheck')
+    t1 = time.time()
+    response['roundtrip_time'] = t1 - t0
+    check_output(response, args.quiet)
+
+
+def check_output(response: Dict[str, Any], quiet: bool) -> None:
+    """Print the output from a check or recheck command."""
+    try:
+        out, err, status = response['out'], response['err'], response['status']
+    except KeyError:
+        sys.exit("Response: %s" % str(response))
+    sys.stdout.write(out)
+    sys.stderr.write(err)
+    if not quiet:
+        show_stats(response)
+    if status:
+        sys.exit(status)
+
+
+def show_stats(response: Mapping[str, object]) -> None:
+    for key, value in sorted(response.items()):
+        if key not in ('out', 'err'):
+            print("%-24s: %10s" % (key, "%.3f" % value if isinstance(value, float) else value))
+
+
+ at action(hang_parser)
+def do_hang(args: argparse.Namespace) -> None:
+    """Hang for 100 seconds, as a debug hack."""
+    request('hang')
+
+
+ at action(daemon_parser)
+def do_daemon(args: argparse.Namespace) -> None:
+    """Serve requests in the foreground."""
+    # Lazy import so this import doesn't slow down other commands.
+    from mypy.dmypy_server import Server
+    Server(args.flags).serve()
+
+
+ at action(help_parser)
+def do_help(args: argparse.Namespace) -> None:
+    """Print full help (same as dmypy --help)."""
+    parser.print_help()
+
+
+# Client-side infrastructure.
+
+
+def request(command: str, **kwds: object) -> Dict[str, Any]:
+    """Send a request to the daemon.
+
+    Return the JSON dict with the response.
+    """
+    args = dict(kwds)
+    if command:
+        args.update(command=command)
+    data = json.dumps(args)
+    pid, sockname = get_status()
+    sock = socket.socket(socket.AF_UNIX)
+    sock.connect(sockname)
+    sock.sendall(data.encode('utf8'))
+    sock.shutdown(socket.SHUT_WR)
+    try:
+        response = receive(sock)
+    except OSError as err:
+        return {'error': str(err)}
+    else:
+        return response
+    finally:
+        sock.close()
+
+
+def get_status() -> Tuple[int, str]:
+    """Read status file and check if the process is alive.
+
+    Return (pid, sockname) on success.
+
+    Raise SystemExit(<message>) if something's wrong.
+    """
+    data = read_status()
+    return check_status(data)
+
+
+def check_status(data: Dict[str, Any]) -> Tuple[int, str]:
+    """Check if the process is alive.
+
+    Return (pid, sockname) on success.
+
+    Raise SystemExit(<message>) if something's wrong.
+    """
+    if 'pid' not in data:
+        raise SystemExit("Invalid status file (no pid field)")
+    pid = data['pid']
+    if not isinstance(pid, int):
+        raise SystemExit("pid field is not an int")
+    try:
+        os.kill(pid, 0)
+    except OSError as err:
+        raise SystemExit("Daemon has died")
+    if 'sockname' not in data:
+        raise SystemExit("Invalid status file (no sockname field)")
+    sockname = data['sockname']
+    if not isinstance(sockname, str):
+        raise SystemExit("sockname field is not a string")
+    return pid, sockname
+
+
+def read_status() -> Dict[str, object]:
+    """Read status file."""
+    if not os.path.isfile(STATUS_FILE):
+        raise SystemExit("No status file found")
+    with open(STATUS_FILE) as f:
+        try:
+            data = json.load(f)
+        except Exception as err:
+            raise SystemExit("Malformed status file (not JSON)")
+    if not isinstance(data, dict):
+        raise SystemExit("Invalid status file (not a dict)")
+    return data
+
+
+# Run main().
+
+if __name__ == '__main__':
+    main()
diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py
new file mode 100644
index 0000000..a1b4059
--- /dev/null
+++ b/mypy/dmypy_server.py
@@ -0,0 +1,289 @@
+"""Server for mypy daemon mode.

+Highly experimental!  Only supports UNIX-like systems.

+This implements the daemon process, which keeps useful state in memory
+to avoid having to read it back from disk on each run.
+"""
+
+import gc
+import io
+import json
+import os
+import socket
+import sys
+import time
+
+from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence
+
+# TODO: Import all mypy modules lazily to speed up client startup time.
+import mypy.build
+import mypy.errors
+import mypy.main
+from mypy.dmypy_util import STATUS_FILE, receive
+
+
+def daemonize(func: Callable[[], None], log_file: Optional[str] = None) -> int:
+    """Arrange to call func() in a grandchild of the current process.
+
+    Return 0 for success, exit status for failure, negative if
+    subprocess killed by signal.
+    """
+    # See https://stackoverflow.com/questions/473620/how-do-you-create-a-daemon-in-python
+    sys.stdout.flush()
+    sys.stderr.flush()
+    pid = os.fork()
+    if pid:
+        # Parent process: wait for child in case things go bad there.
+        npid, sts = os.waitpid(pid, 0)
+        sig = sts & 0xff
+        if sig:
+            print("Child killed by signal", sig)
+            return -sig
+        sts = sts >> 8
+        if sts:
+            print("Child exit status", sts)
+        return sts
+    # Child process: do a bunch of UNIX stuff and then fork a grandchild.
+    try:
+        os.setsid()  # Detach controlling terminal
+        os.umask(0o27)
+        devnull = os.open('/dev/null', os.O_RDWR)
+        os.dup2(devnull, 0)
+        os.dup2(devnull, 1)
+        os.dup2(devnull, 2)
+        os.close(devnull)
+        pid = os.fork()
+        if pid:
+            # Child is done, exit to parent.
+            os._exit(0)
+        # Grandchild: run the server.
+        if log_file:
+            sys.stdout = sys.stderr = open(log_file, 'a', buffering=1)
+            fd = sys.stdout.fileno()
+            os.dup2(fd, 2)
+            os.dup2(fd, 1)
+        func()
+    finally:
+        # Make sure we never get back into the caller.
+        os._exit(1)
+
+
+# Server code.
+
+SOCKET_NAME = 'dmypy.sock'  # In current directory.
+
+
+class Server:
+
+    # NOTE: the instance is constructed in the parent process but
+    # serve() is called in the grandchild (by daemonize()).
+
+    def __init__(self, flags: List[str]) -> None:
+        """Initialize the server with the desired mypy flags."""
+        self.saved_cache = {}  # type: mypy.build.SavedCache
+        sources, options = mypy.main.process_options(['-i'] + flags, False)
+        if sources:
+            sys.exit("dmypy: start/restart does not accept sources")
+        if options.report_dirs:
+            sys.exit("dmypy: start/restart cannot generate reports")
+        if not options.incremental:
+            sys.exit("dmypy: start/restart should not disable incremental mode")
+        if options.quick_and_dirty:
+            sys.exit("dmypy: start/restart should not specify quick_and_dirty mode")
+        self.options = options
+        if os.path.isfile(STATUS_FILE):
+            os.unlink(STATUS_FILE)
+
+    def serve(self) -> None:
+        """Serve requests, synchronously (no thread or fork)."""
+        try:
+            sock = self.create_listening_socket()
+            try:
+                with open(STATUS_FILE, 'w') as f:
+                    json.dump({'pid': os.getpid(), 'sockname': sock.getsockname()}, f)
+                    f.write('\n')  # I like my JSON with trailing newline
+                while True:
+                    conn, addr = sock.accept()
+                    data = receive(conn)
+                    resp = {}  # type: Dict[str, Any]
+                    if 'command' not in data:
+                        resp = {'error': "No command found in request"}
+                    else:
+                        command = data['command']
+                        if not isinstance(command, str):
+                            resp = {'error': "Command is not a string"}
+                        else:
+                            command = data.pop('command')
+                        resp = self.run_command(command, data)
+                    try:
+                        conn.sendall(json.dumps(resp).encode('utf8'))
+                    except OSError as err:
+                        pass  # Maybe the client hung up
+                    conn.close()
+                    if command == 'stop':
+                        sock.close()
+                        sys.exit(0)
+            finally:
+                os.unlink(STATUS_FILE)
+        finally:
+            os.unlink(self.sockname)
+
+    def create_listening_socket(self) -> socket.socket:
+        """Create the socket and set it up for listening."""
+        self.sockname = os.path.abspath(SOCKET_NAME)
+        if os.path.exists(self.sockname):
+            os.unlink(self.sockname)
+        sock = socket.socket(socket.AF_UNIX)
+        sock.bind(self.sockname)
+        sock.listen(1)
+        return sock
+
+    def run_command(self, command: str, data: Mapping[str, object]) -> Dict[str, object]:
+        """Run a specific command from the registry."""
+        key = 'cmd_' + command
+        method = getattr(self.__class__, key, None)
+        if method is None:
+            return {'error': "Unrecognized command '%s'" % command}
+        else:
+            return method(self, **data)
+
+    # Command functions (run in the server via RPC).
+
+    def cmd_status(self) -> Dict[str, object]:
+        """Return daemon status."""
+        res = {}  # type: Dict[str, object]
+        res.update(get_meminfo())
+        return res
+
+    def cmd_stop(self) -> Dict[str, object]:
+        """Stop daemon."""
+        return {}
+
+    last_sources = None
+
+    def cmd_check(self, files: Sequence[str]) -> Dict[str, object]:
+        """Check a list of files."""
+        # TODO: Move this into check(), in case one of the args is a directory.
+        # Capture stdout/stderr and catch SystemExit while processing the source list.
+        save_stdout = sys.stdout
+        save_stderr = sys.stderr
+        try:
+            sys.stdout = stdout = io.StringIO()
+            sys.stderr = stderr = io.StringIO()
+            self.last_sources = mypy.main.create_source_list(files, self.options)
+        except SystemExit as err:
+            return {'out': stdout.getvalue(), 'err': stderr.getvalue(), 'status': err.code}
+        finally:
+            sys.stdout = save_stdout
+            sys.stderr = save_stderr
+        return self.check(self.last_sources)
+
+    def cmd_recheck(self) -> Dict[str, object]:
+        """Check the same list of files we checked most recently."""
+        if not self.last_sources:
+            return {'error': "Command 'recheck' is only valid after a 'check' command"}
+        return self.check(self.last_sources)
+
+    # Needed by tests.
+    last_manager = None  # type: Optional[mypy.build.BuildManager]
+
+    def check(self, sources: List[mypy.build.BuildSource],
+              alt_lib_path: Optional[str] = None) -> Dict[str, Any]:
+        self.last_manager = None
+        with GcLogger() as gc_result:
+            try:
+                # saved_cache is mutated in place.
+                res = mypy.build.build(sources, self.options,
+                                       saved_cache=self.saved_cache,
+                                       alt_lib_path=alt_lib_path)
+                msgs = res.errors
+                self.last_manager = res.manager  # type: Optional[mypy.build.BuildManager]
+            except mypy.errors.CompileError as err:
+                msgs = err.messages
+        if msgs:
+            msgs.append("")
+            response = {'out': "\n".join(msgs), 'err': "", 'status': 1}
+        else:
+            response = {'out': "", 'err': "", 'status': 0}
+        response.update(gc_result.get_stats())
+        response.update(get_meminfo())
+        if self.last_manager is not None:
+            response.update(self.last_manager.stats_summary())
+        return response
+
+    def cmd_hang(self) -> Dict[str, object]:
+        """Hang for 100 seconds, as a debug hack."""
+        time.sleep(100)
+        return {}
+
+
+# Misc utilities.
+
+
+class GcLogger:
+    """Context manager to log GC stats and overall time."""
+
+    def __enter__(self) -> 'GcLogger':
+        self.gc_start_time = None  # type: Optional[float]
+        self.gc_time = 0.0
+        self.gc_calls = 0
+        self.gc_collected = 0
+        self.gc_uncollectable = 0
+        gc.callbacks.append(self.gc_callback)
+        self.start_time = time.time()
+        return self
+
+    def gc_callback(self, phase: str, info: Mapping[str, int]) -> None:
+        if phase == 'start':
+            assert self.gc_start_time is None, "Start phase out of sequence"
+            self.gc_start_time = time.time()
+        elif phase == 'stop':
+            assert self.gc_start_time is not None, "Stop phase out of sequence"
+            self.gc_calls += 1
+            self.gc_time += time.time() - self.gc_start_time
+            self.gc_start_time = None
+            self.gc_collected += info['collected']
+            self.gc_uncollectable += info['uncollectable']
+        else:
+            assert False, "Unrecognized gc phase (%r)" % (phase,)
+
+    def __exit__(self, *args: object) -> None:
+        while self.gc_callback in gc.callbacks:
+            gc.callbacks.remove(self.gc_callback)
+
+    def get_stats(self) -> Dict[str, float]:
+        end_time = time.time()
+        result = {}
+        result['gc_time'] = self.gc_time
+        result['gc_calls'] = self.gc_calls
+        result['gc_collected'] = self.gc_collected
+        result['gc_uncollectable'] = self.gc_uncollectable
+        result['build_time'] = end_time - self.start_time
+        return result
+
+
+MiB = 2**20
+
+
+def get_meminfo() -> Mapping[str, float]:
+    # See https://stackoverflow.com/questions/938733/total-memory-used-by-python-process
+    import resource  # Since it doesn't exist on Windows.
+    res = {}
+    rusage = resource.getrusage(resource.RUSAGE_SELF)
+    if sys.platform == 'darwin':
+        factor = 1
+    else:
+        factor = 1024  # Linux
+    res['memory_maxrss_mib'] = rusage.ru_maxrss * factor / MiB
+    # If we can import psutil, use it for some extra data
+    try:
+        import psutil  # type: ignore  # It's not in typeshed yet
+    except ImportError:
+        pass
+    else:
+        process = psutil.Process(os.getpid())
+        meminfo = process.memory_info()
+        res['memory_rss_mib'] = meminfo.rss / MiB
+        res['memory_vms_mib'] = meminfo.vms / MiB
+    return res
diff --git a/mypy/dmypy_util.py b/mypy/dmypy_util.py
new file mode 100644
index 0000000..e2cc87f
--- /dev/null
+++ b/mypy/dmypy_util.py
@@ -0,0 +1,27 @@
+"""Shared code between dmypy.py and dmypy_server.py.
+
+This should be pretty lightweight and not depend on other mypy code.
+"""
+
+import json
+import socket
+
+from typing import Any
+
+STATUS_FILE = 'dmypy.json'
+
+
+def receive(sock: socket.socket) -> Any:
+    """Receive JSON data from a socket until EOF."""
+    bdata = bytearray()
+    while True:
+        more = sock.recv(100000)
+        if not more:
+            break
+        bdata.extend(more)
+    if not bdata:
+        raise OSError("No data received")
+    data = json.loads(bdata.decode('utf8'))
+    if not isinstance(data, dict):
+        raise OSError("Data received is not a dict (%s)" % str(type(data)))
+    return data
diff --git a/mypy/errors.py b/mypy/errors.py
index 4009e6a..923c592 100644
--- a/mypy/errors.py
+++ b/mypy/errors.py
@@ -331,6 +331,13 @@ class Errors:
         """Are the any errors that are blockers?"""
         return any(err for err in self.error_info if err.blocker)
 
+    def blocker_module(self) -> Optional[str]:
+        """Return the module of the first blocking error, or None if there are no blockers."""
+        for err in self.error_info:
+            if err.blocker:
+                return err.module
+        return None
+
     def is_errors_for_file(self, file: str) -> bool:
         """Are there any errors for the given file?"""
         return file in self.error_files
@@ -340,7 +347,9 @@ class Errors:
 
         Render the messages suitable for displaying.
         """
-        raise CompileError(self.messages(), use_stdout=True)
+        raise CompileError(self.messages(),
+                           use_stdout=True,
+                           module_with_blocker=self.blocker_module())
 
     def messages(self) -> List[str]:
         """Return a string list that represents the error messages.
@@ -506,11 +515,17 @@ class CompileError(Exception):
 
     messages = None  # type: List[str]
     use_stdout = False
+    # Can be set in case there was a module with a blocking error
+    module_with_blocker = None  # type: Optional[str]
 
-    def __init__(self, messages: List[str], use_stdout: bool = False) -> None:
+    def __init__(self,
+                 messages: List[str],
+                 use_stdout: bool = False,
+                 module_with_blocker: Optional[str] = None) -> None:
         super().__init__('\n'.join(messages))
         self.messages = messages
         self.use_stdout = use_stdout
+        self.module_with_blocker = module_with_blocker
 
 
 class DecodeError(Exception):
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index cb1eeba..e7ec40a 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -68,6 +68,7 @@ TYPE_COMMENT_AST_ERROR = 'invalid type comment or annotation'
 
 def parse(source: Union[str, bytes],
           fnam: str,
+          module: Optional[str],
           errors: Optional[Errors] = None,
           options: Options = Options()) -> MypyFile:
 
@@ -80,7 +81,7 @@ def parse(source: Union[str, bytes],
     if errors is None:
         errors = Errors()
         raise_on_error = True
-    errors.set_file(fnam, None)
+    errors.set_file(fnam, module)
     is_stub_file = fnam.endswith('.pyi')
     try:
         if is_stub_file:
@@ -97,7 +98,7 @@ def parse(source: Union[str, bytes],
         tree.path = fnam
         tree.is_stub = is_stub_file
     except SyntaxError as e:
-        errors.report(e.lineno, e.offset, e.msg)
+        errors.report(e.lineno, e.offset, e.msg, blocker=True)
         tree = MypyFile([], [], False, set())
 
     if raise_on_error and errors.is_errors():
@@ -111,7 +112,7 @@ def parse_type_comment(type_comment: str, line: int, errors: Optional[Errors]) -
         typ = ast3.parse(type_comment, '<type_comment>', 'eval')
     except SyntaxError as e:
         if errors is not None:
-            errors.report(line, e.offset, TYPE_COMMENT_SYNTAX_ERROR)
+            errors.report(line, e.offset, TYPE_COMMENT_SYNTAX_ERROR, blocker=True)
             return None
         else:
             raise
@@ -157,8 +158,11 @@ class ASTConverter(ast3.NodeTransformer):
         self.is_stub = is_stub
         self.errors = errors
 
+    def note(self, msg: str, line: int, column: int) -> None:
+        self.errors.report(line, column, msg, severity='note')
+
     def fail(self, msg: str, line: int, column: int) -> None:
-        self.errors.report(line, column, msg)
+        self.errors.report(line, column, msg, blocker=True)
 
     def generic_visit(self, node: ast3.AST) -> None:
         raise RuntimeError('AST node not implemented: ' + str(type(node)))
@@ -356,6 +360,9 @@ class ASTConverter(ast3.NodeTransformer):
                     arg_types.insert(0, AnyType(TypeOfAny.special_form))
             except SyntaxError:
                 self.fail(TYPE_COMMENT_SYNTAX_ERROR, n.lineno, n.col_offset)
+                if n.type_comment and n.type_comment[0] != "(":
+                    self.note('Suggestion: wrap argument types in parentheses',
+                              n.lineno, n.col_offset)
                 arg_types = [AnyType(TypeOfAny.from_error)] * len(args)
                 return_type = AnyType(TypeOfAny.from_error)
         else:
@@ -651,8 +658,8 @@ class ASTConverter(ast3.NodeTransformer):
     def visit_ImportFrom(self, n: ast3.ImportFrom) -> ImportBase:
         assert n.level is not None
         if len(n.names) == 1 and n.names[0].name == '*':
-            assert n.module is not None
-            i = ImportAll(n.module, n.level)  # type: ImportBase
+            mod = n.module if n.module is not None else ''
+            i = ImportAll(mod, n.level)  # type: ImportBase
         else:
             i = ImportFrom(self.translate_module_id(n.module) if n.module is not None else '',
                            n.level,
@@ -1013,7 +1020,11 @@ class TypeConverter(ast3.NodeTransformer):
 
     def fail(self, msg: str, line: int, column: int) -> None:
         if self.errors:
-            self.errors.report(line, column, msg)
+            self.errors.report(line, column, msg, blocker=True)
+
+    def note(self, msg: str, line: int, column: int) -> None:
+        if self.errors:
+            self.errors.report(line, column, msg, severity='note')
 
     def visit_raw_str(self, s: str) -> Type:
         # An escape hatch that allows the AST walker in fastparse2 to
@@ -1031,12 +1042,19 @@ class TypeConverter(ast3.NodeTransformer):
 
     def visit_Call(self, e: ast3.Call) -> Type:
         # Parse the arg constructor
-        if not isinstance(self.parent(), ast3.List):
-            return self.generic_visit(e)
         f = e.func
         constructor = stringify_name(f)
+
+        if not isinstance(self.parent(), ast3.List):
+            self.fail(TYPE_COMMENT_AST_ERROR, self.line, e.col_offset)
+            if constructor:
+                self.note("Suggestion: use {}[...] instead of {}(...)".format(
+                    constructor, constructor),
+                    self.line, e.col_offset)
+            return AnyType(TypeOfAny.from_error)
         if not constructor:
             self.fail("Expected arg constructor name", e.lineno, e.col_offset)
+
         name = None  # type: Optional[str]
         default_type = AnyType(TypeOfAny.special_form)
         typ = default_type  # type: Type
diff --git a/mypy/fastparse2.py b/mypy/fastparse2.py
index 3156f9e..7c7a9fe 100644
--- a/mypy/fastparse2.py
+++ b/mypy/fastparse2.py
@@ -79,6 +79,7 @@ TYPE_COMMENT_AST_ERROR = 'invalid type comment'
 
 def parse(source: Union[str, bytes],
           fnam: str,
+          module: Optional[str],
           errors: Optional[Errors] = None,
           options: Options = Options()) -> MypyFile:
     """Parse a source file, without doing any semantic analysis.
@@ -90,7 +91,7 @@ def parse(source: Union[str, bytes],
     if errors is None:
         errors = Errors()
         raise_on_error = True
-    errors.set_file(fnam, None)
+    errors.set_file(fnam, module)
     is_stub_file = fnam.endswith('.pyi')
     try:
         assert options.python_version[0] < 3 and not is_stub_file
@@ -103,7 +104,7 @@ def parse(source: Union[str, bytes],
         tree.path = fnam
         tree.is_stub = is_stub_file
     except SyntaxError as e:
-        errors.report(e.lineno, e.offset, e.msg)
+        errors.report(e.lineno, e.offset, e.msg, blocker=True)
         tree = MypyFile([], [], False, set())
 
     if raise_on_error and errors.is_errors():
@@ -150,7 +151,7 @@ class ASTConverter(ast27.NodeTransformer):
         self.errors = errors
 
     def fail(self, msg: str, line: int, column: int) -> None:
-        self.errors.report(line, column, msg)
+        self.errors.report(line, column, msg, blocker=True)
 
     def generic_visit(self, node: ast27.AST) -> None:
         raise RuntimeError('AST node not implemented: ' + str(type(node)))
@@ -647,8 +648,8 @@ class ASTConverter(ast27.NodeTransformer):
     def visit_ImportFrom(self, n: ast27.ImportFrom) -> ImportBase:
         assert n.level is not None
         if len(n.names) == 1 and n.names[0].name == '*':
-            assert n.module is not None
-            i = ImportAll(n.module, n.level)  # type: ImportBase
+            mod = n.module if n.module is not None else ''
+            i = ImportAll(mod, n.level)  # type: ImportBase
         else:
             i = ImportFrom(self.translate_module_id(n.module) if n.module is not None else '',
                            n.level,
diff --git a/mypy/main.py b/mypy/main.py
index 60bf8a2..ef30c02 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -8,7 +8,7 @@ import re
 import sys
 import time
 
-from typing import Any, Dict, List, Mapping, Optional, Set, Tuple
+from typing import Any, Dict, List, Mapping, Optional, Sequence, Set, Tuple
 
 from mypy import build
 from mypy import defaults
@@ -28,6 +28,21 @@ class InvalidPackageName(Exception):
     """Exception indicating that a package name was invalid."""
 
 
+orig_stat = os.stat
+
+
+def stat_proxy(path: str) -> os.stat_result:
+    try:
+        st = orig_stat(path)
+    except os.error as err:
+        print("stat(%r) -> %s" % (path, err))
+        raise
+    else:
+        print("stat(%r) -> (st_mode=%o, st_mtime=%d, st_size=%d)" %
+              (path, st.st_mode, st.st_mtime, st.st_size))
+        return st
+
+
 def main(script_path: Optional[str], args: Optional[List[str]] = None) -> None:
     """Main entry point to the type checker.
 
@@ -37,6 +52,7 @@ def main(script_path: Optional[str], args: Optional[List[str]] = None) -> None:
         be used.
     """
     t0 = time.time()
+    # To log stat() calls: os.stat = stat_proxy
     if script_path:
         bin_dir = find_bin_directory(script_path)  # type: Optional[str]
     else:
@@ -103,24 +119,6 @@ def type_check_only(sources: List[BuildSource], bin_dir: Optional[str],
                        options=options)
 
 
-disallow_any_options = ['unimported', 'expr', 'unannotated', 'decorated', 'explicit', 'generics']
-
-
-def disallow_any_argument_type(raw_options: str) -> List[str]:
-    if not raw_options:
-        # empty string disables all options
-        return []
-    flag_options = [o.strip() for o in raw_options.split(',')]
-    for option in flag_options:
-        if option not in disallow_any_options:
-            formatted_valid_options = ', '.join(
-                "'{}'".format(o) for o in disallow_any_options)
-            message = "Invalid '--disallow-any' option '{}' (valid options are: {}).".format(
-                option, formatted_valid_options)
-            raise argparse.ArgumentError(None, message)
-    return flag_options
-
-
 FOOTER = """environment variables:
 MYPYPATH     additional module search path"""
 
@@ -256,10 +254,18 @@ def process_options(args: List[str],
                         help="silently ignore imports of missing modules")
     parser.add_argument('--follow-imports', choices=['normal', 'silent', 'skip', 'error'],
                         default='normal', help="how to treat imports (default normal)")
-    parser.add_argument('--disallow-any', type=disallow_any_argument_type, default=[],
-                        metavar='{{{}}}'.format(', '.join(disallow_any_options)),
-                        help="disallow various types of Any in a module. Takes a comma-separated "
-                             "list of options (defaults to all options disabled)")
+    parser.add_argument('--disallow-any-unimported', default=False, action='store_true',
+                        help="disallow Any types resulting from unfollowed imports")
+    parser.add_argument('--disallow-any-expr', default=False, action='store_true',
+                        help='disallow all expressions that have type Any')
+    parser.add_argument('--disallow-any-decorated', default=False, action='store_true',
+                        help='disallow functions that have Any in their signature '
+                             'after decorator transformation')
+    parser.add_argument('--disallow-any-explicit', default=False, action='store_true',
+                        help='disallow explicit Any in type positions')
+    parser.add_argument('--disallow-any-generics', default=False, action='store_true',
+                        help='disallow usage of generic types that do not specify explicit '
+                             'type parameters')
     add_invertible_flag('--disallow-untyped-calls', default=False, strict_flag=True,
                         help="disallow calling functions without type annotations"
                         " from functions with type annotations")
@@ -345,9 +351,15 @@ def process_options(args: List[str],
     # which will make the cache writing process output pretty-printed JSON (which
     # is easier to debug).
     parser.add_argument('--debug-cache', action='store_true', help=argparse.SUPPRESS)
+    # --dump-deps will dump all fine-grained dependencies to stdout
+    parser.add_argument('--dump-deps', action='store_true', help=argparse.SUPPRESS)
     # --dump-graph will dump the contents of the graph of SCCs and exit.
     parser.add_argument('--dump-graph', action='store_true', help=argparse.SUPPRESS)
+    # --semantic-analysis-only does exactly that.
+    parser.add_argument('--semantic-analysis-only', action='store_true', help=argparse.SUPPRESS)
     # deprecated options
+    parser.add_argument('--disallow-any', dest='special-opts:disallow_any',
+                        help=argparse.SUPPRESS)
     add_invertible_flag('--strict-boolean', default=False,
                         help=argparse.SUPPRESS)
     parser.add_argument('-f', '--dirty-stubs', action='store_true',
@@ -422,6 +434,9 @@ def process_options(args: List[str],
                      )
 
     # Process deprecated options
+    if special_opts.disallow_any:
+        print("--disallow-any option was split up into multiple flags. "
+              "See http://mypy.readthedocs.io/en/latest/command_line.html#disallow-any-flags")
     if options.strict_boolean:
         print("Warning: --strict-boolean is deprecated; "
               "see https://github.com/python/mypy/issues/3195", file=sys.stderr)
@@ -446,9 +461,6 @@ def process_options(args: List[str],
         print("Warning: --no-fast-parser no longer has any effect.  The fast parser "
               "is now mypy's default and only parser.")
 
-    if 'unannotated' in options.disallow_any:
-        options.disallow_untyped_defs = True
-
     # Check for invalid argument combinations.
     if require_targets:
         code_methods = sum(bool(c) for c in [special_opts.modules,
@@ -505,28 +517,33 @@ def process_options(args: List[str],
         targets = [BuildSource(None, None, '\n'.join(special_opts.command))]
         return targets, options
     else:
-        targets = []
-        for f in special_opts.files:
-            if f.endswith(PY_EXTENSIONS):
-                try:
-                    targets.append(BuildSource(f, crawl_up(f)[1], None))
-                except InvalidPackageName as e:
-                    fail(str(e))
-            elif os.path.isdir(f):
-                try:
-                    sub_targets = expand_dir(f)
-                except InvalidPackageName as e:
-                    fail(str(e))
-                if not sub_targets:
-                    fail("There are no .py[i] files in directory '{}'"
-                         .format(f))
-                targets.extend(sub_targets)
-            else:
-                mod = os.path.basename(f) if options.scripts_are_modules else None
-                targets.append(BuildSource(f, mod, None))
+        targets = create_source_list(special_opts.files, options)
         return targets, options
 
 
+def create_source_list(files: Sequence[str], options: Options) -> List[BuildSource]:
+    targets = []
+    for f in files:
+        if f.endswith(PY_EXTENSIONS):
+            try:
+                targets.append(BuildSource(f, crawl_up(f)[1], None))
+            except InvalidPackageName as e:
+                fail(str(e))
+        elif os.path.isdir(f):
+            try:
+                sub_targets = expand_dir(f)
+            except InvalidPackageName as e:
+                fail(str(e))
+            if not sub_targets:
+                fail("There are no .py[i] files in directory '{}'"
+                     .format(f))
+            targets.extend(sub_targets)
+        else:
+            mod = os.path.basename(f) if options.scripts_are_modules else None
+            targets.append(BuildSource(f, mod, None))
+    return targets
+
+
 def keyfunc(name: str) -> Tuple[int, str]:
     """Determines sort order for directory listing.
 
@@ -631,7 +648,6 @@ config_types = {
     'custom_typeshed_dir': str,
     'mypy_path': lambda s: [p.strip() for p in re.split('[,:]', s)],
     'junit_xml': str,
-    'disallow_any': disallow_any_argument_type,
     # These two are for backwards compatibility
     'silent_imports': bool,
     'almost_silent': bool,
@@ -749,14 +765,6 @@ def parse_section(prefix: str, template: Options,
         except ValueError as err:
             print("%s: %s: %s" % (prefix, key, err), file=sys.stderr)
             continue
-        if key == 'disallow_any':
-            # "disallow_any = " should disable all disallow_any options, including untyped defs,
-            # given in a more general config.
-            if not v:
-                results['disallow_untyped_defs'] = False
-            # If "unannotated" is explicitly given, turn on disallow_untyped_defs.
-            elif 'unannotated' in v:
-                results['disallow_untyped_defs'] = True
         if key == 'silent_imports':
             print("%s: silent_imports has been replaced by "
                   "ignore_missing_imports=True; follow_imports=skip" % prefix, file=sys.stderr)
diff --git a/mypy/meet.py b/mypy/meet.py
index 3e883b5..3fbd9c2 100644
--- a/mypy/meet.py
+++ b/mypy/meet.py
@@ -73,13 +73,16 @@ def is_overlapping_types(t: Type, s: Type, use_promotions: bool = False) -> bool
     multiple inheritance actually occurs somewhere in a program, due to
     stub files hiding implementation details, dynamic loading etc.
 
-    TODO: Don't consider tuples always overlapping.
     TODO: Don't consider callables always overlapping.
     TODO: Don't consider type variables with values always overlapping.
     """
     # Any overlaps with everything
     if isinstance(t, AnyType) or isinstance(s, AnyType):
         return True
+    # object overlaps with everything
+    if (isinstance(t, Instance) and t.type.fullname() == 'builtins.object' or
+            isinstance(s, Instance) and s.type.fullname() == 'builtins.object'):
+        return True
 
     # Since we are effectively working with the erased types, we only
     # need to handle occurrences of TypeVarType at the top level.
@@ -91,6 +94,20 @@ def is_overlapping_types(t: Type, s: Type, use_promotions: bool = False) -> bool
         t = t.as_anonymous().fallback
     if isinstance(s, TypedDictType):
         s = s.as_anonymous().fallback
+
+    if isinstance(t, UnionType):
+        return any(is_overlapping_types(item, s)
+                   for item in t.relevant_items())
+    if isinstance(s, UnionType):
+        return any(is_overlapping_types(t, item)
+                   for item in s.relevant_items())
+
+    # We must check for TupleTypes before Instances, since Tuple[A, ...]
+    # is an Instance
+    tup_overlap = is_overlapping_tuples(t, s, use_promotions)
+    if tup_overlap is not None:
+        return tup_overlap
+
     if isinstance(t, Instance):
         if isinstance(s, Instance):
             # Consider two classes non-disjoint if one is included in the mro
@@ -110,12 +127,6 @@ def is_overlapping_types(t: Type, s: Type, use_promotions: bool = False) -> bool
             if s.type.is_protocol and is_protocol_implementation(t, s):
                 return True
             return False
-    if isinstance(t, UnionType):
-        return any(is_overlapping_types(item, s)
-                   for item in t.relevant_items())
-    if isinstance(s, UnionType):
-        return any(is_overlapping_types(t, item)
-                   for item in s.relevant_items())
     if isinstance(t, TypeType) and isinstance(s, TypeType):
         # If both types are TypeType, compare their inner types.
         return is_overlapping_types(t.item, s.item, use_promotions)
@@ -132,11 +143,35 @@ def is_overlapping_types(t: Type, s: Type, use_promotions: bool = False) -> bool
         if isinstance(t, NoneTyp) != isinstance(s, NoneTyp):
             # NoneTyp does not overlap with other non-Union types under strict Optional checking
             return False
-    # We conservatively assume that non-instance, non-union, and non-TypeType types can overlap
-    # any other types.
+    # We conservatively assume that non-instance, non-union, non-TupleType and non-TypeType types
+    # can overlap any other types.
     return True
 
 
+def is_overlapping_tuples(t: Type, s: Type, use_promotions: bool) -> Optional[bool]:
+    """Part of is_overlapping_types(), for tuples only"""
+    t = adjust_tuple(t, s) or t
+    s = adjust_tuple(s, t) or s
+    if isinstance(t, TupleType) or isinstance(s, TupleType):
+        if isinstance(t, TupleType) and isinstance(s, TupleType):
+            if t.length() == s.length():
+                if all(is_overlapping_types(ti, si, use_promotions)
+                       for ti, si in zip(t.items, s.items)):
+                    return True
+        # TupleType and non-tuples do not overlap
+        return False
+    # No tuples are involved here
+    return None
+
+
+def adjust_tuple(left: Type, r: Type) -> Optional[TupleType]:
+    """Find out if `left` is a Tuple[A, ...], and adjust its length to match `r`."""
+    if isinstance(left, Instance) and left.type.fullname() == 'builtins.tuple':
+        n = r.length() if isinstance(r, TupleType) else 1
+        return TupleType([left.args[0]] * n, left)
+    return None
+
+
 class TypeMeetVisitor(TypeVisitor[Type]):
     def __init__(self, s: Type) -> None:
         self.s = s
diff --git a/mypy/messages.py b/mypy/messages.py
index b49292e..7c0df42 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -25,7 +25,7 @@ from mypy.types import (
 from mypy.nodes import (
     TypeInfo, Context, MypyFile, op_methods, FuncDef, reverse_type_aliases,
     ARG_POS, ARG_OPT, ARG_NAMED, ARG_NAMED_OPT, ARG_STAR, ARG_STAR2,
-    ReturnStmt, NameExpr, Var, CONTRAVARIANT, COVARIANT
+    ReturnStmt, NameExpr, Var, CONTRAVARIANT, COVARIANT,
 )
 
 
@@ -512,7 +512,7 @@ class MessageBuilder:
         return AnyType(TypeOfAny.from_error)
 
     def untyped_function_call(self, callee: CallableType, context: Context) -> Type:
-        name = callee.name if callee.name is not None else '(unknown)'
+        name = callable_name(callee) or '(unknown)'
         self.fail('Call to untyped function {} in typed context'.format(name), context)
         return AnyType(TypeOfAny.from_error)
 
@@ -526,8 +526,9 @@ class MessageBuilder:
         operator name in the messages.
         """
         target = ''
-        if callee.name:
-            name = callee.name
+        callee_name = callable_name(callee)
+        if callee_name is not None:
+            name = callee_name
             if callee.bound_args and callee.bound_args[0] is not None:
                 base = self.format(callee.bound_args[0])
             else:
@@ -535,6 +536,7 @@ class MessageBuilder:
 
             for op, method in op_methods.items():
                 for variant in method, '__r' + method[2:]:
+                    # FIX: do not rely on textual formatting
                     if name.startswith('"{}" of'.format(variant)):
                         if op == 'in' or variant != method:
                             # Reversed order of base/argument.
@@ -565,15 +567,15 @@ class MessageBuilder:
 
         msg = ''
         notes = []  # type: List[str]
-        if callee.name == '<list>':
-            name = callee.name[1:-1]
+        if callee_name == '<list>':
+            name = callee_name[1:-1]
             n -= 1
             actual_type_str, expected_type_str = self.format_distinctly(arg_type,
                                                                         callee.arg_types[0])
             msg = '{} item {} has incompatible type {}; expected {}'.format(
                 name.title(), n, actual_type_str, expected_type_str)
-        elif callee.name == '<dict>':
-            name = callee.name[1:-1]
+        elif callee_name == '<dict>':
+            name = callee_name[1:-1]
             n -= 1
             key_type, value_type = cast(TupleType, arg_type).items
             expected_key_type, expected_value_type = cast(TupleType, callee.arg_types[0]).items
@@ -596,19 +598,19 @@ class MessageBuilder:
             msg = '{} entry {} has incompatible type {}: {}; expected {}: {}'.format(
                 name.title(), n, key_type_str, value_type_str,
                 expected_key_type_str, expected_value_type_str)
-        elif callee.name == '<list-comprehension>':
+        elif callee_name == '<list-comprehension>':
             actual_type_str, expected_type_str = map(strip_quotes,
                                                      self.format_distinctly(arg_type,
                                                                             callee.arg_types[0]))
             msg = 'List comprehension has incompatible type List[{}]; expected List[{}]'.format(
                 actual_type_str, expected_type_str)
-        elif callee.name == '<set-comprehension>':
+        elif callee_name == '<set-comprehension>':
             actual_type_str, expected_type_str = map(strip_quotes,
                                                      self.format_distinctly(arg_type,
                                                                             callee.arg_types[0]))
             msg = 'Set comprehension has incompatible type Set[{}]; expected Set[{}]'.format(
                 actual_type_str, expected_type_str)
-        elif callee.name == '<dictionary-comprehension>':
+        elif callee_name == '<dictionary-comprehension>':
             actual_type_str, expected_type_str = self.format_distinctly(arg_type,
                                                                         callee.arg_types[n - 1])
             msg = ('{} expression in dictionary comprehension has incompatible type {}; '
@@ -616,7 +618,7 @@ class MessageBuilder:
                 'Key' if n == 1 else 'Value',
                 actual_type_str,
                 expected_type_str)
-        elif callee.name == '<generator>':
+        elif callee_name == '<generator>':
             actual_type_str, expected_type_str = self.format_distinctly(arg_type,
                                                                         callee.arg_types[0])
             msg = 'Generator has incompatible item type {}; expected {}'.format(
@@ -656,57 +658,48 @@ class MessageBuilder:
                 msg = 'Missing positional argument'
             else:
                 msg = 'Missing positional arguments'
-            if callee.name and diff and all(d is not None for d in diff):
-                msg += ' "{}" in call to {}'.format('", "'.join(cast(List[str], diff)),
-                                                    callee.name)
+            callee_name = callable_name(callee)
+            if callee_name is not None and diff and all(d is not None for d in diff):
+                args = '", "'.join(cast(List[str], diff))
+                msg += ' "{}" in call to {}'.format(args, callee_name)
         else:
-            msg = 'Too few arguments'
-            if callee.name:
-                msg += ' for {}'.format(callee.name)
+            msg = 'Too few arguments' + for_function(callee)
         self.fail(msg, context)
 
     def missing_named_argument(self, callee: CallableType, context: Context, name: str) -> None:
-        msg = 'Missing named argument "{}"'.format(name)
-        if callee.name:
-            msg += ' for function {}'.format(callee.name)
+        msg = 'Missing named argument "{}"'.format(name) + for_function(callee)
         self.fail(msg, context)
 
     def too_many_arguments(self, callee: CallableType, context: Context) -> None:
-        msg = 'Too many arguments'
-        if callee.name:
-            msg += ' for {}'.format(callee.name)
+        msg = 'Too many arguments' + for_function(callee)
         self.fail(msg, context)
 
     def too_many_positional_arguments(self, callee: CallableType,
                                       context: Context) -> None:
-        msg = 'Too many positional arguments'
-        if callee.name:
-            msg += ' for {}'.format(callee.name)
+        msg = 'Too many positional arguments' + for_function(callee)
         self.fail(msg, context)
 
     def unexpected_keyword_argument(self, callee: CallableType, name: str,
                                     context: Context) -> None:
-        msg = 'Unexpected keyword argument "{}"'.format(name)
-        if callee.name:
-            msg += ' for {}'.format(callee.name)
+        msg = 'Unexpected keyword argument "{}"'.format(name) + for_function(callee)
         self.fail(msg, context)
         module = find_defining_module(self.modules, callee)
         if module:
             assert callee.definition is not None
-            self.note('{} defined here'.format(callee.name), callee.definition,
+            self.note('{} defined here'.format(callable_name(callee)), callee.definition,
                       file=module.path, origin=context)
 
     def duplicate_argument_value(self, callee: CallableType, index: int,
                                  context: Context) -> None:
         self.fail('{} gets multiple values for keyword argument "{}"'.
-                  format(capitalize(callable_name(callee)),
-                         callee.arg_names[index]), context)
+                  format(callable_name(callee) or 'Function', callee.arg_names[index]),
+                  context)
 
     def does_not_return_value(self, callee_type: Optional[Type], context: Context) -> None:
         """Report an error about use of an unusable type."""
         name = None  # type: Optional[str]
         if isinstance(callee_type, FunctionLike):
-            name = callee_type.get_name()
+            name = callable_name(callee_type)
         if name is not None:
             self.fail('{} does not return a value'.format(capitalize(name)), context)
         else:
@@ -734,9 +727,10 @@ class MessageBuilder:
 
     def no_variant_matches_arguments(self, overload: Overloaded, arg_types: List[Type],
                                      context: Context) -> None:
-        if overload.name():
+        name = callable_name(overload)
+        if name:
             self.fail('No overload variant of {} matches argument types {}'
-                      .format(overload.name(), arg_types), context)
+                      .format(name, arg_types), context)
         else:
             self.fail('No overload variant matches argument types {}'.format(arg_types), context)
 
@@ -804,9 +798,9 @@ class MessageBuilder:
 
     def could_not_infer_type_arguments(self, callee_type: CallableType, n: int,
                                        context: Context) -> None:
-        if callee_type.name and n > 0:
-            self.fail('Cannot infer type argument {} of {}'.format(
-                n, callee_type.name), context)
+        callee_name = callable_name(callee_type)
+        if callee_name is not None and n > 0:
+            self.fail('Cannot infer type argument {} of {}'.format(n, callee_name), context)
         else:
             self.fail('Cannot infer function type argument', context)
 
@@ -909,8 +903,10 @@ class MessageBuilder:
                                    typ: Type,
                                    typevar_name: str,
                                    context: Context) -> None:
-        self.fail(INCOMPATIBLE_TYPEVAR_VALUE.format(typevar_name, callable_name(callee),
-                                                    self.format(typ)), context)
+        self.fail(INCOMPATIBLE_TYPEVAR_VALUE.format(typevar_name,
+                                                    callable_name(callee) or 'function',
+                                                    self.format(typ)),
+                  context)
 
     def overloaded_signatures_overlap(self, index1: int, index2: int,
                                       context: Context) -> None:
@@ -1354,11 +1350,18 @@ def format_item_name_list(s: Iterable[str]) -> str:
         return '(' + ', '.join(["'%s'" % name for name in lst[:5]]) + ', ...)'
 
 
-def callable_name(type: CallableType) -> str:
-    if type.name:
-        return type.name
-    else:
-        return 'function'
+def callable_name(type: FunctionLike) -> Optional[str]:
+    name = type.get_name()
+    if name is not None and name[0] != '<':
+        return '"{}"'.format(name).replace(' of ', '" of "')
+    return name
+
+
+def for_function(callee: CallableType) -> str:
+    name = callable_name(callee)
+    if name is not None:
+        return ' for {}'.format(name)
+    return ''
 
 
 def find_defining_module(modules: Dict[str, MypyFile], typ: CallableType) -> Optional[MypyFile]:
diff --git a/mypy/myunit/__main__.py b/mypy/myunit/__main__.py
index 78ef01f..34098d4 100644
--- a/mypy/myunit/__main__.py
+++ b/mypy/myunit/__main__.py
@@ -6,13 +6,4 @@ Usually used as a slave by runtests.py, but can be used directly.
 
 from mypy.myunit import main
 
-# In Python 3.3, mypy.__path__ contains a relative path to the mypy module
-# (whereas in later Python versions it contains an absolute path).  Because the
-# test runner changes directories, this breaks non-toplevel mypy imports.  We
-# fix that problem by fixing up the path to be absolute here.
-import os.path
-import mypy
-# User-defined packages always have __path__ attributes, but mypy doesn't know that.
-mypy.__path__ = [os.path.abspath(p) for p in mypy.__path__]  # type: ignore
-
 main()
diff --git a/mypy/nodes.py b/mypy/nodes.py
index 761f936..9ccd928 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -4,7 +4,7 @@ import os
 from abc import abstractmethod
 from collections import OrderedDict
 from typing import (
-    Any, TypeVar, List, Tuple, cast, Set, Dict, Union, Optional, Callable,
+    Any, TypeVar, List, Tuple, cast, Set, Dict, Union, Optional, Callable, Sequence,
 )
 
 import mypy.strconv
@@ -475,7 +475,8 @@ class FuncDef(FuncItem, SymbolNode, Statement):
     is_conditional = False             # Defined conditionally (within block)?
     is_abstract = False
     is_property = False
-    original_def = None  # type: Union[None, FuncDef, Var]  # Original conditional definition
+    # Original conditional definition
+    original_def = None  # type: Union[None, FuncDef, Var, Decorator]
 
     FLAGS = FuncItem.FLAGS + [
         'is_decorated', 'is_conditional', 'is_abstract', 'is_property'
@@ -543,6 +544,7 @@ class Decorator(SymbolNode, Statement):
 
     func = None  # type: FuncDef                # Decorated function
     decorators = None  # type: List[Expression] # Decorators (may be empty)
+    # TODO: This is mostly used for the type; consider replacing with a 'type' attribute
     var = None  # type: Var                     # Represents the decorated function obj
     is_overload = False
 
@@ -563,6 +565,10 @@ class Decorator(SymbolNode, Statement):
     def info(self) -> 'TypeInfo':
         return self.func.info
 
+    @property
+    def type(self) -> 'Optional[mypy.types.Type]':
+        return self.var.type
+
     def accept(self, visitor: StatementVisitor[T]) -> T:
         return visitor.visit_decorator(self)
 
@@ -829,6 +835,8 @@ class ForStmt(Statement):
     index = None  # type: Lvalue
     # Type given by type comments for index, can be None
     index_type = None  # type: Optional[mypy.types.Type]
+    # Inferred iterable item type
+    inferred_item_type = None  # type: Optional[mypy.types.Type]
     # Expression to iterate
     expr = None  # type: Expression
     body = None  # type: Block
@@ -993,15 +1001,15 @@ class ExecStmt(Statement):
     """Python 2 exec statement"""
 
     expr = None  # type: Expression
-    variables1 = None  # type: Optional[Expression]
-    variables2 = None  # type: Optional[Expression]
+    globals = None  # type: Optional[Expression]
+    locals = None  # type: Optional[Expression]
 
     def __init__(self, expr: Expression,
-                 variables1: Optional[Expression],
-                 variables2: Optional[Expression]) -> None:
+                 globals: Optional[Expression],
+                 locals: Optional[Expression]) -> None:
         self.expr = expr
-        self.variables1 = variables1
-        self.variables2 = variables2
+        self.globals = globals
+        self.locals = locals
 
     def accept(self, visitor: StatementVisitor[T]) -> T:
         return visitor.visit_exec_stmt(self)
@@ -1122,11 +1130,13 @@ class RefExpr(Expression):
     node = None  # type: Optional[SymbolNode]  # Var, FuncDef or TypeInfo that describes this
     fullname = None  # type: Optional[str]  # Fully qualified name (or name if not global)
 
+    # Does this define a new name?
+    is_new_def = False
     # Does this define a new name with inferred type?
     #
     # For members, after semantic analysis, this does not take base
     # classes into consideration at all; the type checker deals with these.
-    is_def = False
+    is_inferred_def = False
 
 
 class NameExpr(RefExpr):
@@ -1150,7 +1160,8 @@ class NameExpr(RefExpr):
                 'kind': self.kind,
                 'node': None if self.node is None else self.node.serialize(),
                 'fullname': self.fullname,
-                'is_def': self.is_def,
+                'is_new_def': self.is_new_def,
+                'is_inferred_def': self.is_inferred_def,
                 'name': self.name,
                 }
 
@@ -1161,7 +1172,8 @@ class NameExpr(RefExpr):
         ret.kind = data['kind']
         ret.node = None if data['node'] is None else SymbolNode.deserialize(data['node'])
         ret.fullname = data['fullname']
-        ret.is_def = data['is_def']
+        ret.is_new_def = data['is_new_def']
+        ret.is_inferred_def = data['is_inferred_def']
         return ret
 
 
@@ -1353,6 +1365,12 @@ reverse_op_methods = {
 normal_from_reverse_op = dict((m, n) for n, m in reverse_op_methods.items())
 reverse_op_method_set = set(reverse_op_methods.values())
 
+unary_op_methods = {
+    '-': '__neg__',
+    '+': '__pos__',
+    '~': '__invert__',
+}
+
 
 class OpExpr(Expression):
     """Binary operation (other than . or [] or comparison operators,
@@ -1472,6 +1490,9 @@ class LambdaExpr(FuncItem, Expression):
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_lambda_expr(self)
 
+    def is_dynamic(self) -> bool:
+        return False
+
 
 class ListExpr(Expression):
     """List literal expression [...]."""
@@ -1786,13 +1807,14 @@ class NewTypeExpr(Expression):
     """NewType expression NewType(...)."""
     name = None  # type: str
     # The base type (the second argument to NewType)
-    old_type = None  # type: mypy.types.Type
+    old_type = None  # type: Optional[mypy.types.Type]
     # The synthesized class representing the new type (inherits old_type)
     info = None  # type: Optional[TypeInfo]
 
-    def __init__(self, name: str, old_type: 'mypy.types.Type', line: int) -> None:
+    def __init__(self, name: str, old_type: 'Optional[mypy.types.Type]', line: int) -> None:
         self.name = name
         self.old_type = old_type
+        self.line = line
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_newtype_expr(self)
@@ -1924,7 +1946,7 @@ class TypeInfo(SymbolNode):
 
     # Information related to type annotations.
 
-    # Generic type variable names
+    # Generic type variable names (full names)
     type_vars = None  # type: List[str]
 
     # Direct base classes.
@@ -1986,7 +2008,7 @@ class TypeInfo(SymbolNode):
     def add_type_vars(self) -> None:
         if self.defn.type_vars:
             for vd in self.defn.type_vars:
-                self.type_vars.append(vd.name)
+                self.type_vars.append(vd.fullname)
 
     def name(self) -> str:
         """Short name."""
@@ -2552,7 +2574,7 @@ def check_arg_kinds(arg_kinds: List[int], nodes: List[T], fail: Callable[[str, T
             is_kw_arg = True
 
 
-def check_arg_names(names: List[Optional[str]], nodes: List[T], fail: Callable[[str, T], None],
+def check_arg_names(names: Sequence[Optional[str]], nodes: List[T], fail: Callable[[str, T], None],
                     description: str = 'function definition') -> None:
     seen_names = set()  # type: Set[Optional[str]]
     for name, node in zip(names, nodes):
diff --git a/mypy/options.py b/mypy/options.py
index c6ee84c..dd9bfe0 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -20,7 +20,11 @@ class Options:
     PER_MODULE_OPTIONS = {
         "ignore_missing_imports",
         "follow_imports",
-        "disallow_any",
+        "disallow_any_generics",
+        "disallow_any_unimported",
+        "disallow_any_expr",
+        "disallow_any_decorated",
+        "disallow_any_explicit",
         "disallow_subclassing_any",
         "disallow_untyped_calls",
         "disallow_untyped_defs",
@@ -41,6 +45,9 @@ class Options:
                                - {"debug_cache"})
 
     def __init__(self) -> None:
+        # Cache for clone_for_module()
+        self.clone_cache = {}  # type: Dict[str, Options]
+
         # -- build options --
         self.build_type = BuildType.STANDARD
         self.python_version = defaults.PYTHON3_VERSION
@@ -51,7 +58,13 @@ class Options:
         self.report_dirs = {}  # type: Dict[str, str]
         self.ignore_missing_imports = False
         self.follow_imports = 'normal'  # normal|silent|skip|error
-        self.disallow_any = []  # type: List[str]
+
+        # disallow_any options
+        self.disallow_any_generics = False
+        self.disallow_any_unimported = False
+        self.disallow_any_expr = False
+        self.disallow_any_decorated = False
+        self.disallow_any_explicit = False
 
         # Disallow calling untyped functions from typed ones
         self.disallow_untyped_calls = False
@@ -155,6 +168,7 @@ class Options:
         self.shadow_file = None  # type: Optional[Tuple[str, str]]
         self.show_column_numbers = False  # type: bool
         self.dump_graph = False
+        self.dump_deps = False
 
     def __eq__(self, other: object) -> bool:
         return self.__class__ == other.__class__ and self.__dict__ == other.__dict__
@@ -166,6 +180,14 @@ class Options:
         return 'Options({})'.format(pprint.pformat(self.__dict__))
 
     def clone_for_module(self, module: str) -> 'Options':
+        """Create an Options object that incorporates per-module options.
+
+        NOTE: Once this method is called all Options objects should be
+        considered read-only, else the caching might be incorrect.
+        """
+        res = self.clone_cache.get(module)
+        if res is not None:
+            return res
         updates = {}
         for pattern in self.per_module_options:
             if self.module_matches_pattern(module, pattern):
@@ -173,10 +195,12 @@ class Options:
                     del self.unused_configs[pattern]
                 updates.update(self.per_module_options[pattern])
         if not updates:
+            self.clone_cache[module] = self
             return self
         new_options = Options()
         new_options.__dict__.update(self.__dict__)
         new_options.__dict__.update(updates)
+        self.clone_cache[module] = new_options
         return new_options
 
     def module_matches_pattern(self, module: str, pattern: Pattern[str]) -> bool:
diff --git a/mypy/parse.py b/mypy/parse.py
index 3583550..d845138 100644
--- a/mypy/parse.py
+++ b/mypy/parse.py
@@ -7,6 +7,7 @@ from mypy.nodes import MypyFile
 
 def parse(source: Union[str, bytes],
           fnam: str,
+          module: Optional[str],
           errors: Optional[Errors],
           options: Options) -> MypyFile:
     """Parse a source file, without doing any semantic analysis.
@@ -21,11 +22,13 @@ def parse(source: Union[str, bytes],
         import mypy.fastparse
         return mypy.fastparse.parse(source,
                                     fnam=fnam,
+                                    module=module,
                                     errors=errors,
                                     options=options)
     else:
         import mypy.fastparse2
         return mypy.fastparse2.parse(source,
                                      fnam=fnam,
+                                     module=module,
                                      errors=errors,
                                      options=options)
diff --git a/mypy/report.py b/mypy/report.py
index daf791b..46a30c5 100644
--- a/mypy/report.py
+++ b/mypy/report.py
@@ -25,7 +25,7 @@ from mypy.types import Type, TypeOfAny
 from mypy.version import __version__
 
 try:
-    import lxml.etree as etree
+    import lxml.etree as etree  # type: ignore
     LXML_INSTALLED = True
 except ImportError:
     LXML_INSTALLED = False
@@ -37,7 +37,7 @@ type_of_any_name_map = collections.OrderedDict([
     (TypeOfAny.from_omitted_generics, "Omitted Generics"),
     (TypeOfAny.from_error, "Error"),
     (TypeOfAny.special_form, "Special Form"),
-])  # type: collections.OrderedDict[TypeOfAny.TypeOfAny, str]
+])  # type: collections.OrderedDict[TypeOfAny, str]
 
 reporter_classes = {}  # type: Dict[str, Tuple[Callable[[Reports, str], AbstractReporter], bool]]
 
@@ -159,7 +159,7 @@ class AnyExpressionsReporter(AbstractReporter):
     def __init__(self, reports: Reports, output_dir: str) -> None:
         super().__init__(reports, output_dir)
         self.counts = {}  # type: Dict[str, Tuple[int, int]]
-        self.any_types_counter = {}  # type: Dict[str, typing.Counter[TypeOfAny.TypeOfAny]]
+        self.any_types_counter = {}  # type: Dict[str, typing.Counter[TypeOfAny]]
         stats.ensure_dir_exists(output_dir)
 
     def on_file(self,
@@ -229,7 +229,7 @@ class AnyExpressionsReporter(AbstractReporter):
         self._write_out_report('any-exprs.txt', column_names, rows, total_row)
 
     def _report_types_of_anys(self) -> None:
-        total_counter = collections.Counter()  # type: typing.Counter[TypeOfAny.TypeOfAny]
+        total_counter = collections.Counter()  # type: typing.Counter[TypeOfAny]
         for counter in self.any_types_counter.values():
             for any_type, value in counter.items():
                 total_counter[any_type] += value
@@ -361,25 +361,6 @@ class LineCoverageReporter(AbstractReporter):
 register_reporter('linecoverage', LineCoverageReporter)
 
 
-class OldHtmlReporter(AbstractReporter):
-    """Old HTML reporter.
-
-    This just calls the old functions in `stats`, which use global
-    variables to preserve state for the index.
-    """
-
-    def on_file(self,
-                tree: MypyFile,
-                type_map: Dict[Expression, Type], options: Options) -> None:
-        stats.generate_html_report(tree, tree.path, type_map, self.output_dir)
-
-    def on_finish(self) -> None:
-        stats.generate_html_index(self.output_dir)
-
-
-register_reporter('old-html', OldHtmlReporter)
-
-
 class FileInfo:
     def __init__(self, name: str, module: str) -> None:
         self.name = name
@@ -407,7 +388,7 @@ class MemoryXmlReporter(AbstractReporter):
         self.css_html_path = os.path.join(reports.data_dir, 'xml', 'mypy-html.css')
         xsd_path = os.path.join(reports.data_dir, 'xml', 'mypy.xsd')
         self.schema = etree.XMLSchema(etree.parse(xsd_path))
-        self.last_xml = None  # type: Optional[etree._ElementTree]
+        self.last_xml = None  # type: Optional[Any]
         self.files = []  # type: List[FileInfo]
 
     def on_file(self,
@@ -454,7 +435,7 @@ class MemoryXmlReporter(AbstractReporter):
     def _get_any_info_for_line(visitor: stats.StatisticsVisitor, lineno: int) -> str:
         if lineno in visitor.any_line_map:
             result = "Any Types on this line: "
-            counter = collections.Counter()  # type: typing.Counter[TypeOfAny.TypeOfAny]
+            counter = collections.Counter()  # type: typing.Counter[TypeOfAny]
             for typ in visitor.any_line_map[lineno]:
                 counter[typ.type_of_any] += 1
             for any_type, occurrences in counter.items():
@@ -501,7 +482,7 @@ class CoberturaPackage(object):
     """
     def __init__(self, name: str) -> None:
         self.name = name
-        self.classes = {}  # type: Dict[str, etree._Element]
+        self.classes = {}  # type: Dict[str, Any]
         self.packages = {}  # type: Dict[str, CoberturaPackage]
         self.total_lines = 0
         self.covered_lines = 0
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 369a30f..52c45b4 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -83,6 +83,7 @@ from mypy.options import Options
 from mypy import experiments
 from mypy.plugin import Plugin
 from mypy import join
+from mypy.util import get_prefix
 
 
 T = TypeVar('T')
@@ -192,13 +193,13 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
     # Names declated using "nonlocal" (separate set for each scope)
     nonlocal_decls = None  # type: List[Set[str]]
     # Local names of function scopes; None for non-function scopes.
-    locals = None  # type: List[SymbolTable]
+    locals = None  # type: List[Optional[SymbolTable]]
     # Nested block depths of scopes
     block_depth = None  # type: List[int]
     # TypeInfo of directly enclosing class (or None)
     type = None  # type: Optional[TypeInfo]
     # Stack of outer classes (the second tuple item contains tvars).
-    type_stack = None  # type: List[TypeInfo]
+    type_stack = None  # type: List[Optional[TypeInfo]]
     # Type variables that are bound by the directly enclosing class
     bound_tvars = None  # type: List[SymbolTableNode]
     # Type variables bound by the current scope, be it class or function
@@ -277,6 +278,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             for name in implicit_module_attrs:
                 v = self.globals[name].node
                 if isinstance(v, Var):
+                    assert v.type is not None, "Type of implicit attribute not set"
                     v.type = self.anal_type(v.type)
                     v.is_ready = True
 
@@ -306,20 +308,22 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
 
     def refresh_top_level(self, file_node: MypyFile) -> None:
         """Reanalyze a stale module top-level in fine-grained incremental mode."""
+        # TODO: Recursion into block statements.
         for d in file_node.defs:
             if isinstance(d, ClassDef):
                 self.refresh_class_def(d)
-            elif not isinstance(d, FuncItem):
+            elif not isinstance(d, (FuncItem, Decorator)):
                 self.accept(d)
 
     def refresh_class_def(self, defn: ClassDef) -> None:
+        # TODO: Recursion into block statements.
         with self.analyze_class_body(defn) as should_continue:
             if should_continue:
                 for d in defn.defs.body:
                     # TODO: Make sure refreshing class bodies works.
                     if isinstance(d, ClassDef):
                         self.refresh_class_def(d)
-                    elif not isinstance(d, FuncItem):
+                    elif not isinstance(d, (FuncItem, Decorator)):
                         self.accept(d)
 
     @contextmanager
@@ -369,6 +373,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             #   be a win.
             if self.is_class_scope():
                 # Method definition
+                assert self.type is not None, "Type not set at class scope"
                 defn.info = self.type
                 if not defn.is_decorated and not defn.is_overload:
                     if (defn.name() in self.type.names and
@@ -378,9 +383,10 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                         if not self.set_original_def(n, defn):
                             self.name_already_defined(defn.name(), defn)
                     self.type.names[defn.name()] = SymbolTableNode(MDEF, defn)
-                self.prepare_method_signature(defn)
+                self.prepare_method_signature(defn, self.type)
             elif self.is_func_scope():
                 # Nested function
+                assert self.locals[-1] is not None, "No locals at function scope"
                 if not defn.is_decorated and not defn.is_overload:
                     if defn.name() in self.locals[-1]:
                         # Redefinition. Conditional redefinition is okay.
@@ -392,7 +398,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             else:
                 # Top-level function
                 if not defn.is_decorated and not defn.is_overload:
-                    symbol = self.globals.get(defn.name())
+                    symbol = self.globals[defn.name()]
                     if isinstance(symbol.node, FuncDef) and symbol.node != defn:
                         # This is redefinition. Conditional redefinition is okay.
                         if not self.set_original_def(symbol.node, defn):
@@ -413,12 +419,12 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                 else:
                     # A coroutine defined as `async def foo(...) -> T: ...`
                     # has external return type `Awaitable[T]`.
-                    defn.type = defn.type.copy_modified(
-                        ret_type = self.named_type_or_none('typing.Awaitable',
-                                                           [defn.type.ret_type]))
+                    ret_type = self.named_type_or_none('typing.Awaitable', [defn.type.ret_type])
+                    assert ret_type is not None, "Internal error: typing.Awaitable not found"
+                    defn.type = defn.type.copy_modified(ret_type=ret_type)
             self.errors.pop_function()
 
-    def prepare_method_signature(self, func: FuncDef) -> None:
+    def prepare_method_signature(self, func: FuncDef, info: TypeInfo) -> None:
         """Check basic signature validity and tweak annotation of self/cls argument."""
         # Only non-static methods are special.
         functype = func.type
@@ -429,12 +435,12 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                 self_type = functype.arg_types[0]
                 if isinstance(self_type, AnyType):
                     if func.is_class or func.name() in ('__new__', '__init_subclass__'):
-                        leading_type = self.class_type(self.type)
+                        leading_type = self.class_type(info)
                     else:
-                        leading_type = fill_typevars(self.type)
+                        leading_type = fill_typevars(info)
                     func.type = replace_implicit_first_type(functype, leading_type)
 
-    def set_original_def(self, previous: Node, new: FuncDef) -> bool:
+    def set_original_def(self, previous: Optional[Node], new: FuncDef) -> bool:
         """If 'new' conditionally redefine 'previous', set 'previous' as original
 
         We reject straight redefinitions of functions, as they are usually
@@ -443,7 +449,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         . def f(): ...
         . def f(): ...  # Error: 'f' redefined
         """
-        if isinstance(previous, (FuncDef, Var)) and new.is_conditional:
+        if isinstance(previous, (FuncDef, Var, Decorator)) and new.is_conditional:
             new.original_def = previous
             return True
         else:
@@ -534,7 +540,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                 assert defn.impl is defn.items[-1]
                 defn.items = defn.items[:-1]
             elif not self.is_stub_file and not non_overload_indexes:
-                if not (self.is_class_scope() and self.type.is_protocol):
+                if not (self.type and not self.is_func_scope() and self.type.is_protocol):
                     self.fail(
                         "An overloaded function outside a stub file must have an implementation",
                         defn)
@@ -554,7 +560,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             # redfinitions already.
             return
 
-        if self.is_class_scope():
+        if self.type and not self.is_func_scope():
             self.type.names[defn.name()] = SymbolTableNode(MDEF, defn,
                                                            typ=defn.type)
             defn.info = self.type
@@ -629,9 +635,8 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             self.function_stack.pop()
 
     def check_classvar_in_signature(self, typ: Type) -> None:
-        t = None  # type: Type
         if isinstance(typ, Overloaded):
-            for t in typ.items():
+            for t in typ.items():  # type: Type
                 self.check_classvar_in_signature(t)
             return
         if not isinstance(typ, CallableType):
@@ -690,8 +695,10 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                     if prohibited in named_tuple_info.names:
                         if nt_names.get(prohibited) is named_tuple_info.names[prohibited]:
                             continue
+                        ctx = named_tuple_info.names[prohibited].node
+                        assert ctx is not None
                         self.fail('Cannot overwrite NamedTuple attribute "{}"'.format(prohibited),
-                                  named_tuple_info.names[prohibited].node)
+                                  ctx)
 
                 # Restore the names in the original symbol table. This ensures that the symbol
                 # table contains the field objects created by build_namedtuple_typeinfo. Exclude
@@ -759,7 +766,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                     # check arbitrarily the first overload item. If the
                     # different items have a different abstract status, there
                     # should be an error reported elsewhere.
-                    func = node.items[0]  # type: Node
+                    func = node.items[0]  # type: Optional[Node]
                 else:
                     func = node
                 if isinstance(func, Decorator):
@@ -779,7 +786,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
 
         This includes things like 'int' being compatible with 'float'.
         """
-        promote_target = None  # type: Type
+        promote_target = None  # type: Optional[Type]
         for decorator in defn.decorators:
             if isinstance(decorator, CallExpr):
                 analyzed = decorator.analyzed
@@ -886,14 +893,14 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             return tvars
         return None
 
-    def analyze_unbound_tvar(self, t: Type) -> Tuple[str, TypeVarExpr]:
+    def analyze_unbound_tvar(self, t: Type) -> Optional[Tuple[str, TypeVarExpr]]:
         if not isinstance(t, UnboundType):
             return None
         unbound = t
         sym = self.lookup_qualified(unbound.name, unbound)
         if sym is None or sym.kind != TVAR:
             return None
-        elif not self.tvar_scope.allow_binding(sym.fullname):
+        elif sym.fullname and not self.tvar_scope.allow_binding(sym.fullname):
             # It's bound by our type variable scope
             return None
         else:
@@ -1049,7 +1056,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             else:
                 self.fail('Invalid base class', base_expr)
                 info.fallback_to_any = True
-            if 'unimported' in self.options.disallow_any and has_any_from_unimported_type(base):
+            if self.options.disallow_any_unimported and has_any_from_unimported_type(base):
                 if isinstance(base_expr, (NameExpr, MemberExpr)):
                     prefix = "Base type {}".format(base_expr.name)
                 else:
@@ -1138,6 +1145,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                 # Some form of namedtuple is the only valid type that looks like a call
                 # expression. This isn't a valid type.
                 raise TypeTranslationError()
+            assert info.tuple_type, "NamedTuple without tuple type"
             fallback = Instance(info, [])
             return TupleType(info.tuple_type.items, fallback=fallback)
         typ = expr_to_unanalyzed_type(expr)
@@ -1178,11 +1186,12 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
 
     def analyze_metaclass(self, defn: ClassDef) -> None:
         if defn.metaclass:
+            metaclass_name = None
             if isinstance(defn.metaclass, NameExpr):
                 metaclass_name = defn.metaclass.name
             elif isinstance(defn.metaclass, MemberExpr):
                 metaclass_name = get_member_expr_fullname(defn.metaclass)
-            else:
+            if metaclass_name is None:
                 self.fail("Dynamic metaclass not supported for '%s'" % defn.name, defn.metaclass)
                 return
             sym = self.lookup_qualified(metaclass_name, defn.metaclass)
@@ -1229,8 +1238,9 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         else:
             return leading_type
 
-    def named_type(self, qualified_name: str, args: List[Type] = None) -> Instance:
-        sym = self.lookup_qualified(qualified_name, None)
+    def named_type(self, qualified_name: str, args: Optional[List[Type]] = None) -> Instance:
+        sym = self.lookup_qualified(qualified_name, Context())
+        assert sym, "Internal error: attempted to construct unknown type"
         node = sym.node
         assert isinstance(node, TypeInfo)
         if args:
@@ -1238,7 +1248,8 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             return Instance(node, args)
         return Instance(node, [AnyType(TypeOfAny.special_form)] * len(node.defn.type_vars))
 
-    def named_type_or_none(self, qualified_name: str, args: List[Type] = None) -> Instance:
+    def named_type_or_none(self, qualified_name: str,
+                           args: Optional[List[Type]] = None) -> Optional[Instance]:
         sym = self.lookup_fully_qualified_or_none(qualified_name)
         if not sym:
             return None
@@ -1316,9 +1327,9 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         return False
 
     def check_typeddict_classdef(self, defn: ClassDef,
-                                 oldfields: List[str] = None) -> Tuple[List[str],
-                                                                       List[Type],
-                                                                       Set[str]]:
+                                 oldfields: Optional[List[str]] = None) -> Tuple[List[str],
+                                                                                 List[Type],
+                                                                                 Set[str]]:
         TPDICT_CLASS_ERROR = ('Invalid statement in TypedDict definition; '
                               'expected "field_name: field_type"')
         if self.options.python_version < (3, 6):
@@ -1356,7 +1367,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                 elif not isinstance(stmt.rvalue, TempNode):
                     # x: int assigns rvalue to TempNode(AnyType())
                     self.fail('Right hand side values are not supported in TypedDict', stmt)
-        total = True
+        total = True  # type: Optional[bool]
         if 'total' in defn.keywords:
             total = self.parse_bool(defn.keywords['total'])
             if total is None:
@@ -1475,6 +1486,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                 if extra:
                     message += " {}".format(extra)
                 self.fail(message, imp)
+                self.add_unknown_symbol(as_id or id, imp, is_import=True)
             else:
                 # Missing module.
                 self.add_unknown_symbol(as_id or id, imp, is_import=True)
@@ -1484,7 +1496,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                                           module_symbol: SymbolTableNode,
                                           import_node: ImportBase) -> bool:
         if (existing_symbol.kind in (LDEF, GDEF, MDEF) and
-                isinstance(existing_symbol.node, (Var, FuncDef, TypeInfo))):
+                isinstance(existing_symbol.node, (Var, FuncDef, TypeInfo, Decorator))):
             # This is a valid import over an existing definition in the file. Construct a dummy
             # assignment that we'll use to type check the import.
             lvalue = NameExpr(imported_id)
@@ -1501,24 +1513,25 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         return False
 
     def normalize_type_alias(self, node: SymbolTableNode,
-                             ctx: Context) -> SymbolTableNode:
+                             ctx: Context) -> Optional[SymbolTableNode]:
         normalized = False
         fullname = node.fullname
         if fullname in type_aliases:
             # Node refers to an aliased type such as typing.List; normalize.
-            node = self.lookup_qualified(type_aliases[fullname], ctx)
-            if node is None:
+            new_node = self.lookup_qualified(type_aliases[fullname], ctx)
+            if new_node is None:
                 self.add_fixture_note(fullname, ctx)
                 return None
             normalized = True
         if fullname in collections_type_aliases:
             # Similar, but for types from the collections module like typing.DefaultDict
             self.add_module_symbol('collections', '__mypy_collections__', False, ctx)
-            node = self.lookup_qualified(collections_type_aliases[fullname], ctx)
+            new_node = self.lookup_qualified(collections_type_aliases[fullname], ctx)
             normalized = True
         if normalized:
-            node = SymbolTableNode(node.kind, node.node, node.type_override,
-                                   normalized=True, alias_tvars=node.alias_tvars)
+            assert new_node is not None, "Collection node not found"
+            node = SymbolTableNode(new_node.kind, new_node.node, new_node.type_override,
+                                   normalized=True, alias_tvars=new_node.alias_tvars)
         return node
 
     def add_fixture_note(self, fullname: str, ctx: Context) -> None:
@@ -1551,20 +1564,21 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             m = self.modules[i_id]
             self.add_submodules_to_parent_modules(i_id, True)
             for name, node in m.names.items():
-                node = self.normalize_type_alias(node, i)
+                new_node = self.normalize_type_alias(node, i)
                 # if '__all__' exists, all nodes not included have had module_public set to
                 # False, and we can skip checking '_' because it's been explicitly included.
-                if node.module_public and (not name.startswith('_') or '__all__' in m.names):
+                if (new_node and new_node.module_public and
+                        (not name.startswith('_') or '__all__' in m.names)):
                     existing_symbol = self.globals.get(name)
                     if existing_symbol:
                         # Import can redefine a variable. They get special treatment.
                         if self.process_import_over_existing_name(
-                                name, existing_symbol, node, i):
+                                name, existing_symbol, new_node, i):
                             continue
-                    self.add_symbol(name, SymbolTableNode(node.kind, node.node,
-                                                          node.type_override,
-                                                          normalized=node.normalized,
-                                                          alias_tvars=node.alias_tvars), i)
+                    self.add_symbol(name, SymbolTableNode(new_node.kind, new_node.node,
+                                                          new_node.type_override,
+                                                          normalized=new_node.normalized,
+                                                          alias_tvars=new_node.alias_tvars), i)
         else:
             # Don't add any dummy symbols for 'from x import *' if 'x' is unknown.
             pass
@@ -1596,7 +1610,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             self.accept(s)
         self.block_depth[-1] -= 1
 
-    def visit_block_maybe(self, b: Block) -> None:
+    def visit_block_maybe(self, b: Optional[Block]) -> None:
         if b:
             self.visit_block(b)
 
@@ -1628,16 +1642,11 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                   allow_tuple_literal: bool = False,
                   aliasing: bool = False,
                   third_pass: bool = False) -> Type:
-        if t:
-            a = self.type_analyzer(
-                tvar_scope=tvar_scope,
-                aliasing=aliasing,
-                allow_tuple_literal=allow_tuple_literal,
-                third_pass=third_pass)
-            return t.accept(a)
-
-        else:
-            return None
+        a = self.type_analyzer(tvar_scope=tvar_scope,
+                               aliasing=aliasing,
+                               allow_tuple_literal=allow_tuple_literal,
+                               third_pass=third_pass)
+        return t.accept(a)
 
     def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
         for lval in s.lvalues:
@@ -1652,12 +1661,12 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                         if isinstance(lval.node, Var):
                             lval.node.is_abstract_var = True
         else:
-            if (any(isinstance(lv, NameExpr) and lv.is_def for lv in s.lvalues) and
+            if (any(isinstance(lv, NameExpr) and lv.is_inferred_def for lv in s.lvalues) and
                     self.type and self.type.is_protocol and not self.is_func_scope()):
                 self.fail('All protocol members must have explicitly declared types', s)
             # Set the type if the rvalue is a simple literal (even if the above error occurred).
             if len(s.lvalues) == 1 and isinstance(s.lvalues[0], NameExpr):
-                if s.lvalues[0].is_def:
+                if s.lvalues[0].is_inferred_def:
                     s.type = self.analyze_simple_literal_type(s.rvalue)
         if s.type:
             # Store type into nodes.
@@ -1754,7 +1763,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             # Second rule: Explicit type (cls: Type[A] = A) always creates variable, not alias.
             return
         non_global_scope = self.type or self.is_func_scope()
-        if isinstance(s.rvalue, NameExpr) and non_global_scope and lvalue.is_def:
+        if isinstance(s.rvalue, NameExpr) and non_global_scope and lvalue.is_inferred_def:
             # Third rule: Non-subscripted right hand side creates a variable
             # at class and function scopes. For example:
             #
@@ -1771,7 +1780,8 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         if not res:
             return
         node = self.lookup(lvalue.name, lvalue)
-        if not lvalue.is_def:
+        assert node is not None
+        if not lvalue.is_inferred_def:
             # Type aliases can't be re-defined.
             if node and (node.kind == TYPE_ALIAS or isinstance(node.node, TypeInfo)):
                 self.fail('Cannot assign multiple types to name "{}"'
@@ -1808,10 +1818,12 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                        explicit_type: bool = False) -> None:
         """Analyze an lvalue or assignment target.
 
-        Only if add_global is True, add name to globals table. If nested
-        is true, the lvalue is within a tuple or list lvalue expression.
+        Args:
+            lval: The target lvalue
+            nested: If true, the lvalue is within a tuple or list lvalue expression
+            add_global: Add name to globals table only if this is true (used in first pass)
+            explicit_type: Assignment has type annotation
         """
-
         if isinstance(lval, NameExpr):
             # Top-level definitions within some statements (at least while) are
             # not handled in the first pass, so they have to be added now.
@@ -1825,22 +1837,25 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                 v._fullname = self.qualified_name(lval.name)
                 v.is_ready = False  # Type not inferred yet
                 lval.node = v
-                lval.is_def = True
+                lval.is_new_def = True
+                lval.is_inferred_def = True
                 lval.kind = GDEF
                 lval.fullname = v._fullname
                 self.globals[lval.name] = SymbolTableNode(GDEF, v)
-            elif isinstance(lval.node, Var) and lval.is_def:
-                # Since the is_def flag is set, this must have been analyzed
-                # already in the first pass and added to the symbol table.
-                assert lval.node.name() in self.globals
-            elif (self.is_func_scope() and lval.name not in self.locals[-1] and
+            elif isinstance(lval.node, Var) and lval.is_new_def:
+                if lval.kind == GDEF:
+                    # Since the is_new_def flag is set, this must have been analyzed
+                    # already in the first pass and added to the symbol table.
+                    assert lval.node.name() in self.globals
+            elif (self.locals[-1] is not None and lval.name not in self.locals[-1] and
                   lval.name not in self.global_decls[-1] and
                   lval.name not in self.nonlocal_decls[-1]):
                 # Define new local name.
                 v = Var(lval.name)
                 v.set_line(lval)
                 lval.node = v
-                lval.is_def = True
+                lval.is_new_def = True
+                lval.is_inferred_def = True
                 lval.kind = LDEF
                 lval.fullname = lval.name
                 self.add_local(v, lval)
@@ -1853,7 +1868,8 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                 v.set_line(lval)
                 v._fullname = self.qualified_name(lval.name)
                 lval.node = v
-                lval.is_def = True
+                lval.is_new_def = True
+                lval.is_inferred_def = True
                 lval.kind = MDEF
                 lval.fullname = lval.name
                 self.type.names[lval.name] = SymbolTableNode(MDEF, v)
@@ -1908,13 +1924,15 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
     def analyze_member_lvalue(self, lval: MemberExpr) -> None:
         lval.accept(self)
         if self.is_self_member_ref(lval):
+            assert self.type, "Self member outside a class"
             node = self.type.get(lval.name)
             if node is None or isinstance(node.node, Var) and node.node.is_abstract_var:
                 if self.type.is_protocol and node is None:
                     self.fail("Protocol members cannot be defined via assignment to self", lval)
                 else:
                     # Implicit attribute definition in __init__.
-                    lval.is_def = True
+                    lval.is_new_def = True
+                    lval.is_inferred_def = True
                     v = Var(lval.name)
                     v.set_line(lval)
                     v._fullname = self.qualified_name(lval.name)
@@ -1932,7 +1950,8 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         node = memberexpr.expr.node
         return isinstance(node, Var) and node.is_self
 
-    def check_lvalue_validity(self, node: Union[Expression, SymbolNode], ctx: Context) -> None:
+    def check_lvalue_validity(self, node: Union[Expression, SymbolNode, None],
+                              ctx: Context) -> None:
         if isinstance(node, TypeVarExpr):
             self.fail('Invalid assignment target', ctx)
         elif isinstance(node, TypeInfo):
@@ -1942,7 +1961,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         if isinstance(typ, StarType) and not isinstance(lvalue, StarExpr):
             self.fail('Star type only allowed for starred expressions', lvalue)
         if isinstance(lvalue, RefExpr):
-            lvalue.is_def = False
+            lvalue.is_inferred_def = False
             if isinstance(lvalue.node, Var):
                 var = lvalue.node
                 var.type = typ
@@ -1996,7 +2015,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         check_for_explicit_any(old_type, self.options, self.is_typeshed_stub_file, self.msg,
                                context=s)
 
-        if 'unimported' in self.options.disallow_any and has_any_from_unimported_type(old_type):
+        if self.options.disallow_any_unimported and has_any_from_unimported_type(old_type):
             self.msg.unimported_type_becomes_any("Argument 2 to NewType(...)", old_type, s)
 
         # If so, add it to the symbol table.
@@ -2019,7 +2038,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                 and s.rvalue.callee.fullname == 'typing.NewType'):
             lvalue = s.lvalues[0]
             name = s.lvalues[0].name
-            if not lvalue.is_def:
+            if not lvalue.is_inferred_def:
                 if s.type:
                     self.fail("Cannot declare the type of a NewType declaration", s)
                 else:
@@ -2087,7 +2106,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         lvalue = s.lvalues[0]
         assert isinstance(lvalue, NameExpr)
         name = lvalue.name
-        if not lvalue.is_def:
+        if not lvalue.is_inferred_def:
             if s.type:
                 self.fail("Cannot declare the type of a type variable", s)
             else:
@@ -2110,7 +2129,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             return
         variance, upper_bound = res
 
-        if 'unimported' in self.options.disallow_any:
+        if self.options.disallow_any_unimported:
             for idx, constraint in enumerate(values, start=1):
                 if has_any_from_unimported_type(constraint):
                     prefix = "Constraint {}".format(idx)
@@ -2125,6 +2144,8 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                                    context=s)
         # Yes, it's a valid type variable definition! Add it to the symbol table.
         node = self.lookup(name, s)
+        assert node is not None
+        assert node.fullname is not None
         node.kind = TVAR
         TypeVar = TypeVarExpr(name, node.fullname, values, upper_bound, variance)
         TypeVar.line = call.line
@@ -2238,10 +2259,12 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             return
         # Yes, it's a valid namedtuple definition. Add it to the symbol table.
         node = self.lookup(name, s)
+        assert node is not None
         node.kind = GDEF   # TODO locally defined namedtuple
         node.node = named_tuple
 
-    def check_namedtuple(self, node: Expression, var_name: str = None) -> Optional[TypeInfo]:
+    def check_namedtuple(self, node: Expression,
+                         var_name: Optional[str] = None) -> Optional[TypeInfo]:
         """Check if a call defines a namedtuple.
 
         The optional var_name argument is the name of the variable to
@@ -2388,7 +2411,8 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         info.tuple_type = TupleType(types, fallback)
 
         def patch() -> None:
-            # Calculate the correct value type for the fallback Mapping.
+            # Calculate the correct value type for the fallback tuple.
+            assert info.tuple_type, "TupleType type deleted before calling the patch"
             fallback.args[0] = join.join_type_list(list(info.tuple_type.items))
 
         # We can't calculate the complete fallback type until after semantic
@@ -2401,6 +2425,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             var.info = info
             var.is_initialized_in_class = is_initialized_in_class
             var.is_property = is_property
+            var._fullname = '%s.%s' % (info.fullname(), var.name())
             info.names[var.name()] = SymbolTableNode(MDEF, var)
 
         vars = [Var(item, typ) for item, typ in zip(items, types)]
@@ -2415,13 +2440,13 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         add_field(Var('__annotations__', ordereddictype), is_initialized_in_class=True)
         add_field(Var('__doc__', strtype), is_initialized_in_class=True)
 
-        tvd = TypeVarDef('NT', 1, [], info.tuple_type)
+        tvd = TypeVarDef('NT', 'NT', 1, [], info.tuple_type)
         selftype = TypeVarType(tvd)
 
         def add_method(funcname: str,
                        ret: Type,
                        args: List[Argument],
-                       name: str = None,
+                       name: Optional[str] = None,
                        is_classmethod: bool = False,
                        ) -> None:
             if is_classmethod:
@@ -2433,16 +2458,20 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             types = [arg.type_annotation for arg in args]
             items = [arg.variable.name() for arg in args]
             arg_kinds = [arg.kind for arg in args]
-            signature = CallableType(types, arg_kinds, items, ret, function_type,
-                                     name=name or info.name() + '.' + funcname)
+            assert None not in types
+            signature = CallableType(cast(List[Type], types), arg_kinds, items, ret,
+                                     function_type)
             signature.variables = [tvd]
-            func = FuncDef(funcname, args, Block([]), typ=signature)
+            func = FuncDef(funcname, args, Block([]))
             func.info = info
             func.is_class = is_classmethod
+            func.type = set_callable_name(signature, func)
+            func._fullname = info.fullname() + '.' + funcname
             if is_classmethod:
-                v = Var(funcname, signature)
+                v = Var(funcname, func.type)
                 v.is_classmethod = True
                 v.info = info
+                v._fullname = func._fullname
                 dec = Decorator(func, [NameExpr('classmethod')], v)
                 info.names[funcname] = SymbolTableNode(MDEF, dec)
             else:
@@ -2494,7 +2523,8 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             node.kind = GDEF   # TODO locally defined TypedDict
             node.node = typed_dict
 
-    def check_typeddict(self, node: Expression, var_name: str = None) -> Optional[TypeInfo]:
+    def check_typeddict(self, node: Expression,
+                        var_name: Optional[str] = None) -> Optional[TypeInfo]:
         """Check if a call defines a TypedDict.
 
         The optional var_name argument is the name of the variable to
@@ -2514,7 +2544,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         fullname = callee.fullname
         if fullname != 'mypy_extensions.TypedDict':
             return None
-        items, types, total, ok = self.parse_typeddict_args(call, fullname)
+        items, types, total, ok = self.parse_typeddict_args(call)
         if not ok:
             # Error. Construct dummy return value.
             info = self.build_typeddict_typeinfo('TypedDict', [], [], set())
@@ -2540,8 +2570,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         call.analyzed.set_line(call.line, call.column)
         return info
 
-    def parse_typeddict_args(self, call: CallExpr,
-                             fullname: str) -> Tuple[List[str], List[Type], bool, bool]:
+    def parse_typeddict_args(self, call: CallExpr) -> Tuple[List[str], List[Type], bool, bool]:
         # TODO: Share code with check_argument_count in checkexpr.py?
         args = call.args
         if len(args) < 2:
@@ -2560,7 +2589,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         if not isinstance(args[1], DictExpr):
             return self.fail_typeddict_arg(
                 "TypedDict() expects a dictionary literal as the second argument", call)
-        total = True
+        total = True  # type: Optional[bool]
         if len(args) == 3:
             total = self.parse_bool(call.args[2])
             if total is None:
@@ -2572,10 +2601,11 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             check_for_explicit_any(t, self.options, self.is_typeshed_stub_file, self.msg,
                                    context=call)
 
-        if 'unimported' in self.options.disallow_any:
+        if self.options.disallow_any_unimported:
             for t in types:
                 if has_any_from_unimported_type(t):
                     self.msg.unimported_type_becomes_any("Type of a TypedDict key", t, dictexpr)
+        assert total is not None
         return items, types, total, ok
 
     def parse_bool(self, expr: Expression) -> Optional[bool]:
@@ -2621,6 +2651,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
 
         def patch() -> None:
             # Calculate the correct value type for the fallback Mapping.
+            assert info.typeddict_type, "TypedDict type deleted before calling the patch"
             fallback.args[1] = join.join_type_list(list(info.typeddict_type.items.values()))
 
         # We can't calculate the complete fallback type until after semantic
@@ -2633,7 +2664,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         lvalue = s.lvalues[0]
         if len(s.lvalues) != 1 or not isinstance(lvalue, RefExpr):
             return
-        if not self.is_classvar(s.type):
+        if not s.type or not self.is_classvar(s.type):
             return
         if self.is_class_scope() and isinstance(lvalue, NameExpr):
             node = lvalue.node
@@ -2711,7 +2742,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                                 "without explicit 'types.ModuleType' annotation".format(lval.name),
                                 ctx)
                         # never create module alias except on initial var definition
-                        elif lval.is_def:
+                        elif lval.is_inferred_def:
                             lnode.kind = MODULE_REF
                             lnode.node = rnode.node
 
@@ -2730,7 +2761,8 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             node.kind = GDEF   # TODO locally defined Enum
             node.node = enum_call
 
-    def check_enum_call(self, node: Expression, var_name: str = None) -> Optional[TypeInfo]:
+    def check_enum_call(self, node: Expression,
+                        var_name: Optional[str] = None) -> Optional[TypeInfo]:
         """Check if a call defines an Enum.
 
         Example:
@@ -3088,10 +3120,10 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
 
     def visit_exec_stmt(self, s: ExecStmt) -> None:
         s.expr.accept(self)
-        if s.variables1:
-            s.variables1.accept(self)
-        if s.variables2:
-            s.variables2.accept(self)
+        if s.globals:
+            s.globals.accept(self)
+        if s.locals:
+            s.locals.accept(self)
 
     #
     # Expressions
@@ -3235,7 +3267,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             for a in call.args:
                 a.accept(self)
             return None
-        expr = DictExpr([(StrExpr(key), value)
+        expr = DictExpr([(StrExpr(cast(str, key)), value)  # since they are all ARG_NAMED
                          for key, value in zip(call.arg_names, call.args)])
         expr.set_line(call)
         expr.accept(self)
@@ -3269,14 +3301,22 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             # In this case base.node is the module's MypyFile and we look up
             # bar in its namespace.  This must be done for all types of bar.
             file = cast(Optional[MypyFile], base.node)  # can't use isinstance due to issue #2999
+            # TODO: Should we actually use this? Not sure if this makes a difference.
+            # if file.fullname() == self.cur_mod_id:
+            #     names = self.globals
+            # else:
+            #     names = file.names
             n = file.names.get(expr.name, None) if file is not None else None
             if n and not n.module_hidden:
                 n = self.normalize_type_alias(n, expr)
                 if not n:
                     return
-                expr.kind = n.kind
-                expr.fullname = n.fullname
-                expr.node = n.node
+                n = self.rebind_symbol_table_node(n)
+                if n:
+                    # TODO: What if None?
+                    expr.kind = n.kind
+                    expr.fullname = n.fullname
+                    expr.node = n.node
             elif file is not None and file.is_stub and '__getattr__' in file.names:
                 # If there is a module-level __getattr__, then any attribute on the module is valid
                 # per PEP 484.
@@ -3367,6 +3407,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             # Special form -- subscripting a generic type alias.
             # Perform the type substitution and create a new alias.
             res, alias_tvars = self.analyze_alias(expr)
+            assert res is not None, "Failed analyzing already defined alias"
             expr.analyzed = TypeAliasExpr(res, alias_tvars, fallback=self.alias_fallback(res),
                                           in_runtime=True)
             expr.analyzed.line = expr.line
@@ -3541,7 +3582,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                     self.name_not_defined(name, ctx)
                 return None
         # 2. Class attributes (if within class definition)
-        if self.is_class_scope() and name in self.type.names:
+        if self.type and not self.is_func_scope() and name in self.type.names:
             node = self.type.names[name]
             if not node.implicit:
                 return node
@@ -3608,7 +3649,14 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                             result = n.node.get(parts[i])
                         n = result
                     elif isinstance(n.node, MypyFile):
-                        n = n.node.names.get(parts[i], None)
+                        names = n.node.names
+                        # Rebind potential references to old version of current module in
+                        # fine-grained incremental mode.
+                        #
+                        # TODO: Do this for all modules in the set of modified files.
+                        if n.node.fullname() == self.cur_mod_id:
+                            names = self.globals
+                        n = names.get(parts[i], None)
                     # TODO: What if node is Var or FuncDef?
                     if not n:
                         if not suppress_errors:
@@ -3619,9 +3667,28 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
                     if n and n.module_hidden:
                         self.name_not_defined(name, ctx)
             if n and not n.module_hidden:
+                n = self.rebind_symbol_table_node(n)
                 return n
             return None
 
+    def rebind_symbol_table_node(self, n: SymbolTableNode) -> Optional[SymbolTableNode]:
+        """If node refers to old version of module, return reference to new version.
+
+        If the reference is removed in the new version, return None.
+        """
+        # TODO: Handle type aliases, type variables and other sorts of references
+        if isinstance(n.node, (FuncDef, OverloadedFuncDef, TypeInfo, Var)):
+            # TODO: Why is it possible for fullname() to be None, even though it's not
+            #   annotated as Optional[str]?
+            # TODO: Do this for all modules in the set of modified files
+            # TODO: This doesn't work for things nested within classes
+            if n.node.fullname() and get_prefix(n.node.fullname()) == self.cur_mod_id:
+                # This is an indirect reference to a name defined in the current module.
+                # Rebind it.
+                return self.globals.get(n.node.name())
+        # No need to rebind.
+        return n
+
     def builtin_type(self, fully_qualified_name: str) -> Instance:
         sym = self.lookup_fully_qualified(fully_qualified_name)
         node = sym.node
@@ -3634,20 +3701,19 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
         Assume that the name is defined. This happens in the global namespace -- the local
         module namespace is ignored.
         """
-        assert '.' in name
         parts = name.split('.')
         n = self.modules[parts[0]]
         for i in range(1, len(parts) - 1):
             next_sym = n.names[parts[i]]
             assert isinstance(next_sym.node, MypyFile)
             n = next_sym.node
-        return n.names.get(parts[-1])
+        return n.names[parts[-1]]
 
     def lookup_fully_qualified_or_none(self, name: str) -> Optional[SymbolTableNode]:
         """Lookup a fully qualified name.
 
-        Assume that the name is defined. This happens in the global namespace -- the local
-        module namespace is ignored.
+        Don't assume that the name is defined. This happens in the global namespace --
+        the local module namespace is ignored.
         """
         assert '.' in name
         parts = name.split('.')
@@ -3692,6 +3758,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
     def add_symbol(self, name: str, node: SymbolTableNode,
                    context: Context) -> None:
         if self.is_func_scope():
+            assert self.locals[-1] is not None
             if name in self.locals[-1]:
                 # Flag redefinition unless this is a reimport of a module.
                 if not (node.kind == MODULE_REF and
@@ -3715,6 +3782,7 @@ class SemanticAnalyzerPass2(NodeVisitor[None]):
             self.globals[name] = node
 
     def add_local(self, node: Union[Var, FuncDef, OverloadedFuncDef], ctx: Context) -> None:
+        assert self.locals[-1] is not None, "Should not add locals outside a function"
         name = node.name()
         if name in self.locals[-1]:
             self.name_already_defined(name, ctx)
@@ -3809,9 +3877,9 @@ def set_callable_name(sig: Type, fdef: FuncDef) -> Type:
     if isinstance(sig, FunctionLike):
         if fdef.info:
             return sig.with_name(
-                '"{}" of "{}"'.format(fdef.name(), fdef.info.name()))
+                '{} of {}'.format(fdef.name(), fdef.info.name()))
         else:
-            return sig.with_name('"{}"'.format(fdef.name()))
+            return sig.with_name(fdef.name())
     else:
         return sig
 
@@ -3838,7 +3906,7 @@ def calculate_class_mro(defn: ClassDef, fail: Callable[[str, Context], None]) ->
     defn.info.fallback_to_any = any(baseinfo.fallback_to_any for baseinfo in defn.info.mro)
 
 
-def find_duplicate(list: List[T]) -> T:
+def find_duplicate(list: List[T]) -> Optional[T]:
     """If the list has duplicates, return one of the duplicates.
 
     Otherwise, return None.
diff --git a/mypy/semanal_pass1.py b/mypy/semanal_pass1.py
index f0ce826..15ee31e 100644
--- a/mypy/semanal_pass1.py
+++ b/mypy/semanal_pass1.py
@@ -1,16 +1,20 @@
 """The semantic analyzer pass 1.
 
-This sets up externally visible names defined in a module but ignores
-imports and local definitions.  It helps enable (some) cyclic references
-between modules, such as module 'a' that imports module 'b' and used
-names defined in b *and* vice versa.  The first pass can be performed
-before dependent modules have been processed.
+This sets up externally visible names defined in a module but doesn't
+follow imports and mostly ignores local definitions.  It helps enable
+(some) cyclic references between modules, such as module 'a' that
+imports module 'b' and uses names defined in b *and* vice versa.  The
+first pass can be performed before dependent modules have been
+processed.
 
 Since this pass can't assume that other modules have been processed,
-this pass cannot determine the types of certain definitions that can
-only be recognized in later passes. Examples of these include TypeVar
-and NamedTuple definitions, as these look like regular assignments until
-we are able to bind names, which only happens in pass 2.
+this pass cannot detect certain definitions that can only be recognized
+in later passes. Examples of these include TypeVar and NamedTuple
+definitions, as these look like regular assignments until we are able to
+bind names, which only happens in pass 2.
+
+This pass also infers the reachability of certain if statements, such as
+those with platform checks.
 """
 
 from typing import List, Tuple
@@ -19,11 +23,13 @@ from mypy import experiments
 from mypy.nodes import (
     MypyFile, SymbolTable, SymbolTableNode, Var, Block, AssignmentStmt, FuncDef, Decorator,
     ClassDef, TypeInfo, ImportFrom, Import, ImportAll, IfStmt, WhileStmt, ForStmt, WithStmt,
-    TryStmt, OverloadedFuncDef, Lvalue, LDEF, GDEF, MDEF, UNBOUND_IMPORTED, implicit_module_attrs
+    TryStmt, OverloadedFuncDef, Lvalue, Context, LDEF, GDEF, MDEF, UNBOUND_IMPORTED, MODULE_REF,
+    implicit_module_attrs
 )
 from mypy.types import Type, UnboundType, UnionType, AnyType, TypeOfAny, NoneTyp
 from mypy.semanal import SemanticAnalyzerPass2, infer_reachability_of_if_statement
 from mypy.options import Options
+from mypy.sametypes import is_same_type
 from mypy.visitor import NodeVisitor
 
 
@@ -133,6 +139,9 @@ class SemanticAnalyzerPass1(NodeVisitor[None]):
 
     def visit_func_def(self, func: FuncDef) -> None:
         sem = self.sem
+        if sem.type is not None:
+            # Don't process methods during pass 1.
+            return
         func.is_conditional = sem.block_depth[-1] > 0
         func._fullname = sem.qualified_name(func.name())
         at_module = sem.is_module_scope()
@@ -149,7 +158,8 @@ class SemanticAnalyzerPass1(NodeVisitor[None]):
         else:
             if at_module:
                 sem.globals[func.name()] = SymbolTableNode(GDEF, func)
-            # Also analyze the function body (in case there are conditional imports).
+            # Also analyze the function body (needed in case there are unreachable
+            # conditional imports).
             sem.function_stack.append(func)
             sem.errors.push_function(func.name())
             sem.enter()
@@ -159,6 +169,9 @@ class SemanticAnalyzerPass1(NodeVisitor[None]):
             sem.function_stack.pop()
 
     def visit_overloaded_func_def(self, func: OverloadedFuncDef) -> None:
+        if self.sem.type is not None:
+            # Don't process methods during pass 1.
+            return
         kind = self.kind_by_scope()
         if kind == GDEF:
             self.sem.check_no_global(func.name(), func, True)
@@ -228,7 +241,7 @@ class SemanticAnalyzerPass1(NodeVisitor[None]):
         for name, as_name in node.names:
             imported_name = as_name or name
             if imported_name not in self.sem.globals:
-                self.sem.add_symbol(imported_name, SymbolTableNode(UNBOUND_IMPORTED, None), node)
+                self.add_symbol(imported_name, SymbolTableNode(UNBOUND_IMPORTED, None), node)
 
     def visit_import(self, node: Import) -> None:
         node.is_top_level = self.sem.is_module_scope()
@@ -237,11 +250,10 @@ class SemanticAnalyzerPass1(NodeVisitor[None]):
             return
         for id, as_id in node.ids:
             imported_id = as_id or id
+            # For 'import a.b.c' we create symbol 'a'.
+            imported_id = imported_id.split('.')[0]
             if imported_id not in self.sem.globals:
-                self.sem.add_symbol(imported_id, SymbolTableNode(UNBOUND_IMPORTED, None), node)
-            else:
-                # If the previous symbol is a variable, this should take precedence.
-                self.sem.globals[imported_id] = SymbolTableNode(UNBOUND_IMPORTED, None)
+                self.add_symbol(imported_id, SymbolTableNode(UNBOUND_IMPORTED, None), node)
 
     def visit_import_all(self, node: ImportAll) -> None:
         node.is_top_level = self.sem.is_module_scope()
@@ -267,8 +279,11 @@ class SemanticAnalyzerPass1(NodeVisitor[None]):
             s.body.accept(self)
 
     def visit_decorator(self, d: Decorator) -> None:
+        if self.sem.type is not None:
+            # Don't process methods during pass 1.
+            return
         d.var._fullname = self.sem.qualified_name(d.var.name())
-        self.sem.add_symbol(d.var.name(), SymbolTableNode(self.kind_by_scope(), d.var), d)
+        self.add_symbol(d.var.name(), SymbolTableNode(self.kind_by_scope(), d), d)
 
     def visit_if_stmt(self, s: IfStmt) -> None:
         infer_reachability_of_if_statement(s, pyversion=self.pyversion, platform=self.platform)
@@ -294,3 +309,31 @@ class SemanticAnalyzerPass1(NodeVisitor[None]):
             return LDEF
         else:
             assert False, "Couldn't determine scope"
+
+    def add_symbol(self, name: str, node: SymbolTableNode,
+                   context: Context) -> None:
+        # This is related to SemanticAnalyzerPass2.add_symbol. Since both methods will
+        # be called on top-level definitions, they need to co-operate.
+        if self.sem.is_func_scope():
+            assert self.sem.locals[-1] is not None
+            if name in self.sem.locals[-1]:
+                # Flag redefinition unless this is a reimport of a module.
+                if not (node.kind == MODULE_REF and
+                        self.sem.locals[-1][name].node == node.node):
+                    self.sem.name_already_defined(name, context)
+            self.sem.locals[-1][name] = node
+        else:
+            assert self.sem.type is None  # Pass 1 doesn't look inside classes
+            existing = self.sem.globals.get(name)
+            if existing and (not isinstance(node.node, MypyFile) or
+                             existing.node != node.node) and existing.kind != UNBOUND_IMPORTED:
+                # Modules can be imported multiple times to support import
+                # of multiple submodules of a package (e.g. a.x and a.y).
+                ok = False
+                # Only report an error if the symbol collision provides a different type.
+                if existing.type and node.type and is_same_type(existing.type, node.type):
+                    ok = True
+                if not ok:
+                    self.sem.name_already_defined(name, context)
+            elif not existing:
+                self.sem.globals[name] = node
diff --git a/mypy/semanal_pass3.py b/mypy/semanal_pass3.py
index 9e9ba0d..ef11c3e 100644
--- a/mypy/semanal_pass3.py
+++ b/mypy/semanal_pass3.py
@@ -17,7 +17,7 @@ from mypy.nodes import (
     Node, Expression, MypyFile, FuncDef, FuncItem, Decorator, RefExpr, Context, TypeInfo, ClassDef,
     Block, TypedDictExpr, NamedTupleExpr, AssignmentStmt, IndexExpr, TypeAliasExpr, NameExpr,
     CallExpr, NewTypeExpr, ForStmt, WithStmt, CastExpr, TypeVarExpr, TypeApplication, Lvalue,
-    TupleExpr, RevealTypeExpr, SymbolTableNode, Var, ARG_POS
+    TupleExpr, RevealTypeExpr, SymbolTableNode, Var, ARG_POS, OverloadedFuncDef
 )
 from mypy.types import (
     Type, Instance, AnyType, TypeOfAny, CallableType, TupleType, TypeVarType, TypedDictType,
@@ -51,6 +51,7 @@ class SemanticAnalyzerPass3(TraverserVisitor):
         self.sem.options = options
         self.patches = patches
         self.is_typeshed_file = self.errors.is_typeshed_file(fnam)
+        self.sem.cur_mod_id = file_node.fullname()
         self.sem.globals = file_node.names
         with experiments.strict_optional_set(options.strict_optional):
             self.accept(file_node)
@@ -85,6 +86,10 @@ class SemanticAnalyzerPass3(TraverserVisitor):
         super().visit_func_def(fdef)
         self.errors.pop_function()
 
+    def visit_overloaded_func_def(self, fdef: OverloadedFuncDef) -> None:
+        self.analyze(fdef.type, fdef)
+        super().visit_overloaded_func_def(fdef)
+
     def visit_class_def(self, tdef: ClassDef) -> None:
         # NamedTuple base classes are validated in check_namedtuple_classdef; we don't have to
         # check them again here.
@@ -116,12 +121,7 @@ class SemanticAnalyzerPass3(TraverserVisitor):
                 self.analyze(tdef.analyzed.info.typeddict_type, tdef.analyzed, warn=True)
             elif isinstance(tdef.analyzed, NamedTupleExpr):
                 self.analyze(tdef.analyzed.info.tuple_type, tdef.analyzed, warn=True)
-                for name in tdef.analyzed.info.names:
-                    sym = tdef.analyzed.info.names[name]
-                    if isinstance(sym.node, (FuncDef, Decorator)):
-                        self.accept(sym.node)
-                    if isinstance(sym.node, Var):
-                        self.analyze(sym.node.type, sym.node)
+                self.analyze_info(tdef.analyzed.info)
         super().visit_class_def(tdef)
 
     def visit_decorator(self, dec: Decorator) -> None:
@@ -152,7 +152,7 @@ class SemanticAnalyzerPass3(TraverserVisitor):
         for expr in dec.decorators:
             preserve_type = False
             if isinstance(expr, RefExpr) and isinstance(expr.node, FuncDef):
-                if is_identity_signature(expr.node.type):
+                if expr.node.type and is_identity_signature(expr.node.type):
                     preserve_type = True
             if not preserve_type:
                 decorator_preserves_type = False
@@ -189,6 +189,10 @@ class SemanticAnalyzerPass3(TraverserVisitor):
             analyzed = s.rvalue.analyzed
             if isinstance(analyzed, NewTypeExpr):
                 self.analyze(analyzed.old_type, analyzed)
+                if analyzed.info:
+                    # Currently NewTypes only have __init__, but to be future proof,
+                    # we analyze all symbols.
+                    self.analyze_info(analyzed.info)
                 if analyzed.info and analyzed.info.mro:
                     analyzed.info.mro = []  # Force recomputation
                     mypy.semanal.calculate_class_mro(analyzed.info.defn, self.fail_blocker)
@@ -203,12 +207,7 @@ class SemanticAnalyzerPass3(TraverserVisitor):
                 self.analyze(analyzed.info.typeddict_type, analyzed, warn=True)
             if isinstance(analyzed, NamedTupleExpr):
                 self.analyze(analyzed.info.tuple_type, analyzed, warn=True)
-                for name in analyzed.info.names:
-                    sym = analyzed.info.names[name]
-                    if isinstance(sym.node, (FuncDef, Decorator)):
-                        self.accept(sym.node)
-                    if isinstance(sym.node, Var):
-                        self.analyze(sym.node.type, sym.node)
+                self.analyze_info(analyzed.info)
         # We need to pay additional attention to assignments that define a type alias.
         # The resulting type is also stored in the 'type_override' attribute of
         # the corresponding SymbolTableNode.
@@ -246,31 +245,53 @@ class SemanticAnalyzerPass3(TraverserVisitor):
                           transform: Callable[[Type], Type]) -> None:
         """Apply transform to all types associated with node."""
         if isinstance(node, ForStmt):
-            node.index_type = transform(node.index_type)
+            if node.index_type:
+                node.index_type = transform(node.index_type)
             self.transform_types_in_lvalue(node.index, transform)
         if isinstance(node, WithStmt):
-            node.target_type = transform(node.target_type)
+            if node.target_type:
+                node.target_type = transform(node.target_type)
             for n in node.target:
                 if isinstance(n, NameExpr) and isinstance(n.node, Var) and n.node.type:
                     n.node.type = transform(n.node.type)
-        if isinstance(node, (FuncDef, CastExpr, AssignmentStmt, TypeAliasExpr, Var)):
+        if isinstance(node, (FuncDef, OverloadedFuncDef, CastExpr, AssignmentStmt,
+                             TypeAliasExpr, Var)):
+            assert node.type, "Scheduled patch for non-existent type"
             node.type = transform(node.type)
         if isinstance(node, NewTypeExpr):
+            assert node.old_type, "Scheduled patch for non-existent type"
             node.old_type = transform(node.old_type)
+            if node.info:
+                new_bases = []  # type: List[Instance]
+                for b in node.info.bases:
+                    new_b = transform(b)
+                    # TODO: this code can be combined with code in second pass.
+                    if isinstance(new_b, Instance):
+                        new_bases.append(new_b)
+                    elif isinstance(new_b, TupleType):
+                        new_bases.append(new_b.fallback)
+                    else:
+                        self.fail("Argument 2 to NewType(...) must be subclassable"
+                                  " (got {})".format(new_b), node)
+                        new_bases.append(self.builtin_type('object'))
+                node.info.bases = new_bases
         if isinstance(node, TypeVarExpr):
             if node.upper_bound:
                 node.upper_bound = transform(node.upper_bound)
             if node.values:
                 node.values = [transform(v) for v in node.values]
         if isinstance(node, TypedDictExpr):
+            assert node.info.typeddict_type, "Scheduled patch for non-existent type"
             node.info.typeddict_type = cast(TypedDictType,
                                             transform(node.info.typeddict_type))
         if isinstance(node, NamedTupleExpr):
+            assert node.info.tuple_type, "Scheduled patch for non-existent type"
             node.info.tuple_type = cast(TupleType,
                                         transform(node.info.tuple_type))
         if isinstance(node, TypeApplication):
             node.types = [transform(t) for t in node.types]
         if isinstance(node, SymbolTableNode):
+            assert node.type_override, "Scheduled patch for non-existent type"
             node.type_override = transform(node.type_override)
         if isinstance(node, TypeInfo):
             for tvar in node.defn.type_vars:
@@ -285,7 +306,7 @@ class SemanticAnalyzerPass3(TraverserVisitor):
                     new_bases.append(new_base)
                 else:
                     # Don't fix the NamedTuple bases, they are Instance's intentionally.
-                    # Patch the 'args' just in case, although generic tuple type are
+                    # Patch the 'args' just in case, although generic tuple types are
                     # not supported yet.
                     alt_base = Instance(base.type, [transform(a) for a in base.args])
                     new_bases.append(alt_base)
@@ -296,7 +317,8 @@ class SemanticAnalyzerPass3(TraverserVisitor):
         if isinstance(lvalue, RefExpr):
             if isinstance(lvalue.node, Var):
                 var = lvalue.node
-                var.type = transform(var.type)
+                if var.type:
+                    var.type = transform(var.type)
         elif isinstance(lvalue, TupleExpr):
             for item in lvalue.items:
                 self.transform_types_in_lvalue(item, transform)
@@ -331,6 +353,15 @@ class SemanticAnalyzerPass3(TraverserVisitor):
                                                                   node, warn=False)))
             self.patches.append(patch)
 
+    def analyze_info(self, info: TypeInfo) -> None:
+        # Similar to above but for nodes with synthetic TypeInfos (NamedTuple and NewType).
+        for name in info.names:
+            sym = info.names[name]
+            if isinstance(sym.node, (FuncDef, Decorator)):
+                self.accept(sym.node)
+            if isinstance(sym.node, Var):
+                self.analyze(sym.node.type, sym.node)
+
     def make_type_analyzer(self, indicator: Dict[str, bool]) -> TypeAnalyserPass3:
         return TypeAnalyserPass3(self.sem.lookup_qualified,
                                  self.sem.lookup_fully_qualified,
@@ -342,7 +373,7 @@ class SemanticAnalyzerPass3(TraverserVisitor):
                                  indicator)
 
     def check_for_omitted_generics(self, typ: Type) -> None:
-        if 'generics' not in self.options.disallow_any or self.is_typeshed_file:
+        if not self.options.disallow_any_generics or self.is_typeshed_file:
             return
 
         for t in collect_any_types(typ):
@@ -355,7 +386,7 @@ class SemanticAnalyzerPass3(TraverserVisitor):
     def fail_blocker(self, msg: str, ctx: Context) -> None:
         self.fail(msg, ctx, blocker=True)
 
-    def builtin_type(self, name: str, args: List[Type] = None) -> Instance:
+    def builtin_type(self, name: str, args: Optional[List[Type]] = None) -> Instance:
         names = self.modules['builtins']
         sym = names.names[name]
         node = sym.node
diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py
index 6aa9fd7..89a8ac9 100644
--- a/mypy/server/astdiff.py
+++ b/mypy/server/astdiff.py
@@ -7,96 +7,18 @@ that are stale because of the changes.
 Only look at detail at definitions at the current module.
 """
 
-from typing import Set, List, TypeVar
+from typing import Set, List, TypeVar, Dict, Tuple, Optional, Sequence
 
-from mypy.nodes import SymbolTable, SymbolTableNode, FuncBase, TypeInfo, Var
+from mypy.nodes import (
+    SymbolTable, SymbolTableNode, FuncBase, TypeInfo, Var, MypyFile, SymbolNode, Decorator,
+    TypeVarExpr, MODULE_REF, TYPE_ALIAS, UNBOUND_IMPORTED, TVAR
+)
 from mypy.types import (
     Type, TypeVisitor, UnboundType, TypeList, AnyType, NoneTyp, UninhabitedType,
     ErasedType, DeletedType, Instance, TypeVarType, CallableType, TupleType, TypedDictType,
     UnionType, Overloaded, PartialType, TypeType
 )
-
-
-def compare_symbol_tables(name_prefix: str, table1: SymbolTable, table2: SymbolTable) -> Set[str]:
-    """Return names that are different in two versions of a symbol table.
-
-    Return a set of fully-qualified names (e.g., 'mod.func' or 'mod.Class.method').
-    """
-    # Find names only defined only in one version.
-    names1 = {'%s.%s' % (name_prefix, name) for name in table1}
-    names2 = {'%s.%s' % (name_prefix, name) for name in table2}
-    triggers = names1 ^ names2
-
-    # Look for names defined in both versions that are different.
-    for name in set(table1.keys()) & set(table2.keys()):
-        if not is_similar_node_shallow(table1[name], table2[name]):
-            triggers.add('%s.%s' % (name_prefix, name))
-        else:
-            # Nodes are the same when using shallow comparison. Now look into contents of
-            # classes to find changed items.
-            node1 = table1[name].node
-            node2 = table2[name].node
-
-            if node1 and node1.fullname() and get_prefix(node1.fullname()) != name_prefix:
-                # Only look inside things defined in the current module.
-                # TODO: This probably doesn't work generally...
-                continue
-
-            if isinstance(node1, TypeInfo) and isinstance(node2, TypeInfo):
-                # TODO: Only do this is the class is defined in this module.
-                prefix = '%s.%s' % (name_prefix, node1.name())
-                triggers |= compare_symbol_tables(prefix, node1.names, node2.names)
-
-    return triggers
-
-
-def is_similar_node_shallow(n: SymbolTableNode, m: SymbolTableNode) -> bool:
-    # TODO:
-    #   cross_ref
-    #   tvar_def
-    #   type_override
-    if (n.kind != m.kind
-            or n.module_public != m.module_public):
-        return False
-    if type(n.node) != type(m.node):  # noqa
-        return False
-    if n.node and m.node and n.node.fullname() != m.node.fullname():
-        return False
-    if isinstance(n.node, FuncBase) and isinstance(m.node, FuncBase):
-        # TODO: info
-        return (n.node.is_property == m.node.is_property and
-                is_identical_type(n.node.type, m.node.type))
-    if isinstance(n.node, TypeInfo) and isinstance(m.node, TypeInfo):
-        # TODO:
-        #   type_vars
-        #   bases
-        #   _promote
-        #   tuple_type
-        #   typeddict_type
-        nn = n.node
-        mn = m.node
-        return (nn.is_abstract == mn.is_abstract and
-                nn.is_enum == mn.is_enum and
-                nn.fallback_to_any == mn.fallback_to_any and
-                nn.is_named_tuple == mn.is_named_tuple and
-                nn.is_newtype == mn.is_newtype and
-                is_same_mro(nn.mro, mn.mro))
-    if isinstance(n.node, Var) and isinstance(m.node, Var):
-        if n.node.type is None and m.node.type is None:
-            return True
-        return (n.node.type is not None and m.node.type is not None and
-                is_identical_type(n.node.type, m.node.type))
-    return True
-
-
-def is_same_mro(mro1: List[TypeInfo], mro2: List[TypeInfo]) -> bool:
-    return (len(mro1) == len(mro2)
-            and all(x.fullname() == y.fullname() for x, y in zip(mro1, mro2)))
-
-
-def get_prefix(id: str) -> str:
-    """Drop the final component of a qualified name (e.g. ('x.y' -> 'x')."""
-    return id.rsplit('.', 1)[0]
+from mypy.util import get_prefix
 
 
 def is_identical_type(t: Type, s: Type) -> bool:
@@ -209,3 +131,245 @@ class IdenticalTypeVisitor(TypeVisitor[bool]):
         if isinstance(self.right, TypeType):
             return is_identical_type(left.item, self.right.item)
         return False
+
+
+# Snapshot representation of a symbol table node or type. The representation is
+# opaque -- the only supported operations are comparing for equality and
+# hashing (latter for type snapshots only). Snapshots can contain primitive
+# objects, nested tuples, lists and dictionaries (type
+# snapshots are immutable).
+#
+# For example, the snapshot of the 'int' type is ('Instance', 'builtins.int', ()).
+SnapshotItem = Tuple[object, ...]
+
+
+def compare_symbol_table_snapshots(
+        name_prefix: str,
+        snapshot1: Dict[str, SnapshotItem],
+        snapshot2: Dict[str, SnapshotItem]) -> Set[str]:
+    """Return names that are different in two snapshots of a symbol table.
+
+    Return a set of fully-qualified names (e.g., 'mod.func' or 'mod.Class.method').
+
+    Only shallow (intra-module) differences are considered. References to things defined
+    outside the module are compared based on the name of the target only.
+    """
+    # Find names defined in only one version.
+    names1 = {'%s.%s' % (name_prefix, name) for name in snapshot1}
+    names2 = {'%s.%s' % (name_prefix, name) for name in snapshot2}
+    triggers = names1 ^ names2
+
+    # Look for names defined in both versions that are different.
+    for name in set(snapshot1.keys()) & set(snapshot2.keys()):
+        item1 = snapshot1[name]
+        item2 = snapshot2[name]
+        kind1 = item1[0]
+        kind2 = item2[0]
+        item_name = '%s.%s' % (name_prefix, name)
+        if kind1 != kind2:
+            # Different kind of node in two snapshots -> trivially different.
+            triggers.add(item_name)
+        elif kind1 == 'TypeInfo':
+            if item1[:-1] != item2[:-1]:
+                # Record major difference (outside class symbol tables).
+                triggers.add(item_name)
+            # Look for differences in nested class symbol table entries.
+            assert isinstance(item1[-1], dict)
+            assert isinstance(item2[-1], dict)
+            triggers |= compare_symbol_table_snapshots(item_name, item1[-1], item2[-1])
+        else:
+            # Shallow node (no interesting internal structure). Just use equality.
+            if snapshot1[name] != snapshot2[name]:
+                triggers.add(item_name)
+
+    return triggers
+
+
+def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> Dict[str, SnapshotItem]:
+    """Create a snapshot description that represents the state of a symbol table.
+
+    The snapshot has a representation based on nested tuples and dicts
+    that makes it easy and fast to find differences.
+
+    Only "shallow" state is included in the snapshot -- references to
+    things defined in other modules are represented just by the names of
+    the targets.
+    """
+    result = {}  # type: Dict[str, SnapshotItem]
+    for name, symbol in table.items():
+        node = symbol.node
+        # TODO: cross_ref?
+        fullname = node.fullname() if node else None
+        common = (fullname, symbol.kind, symbol.module_public)
+        if symbol.kind == MODULE_REF:
+            # This is a cross-reference to another module.
+            assert isinstance(node, MypyFile)
+            result[name] = ('Moduleref', common)
+        elif symbol.kind == TVAR:
+            assert isinstance(node, TypeVarExpr)
+            result[name] = ('TypeVar',
+                            node.variance,
+                            [snapshot_type(value) for value in node.values],
+                            snapshot_type(node.upper_bound))
+        elif symbol.kind == TYPE_ALIAS:
+            result[name] = ('TypeAlias',
+                            symbol.alias_tvars,
+                            snapshot_optional_type(symbol.type_override))
+        else:
+            assert symbol.kind != UNBOUND_IMPORTED
+            if node and get_prefix(node.fullname()) != name_prefix:
+                # This is a cross-reference to a node defined in another module.
+                result[name] = ('CrossRef', common, symbol.normalized)
+            else:
+                result[name] = snapshot_definition(node, common)
+    return result
+
+
+def snapshot_definition(node: Optional[SymbolNode],
+                        common: Tuple[object, ...]) -> Tuple[object, ...]:
+    """Create a snapshot description of a symbol table node.
+
+    The representation is nested tuples and dicts. Only externally
+    visible attributes are included.
+    """
+    if isinstance(node, FuncBase):
+        # TODO: info
+        return ('Func', common, node.is_property, snapshot_type(node.type))
+    elif isinstance(node, Var):
+        return ('Var', common, snapshot_optional_type(node.type))
+    elif isinstance(node, Decorator):
+        # Note that decorated methods are represented by Decorator instances in
+        # a symbol table since we need to preserve information about the
+        # decorated function (whether it's a class function, for
+        # example). Top-level decorated functions, however, are represented by
+        # the corresponding Var node, since that happens to provide enough
+        # context.
+        return ('Decorator',
+                node.is_overload,
+                snapshot_optional_type(node.var.type),
+                snapshot_definition(node.func, common))
+    elif isinstance(node, TypeInfo):
+        # TODO:
+        #   type_vars
+        #   bases
+        #   _promote
+        #   tuple_type
+        #   typeddict_type
+        attrs = (node.is_abstract,
+                 node.is_enum,
+                 node.fallback_to_any,
+                 node.is_named_tuple,
+                 node.is_newtype,
+                 [base.fullname() for base in node.mro])
+        prefix = node.fullname()
+        symbol_table = snapshot_symbol_table(prefix, node.names)
+        return ('TypeInfo', common, attrs, symbol_table)
+    else:
+        # TODO: Handle additional types: TypeVarExpr, MypyFile, ...
+        assert False, type(node)
+
+
+def snapshot_type(typ: Type) -> SnapshotItem:
+    """Create a snapshot representation of a type using nested tuples."""
+    return typ.accept(SnapshotTypeVisitor())
+
+
+def snapshot_optional_type(typ: Optional[Type]) -> Optional[SnapshotItem]:
+    if typ:
+        return snapshot_type(typ)
+    else:
+        return None
+
+
+def snapshot_types(types: Sequence[Type]) -> SnapshotItem:
+    return tuple(snapshot_type(item) for item in types)
+
+
+def snapshot_simple_type(typ: Type) -> SnapshotItem:
+    return (type(typ).__name__,)
+
+
+class SnapshotTypeVisitor(TypeVisitor[SnapshotItem]):
+    """Creates a read-only, self-contained snapshot of a type object.
+
+    Properties of a snapshot:
+
+    - Contains (nested) tuples and other immutable primitive objects only.
+    - References to AST nodes are replaced with full names of targets.
+    - Has no references to mutable or non-primitive objects.
+    - Two snapshots represent the same object if and only if they are
+      equal.
+    """
+
+    def visit_unbound_type(self, typ: UnboundType) -> SnapshotItem:
+        return ('UnboundType',
+                typ.name,
+                typ.optional,
+                typ.empty_tuple_index,
+                snapshot_types(typ.args))
+
+    def visit_any(self, typ: AnyType) -> SnapshotItem:
+        return snapshot_simple_type(typ)
+
+    def visit_none_type(self, typ: NoneTyp) -> SnapshotItem:
+        return snapshot_simple_type(typ)
+
+    def visit_uninhabited_type(self, typ: UninhabitedType) -> SnapshotItem:
+        return snapshot_simple_type(typ)
+
+    def visit_erased_type(self, typ: ErasedType) -> SnapshotItem:
+        return snapshot_simple_type(typ)
+
+    def visit_deleted_type(self, typ: DeletedType) -> SnapshotItem:
+        return snapshot_simple_type(typ)
+
+    def visit_instance(self, typ: Instance) -> SnapshotItem:
+        return ('Instance',
+                typ.type.fullname(),
+                snapshot_types(typ.args))
+
+    def visit_type_var(self, typ: TypeVarType) -> SnapshotItem:
+        return ('TypeVar',
+                typ.name,
+                typ.fullname,
+                typ.id.raw_id,
+                typ.id.meta_level,
+                snapshot_types(typ.values),
+                snapshot_type(typ.upper_bound),
+                typ.variance)
+
+    def visit_callable_type(self, typ: CallableType) -> SnapshotItem:
+        # FIX generics
+        return ('CallableType',
+                snapshot_types(typ.arg_types),
+                snapshot_type(typ.ret_type),
+                tuple(typ.arg_names),
+                tuple(typ.arg_kinds),
+                typ.is_type_obj(),
+                typ.is_ellipsis_args)
+
+    def visit_tuple_type(self, typ: TupleType) -> SnapshotItem:
+        return ('TupleType', snapshot_types(typ.items))
+
+    def visit_typeddict_type(self, typ: TypedDictType) -> SnapshotItem:
+        items = tuple((key, snapshot_type(item_type))
+                      for key, item_type in typ.items.items())
+        return ('TypedDictType', items)
+
+    def visit_union_type(self, typ: UnionType) -> SnapshotItem:
+        # Sort and remove duplicates so that we can use equality to test for
+        # equivalent union type snapshots.
+        items = {snapshot_type(item) for item in typ.items}
+        normalized = tuple(sorted(items))
+        return ('UnionType', normalized)
+
+    def visit_overloaded(self, typ: Overloaded) -> SnapshotItem:
+        return ('Overloaded', snapshot_types(typ.items()))
+
+    def visit_partial_type(self, typ: PartialType) -> SnapshotItem:
+        # A partial type is not fully defined, so the result is indeterminate. We shouldn't
+        # get here.
+        raise RuntimeError
+
+    def visit_type_type(self, typ: TypeType) -> SnapshotItem:
+        return ('TypeType', snapshot_type(typ.item))
diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py
index e4efc62..9bb1b2b 100644
--- a/mypy/server/astmerge.py
+++ b/mypy/server/astmerge.py
@@ -7,7 +7,7 @@ from typing import Dict, List, cast, TypeVar, Optional
 
 from mypy.nodes import (
     Node, MypyFile, SymbolTable, Block, AssignmentStmt, NameExpr, MemberExpr, RefExpr, TypeInfo,
-    FuncDef, ClassDef, SymbolNode, Var, Statement, MDEF
+    FuncDef, ClassDef, NamedTupleExpr, SymbolNode, Var, Statement, MDEF
 )
 from mypy.traverser import TraverserVisitor
 from mypy.types import (
@@ -15,6 +15,7 @@ from mypy.types import (
     TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType,
     Overloaded
 )
+from mypy.util import get_prefix
 
 
 def merge_asts(old: MypyFile, old_symbols: SymbolTable,
@@ -97,12 +98,7 @@ class NodeReplaceVisitor(TraverserVisitor):
     def visit_class_def(self, node: ClassDef) -> None:
         # TODO additional things like the MRO
         node.defs.body = self.replace_statements(node.defs.body)
-        replace_nodes_in_symbol_table(node.info.names, self.replacements)
-        info = node.info
-        for i, item in enumerate(info.mro):
-            info.mro[i] = self.fixup(info.mro[i])
-        for i, base in enumerate(info.bases):
-            self.fixup_type(info.bases[i])
+        self.process_type_info(node.info)
         super().visit_class_def(node)
 
     def visit_assignment_stmt(self, node: AssignmentStmt) -> None:
@@ -123,6 +119,10 @@ class NodeReplaceVisitor(TraverserVisitor):
         if node.node is not None:
             node.node = self.fixup(node.node)
 
+    def visit_namedtuple_expr(self, node: NamedTupleExpr) -> None:
+        super().visit_namedtuple_expr(node)
+        self.process_type_info(node.info)
+
     # Helpers
 
     def fixup(self, node: SN) -> SN:
@@ -135,6 +135,14 @@ class NodeReplaceVisitor(TraverserVisitor):
     def fixup_type(self, typ: Type) -> None:
         typ.accept(TypeReplaceVisitor(self.replacements))
 
+    def process_type_info(self, info: TypeInfo) -> None:
+        # TODO additional things like the MRO
+        replace_nodes_in_symbol_table(info.names, self.replacements)
+        for i, item in enumerate(info.mro):
+            info.mro[i] = self.fixup(info.mro[i])
+        for i, base in enumerate(info.bases):
+            self.fixup_type(info.bases[i])
+
     def replace_statements(self, nodes: List[Statement]) -> List[Statement]:
         result = []
         for node in nodes:
@@ -163,12 +171,18 @@ class TypeReplaceVisitor(TypeVisitor[None]):
         for arg in typ.arg_types:
             arg.accept(self)
         typ.ret_type.accept(self)
-        # TODO: typ.definition
+        if typ.definition:
+            # No need to fixup since this is just a cross-reference.
+            typ.definition = self.replacements.get(typ.definition, typ.definition)
         # TODO: typ.fallback
-        assert not typ.variables  # TODO
+        for tv in typ.variables:
+            tv.upper_bound.accept(self)
+            for value in tv.values:
+                value.accept(self)
 
     def visit_overloaded(self, t: Overloaded) -> None:
-        raise NotImplementedError
+        for item in t.items():
+            item.accept(self)
 
     def visit_deleted_type(self, typ: DeletedType) -> None:
         pass
@@ -177,31 +191,37 @@ class TypeReplaceVisitor(TypeVisitor[None]):
         raise RuntimeError
 
     def visit_tuple_type(self, typ: TupleType) -> None:
-        raise NotImplementedError
+        for item in typ.items:
+            item.accept(self)
 
     def visit_type_type(self, typ: TypeType) -> None:
-        raise NotImplementedError
+        typ.item.accept(self)
 
     def visit_type_var(self, typ: TypeVarType) -> None:
-        raise NotImplementedError
+        typ.upper_bound.accept(self)
+        for value in typ.values:
+            value.accept(self)
 
     def visit_typeddict_type(self, typ: TypedDictType) -> None:
         raise NotImplementedError
 
     def visit_unbound_type(self, typ: UnboundType) -> None:
-        raise RuntimeError
+        for arg in typ.args:
+            arg.accept(self)
 
     def visit_uninhabited_type(self, typ: UninhabitedType) -> None:
         pass
 
     def visit_union_type(self, typ: UnionType) -> None:
-        raise NotImplementedError
+        for item in typ.items:
+            item.accept(self)
 
     # Helpers
 
     def fixup(self, node: SN) -> SN:
         if node in self.replacements:
             new = self.replacements[node]
+            # TODO: This may be unnecessary?
             new.__dict__ = node.__dict__
             return cast(SN, new)
         return node
@@ -210,15 +230,20 @@ class TypeReplaceVisitor(TypeVisitor[None]):
 def replace_nodes_in_symbol_table(symbols: SymbolTable,
                                   replacements: Dict[SymbolNode, SymbolNode]) -> None:
     for name, node in symbols.items():
-        if node.node and node.node in replacements:
-            new = replacements[node.node]
-            new.__dict__ = node.node.__dict__
-            node.node = new
-            if isinstance(node.node, Var) and node.node.type:
-                node.node.type.accept(TypeReplaceVisitor(replacements))
-                node.node.info = cast(TypeInfo, replacements.get(node.node.info, node.node.info))
-
-
-def get_prefix(fullname: str) -> str:
-    """Drop the final component of a qualified name (e.g. ('x.y' -> 'x')."""
-    return fullname.rsplit('.', 1)[0]
+        if node.node:
+            if node.node in replacements:
+                new = replacements[node.node]
+                new.__dict__ = node.node.__dict__
+                node.node = new
+                # TODO: Other node types
+                if isinstance(node.node, Var) and node.node.type:
+                    node.node.type.accept(TypeReplaceVisitor(replacements))
+                    node.node.info = cast(TypeInfo, replacements.get(node.node.info,
+                                                                     node.node.info))
+            else:
+                # TODO: Other node types
+                if isinstance(node.node, Var) and node.node.type:
+                    node.node.type.accept(TypeReplaceVisitor(replacements))
+        override = node.type_override
+        if override:
+            override.accept(TypeReplaceVisitor(replacements))
diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py
index 62df86a..f47b118 100644
--- a/mypy/server/aststrip.py
+++ b/mypy/server/aststrip.py
@@ -1,32 +1,36 @@
-"""Strip AST from semantic information."""
+"""Strip AST from semantic information.
+
+This is used in fine-grained incremental checking to reprocess existing AST nodes.
+"""
 
 import contextlib
 from typing import Union, Iterator, Optional
 
 from mypy.nodes import (
     Node, FuncDef, NameExpr, MemberExpr, RefExpr, MypyFile, FuncItem, ClassDef, AssignmentStmt,
-    TypeInfo, Var
+    ImportFrom, Import, TypeInfo, SymbolTable, Var, UNBOUND_IMPORTED, GDEF
 )
 from mypy.traverser import TraverserVisitor
 
 
 def strip_target(node: Union[MypyFile, FuncItem]) -> None:
-    NodeStripVisitor().strip_target(node)
+    """Strip a fine-grained incremental mode target from semantic information."""
+    visitor = NodeStripVisitor()
+    if isinstance(node, MypyFile):
+        visitor.strip_file_top_level(node)
+    else:
+        node.accept(visitor)
 
 
 class NodeStripVisitor(TraverserVisitor):
     def __init__(self) -> None:
         self.type = None  # type: Optional[TypeInfo]
+        self.names = None  # type: Optional[SymbolTable]
 
-    def strip_target(self, node: Union[MypyFile, FuncItem]) -> None:
-        """Strip a fine-grained incremental mode target."""
-        if isinstance(node, MypyFile):
-            self.strip_top_level(node)
-        else:
-            node.accept(self)
-
-    def strip_top_level(self, file_node: MypyFile) -> None:
+    def strip_file_top_level(self, file_node: MypyFile) -> None:
         """Strip a module top-level (don't recursive into functions)."""
+        self.names = file_node.names
+        # TODO: Functions nested within statements
         for node in file_node.defs:
             if not isinstance(node, (FuncItem, ClassDef)):
                 node.accept(self)
@@ -35,6 +39,7 @@ class NodeStripVisitor(TraverserVisitor):
 
     def strip_class_body(self, node: ClassDef) -> None:
         """Strip class body and type info, but don't strip methods."""
+        # TODO: Statements in class body
         node.info.type_vars = []
         node.info.bases = []
         node.info.abstract_attributes = []
@@ -49,6 +54,7 @@ class NodeStripVisitor(TraverserVisitor):
 
     @contextlib.contextmanager
     def enter_class(self, info: TypeInfo) -> Iterator[None]:
+        # TODO: Update and restore self.names
         old = self.type
         self.type = info
         yield
@@ -58,8 +64,40 @@ class NodeStripVisitor(TraverserVisitor):
         node.type = node.unanalyzed_type
         super().visit_assignment_stmt(node)
 
+    def visit_import_from(self, node: ImportFrom) -> None:
+        if node.assignments:
+            node.assignments = []
+        else:
+            if self.names:
+                # Reset entries in the symbol table. This is necessary since
+                # otherwise the semantic analyzer will think that the import
+                # assigns to an existing name instead of defining a new one.
+                for name, as_name in node.names:
+                    imported_name = as_name or name
+                    symnode = self.names[imported_name]
+                    symnode.kind = UNBOUND_IMPORTED
+                    symnode.node = None
+
+    def visit_import(self, node: Import) -> None:
+        if node.assignments:
+            node.assignments = []
+        else:
+            if self.names:
+                # Reset entries in the symbol table. This is necessary since
+                # otherwise the semantic analyzer will think that the import
+                # assigns to an existing name instead of defining a new one.
+                for name, as_name in node.ids:
+                    imported_name = as_name or name
+                    initial = imported_name.split('.')[0]
+                    symnode = self.names[initial]
+                    symnode.kind = UNBOUND_IMPORTED
+                    symnode.node = None
+
     def visit_name_expr(self, node: NameExpr) -> None:
-        self.strip_ref_expr(node)
+        # Global assignments are processed in semantic analysis pass 1, and we
+        # only want to strip changes made in passes 2 or later.
+        if not (node.kind == GDEF and node.is_new_def):
+            self.strip_ref_expr(node)
 
     def visit_member_expr(self, node: MemberExpr) -> None:
         self.strip_ref_expr(node)
@@ -70,11 +108,11 @@ class NodeStripVisitor(TraverserVisitor):
             # definition.
             if self.type is not None:
                 del self.type.names[node.name]
-            node.is_def = False
+            node.is_inferred_def = False
             node.def_var = None
 
     def is_duplicate_attribute_def(self, node: MemberExpr) -> bool:
-        if not node.is_def:
+        if not node.is_inferred_def:
             return False
         assert self.type is not None, "Internal error: Member defined outside class"
         if node.name not in self.type.names:
diff --git a/mypy/server/deps.py b/mypy/server/deps.py
index 05ef0fb..c9b3f60 100644
--- a/mypy/server/deps.py
+++ b/mypy/server/deps.py
@@ -1,114 +1,286 @@
 """Generate fine-grained dependencies for AST nodes."""
 
-from typing import Dict, List, Set, Optional
+from typing import Dict, List, Set, Optional, Tuple, Union
 
 from mypy.checkmember import bind_self
 from mypy.nodes import (
     Node, Expression, MypyFile, FuncDef, ClassDef, AssignmentStmt, NameExpr, MemberExpr, Import,
-    ImportFrom, CallExpr, TypeInfo, Var, LDEF
+    ImportFrom, CallExpr, CastExpr, TypeVarExpr, TypeApplication, IndexExpr, UnaryExpr, OpExpr,
+    ComparisonExpr, GeneratorExpr, DictionaryComprehension, StarExpr, PrintStmt, ForStmt, WithStmt,
+    TupleExpr, ListExpr, OperatorAssignmentStmt, DelStmt, YieldFromExpr, Decorator, Block,
+    TypeInfo, FuncBase, OverloadedFuncDef, RefExpr, Var, NamedTupleExpr, LDEF, MDEF, GDEF,
+    op_methods, reverse_op_methods, ops_with_inplace_method, unary_op_methods
 )
 from mypy.traverser import TraverserVisitor
 from mypy.types import (
     Type, Instance, AnyType, NoneTyp, TypeVisitor, CallableType, DeletedType, PartialType,
     TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType,
-    FunctionLike, ForwardRef
+    FunctionLike, ForwardRef, Overloaded
 )
 from mypy.server.trigger import make_trigger
 
 
-def get_dependencies(prefix: str, node: Node,
-                     type_map: Dict[Expression, Type]) -> Dict[str, Set[str]]:
+def get_dependencies(target: MypyFile,
+                     type_map: Dict[Expression, Type],
+                     python_version: Tuple[int, int]) -> Dict[str, Set[str]]:
     """Get all dependencies of a node, recursively."""
-    visitor = DependencyVisitor(prefix, type_map)
-    node.accept(visitor)
+    visitor = DependencyVisitor(type_map, python_version)
+    target.accept(visitor)
     return visitor.map
 
 
-def get_dependencies_of_target(prefix: str, node: Node,
-                               type_map: Dict[Expression, Type]) -> Dict[str, Set[str]]:
+def get_dependencies_of_target(module_id: str,
+                               target: Node,
+                               type_map: Dict[Expression, Type],
+                               python_version: Tuple[int, int]) -> Dict[str, Set[str]]:
     """Get dependencies of a target -- don't recursive into nested targets."""
-    visitor = DependencyVisitor(prefix, type_map)
-    if isinstance(node, MypyFile):
-        for defn in node.defs:
-            if not isinstance(defn, (ClassDef, FuncDef)):
+    # TODO: Add tests for this function.
+    visitor = DependencyVisitor(type_map, python_version)
+    visitor.enter_file_scope(module_id)
+    if isinstance(target, MypyFile):
+        # Only get dependencies of the top-level of the module. Don't recurse into
+        # functions.
+        for defn in target.defs:
+            # TODO: Recurse into top-level statements and class bodies but skip functions.
+            if not isinstance(defn, (ClassDef, Decorator, FuncDef, OverloadedFuncDef)):
                 defn.accept(visitor)
+    elif isinstance(target, FuncBase) and target.info:
+        # It's a method.
+        # TODO: Methods in nested classes.
+        visitor.enter_class_scope(target.info)
+        target.accept(visitor)
+        visitor.leave_scope()
     else:
-        node.accept(visitor)
+        target.accept(visitor)
+    visitor.leave_scope()
     return visitor.map
 
 
 class DependencyVisitor(TraverserVisitor):
-    def __init__(self, prefix: str, type_map: Dict[Expression, Type]) -> None:
-        self.stack = [prefix]
+    def __init__(self,
+                 type_map: Dict[Expression, Type],
+                 python_version: Tuple[int, int]) -> None:
+        # Stack of names of targets being processed. For class targets we use the
+        # surrounding module.
+        self.target_stack = []  # type: List[str]
+        # Stack of names of targets being processed, including class targets.
+        self.full_target_stack = []  # type: List[str]
+        self.scope_stack = []  # type: List[Union[None, TypeInfo, FuncDef]]
         self.type_map = type_map
+        self.python2 = python_version[0] == 2
         self.map = {}  # type: Dict[str, Set[str]]
         self.is_class = False
 
-    # TODO
-    #   decorated functions
-    #   overloads
+    # TODO (incomplete):
     #   from m import *
+    #   await
+    #   named tuples
+    #   TypedDict
+    #   protocols
+    #   metaclasses
+    #   type aliases
+    #   super()
+    #   relative imports
+    #   functional enum
+    #   type variable with value restriction
 
     def visit_mypy_file(self, o: MypyFile) -> None:
-        # TODO: Do we need to anything here?
+        self.enter_file_scope(o.fullname())
         super().visit_mypy_file(o)
+        self.leave_scope()
 
     def visit_func_def(self, o: FuncDef) -> None:
-        target = self.push(o.name())
+        if not isinstance(self.current_scope(), FuncDef):
+            # Not a nested function, so create a new target.
+            new_scope = True
+            target = self.enter_function_scope(o)
+        else:
+            # Treat nested functions as components of the parent function target.
+            new_scope = False
+            target = self.current_target()
         if o.type:
             if self.is_class and isinstance(o.type, FunctionLike):
                 signature = bind_self(o.type)  # type: Type
             else:
                 signature = o.type
-            for trigger in get_type_dependencies(signature):
+            for trigger in get_type_triggers(signature):
                 self.add_dependency(trigger)
                 self.add_dependency(trigger, target=make_trigger(target))
         if o.info:
             for base in non_trivial_bases(o.info):
                 self.add_dependency(make_trigger(base.fullname() + '.' + o.name()))
         super().visit_func_def(o)
-        self.pop()
+        if new_scope:
+            self.leave_scope()
+
+    def visit_decorator(self, o: Decorator) -> None:
+        self.add_dependency(make_trigger(o.func.fullname()))
+        super().visit_decorator(o)
 
     def visit_class_def(self, o: ClassDef) -> None:
-        target = self.push(o.name)
-        self.add_dependency(make_trigger(target))
+        target = self.enter_class_scope(o.info)
+        self.add_dependency(make_trigger(target), target)
         old_is_class = self.is_class
         self.is_class = True
-        # TODO: Add dependencies based on MRO and other attributes.
+        # Add dependencies to type variables of a generic class.
+        for tv in o.type_vars:
+            self.add_dependency(make_trigger(tv.fullname), target)
+        # Add dependencies to base types.
+        for base in o.info.bases:
+            self.add_type_dependencies(base, target=target)
+        # TODO: Add dependencies based on remaining TypeInfo attributes.
         super().visit_class_def(o)
         self.is_class = old_is_class
         info = o.info
         for name, node in info.names.items():
             if isinstance(node.node, Var):
-                for base in non_trivial_bases(info):
+                for base_info in non_trivial_bases(info):
                     # If the type of an attribute changes in a base class, we make references
                     # to the attribute in the subclass stale.
-                    self.add_dependency(make_trigger(base.fullname() + '.' + name),
+                    self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                         target=make_trigger(info.fullname() + '.' + name))
-        for base in non_trivial_bases(info):
-            for name, node in base.names.items():
-                self.add_dependency(make_trigger(base.fullname() + '.' + name),
+        for base_info in non_trivial_bases(info):
+            for name, node in base_info.names.items():
+                self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                     target=make_trigger(info.fullname() + '.' + name))
-            self.add_dependency(make_trigger(base.fullname() + '.__init__'),
+            self.add_dependency(make_trigger(base_info.fullname() + '.__init__'),
                                 target=make_trigger(info.fullname() + '.__init__'))
-        self.pop()
+        self.leave_scope()
 
     def visit_import(self, o: Import) -> None:
         for id, as_id in o.ids:
             # TODO: as_id
-            self.add_dependency(make_trigger(id), self.current())
+            self.add_dependency(make_trigger(id), self.current_target())
 
     def visit_import_from(self, o: ImportFrom) -> None:
         assert o.relative == 0  # Relative imports not supported
         for name, as_name in o.names:
-            assert as_name is None or as_name == name
             self.add_dependency(make_trigger(o.id + '.' + name))
 
+    def visit_block(self, o: Block) -> None:
+        if not o.is_unreachable:
+            super().visit_block(o)
+
     def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
-        super().visit_assignment_stmt(o)
-        if o.type:
-            for trigger in get_type_dependencies(o.type):
-                self.add_dependency(trigger)
+        # TODO: Implement all assignment special forms, including these:
+        #   TypedDict
+        #   NamedTuple
+        #   Enum
+        #   type aliases
+        rvalue = o.rvalue
+        if isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypeVarExpr):
+            # TODO: Support type variable value restriction
+            analyzed = rvalue.analyzed
+            self.add_type_dependencies(analyzed.upper_bound,
+                                       target=make_trigger(analyzed.fullname()))
+        elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, NamedTupleExpr):
+            # Depend on types of named tuple items.
+            info = rvalue.analyzed.info
+            prefix = '%s.%s' % (self.current_full_target(), info.name())
+            for name, symnode in info.names.items():
+                if not name.startswith('_') and isinstance(symnode.node, Var):
+                    typ = symnode.node.type
+                    if typ:
+                        self.add_type_dependencies(typ)
+                        attr_target = make_trigger('%s.%s' % (prefix, name))
+                        self.add_type_dependencies(typ, target=attr_target)
+        else:
+            # Normal assignment
+            super().visit_assignment_stmt(o)
+            for lvalue in o.lvalues:
+                self.process_lvalue(lvalue)
+            items = o.lvalues + [rvalue]
+            for i in range(len(items) - 1):
+                lvalue = items[i]
+                rvalue = items[i + 1]
+                if isinstance(lvalue, (TupleExpr, ListExpr)):
+                    self.add_attribute_dependency_for_expr(rvalue, '__iter__')
+            if o.type:
+                for trigger in get_type_triggers(o.type):
+                    self.add_dependency(trigger)
+
+    def process_lvalue(self, lvalue: Expression) -> None:
+        """Generate additional dependencies for an lvalue."""
+        if isinstance(lvalue, IndexExpr):
+            self.add_operator_method_dependency(lvalue.base, '__setitem__')
+        elif isinstance(lvalue, NameExpr):
+            if lvalue.kind in (MDEF, GDEF):
+                # Assignment to an attribute in the class body, or direct assignment to a
+                # global variable.
+                lvalue_type = self.get_non_partial_lvalue_type(lvalue)
+                type_triggers = get_type_triggers(lvalue_type)
+                attr_trigger = make_trigger('%s.%s' % (self.full_target_stack[-1], lvalue.name))
+                for type_trigger in type_triggers:
+                    self.add_dependency(type_trigger, attr_trigger)
+        elif isinstance(lvalue, MemberExpr):
+            if lvalue.kind is None:
+                # Reference to a non-module attribute
+                if lvalue.expr not in self.type_map:
+                    # Unreachable assignment -> not checked so no dependencies to generate.
+                    return
+                object_type = self.type_map[lvalue.expr]
+                lvalue_type = self.get_non_partial_lvalue_type(lvalue)
+                type_triggers = get_type_triggers(lvalue_type)
+                for attr_trigger in self.attribute_triggers(object_type, lvalue.name):
+                    for type_trigger in type_triggers:
+                        self.add_dependency(type_trigger, attr_trigger)
+        elif isinstance(lvalue, (ListExpr, TupleExpr)):
+            for item in lvalue.items:
+                self.process_lvalue(item)
+        # TODO: star lvalue
+
+    def get_non_partial_lvalue_type(self, lvalue: RefExpr) -> Type:
+        lvalue_type = self.type_map[lvalue]
+        if isinstance(lvalue_type, PartialType):
+            if isinstance(lvalue.node, Var) and lvalue.node.type:
+                lvalue_type = lvalue.node.type
+            else:
+                assert False, "Unexpected partial type"
+        return lvalue_type
+
+    def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt) -> None:
+        super().visit_operator_assignment_stmt(o)
+        self.process_lvalue(o.lvalue)
+        method = op_methods[o.op]
+        self.add_attribute_dependency_for_expr(o.lvalue, method)
+        if o.op in ops_with_inplace_method:
+            inplace_method = '__i' + method[2:]
+            self.add_attribute_dependency_for_expr(o.lvalue, inplace_method)
+
+    def visit_for_stmt(self, o: ForStmt) -> None:
+        super().visit_for_stmt(o)
+        # __getitem__ is only used if __iter__ is missing but for simplicity we
+        # just always depend on both.
+        self.add_attribute_dependency_for_expr(o.expr, '__iter__')
+        self.add_attribute_dependency_for_expr(o.expr, '__getitem__')
+        self.process_lvalue(o.index)
+        if isinstance(o.index, (TupleExpr, ListExpr)):
+            # Process multiple assignment to index variables.
+            item_type = o.inferred_item_type
+            if item_type:
+                # This is similar to above.
+                self.add_attribute_dependency(item_type, '__iter__')
+                self.add_attribute_dependency(item_type, '__getitem__')
+        if o.index_type:
+            self.add_type_dependencies(o.index_type)
+
+    def visit_with_stmt(self, o: WithStmt) -> None:
+        super().visit_with_stmt(o)
+        for e in o.expr:
+            self.add_attribute_dependency_for_expr(e, '__enter__')
+            self.add_attribute_dependency_for_expr(e, '__exit__')
+        if o.target_type:
+            self.add_type_dependencies(o.target_type)
+
+    def visit_print_stmt(self, o: PrintStmt) -> None:
+        super().visit_print_stmt(o)
+        if o.target:
+            self.add_attribute_dependency_for_expr(o.target, 'write')
+
+    def visit_del_stmt(self, o: DelStmt) -> None:
+        super().visit_del_stmt(o)
+        if isinstance(o.expr, IndexExpr):
+            self.add_attribute_dependency_for_expr(o.expr.base, '__delitem__')
 
     # Expressions
 
@@ -121,6 +293,10 @@ class DependencyVisitor(TraverserVisitor):
             # We don't track depdendencies to local variables, since they
             # aren't externally visible.
             return
+        if o.kind == MDEF:
+            # Direct reference to member is only possible in the scope that
+            # defined the name, so no dependency is required.
+            return
         if o.fullname is not None:
             trigger = make_trigger(o.fullname)
             self.add_dependency(trigger)
@@ -134,15 +310,12 @@ class DependencyVisitor(TraverserVisitor):
                 self.add_dependency(trigger)
         else:
             # Reference to a non-module attribute
+            if e.expr not in self.type_map:
+                # No type available -- this happens for unreachable code. Since it's unreachable,
+                # it wasn't type checked and we don't need to generate dependencies.
+                return
             typ = self.type_map[e.expr]
-            if isinstance(typ, Instance):
-                member = '%s.%s' % (typ.type.fullname(), e.name)
-                self.add_dependency(make_trigger(member))
-            elif isinstance(typ, (AnyType, NoneTyp)):
-                pass  # No dependency needed
-            elif isinstance(typ, FunctionLike) and typ.is_type_obj():
-                member = '%s.%s' % (typ.type_object().fullname(), e.name)
-                self.add_dependency(make_trigger(member))
+            self.add_attribute_dependency(typ, e.name)
 
     def visit_call_expr(self, e: CallExpr) -> None:
         super().visit_call_expr(e)
@@ -151,30 +324,206 @@ class DependencyVisitor(TraverserVisitor):
             class_name = callee_type.type_object().fullname()
             self.add_dependency(make_trigger(class_name + '.__init__'))
 
+    def visit_cast_expr(self, e: CastExpr) -> None:
+        super().visit_cast_expr(e)
+        self.add_type_dependencies(e.type)
+
+    def visit_type_application(self, e: TypeApplication) -> None:
+        super().visit_type_application(e)
+        for typ in e.types:
+            self.add_type_dependencies(typ)
+
+    def visit_index_expr(self, e: IndexExpr) -> None:
+        super().visit_index_expr(e)
+        self.add_operator_method_dependency(e.base, '__getitem__')
+
+    def visit_unary_expr(self, e: UnaryExpr) -> None:
+        super().visit_unary_expr(e)
+        if e.op not in unary_op_methods:
+            return
+        method = unary_op_methods[e.op]
+        self.add_operator_method_dependency(e.expr, method)
+
+    def visit_op_expr(self, e: OpExpr) -> None:
+        super().visit_op_expr(e)
+        self.process_binary_op(e.op, e.left, e.right)
+
+    def visit_comparison_expr(self, e: ComparisonExpr) -> None:
+        super().visit_comparison_expr(e)
+        for i, op in enumerate(e.operators):
+            left = e.operands[i]
+            right = e.operands[i + 1]
+            self.process_binary_op(op, left, right)
+            if self.python2 and op in ('==', '!=', '<', '<=', '>', '>='):
+                self.add_operator_method_dependency(left, '__cmp__')
+                self.add_operator_method_dependency(right, '__cmp__')
+
+    def process_binary_op(self, op: str, left: Expression, right: Expression) -> None:
+        method = op_methods.get(op)
+        if method:
+            if op == 'in':
+                self.add_operator_method_dependency(right, method)
+            else:
+                self.add_operator_method_dependency(left, method)
+                rev_method = reverse_op_methods.get(method)
+                if rev_method:
+                    self.add_operator_method_dependency(right, rev_method)
+
+    def add_operator_method_dependency(self, e: Expression, method: str) -> None:
+        typ = self.type_map.get(e)
+        if typ is not None:
+            self.add_operator_method_dependency_for_type(typ, method)
+
+    def add_operator_method_dependency_for_type(self, typ: Type, method: str) -> None:
+        # Note that operator methods can't be (non-metaclass) methods of type objects
+        # (that is, TypeType objects or Callables representing a type).
+        # TODO: TypedDict
+        # TODO: metaclasses
+        if isinstance(typ, TypeVarType):
+            typ = typ.upper_bound
+        if isinstance(typ, TupleType):
+            typ = typ.fallback
+        if isinstance(typ, Instance):
+            trigger = make_trigger(typ.type.fullname() + '.' + method)
+            self.add_dependency(trigger)
+        elif isinstance(typ, UnionType):
+            for item in typ.items:
+                self.add_operator_method_dependency_for_type(item, method)
+
+    def visit_generator_expr(self, e: GeneratorExpr) -> None:
+        super().visit_generator_expr(e)
+        for seq in e.sequences:
+            self.add_iter_dependency(seq)
+
+    def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None:
+        super().visit_dictionary_comprehension(e)
+        for seq in e.sequences:
+            self.add_iter_dependency(seq)
+
+    def visit_star_expr(self, e: StarExpr) -> None:
+        super().visit_star_expr(e)
+        self.add_iter_dependency(e.expr)
+
+    def visit_yield_from_expr(self, e: YieldFromExpr) -> None:
+        super().visit_yield_from_expr(e)
+        self.add_iter_dependency(e.expr)
+
     # Helpers
 
     def add_dependency(self, trigger: str, target: Optional[str] = None) -> None:
+        """Add dependency from trigger to a target.
+
+        If the target is not given explicitly, use the current target.
+        """
+        if trigger.startswith(('<builtins.', '<typing.')):
+            # Don't track dependencies to certain builtins to keep the size of
+            # the dependencies manageable. These dependencies should only
+            # change on mypy version updates, which will require a full rebuild
+            # anyway.
+            return
         if target is None:
-            target = self.current()
+            target = self.current_target()
         self.map.setdefault(trigger, set()).add(target)
 
-    def push(self, component: str) -> str:
-        target = '%s.%s' % (self.current(), component)
-        self.stack.append(target)
+    def add_type_dependencies(self, typ: Type, target: Optional[str] = None) -> None:
+        """Add dependencies to all components of a type.
+
+        Args:
+            target: If not None, override the default (current) target of the
+                generated dependency.
+        """
+        # TODO: Use this method in more places where get_type_triggers() + add_dependency()
+        #       are called together.
+        for trigger in get_type_triggers(typ):
+            self.add_dependency(trigger, target)
+
+    def add_attribute_dependency(self, typ: Type, name: str) -> None:
+        """Add dependencies for accessing a named attribute of a type."""
+        targets = self.attribute_triggers(typ, name)
+        for target in targets:
+            self.add_dependency(target)
+
+    def attribute_triggers(self, typ: Type, name: str) -> List[str]:
+        """Return all triggers associated with the attribute of a type."""
+        if isinstance(typ, TypeVarType):
+            typ = typ.upper_bound
+        if isinstance(typ, TupleType):
+            typ = typ.fallback
+        if isinstance(typ, Instance):
+            member = '%s.%s' % (typ.type.fullname(), name)
+            return [make_trigger(member)]
+        elif isinstance(typ, FunctionLike) and typ.is_type_obj():
+            member = '%s.%s' % (typ.type_object().fullname(), name)
+            return [make_trigger(member)]
+        elif isinstance(typ, UnionType):
+            targets = []
+            for item in typ.items:
+                targets.extend(self.attribute_triggers(item, name))
+            return targets
+        elif isinstance(typ, TypeType):
+            # TODO: Metaclass attribute lookup
+            return self.attribute_triggers(typ.item, name)
+        else:
+            return []
+
+    def add_attribute_dependency_for_expr(self, e: Expression, name: str) -> None:
+        typ = self.type_map.get(e)
+        if typ is not None:
+            self.add_attribute_dependency(typ, name)
+
+    def add_iter_dependency(self, node: Expression) -> None:
+        typ = self.type_map.get(node)
+        if typ:
+            self.add_attribute_dependency(typ, '__iter__')
+
+    def enter_file_scope(self, prefix: str) -> None:
+        """Enter a module target scope."""
+        self.target_stack.append(prefix)
+        self.full_target_stack.append(prefix)
+        self.scope_stack.append(None)
+
+    def enter_function_scope(self, fdef: FuncDef) -> str:
+        """Enter a function target scope."""
+        target = '%s.%s' % (self.full_target_stack[-1], fdef.name())
+        self.target_stack.append(target)
+        self.full_target_stack.append(target)
+        self.scope_stack.append(fdef)
         return target
 
-    def pop(self) -> None:
-        self.stack.pop()
+    def enter_class_scope(self, info: TypeInfo) -> str:
+        """Enter a class target scope."""
+        # Duplicate the previous top non-class target (it can't be a class but since the
+        # depths of all stacks must agree we need something).
+        self.target_stack.append(self.target_stack[-1])
+        full_target = '%s.%s' % (self.full_target_stack[-1], info.name())
+        self.full_target_stack.append(full_target)
+        self.scope_stack.append(info)
+        return full_target
+
+    def leave_scope(self) -> None:
+        """Leave a target scope."""
+        self.target_stack.pop()
+        self.full_target_stack.pop()
+        self.scope_stack.pop()
+
+    def current_target(self) -> str:
+        """Return the current target (non-class; for a class return enclosing module)."""
+        return self.target_stack[-1]
 
-    def current(self) -> str:
-        return self.stack[-1]
+    def current_full_target(self) -> str:
+        """Return the current target (may be a class)."""
+        return self.full_target_stack[-1]
 
+    def current_scope(self) -> Optional[Node]:
+        return self.scope_stack[-1]
 
-def get_type_dependencies(typ: Type) -> List[str]:
-    return typ.accept(TypeDependenciesVisitor())
 
+def get_type_triggers(typ: Type) -> List[str]:
+    """Return all triggers that correspond to a type becoming stale."""
+    return typ.accept(TypeTriggersVisitor())
 
-class TypeDependenciesVisitor(TypeVisitor[List[str]]):
+
+class TypeTriggersVisitor(TypeVisitor[List[str]]):
     def __init__(self) -> None:
         self.deps = []  # type: List[str]
 
@@ -182,7 +531,7 @@ class TypeDependenciesVisitor(TypeVisitor[List[str]]):
         trigger = make_trigger(typ.type.fullname())
         triggers = [trigger]
         for arg in typ.args:
-            triggers.extend(get_type_dependencies(arg))
+            triggers.extend(get_type_triggers(arg))
         return triggers
 
     def visit_any(self, typ: AnyType) -> List[str]:
@@ -193,10 +542,17 @@ class TypeDependenciesVisitor(TypeVisitor[List[str]]):
 
     def visit_callable_type(self, typ: CallableType) -> List[str]:
         # TODO: generic callables
+        # TODO: fallback?
         triggers = []
         for arg in typ.arg_types:
-            triggers.extend(get_type_dependencies(arg))
-        triggers.extend(get_type_dependencies(typ.ret_type))
+            triggers.extend(get_type_triggers(arg))
+        triggers.extend(get_type_triggers(typ.ret_type))
+        return triggers
+
+    def visit_overloaded(self, typ: Overloaded) -> List[str]:
+        triggers = []
+        for item in typ.items():
+            triggers.extend(get_type_triggers(item))
         return triggers
 
     def visit_deleted_type(self, typ: DeletedType) -> List[str]:
@@ -206,21 +562,28 @@ class TypeDependenciesVisitor(TypeVisitor[List[str]]):
         assert False, "Should not see a partial type here"
 
     def visit_tuple_type(self, typ: TupleType) -> List[str]:
-        raise NotImplementedError
+        triggers = []
+        for item in typ.items:
+            triggers.extend(get_type_triggers(item))
+        triggers.extend(get_type_triggers(typ.fallback))
+        return triggers
 
     def visit_type_type(self, typ: TypeType) -> List[str]:
-        # TODO: replace with actual implementation
-        return []
+        return get_type_triggers(typ.item)
 
     def visit_forwardref_type(self, typ: ForwardRef) -> List[str]:
         assert False, 'Internal error: Leaked forward reference object {}'.format(typ)
 
     def visit_type_var(self, typ: TypeVarType) -> List[str]:
-        # TODO: replace with actual implementation
-        return []
+        # TODO: bound (values?)
+        triggers = []
+        if typ.fullname:
+            triggers.append(make_trigger(typ.fullname))
+        return triggers
 
     def visit_typeddict_type(self, typ: TypedDictType) -> List[str]:
-        raise NotImplementedError
+        # TODO: implement
+        return []
 
     def visit_unbound_type(self, typ: UnboundType) -> List[str]:
         return []
@@ -229,9 +592,33 @@ class TypeDependenciesVisitor(TypeVisitor[List[str]]):
         return []
 
     def visit_union_type(self, typ: UnionType) -> List[str]:
-        raise NotImplementedError
+        triggers = []
+        for item in typ.items:
+            triggers.extend(get_type_triggers(item))
+        return triggers
 
 
 def non_trivial_bases(info: TypeInfo) -> List[TypeInfo]:
     return [base for base in info.mro[1:]
             if base.fullname() != 'builtins.object']
+
+
+def dump_all_dependencies(modules: Dict[str, MypyFile],
+                          type_map: Dict[Expression, Type],
+                          python_version: Tuple[int, int]) -> None:
+    """Generate dependencies for all interesting modules and print them to stdout."""
+    all_deps = {}  # type: Dict[str, Set[str]]
+    for id, node in modules.items():
+        # Uncomment for debugging:
+        # print('processing', id)
+        if id in ('builtins', 'typing') or '/typeshed/' in node.path:
+            continue
+        assert id == node.fullname()
+        deps = get_dependencies(node, type_map, python_version)
+        for trigger, targets in deps.items():
+            all_deps.setdefault(trigger, set()).update(targets)
+
+    for trigger, targets in sorted(all_deps.items(), key=lambda x: x[0]):
+        print(trigger)
+        for target in sorted(targets):
+            print('    %s' % target)
diff --git a/mypy/server/target.py b/mypy/server/target.py
index 0b4636b..78e2858 100644
--- a/mypy/server/target.py
+++ b/mypy/server/target.py
@@ -1,17 +1,20 @@
-from typing import Iterable, Tuple, List
+from typing import Iterable, Tuple, List, Optional
 
 
-def module_prefix(modules: Iterable[str], target: str) -> str:
-    return split_target(modules, target)[0]
+def module_prefix(modules: Iterable[str], target: str) -> Optional[str]:
+    result = split_target(modules, target)
+    if result is None:
+        return None
+    return result[0]
 
 
-def split_target(modules: Iterable[str], target: str) -> Tuple[str, str]:
+def split_target(modules: Iterable[str], target: str) -> Optional[Tuple[str, str]]:
     remaining = []  # type: List[str]
     while True:
         if target in modules:
             return target, '.'.join(remaining)
         components = target.rsplit('.', 1)
         if len(components) == 1:
-            assert False, 'Cannot find module prefix for {}'.format(target)
+            return None
         target = components[0]
         remaining.insert(0, components[1])
diff --git a/mypy/server/update.py b/mypy/server/update.py
index c8dc70b..d73daed 100644
--- a/mypy/server/update.py
+++ b/mypy/server/update.py
@@ -46,16 +46,24 @@ Major todo items:
 - Support multiple type checking passes
 """
 
-from typing import Dict, List, Set, Tuple, Iterable, Union, Optional
+import os.path
+from typing import Dict, List, Set, Tuple, Iterable, Union, Optional, Mapping, NamedTuple
 
-from mypy.build import BuildManager, State
+from mypy.build import (
+    BuildManager, State, BuildSource, Graph, load_graph, SavedCache, CacheMeta,
+    cache_meta_from_dict, find_module_clear_caches
+)
 from mypy.checker import DeferredNode
-from mypy.errors import Errors
+from mypy.errors import Errors, CompileError
 from mypy.nodes import (
-    MypyFile, FuncDef, TypeInfo, Expression, SymbolNode, Var, FuncBase, ClassDef
+    MypyFile, FuncDef, TypeInfo, Expression, SymbolNode, Var, FuncBase, ClassDef, Decorator,
+    Import, ImportFrom, SymbolTable
 )
+from mypy.options import Options
 from mypy.types import Type
-from mypy.server.astdiff import compare_symbol_tables, is_identical_type
+from mypy.server.astdiff import (
+    snapshot_symbol_table, compare_symbol_table_snapshots, is_identical_type, SnapshotItem
+)
 from mypy.server.astmerge import merge_asts
 from mypy.server.aststrip import strip_target
 from mypy.server.deps import get_dependencies, get_dependencies_of_target
@@ -70,108 +78,473 @@ DEBUG = False
 class FineGrainedBuildManager:
     def __init__(self,
                  manager: BuildManager,
-                 graph: Dict[str, State]) -> None:
+                 graph: Graph) -> None:
+        """Initialize fine-grained build based on a batch build.
+
+        Args:
+            manager: State of the build (mutated by this class)
+            graph: Additional state of the build (only read to initialize state)
+        """
         self.manager = manager
-        self.graph = graph
-        self.deps = get_all_dependencies(manager, graph)
+        self.options = manager.options
+        self.previous_modules = get_module_to_path_map(manager)
+        self.deps = get_all_dependencies(manager, graph, self.options)
         self.previous_targets_with_errors = manager.errors.targets()
-
-    def update(self, changed_modules: List[str]) -> List[str]:
+        # Module, if any, that had blocking errors in the last run as (id, path) tuple.
+        # TODO: Handle blocking errors in the initial build
+        self.blocking_error = None  # type: Optional[Tuple[str, str]]
+        # Modules that we haven't processed yet but that are known to be stale.
+        self.stale = []  # type: List[Tuple[str, str]]
+        mark_all_meta_as_memory_only(graph, manager)
+        manager.saved_cache = preserve_full_cache(graph, manager)
+        self.type_maps = extract_type_maps(graph)
+
+    def update(self, changed_modules: List[Tuple[str, str]]) -> List[str]:
         """Update previous build result by processing changed modules.
 
         Also propagate changes to other modules as needed, but only process
         those parts of other modules that are affected by the changes. Retain
         the existing ASTs and symbol tables of unaffected modules.
 
-        TODO: What about blocking errors?
+        Create new graph with new State objects, but reuse original BuildManager.
 
         Args:
-            manager: State of the build
-            graph: Additional state of the build
-            deps: Fine-grained dependcy map for the build (mutated by this function)
-            changed_modules: Modules changed since the previous update/build (assume
-                this is correct; not validated here)
+            changed_modules: Modules changed since the previous update/build; each is
+                a (module id, path) tuple. Includes modified, added and deleted modules.
+                Assume this is correct; it's not validated here.
 
         Returns:
             A list of errors.
         """
+        assert changed_modules, 'No changed modules'
+
+        # Reset global caches for the new build.
+        find_module_clear_caches()
+
+        changed_modules = dedupe_modules(changed_modules + self.stale)
+        initial_set = {id for id, _ in changed_modules}
+        if DEBUG:
+            print('==== update %s ====' % ', '.join(repr(id)
+                                                    for id, _ in changed_modules))
+
+        if self.blocking_error:
+            # Handle blocking errors first. We'll exit as soon as we find a
+            # module that still has blocking errors.
+            if DEBUG:
+                print('existing blocker: %s' % self.blocking_error[0])
+            changed_modules = dedupe_modules([self.blocking_error] + changed_modules)
+            self.blocking_error = None
+
+        while changed_modules:
+            next_id, next_path = changed_modules.pop(0)
+            if next_id not in self.previous_modules and next_id not in initial_set:
+                print('skip %r (module not in import graph)' % next_id)
+                continue
+            result = self.update_single(next_id, next_path)
+            messages, remaining, (next_id, next_path), blocker = result
+            changed_modules = [(id, path) for id, path in changed_modules
+                               if id != next_id]
+            changed_modules = dedupe_modules(changed_modules + remaining)
+            if blocker:
+                self.blocking_error = (next_id, next_path)
+                self.stale = changed_modules
+                return messages
+
+        return messages
+
+    def update_single(self, module: str, path: str) -> Tuple[List[str],
+                                                             List[Tuple[str, str]],
+                                                             Tuple[str, str],
+                                                             bool]:
+        """Update a single modified module.
+
+        If the module contains imports of previously unseen modules, only process one of
+        the new modules and return the remaining work to be done.
+
+        Returns:
+            Tuple with these items:
+
+            - Error messages
+            - Remaining modules to process as (module id, path) tuples
+            - Module which was actually processed as (id, path) tuple
+            - Whether there was a blocking error in the module
+        """
         if DEBUG:
-            print('==== update ====')
+            print('--- update single %r ---' % module)
+
+        # TODO: If new module brings in other modules, we parse some files multiple times.
         manager = self.manager
-        graph = self.graph
-        old_modules = dict(manager.modules)
+        previous_modules = self.previous_modules
+
+        # Record symbol table snapshot of the old version of the changed module.
+        old_snapshots = {}  # type: Dict[str, Dict[str, SnapshotItem]]
+        if module in manager.modules:
+            snapshot = snapshot_symbol_table(module, manager.modules[module].names)
+            old_snapshots[module] = snapshot
+
         manager.errors.reset()
-        new_modules, new_type_maps = build_incremental_step(manager, changed_modules)
+        result = update_single_isolated(module, path, manager, previous_modules)
+        if isinstance(result, BlockedUpdate):
+            # Blocking error -- just give up
+            module, path, remaining = result
+            self.previous_modules = get_module_to_path_map(manager)
+            return manager.errors.messages(), remaining, (module, path), True
+        assert isinstance(result, NormalUpdate)  # Work around #4124
+        module, path, remaining, tree, graph = result
+
         # TODO: What to do with stale dependencies?
-        triggered = calculate_active_triggers(manager, old_modules, new_modules)
+        triggered = calculate_active_triggers(manager, old_snapshots, {module: tree})
         if DEBUG:
             print('triggered:', sorted(triggered))
-        replace_modules_with_new_variants(manager, graph, old_modules, new_modules, new_type_maps)
-        update_dependencies(new_modules, self.deps, graph)
+        update_dependencies({module: tree}, self.deps, graph, self.options)
         propagate_changes_using_dependencies(manager, graph, self.deps, triggered,
-                                             set(changed_modules),
+                                             {module},
                                              self.previous_targets_with_errors,
                                              graph)
-        self.previous_targets_with_errors = manager.errors.targets()
-        return manager.errors.messages()
 
-
-def get_all_dependencies(manager: BuildManager, graph: Dict[str, State]) -> Dict[str, Set[str]]:
+        # Preserve state needed for the next update.
+        self.previous_targets_with_errors = manager.errors.targets()
+        # If deleted, module won't be in the graph.
+        if module in graph:
+            # Generate metadata so that we can reuse the AST in the next run.
+            graph[module].write_cache()
+        for id, state in graph.items():
+            # Look up missing ASTs from saved cache.
+            if state.tree is None and id in manager.saved_cache:
+                meta, tree, type_map = manager.saved_cache[id]
+                state.tree = tree
+        mark_all_meta_as_memory_only(graph, manager)
+        manager.saved_cache = preserve_full_cache(graph, manager)
+        self.previous_modules = get_module_to_path_map(manager)
+        self.type_maps = extract_type_maps(graph)
+
+        return manager.errors.messages(), remaining, (module, path), False
+
+
+def mark_all_meta_as_memory_only(graph: Dict[str, State],
+                                 manager: BuildManager) -> None:
+    for id, state in graph.items():
+        if id in manager.saved_cache:
+            # Don't look at disk.
+            old = manager.saved_cache[id]
+            manager.saved_cache[id] = (old[0]._replace(memory_only=True),
+                                       old[1],
+                                       old[2])
+
+
+def get_all_dependencies(manager: BuildManager, graph: Dict[str, State],
+                         options: Options) -> Dict[str, Set[str]]:
     """Return the fine-grained dependency map for an entire build."""
     deps = {}  # type: Dict[str, Set[str]]
-    update_dependencies(manager.modules, deps, graph)
+    update_dependencies(manager.modules, deps, graph, options)
     return deps
 
 
-def build_incremental_step(manager: BuildManager,
-                           changed_modules: List[str]) -> Tuple[Dict[str, MypyFile],
-                                                                Dict[str, Dict[Expression, Type]]]:
-    """Build new versions of changed modules only.
-
-    Return the new ASTs for the changed modules. They will be totally
-    separate from the existing ASTs and need to merged afterwards.
+# The result of update_single_isolated when no blockers, with these items:
+#
+# - Id of the changed module (can be different from the module argument)
+# - Path of the changed module
+# - Remaining changed modules that are not processed yet as (module id, path)
+#   tuples (non-empty if the original changed module imported other new
+#   modules)
+# - New AST for the changed module (None if module was deleted)
+# - The entire updated build graph
+NormalUpdate = NamedTuple('NormalUpdate', [('module', str),
+                                           ('path', str),
+                                           ('remaining', List[Tuple[str, str]]),
+                                           ('tree', Optional[MypyFile]),
+                                           ('graph', Graph)])
+
+# The result of update_single_isolated when there is a blocking error. Items
+# are similar to NormalUpdate (but there are fewer).
+BlockedUpdate = NamedTuple('BlockedUpdate', [('module', str),
+                                             ('path', str),
+                                             ('remaining', List[Tuple[str, str]])])
+
+UpdateResult = Union[NormalUpdate, BlockedUpdate]
+
+
+def update_single_isolated(module: str,
+                           path: str,
+                           manager: BuildManager,
+                           previous_modules: Dict[str, str]) -> UpdateResult:
+    """Build a new version of one changed module only.
+
+    Don't propagate changes elsewhere in the program. Raise CompileError on
+    encountering a blocking error.
+
+    Args:
+        module: Changed module (modified, created or deleted)
+        path: Path of the changed module
+        manager: Build manager
+        previous_modules: Mapping of module id to path from the previous build
+
+    Returns a named tuple describing the result (see above for details).
     """
-    assert len(changed_modules) == 1
-    id = changed_modules[0]
-    path = manager.modules[id].path
-
-    # TODO: what if file is missing?
-    with open(path) as f:
-        source = f.read()
-
-    state = State(id=id,
-                  path=path,
-                  source=source,
-                  manager=manager)  # TODO: more args?
+    if module in manager.modules:
+        assert_equivalent_paths(path, manager.modules[module].path)
+
+    old_modules = dict(manager.modules)
+    sources = get_sources(previous_modules, [(module, path)])
+    invalidate_stale_cache_entries(manager.saved_cache, [(module, path)])
+
+    manager.missing_modules = set()
+    try:
+        graph = load_graph(sources, manager)
+    except CompileError as err:
+        # Parse error somewhere in the program -- a blocker
+        assert err.module_with_blocker
+        if err.module_with_blocker != module:
+            # Blocker is in a fresh module. Delete the state of the original target module
+            # since it will be stale.
+            #
+            # TODO: It would be more efficient to store the original target module
+            path = manager.modules[module].path
+            del manager.modules[module]
+            remaining_modules = [(module, path)]
+        else:
+            remaining_modules = []
+        return BlockedUpdate(err.module_with_blocker, path, remaining_modules)
+
+    if not os.path.isfile(path):
+        graph = delete_module(module, graph, manager)
+        return NormalUpdate(module, path, [], None, graph)
+
+    # Find any other modules brought in by imports.
+    changed_modules = get_all_changed_modules(module, path, previous_modules, graph)
+    # If there are multiple modules to process, only process the last one of them and return
+    # the remaining ones to the caller. Often the last one is going to be imported by
+    # one of the prior modules, making it more efficient to process it first.
+    if len(changed_modules) > 1:
+        module, path = changed_modules.pop()
+        remaining_modules = changed_modules
+        # The remaining modules haven't been processed yet so drop them.
+        for id, _ in remaining_modules:
+            del manager.modules[id]
+            del graph[id]
+        if DEBUG:
+            print('--> %r (newly imported)' % module)
+    else:
+        remaining_modules = []
+
+    state = graph[module]
+
+    # Process the changed file.
     state.parse_file()
     # TODO: state.fix_suppressed_dependencies()?
-    state.semantic_analysis()
+    try:
+        state.semantic_analysis()
+    except CompileError as err:
+        # There was a blocking error, so module AST is incomplete. Restore old modules.
+        manager.modules.clear()
+        manager.modules.update(old_modules)
+        del graph[module]
+        return BlockedUpdate(module, path, remaining_modules)
     state.semantic_analysis_pass_three()
-    # TODO: state.semantic_analysis_apply_patches()
+    state.semantic_analysis_apply_patches()
+
+    # Merge old and new ASTs.
+    assert state.tree is not None, "file must be at least parsed"
+    new_modules = {module: state.tree}  # type: Dict[str, Optional[MypyFile]]
+    replace_modules_with_new_variants(manager, graph, old_modules, new_modules)
+
+    # Perform type checking.
     state.type_check_first_pass()
-    # TODO: state.type_check_second_pass()?
+    state.type_check_second_pass()
     state.finish_passes()
     # TODO: state.write_cache()?
     # TODO: state.mark_as_rechecked()?
 
-    assert state.tree is not None, "file must be at least parsed"
-    return {id: state.tree}, {id: state.type_checker.type_map}
+    graph[module] = state
+
+    return NormalUpdate(module, path, remaining_modules, state.tree, graph)
+
+
+def assert_equivalent_paths(path1: str, path2: str) -> None:
+    path1 = os.path.normpath(path1)
+    path2 = os.path.normpath(path2)
+    assert path1 == path2, '%s != %s' % (path1, path2)
+
+
+def delete_module(module_id: str,
+                  graph: Dict[str, State],
+                  manager: BuildManager) -> Dict[str, State]:
+    # TODO: Deletion of a package
+    # TODO: Remove deps for the module (this only affects memory use, not correctness)
+    assert module_id not in graph
+    new_graph = graph.copy()
+    del manager.modules[module_id]
+    if module_id in manager.saved_cache:
+        del manager.saved_cache[module_id]
+    components = module_id.split('.')
+    if len(components) > 1:
+        parent = manager.modules['.'.join(components[:-1])]
+        if components[-1] in parent.names:
+            del parent.names[components[-1]]
+    return new_graph
+
+
+def dedupe_modules(modules: List[Tuple[str, str]]) -> List[Tuple[str, str]]:
+    seen = set()  # type: Set[str]
+    result = []
+    for id, path in modules:
+        if id not in seen:
+            seen.add(id)
+            result.append((id, path))
+    return result
 
 
-def update_dependencies(new_modules: Dict[str, MypyFile],
+def get_module_to_path_map(manager: BuildManager) -> Dict[str, str]:
+    return {module: node.path
+            for module, node in manager.modules.items()}
+
+
+def get_sources(modules: Dict[str, str],
+                changed_modules: List[Tuple[str, str]]) -> List[BuildSource]:
+    # TODO: Race condition when reading from the file system; we should only read each
+    #       bit of external state once during a build to have a consistent view of the world
+    items = sorted(modules.items(), key=lambda x: x[0])
+    sources = [BuildSource(path, id, None)
+               for id, path in items
+               if os.path.isfile(path)]
+    for id, path in changed_modules:
+        if os.path.isfile(path) and id not in modules:
+            sources.append(BuildSource(path, id, None))
+    return sources
+
+
+def get_all_changed_modules(root_module: str,
+                            root_path: str,
+                            old_modules: Dict[str, str],
+                            new_graph: Dict[str, State]) -> List[Tuple[str, str]]:
+    changed_set = {root_module}
+    changed_modules = [(root_module, root_path)]
+    for st in new_graph.values():
+        if st.id not in old_modules and st.id not in changed_set:
+            assert st.path
+            changed_set.add(st.id)
+            changed_modules.append((st.id, st.path))
+    return changed_modules
+
+
+def preserve_full_cache(graph: Graph, manager: BuildManager) -> SavedCache:
+    """Preserve every module with an AST in the graph, including modules with errors."""
+    saved_cache = {}
+    for id, state in graph.items():
+        assert state.id == id
+        if state.tree is not None:
+            meta = state.meta
+            if meta is None:
+                # No metadata, likely because of an error. We still want to retain the AST.
+                # There is no corresponding JSON so create partial "memory-only" metadata.
+                assert state.path
+                dep_prios = state.dependency_priorities()
+                meta = memory_only_cache_meta(
+                    id,
+                    state.path,
+                    state.dependencies,
+                    state.suppressed,
+                    list(state.child_modules),
+                    dep_prios,
+                    state.source_hash,
+                    state.ignore_all,
+                    manager)
+            else:
+                meta = meta._replace(memory_only=True)
+            saved_cache[id] = (meta, state.tree, state.type_map())
+    return saved_cache
+
+
+def memory_only_cache_meta(id: str,
+                           path: str,
+                           dependencies: List[str],
+                           suppressed: List[str],
+                           child_modules: List[str],
+                           dep_prios: List[int],
+                           source_hash: str,
+                           ignore_all: bool,
+                           manager: BuildManager) -> CacheMeta:
+    """Create cache metadata for module that doesn't have a JSON cache files.
+
+    JSON cache files aren't written for modules with errors, but we want to still
+    cache them in fine-grained incremental mode.
+    """
+    options = manager.options.clone_for_module(id)
+    # Note that we omit attributes related to the JSON files.
+    meta = {'id': id,
+            'path': path,
+            'memory_only': True,  # Important bit: don't expect JSON files to exist
+            'hash': source_hash,
+            'dependencies': dependencies,
+            'suppressed': suppressed,
+            'child_modules': child_modules,
+            'options': options.select_options_affecting_cache(),
+            'dep_prios': dep_prios,
+            'interface_hash': '',
+            'version_id': manager.version_id,
+            'ignore_all': ignore_all,
+            }
+    return cache_meta_from_dict(meta, '')
+
+
+def invalidate_stale_cache_entries(cache: SavedCache,
+                                   changed_modules: List[Tuple[str, str]]) -> None:
+    for name, _ in changed_modules:
+        if name in cache:
+            del cache[name]
+
+
+def verify_dependencies(state: State, manager: BuildManager) -> None:
+    """Report errors for import targets in module that don't exist."""
+    for dep in state.dependencies + state.suppressed:  # TODO: ancestors?
+        if dep not in manager.modules:
+            assert state.tree
+            line = find_import_line(state.tree, dep) or 1
+            assert state.path
+            manager.module_not_found(state.path, state.id, line, dep)
+
+
+def find_import_line(node: MypyFile, target: str) -> Optional[int]:
+    for imp in node.imports:
+        if isinstance(imp, Import):
+            for name, _ in imp.ids:
+                if name == target:
+                    return imp.line
+        if isinstance(imp, ImportFrom):
+            if imp.id == target:
+                return imp.line
+            # TODO: Relative imports
+            for name, _ in imp.names:
+                if '%s.%s' % (imp.id, name) == target:
+                    return imp.line
+        # TODO: ImportAll
+    return None
+
+
+def update_dependencies(new_modules: Mapping[str, Optional[MypyFile]],
                         deps: Dict[str, Set[str]],
-                        graph: Dict[str, State]) -> None:
+                        graph: Dict[str, State],
+                        options: Options) -> None:
     for id, node in new_modules.items():
-        module_deps = get_dependencies(prefix=id,
-                                       node=node,
-                                       type_map=graph[id].type_checker.type_map)
+        if node is None:
+            continue
+        if '/typeshed/' in node.path:
+            # We don't track changes to typeshed -- the assumption is that they are only changed
+            # as part of mypy updates, which will invalidate everything anyway.
+            #
+            # TODO: Not a reliable test, as we could have a package named typeshed.
+            # TODO: Consider relaxing this -- maybe allow some typeshed changes to be tracked.
+            continue
+        module_deps = get_dependencies(target=node,
+                                       type_map=graph[id].type_map(),
+                                       python_version=options.python_version)
         for trigger, targets in module_deps.items():
             deps.setdefault(trigger, set()).update(targets)
 
 
 def calculate_active_triggers(manager: BuildManager,
-                              old_modules: Dict[str, MypyFile],
-                              new_modules: Dict[str, MypyFile]) -> Set[str]:
+                              old_snapshots: Dict[str, Dict[str, SnapshotItem]],
+                              new_modules: Dict[str, Optional[MypyFile]]) -> Set[str]:
     """Determine activated triggers by comparing old and new symbol tables.
 
     For example, if only the signature of function m.f is different in the new
@@ -179,7 +552,17 @@ def calculate_active_triggers(manager: BuildManager,
     """
     names = set()  # type: Set[str]
     for id in new_modules:
-        names |= compare_symbol_tables(id, old_modules[id].names, new_modules[id].names)
+        snapshot1 = old_snapshots.get(id)
+        if snapshot1 is None:
+            names.add(id)
+            snapshot1 = {}
+        new = new_modules[id]
+        if new is None:
+            snapshot2 = snapshot_symbol_table(id, SymbolTable())
+            names.add(id)
+        else:
+            snapshot2 = snapshot_symbol_table(id, new.names)
+        names |= compare_symbol_table_snapshots(id, snapshot1, snapshot2)
     return {make_trigger(name) for name in names}
 
 
@@ -187,8 +570,7 @@ def replace_modules_with_new_variants(
         manager: BuildManager,
         graph: Dict[str, State],
         old_modules: Dict[str, MypyFile],
-        new_modules: Dict[str, MypyFile],
-        new_type_maps: Dict[str, Dict[Expression, Type]]) -> None:
+        new_modules: Dict[str, Optional[MypyFile]]) -> None:
     """Replace modules with newly builds versions.
 
     Retain the identities of externally visible AST nodes in the
@@ -199,10 +581,11 @@ def replace_modules_with_new_variants(
     propagate_changes_using_dependencies).
     """
     for id in new_modules:
-        merge_asts(old_modules[id], old_modules[id].names,
-                   new_modules[id], new_modules[id].names)
-        manager.modules[id] = old_modules[id]
-        graph[id].type_checker.type_map = new_type_maps[id]
+        new_module = new_modules[id]
+        if id in old_modules and new_module is not None:
+            merge_asts(old_modules[id], old_modules[id].names,
+                       new_module, new_module.names)
+            manager.modules[id] = old_modules[id]
 
 
 def propagate_changes_using_dependencies(
@@ -224,7 +607,7 @@ def propagate_changes_using_dependencies(
         # errors might be lost.
         for target in targets_with_errors:
             id = module_prefix(modules, target)
-            if id not in up_to_date_modules:
+            if id is not None and id not in up_to_date_modules:
                 if id not in todo:
                     todo[id] = set()
                 if DEBUG:
@@ -269,6 +652,9 @@ def find_targets_recursive(
                 worklist |= deps.get(target, set()) - processed
             else:
                 module_id = module_prefix(modules, target)
+                if module_id is None:
+                    # Deleted module.
+                    continue
                 if module_id in up_to_date_modules:
                     # Already processed.
                     continue
@@ -291,6 +677,11 @@ def reprocess_nodes(manager: BuildManager,
 
     Return fired triggers.
     """
+    if module_id not in manager.saved_cache or module_id not in graph:
+        if DEBUG:
+            print('%s not in saved cache or graph (blocking errors or deleted?)' % module_id)
+        return set()
+
     file_node = manager.modules[module_id]
 
     def key(node: DeferredNode) -> str:
@@ -331,13 +722,19 @@ def reprocess_nodes(manager: BuildManager,
     old_types_map = get_enclosing_namespace_types(nodes)
 
     # Type check.
-    graph[module_id].type_checker.check_second_pass(nodes)  # TODO: check return value
+    meta, file_node, type_map = manager.saved_cache[module_id]
+    graph[module_id].tree = file_node
+    graph[module_id].type_checker().type_map = type_map
+    graph[module_id].type_checker().check_second_pass(nodes)  # TODO: check return value
 
     # Check if any attribute types were changed and need to be propagated further.
     new_triggered = get_triggered_namespace_items(old_types_map)
 
     # Dependencies may have changed.
-    update_deps(module_id, nodes, graph, deps)
+    update_deps(module_id, nodes, graph, deps, manager.options)
+
+    # Report missing imports.
+    verify_dependencies(graph[module_id], manager)
 
     return new_triggered
 
@@ -379,21 +776,23 @@ def get_triggered_namespace_items(old_types_map: Dict[NamespaceNode, Dict[str, T
 def update_deps(module_id: str,
                 nodes: List[DeferredNode],
                 graph: Dict[str, State],
-                deps: Dict[str, Set[str]]) -> None:
+                deps: Dict[str, Set[str]],
+                options: Options) -> None:
     for deferred in nodes:
         node = deferred.node
-        prefix = module_id
-        if isinstance(node, FuncBase) and node.info:
-            prefix += '.{}'.format(node.info.name())
-        type_map = graph[module_id].type_checker.type_map
-        new_deps = get_dependencies_of_target(prefix, node, type_map)
+        type_map = graph[module_id].type_map()
+        new_deps = get_dependencies_of_target(module_id, node, type_map, options.python_version)
         for trigger, targets in new_deps.items():
             deps.setdefault(trigger, set()).update(targets)
 
 
 def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNode]:
     """Look up a target by fully-qualified name."""
-    module, rest = split_target(modules, target)
+    items = split_target(modules, target)
+    if items is None:
+        # Deleted target
+        return []
+    module, rest = items
     if rest:
         components = rest.split('.')
     else:
@@ -410,6 +809,9 @@ def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNod
         if isinstance(node, MypyFile):
             file = node
         assert isinstance(node, (MypyFile, TypeInfo))
+        if c not in node.names:
+            # Deleted target
+            return []
         node = node.names[c].node
     if isinstance(node, TypeInfo):
         # A ClassDef target covers the body of the class and everything defined
@@ -423,5 +825,12 @@ def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNod
             if isinstance(node, FuncDef):
                 result.extend(lookup_target(modules, target + '.' + name))
         return result
-    assert isinstance(node, (FuncDef, MypyFile))
+    if isinstance(node, Decorator):
+        # Decorator targets actually refer to the function definition only.
+        node = node.func
+    assert isinstance(node, (FuncDef, MypyFile)), 'unexpected type: %s' % type(node)
     return [DeferredNode(node, active_class_name, active_class)]
+
+
+def extract_type_maps(graph: Graph) -> Dict[str, Dict[Expression, Type]]:
+    return {id: state.type_map() for id, state in graph.items()}
diff --git a/mypy/solve.py b/mypy/solve.py
index 8eb12b7..c1243b7 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -63,6 +63,7 @@ def solve_constraints(vars: List[TypeVarId], constraints: List[Constraint],
                 # No constraints for type variable -- 'UninhabitedType' is the most specific type.
                 if strict:
                     candidate = UninhabitedType()
+                    candidate.ambiguous = True
                 else:
                     candidate = AnyType(TypeOfAny.special_form)
         elif top is None:
diff --git a/mypy/stats.py b/mypy/stats.py
index 120fd16..63da9da 100644
--- a/mypy/stats.py
+++ b/mypy/stats.py
@@ -64,7 +64,7 @@ class StatisticsVisitor(TraverserVisitor):
 
         self.line_map = {}  # type: Dict[int, int]
 
-        self.type_of_any_counter = Counter()  # type: typing.Counter[TypeOfAny.TypeOfAny]
+        self.type_of_any_counter = Counter()  # type: typing.Counter[TypeOfAny]
         self.any_line_map = {}  # type: Dict[int, List[AnyType]]
 
         self.output = []  # type: List[str]
@@ -128,7 +128,7 @@ class StatisticsVisitor(TraverserVisitor):
                 else:
                     items = [lvalue]
                 for item in items:
-                    if isinstance(item, RefExpr) and item.is_def:
+                    if isinstance(item, RefExpr) and item.is_inferred_def:
                         if self.typemap is not None:
                             self.type(self.typemap.get(item))
         super().visit_assignment_stmt(o)
@@ -304,110 +304,6 @@ def is_complex(t: Type) -> bool:
                                            TypeVarType))
 
 
-html_files = []  # type: List[Tuple[str, str, int, int]]
-
-
-def generate_html_report(tree: MypyFile, path: str, type_map: Dict[Expression, Type],
-                         output_dir: str) -> None:
-    if is_special_module(path):
-        return
-    # There may be more than one right answer for "what should we do here?"
-    # but this is a reasonable one.
-    path = os.path.relpath(path)
-    if path.startswith('..'):
-        return
-    visitor = StatisticsVisitor(inferred=True, filename=tree.fullname(), typemap=type_map,
-                                all_nodes=True)
-    tree.accept(visitor)
-    assert not os.path.isabs(path) and not path.startswith('..')
-    # This line is *wrong* if the preceding assert fails.
-    target_path = os.path.join(output_dir, 'html', path)
-    # replace .py or .pyi with .html
-    target_path = os.path.splitext(target_path)[0] + '.html'
-    assert target_path.endswith('.html')
-    ensure_dir_exists(os.path.dirname(target_path))
-    output = []  # type: List[str]
-    append = output.append
-    append('''\
-<html>
-<head>
-  <style>
-    .red { background-color: #faa; }
-    .yellow { background-color: #ffa; }
-    .white { }
-    .lineno { color: #999; }
-  </style>
-</head>
-<body>
-<pre>''')
-    num_imprecise_lines = 0
-    num_lines = 0
-    with open(path) as input_file:
-        for i, line in enumerate(input_file):
-            lineno = i + 1
-            status = visitor.line_map.get(lineno, TYPE_PRECISE)
-            style_map = {TYPE_PRECISE: 'white',
-                         TYPE_IMPRECISE: 'yellow',
-                         TYPE_ANY: 'red',
-                         TYPE_UNANALYZED: 'red'}
-            style = style_map[status]
-            append('<span class="lineno">%4d</span>   ' % lineno +
-                   '<span class="%s">%s</span>' % (style,
-                                                   cgi.escape(line)))
-            if status != TYPE_PRECISE:
-                num_imprecise_lines += 1
-            if line.strip():
-                num_lines += 1
-    append('</pre>')
-    append('</body></html>')
-    with open(target_path, 'w') as output_file:
-        output_file.writelines(output)
-    target_path = target_path[len(output_dir) + 1:]
-    html_files.append((path, target_path, num_lines, num_imprecise_lines))
-
-
-def generate_html_index(output_dir: str) -> None:
-    path = os.path.join(output_dir, 'index.html')
-    output = []  # type: List[str]
-    append = output.append
-    append('''\
-<html>
-<head>
-  <style>
-  body { font-family: courier; }
-  table { border-collapse: collapse; }
-  table tr td { border: 1px solid black; }
-  td { padding: 0.4em; }
-  .red { background-color: #faa; }
-  .yellow { background-color: #ffa; }
-  </style>
-</head>
-<body>''')
-    append('<h1>Mypy Type Check Coverage Report</h1>\n')
-    append('<table>\n')
-    for source_path, target_path, num_lines, num_imprecise in sorted(html_files):
-        if num_lines == 0:
-            continue
-        source_path = os.path.normpath(source_path)
-        # TODO: Windows paths.
-        if (source_path.startswith('stubs/') or
-                '/stubs/' in source_path):
-            continue
-        percent = 100.0 * num_imprecise / num_lines
-        style = ''
-        if percent >= 20:
-            style = 'class="red"'
-        elif percent >= 5:
-            style = 'class="yellow"'
-        append('<tr %s><td><a href="%s">%s</a><td>%.1f%% imprecise<td>%d LOC\n' % (
-            style, target_path, source_path, percent, num_lines))
-    append('</table>\n')
-    append('</body></html>')
-    with open(path, 'w') as file:
-        file.writelines(output)
-    print('Generated HTML report (old): %s' % os.path.abspath(path))
-
-
 def ensure_dir_exists(dir: str) -> None:
     if not os.path.exists(dir):
         os.makedirs(dir)
diff --git a/mypy/strconv.py b/mypy/strconv.py
index 5b536ab..18f4a0a 100644
--- a/mypy/strconv.py
+++ b/mypy/strconv.py
@@ -303,7 +303,7 @@ class StrConv(NodeVisitor[str]):
         return self.dump(a, o)
 
     def visit_exec_stmt(self, o: 'mypy.nodes.ExecStmt') -> str:
-        return self.dump([o.expr, o.variables1, o.variables2], o)
+        return self.dump([o.expr, o.globals, o.locals], o)
 
     # Expressions
 
@@ -339,13 +339,13 @@ class StrConv(NodeVisitor[str]):
         return self.dump([o.expr], o)
 
     def visit_name_expr(self, o: 'mypy.nodes.NameExpr') -> str:
-        pretty = self.pretty_name(o.name, o.kind, o.fullname, o.is_def, o.node)
+        pretty = self.pretty_name(o.name, o.kind, o.fullname, o.is_inferred_def, o.node)
         return short_type(o) + '(' + pretty + ')'
 
     def pretty_name(self, name: str, kind: Optional[int], fullname: Optional[str],
-                    is_def: bool, target_node: 'Optional[mypy.nodes.Node]' = None) -> str:
+                    is_inferred_def: bool, target_node: 'Optional[mypy.nodes.Node]' = None) -> str:
         n = name
-        if is_def:
+        if is_inferred_def:
             n += '*'
         if target_node:
             id = self.format_id(target_node)
@@ -366,7 +366,7 @@ class StrConv(NodeVisitor[str]):
         return n
 
     def visit_member_expr(self, o: 'mypy.nodes.MemberExpr') -> str:
-        pretty = self.pretty_name(o.name, o.kind, o.fullname, o.is_def, o.node)
+        pretty = self.pretty_name(o.name, o.kind, o.fullname, o.is_inferred_def, o.node)
         return self.dump([o.expr, pretty], o)
 
     def visit_yield_expr(self, o: 'mypy.nodes.YieldExpr') -> str:
diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index ebd58b9..707fb32 100644
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -205,7 +205,7 @@ def generate_stub(path: str,
     options = MypyOptions()
     options.python_version = pyversion
     try:
-        ast = mypy.parse.parse(source, fnam=path, errors=None, options=options)
+        ast = mypy.parse.parse(source, fnam=path, module=module, errors=None, options=options)
     except mypy.errors.CompileError as e:
         # Syntax error!
         for m in e.messages:
@@ -908,7 +908,7 @@ def parse_options(args: List[str]) -> Options:
         elif args[0] == '--include-private':
             include_private = True
         elif args[0] in ('-h', '--help'):
-            usage()
+            usage(exit_nonzero=False)
         else:
             raise SystemExit('Unrecognized option %s' % args[0])
         args = args[1:]
@@ -943,7 +943,7 @@ def default_python2_interpreter() -> str:
     raise SystemExit("Can't find a Python 2 interpreter -- please use the -p option")
 
 
-def usage() -> None:
+def usage(exit_nonzero: bool=True) -> None:
     usage = textwrap.dedent("""\
         usage: stubgen [--py2] [--no-import] [--doc-dir PATH]
                        [--search-path PATH] [-p PATH] [-o PATH]
@@ -976,7 +976,13 @@ def usage() -> None:
           -h, --help      print this help message and exit
     """.rstrip())
 
-    raise SystemExit(usage)
+    if exit_nonzero:
+        # The user made a mistake, so we should return with an error code
+        raise SystemExit(usage)
+    else:
+        # The user asked for help specifically, so we should exit with success
+        print(usage, file=sys.stderr)
+        sys.exit()
 
 
 if __name__ == '__main__':
diff --git a/mypy/test/data.py b/mypy/test/data.py
index a080ede..189c71f 100644
--- a/mypy/test/data.py
+++ b/mypy/test/data.py
@@ -8,13 +8,25 @@ from os import remove, rmdir
 import shutil
 
 import pytest  # type: ignore  # no pytest in typeshed
-from typing import Callable, List, Tuple, Set, Optional, Iterator, Any, Dict
+from typing import Callable, List, Tuple, Set, Optional, Iterator, Any, Dict, NamedTuple, Union
 
 from mypy.myunit import TestCase, SkipTestCaseException
+from mypy.test.config import test_temp_dir
 
 
 root_dir = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
 
+# File modify/create operation: copy module contents from source_path.
+UpdateFile = NamedTuple('UpdateFile', [('module', str),
+                                       ('source_path', str),
+                                       ('target_path', str)])
+
+# File delete operation: delete module file.
+DeleteFile = NamedTuple('DeleteFile', [('module', str),
+                                       ('path', str)])
+
+FileOperation = Union[UpdateFile, DeleteFile]
+
 
 def parse_test_cases(
         path: str,
@@ -192,6 +204,7 @@ class DataDrivenTestCase(TestCase):
     expected_stale_modules = None  # type: Dict[int, Set[str]]
     expected_rechecked_modules = None  # type: Dict[int, Set[str]]
 
+    # Files/directories to clean up after test case; (is directory, path) tuples
     clean_up = None  # type: List[Tuple[bool, str]]
 
     def __init__(self,
@@ -229,23 +242,23 @@ class DataDrivenTestCase(TestCase):
         super().set_up()
         encountered_files = set()
         self.clean_up = []
-        all_deleted = []  # type: List[str]
         for paths in self.deleted_paths.values():
-            all_deleted += paths
+            for path in paths:
+                self.clean_up.append((False, path))
+                encountered_files.add(path)
         for path, content in self.files:
             dir = os.path.dirname(path)
             for d in self.add_dirs(dir):
                 self.clean_up.append((True, d))
             with open(path, 'w') as f:
                 f.write(content)
-            if path not in all_deleted:
-                # TODO: Don't assume that deleted files don't get reintroduced.
+            if path not in encountered_files:
                 self.clean_up.append((False, path))
-            encountered_files.add(path)
+                encountered_files.add(path)
             if re.search(r'\.[2-9]$', path):
                 # Make sure new files introduced in the second and later runs are accounted for
                 renamed_path = path[:-2]
-                if renamed_path not in encountered_files and renamed_path not in all_deleted:
+                if renamed_path not in encountered_files:
                     encountered_files.add(renamed_path)
                     self.clean_up.append((False, renamed_path))
         for path, _ in self.output_files:
@@ -279,7 +292,12 @@ class DataDrivenTestCase(TestCase):
         # First remove files.
         for is_dir, path in reversed(self.clean_up):
             if not is_dir:
-                remove(path)
+                try:
+                    remove(path)
+                except FileNotFoundError:
+                    # Breaking early using Ctrl+C may happen before file creation. Also, some
+                    # files may be deleted by a test case.
+                    pass
         # Then remove directories.
         for is_dir, path in reversed(self.clean_up):
             if is_dir:
@@ -301,11 +319,47 @@ class DataDrivenTestCase(TestCase):
                     path = error.filename
                     # Be defensive -- only call rmtree if we're sure we aren't removing anything
                     # valuable.
-                    if path.startswith('tmp/') and os.path.isdir(path):
+                    if path.startswith(test_temp_dir + '/') and os.path.isdir(path):
                         shutil.rmtree(path)
                     raise
         super().tear_down()
 
+    def find_steps(self) -> List[List[FileOperation]]:
+        """Return a list of descriptions of file operations for each incremental step.
+
+        The first list item corresponds to the first incremental step, the second for the
+        second step, etc. Each operation can either be a file modification/creation (UpdateFile)
+        or deletion (DeleteFile).
+        """
+        steps = {}  # type: Dict[int, List[FileOperation]]
+        for path, _ in self.files:
+            m = re.match(r'.*\.([0-9]+)$', path)
+            if m:
+                num = int(m.group(1))
+                assert num >= 2
+                target_path = re.sub(r'\.[0-9]+$', '', path)
+                module = module_from_path(target_path)
+                operation = UpdateFile(module, path, target_path)
+                steps.setdefault(num, []).append(operation)
+        for num, paths in self.deleted_paths.items():
+            assert num >= 2
+            for path in paths:
+                module = module_from_path(path)
+                steps.setdefault(num, []).append(DeleteFile(module, path))
+        max_step = max(steps)
+        return [steps[num] for num in range(2, max_step + 1)]
+
+
+def module_from_path(path: str) -> str:
+    path = re.sub(r'\.py$', '', path)
+    # We can have a mix of Unix-style and Windows-style separators.
+    parts = re.split(r'[/\\]', path)
+    assert parts[0] == test_temp_dir
+    del parts[0]
+    module = '.'.join(parts)
+    module = re.sub(r'\.__init__$', '', module)
+    return module
+
 
 class TestItem:
     """Parsed test caseitem.
diff --git a/mypy/test/testdeps.py b/mypy/test/testdeps.py
index d6580c4..1cd1411 100644
--- a/mypy/test/testdeps.py
+++ b/mypy/test/testdeps.py
@@ -3,7 +3,7 @@
 import os
 from typing import List, Tuple, Dict, Optional
 
-from mypy import build
+from mypy import build, defaults
 from mypy.build import BuildSource
 from mypy.errors import CompileError
 from mypy.nodes import MypyFile, Expression
@@ -15,7 +15,12 @@ from mypy.test.helpers import assert_string_arrays_equal
 from mypy.types import Type
 
 files = [
-    'deps.test'
+    'deps.test',
+    'deps-types.test',
+    'deps-generics.test',
+    'deps-expressions.test',
+    'deps-statements.test',
+    'deps-classes.test',
 ]
 
 
@@ -31,30 +36,39 @@ class GetDependenciesSuite(DataSuite):
 
     def run_case(self, testcase: DataDrivenTestCase) -> None:
         src = '\n'.join(testcase.input)
-        messages, files, type_map = self.build(src)
+        if testcase.name.endswith('python2'):
+            python_version = defaults.PYTHON2_VERSION
+        else:
+            python_version = defaults.PYTHON3_VERSION
+        messages, files, type_map = self.build(src, python_version)
         a = messages
-        assert files is not None and type_map is not None, ('cases where CompileError'
-                                                            ' occurred should not be run')
-        deps = get_dependencies('__main__', files['__main__'], type_map)
+        if files is None or type_map is None:
+            if not a:
+                a = ['Unknown compile error (likely syntax error in test case or fixture)']
+        else:
+            deps = get_dependencies(files['__main__'], type_map, python_version)
 
-        for source, targets in sorted(deps.items()):
-            line = '%s -> %s' % (source, ', '.join(sorted(targets)))
-            # Clean up output a bit
-            line = line.replace('__main__', 'm')
-            a.append(line)
+            for source, targets in sorted(deps.items()):
+                line = '%s -> %s' % (source, ', '.join(sorted(targets)))
+                # Clean up output a bit
+                line = line.replace('__main__', 'm')
+                a.append(line)
 
         assert_string_arrays_equal(
             testcase.output, a,
             'Invalid output ({}, line {})'.format(testcase.file,
                                                   testcase.line))
 
-    def build(self, source: str) -> Tuple[List[str],
-                                          Optional[Dict[str, MypyFile]],
-                                          Optional[Dict[Expression, Type]]]:
+    def build(self,
+              source: str,
+              python_version: Tuple[int, int]) -> Tuple[List[str],
+                                                        Optional[Dict[str, MypyFile]],
+                                                        Optional[Dict[Expression, Type]]]:
         options = Options()
         options.use_builtins_fixtures = True
         options.show_traceback = True
         options.cache_dir = os.devnull
+        options.python_version = python_version
         try:
             result = build.build(sources=[BuildSource('main', None, source)],
                                  options=options,
diff --git a/mypy/test/testdiff.py b/mypy/test/testdiff.py
index 3f9d234..248a455 100644
--- a/mypy/test/testdiff.py
+++ b/mypy/test/testdiff.py
@@ -8,7 +8,7 @@ from mypy.build import BuildSource
 from mypy.errors import CompileError
 from mypy.nodes import MypyFile
 from mypy.options import Options
-from mypy.server.astdiff import compare_symbol_tables
+from mypy.server.astdiff import snapshot_symbol_table, compare_symbol_table_snapshots
 from mypy.test.config import test_temp_dir, test_data_prefix
 from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
 from mypy.test.helpers import assert_string_arrays_equal
@@ -46,10 +46,10 @@ class ASTDiffSuite(DataSuite):
 
         assert files1 is not None and files2 is not None, ('cases where CompileError'
                                                            ' occurred should not be run')
-        diff = compare_symbol_tables(
-            '__main__',
-            files1['__main__'].names,
-            files2['__main__'].names)
+        prefix = '__main__'
+        snapshot1 = snapshot_symbol_table(prefix, files1['__main__'].names)
+        snapshot2 = snapshot_symbol_table(prefix, files2['__main__'].names)
+        diff = compare_symbol_table_snapshots(prefix, snapshot1, snapshot2)
         for trigger in sorted(diff):
             a.append(trigger)
 
diff --git a/mypy/test/testdmypy.py b/mypy/test/testdmypy.py
new file mode 100644
index 0000000..298ca60
--- /dev/null
+++ b/mypy/test/testdmypy.py
@@ -0,0 +1,311 @@
+"""Type checker test cases"""
+
+import os
+import re
+import shutil
+import sys
+import time
+import typed_ast
+
+from typing import Dict, List, Optional, Set, Tuple
+
+from mypy import build
+from mypy import defaults
+from mypy.main import process_options
+from mypy.myunit import AssertionFailure
+from mypy.test.config import test_temp_dir, test_data_prefix
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
+from mypy.test.helpers import (
+    assert_string_arrays_equal, normalize_error_messages,
+    retry_on_error, testcase_pyversion, update_testcase_output,
+)
+from mypy.errors import CompileError
+from mypy.options import Options
+
+from mypy import experiments
+from mypy import dmypy
+from mypy import dmypy_server
+
+# List of files that contain test case descriptions.
+files = [
+    'check-enum.test',
+    'check-incremental.test',
+    'check-newtype.test',
+]
+
+
+class TypeCheckSuite(DataSuite):
+
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
+        if sys.platform == 'win32':
+            return []  # Nothing here works on Windows.
+        c = []  # type: List[DataDrivenTestCase]
+        for f in files:
+            tc = parse_test_cases(os.path.join(test_data_prefix, f),
+                                  None, test_temp_dir, True)
+            c += [case for case in tc
+                  if cls.has_stable_flags(case) and cls.is_incremental(case)]
+        return c
+
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        assert self.is_incremental(testcase), "Testcase is not incremental"
+        assert self.has_stable_flags(testcase), "Testcase has varying flags"
+        # All tests run once with a cold cache, then at least once
+        # with a warm cache and maybe changed files.  Expected output
+        # is specified separately for each run.
+        self.clear_cache()
+        num_steps = max([2] + list(testcase.output2.keys()))
+        # Check that there are no file changes beyond the last run (they would be ignored).
+        for dn, dirs, files in os.walk(os.curdir):
+            for file in files:
+                m = re.search(r'\.([2-9])$', file)
+                if m and int(m.group(1)) > num_steps:
+                    raise ValueError(
+                        'Output file {} exists though test case only has {} runs'.format(
+                            file, num_steps))
+        self.server = None  # type: Optional[dmypy_server.Server]
+        for step in range(1, num_steps + 1):
+            self.run_case_once(testcase, step)
+
+    @classmethod
+    def is_incremental(cls, testcase: DataDrivenTestCase) -> bool:
+        return 'incremental' in testcase.name.lower() or 'incremental' in testcase.file
+
+    @classmethod
+    def has_stable_flags(cls, testcase: DataDrivenTestCase) -> bool:
+        if any(re.match(r'# flags[2-9]:', line) for line in testcase.input):
+            return False
+        for filename, contents in testcase.files:
+            if os.path.basename(filename).startswith('mypy.ini.'):
+                return False
+        return True
+
+    def clear_cache(self) -> None:
+        dn = defaults.CACHE_DIR
+        if os.path.exists(dn):
+            shutil.rmtree(dn)
+
+    def run_case_once(self, testcase: DataDrivenTestCase, incremental_step: int) -> None:
+        assert incremental_step >= 1
+        build.find_module_clear_caches()
+        original_program_text = '\n'.join(testcase.input)
+        module_data = self.parse_module(original_program_text, incremental_step)
+
+        if incremental_step == 1:
+            # In run 1, copy program text to program file.
+            for module_name, program_path, program_text in module_data:
+                if module_name == '__main__':
+                    with open(program_path, 'w') as f:
+                        f.write(program_text)
+                    break
+        elif incremental_step > 1:
+            # In runs 2+, copy *.[num] files to * files.
+            for dn, dirs, files in os.walk(os.curdir):
+                for file in files:
+                    if file.endswith('.' + str(incremental_step)):
+                        full = os.path.join(dn, file)
+                        target = full[:-2]
+                        # Use retries to work around potential flakiness on Windows (AppVeyor).
+                        retry_on_error(lambda: shutil.copy(full, target))
+
+                        # In some systems, mtime has a resolution of 1 second which can cause
+                        # annoying-to-debug issues when a file has the same size after a
+                        # change. We manually set the mtime to circumvent this.
+                        new_time = os.stat(target).st_mtime + 1
+                        os.utime(target, times=(new_time, new_time))
+            # Delete files scheduled to be deleted in [delete <path>.num] sections.
+            for path in testcase.deleted_paths.get(incremental_step, set()):
+                # Use retries to work around potential flakiness on Windows (AppVeyor).
+                retry_on_error(lambda: os.remove(path))
+
+        # Parse options after moving files (in case mypy.ini is being moved).
+        options = self.parse_options(original_program_text, testcase, incremental_step)
+        if incremental_step == 1:
+            self.server = dmypy_server.Server([])  # TODO: Fix ugly API
+            self.server.options = options
+
+        assert self.server is not None  # Set in step 1 and survives into next steps
+        sources = []
+        for module_name, program_path, program_text in module_data:
+            # Always set to none so we're forced to reread the module in incremental mode
+            sources.append(build.BuildSource(program_path, module_name, None))
+        response = self.server.check(sources, alt_lib_path=test_temp_dir)
+        a = (response['out'] or response['err']).splitlines()
+        a = normalize_error_messages(a)
+
+        # Make sure error messages match
+        if incremental_step == 1:
+            msg = 'Unexpected type checker output in incremental, run 1 ({}, line {})'
+            output = testcase.output
+        elif incremental_step > 1:
+            msg = ('Unexpected type checker output in incremental, run {}'.format(
+                incremental_step) + ' ({}, line {})')
+            output = testcase.output2.get(incremental_step, [])
+        else:
+            raise AssertionError()
+
+        if output != a and self.update_data:
+            update_testcase_output(testcase, a)
+        assert_string_arrays_equal(output, a, msg.format(testcase.file, testcase.line))
+
+        manager = self.server.last_manager
+        if manager is not None:
+            if options.follow_imports == 'normal' and testcase.output is None:
+                self.verify_cache(module_data, a, manager)
+            if incremental_step > 1:
+                suffix = '' if incremental_step == 2 else str(incremental_step - 1)
+                self.check_module_equivalence(
+                    'rechecked' + suffix,
+                    testcase.expected_rechecked_modules.get(incremental_step - 1),
+                    manager.rechecked_modules)
+                self.check_module_equivalence(
+                    'stale' + suffix,
+                    testcase.expected_stale_modules.get(incremental_step - 1),
+                    manager.stale_modules)
+
+    def check_module_equivalence(self, name: str,
+                                 expected: Optional[Set[str]], actual: Set[str]) -> None:
+        if expected is not None:
+            expected_normalized = sorted(expected)
+            actual_normalized = sorted(actual.difference({"__main__"}))
+            assert_string_arrays_equal(
+                expected_normalized,
+                actual_normalized,
+                ('Actual modules ({}) do not match expected modules ({}) '
+                 'for "[{} ...]"').format(
+                    ', '.join(actual_normalized),
+                    ', '.join(expected_normalized),
+                    name))
+
+    def verify_cache(self, module_data: List[Tuple[str, str, str]], a: List[str],
+                     manager: build.BuildManager) -> None:
+        # There should be valid cache metadata for each module except
+        # those in error_paths; for those there should not be.
+        #
+        # NOTE: When A imports B and there's an error in B, the cache
+        # data for B is invalidated, but the cache data for A remains.
+        # However build.process_graphs() will ignore A's cache data.
+        #
+        # Also note that when A imports B, and there's an error in A
+        # _due to a valid change in B_, the cache data for B will be
+        # invalidated and updated, but the old cache data for A will
+        # remain unchanged. As before, build.process_graphs() will
+        # ignore A's (old) cache data.
+        error_paths = self.find_error_paths(a)
+        modules = self.find_module_files()
+        modules.update({module_name: path for module_name, path, text in module_data})
+        missing_paths = self.find_missing_cache_files(modules, manager)
+        if not missing_paths.issubset(error_paths):
+            raise AssertionFailure("cache data discrepancy %s != %s" %
+                                   (missing_paths, error_paths))
+
+    def find_error_paths(self, a: List[str]) -> Set[str]:
+        hits = set()
+        for line in a:
+            m = re.match(r'([^\s:]+):\d+: error:', line)
+            if m:
+                # Normalize to Linux paths.
+                p = m.group(1).replace(os.path.sep, '/')
+                hits.add(p)
+        return hits
+
+    def find_module_files(self) -> Dict[str, str]:
+        modules = {}
+        for dn, dirs, files in os.walk(test_temp_dir):
+            dnparts = dn.split(os.sep)
+            assert dnparts[0] == test_temp_dir
+            del dnparts[0]
+            for file in files:
+                if file.endswith('.py'):
+                    if file == "__init__.py":
+                        # If the file path is `a/b/__init__.py`, exclude the file name
+                        # and make sure the module id is just `a.b`, not `a.b.__init__`.
+                        id = '.'.join(dnparts)
+                    else:
+                        base, ext = os.path.splitext(file)
+                        id = '.'.join(dnparts + [base])
+                    modules[id] = os.path.join(dn, file)
+        return modules
+
+    def find_missing_cache_files(self, modules: Dict[str, str],
+                                 manager: build.BuildManager) -> Set[str]:
+        ignore_errors = True
+        missing = {}
+        for id, path in modules.items():
+            meta = build.find_cache_meta(id, path, manager)
+            if not build.validate_meta(meta, id, path, ignore_errors, manager):
+                missing[id] = path
+        return set(missing.values())
+
+    def parse_module(self,
+                     program_text: str,
+                     incremental_step: int) -> List[Tuple[str, str, str]]:
+        """Return the module and program names for a test case.
+
+        Normally, the unit tests will parse the default ('__main__')
+        module and follow all the imports listed there. You can override
+        this behavior and instruct the tests to check multiple modules
+        by using a comment like this in the test case input:
+
+          # cmd: mypy -m foo.bar foo.baz
+
+        You can also use `# cmdN:` to have a different cmd for incremental
+        step N (2, 3, ...).
+
+        Return a list of tuples (module name, file name, program text).
+        """
+        m = re.search('# cmd: mypy -m ([a-zA-Z0-9_. ]+)$', program_text, flags=re.MULTILINE)
+        regex = '# cmd{}: mypy -m ([a-zA-Z0-9_. ]+)$'.format(incremental_step)
+        alt_m = re.search(regex, program_text, flags=re.MULTILINE)
+        if alt_m is not None and incremental_step > 1:
+            # Optionally return a different command if in a later step
+            # of incremental mode, otherwise default to reusing the
+            # original cmd.
+            m = alt_m
+
+        if m:
+            # The test case wants to use a non-default main
+            # module. Look up the module and give it as the thing to
+            # analyze.
+            module_names = m.group(1)
+            out = []
+            for module_name in module_names.split(' '):
+                path = build.find_module(module_name, [test_temp_dir])
+                assert path is not None, "Can't find ad hoc case file"
+                with open(path) as f:
+                    program_text = f.read()
+                out.append((module_name, path, program_text))
+            return out
+        else:
+            return [('__main__', 'main', program_text)]
+
+    def parse_options(self, program_text: str, testcase: DataDrivenTestCase,
+                      incremental_step: int) -> Options:
+        options = Options()
+        flags = re.search('# flags: (.*)$', program_text, flags=re.MULTILINE)
+        if incremental_step > 1:
+            flags2 = re.search('# flags{}: (.*)$'.format(incremental_step), program_text,
+                               flags=re.MULTILINE)
+            if flags2:
+                flags = flags2
+
+        flag_list = None
+        if flags:
+            flag_list = flags.group(1).split()
+            targets, options = process_options(flag_list, require_targets=False)
+            if targets:
+                raise RuntimeError('Specifying targets via the flags pragma is not supported.')
+        else:
+            options = Options()
+
+        # Allow custom python version to override testcase_pyversion
+        if (not flag_list or
+                all(flag not in flag_list for flag in ['--python-version', '-2', '--py2'])):
+            options.python_version = testcase_pyversion(testcase.file, testcase.name)
+
+        options.use_builtins_fixtures = True
+        options.show_traceback = True
+        options.incremental = True
+
+        return options
diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py
index 7e442d5..13a1a63 100644
--- a/mypy/test/testfinegrained.py
+++ b/mypy/test/testfinegrained.py
@@ -10,7 +10,7 @@ information.
 import os
 import re
 import shutil
-from typing import List, Tuple, Dict
+from typing import List, Tuple, Dict, Optional, Set
 
 from mypy import build
 from mypy.build import BuildManager, BuildSource, Graph
@@ -22,7 +22,7 @@ from mypy.server.subexpr import get_subexpressions
 from mypy.server.update import FineGrainedBuildManager
 from mypy.strconv import StrConv, indent
 from mypy.test.config import test_temp_dir, test_data_prefix
-from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite, UpdateFile
 from mypy.test.helpers import assert_string_arrays_equal
 from mypy.test.testtypegen import ignore_node
 from mypy.types import TypeStrVisitor, Type
@@ -30,7 +30,10 @@ from mypy.util import short_type
 
 
 files = [
-    'fine-grained.test'
+    'fine-grained.test',
+    'fine-grained-cycles.test',
+    'fine-grained-blockers.test',
+    'fine-grained-modules.test',
 ]
 
 
@@ -49,21 +52,24 @@ class FineGrainedSuite(DataSuite):
 
         a = []
         if messages:
-            a.extend(messages)
+            a.extend(normalize_messages(messages))
 
         fine_grained_manager = FineGrainedBuildManager(manager, graph)
 
-        steps = find_steps()
-        for changed_paths in steps:
+        steps = testcase.find_steps()
+        for operations in steps:
             modules = []
-            for module, path in changed_paths:
-                new_path = re.sub(r'\.[0-9]+$', '', path)
-                shutil.copy(path, new_path)
-                modules.append(module)
-
+            for op in operations:
+                if isinstance(op, UpdateFile):
+                    # Modify/create file
+                    shutil.copy(op.source_path, op.target_path)
+                    modules.append((op.module, op.target_path))
+                else:
+                    # Delete file
+                    os.remove(op.path)
+                    modules.append((op.module, op.path))
             new_messages = fine_grained_manager.update(modules)
-            new_messages = [re.sub('^tmp' + re.escape(os.sep), '', message)
-                            for message in new_messages]
+            new_messages = normalize_messages(new_messages)
 
             a.append('==')
             a.extend(new_messages)
@@ -78,11 +84,14 @@ class FineGrainedSuite(DataSuite):
 
     def build(self, source: str) -> Tuple[List[str], BuildManager, Graph]:
         options = Options()
+        options.incremental = True
         options.use_builtins_fixtures = True
         options.show_traceback = True
-        options.cache_dir = os.devnull
+        main_path = os.path.join(test_temp_dir, 'main')
+        with open(main_path, 'w') as f:
+            f.write(source)
         try:
-            result = build.build(sources=[BuildSource('main', None, source)],
+            result = build.build(sources=[BuildSource(main_path, None, None)],
                                  options=options,
                                  alt_lib_path=test_temp_dir)
         except CompileError as e:
@@ -92,26 +101,6 @@ class FineGrainedSuite(DataSuite):
         return result.errors, result.manager, result.graph
 
 
-def find_steps() -> List[List[Tuple[str, str]]]:
-    """Return a list of build step representations.
-
-    Each build step is a list of (module id, path) tuples, and each
-    path is of form 'dir/mod.py.2' (where 2 is the step number).
-    """
-    steps = {}  # type: Dict[int, List[Tuple[str, str]]]
-    for dn, dirs, files in os.walk(test_temp_dir):
-        dnparts = dn.split(os.sep)
-        assert dnparts[0] == test_temp_dir
-        del dnparts[0]
-        for filename in files:
-            m = re.match(r'.*\.([0-9]+)$', filename)
-            if m:
-                num = int(m.group(1))
-                assert num >= 2
-                name = re.sub(r'\.py.*', '', filename)
-                module = '.'.join(dnparts + [name])
-                module = re.sub(r'\.__init__$', '', module)
-                path = os.path.join(dn, filename)
-                steps.setdefault(num, []).append((module, path))
-    max_step = max(steps)
-    return [steps[num] for num in range(2, max_step + 1)]
+def normalize_messages(messages: List[str]) -> List[str]:
+    return [re.sub('^tmp' + re.escape(os.sep), '', message)
+            for message in messages]
diff --git a/mypy/test/testinfer.py b/mypy/test/testinfer.py
index 72bb26f..39ab474 100644
--- a/mypy/test/testinfer.py
+++ b/mypy/test/testinfer.py
@@ -6,7 +6,7 @@ from mypy.myunit import Suite, assert_equal, assert_true
 from mypy.checkexpr import map_actuals_to_formals
 from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2, ARG_NAMED
 from mypy.types import AnyType, TupleType, Type, TypeOfAny
-from mypy.typefixture import TypeFixture
+from mypy.test.typefixture import TypeFixture
 
 
 class MapActualsToFormalsSuite(Suite):
diff --git a/mypy/test/testmerge.py b/mypy/test/testmerge.py
index 4694865..7579a57 100644
--- a/mypy/test/testmerge.py
+++ b/mypy/test/testmerge.py
@@ -8,16 +8,16 @@ from mypy import build
 from mypy.build import BuildManager, BuildSource, State
 from mypy.errors import Errors, CompileError
 from mypy.nodes import (
-    Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression
+    Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression, UNBOUND_IMPORTED
 )
 from mypy.options import Options
 from mypy.server.astmerge import merge_asts
 from mypy.server.subexpr import get_subexpressions
-from mypy.server.update import build_incremental_step, replace_modules_with_new_variants
+from mypy.server.update import FineGrainedBuildManager
 from mypy.strconv import StrConv, indent
 from mypy.test.config import test_temp_dir, test_data_prefix
 from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
-from mypy.test.helpers import assert_string_arrays_equal
+from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages
 from mypy.test.testtypegen import ignore_node
 from mypy.types import TypeStrVisitor, Type
 from mypy.util import short_type, IdMapper
@@ -35,6 +35,9 @@ TYPES = 'TYPES'
 AST = 'AST'
 
 
+NOT_DUMPED_MODULES = ('builtins', 'typing', 'abc')
+
+
 class ASTMergeSuite(DataSuite):
     def __init__(self, *, update_data: bool) -> None:
         super().__init__(update_data=update_data)
@@ -67,33 +70,29 @@ class ASTMergeSuite(DataSuite):
         main_src = '\n'.join(testcase.input)
         messages, manager, graph = self.build(main_src)
         assert manager is not None, 'cases where CompileError occurred should not be run'
+        fine_grained_manager = FineGrainedBuildManager(manager, graph)
 
         a = []
         if messages:
             a.extend(messages)
 
-        shutil.copy(os.path.join(test_temp_dir, 'target.py.next'),
-                    os.path.join(test_temp_dir, 'target.py'))
-
-        a.extend(self.dump(manager.modules, graph, kind))
+        target_path = os.path.join(test_temp_dir, 'target.py')
+        shutil.copy(os.path.join(test_temp_dir, 'target.py.next'), target_path)
 
-        old_modules = dict(manager.modules)
-        old_subexpr = get_subexpressions(old_modules['target'])
-
-        new_file, new_types = self.build_increment(manager, 'target')
-        replace_modules_with_new_variants(manager,
-                                          graph,
-                                          old_modules,
-                                          {'target': new_file},
-                                          {'target': new_types})
+        a.extend(self.dump(manager, kind))
+        old_subexpr = get_subexpressions(manager.modules['target'])
 
         a.append('==>')
-        a.extend(self.dump(manager.modules, graph, kind))
+
+        new_file, new_types = self.build_increment(fine_grained_manager, 'target', target_path)
+        a.extend(self.dump(manager, kind))
 
         for expr in old_subexpr:
             # Verify that old AST nodes are removed from the expression type map.
             assert expr not in new_types
 
+        a = normalize_error_messages(a)
+
         assert_string_arrays_equal(
             testcase.output, a,
             'Invalid output ({}, line {})'.format(testcase.file,
@@ -101,11 +100,14 @@ class ASTMergeSuite(DataSuite):
 
     def build(self, source: str) -> Tuple[List[str], Optional[BuildManager], Dict[str, State]]:
         options = Options()
+        options.incremental = True
         options.use_builtins_fixtures = True
         options.show_traceback = True
-        options.cache_dir = os.devnull
+        main_path = os.path.join(test_temp_dir, 'main')
+        with open(main_path, 'w') as f:
+            f.write(source)
         try:
-            result = build.build(sources=[BuildSource('main', None, source)],
+            result = build.build(sources=[BuildSource(main_path, None, None)],
                                  options=options,
                                  alt_lib_path=test_temp_dir)
         except CompileError as e:
@@ -113,16 +115,18 @@ class ASTMergeSuite(DataSuite):
             return e.messages, None, {}
         return result.errors, result.manager, result.graph
 
-    def build_increment(self, manager: BuildManager,
-                        module_id: str) -> Tuple[MypyFile,
-                                                 Dict[Expression, Type]]:
-        module_dict, type_maps = build_incremental_step(manager, [module_id])
-        return module_dict[module_id], type_maps[module_id]
+    def build_increment(self, manager: FineGrainedBuildManager,
+                        module_id: str, path: str) -> Tuple[MypyFile,
+                                                            Dict[Expression, Type]]:
+        manager.update([(module_id, path)])
+        module = manager.manager.modules[module_id]
+        type_map = manager.type_maps[module_id]
+        return module, type_map
 
     def dump(self,
-             modules: Dict[str, MypyFile],
-             graph: Dict[str, State],
+             manager: BuildManager,
              kind: str) -> List[str]:
+        modules = manager.modules
         if kind == AST:
             return self.dump_asts(modules)
         elif kind == TYPEINFO:
@@ -130,14 +134,14 @@ class ASTMergeSuite(DataSuite):
         elif kind == SYMTABLE:
             return self.dump_symbol_tables(modules)
         elif kind == TYPES:
-            return self.dump_types(graph)
+            return self.dump_types(manager)
         assert False, 'Invalid kind %s' % kind
 
     def dump_asts(self, modules: Dict[str, MypyFile]) -> List[str]:
         a = []
         for m in sorted(modules):
-            if m == 'builtins':
-                # We don't support incremental checking of changes to builtins.
+            if m in NOT_DUMPED_MODULES:
+                # We don't support incremental checking of changes to builtins, etc.
                 continue
             s = modules[m].accept(self.str_conv)
             a.extend(s.splitlines())
@@ -146,8 +150,8 @@ class ASTMergeSuite(DataSuite):
     def dump_symbol_tables(self, modules: Dict[str, MypyFile]) -> List[str]:
         a = []
         for id in sorted(modules):
-            if id == 'builtins':
-                # We don't support incremental checking of changes to builtins.
+            if not is_dumped_module(id):
+                # We don't support incremental checking of changes to builtins, etc.
                 continue
             a.extend(self.dump_symbol_table(id, modules[id].names))
         return a
@@ -161,18 +165,24 @@ class ASTMergeSuite(DataSuite):
         return a
 
     def format_symbol_table_node(self, node: SymbolTableNode) -> str:
-        if node is None:
+        if node.node is None:
+            if node.kind == UNBOUND_IMPORTED:
+                return 'UNBOUND_IMPORTED'
             return 'None'
         if isinstance(node.node, Node):
-            return '{}<{}>'.format(str(type(node.node).__name__),
-                                   self.id_mapper.id(node.node))
-        # TODO: type_override?
-        return '?'
+            s = '{}<{}>'.format(str(type(node.node).__name__),
+                                self.id_mapper.id(node.node))
+        else:
+            s = '? ({})'.format(type(node.node))
+        if node.type_override:
+            override = self.format_type(node.type_override)
+            s += '(type_override={})'.format(override)
+        return s
 
     def dump_typeinfos(self, modules: Dict[str, MypyFile]) -> List[str]:
         a = []
         for id in sorted(modules):
-            if id == 'builtins':
+            if not is_dumped_module(id):
                 continue
             a.extend(self.dump_typeinfos_recursive(modules[id].names))
         return a
@@ -190,14 +200,14 @@ class ASTMergeSuite(DataSuite):
                       type_str_conv=self.type_str_conv)
         return s.splitlines()
 
-    def dump_types(self, graph: Dict[str, State]) -> List[str]:
+    def dump_types(self, manager: BuildManager) -> List[str]:
         a = []
         # To make the results repeatable, we try to generate unique and
         # deterministic sort keys.
-        for module_id in sorted(graph):
-            if module_id == 'builtins':
+        for module_id in sorted(manager.modules):
+            if not is_dumped_module(module_id):
                 continue
-            type_map = graph[module_id].type_checker.type_map
+            type_map = manager.saved_cache[module_id][2]
             if type_map:
                 a.append('## {}'.format(module_id))
                 for expr in sorted(type_map, key=lambda n: (n.line, short_type(n),
@@ -205,5 +215,12 @@ class ASTMergeSuite(DataSuite):
                     typ = type_map[expr]
                     a.append('{}:{}: {}'.format(short_type(expr),
                                                 expr.line,
-                                                typ.accept(self.type_str_conv)))
+                                                self.format_type(typ)))
         return a
+
+    def format_type(self, typ: Type) -> str:
+        return typ.accept(self.type_str_conv)
+
+
+def is_dumped_module(id: str) -> bool:
+    return id not in NOT_DUMPED_MODULES and (not id.startswith('_') or id == '__main__')
diff --git a/mypy/test/testparse.py b/mypy/test/testparse.py
index ef9632a..b66dcab 100644
--- a/mypy/test/testparse.py
+++ b/mypy/test/testparse.py
@@ -46,6 +46,7 @@ def test_parser(testcase: DataDrivenTestCase) -> None:
     try:
         n = parse(bytes('\n'.join(testcase.input), 'ascii'),
                   fnam='main',
+                  module='__main__',
                   errors=None,
                   options=options)
         a = str(n).split('\n')
@@ -76,9 +77,11 @@ class ParseErrorSuite(DataSuite):
 def test_parse_error(testcase: DataDrivenTestCase) -> None:
     try:
         # Compile temporary file. The test file contains non-ASCII characters.
-        parse(bytes('\n'.join(testcase.input), 'utf-8'), INPUT_FILE_NAME, None, Options())
+        parse(bytes('\n'.join(testcase.input), 'utf-8'), INPUT_FILE_NAME, '__main__', None,
+              Options())
         raise AssertionFailure('No errors reported')
     except CompileError as e:
+        assert e.module_with_blocker == '__main__'
         # Verify that there was a compile error and that the error messages
         # are equivalent.
         assert_string_arrays_equal(
diff --git a/mypy/test/testreports.py b/mypy/test/testreports.py
index 80e6980..285e833 100644
--- a/mypy/test/testreports.py
+++ b/mypy/test/testreports.py
@@ -4,7 +4,7 @@ import textwrap
 from mypy.myunit import Suite, assert_equal
 from mypy.report import CoberturaPackage, get_line_rate
 
-import lxml.etree as etree
+import lxml.etree as etree  # type: ignore
 
 
 class CoberturaReportSuite(Suite):
diff --git a/mypy/test/testsolve.py b/mypy/test/testsolve.py
index 1b8dc83..5d68f6c 100644
--- a/mypy/test/testsolve.py
+++ b/mypy/test/testsolve.py
@@ -5,7 +5,7 @@ from typing import List, Union, Tuple, Optional
 from mypy.myunit import Suite, assert_equal
 from mypy.constraints import SUPERTYPE_OF, SUBTYPE_OF, Constraint
 from mypy.solve import solve_constraints
-from mypy.typefixture import TypeFixture
+from mypy.test.typefixture import TypeFixture
 from mypy.types import Type, TypeVarType, TypeVarId
 
 
diff --git a/mypy/test/testsubtypes.py b/mypy/test/testsubtypes.py
index 2d03945..7a19c6c 100644
--- a/mypy/test/testsubtypes.py
+++ b/mypy/test/testsubtypes.py
@@ -1,7 +1,7 @@
 from mypy.myunit import Suite, assert_true
 from mypy.nodes import CONTRAVARIANT, INVARIANT, COVARIANT
 from mypy.subtypes import is_subtype
-from mypy.typefixture import TypeFixture, InterfaceTypeFixture
+from mypy.test.typefixture import TypeFixture, InterfaceTypeFixture
 from mypy.types import Type
 
 
diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py
index 3c7b618..8b025d6 100644
--- a/mypy/test/testtypes.py
+++ b/mypy/test/testtypes.py
@@ -15,7 +15,7 @@ from mypy.types import (
 )
 from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, CONTRAVARIANT, INVARIANT, COVARIANT
 from mypy.subtypes import is_subtype, is_more_precise, is_proper_subtype
-from mypy.typefixture import TypeFixture, InterfaceTypeFixture
+from mypy.test.typefixture import TypeFixture, InterfaceTypeFixture
 
 
 class TypesSuite(Suite):
@@ -76,18 +76,18 @@ class TypesSuite(Suite):
                                    self.fx.std_tuple)), 'Tuple[X?, Any]')
 
     def test_type_variable_binding(self) -> None:
-        assert_equal(str(TypeVarDef('X', 1, [], self.fx.o)), 'X')
-        assert_equal(str(TypeVarDef('X', 1, [self.x, self.y], self.fx.o)),
+        assert_equal(str(TypeVarDef('X', 'X', 1, [], self.fx.o)), 'X')
+        assert_equal(str(TypeVarDef('X', 'X', 1, [self.x, self.y], self.fx.o)),
                      'X in (X?, Y?)')
 
     def test_generic_function_type(self) -> None:
         c = CallableType([self.x, self.y], [ARG_POS, ARG_POS], [None, None],
                      self.y, self.function, name=None,
-                     variables=[TypeVarDef('X', -1, [], self.fx.o)])
+                     variables=[TypeVarDef('X', 'X', -1, [], self.fx.o)])
         assert_equal(str(c), 'def [X] (X?, Y?) -> Y?')
 
-        v = [TypeVarDef('Y', -1, [], self.fx.o),
-             TypeVarDef('X', -2, [], self.fx.o)]
+        v = [TypeVarDef('Y', 'Y', -1, [], self.fx.o),
+             TypeVarDef('X', 'X', -2, [], self.fx.o)]
         c2 = CallableType([], [], [], NoneTyp(), self.function, name=None, variables=v)
         assert_equal(str(c2), 'def [Y, X] ()')
 
@@ -346,7 +346,7 @@ class TypeOpsSuite(Suite):
         tv = []  # type: List[TypeVarDef]
         n = -1
         for v in vars:
-            tv.append(TypeVarDef(v, n, [], self.fx.o))
+            tv.append(TypeVarDef(v, v, n, [], self.fx.o))
             n -= 1
         return CallableType(list(a[:-1]),
                             [ARG_POS] * (len(a) - 1),
diff --git a/mypy/typefixture.py b/mypy/test/typefixture.py
similarity index 98%
rename from mypy/typefixture.py
rename to mypy/test/typefixture.py
index ead0c53..47f736f 100644
--- a/mypy/typefixture.py
+++ b/mypy/test/typefixture.py
@@ -29,7 +29,7 @@ class TypeFixture:
 
         def make_type_var(name: str, id: int, values: List[Type], upper_bound: Type,
                           variance: int) -> TypeVarType:
-            return TypeVarType(TypeVarDef(name, id, values, upper_bound, variance))
+            return TypeVarType(TypeVarDef(name, name, id, values, upper_bound, variance))
 
         self.t = make_type_var('T', 1, [], self.o, variance)     # T`1 (type variable)
         self.tf = make_type_var('T', -1, [], self.o, variance)   # T`-1 (type variable)
@@ -216,7 +216,7 @@ class TypeFixture:
                     variance = variances[id - 1]
                 else:
                     variance = COVARIANT
-                v.append(TypeVarDef(n, id, [], self.o, variance=variance))
+                v.append(TypeVarDef(n, n, id, [], self.o, variance=variance))
             class_def.type_vars = v
 
         info = TypeInfo(SymbolTable(), class_def, module_name)
diff --git a/mypy/traverser.py b/mypy/traverser.py
index 53d5a9f..690d726 100644
--- a/mypy/traverser.py
+++ b/mypy/traverser.py
@@ -264,3 +264,7 @@ class TraverserVisitor(NodeVisitor[None]):
 
     def visit_exec_stmt(self, o: ExecStmt) -> None:
         o.expr.accept(self)
+        if o.globals:
+            o.globals.accept(self)
+        if o.locals:
+            o.locals.accept(self)
diff --git a/mypy/treetransform.py b/mypy/treetransform.py
index 17d33e3..690379d 100644
--- a/mypy/treetransform.py
+++ b/mypy/treetransform.py
@@ -38,8 +38,8 @@ class TransformVisitor(NodeVisitor[Node]):
 
      * Do not duplicate TypeInfo nodes. This would generally not be desirable.
      * Only update some name binding cross-references, but only those that
-       refer to Var or FuncDef nodes, not those targeting ClassDef or TypeInfo
-       nodes.
+       refer to Var, Decorator or FuncDef nodes, not those targeting ClassDef or
+       TypeInfo nodes.
      * Types are not transformed, but you can override type() to also perform
        type transformation.
 
@@ -284,8 +284,8 @@ class TransformVisitor(NodeVisitor[Node]):
 
     def visit_exec_stmt(self, node: ExecStmt) -> ExecStmt:
         return ExecStmt(self.expr(node.expr),
-                        self.optional_expr(node.variables1),
-                        self.optional_expr(node.variables2))
+                        self.optional_expr(node.globals),
+                        self.optional_expr(node.locals))
 
     def visit_star_expr(self, node: StarExpr) -> StarExpr:
         return StarExpr(node.expr)
@@ -337,11 +337,14 @@ class TransformVisitor(NodeVisitor[Node]):
         target = original.node
         if isinstance(target, Var):
             target = self.visit_var(target)
+        elif isinstance(target, Decorator):
+            target = self.visit_var(target.var)
         elif isinstance(target, FuncDef):
             # Use a placeholder node for the function if it exists.
             target = self.func_placeholder_map.get(target, target)
         new.node = target
-        new.is_def = original.is_def
+        new.is_new_def = original.is_new_def
+        new.is_inferred_def = original.is_inferred_def
 
     def visit_yield_from_expr(self, node: YieldFromExpr) -> YieldFromExpr:
         return YieldFromExpr(self.expr(node.expr))
diff --git a/mypy/tvar_scope.py b/mypy/tvar_scope.py
index 66338f4..e9f264a 100644
--- a/mypy/tvar_scope.py
+++ b/mypy/tvar_scope.py
@@ -59,10 +59,14 @@ class TypeVarScope:
         else:
             self.func_id -= 1
             i = self.func_id
-        tvar_def = TypeVarDef(
-            name, i, values=tvar_expr.values,
-            upper_bound=tvar_expr.upper_bound, variance=tvar_expr.variance,
-            line=tvar_expr.line, column=tvar_expr.column)
+        tvar_def = TypeVarDef(name,
+                              tvar_expr.fullname(),
+                              i,
+                              values=tvar_expr.values,
+                              upper_bound=tvar_expr.upper_bound,
+                              variance=tvar_expr.variance,
+                              line=tvar_expr.line,
+                              column=tvar_expr.column)
         self.scope[tvar_expr.fullname()] = tvar_def
         return tvar_def
 
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index 6de9b1e..eccca53 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -14,7 +14,7 @@ from mypy.types import (
     Type, UnboundType, TypeVarType, TupleType, TypedDictType, UnionType, Instance, AnyType,
     CallableType, NoneTyp, DeletedType, TypeList, TypeVarDef, TypeVisitor, SyntheticTypeVisitor,
     StarType, PartialType, EllipsisType, UninhabitedType, TypeType, get_typ_args, set_typ_args,
-    CallableArgument, get_type_vars, TypeQuery, union_items, TypeOfAny, ForwardRef
+    CallableArgument, get_type_vars, TypeQuery, union_items, TypeOfAny, ForwardRef, Overloaded
 )
 
 from mypy.nodes import (
@@ -53,7 +53,7 @@ ARG_KINDS_BY_CONSTRUCTOR = {
 
 
 def analyze_type_alias(node: Expression,
-                       lookup_func: Callable[[str, Context], SymbolTableNode],
+                       lookup_func: Callable[[str, Context], Optional[SymbolTableNode]],
                        lookup_fqn_func: Callable[[str], SymbolTableNode],
                        tvar_scope: TypeVarScope,
                        fail_func: Callable[[str, Context], None],
@@ -144,7 +144,7 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
     global_scope = True  # type: bool
 
     def __init__(self,
-                 lookup_func: Callable[[str, Context], SymbolTableNode],
+                 lookup_func: Callable[[str, Context], Optional[SymbolTableNode]],
                  lookup_fqn_func: Callable[[str], SymbolTableNode],
                  tvar_scope: Optional[TypeVarScope],
                  fail_func: Callable[[str, Context], None],
@@ -213,7 +213,7 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
             elif fullname == 'typing.Tuple':
                 if len(t.args) == 0 and not t.empty_tuple_index:
                     # Bare 'Tuple' is same as 'tuple'
-                    if 'generics' in self.options.disallow_any and not self.is_typeshed_stub:
+                    if self.options.disallow_any_generics and not self.is_typeshed_stub:
                         self.fail(messages.BARE_GENERIC, t)
                     typ = self.named_type('builtins.tuple', line=t.line, column=t.column)
                     typ.from_generic_builtin = True
@@ -389,6 +389,8 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
         # generate errors elsewhere, and Tuple[t1, t2, ...] must be used instead.
         if t.implicit and not self.allow_tuple_literal:
             self.fail('Invalid tuple literal type', t)
+            if len(t.items) == 1:
+                self.note_func('Suggestion: Is there a spurious trailing comma?', t)
             return AnyType(TypeOfAny.from_error)
         star_count = sum(1 for item in t.items if isinstance(item, StarType))
         if star_count > 1:
@@ -467,7 +469,7 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
                 self.fail('The first argument to Callable must be a list of types or "..."', t)
                 return AnyType(TypeOfAny.from_error)
         else:
-            self.fail('Invalid function type', t)
+            self.fail('Please use "Callable[[<parameters>], <return type>]" or "Callable"', t)
             return AnyType(TypeOfAny.from_error)
         assert isinstance(ret, CallableType)
         return ret.accept(self)
@@ -555,7 +557,9 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
             return []  # We are in third pass, nothing new here
         if fun_type.variables:
             for var in fun_type.variables:
-                var_expr = self.lookup(var.name, var).node
+                var_node = self.lookup(var.name, var)
+                assert var_node, "Binding for function type variable not found within function"
+                var_expr = var_node.node
                 assert isinstance(var_expr, TypeVarExpr)
                 self.tvar_scope.bind(var.name, var_expr)
             return fun_type.variables
@@ -575,8 +579,12 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
         return defs
 
     def is_defined_type_var(self, tvar: str, context: Context) -> bool:
-        return (self.tvar_scope is not None and
-                self.tvar_scope.get_binding(self.lookup(tvar, context)) is not None)
+        if self.tvar_scope is None:
+            return False
+        tvar_node = self.lookup(tvar, context)
+        if not tvar_node:
+            return False
+        return self.tvar_scope.get_binding(tvar_node) is not None
 
     def anal_array(self, a: List[Type], nested: bool = True) -> List[Type]:
         res = []  # type: List[Type]
@@ -596,7 +604,10 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
     def anal_var_defs(self, var_defs: List[TypeVarDef]) -> List[TypeVarDef]:
         a = []  # type: List[TypeVarDef]
         for vd in var_defs:
-            a.append(TypeVarDef(vd.name, vd.id.raw_id, self.anal_array(vd.values),
+            a.append(TypeVarDef(vd.name,
+                                vd.fullname,
+                                vd.id.raw_id,
+                                self.anal_array(vd.values),
                                 vd.upper_bound.accept(self),
                                 vd.variance,
                                 vd.line))
@@ -638,7 +649,7 @@ class TypeAnalyserPass3(TypeVisitor[None]):
     """
 
     def __init__(self,
-                 lookup_func: Callable[[str, Context], SymbolTableNode],
+                 lookup_func: Callable[[str, Context], Optional[SymbolTableNode]],
                  lookup_fqn_func: Callable[[str], SymbolTableNode],
                  fail_func: Callable[[str, Context], None],
                  note_func: Callable[[str, Context], None],
@@ -663,7 +674,7 @@ class TypeAnalyserPass3(TypeVisitor[None]):
         if len(t.args) != len(info.type_vars):
             if len(t.args) == 0:
                 from_builtins = t.type.fullname() in nongen_builtins and not t.from_generic_builtin
-                if ('generics' in self.options.disallow_any and
+                if (self.options.disallow_any_generics and
                         not self.is_typeshed_stub and
                         from_builtins):
                     alternative = nongen_builtins[t.type.fullname()]
@@ -759,6 +770,10 @@ class TypeAnalyserPass3(TypeVisitor[None]):
         for arg_type in t.arg_types:
             arg_type.accept(self)
 
+    def visit_overloaded(self, t: Overloaded) -> None:
+        for item in t.items():
+            item.accept(self)
+
     def visit_tuple_type(self, t: TupleType) -> None:
         for item in t.items:
             item.accept(self)
@@ -878,7 +893,7 @@ def flatten_tvars(ll: Iterable[List[T]]) -> List[T]:
 class TypeVariableQuery(TypeQuery[TypeVarList]):
 
     def __init__(self,
-                 lookup: Callable[[str, Context], SymbolTableNode],
+                 lookup: Callable[[str, Context], Optional[SymbolTableNode]],
                  scope: 'TypeVarScope',
                  *,
                  include_callables: bool = True,
@@ -920,7 +935,7 @@ def check_for_explicit_any(typ: Optional[Type],
                            is_typeshed_stub: bool,
                            msg: MessageBuilder,
                            context: Context) -> None:
-    if ('explicit' in options.disallow_any and
+    if (options.disallow_any_explicit and
             not is_typeshed_stub and
             typ and
             has_explicit_any(typ)):
diff --git a/mypy/types.py b/mypy/types.py
index e5cc8ed..123654e 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -3,9 +3,10 @@
 import copy
 from abc import abstractmethod
 from collections import OrderedDict
+from enum import Enum
 from typing import (
     Any, TypeVar, Dict, List, Tuple, cast, Generic, Set, Optional, Union, Iterable, NamedTuple,
-    Callable
+    Callable, Sequence
 )
 
 import mypy.nodes
@@ -105,18 +106,20 @@ class TypeVarId:
 class TypeVarDef(mypy.nodes.Context):
     """Definition of a single type variable."""
 
-    name = ''
+    name = ''  # Name (may be qualified)
+    fullname = ''  # Fully qualified name
     id = None  # type: TypeVarId
     values = None  # type: List[Type]  # Value restriction, empty list if no restriction
     upper_bound = None  # type: Type
     variance = INVARIANT  # type: int
 
-    def __init__(self, name: str, id: Union[TypeVarId, int], values: List[Type],
+    def __init__(self, name: str, fullname: str, id: Union[TypeVarId, int], values: List[Type],
                  upper_bound: Type, variance: int = INVARIANT, line: int = -1,
                  column: int = -1) -> None:
         super().__init__(line, column)
         assert values is not None, "No restrictions must be represented by empty list"
         self.name = name
+        self.fullname = fullname
         if isinstance(id, int):
             id = TypeVarId(id)
         self.id = id
@@ -127,7 +130,7 @@ class TypeVarDef(mypy.nodes.Context):
     @staticmethod
     def new_unification_variable(old: 'TypeVarDef') -> 'TypeVarDef':
         new_id = TypeVarId.new(meta_level=1)
-        return TypeVarDef(old.name, new_id, old.values,
+        return TypeVarDef(old.name, old.fullname, new_id, old.values,
                           old.upper_bound, old.variance, old.line, old.column)
 
     def __repr__(self) -> str:
@@ -142,6 +145,7 @@ class TypeVarDef(mypy.nodes.Context):
         assert not self.id.is_meta_var()
         return {'.class': 'TypeVarDef',
                 'name': self.name,
+                'fullname': self.fullname,
                 'id': self.id.raw_id,
                 'values': [v.serialize() for v in self.values],
                 'upper_bound': self.upper_bound.serialize(),
@@ -152,6 +156,7 @@ class TypeVarDef(mypy.nodes.Context):
     def deserialize(cls, data: JsonDict) -> 'TypeVarDef':
         assert data['.class'] == 'TypeVarDef'
         return TypeVarDef(data['name'],
+                          data['fullname'],
                           data['id'],
                           [deserialize_type(v) for v in data['values']],
                           deserialize_type(data['upper_bound']),
@@ -259,42 +264,32 @@ class TypeList(Type):
 _dummy = object()  # type: Any
 
 
-class TypeOfAny:
+class TypeOfAny(Enum):
     """
     This class describes different types of Any. Each 'Any' can be of only one type at a time.
-
-    TODO: this class should be made an Enum once we drop support for python 3.3.
     """
-    MYPY = False
-    if MYPY:
-        from typing import NewType
-        TypeOfAny = NewType('TypeOfAny', str)
-    else:
-        def TypeOfAny(x: str) -> str:
-            return x
-
     # Was this Any type was inferred without a type annotation?
-    unannotated = TypeOfAny('unannotated')
+    unannotated = 'unannotated'
     # Does this Any come from an explicit type annotation?
-    explicit = TypeOfAny('explicit')
-    # Does this come from an unfollowed import? See --disallow-any=unimported option
-    from_unimported_type = TypeOfAny('from_unimported_type')
+    explicit = 'explicit'
+    # Does this come from an unfollowed import? See --disallow-any-unimported option
+    from_unimported_type = 'from_unimported_type'
     # Does this Any type come from omitted generics?
-    from_omitted_generics = TypeOfAny('from_omitted_generics')
+    from_omitted_generics = 'from_omitted_generics'
     # Does this Any come from an error?
-    from_error = TypeOfAny('from_error')
+    from_error = 'from_error'
     # Is this a type that can't be represented in mypy's type system? For instance, type of
-    # call to NewType(...)). Even though these types aren't real Anys, we treat them as such.
-    special_form = TypeOfAny('special_form')
+    # call to NewType...). Even though these types aren't real Anys, we treat them as such.
+    special_form = 'special_form'
     # Does this Any come from interaction with another Any?
-    from_another_any = TypeOfAny('from_another_any')
+    from_another_any = 'from_another_any'
 
 
 class AnyType(Type):
     """The type 'Any'."""
 
     def __init__(self,
-                 type_of_any: TypeOfAny.TypeOfAny,
+                 type_of_any: TypeOfAny,
                  source_any: Optional['AnyType'] = None,
                  line: int = -1,
                  column: int = -1) -> None:
@@ -315,7 +310,7 @@ class AnyType(Type):
         return visitor.visit_any(self)
 
     def copy_modified(self,
-                      type_of_any: TypeOfAny.TypeOfAny = _dummy,
+                      type_of_any: TypeOfAny = _dummy,
                       original_any: Optional['AnyType'] = _dummy,
                       ) -> 'AnyType':
         if type_of_any is _dummy:
@@ -357,6 +352,10 @@ class UninhabitedType(Type):
     can_be_true = False
     can_be_false = False
     is_noreturn = False  # Does this come from a NoReturn?  Purely for error messages.
+    # It is important to track whether this is an actual NoReturn type, or just a result
+    # of ambiguous type inference, in the latter case we don't want to mark a branch as
+    # unreachable in binder.
+    ambiguous = False  # Is this a result of inference for a variable without constraints?
 
     def __init__(self, is_noreturn: bool = False, line: int = -1, column: int = -1) -> None:
         super().__init__(line, column)
@@ -524,6 +523,7 @@ class TypeVarType(Type):
     """
 
     name = ''  # Name of the type variable (for messages and debugging)
+    fullname = None  # type: str
     id = None  # type: TypeVarId
     values = None  # type: List[Type]  # Value restriction, empty list if no restriction
     upper_bound = None  # type: Type   # Upper bound for values
@@ -532,6 +532,7 @@ class TypeVarType(Type):
 
     def __init__(self, binder: TypeVarDef, line: int = -1, column: int = -1) -> None:
         self.name = binder.name
+        self.fullname = binder.fullname
         self.id = binder.id
         self.values = binder.values
         self.upper_bound = binder.upper_bound
@@ -559,6 +560,7 @@ class TypeVarType(Type):
         assert not self.id.is_meta_var()
         return {'.class': 'TypeVarType',
                 'name': self.name,
+                'fullname': self.fullname,
                 'id': self.id.raw_id,
                 'values': [v.serialize() for v in self.values],
                 'upper_bound': self.upper_bound.serialize(),
@@ -569,6 +571,7 @@ class TypeVarType(Type):
     def deserialize(cls, data: JsonDict) -> 'TypeVarType':
         assert data['.class'] == 'TypeVarType'
         tvdef = TypeVarDef(data['name'],
+                           data['fullname'],
                            data['id'],
                            [deserialize_type(v) for v in data['values']],
                            deserialize_type(data['upper_bound']),
@@ -642,7 +645,7 @@ class CallableType(FunctionLike):
     def __init__(self,
                  arg_types: List[Type],
                  arg_kinds: List[int],
-                 arg_names: List[Optional[str]],
+                 arg_names: Sequence[Optional[str]],
                  ret_type: Type,
                  fallback: Instance,
                  name: Optional[str] = None,
@@ -663,7 +666,7 @@ class CallableType(FunctionLike):
         assert not any(tp is None for tp in arg_types), "No annotation must be Any, not None"
         self.arg_types = arg_types
         self.arg_kinds = arg_kinds
-        self.arg_names = arg_names
+        self.arg_names = list(arg_names)
         self.min_args = arg_kinds.count(ARG_POS)
         self.is_var_arg = ARG_STAR in arg_kinds
         self.is_kw_arg = ARG_STAR2 in arg_kinds
@@ -1668,10 +1671,13 @@ class TypeStrVisitor(SyntheticTypeVisitor[str]):
     def visit_type_var(self, t: TypeVarType) -> str:
         if t.name is None:
             # Anonymous type variable type (only numeric id).
-            return '`{}'.format(t.id)
+            s = '`{}'.format(t.id)
         else:
             # Named type variable type.
-            return '{}`{}'.format(t.name, t.id)
+            s = '{}`{}'.format(t.name, t.id)
+        if self.id_mapper and t.upper_bound:
+            s += '(upper_bound={})'.format(t.upper_bound.accept(self))
+        return s
 
     def visit_callable_type(self, t: CallableType) -> str:
         s = ''
@@ -1699,7 +1705,17 @@ class TypeStrVisitor(SyntheticTypeVisitor[str]):
             s += ' -> {}'.format(t.ret_type.accept(self))
 
         if t.variables:
-            s = '{} {}'.format(t.variables, s)
+            vs = []
+            # We reimplement TypeVarDef.__repr__ here in order to support id_mapper.
+            for var in t.variables:
+                if var.values:
+                    vals = '({})'.format(', '.join(val.accept(self) for val in var.values))
+                    vs.append('{} in {}'.format(var.name, vals))
+                elif not is_named_instance(var.upper_bound, 'builtins.object'):
+                    vs.append('{} <: {}'.format(var.name, var.upper_bound.accept(self)))
+                else:
+                    vs.append(var.name)
+            s = '{} {}'.format('[{}]'.format(', '.join(vs)), s)
 
         return 'def {}'.format(s)
 
@@ -1950,17 +1966,15 @@ def function_type(func: mypy.nodes.FuncBase, fallback: Instance) -> FunctionLike
 
 def callable_type(fdef: mypy.nodes.FuncItem, fallback: Instance,
                   ret_type: Optional[Type] = None) -> CallableType:
-    name = fdef.name()
-    if name:
-        name = '"{}"'.format(name)
-
     return CallableType(
         [AnyType(TypeOfAny.unannotated)] * len(fdef.arg_names),
         fdef.arg_kinds,
         [None if argument_elide_name(n) else n for n in fdef.arg_names],
         ret_type or AnyType(TypeOfAny.unannotated),
         fallback,
-        name,
+        name=fdef.name(),
+        line=fdef.line,
+        column=fdef.column,
         implicit=True,
     )
 
diff --git a/mypy/util.py b/mypy/util.py
index 21038b0..84c892b 100644
--- a/mypy/util.py
+++ b/mypy/util.py
@@ -135,3 +135,8 @@ class IdMapper:
             self.id_map[o] = self.next_id
             self.next_id += 1
         return self.id_map[o]
+
+
+def get_prefix(fullname: str) -> str:
+    """Drop the final component of a qualified name (e.g. ('x.y' -> 'x')."""
+    return fullname.rsplit('.', 1)[0]
diff --git a/mypy/version.py b/mypy/version.py
index f973326..5f14f9f 100644
--- a/mypy/version.py
+++ b/mypy/version.py
@@ -1,7 +1,7 @@
 import os
 from mypy import git
 
-__version__ = '0.540'
+__version__ = '0.560'
 base_version = __version__
 
 mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
diff --git a/mypy_self_check.ini b/mypy_self_check.ini
index 6b871f0..1e9c55a 100644
--- a/mypy_self_check.ini
+++ b/mypy_self_check.ini
@@ -4,21 +4,12 @@ disallow_subclassing_any = True
 warn_no_return = True
 strict_optional = True
 no_implicit_optional = True
-disallow_any = generics, unimported
+disallow_any_generics = True
+disallow_any_unimported = True
 warn_redundant_casts = True
 warn_unused_ignores = True
 warn_unused_configs = True
 
-# historical exception
-[mypy-mypy.semanal]
-strict_optional = False
-
-[mypy-mypy.semanal_pass1]
-strict_optional = False
-
-[mypy-mypy.semanal_pass3]
-strict_optional = False
-
 # needs py2 compatibility
 [mypy-mypy.test.testextensions]
 disallow_untyped_defs = False
diff --git a/runtests.py b/runtests.py
index 2f1bc7a..d4712bb 100755
--- a/runtests.py
+++ b/runtests.py
@@ -204,6 +204,7 @@ def test_path(*names: str):
 
 PYTEST_FILES = test_path(
     'testcheck',
+    'testdmypy',
     'testextensions',
     'testdeps',
     'testdiff',
diff --git a/scripts/dmypy b/scripts/dmypy
new file mode 100755
index 0000000..b760d5a
--- /dev/null
+++ b/scripts/dmypy
@@ -0,0 +1,20 @@
+#!/usr/bin/env python3
+"""Mypy daemon client.
+
+This is just a wrapper script. Look at mypy/dmypy.py for the actual
+implementation.
+"""
+
+import os
+import os.path
+import sys
+
+file_dir = os.path.dirname(__file__)
+parent_dir = os.path.join(file_dir, os.pardir)
+if os.path.exists(os.path.join(parent_dir, '.git')):
+    # We are running from a git clone.
+    sys.path.insert(0, parent_dir)
+
+import mypy.dmypy
+
+mypy.dmypy.main()
diff --git a/scripts/finegrained.py b/scripts/finegrained.py
new file mode 100644
index 0000000..f5fea09
--- /dev/null
+++ b/scripts/finegrained.py
@@ -0,0 +1,100 @@
+"""Prototype for using fine-grained incremental checking interactively.
+
+Usage:
+
+- first start it
+  $ finegrained.py <dir>
+- it now waits for user input
+  - an empty line performs an incremental step
+  - 'q' exits
+"""
+
+import glob
+import sys
+import os
+from typing import Tuple, List, Dict, Optional
+
+from mypy import build
+from mypy.build import BuildManager, Graph
+from mypy.main import expand_dir
+from mypy.options import Options
+from mypy.errors import CompileError
+from mypy.server.update import FineGrainedBuildManager
+
+
+def main() -> None:
+    if len(sys.argv) != 2 or not os.path.isdir(sys.argv[1]):
+        usage()
+    target_dir = sys.argv[1]
+    messages, manager, graph = build_dir(target_dir)
+    for message in messages:
+        sys.stdout.write(message + '\n')
+    fine_grained_manager = FineGrainedBuildManager(manager, graph)
+    ts = timestamps(target_dir)
+    while True:
+        inp = input('>>> ').strip()
+        if inp.startswith('q'):
+            sys.exit(0)
+        if inp != '':
+            print("Press enter to perform type checking; enter 'q' to quit")
+            continue
+        new_ts = timestamps(target_dir)
+        changed = find_changed_module(ts, new_ts)
+        ts = new_ts
+        if not changed:
+            print('[nothing changed]')
+            continue
+        print('[update {}]'.format(changed[0]))
+        messages = fine_grained_manager.update([changed])
+        for message in messages:
+            sys.stdout.write(message + '\n')
+
+
+def find_changed_module(old_ts: Dict[str, Tuple[float, str]],
+                        new_ts: Dict[str, Tuple[float, str]]) -> Optional[Tuple[str, str]]:
+    for module_id in new_ts:
+        if module_id not in old_ts or new_ts[module_id] != old_ts[module_id]:
+            # Modified or created
+            return (module_id, new_ts[module_id][1])
+    for module_id in old_ts:
+        if module_id not in new_ts:
+            # Deleted
+            return (module_id, old_ts[module_id][1])
+    return None
+
+
+def build_dir(target_dir: str) -> Tuple[List[str], BuildManager, Graph]:
+    sources = expand_dir(target_dir)
+    options = Options()
+    options.incremental = True
+    options.show_traceback = True
+    options.cache_dir = os.devnull
+    try:
+        result = build.build(sources=sources,
+                             options=options)
+    except CompileError as e:
+        # TODO: We need a manager and a graph in this case as well
+        assert False, str('\n'.join(e.messages))
+        return e.messages, None, None
+    return result.errors, result.manager, result.graph
+
+
+def timestamps(target_dir: str) -> Dict[str, Tuple[float, str]]:
+    paths = glob.glob('%s/**/*.py' % target_dir) + glob.glob('%s/*.py' % target_dir)
+    result = {}
+    for path in paths:
+        mod = path[:-3].replace('/', '.')
+        result[mod] = (os.stat(path).st_mtime, path)
+    return result
+
+
+def usage() -> None:
+    print('usage: finegrained.py DIRECTORY')
+    sys.exit(1)
+
+
+if __name__ == '__main__':
+    try:
+        main()
+    except EOFError:
+        print('^D')
diff --git a/setup.py b/setup.py
index 64d5fac..746fb84 100644
--- a/setup.py
+++ b/setup.py
@@ -5,8 +5,8 @@ import os
 import os.path
 import sys
 
-if sys.version_info < (3, 2, 0):
-    sys.stderr.write("ERROR: You need Python 3.2 or later to use mypy.\n")
+if sys.version_info < (3, 4, 0):
+    sys.stderr.write("ERROR: You need Python 3.4 or later to use mypy.\n")
     exit(1)
 
 # This requires setuptools when building; setuptools is not needed
@@ -84,7 +84,6 @@ classifiers = [
     'License :: OSI Approved :: MIT License',
     'Operating System :: POSIX',
     'Programming Language :: Python :: 3',
-    'Programming Language :: Python :: 3.3',
     'Programming Language :: Python :: 3.4',
     'Programming Language :: Python :: 3.5',
     'Programming Language :: Python :: 3.6',
@@ -103,11 +102,15 @@ setup(name='mypy',
       py_modules=[],
       packages=['mypy', 'mypy.test', 'mypy.myunit', 'mypy.server'],
       entry_points={'console_scripts': ['mypy=mypy.__main__:console_entry',
-                                        'stubgen=mypy.stubgen:main']},
+                                        'stubgen=mypy.stubgen:main',
+                                        'dmypy=mypy.dmypy:main',
+                                        ]},
       data_files=data_files,
       classifiers=classifiers,
       cmdclass={'build_py': CustomPythonBuild},
-      install_requires = ['typed-ast >= 1.1.0, < 1.2.0'],
+      install_requires = ['typed-ast >= 1.1.0, < 1.2.0',
+                          'psutil >= 5.4.0, < 5.5.0',
+                          ],
       extras_require = {
           ':python_version < "3.5"': 'typing >= 3.5.3',
       },
diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test
index 0d0925d..5f51934 100644
--- a/test-data/unit/check-abstract.test
+++ b/test-data/unit/check-abstract.test
@@ -638,6 +638,21 @@ class C:
     def __add__(self, other: int) -> B: pass
 [out]
 
+[case testAbstractClassWithAnyBase]
+from typing import Any
+from abc import abstractmethod, ABCMeta
+
+A: Any
+
+class D(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self) -> None: pass
+
+class C(A, D):
+    pass
+
+C()  # A might implement 'f'
+
 
 -- Abstract properties
 -- -------------------
diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test
index c40ceb5..fd35ee7 100644
--- a/test-data/unit/check-class-namedtuple.test
+++ b/test-data/unit/check-class-namedtuple.test
@@ -283,7 +283,7 @@ class X(NamedTuple):
 x: X
 reveal_type(x._replace())  # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
 x._replace(x=5)
-x._replace(y=5)  # E: Argument 1 to X._replace has incompatible type "int"; expected "str"
+x._replace(y=5)  # E: Argument 1 to "_replace" of "X" has incompatible type "int"; expected "str"
 
 [case testNewNamedTupleFields]
 # flags: --python-version 3.6
diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test
index f5ea5d2..ba623ee 100644
--- a/test-data/unit/check-classes.test
+++ b/test-data/unit/check-classes.test
@@ -1639,6 +1639,18 @@ class C:
     def __radd__(self, x: Any) -> int: pass
 [out]
 
+[case testReverseOperatorMethodInvalid]
+from foo import *
+[file foo.pyi]
+class A: ...
+class B:
+    def __rmul__(self) -> A: ...
+class C:
+    def __radd__(self, other, oops) -> int: ...
+[out]
+tmp/foo.pyi:3: error: Invalid signature "def (foo.B) -> foo.A"
+tmp/foo.pyi:5: error: Invalid signature "def (foo.C, Any, Any) -> builtins.int"
+
 [case testReverseOperatorMethodForwardIsAny]
 from typing import Any
 def deco(f: Any) -> Any: return f
@@ -2904,10 +2916,13 @@ class B(object, A): # E: Cannot determine consistent method resolution order (MR
     __iter__ = readlines
 
 [case testDynamicMetaclass]
-
 class C(metaclass=int()):  # E: Dynamic metaclass not supported for 'C'
     pass
 
+[case testDynamicMetaclassCrash]
+class C(metaclass=int().x):  # E: Dynamic metaclass not supported for 'C'
+    pass
+
 [case testVariableSubclass]
 class A:
     a = 1  # type: int
@@ -3620,6 +3635,55 @@ def parse_ast(name_dict: NameDict) -> None:
 [builtins fixtures/isinstancelist.pyi]
 [out]
 
+[case testNoCrashForwardRefToBrokenDoubleNewType]
+from typing import Any, Dict, List, NewType
+
+Foo = NewType('NotFoo', int) # E: String argument 1 'NotFoo' to NewType(...) does not match variable name 'Foo'
+Foos = NewType('Foos', List[Foo]) # type: ignore
+
+def frob(foos: Dict[Any, Foos]) -> None:
+    foo = foos.get(1)
+    dict(foo)
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testNoCrashForwardRefToBrokenDoubleNewTypeClass]
+from typing import Any, Dict, List, NewType
+
+Foo = NewType('NotFoo', int) # type: ignore
+Foos = NewType('Foos', List[Foo]) # type: ignore
+
+x: C
+class C:
+    def frob(self, foos: Dict[Any, Foos]) -> None:
+        foo = foos.get(1)
+        dict(foo)
+
+reveal_type(x.frob) # E: Revealed type is 'def (foos: builtins.dict[Any, __main__.Foos])'
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testNewTypeFromForwardNamedTuple]
+from typing import NewType, NamedTuple, Tuple
+
+NT = NewType('NT', N)
+class N(NamedTuple):
+    x: int
+
+x: NT = N(1) # E: Incompatible types in assignment (expression has type "N", variable has type "NT")
+x = NT(N(1))
+[out]
+
+[case testNewTypeFromForwardTypedDict]
+from typing import NewType, Tuple
+from mypy_extensions import TypedDict
+
+NT = NewType('NT', N) # E: Argument 2 to NewType(...) must be subclassable (got TypedDict('__main__.N', {'x': builtins.int}))
+class N(TypedDict):
+    x: int
+[builtins fixtures/dict.pyi]
+[out]
+
 [case testCorrectAttributeInForwardRefToNamedTuple]
 from typing import NamedTuple
 proc: Process
diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test
index 8b862d0..06d3b45 100644
--- a/test-data/unit/check-expressions.test
+++ b/test-data/unit/check-expressions.test
@@ -613,6 +613,13 @@ class A: pass
 [out]
 main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
 
+[case testIsRightOperand]
+
+1 is 1()
+[builtins fixtures/bool.pyi]
+[out]
+main:2: error: "int" not callable
+
 [case testReverseBinaryOperator]
 
 class A:
@@ -875,10 +882,11 @@ main:3: error: "A" not callable
 [case testNoneReturnTypeBasics]
 
 a, o = None, None # type: (A, object)
-a = f()         # Fail
-o = A().g(a)    # Fail
-A().g(f())      # Fail
-x = f() # type: A # Fail
+a = f()         # E: "f" does not return a value
+o = a()         # E: Function does not return a value
+o = A().g(a)    # E: "g" of "A" does not return a value
+A().g(f())      # E: "f" does not return a value
+x: A = f()      # E: "f" does not return a value
 f()
 A().g(a)
 
@@ -888,11 +896,8 @@ def f() -> None:
 class A:
     def g(self, x: object) -> None:
         pass
-[out]
-main:3: error: "f" does not return a value
-main:4: error: "g" of "A" does not return a value
-main:5: error: "f" does not return a value
-main:6: error: "f" does not return a value
+    def __call__(self) -> None:
+        pass
 
 [case testNoneReturnTypeWithStatements]
 import typing
@@ -1525,9 +1530,7 @@ from typing import Iterator
 def f() -> Iterator[int]:
     yield 5
 def g() -> Iterator[int]:
-    a = yield from f()
-[out]
-main:5: error: Function does not return a value
+    a = yield from f()  # E: Function does not return a value
 
 [case testYieldFromGeneratorHasValue]
 from typing import Iterator, Generator
diff --git a/test-data/unit/check-fastparse.test b/test-data/unit/check-fastparse.test
index 7aa1521..e6aaa04 100644
--- a/test-data/unit/check-fastparse.test
+++ b/test-data/unit/check-fastparse.test
@@ -14,10 +14,16 @@ x = None # type: a + b  # E: invalid type comment or annotation
 -- This happens in both parsers.
 [case testFastParseFunctionAnnotationSyntaxError]
 
-def f():  # E: syntax error in type comment
+def f():  # E: syntax error in type comment # N: Suggestion: wrap argument types in parentheses
   # type: None -> None
   pass
 
+[case testFastParseFunctionAnnotationSyntaxErrorSpaces]
+
+def f():  # E: syntax error in type comment # N: Suggestion: wrap argument types in parentheses
+  # type:             None -> None
+  pass
+
 [case testFastParseInvalidFunctionAnnotation]
 
 def f(x):  # E: invalid type comment or annotation
@@ -235,6 +241,30 @@ def f(a):
     pass
 [out]
 main:3: error: invalid type comment or annotation
+main:3: note: Suggestion: use Tuple[...] instead of Tuple(...)
+
+[case testFasterParseTypeErrorList_python2]
+
+from typing import List
+def f(a):
+    # type: (List(int)) -> int
+    pass
+[out]
+main:3: error: invalid type comment or annotation
+main:3: note: Suggestion: use List[...] instead of List(...)
+
+[case testFasterParseTypeErrorCustom]
+
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class Foo(Generic[T]):
+  pass
+
+def f(a: Foo(int)) -> int:
+    pass
+[out]
+main:7: error: invalid type comment or annotation
+main:7: note: Suggestion: use Foo[...] instead of Foo(...)
 
 [case testFastParseMatMul]
 
@@ -364,3 +394,44 @@ def m(x, ((x, y), z)):  # E: Duplicate argument 'x' in function definition
   pass
 
 lambda x, (y, x): None  # E: Duplicate argument 'x' in function definition
+
+[case testNoCrashOnImportFromStar]
+from pack import *
+[file pack/__init__.py]
+from . import *
+
+[case testNoCrashOnImportFromStarNested]
+import blamodule
+[file blamodule/__init__.py]
+from . import command
+from . import backends
+
+[file blamodule/backends/__init__.py]
+from .Bla import Bla
+reveal_type(Bla().method())  # E: Revealed type is 'builtins.str'
+
+[file blamodule/backends/Bla.py]
+from .. import *
+
+class Bla:
+    def method(self) -> str:
+        return command.call()
+
+[file blamodule/command.py]
+def call() -> str: pass
+
+[case testNoCrashOnImportFromStarPython2]
+# flags: --py2
+from . import * # E: No parent module -- cannot perform relative import
+
+[case testSpuriousTrailingComma_python2]
+from typing import Optional
+
+def update_state(tid,                # type: int
+                 vid,                # type: int
+                 update_ts=None,     # type: Optional[float],
+                 ):         # type: (...) -> str
+    pass
+[out]
+main:3: error: Invalid tuple literal type
+main:3: note: Suggestion: Is there a spurious trailing comma?
diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test
index 37c2bbe..198b687 100644
--- a/test-data/unit/check-flags.test
+++ b/test-data/unit/check-flags.test
@@ -10,12 +10,6 @@ def f(x) -> int: pass
 [out]
 main:2: error: Function is missing a type annotation for one or more arguments
 
-[case testUnannotatedArgumentWithFastParser]
-# flags: --disallow-untyped-defs
-def f(x) -> int: pass
-[out]
-main:2: error: Function is missing a type annotation for one or more arguments
-
 [case testNoArgumentFunction]
 # flags: --disallow-untyped-defs
 def f() -> int: pass
@@ -45,6 +39,31 @@ def f():
 [out]
 main:2: error: Function is missing a type annotation
 
+[case testUntypedAsyncDef]
+# flags: --disallow-untyped-defs
+async def f():  # E: Function is missing a type annotation
+    pass
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncUnannotatedArgument]
+# flags: --disallow-untyped-defs
+async def f(x) -> None:  # E: Function is missing a type annotation for one or more arguments
+    pass
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncUnannotatedReturn]
+# flags: --disallow-untyped-defs
+from typing import Any
+async def f(x: int):  # E: Function is missing a return type annotation
+    pass
+# Make sure explicit Any is allowed.
+async def g(x: int) -> Any:
+    pass
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
 [case testDisallowUntypedDefsUntypedDecorator]
 # flags: --disallow-untyped-decorators
 def d(p):
@@ -113,13 +132,6 @@ def d3(p) -> Any:
 @d1  # E: Untyped decorator makes function "f" untyped
 def f() -> None: pass
 
-[case testUntypedDefDisallowUnannotated]
-# flags: --disallow-any=unannotated
-def f():
-    1 + "str"
-[out]
-main:2: error: Function is missing a type annotation
-
 [case testSubclassingAny]
 # flags: --disallow-subclassing-any
 from typing import Any
@@ -570,14 +582,14 @@ strict_optional = False
 strict_optional = True
 
 [case testDisallowImplicitTypesIgnoreMissingTypes]
-# flags: --ignore-missing-imports --disallow-any=unimported
+# flags: --ignore-missing-imports --disallow-any-unimported
 from missing import MyType
 
 def f(x: MyType) -> None:  # E: Argument 1 to "f" becomes "Any" due to an unfollowed import
     pass
 
 [case testDisallowImplicitTypes]
-# flags: --disallow-any=unimported
+# flags: --disallow-any-unimported
 from missing import MyType
 
 def f(x: MyType) -> None:
@@ -588,13 +600,13 @@ main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports"
 main:4: error: Argument 1 to "f" becomes "Any" due to an unfollowed import
 
 [case testDisallowImplicitAnyVariableDefinition]
-# flags: --ignore-missing-imports --disallow-any=unimported
+# flags: --ignore-missing-imports --disallow-any-unimported
 from missing import Unchecked
 
 t: Unchecked = 12  # E: Type of variable becomes "Any" due to an unfollowed import
 
 [case testDisallowImplicitAnyGeneric]
-# flags: --ignore-missing-imports --disallow-any=unimported
+# flags: --ignore-missing-imports --disallow-any-unimported
 from missing import Unchecked
 from typing import List
 
@@ -608,7 +620,7 @@ main:5: error: Argument 1 to "foo" becomes "List[Any]" due to an unfollowed impo
 main:6: error: Type of variable becomes "List[Any]" due to an unfollowed import
 
 [case testDisallowImplicitAnyInherit]
-# flags: --ignore-missing-imports --disallow-any=unimported
+# flags: --ignore-missing-imports --disallow-any-unimported
 from missing import Unchecked
 from typing import List
 
@@ -620,7 +632,7 @@ class A(List[Unchecked]): # E: Base type becomes "List[Any]" due to an unfollowe
 [builtins fixtures/list.pyi]
 
 [case testDisallowImplicitAnyAlias]
-# flags: --ignore-missing-imports --disallow-any=unimported
+# flags: --ignore-missing-imports --disallow-any-unimported
 from missing import Unchecked
 from typing import List
 
@@ -631,7 +643,7 @@ def f(x: X) -> None:  # E: Argument 1 to "f" becomes "List[Any]" due to an unfol
 [builtins fixtures/list.pyi]
 
 [case testDisallowImplicitAnyCast]
-# flags: --ignore-missing-imports --disallow-any=unimported
+# flags: --ignore-missing-imports --disallow-any-unimported
 from missing import Unchecked
 from typing import List, cast
 
@@ -642,7 +654,7 @@ cast(Unchecked, foo)  # E: Target type of cast becomes "Any" due to an unfollowe
 [builtins fixtures/list.pyi]
 
 [case testDisallowImplicitAnyNamedTuple]
-# flags: --ignore-missing-imports --disallow-any=unimported
+# flags: --ignore-missing-imports --disallow-any-unimported
 from typing import List, NamedTuple
 from missing import Unchecked
 
@@ -653,7 +665,7 @@ Point = NamedTuple('Point', [('x', List[Unchecked]),
 main:5: error: NamedTuple type becomes "Tuple[List[Any], Any]" due to an unfollowed import
 
 [case testDisallowImplicitAnyTypeVarConstraints]
-# flags: --ignore-missing-imports --disallow-any=unimported
+# flags: --ignore-missing-imports --disallow-any-unimported
 from typing import List, NamedTuple, TypeVar, Any
 from missing import Unchecked
 
@@ -664,7 +676,7 @@ main:5: error: Constraint 1 becomes "Any" due to an unfollowed import
 main:5: error: Constraint 2 becomes "List[Any]" due to an unfollowed import
 
 [case testDisallowImplicitAnyNewType]
-# flags: --ignore-missing-imports --disallow-any=unimported
+# flags: --ignore-missing-imports --disallow-any-unimported
 from typing import NewType, List
 from missing import Unchecked
 
@@ -674,7 +686,7 @@ Bar = NewType('Bar', List[Unchecked])  # E: Argument 2 to NewType(...) becomes "
 [builtins fixtures/list.pyi]
 
 [case testDisallowImplicitAnyCallableAndTuple]
-# flags: --ignore-missing-imports --disallow-any=unimported
+# flags: --ignore-missing-imports --disallow-any-unimported
 from typing import Callable, Tuple
 from missing import Unchecked
 
@@ -686,14 +698,14 @@ main:5: error: Return type becomes "Tuple[Any]" due to an unfollowed import
 main:5: error: Argument 1 to "foo" becomes "Callable[[], Any]" due to an unfollowed import
 
 [case testDisallowImplicitAnySubclassingExplicitAny]
-# flags: --ignore-missing-imports --disallow-any=unimported --disallow-subclassing-any
+# flags: --ignore-missing-imports --disallow-any-unimported --disallow-subclassing-any
 from typing import Any
 
 class C(Any): # E: Class cannot subclass 'Any' (has type 'Any')
     pass
 
 [case testDisallowImplicitAnyVarDeclaration]
-# flags: --ignore-missing-imports --disallow-any=unimported
+# flags: --ignore-missing-imports --disallow-any-unimported
 from missing import Unchecked
 
 foo: Unchecked = ""
@@ -704,7 +716,7 @@ main:4: error: Type of variable becomes "Any" due to an unfollowed import
 main:6: error: A type on this line becomes "Any" due to an unfollowed import
 
 [case testDisallowUnimportedAnyTypedDictSimple]
-# flags: --ignore-missing-imports --disallow-any=unimported
+# flags: --ignore-missing-imports --disallow-any-unimported
 from mypy_extensions import TypedDict
 from x import Unchecked
 
@@ -714,7 +726,7 @@ def f(m: M) -> M: pass  # no error
 [builtins fixtures/dict.pyi]
 
 [case testDisallowUnimportedAnyTypedDictGeneric]
-# flags: --ignore-missing-imports --disallow-any=unimported
+# flags: --ignore-missing-imports --disallow-any-unimported
 
 from mypy_extensions import TypedDict
 from typing import List
@@ -726,7 +738,7 @@ def f(m: M) -> M: pass  # no error
 [builtins fixtures/dict.pyi]
 
 [case testDisallowAnyDecoratedUnannotatedDecorator]
-# flags: --disallow-any=decorated
+# flags: --disallow-any-decorated
 from typing import Any
 
 def d(f):
@@ -740,7 +752,7 @@ def h(x):  # E: Function is untyped after decorator transformation
     pass
 [builtins fixtures/list.pyi]
 [case testDisallowAnyDecoratedErrorIsReportedOnlyOnce]
-# flags: --disallow-any=decorated
+# flags: --disallow-any-decorated
 
 def d(f):
     return f
@@ -753,7 +765,7 @@ def d2(f):
 @d
 def f(x: int) -> None: pass  # E: Function is untyped after decorator transformation
 [case testDisallowAnyDecoratedReturnAny]
-# flags: --disallow-any=decorated
+# flags: --disallow-any-decorated
 from typing import Any
 
 def d(f) -> Any:
@@ -763,7 +775,7 @@ def d(f) -> Any:
 def f() -> None: pass  # E: Function is untyped after decorator transformation
 [builtins fixtures/list.pyi]
 [case testDisallowAnyDecoratedReturnCallable]
-# flags: --disallow-any=decorated
+# flags: --disallow-any-decorated
 from typing import Any, Callable
 
 def d(f) -> Callable[..., None]:
@@ -774,7 +786,7 @@ def g(i: int, s: str) -> None: pass  # E: Type of decorated function contains ty
 
 [builtins fixtures/list.pyi]
 [case testDisallowAnyDecoratedNonexistentDecorator]
-# flags: --disallow-any=decorated --ignore-missing-imports
+# flags: --disallow-any-decorated --ignore-missing-imports
 from nonexistent import d
 
 @d
@@ -782,7 +794,7 @@ def f() -> None: pass  # E: Function is untyped after decorator transformation
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyDecoratedPartlyTypedCallable]
-# flags: --disallow-any=decorated --ignore-missing-imports
+# flags: --disallow-any-decorated --ignore-missing-imports
 from typing import Callable, Any, List
 
 def d(f) -> Callable[[int, Any], Any]: pass
@@ -801,7 +813,7 @@ def h(i: int) -> None:  # E: Type of decorated function contains type "Any" ("Ca
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyDecoratedReturnsCallableNoParams]
-# flags: --disallow-any=decorated
+# flags: --disallow-any-decorated
 from typing import Callable
 
 def d(p) -> Callable[[], int]:
@@ -813,7 +825,7 @@ def f(i):
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyDecoratedDecoratorReturnsNonCallable]
-# flags: --disallow-any=decorated
+# flags: --disallow-any-decorated
 def d(p) -> int:
     return p(0)
 
@@ -822,14 +834,14 @@ def f(i):
     return i
 
 [case testDisallowAnyDecoratedUntypedUndecoratedFunction]
-# flags: --disallow-any=decorated
+# flags: --disallow-any-decorated
 from typing import Callable
 
 def f(i):  # no error
     return i
 
 [case testDisallowAnyDecoratedTwoDecorators]
-# flags: --disallow-any=decorated
+# flags: --disallow-any-decorated
 from typing import Callable
 
 def typed_dec(f) -> Callable[[], int]: pass
@@ -846,7 +858,7 @@ def g():  # E: Function is untyped after decorator transformation
     return i
 
 [case testDisallowAnyExprSimple]
-# flags: --disallow-any=expr
+# flags: --disallow-any-expr
 from typing import Any
 def f(s):
     yield s
@@ -865,7 +877,7 @@ f(f(0))  # E: Expression has type "Any"
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyExprUnannotatedFunction]
-# flags: --disallow-any=expr
+# flags: --disallow-any-expr
 def g(s):
     return s
 
@@ -873,7 +885,7 @@ g(0)
 w: int = g(1)
 
 [case testDisallowAnyExprExplicitAnyParam]
-# flags: --disallow-any=expr
+# flags: --disallow-any-expr
 from typing import Any, List
 def f(s: Any) -> None:
     pass
@@ -889,7 +901,7 @@ g([''])  # E: Expression type contains "Any" (has type "List[Any]")
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyExprAllowsAnyInCast]
-# flags: --disallow-any=expr
+# flags: --disallow-any-expr
 from typing import Any, cast
 class Foo:
     g: Any = 2
@@ -900,7 +912,7 @@ k = Foo.g  # E: Expression has type "Any"
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyExprAllowsAnyInVariableAssignmentWithExplicitTypeAnnotation]
-# flags: --disallow-any=expr
+# flags: --disallow-any-expr
 from typing import Any
 class Foo:
     g: Any = 2
@@ -912,7 +924,7 @@ n = Foo().g  # type: Any  # E: Expression has type "Any"
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyExprGeneric]
-# flags: --disallow-any=expr
+# flags: --disallow-any-expr
 from typing import List
 
 l: List = []
@@ -921,7 +933,7 @@ k = l[0]  # E: Expression type contains "Any" (has type "List[Any]")  # E: Expre
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyExprTypeVar]
-# flags: --disallow-any=expr
+# flags: --disallow-any-expr
 from typing import TypeVar
 
 T = TypeVar('T')  # no error
@@ -931,7 +943,7 @@ def f(t: T) -> T:
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyExprNamedTuple]
-# flags: --disallow-any=expr
+# flags: --disallow-any-expr
 from typing import NamedTuple
 
 Point = NamedTuple('Point', [('x', int), ('y', int)])  # no error
@@ -941,7 +953,7 @@ def origin() -> Point:
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyExprNewType]
-# flags: --disallow-any=expr
+# flags: --disallow-any-expr
 from typing import NewType
 
 NT = NewType('NT', int)  # no error
@@ -951,7 +963,7 @@ def nt() -> NT:
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyExprEnum]
-# flags: --disallow-any=expr
+# flags: --disallow-any-expr
 from enum import Enum
 E = Enum('E', '1, 2, 3')  # no error
 
@@ -959,7 +971,7 @@ def k(s: E) -> None: pass
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyExprTypedDict]
-# flags: --disallow-any=expr
+# flags: --disallow-any-expr
 from mypy_extensions import TypedDict
 
 Movie = TypedDict('Movie', {'name': str, 'year': int})
diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test
index 5e99613..608752b 100644
--- a/test-data/unit/check-functions.test
+++ b/test-data/unit/check-functions.test
@@ -1398,6 +1398,37 @@ f()
 f(1)
 [builtins fixtures/list.pyi]
 
+[case testDefineConditionallyAsImportedAndDecorated]
+from typing import Callable
+
+def dec(f: Callable[[], None]) -> Callable[[], None]: ...
+
+if int():
+    from m import f
+else:
+    @dec
+    def f():
+        yield
+[file m.py]
+def f(): pass
+
+[case testDefineConditionallyAsImportedAndDecoratedWithInference]
+if int():
+    from m import f
+else:
+    from contextlib import contextmanager
+
+    @contextmanager
+    def f():
+        yield
+[file m.py]
+from contextlib import contextmanager
+
+ at contextmanager
+def f():
+    yield
+[typing fixtures/typing-full.pyi]
+
 
 -- Conditional method definition
 -- -----------------------------
@@ -1482,9 +1513,24 @@ f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
 g('x')
 g(1) # E: Argument 1 to "g" has incompatible type "int"; expected "str"
 
+[case testConditionalMethodDefinitionUsingDecorator]
+from typing import Callable
+
+def dec(f) -> Callable[['A', int], None]: pass
+
+class A:
+    x = int()
+    if x:
+        @dec
+        def f(self): pass
+    else:
+        def f(self, x: int) -> None: pass
+
+
 -- Callable with specific arg list
 -- -------------------------------
 
+
 [case testCallableWithNamedArg]
 from typing import Callable
 from mypy_extensions import Arg
@@ -1879,6 +1925,34 @@ class A(Generic[t]):
 [out]
 main:5: error: Cannot use a covariant type variable as a parameter
 
+[case testRejectCovariantArgumentSplitLine]
+from typing import TypeVar, Generic
+
+t = TypeVar('t', covariant=True)
+class A(Generic[t]):
+    def foo(self,
+            x: t) -> None:
+        return None
+[builtins fixtures/bool.pyi]
+[out]
+main:6: error: Cannot use a covariant type variable as a parameter
+
+[case testRejectCovariantArgumentInLambda]
+from typing import TypeVar, Generic, Callable
+
+t = TypeVar('t', covariant=True)
+class Thing(Generic[t]):
+    def chain(self, func: Callable[[t], None]) -> None: pass
+    def end(self) -> None:
+        return self.chain(  # Note that lambda args have no line numbers
+            lambda _: None)
+[builtins fixtures/bool.pyi]
+[out]
+main:8: error: Cannot use a covariant type variable as a parameter
+
+[case testRejectCovariantArgumentInLambdaSplitLine]
+from typing import TypeVar, Generic, Callable
+
 [case testRejectContravariantReturnType]
 from typing import TypeVar, Generic
 
@@ -2138,7 +2212,7 @@ foo(y=2)  # E: Missing positional arguments
 def dec(f): pass
 
 @dec
-def test(a: str) -> (str,): # E: Invalid tuple literal type
+def test(a: str) -> (str,): # E: Invalid tuple literal type # N: Suggestion: Is there a spurious trailing comma?
     return None
 
 [case testReturnTypeLineNumberNewLine]
@@ -2212,3 +2286,19 @@ def i() -> List[Union[str, int]]:
     return x
 
 [builtins fixtures/dict.pyi]
+
+[case testLambdaSemanal]
+f = lambda: xyz
+[out]
+main:1: error: Name 'xyz' is not defined
+
+[case testLambdaTypeCheck]
+f = lambda: 1 + '1'
+[out]
+main:1: error: Unsupported operand types for + ("int" and "str")
+
+[case testLambdaTypeInference]
+f = lambda: 5
+reveal_type(f)
+[out]
+main:2: error: Revealed type is 'def () -> builtins.int'
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
index b4a5050..8b9b4d5 100644
--- a/test-data/unit/check-generics.test
+++ b/test-data/unit/check-generics.test
@@ -812,7 +812,7 @@ class Node(Generic[T]):
     def __init__(self, x: T) -> None:
         ...
 
-BadC = Callable[T] # E: Invalid function type
+BadC = Callable[T] # E: Please use "Callable[[<parameters>], <return type>]" or "Callable"
 
 C = Callable[..., T]
 C2 = Callable[[T, T], Node[T]]
@@ -1757,3 +1757,30 @@ def f(c: Type[T]) -> T: ...
 
 x: Any
 reveal_type(f(x))  # E: Revealed type is 'Any'
+
+[case testQualifiedTypeVariableName]
+import b
+def f(x: b.T) -> b.T: return x
+reveal_type(f)
+reveal_type(b.g)
+[file b.py]
+from typing import TypeVar
+T = TypeVar('T')
+def g(x: T) -> T: return x
+[out]
+main:3: error: Revealed type is 'def [b.T] (x: b.T`-1) -> b.T`-1'
+main:4: error: Revealed type is 'def [T] (x: T`-1) -> T`-1'
+
+[case testPartiallyQualifiedTypeVariableName]
+from p import b
+def f(x: b.T) -> b.T: return x
+reveal_type(f)
+reveal_type(b.g)
+[file p/__init__.py]
+[file p/b.py]
+from typing import TypeVar
+T = TypeVar('T')
+def g(x: T) -> T: return x
+[out]
+main:3: error: Revealed type is 'def [b.T] (x: b.T`-1) -> b.T`-1'
+main:4: error: Revealed type is 'def [T] (x: T`-1) -> T`-1'
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index 5c952c8..6df0d81 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -1,7 +1,7 @@
 -- Checks for incremental mode (see testcheck.py).
--- Each test is run twice, once with a cold cache, once with a warm cache.
+-- Each test is run at least twice, once with a cold cache, once with a warm cache.
 -- Before the tests are run again, in step N any *.py.N files are copied to
--- *.py.
+-- *.py.  There are at least two runs; more as long as there are *.py.N files.
 --
 -- You can add an empty section like `[delete mod.py.2]` to delete `mod.py`
 -- before the second run.
@@ -1113,7 +1113,7 @@ val = "foo"
 
 [builtins fixtures/module_all.pyi]
 [rechecked main, c, c.submodule]
-[stale]
+[stale c]
 [out2]
 tmp/c/submodule.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int")
 tmp/main.py:7: error: "C" has no attribute "foo"
@@ -2817,10 +2817,8 @@ from b import x
 [out]
 [out2]
 tmp/b.py:1: error: Module 'c' has no attribute 'x'
-tmp/a.py:1: error: Module 'b' has no attribute 'x'
 [out3]
 tmp/b.py:1: error: Module 'c' has no attribute 'x'
-tmp/a.py:1: error: Module 'b' has no attribute 'x'
 
 [case testCacheDeletedAfterErrorsFound2]
 import a
@@ -2875,12 +2873,8 @@ from b import x
 [out]
 [out2]
 tmp/c.py:1: error: Module 'd' has no attribute 'x'
-tmp/b.py:1: error: Module 'c' has no attribute 'x'
-tmp/a.py:1: error: Module 'b' has no attribute 'x'
 [out3]
 tmp/c.py:1: error: Module 'd' has no attribute 'x'
-tmp/b.py:1: error: Module 'c' has no attribute 'x'
-tmp/a.py:1: error: Module 'b' has no attribute 'x'
 
 [case testNoCrashOnDoubleImportAliasQuick]
 # cmd: mypy -m e
@@ -3020,7 +3014,6 @@ tmp/mod.py:7: error: Revealed type is 'builtins.bytes'
 # cmd: mypy -m a
 # cmd2: mypy -m b
 # flags: --follow-imports=silent
-# flags2: --follow-imports=silent
 [file a.py]
 import b
 
@@ -3047,6 +3040,71 @@ class Pair(NamedTuple):
 Person(name=Pair(first="John", last="Doe"))
 [out]
 
+[case testNoCrashForwardRefToBrokenDoubleNewTypeIncremental]
+from typing import Any, List, NewType
+
+Foo = NewType('NotFoo', int) # type: ignore
+Foos = NewType('Foos', List[Foo]) # type: ignore
+
+def frob(foos: List[Foos]) -> None:
+    pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNoCrashForwardRefOverloadIncremental]
+from typing import overload, List
+
+ at overload
+def f(x: int) -> int: ...
+ at overload
+def f(x: F) -> F: ...
+def f(x):
+    pass
+
+F = List[int]
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNoCrashForwardRefOverloadIncrementalClass]
+from typing import overload, Tuple, NamedTuple
+
+x: C
+class C:
+    @overload
+    def f(self, x: str) -> N: pass
+    @overload
+    def f(self, x: int) -> int: pass
+    def f(self, x):
+        pass
+
+class N(NamedTuple):
+    x: A
+A = Tuple[int]
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testNewTypeFromForwardNamedTupleIncremental]
+from typing import NewType, NamedTuple, Tuple
+
+NT = NewType('NT', N)
+class N(NamedTuple):
+    x: int
+
+x: NT = N(1) # type: ignore
+x = NT(N(1))
+[out]
+
+[case testNewTypeFromForwardTypedDictIncremental]
+from typing import NewType, Tuple, Dict
+from mypy_extensions import TypedDict
+
+NT = NewType('NT', N) # type: ignore
+class N(TypedDict):
+    x: A
+A = Dict[str, int]
+[builtins fixtures/dict.pyi]
+[out]
+
 -- Some crazy selef-referential named tuples, types dicts, and aliases
 -- to be sure that everything can be _serialized_ (i.e. ForwardRef's are removed).
 -- For this reason errors are silenced (tests with # type: ignore have equivalents in other files)
@@ -3266,3 +3324,126 @@ def func(x: int) -> int:
 x = tp(func)
 [out]
 [out2]
+
+[case testReprocessModuleEvenIfInterfaceHashDoesNotChange]
+import a
+import d
+
+[file a.py]
+import b
+x: b.c.A
+x = b.c.A()
+
+[file b.py]
+import c
+
+[file c.py]
+class A:
+    x = 1
+
+[file d.py]
+import a
+def f() -> None: pass
+
+[file a.py.2]
+import b
+x: b.c.A
+
+[file c.py.3]
+class A:
+    x = 2
+
+[file d.py.4]
+import a
+def f() -> None:
+    from c import A
+    a.x = [A(), a.x][0]
+
+[builtins fixtures/list.pyi]
+[stale]
+[rechecked a]
+[stale2]
+[rechecked2 c]
+[stale3]
+[rechecked3 d]
+[out1]
+[out2]
+[out3]
+[out4]
+
+[case testTreeShadowingViaParentPackage]
+import m.semanal
+
+[file m/__init__.py]
+pass
+
+[file m/nodes.py]
+if False:
+    import m.types
+    import m.semanal
+class Node:
+    line: int
+class FuncBase(Node):
+    type: m.types.Type
+class OverloadedFuncDef(FuncBase): pass
+
+[file m/types.py]
+from m.nodes import Node
+class Type(Node): pass
+class Overloaded(Type): pass
+
+[file m/semanal.py]
+from m.nodes import OverloadedFuncDef
+from m.types import Overloaded
+
+class C:
+    def func(self, defn: OverloadedFuncDef):
+        defn.type = Overloaded()
+        defn.type.line = 0
+
+[file m/nodes.py.2]
+if False:
+    import m.types
+    import m.semanal
+class Node:
+    line: int
+class FuncBase(Node):
+    type: m.types.Type
+class OverloadedFuncDef(FuncBase): pass
+extra = 1
+
+[file m/types.py.2]
+from m.nodes import Node
+class Type(Node): pass
+class Overloaded(Type): pass
+extra = 1
+[builtins fixtures/list.pyi]
+
+[file m/semanal.py.2]
+from m.nodes import OverloadedFuncDef
+from m.types import Overloaded
+
+class C:
+    def func(self, defn: OverloadedFuncDef):
+        defn.type = Overloaded()
+        defn.type.line = 0
+
+extra = 1
+
+[out1]
+[out2]
+
+[case testErrorsAffectDependentsOnly]
+# cmd: mypy -m m.a m.b m.c
+[file m/__init__.py]
+[file m/a.py]
+1 + ''  # Deliberate error
+[file m/b.py]
+import m.a  # Depends on module with error
+[file m/c.py]
+import m  # No error here
+[rechecked m.a, m.b]
+[out1]
+tmp/m/a.py:1: error: Unsupported operand types for + ("int" and "str")
+[out2]
+tmp/m/a.py:1: error: Unsupported operand types for + ("int" and "str")
diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test
index 541b0dc..cbfd940 100644
--- a/test-data/unit/check-inference-context.test
+++ b/test-data/unit/check-inference-context.test
@@ -911,3 +911,11 @@ class M(Generic[_KT, _VT]):
 
 def f(d: M[_KT, _VT], k: _KT) -> Union[_VT, None]:
     return d.get(k, None)
+
+[case testLambdaDeferredCrash]
+from typing import Callable
+
+class C:
+    def f(self) -> None:
+        g: Callable[[], int] = lambda: 1 or self.x
+        self.x = int()
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index 8eb9f18..a904cac 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -421,7 +421,7 @@ a = None # type: A
 
 def ff() -> None:
     x = f() # E: Need type annotation for variable
-    reveal_type(x)
+    reveal_type(x) # E: Revealed type is 'Any'
 
 g(None) # Ok
 f()     # Ok because not used to infer local variable type
@@ -1946,3 +1946,52 @@ def main() -> None:
     reveal_type(b) # E: Revealed type is 'builtins.int'
 [builtins fixtures/tuple.pyi]
 [out]
+
+[case testDontMarkUnreachableAfterInferenceUninhabited]
+from typing import TypeVar
+T = TypeVar('T')
+def f() -> T: pass
+
+class C:
+    x = f() # E: Need type annotation for variable
+    def m(self) -> str:
+        return 42 # E: Incompatible return value type (got "int", expected "str")
+
+if bool():
+    f()
+    1 + '' # E: Unsupported left operand type for + ("int")
+[builtins fixtures/list.pyi]
+[out]
+
+[case testDontMarkUnreachableAfterInferenceUninhabited2]
+# flags: --strict-optional
+from typing import TypeVar, Optional
+T = TypeVar('T')
+def f(x: Optional[T] = None) -> T: pass
+
+class C:
+    x = f() # E: Need type annotation for variable
+    def m(self) -> str:
+        return 42 # E: Incompatible return value type (got "int", expected "str")
+
+if bool():
+    f()
+    1 + '' # E: Unsupported left operand type for + ("int")
+[builtins fixtures/list.pyi]
+[out]
+
+[case testDontMarkUnreachableAfterInferenceUninhabited3]
+from typing import TypeVar, List
+T = TypeVar('T')
+def f(x: List[T]) -> T: pass
+
+class C:
+    x = f([]) # E: Need type annotation for variable
+    def m(self) -> str:
+        return 42 # E: Incompatible return value type (got "int", expected "str")
+
+if bool():
+    f([])
+    1 + '' # E: Unsupported left operand type for + ("int")
+[builtins fixtures/list.pyi]
+[out]
diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test
index fade8bf..a9f243d 100644
--- a/test-data/unit/check-isinstance.test
+++ b/test-data/unit/check-isinstance.test
@@ -1975,3 +1975,33 @@ def f() -> None:
 [typing fixtures/typing-full.pyi]
 [builtins fixtures/dict.pyi]
 [out]
+
+[case testIsinstanceWidensWithAnyArg]
+from typing import Any
+class A: ...
+B: Any
+x: A
+x.foo()  # E: "A" has no attribute "foo"
+assert isinstance(x, B)
+x.foo()
+reveal_type(x)  # E: Revealed type is 'Any'
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceWidensUnionWithAnyArg]
+from typing import Any, Union
+class A: ...
+B: Any
+x: Union[A, B]
+reveal_type(x)  # E: Revealed type is 'Union[__main__.A, Any]'
+assert isinstance(x, B)
+reveal_type(x)  # E: Revealed type is 'Any'
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceIgnoredImport]
+from typing import Union
+from foo import A  # type: ignore
+def f(x: Union[A, str]) -> None:
+    x.method_only_in_a()  # E: Item "str" of "Union[Any, str]" has no attribute "method_only_in_a"
+    if isinstance(x, A):
+        x.method_only_in_a()
+[builtins fixtures/isinstance.pyi]
diff --git a/test-data/unit/check-kwargs.test b/test-data/unit/check-kwargs.test
index 3d682b4..f65b626 100644
--- a/test-data/unit/check-kwargs.test
+++ b/test-data/unit/check-kwargs.test
@@ -117,16 +117,16 @@ f(A())
 f(A(), B()) # E: Too many positional arguments for "f"
 g(A(), b=B())
 g(b=B(), a=A())
-g(A()) # E: Missing named argument "b" for function "g"
+g(A()) # E: Missing named argument "b" for "g"
 g(A(), B()) # E: Too many positional arguments for "g"
-h(A()) # E: Missing named argument "b" for function "h" # E: Missing named argument "aa" for function "h"
-h(A(), b=B()) # E: Missing named argument "aa" for function "h"
-h(A(), aa=A()) # E: Missing named argument "b" for function "h"
+h(A()) # E: Missing named argument "b" for "h" # E: Missing named argument "aa" for "h"
+h(A(), b=B()) # E: Missing named argument "aa" for "h"
+h(A(), aa=A()) # E: Missing named argument "b" for "h"
 h(A(), b=B(), aa=A())
 h(A(), aa=A(), b=B())
-i(A()) # E: Missing named argument "b" for function "i"
+i(A()) # E: Missing named argument "b" for "i"
 i(A(), b=B())
-i(A(), aa=A()) # E: Missing named argument "b" for function "i"
+i(A(), aa=A()) # E: Missing named argument "b" for "i"
 i(A(), b=B(), aa=A())
 i(A(), aa=A(), b=B())
 
@@ -146,16 +146,16 @@ f(A())
 f(A(), B()) # E: Too many positional arguments for "f"
 g(A(), b=B())
 g(b=B(), a=A())
-g(A()) # E: Missing named argument "b" for function "g"
+g(A()) # E: Missing named argument "b" for "g"
 g(A(), B()) # E: Too many positional arguments for "g"
-h(A()) # E: Missing named argument "b" for function "h" # E: Missing named argument "aa" for function "h"
-h(A(), b=B()) # E: Missing named argument "aa" for function "h"
-h(A(), aa=A()) # E: Missing named argument "b" for function "h"
+h(A()) # E: Missing named argument "b" for "h" # E: Missing named argument "aa" for "h"
+h(A(), b=B()) # E: Missing named argument "aa" for "h"
+h(A(), aa=A()) # E: Missing named argument "b" for "h"
 h(A(), b=B(), aa=A())
 h(A(), aa=A(), b=B())
-i(A()) # E: Missing named argument "b" for function "i"
+i(A()) # E: Missing named argument "b" for "i"
 i(A(), b=B())
-i(A(), aa=A()) # E: Missing named argument "b" for function "i"
+i(A(), aa=A()) # E: Missing named argument "b" for "i"
 i(A(), b=B(), aa=A())
 i(A(), aa=A(), b=B())
 
diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test
index 9726dad..743ed6d 100644
--- a/test-data/unit/check-namedtuple.test
+++ b/test-data/unit/check-namedtuple.test
@@ -260,8 +260,8 @@ reveal_type(x._replace())  # E: Revealed type is 'Tuple[Any, Any, fallback=__mai
 x._replace(y=5)
 x._replace(x=3)
 x._replace(x=3, y=5)
-x._replace(z=5)  # E: Unexpected keyword argument "z" for X._replace
-x._replace(5)  # E: Too many positional arguments for X._replace
+x._replace(z=5)  # E: Unexpected keyword argument "z" for "_replace" of "X"
+x._replace(5)  # E: Too many positional arguments for "_replace" of "X"
 
 [case testNamedTupleReplaceAsClass]
 from collections import namedtuple
@@ -269,7 +269,7 @@ from collections import namedtuple
 X = namedtuple('X', ['x', 'y'])
 x = None  # type: X
 X._replace(x, x=1, y=2)
-X._replace(x=1, y=2)  # E: Missing positional argument "self" in call to X._replace
+X._replace(x=1, y=2)  # E: Missing positional argument "self" in call to "_replace" of "X"
 
 
 [case testNamedTupleReplaceTyped]
@@ -279,19 +279,19 @@ X = NamedTuple('X', [('x', int), ('y', str)])
 x = None  # type: X
 reveal_type(x._replace())  # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
 x._replace(x=5)
-x._replace(y=5)  # E: Argument 1 to X._replace has incompatible type "int"; expected "str"
+x._replace(y=5)  # E: Argument 1 to "_replace" of "X" has incompatible type "int"; expected "str"
 
 [case testNamedTupleMake]
 from typing import NamedTuple
 
 X = NamedTuple('X', [('x', int), ('y', str)])
 reveal_type(X._make([5, 'a']))  # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
-X._make('a b')  # E: Argument 1 to X._make has incompatible type "str"; expected "Iterable[Any]"
+X._make('a b')  # E: Argument 1 to "_make" of "X" has incompatible type "str"; expected "Iterable[Any]"
 
 -- # FIX: not a proper class method
 -- x = None  # type: X
 -- reveal_type(x._make([5, 'a']))  # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
--- x._make('a b')  # E: Argument 1 to X._make has incompatible type "str"; expected Iterable[Any]
+-- x._make('a b')  # E: Argument 1 to "_make" of "X" has incompatible type "str"; expected Iterable[Any]
 
 [builtins fixtures/list.pyi]
 
@@ -638,3 +638,14 @@ class A(NamedTuple):
 
 class B:
     pass
+
+[case testTypeNamedTupleClassmethod]
+from typing import Type, NamedTuple
+class D(NamedTuple):
+    @classmethod
+    def f(cls) -> None: pass
+
+d: Type[D]
+d.g()  # E: "Type[D]" has no attribute "g"
+d.f()
+[builtins fixtures/classmethod.pyi]
diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test
index 1386432..b7f6b04 100644
--- a/test-data/unit/check-optional.test
+++ b/test-data/unit/check-optional.test
@@ -642,3 +642,34 @@ def test_or_shortcut(value: Optional[Any]) -> None:
     if not value or value.get('foo') == 'hello':
         pass
 [builtins fixtures/bool.pyi]
+
+[case testNarrowingFromObjectToOptional]
+from typing import Optional
+x: object
+y: Optional[int]
+x = y
+reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+[out]
+
+[case testNarrowOptionalOutsideLambda]
+from typing import Optional
+
+class A:
+    a: int
+
+def f(x: Optional[A]) -> None:
+    assert x
+    lambda: x.a
+[builtins fixtures/isinstancelist.pyi]
+
+[case testNarrowOptionalOutsideLambdaWithDeferred]
+from typing import Optional
+
+class A:
+    a: int
+
+    def f(self, x: Optional['A']) -> None:
+        assert x
+        lambda: (self.y, x.a) # E: Cannot determine type of 'y'
+        self.y = int()
+[builtins fixtures/isinstancelist.pyi]
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
index def8818..3b0abad 100644
--- a/test-data/unit/check-overloading.test
+++ b/test-data/unit/check-overloading.test
@@ -1319,3 +1319,83 @@ class MyInt:
     @overload
     def __init__(self, x: str, y: int) -> None: pass
 [out]
+
+[case testOverloadTupleInstance]
+from typing import overload, Tuple, Any
+
+class A: ...
+class A1(A): ...
+class B: ...
+class C: ...
+class D: ...
+
+@overload
+def f(x: A) -> A: ...
+@overload
+def f(x: Tuple[C]) -> B: ...
+@overload
+def f(x: Tuple[A1, int]) -> C: ...  # E: Overloaded function signatures 3 and 5 overlap with incompatible return types
+@overload
+def f(x: Tuple[A, str]) -> D: ...
+@overload
+def f(x: Tuple[A, int]) -> D: ...
+@overload
+def f(x: Tuple[()]) -> D: ...
+def f(x: Any) -> Any:...
+
+[case testOverloadTupleEllipsisNumargs]
+from typing import overload, Tuple, Any
+
+class A: ...
+class B: ...
+
+@overload
+def r1(x: Tuple[()]) -> B: ...  # E: Overloaded function signatures 1 and 4 overlap with incompatible return types
+@overload
+def r1(x: Tuple[A]) -> B: ...  # E: Overloaded function signatures 2 and 4 overlap with incompatible return types
+@overload
+def r1(x: Tuple[A, A]) -> B: ...  # E: Overloaded function signatures 3 and 4 overlap with incompatible return types
+@overload
+def r1(x: Tuple[A, ...]) -> A: ...
+def r1(x: Any) -> Any: ...
+
+@overload
+def r2(x: Tuple[A, ...]) -> A: ...
+@overload
+def r2(x: Tuple[A, A]) -> B: ...
+@overload
+def r2(x: Tuple[A]) -> B: ...
+@overload
+def r2(x: Tuple[()]) -> B: ...
+def r2(x: Any) -> Any: ...
+
+[builtins fixtures/tuple.pyi]
+
+[case testOverloadTupleEllipsisVariance]
+from typing import overload, Tuple, Any
+
+class A: ...
+class A1(A): ...
+class B: ...
+class C: ...
+class D: ...
+
+@overload
+def r(x: Tuple[A1, ...]) -> A: ...  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+@overload
+def r(x: Tuple[A, ...]) -> B: ...
+@overload
+def r(x: Tuple[B, ...]) -> C: ...
+def r(x: Any) -> Any:...
+
+@overload
+def g(x: A) -> A: ...
+@overload
+def g(x: Tuple[A1, ...]) -> B: ...  # E: Overloaded function signatures 2 and 3 overlap with incompatible return types
+@overload
+def g(x: Tuple[A, A]) -> C: ...
+@overload
+def g(x: Tuple[A, B]) -> D: ...
+def g(x: Any) -> Any:...
+
+[builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
index ab3cd6e..6886c26 100644
--- a/test-data/unit/check-tuples.test
+++ b/test-data/unit/check-tuples.test
@@ -940,3 +940,30 @@ y = ""
 reveal_type(t[x:])  # E: Revealed type is 'Union[builtins.int, builtins.str]'
 t[y:]  # E: Slice index must be an integer or None
 [builtins fixtures/tuple.pyi]
+
+[case testInferTupleTypeFallbackAgainstInstance]
+from typing import TypeVar, Generic, Tuple
+T = TypeVar('T')
+
+class Base(Generic[T]): pass
+def f(x: Base[T]) -> T: pass
+
+class DT(Tuple[str, str], Base[int]):
+    pass
+
+reveal_type(f(DT())) # E: Revealed type is 'builtins.int*'
+
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testTypeTupleClassmethod]
+from typing import Tuple, Type
+
+class C(Tuple[int, str]):
+    @classmethod
+    def f(cls) -> None: pass
+
+t: Type[C]
+t.g()  # E: "Type[C]" has no attribute "g"
+t.f()
+[builtins fixtures/classmethod.pyi]
diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test
index eab5e3f..09aef74 100644
--- a/test-data/unit/check-typevar-values.test
+++ b/test-data/unit/check-typevar-values.test
@@ -573,3 +573,18 @@ from typing import Sequence, Iterable, TypeVar
 S = TypeVar('S', Sequence, Iterable)
 def my_len(s: S) -> None: pass
 def crash() -> None: my_len((0,))
+
+[case testReferenceToDecoratedFunctionAndTypeVarValues]
+from typing import TypeVar, Callable
+
+T = TypeVar('T')
+S = TypeVar('S', int, str)
+
+def dec(f: Callable[..., T]) -> Callable[..., T]: ...
+
+@dec
+def g(s: S) -> Callable[[S], None]: ...
+
+def f(x: S) -> None:
+    h = g(x)
+    h(x)
diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test
index 465b180..0f40b36 100644
--- a/test-data/unit/check-unions.test
+++ b/test-data/unit/check-unions.test
@@ -902,10 +902,10 @@ x: object
 a: Any
 d: Dict[str, Tuple[List[Tuple[str, str]], str]]
 x, _ = d.get(a, (None, None))
-# FIXME: fix narrow_declared_type for narrowed Optional types.
-reveal_type(x) # E: Revealed type is 'builtins.list[Tuple[builtins.str, builtins.str]]'
+reveal_type(x) # E: Revealed type is 'Union[builtins.list[Tuple[builtins.str, builtins.str]], builtins.None]'
 
-for y in x: pass
+if x:
+    for y in x: pass
 [builtins fixtures/dict.pyi]
 [out]
 
diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test
index 245bcd2..b861543 100644
--- a/test-data/unit/check-unreachable-code.test
+++ b/test-data/unit/check-unreachable-code.test
@@ -390,7 +390,7 @@ class C:
 [builtins fixtures/ops.pyi]
 [out]
 
-[case testSysPlatformInFunctionImport]
+[case testSysPlatformInFunctionImport1]
 import sys
 def foo() -> None:
     if sys.platform != 'fictional':
@@ -403,6 +403,36 @@ x = 1
 [builtins fixtures/ops.pyi]
 [out]
 
+[case testSysPlatformInFunctionImport2]
+import sys
+def foo() -> None:
+    if sys.platform == 'fictional':
+        import b as a
+    else:
+        import a
+    a.x
+[file a.py]
+x = 1
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysPlatformInMethodImport2]
+import sys
+class A:
+    def foo(self) -> None:
+        if sys.platform == 'fictional':
+            # TODO: This is inconsistent with how top-level functions work
+            #     (https://github.com/python/mypy/issues/4324)
+            import b as a # E: Cannot find module named 'b' \
+            # N: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+        else:
+            import a
+        a.x
+[file a.py]
+x = 1
+[builtins fixtures/ops.pyi]
+[out]
+
 [case testCustomSysVersionInfo]
 # flags: --python-version 3.5
 import sys
diff --git a/test-data/unit/check-warnings.test b/test-data/unit/check-warnings.test
index ef2f683..4acbfe6 100644
--- a/test-data/unit/check-warnings.test
+++ b/test-data/unit/check-warnings.test
@@ -197,3 +197,15 @@ class Test(object):
             return None
 [builtins fixtures/list.pyi]
 [out]
+
+[case testReturnAnyDeferred]
+# flags: --warn-return-any
+def foo(a1: A) -> int:
+    if a1._x:
+        return 1
+    n = 1
+    return n
+
+class A:
+    def __init__(self, x: int) -> None:
+        self._x = x
diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test
index 8464fe0..9ed8e60 100644
--- a/test-data/unit/cmdline.test
+++ b/test-data/unit/cmdline.test
@@ -176,76 +176,6 @@ z.py:1: error: Function is missing a type annotation
 z.py:4: error: Call to untyped function "f" in typed context
 x.py:1: error: Function is missing a type annotation
 
-[case testPerFileConfigSectionUntypedWithDisallowUnannotated]
-# cmd: mypy w.py x.py y.py z.py
-[file mypy.ini]
-[[mypy]
-disallow_any = unannotated
-[[mypy-y*]
-disallow_any =
-[[mypy-z*]
-disallow_untyped_defs = True
-[[mypy-w*]
-disallow_untyped_defs = False
-[file x.py]
-def f(a):
-    pass
-[file y.py]
-def f(a):
-    pass
-[file z.py]
-def f(a):
-    pass
-[file w.py]
-def f(a):
-    pass
-[out]
-z.py:1: error: Function is missing a type annotation
-x.py:1: error: Function is missing a type annotation
-
-[case testPerFileConfigSectionDisallowUnannotatedWithUntyped]
-# cmd: mypy x.py y.py z.py
-[file mypy.ini]
-[[mypy]
-disallow_untyped_defs = True
-[[mypy-y*]
-disallow_any =
-[[mypy-z*]
-disallow_any = unannotated
-[file x.py]
-def f(a):
-    pass
-[file y.py]
-def f(a):
-    pass
-[file z.py]
-def f(a):
-    pass
-[out]
-z.py:1: error: Function is missing a type annotation
-x.py:1: error: Function is missing a type annotation
-
-[case testPerFileConfigSectionDisallowUnannotatedNoOverride]
-# cmd: mypy x.py y.py z.py
-[file mypy.ini]
-[[mypy]
-[[mypy-x*]
-disallow_untyped_defs = True
-[[mypy-z*]
-disallow_any = unannotated
-[file x.py]
-def f(a):
-    pass
-[file y.py]
-def f(a):
-    pass
-[file z.py]
-def f(a):
-    pass
-[out]
-z.py:1: error: Function is missing a type annotation
-x.py:1: error: Function is missing a type annotation
-
 [case testPerFileConfigSectionMultipleMatches]
 # cmd: mypy xx.py xy.py yx.py yy.py
 [file mypy.ini]
@@ -595,7 +525,7 @@ python_version = 3.6
 # cmd: mypy main.py
 [file mypy.ini]
 [[mypy]
-disallow_any = unimported
+disallow_any_unimported = True
 ignore_missing_imports = True
 [file main.py]
 from unreal import F
@@ -604,24 +534,12 @@ def f(x: F) -> None: pass
 [out]
 main.py:3: error: Argument 1 to "f" becomes "Any" due to an unfollowed import
 
-[case testDisallowAnyEmpty]
-# cmd: mypy main.py
-[file mypy.ini]
-[[mypy]
-disallow_any =
-ignore_missing_imports = True
-[file main.py]
-from unreal import F
-
-def f(x: F) -> None: pass
-[out]
-
 [case testDisallowAnyExplicitDefSignature]
 # cmd: mypy m.py
 [file mypy.ini]
 [[mypy]
 [[mypy-m*]
-disallow_any = explicit
+disallow_any_explicit = True
 
 [file m.py]
 from typing import Any, List
@@ -646,7 +564,7 @@ m.py:9: error: Explicit "Any" is not allowed
 [file mypy.ini]
 [[mypy]
 [[mypy-m*]
-disallow_any = explicit
+disallow_any_explicit = True
 
 [file m.py]
 from typing import Any, List
@@ -666,7 +584,7 @@ m.py:5: error: Explicit "Any" is not allowed
 [file mypy.ini]
 [[mypy]
 [[mypy-m*]
-disallow_any = explicit
+disallow_any_explicit = True
 
 [file m.py]
 from typing import Any, List
@@ -680,7 +598,7 @@ m.py:2: error: Explicit "Any" is not allowed
 [file mypy.ini]
 [[mypy]
 [[mypy-m*]
-disallow_any = explicit
+disallow_any_explicit = True
 
 [file m.py]
 from typing import Any, List
@@ -700,7 +618,7 @@ m.py:6: error: Explicit "Any" is not allowed
 [file mypy.ini]
 [[mypy]
 [[mypy-m*]
-disallow_any = explicit
+disallow_any_explicit = True
 
 [file m.py]
 from typing import Any, List
@@ -722,7 +640,7 @@ m.py:4: error: Explicit "Any" is not allowed
 [file mypy.ini]
 [[mypy]
 [[mypy-m*]
-disallow_any = explicit
+disallow_any_explicit = True
 
 [file m.py]
 from typing import Any, List, TypeVar, Tuple
@@ -747,7 +665,7 @@ m.py:10: error: Explicit "Any" is not allowed
 [file mypy.ini]
 [[mypy]
 [[mypy-m*]
-disallow_any = explicit
+disallow_any_explicit = True
 
 [file m.py]
 from typing import Any, List, cast
@@ -765,7 +683,7 @@ m.py:5: error: Explicit "Any" is not allowed
 [file mypy.ini]
 [[mypy]
 [[mypy-m*]
-disallow_any = explicit
+disallow_any_explicit = True
 
 [file m.py]
 from typing import Any, List, NamedTuple
@@ -782,7 +700,7 @@ m.py:3: error: Explicit "Any" is not allowed
 [file mypy.ini]
 [[mypy]
 [[mypy-m*]
-disallow_any = explicit
+disallow_any_explicit = True
 
 [file m.py]
 from typing import Any, List, TypeVar
@@ -797,12 +715,12 @@ m.py:3: error: Explicit "Any" is not allowed
 [file mypy.ini]
 [[mypy]
 [[mypy-m*]
-disallow_any = explicit
+disallow_any_explicit = True
 
 [file m.py]
 from typing import Any, List, NewType
 
-Baz = NewType('Baz', Any)  # this error does not come from `--disallow-any=explicit` flag
+Baz = NewType('Baz', Any)  # this error does not come from `--disallow-any-explicit` flag
 Bar = NewType('Bar', List[Any])
 
 [out]
@@ -815,7 +733,7 @@ m.py:4: error: Explicit "Any" is not allowed
 [file mypy.ini]
 [[mypy]
 [[mypy-m*]
-disallow_any = explicit
+disallow_any_explicit = True
 
 [file m.py]
 from mypy_extensions import TypedDict
@@ -833,7 +751,7 @@ m.py:4: error: Explicit "Any" is not allowed
 [file mypy.ini]
 [[mypy]
 [[mypy-m*]
-disallow_any = explicit
+disallow_any_explicit = True
 
 [file m.py]
 from mypy_extensions import TypedDict
@@ -849,7 +767,7 @@ m.py:4: error: Explicit "Any" is not allowed
 [file mypy.ini]
 [[mypy]
 [[mypy-m]
-disallow_any = generics
+disallow_any_generics = True
 
 [file m.py]
 from typing import Tuple
@@ -870,7 +788,7 @@ m.py:8: error: Missing type parameters for generic type
 [file mypy.ini]
 [[mypy]
 [[mypy-m]
-disallow_any = generics
+disallow_any_generics = True
 
 [file m.py]
 from typing import Tuple, List
@@ -885,7 +803,7 @@ m.py:3: error: Missing type parameters for generic type
 [file mypy.ini]
 [[mypy]
 [[mypy-m]
-disallow_any = generics
+disallow_any_generics = True
 
 [file m.py]
 from typing import Type, Any
@@ -905,7 +823,7 @@ m.py:8: error: Missing type parameters for generic type
 [file mypy.ini]
 [[mypy]
 [[mypy-m]
-disallow_any = generics
+disallow_any_generics = True
 
 [file m.py]
 from typing import List
@@ -922,7 +840,7 @@ m.py:5: error: Missing type parameters for generic type
 [file mypy.ini]
 [[mypy]
 [[mypy-m]
-disallow_any = generics
+disallow_any_generics = True
 
 [file m.py]
 from typing import List, TypeVar, Tuple
@@ -946,7 +864,7 @@ m.py:11: error: Missing type parameters for generic type
 [file mypy.ini]
 [[mypy]
 [[mypy-m]
-disallow_any = generics
+disallow_any_generics = True
 
 [file m.py]
 from typing import List
@@ -970,7 +888,7 @@ m.py:9: error: Missing type parameters for generic type
 [file mypy.ini]
 [[mypy]
 [[mypy-m]
-disallow_any = generics
+disallow_any_generics = True
 
 [file m.py]
 from typing import Generic, TypeVar, Any
@@ -993,7 +911,7 @@ m.py:10: error: Missing type parameters for generic type
 [file mypy.ini]
 [[mypy]
 [[mypy-m]
-disallow_any = generics
+disallow_any_generics = True
 
 [file m.py]
 s = tuple([1, 2, 3])  # no error
@@ -1015,7 +933,7 @@ m.py:7: error: Implicit generic "Any". Use 'typing.FrozenSet' and specify generi
 [file mypy.ini]
 [[mypy]
 [[mypy-m]
-disallow_any = generics
+disallow_any_generics = True
 
 [file m.py]
 from typing import Tuple, List, Dict, Set, FrozenSet
@@ -1084,7 +1002,7 @@ a/b/c/d/e/__init__.py:1: error: "int" not callable
 [file mypy.ini]
 [[mypy]
 disallow_untyped_defs = True
-disallow_any = generics
+disallow_any_generics = True
 [file a.py]
 def get_tasks(self):
     return 'whatever'
diff --git a/test-data/unit/deps-classes.test b/test-data/unit/deps-classes.test
new file mode 100644
index 0000000..e10cf46
--- /dev/null
+++ b/test-data/unit/deps-classes.test
@@ -0,0 +1,21 @@
+-- Test cases for generating fine-grained dependencies for classes.
+--
+-- The dependencies are used for fined-grained incremental checking.
+
+-- TODO: Move class related test cases from deps.test to here
+
+[case testNamedTuple]
+from typing import NamedTuple, Any
+from a import A
+N = NamedTuple('N', [('a', 'A')])
+
+def f(a: Any) -> None:
+    n = N(a)
+    n.a
+[file a.py]
+class A: pass
+[out]
+<m.N.__init__> -> m.f
+<m.N.a> -> m.f
+<m.N> -> m.f
+<a.A> -> <m.N.a>, m
diff --git a/test-data/unit/deps-expressions.test b/test-data/unit/deps-expressions.test
new file mode 100644
index 0000000..de46f67
--- /dev/null
+++ b/test-data/unit/deps-expressions.test
@@ -0,0 +1,435 @@
+-- Test cases for generating fine-grained dependencies for expressions.
+--
+-- The dependencies are used for fined-grained incremental checking.
+
+[case testListExpr]
+def f() -> int: pass
+def g() -> None:
+    a = [f()]
+[builtins fixtures/list.pyi]
+[out]
+<m.f> -> m.g
+
+[case testDictExpr]
+def f1() -> int: pass
+def f2() -> int: pass
+def g() -> None:
+    a = {f1(): 1, 2: f2()}
+[builtins fixtures/dict.pyi]
+[out]
+<m.f1> -> m.g
+<m.f2> -> m.g
+
+[case testSetExpr]
+def f() -> int: pass
+def g() -> None:
+    a = {f()}
+[builtins fixtures/set.pyi]
+[out]
+<m.f> -> m.g
+
+[case testTupleExpr]
+def f1() -> int: pass
+def f2() -> int: pass
+def g() -> None:
+    a = (f1(), f2())
+[builtins fixtures/tuple.pyi]
+[out]
+<m.f1> -> m.g
+<m.f2> -> m.g
+
+[case testListComprehension]
+from typing import Iterator
+
+class A:
+    def __iter__(self) -> Iterator[int]: pass
+
+def f1() -> int: pass
+def f2() -> int: pass
+def g() -> None:
+    a = [f1() for x in A() if f2()]
+[builtins fixtures/list.pyi]
+[out]
+<m.A.__init__> -> m.g
+<m.A.__iter__> -> m.g
+<m.A> -> m.A, m.g
+<m.f1> -> m.g
+<m.f2> -> m.g
+
+[case testSetComprehension]
+from typing import Set
+def f1() -> int: pass
+def f2() -> Set[int]: pass
+def f3() -> int: pass
+def g() -> None:
+    a = {f1() for x in f2() if f3()}
+[builtins fixtures/set.pyi]
+[out]
+<m.f1> -> m.g
+<m.f2> -> m.g
+<m.f3> -> m.g
+
+[case testDictComprehension]
+from typing import Iterator
+
+class A:
+    def __iter__(self) -> Iterator[int]: pass
+
+def f1() -> int: pass
+def f2() -> int: pass
+def f3() -> int: pass
+def g() -> None:
+    a = {f1(): f2() for x in A() if f3()}
+[builtins fixtures/dict.pyi]
+[out]
+<m.A.__init__> -> m.g
+<m.A.__iter__> -> m.g
+<m.A> -> m.A, m.g
+<m.f1> -> m.g
+<m.f2> -> m.g
+<m.f3> -> m.g
+
+[case testGeneratorExpr]
+from typing import List
+def f1() -> int: pass
+def f2() -> List[int]: pass
+def f3() -> int: pass
+def g() -> None:
+    a = (f1() for x in f2() if f3())
+[builtins fixtures/list.pyi]
+[out]
+<m.f1> -> m.g
+<m.f2> -> m.g
+<m.f3> -> m.g
+
+[case testConditionalExpr]
+def f1() -> int: pass
+def f2() -> int: pass
+def f3() -> int: pass
+def g() -> None:
+    a = f1() if f2() else f3()
+[out]
+<m.f1> -> m.g
+<m.f2> -> m.g
+<m.f3> -> m.g
+
+[case testAwaitExpr]
+def f(): pass
+async def g() -> None:
+    x = await f()
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+[out]
+<m.f> -> m.g
+
+[case testStarExpr]
+from typing import Iterator
+
+class A:
+    def __iter__(self) -> Iterator[int]: pass
+
+def g() -> None:
+    a = [*A()]
+[builtins fixtures/list.pyi]
+[out]
+<m.A.__init__> -> m.g
+<m.A.__iter__> -> m.g
+<m.A> -> m.A, m.g
+
+[case testCast]
+from typing import cast
+class A: pass
+def f() -> object: pass
+def g() -> None:
+    x = cast(A, f())
+[out]
+<m.A> -> m.A, m.g
+<m.f> -> m.g
+
+[case testTypeApplication]
+from typing import TypeVar, Generic
+
+T = TypeVar('T')
+S = TypeVar('S')
+
+class A(Generic[T, S]):
+    def __init__(self, x): pass
+class B: pass
+class C: pass
+
+def f() -> int: pass
+
+def g() -> None:
+    x = A[B, C](f())
+[out]
+<m.A.__init__> -> m.g
+<m.A> -> m.A, m.g
+<m.B> -> m.B, m.g
+<m.C> -> m.C, m.g
+<m.S> -> m.A
+<m.T> -> m.A
+<m.f> -> m.g
+
+[case testIndexExpr]
+class A:
+    def __getitem__(self, x: int) -> int: pass
+
+def f1() -> A: pass
+def f2() -> int: pass
+
+def g(a: A) -> int:
+    return f1()[f2()]
+[out]
+<m.A.__getitem__> -> m.g
+<m.A> -> <m.f1>, <m.g>, m.A, m.f1, m.g
+<m.f1> -> m.g
+<m.f2> -> m.g
+
+[case testIndexExpr]
+class A:
+    def __getitem__(self, x: int) -> int: pass
+
+def f1() -> A: pass
+def f2() -> int: pass
+
+def g(a: A) -> int:
+    return f1()[f2()]
+[out]
+<m.A.__getitem__> -> m.g
+<m.A> -> <m.f1>, <m.g>, m.A, m.f1, m.g
+<m.f1> -> m.g
+<m.f2> -> m.g
+
+[case testIndexExprLvalue]
+class A:
+    def __setitem__(self, x: int, y: int) -> None: pass
+
+def f1() -> A: pass
+def f2() -> int: pass
+def f3() -> int: pass
+
+def g(a: A) -> None:
+    f1()[f2()] = f3()
+[out]
+-- __getitem__ dependency is redundant but harmless
+<m.A.__getitem__> -> m.g
+<m.A.__setitem__> -> m.g
+<m.A> -> <m.f1>, <m.g>, m.A, m.f1, m.g
+<m.f1> -> m.g
+<m.f2> -> m.g
+<m.f3> -> m.g
+
+[case testUnaryExpr]
+class A:
+    def __neg__(self) -> int: pass
+    def __pos__(self) -> int: pass
+    def __invert__(self) -> int: pass
+
+def f1() -> A: pass
+def f2() -> A: pass
+def f3() -> A: pass
+
+def g1() -> int:
+    return +f1()
+def g2() -> int:
+    return -f2()
+def g3() -> int:
+    return ~f3()
+[out]
+<m.A.__invert__> -> m.g3
+<m.A.__neg__> -> m.g2
+<m.A.__pos__> -> m.g1
+<m.A> -> <m.f1>, <m.f2>, <m.f3>, m.A, m.f1, m.f2, m.f3
+<m.f1> -> m.g1
+<m.f2> -> m.g2
+<m.f3> -> m.g3
+
+[case testOpExpr]
+class A:
+    def __add__(self, x: 'B') -> int: pass
+class B: pass
+
+def f() -> int:
+    a: A
+    b: B
+    return a + b
+[out]
+<m.A.__add__> -> m.f
+<m.A> -> m.A, m.f
+<m.B.__radd__> -> m.f
+<m.B> -> <m.A.__add__>, m.A.__add__, m.B, m.f
+
+[case testComparisonExpr]
+class A:
+    def __lt__(self, x: 'B') -> int: pass
+class B: pass
+
+def f() -> int:
+    return A() < B()
+[out]
+<m.A.__init__> -> m.f
+<m.A.__lt__> -> m.f
+<m.A> -> m.A, m.f
+<m.B.__gt__> -> m.f
+<m.B.__init__> -> m.f
+<m.B> -> <m.A.__lt__>, m.A.__lt__, m.B, m.f
+
+[case testIsOp-skip]
+class A: pass
+class B: pass
+
+def f() -> bool:
+    return A() is B()
+[builtins fixtures/bool.pyi]
+[out]
+-- fails because of https://github.com/python/mypy/issues/4055
+<m.A.__init__> -> m.f
+<m.A> -> m.A, m.f
+<m.B.__init__> -> m.f
+<m.B> -> m.B, m.f
+
+[case testInOp]
+class A:
+    def __contains__(self, x: B) -> int: pass
+class B: pass
+
+def f() -> int:
+    return B() in A()
+[out]
+<m.A.__contains__> -> m.f
+<m.A.__init__> -> m.f
+<m.A> -> m.A, m.f
+<m.B.__init__> -> m.f
+<m.B> -> <m.A.__contains__>, m.A.__contains__, m.B, m.f
+
+[case testComparisonExprWithMultipleOperands]
+class A:
+    def __lt__(self, x: 'B') -> int: pass
+class B: pass
+class C:
+    def __ge__(self, x: 'B') -> int: pass
+
+def f() -> int:
+    return A() < B() <= C()
+[out]
+<m.A.__init__> -> m.f
+<m.A.__lt__> -> m.f
+<m.A> -> m.A, m.f
+<m.B.__gt__> -> m.f
+<m.B.__init__> -> m.f
+<m.B.__le__> -> m.f
+<m.B> -> <m.A.__lt__>, <m.C.__ge__>, m.A.__lt__, m.B, m.C.__ge__, m.f
+<m.C.__ge__> -> m.f
+<m.C.__init__> -> m.f
+<m.C> -> m.C, m.f
+
+[case testOperatorWithTupleOperand]
+from typing import Tuple
+
+class C(Tuple[int, str]):
+    def __and__(self, x: D) -> int: pass
+    def __neg__(self) -> int: pass
+class D: pass
+
+def f() -> None:
+    c: C
+    d: D
+    x = c & d
+    y = -c
+[builtins fixtures/tuple.pyi]
+[out]
+<m.C.__and__> -> m.f
+<m.C.__neg__> -> m.f
+<m.C> -> m.C, m.f
+<m.D.__rand__> -> m.f
+<m.D> -> <m.C.__and__>, m.C.__and__, m.D, m.f
+
+[case testUnionTypeOperation]
+from typing import Union
+
+class A:
+    def __add__(self, x: str) -> int: pass
+class B:
+    def __add__(self, x: str) -> int: pass
+
+def f(a: Union[A, B]) -> int:
+    return a + ''
+[out]
+<m.A.__add__> -> m.f
+<m.A> -> <m.f>, m.A, m.f
+<m.B.__add__> -> m.f
+<m.B> -> <m.f>, m.B, m.f
+
+[case testBackquoteExpr_python2]
+def g(): # type: () -> int
+    pass
+def f(): # type: () -> str
+    return `g()`
+[out]
+<m.g> -> m.f
+
+[case testComparison_python2]
+class A:
+    def __cmp__(self, other): # type: (B) -> int
+        pass
+class B:
+    pass
+
+def f(a, b): # type: (A, B) -> None
+    x = a == b
+
+def g(a, b): # type: (A, B) -> None
+    x = a < b
+[out]
+<m.A.__cmp__> -> m.f, m.g
+<m.A.__eq__> -> m.f
+<m.A.__lt__> -> m.g
+<m.A> -> <m.f>, <m.g>, m.A, m.f, m.g
+<m.B.__cmp__> -> m.f, m.g
+<m.B.__eq__> -> m.f
+<m.B.__gt__> -> m.g
+<m.B> -> <m.A.__cmp__>, <m.f>, <m.g>, m.A.__cmp__, m.B, m.f, m.g
+
+[case testSliceExpr]
+class A:
+    def __getitem__(self, x) -> None: pass
+
+def f1() -> int: pass
+def f2() -> int: pass
+def f3() -> int: pass
+def f4() -> int: pass
+def f5() -> int: pass
+
+def f() -> None:
+    a: A
+    a[f1():f2():f3()]
+    a[f4():]
+    a[::f5()]
+[builtins fixtures/slice.pyi]
+[out]
+<m.A.__getitem__> -> m.f
+<m.A> -> m.A, m.f
+<m.f1> -> m.f
+<m.f2> -> m.f
+<m.f3> -> m.f
+<m.f4> -> m.f
+<m.f5> -> m.f
+
+[case testRevealTypeExpr]
+def f1() -> int: pass
+def f() -> None:
+    reveal_type(f1())  # type: ignore
+[out]
+<m.f1> -> m.f
+
+[case testLambdaExpr]
+from typing import Callable
+
+def f1(c: Callable[[int], str]) -> None: pass
+def f2() -> str: pass
+
+def g() -> None:
+    f1(lambda x: f2())
+[out]
+<m.f1> -> m.g
+<m.f2> -> m.g
diff --git a/test-data/unit/deps-generics.test b/test-data/unit/deps-generics.test
new file mode 100644
index 0000000..96902b3
--- /dev/null
+++ b/test-data/unit/deps-generics.test
@@ -0,0 +1,142 @@
+-- Test cases for generating fine-grained dependencies involving generics.
+--
+-- The dependencies are used for fine-grained incremental checking.
+
+[case testGenericFunction]
+from typing import TypeVar
+
+T = TypeVar('T')
+
+class A: pass
+
+def f(x: T) -> T:
+    y: T
+    z: A
+    return x
+[out]
+<m.A> -> m.A, m.f
+<m.T> -> <m.f>, m.f
+
+[case testGenericClass]
+from typing import TypeVar, Generic
+
+T = TypeVar('T')
+
+class A(Generic[T]): pass
+class B: pass
+
+def f() -> None:
+    a: A[B]
+[out]
+<m.A> -> m.A, m.f
+<m.B> -> m.B, m.f
+<m.T> -> m.A
+
+[case testGenericClassWithMembers]
+from typing import TypeVar, Generic
+
+T = TypeVar('T')
+
+class A(Generic[T]):
+    def g(self, a: T) -> None:
+        self.x = a
+    def f(self) -> T:
+        return self.x
+[out]
+<m.A.x> -> m.A.f, m.A.g
+<m.A> -> m.A
+<m.T> -> <m.A.f>, <m.A.g>, <m.A.x>, m.A, m.A.f, m.A.g
+
+[case testGenericClassInit]
+from typing import TypeVar, Generic
+
+T = TypeVar('T')
+
+class A(Generic[T]):
+    def __init__(self, a: T) -> None:
+        self.x = a
+
+class B: pass
+
+def f() -> None:
+    a = A(B())
+[out]
+<m.A.__init__> -> m.f
+<m.A.x> -> m.A.__init__
+<m.A> -> m.A, m.f
+<m.B.__init__> -> m.f
+<m.B> -> m.B, m.f
+<m.T> -> <m.A.__init__>, <m.A.x>, m.A, m.A.__init__
+
+[case testGenericMethod]
+from typing import TypeVar
+
+T = TypeVar('T')
+
+class A:
+    def f(self, x: T) -> T:
+        return x
+[out]
+<m.A> -> m.A
+<m.T> -> <m.A.f>, m.A.f
+
+[case testGenericBaseClass]
+from typing import TypeVar, Generic
+
+T = TypeVar('T')
+
+class A(Generic[T]): pass
+class B(A[C]): pass
+class C: pass
+[out]
+<m.A.__init__> -> <m.B.__init__>
+<m.A> -> m.A, m.B
+<m.B> -> m.B
+<m.C> -> m.B, m.C
+<m.T> -> m.A
+
+[case testGenericBaseClass2]
+from typing import TypeVar, Generic
+
+T = TypeVar('T')
+
+class A(Generic[T]): pass
+class B(A[T]): pass
+[out]
+<m.A.__init__> -> <m.B.__init__>
+<m.A> -> m.A, m.B
+<m.B> -> m.B
+<m.T> -> m.A, m.B
+
+[case testTypeVarBound]
+from typing import TypeVar, Tuple
+
+class A: pass
+class B: pass
+
+T = TypeVar('T', bound=Tuple[A, B])
+
+def f(x: T) -> T:
+    return x
+[out]
+<m.A> -> <m.T>, m.A
+<m.B> -> <m.T>, m.B
+<m.T> -> <m.f>, m.f
+
+[case testTypeVarBoundOperations]
+from typing import TypeVar, Tuple
+
+class A:
+    def f(self) -> None: pass
+    def __add__(self, other: int) -> int: pass
+
+T = TypeVar('T', bound=A)
+
+def f(x: T) -> None:
+    x.f()
+    x + 1
+[out]
+<m.A.__add__> -> m.f
+<m.A.f> -> m.f
+<m.A> -> <m.T>, m.A
+<m.T> -> <m.f>, m.f
diff --git a/test-data/unit/deps-statements.test b/test-data/unit/deps-statements.test
new file mode 100644
index 0000000..7356b04
--- /dev/null
+++ b/test-data/unit/deps-statements.test
@@ -0,0 +1,551 @@
+-- Test cases for generating fine-grained dependencies for statements.
+--
+-- The dependencies are used for fine-grained incremental checking.
+
+[case testIfStmt]
+def f1() -> int: pass
+def f2() -> None: pass
+def f3() -> int: pass
+def f4() -> None: pass
+def f5() -> None: pass
+
+def g() -> None:
+    if f1():
+        f2()
+    elif f3():
+        f4()
+    else:
+        f5()
+[out]
+<m.f1> -> m.g
+<m.f2> -> m.g
+<m.f3> -> m.g
+<m.f4> -> m.g
+<m.f5> -> m.g
+
+[case testWhileStmt]
+def f1() -> int: pass
+def f2() -> None: pass
+def f3() -> None: pass
+
+def g() -> None:
+    while f1():
+        f2()
+    else:
+        f3()
+[out]
+<m.f1> -> m.g
+<m.f2> -> m.g
+<m.f3> -> m.g
+
+[case testAssertStmt]
+def f1() -> int: pass
+def f2() -> str: pass
+def f3() -> int: pass
+
+def g() -> None:
+    assert f1(), f2()
+    assert f3()
+[out]
+<m.f1> -> m.g
+<m.f2> -> m.g
+<m.f3> -> m.g
+
+[case testRaiseStmt]
+def f1() -> BaseException: pass
+def f2() -> BaseException: pass
+
+def g1() -> None:
+    raise f1()
+
+def g2() -> None:
+    raise f1() from f2()
+[builtins fixtures/exception.pyi]
+[out]
+<m.f1> -> m.g1, m.g2
+<m.f2> -> m.g2
+
+[case testTryFinallyStmt]
+def f1() -> None: pass
+def f2() -> None: pass
+
+def g() -> None:
+    try:
+        f1()
+    finally:
+        f2()
+[out]
+<m.f1> -> m.g
+<m.f2> -> m.g
+
+[case testPrintStmt_python2]
+def f1(): # type: () -> int
+    pass
+def f2(): # type: () -> int
+    pass
+
+def g1(): # type: () -> None
+    print f1()
+
+def g2(): # type: () -> None
+    print f1(), f2()
+[out]
+<m.f1> -> m.g1, m.g2
+<m.f2> -> m.g2
+
+[case testPrintStmtWithFile_python2]
+class A:
+    def write(self, s): # type: (str) -> None
+        pass
+
+def f1(): # type: () -> A
+    pass
+def f2(): # type: () -> int
+    pass
+
+def g(): # type: () -> None
+    print >>f1(), f2()
+[out]
+<m.A.write> -> m.g
+<m.A> -> <m.f1>, m.A, m.f1
+<m.f2> -> m.g
+
+[case testExecStmt_python2]
+def f1(): pass
+def f2(): pass
+def f3(): pass
+
+def g1(): # type: () -> None
+    exec f1()
+
+def g2(): # type: () -> None
+    exec f1() in f2()
+
+def g3(): # type: () -> None
+    exec f1() in f2(), f3()
+[out]
+<m.f1> -> m.g1, m.g2, m.g3
+<m.f2> -> m.g2, m.g3
+<m.f3> -> m.g3
+
+[case testForStmt]
+from typing import Iterator
+
+class A:
+    def __iter__(self) -> Iterator[int]: pass
+
+def f1() -> None: pass
+def f2() -> None: pass
+
+def g() -> None:
+    a: A
+    for x in a:
+        f1()
+    else:
+        f2()
+[builtins fixtures/list.pyi]
+[out]
+<m.A.__getitem__> -> m.g
+<m.A.__iter__> -> m.g
+<m.A> -> m.A, m.g
+<m.f1> -> m.g
+<m.f2> -> m.g
+
+[case testTryExceptStmt]
+class A(BaseException): pass
+class B(BaseException):
+    def f(self) -> None: pass
+
+def f1() -> None: pass
+def f2() -> None: pass
+def f3() -> None: pass
+
+def g() -> None:
+    try:
+        f1()
+    except A:
+        f2()
+    except B as e:
+        e.f()
+    else:
+        f3()
+[builtins fixtures/exception.pyi]
+[out]
+<m.A> -> m.A, m.g
+<m.B.f> -> m.g
+<m.B> -> m.B, m.g
+<m.f1> -> m.g
+<m.f2> -> m.g
+<m.f3> -> m.g
+
+[case testTryExceptStmt2]
+class A(BaseException): pass
+class B(BaseException):
+    def f(self) -> None: pass
+
+def f1() -> None: pass
+def f2() -> None: pass
+
+def g() -> None:
+    try:
+        f1()
+    except (A, B):
+        f2()
+[builtins fixtures/exception.pyi]
+[out]
+<m.A> -> m.A, m.g
+<m.B> -> m.B, m.g
+<m.f1> -> m.g
+<m.f2> -> m.g
+
+[case testWithStmt]
+from typing import Any
+class A:
+    def __enter__(self) -> 'B': pass
+    def __exit__(self, a, b, c) -> None: pass
+
+class B:
+    def f(self) -> None: pass
+
+def g() -> None:
+    a: A
+    with a as x:
+        x.f()
+[out]
+<m.A.__enter__> -> m.g
+<m.A.__exit__> -> m.g
+<m.A> -> m.A, m.g
+<m.B.f> -> m.g
+<m.B> -> <m.A.__enter__>, m.A.__enter__, m.B
+
+[case testWithStmt2]
+from typing import Any
+class A:
+    def __enter__(self) -> 'C': pass
+    def __exit__(self, a, b, c) -> None: pass
+class B:
+    def __enter__(self) -> 'D': pass
+    def __exit__(self, a, b, c) -> None: pass
+
+class C: pass
+class D: pass
+
+def g() -> None:
+    a: A
+    b: B
+    with a as x, b as y:
+        pass
+[out]
+<m.A.__enter__> -> m.g
+<m.A.__exit__> -> m.g
+<m.A> -> m.A, m.g
+<m.B.__enter__> -> m.g
+<m.B.__exit__> -> m.g
+<m.B> -> m.B, m.g
+<m.C> -> <m.A.__enter__>, m.A.__enter__, m.C
+<m.D> -> <m.B.__enter__>, m.B.__enter__, m.D
+
+[case testWithStmtAnnotation]
+from typing import Any
+class A:
+    def __enter__(self) -> Any: pass
+    def __exit__(self, a, b, c) -> None: pass
+
+class B: pass
+
+def f(b: B) -> None: pass
+
+def g() -> None:
+    a: A
+    with a as x: # type: B
+        f(x)
+[out]
+<m.A.__enter__> -> m.g
+<m.A.__exit__> -> m.g
+<m.A> -> m.A, m.g
+<m.B> -> <m.f>, m.B, m.f, m.g
+<m.f> -> m.g
+
+[case testForStmtAnnotation]
+class A:
+    def __iter__(self): pass
+
+class B:
+    def f(self) -> None: pass
+
+def g() -> None:
+    a: A
+    for x in a: # type: B
+        x.f()
+[builtins fixtures/list.pyi]
+[out]
+<m.A.__getitem__> -> m.g
+<m.A.__iter__> -> m.g
+<m.A> -> m.A, m.g
+<m.B.f> -> m.g
+<m.B> -> m.B, m.g
+
+[case testMultipleAssignment]
+from typing import Iterator
+class A:
+    def __iter__(self) -> Iterator[int]: pass
+
+def f() -> None:
+    a: A
+    x, y = a
+[out]
+<m.A.__iter__> -> m.f
+<m.A> -> m.A, m.f
+
+[case testMultipleLvalues]
+class A:
+    def f(self) -> None:
+        self.x = 1
+        self.y = 1
+
+def g() -> None:
+    a: A
+    a.x = a.y = 1
+[out]
+<m.A.x> -> m.A.f, m.g
+<m.A.y> -> m.A.f, m.g
+<m.A> -> m.A, m.g
+
+[case testNestedLvalues]
+class A:
+    def f(self) -> None:
+        self.x = 1
+        self.y = ''
+
+def g() -> None:
+    a: A
+    a.x, a.y = 1, ''
+[out]
+<m.A.x> -> m.A.f, m.g
+<m.A.y> -> m.A.f, m.g
+<m.A> -> m.A, m.g
+
+[case testForAndSetItem]
+class A:
+    def __setitem__(self, x: int, y: int) -> None: pass
+
+def f(): pass
+
+def g() -> None:
+    a: A
+    for a[0] in f():
+        pass
+[builtins fixtures/list.pyi]
+[out]
+<m.A.__getitem__> -> m.g
+<m.A.__setitem__> -> m.g
+<m.A> -> m.A, m.g
+<m.f> -> m.g
+
+[case testMultipleAssignmentAndFor]
+from typing import Iterator, Iterable
+
+class A:
+    def f(self) -> None:
+        self.x = 1
+        self.y = 1
+
+class B:
+    def __iter__(self) -> Iterator[int]: pass
+
+def f() -> Iterable[B]: pass
+
+def g() -> None:
+    a: A
+    for a.x, a.y in f():
+        pass
+[builtins fixtures/list.pyi]
+[out]
+<m.A.x> -> m.A.f, m.g
+<m.A.y> -> m.A.f, m.g
+<m.A> -> m.A, m.g
+<m.B.__getitem__> -> m.g
+<m.B.__iter__> -> m.g
+<m.B> -> <m.f>, m.B, m.f
+<m.f> -> m.g
+
+[case testNestedSetItem]
+class A:
+    def __setitem__(self, x: int, y: int) -> None: pass
+class B:
+    def __setitem__(self, x: int, y: int) -> None: pass
+
+def f(): pass
+
+def g() -> None:
+    a: A
+    b: B
+    a[0], b[0] = f()
+[out]
+<m.A.__getitem__> -> m.g
+<m.A.__setitem__> -> m.g
+<m.A> -> m.A, m.g
+<m.B.__getitem__> -> m.g
+<m.B.__setitem__> -> m.g
+<m.B> -> m.B, m.g
+<m.f> -> m.g
+
+[case testOperatorAssignmentStmt]
+class A:
+    def __add__(self, other: 'B') -> 'A': pass
+
+class B: pass
+
+def f() -> B: pass
+
+def g() -> None:
+    a: A
+    a += f()
+[out]
+<m.A.__add__> -> m.g
+<m.A.__iadd__> -> m.g
+<m.A> -> <m.A.__add__>, m.A, m.A.__add__, m.g
+<m.B> -> <m.A.__add__>, <m.f>, m.A.__add__, m.B, m.f
+<m.f> -> m.g
+
+[case testOperatorAssignmentStmtSetItem]
+class A:
+    def __add__(self, other: 'B') -> 'A': pass
+
+class B: pass
+
+class C:
+    def __getitem__(self, x: int) -> A: pass
+    def __setitem__(self, x: int, y: A) -> None: pass
+
+def f() -> int: pass
+
+def g() -> None:
+    b: B
+    c: C
+    c[f()] += b
+[out]
+<m.A.__add__> -> m.g
+<m.A.__iadd__> -> m.g
+<m.A> -> <m.A.__add__>, <m.C.__getitem__>, <m.C.__setitem__>, m.A, m.A.__add__, m.C.__getitem__, m.C.__setitem__
+<m.B> -> <m.A.__add__>, m.A.__add__, m.B, m.g
+<m.C.__getitem__> -> m.g
+<m.C.__setitem__> -> m.g
+<m.C> -> m.C, m.g
+<m.f> -> m.g
+
+[case testYieldStmt]
+from typing import Iterator
+
+class A: pass
+
+def f1() -> A: pass
+
+def g() -> Iterator[A]:
+    yield f1()
+[builtins fixtures/list.pyi]
+[out]
+<m.A> -> <m.f1>, <m.g>, m.A, m.f1, m.g
+<m.f1> -> m.g
+
+[case testDelStmt]
+class A:
+    def f(self) -> None:
+        self.x = 1
+
+def f() -> A: pass
+
+def g() -> None:
+    del f().x
+[out]
+<m.A.x> -> m.A.f, m.g
+<m.A> -> <m.f>, m.A, m.f
+<m.f> -> m.g
+
+[case testDelStmtWithIndexing]
+class A:
+    def __delitem__(self, x: int) -> None: pass
+
+def f1() -> A: pass
+def f2() -> int: pass
+
+def g() -> None:
+    del f1()[f2()]
+[out]
+<m.A.__delitem__> -> m.g
+-- __getitem__ is redundant but harmless
+<m.A.__getitem__> -> m.g
+<m.A> -> <m.f1>, m.A, m.f1
+<m.f1> -> m.g
+<m.f2> -> m.g
+
+[case testYieldFrom]
+from typing import Iterator
+class A:
+    def __iter__(self) -> Iterator[int]: pass
+def f() -> Iterator[int]:
+    yield from A()
+[out]
+<m.A.__init__> -> m.f
+<m.A.__iter__> -> m.f
+<m.A> -> m.A, m.f
+
+[case testFunctionDecorator]
+from typing import Callable
+def dec(f: Callable[[int], int]) -> Callable[[str], str]:
+    pass
+
+def f() -> int: pass
+
+ at dec
+def g(x: int) -> int:
+    return f()
+[out]
+<m.dec> -> m
+<m.f> -> m.g
+<m.g> -> m
+
+[case testMethodDecorator]
+from typing import Callable, Any
+def dec(f: Callable[[Any, int], int]) -> Callable[[Any, str], str]:
+    pass
+
+def f() -> int: pass
+
+class A:
+    @dec
+    def g(self, x: int) -> int:
+        return f()
+[out]
+<m.A.g> -> m
+<m.A> -> m.A
+<m.dec> -> m
+<m.f> -> m.A.g
+
+[case testNestedFunction]
+class A: pass
+
+def h() -> None: pass
+
+def f() -> None:
+    def g(x: A) -> None:
+        h()
+[out]
+<m.A> -> <m.f>, m.A, m.f
+<m.h> -> m.f
+
+[case testPlatformCheck]
+import a
+import sys
+
+def f() -> int:
+    if sys.platform == 'nonexistent':
+        return a.g()
+    else:
+        return 1
+[file a.py]
+[builtins fixtures/ops.pyi]
+[out]
+<a> -> m
+<sys.platform> -> m.f
+<sys> -> m, m.f
diff --git a/test-data/unit/deps-types.test b/test-data/unit/deps-types.test
new file mode 100644
index 0000000..8b03a32
--- /dev/null
+++ b/test-data/unit/deps-types.test
@@ -0,0 +1,151 @@
+-- Test cases for generating fine-grained dependencies between types.
+--
+-- The dependencies are used for fine-grained incremental checking.
+
+[case testFilterOutBuiltInTypes]
+class A: pass
+
+def f(x: int, y: str, z: A) -> None:
+    pass
+[out]
+<m.A> -> <m.f>, m.A, m.f
+
+[case testTupleType]
+from typing import Tuple
+
+class A: pass
+class B: pass
+
+def f(x: Tuple[A, B]) -> None:
+    pass
+[out]
+<m.A> -> <m.f>, m.A, m.f
+<m.B> -> <m.f>, m.B, m.f
+
+[case testUnionType]
+from typing import Union
+
+class A: pass
+class B: pass
+
+def f() -> None:
+    x: Union[int, A, B]
+[out]
+<m.A> -> m.A, m.f
+<m.B> -> m.B, m.f
+
+[case testCallableType]
+from typing import Callable
+
+class A: pass
+class B: pass
+
+def f() -> None:
+    x: Callable[[int, A], None]
+    y: Callable[[int, str], B]
+[out]
+<m.A> -> m.A, m.f
+<m.B> -> m.B, m.f
+
+[case testTypeType]
+from typing import Type
+
+class A: pass
+
+def f() -> None:
+    x: Type[A]
+    y: Type[int]
+[out]
+<m.A> -> m.A, m.f
+
+[case testTypeTypeAttribute]
+from typing import Type
+
+class A:
+    @staticmethod
+    def f() -> None: pass
+
+def f(x: Type[A]) -> None:
+    x.f()
+[builtins fixtures/staticmethod.pyi]
+[out]
+<m.A.f> -> m, m.f
+<m.A> -> <m.f>, m.A, m.f
+
+[case testComplexNestedType]
+from typing import Union, Callable, Type
+
+class A: pass
+class B: pass
+class C: pass
+
+def f() -> None:
+    x: Union[int, Callable[[Type[A]], B], C]
+[out]
+<m.A> -> m.A, m.f
+<m.B> -> m.B, m.f
+<m.C> -> m.C, m.f
+
+[case testUnionTypeAttributeAccess]
+from typing import Union
+
+class A:
+    def f(self) -> None:
+        self.x = 0
+
+class B:
+    def f(self) -> None:
+        self.x = ''
+
+def f(a: Union[A, B]) -> None:
+    a.x
+    a.f()
+[out]
+<m.A.f> -> m.f
+<m.A.x> -> m.A.f, m.f
+<m.A> -> <m.f>, m.A, m.f
+<m.B.f> -> m.f
+<m.B.x> -> m.B.f, m.f
+<m.B> -> <m.f>, m.B, m.f
+
+[case testTupleTypeAttributeAccess]
+from typing import Tuple
+
+class C(Tuple[int, str]):
+    def f(self) -> None: pass
+
+def f(c: C) -> None:
+    c.f()
+[builtins fixtures/tuple.pyi]
+[out]
+<m.C.f> -> m.f
+<m.C> -> <m.f>, m.C, m.f
+
+[case testOverloaded]
+from typing import overload
+
+class A: pass
+class B: pass
+
+def g() -> None: pass
+
+ at overload
+def f(x: A) -> A: pass
+ at overload
+def f(x: B) -> B: pass
+
+def f(x):
+    g()
+
+ff = f
+
+def h() -> None:
+    f(A())
+    ff(A())
+[out]
+<m.A.__init__> -> m.h
+<m.A> -> <m.f>, <m.ff>, m.A, m.f, m.h
+<m.B> -> <m.f>, <m.ff>, m.B, m.f
+<m.f> -> m, m.h
+<m.ff> -> m, m.h
+<m.g> -> m.f
diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test
index a3d6533..5b6f638 100644
--- a/test-data/unit/deps.test
+++ b/test-data/unit/deps.test
@@ -39,12 +39,14 @@ class A: pass
 <m.A> -> m.A, m.f
 
 [case testAccessModuleAttribute]
-x = 1
+class A: pass
+x = A()
 def f() -> None:
     x
 [out]
+<m.A.__init__> -> m
+<m.A> -> <m.x>, m, m.A
 <m.x> -> m, m.f
-<builtins.int> -> m
 
 [case testAccessModuleAttribute2]
 import n
@@ -72,6 +74,15 @@ def f() -> None: pass
 <n.f> -> m
 <n> -> m
 
+[case testImportModuleAs]
+import n as x
+x.f()
+[file n.py]
+def f() -> None: pass
+[out]
+<n.f> -> m
+<n> -> m
+
 [case testCallImportedFunctionInFunction]
 import n
 def g() -> None:
@@ -89,7 +100,7 @@ class B(A):
     pass
 [out]
 <m.A.__init__> -> <m.B.__init__>
-<m.A> -> m.A, m.B
+<m.A> -> m, m.A, m.B
 <m.B> -> m.B
 
 [case testInheritanceWithMethodAndAttribute]
@@ -102,7 +113,7 @@ class B(A):
 <m.A.__init__> -> <m.B.__init__>
 <m.A.f> -> m.B.f
 <m.A.x> -> <m.B.x>
-<m.A> -> m.A, m.B
+<m.A> -> m, m.A, m.B
 <m.B.x> -> m.B.f
 <m.B> -> m.B
 
@@ -118,11 +129,11 @@ class C(B):
 <m.A.__init__> -> <m.B.__init__>, <m.C.__init__>
 <m.A.f> -> m.C.f
 <m.A.x> -> <m.C.x>
-<m.A> -> m.A, m.B
+<m.A> -> m, m.A, m.B
 <m.B.__init__> -> <m.C.__init__>
 <m.B.f> -> m.C.f
 <m.B.x> -> <m.C.x>
-<m.B> -> m.B, m.C
+<m.B> -> m, m.B, m.C
 <m.C.x> -> m.C.f
 <m.C> -> m.C
 
@@ -143,8 +154,8 @@ class A:
 <n.A.f> -> m.B.f
 <n.A.g> -> <m.B.g>
 <n.A.x> -> <m.B.x>
-<n.A> -> m.B
-<n> -> m, m.B
+<n.A> -> m, m.B
+<n> -> m
 
 [case testInheritMethod]
 class A:
@@ -156,7 +167,7 @@ class B(A):
 <m.A.__init__> -> <m.B.__init__>
 <m.A.f> -> m.B.f
 <m.A.g> -> <m.B.g>
-<m.A> -> m.A, m.B
+<m.A> -> m, m.A, m.B
 <m.B.g> -> m.B.f
 <m.B> -> m.B
 
@@ -215,13 +226,23 @@ class A:
 
 [case testConstructor]
 class A:
-    def __init__(self, x: int) -> None: pass
+    def __init__(self, x: C) -> None: pass
+class C: pass
 def f() -> None:
-    A(1)
+    A(C())
 [out]
 <m.A.__init__> -> m.f
 <m.A> -> m.A, m.f
-<builtins.int> -> <m.A.__init__>, m.A.__init__
+<m.C.__init__> -> m.f
+<m.C> -> <m.A.__init__>, m.A.__init__, m.C, m.f
+
+[case testNonTrivialConstructor]
+class C:
+    def __init__(self) -> None:
+        self.x = 1
+[out]
+<m.C.x> -> m.C.__init__
+<m.C> -> m.C
 
 [case testImportFrom]
 from n import f
@@ -233,6 +254,16 @@ def f() -> None: pass
 [out]
 <n.f> -> m, m.g
 
+[case testImportFromAs]
+from n import f as ff
+
+def g() -> None:
+    ff()
+[file n.py]
+def f() -> None: pass
+[out]
+<n.f> -> m, m.g
+
 [case testNestedClass]
 def f() -> None:
     b = A.B()
@@ -279,3 +310,217 @@ class A:
 <m.A.B.f> -> m.f
 <m.A.B> -> <m.f>, m.A.B, m.f
 <m.A> -> m.A
+
+[case testDefaultArgValue]
+def f1(x: int) -> int: pass
+def f2() -> int: pass
+def g(x: int = f1(f2())) -> None: pass
+[out]
+<m.f1> -> m.g
+<m.f2> -> m.g
+
+[case testIsInstance]
+class A:
+    def g(self) -> None: pass
+
+def f(x: object) -> None:
+    if isinstance(x, A):
+        x.g()
+[builtins fixtures/isinstancelist.pyi]
+[out]
+<m.A.g> -> m.f
+<m.A> -> m.A, m.f
+
+[case testUnreachableIsInstance]
+class A:
+    x: int
+
+class B:
+    y: int
+
+def f(x: A) -> None:
+    if isinstance(x, B):
+        x.y
+[builtins fixtures/isinstancelist.pyi]
+[out]
+<m.A> -> <m.f>, m.A, m.f
+<m.B> -> m.B, m.f
+
+[case testAttributeWithClassType1]
+from n import A
+
+class B:
+    def h(self, z: A) -> None:
+        self.z = z
+[file n.py]
+class A: pass
+[out]
+<m.B.z> -> m.B.h
+<m.B> -> m.B
+<n.A> -> <m.B.h>, <m.B.z>, m, m.B.h
+
+[case testAttributeWithClassType2]
+from m import A
+
+class B:
+    def f(self) -> None:
+        self.x = A()
+[file m.py]
+class A: pass
+[out]
+<m.B.x> -> m.B.f
+<m.B> -> m.B
+<m.A.__init__> -> m.B.f
+<m.A> -> <m.B.x>, m, m.B.f
+
+[case testAttributeWithClassType3]
+from n import A, x
+
+class B:
+    def g(self) -> None:
+        self.x = x
+[file n.py]
+class A: pass
+
+x = A()
+[out]
+<m.B.x> -> m.B.g
+<m.B> -> m.B
+<n.A> -> <m.B.x>, m
+<n.x> -> m, m.B.g
+
+[case testAttributeWithClassType4]
+from n import A
+
+class B:
+    def g(self) -> None:
+        self.x: A
+[file n.py]
+class A: pass
+[out]
+<m.B.x> -> m.B.g
+<m.B> -> m.B
+<n.A> -> <m.B.x>, m, m.B.g
+
+[case testClassBody]
+def f() -> int: pass
+def g() -> int: pass
+def h() -> int: pass
+
+class A:
+    h()
+    if f():
+        g()
+[out]
+<m.A> -> m.A
+<m.f> -> m
+<m.g> -> m
+<m.h> -> m
+
+[case testVariableInitializedInClass]
+from n import A
+
+class B:
+    x = None  # type: A
+[file n.py]
+class A: pass
+[out]
+<m.B> -> m.B
+<n.A> -> <m.B.x>, m
+
+[case testVariableAnnotationInClass]
+from n import A
+
+class B:
+    x: A
+
+    def f(self) -> None:
+        y = self.x
+[file n.py]
+class A: pass
+[out]
+<m.B.x> -> m.B.f
+<m.B> -> m.B
+<n.A> -> <m.B.x>, m
+
+[case testGlobalVariableInitialized]
+from n import A
+
+x = A()
+[file n.py]
+class A: pass
+[out]
+<m.x> -> m
+<n.A.__init__> -> m
+<n.A> -> <m.x>, m
+
+[case testGlobalVariableAnnotation]
+from n import A
+
+x: A
+[file n.py]
+class A: pass
+[out]
+<m.x> -> m
+<n.A> -> <m.x>, m
+
+[case testProperty]
+class B: pass
+
+class A:
+    @property
+    def x(self) -> B: pass
+
+def f(a: A) -> None:
+    b = a.x
+[builtins fixtures/property.pyi]
+[out]
+<m.A.x> -> m, m.f
+<m.A> -> <m.f>, m.A, m.f
+<m.B> -> <m.A.x>, m.A.x, m.B
+
+[case testUnreachableAssignment]
+from typing import List, Tuple
+
+def f() -> None: pass
+
+class C:
+    def __init__(self, x: int) -> None:
+        if isinstance(x, int):
+            self.y = 1
+        else:
+            self.y = f()
+[builtins fixtures/isinstancelist.pyi]
+[out]
+<m.C.y> -> m.C.__init__
+<m.C> -> m.C
+<m.f> -> m.C.__init__
+
+[case testPartialNoneTypeAttributeCrash1]
+class C: pass
+
+class A:
+    x = None
+
+    def f(self) -> None:
+        self.x = C()
+[out]
+<m.A.x> -> m.A.f
+<m.A> -> m.A
+<m.C.__init__> -> m.A.f
+<m.C> -> <m.A.x>, m.A.f, m.C
+
+[case testPartialNoneTypeAttributeCrash2]
+# flags: --strict-optional
+class C: pass
+
+class A:
+    x = None
+
+    def f(self) -> None:
+        self.x = C()
+[out]
+<m.A.x> -> m.A.f
+<m.A> -> m.A
+<m.C.__init__> -> m.A.f
+<m.C> -> <m.A.x>, m.A.f, m.C
diff --git a/test-data/unit/diff.test b/test-data/unit/diff.test
index 638948c..56366c3 100644
--- a/test-data/unit/diff.test
+++ b/test-data/unit/diff.test
@@ -1,10 +1,6 @@
 -- Test cases for taking a diff of two module ASTs/symbol tables.
 -- The diffs are used for fined-grained incremental checking.
 
---
--- Module top-levels
---
-
 [case testChangeTypeOfModuleAttribute]
 x = 1
 y = 1
@@ -265,3 +261,191 @@ class A:
 
 [out]
 __main__.A.B
+
+[case testChangeNamedTupleAttribute]
+from typing import NamedTuple
+class A:
+    x: str
+N = NamedTuple('N', [('x', int), ('y', str)])
+M = NamedTuple('M', [('x', int), ('y', str)])
+[file next.py]
+from typing import NamedTuple
+N = NamedTuple('N', [('x', int), ('y', int)])
+M = NamedTuple('M', [('x', int), ('y', str)])
+[out]
+__main__.A
+__main__.N.__init__
+__main__.N._asdict
+__main__.N._make
+__main__.N._replace
+__main__.N.y
+
+[case testSimpleDecoratedFunction]
+from a import dec
+ at dec
+def f() -> None: pass
+ at dec
+def g() -> None: pass
+[file next.py]
+from a import dec
+ at dec
+def f(x: int) -> None: pass
+ at dec
+def g() -> None: pass
+[file a.py]
+from typing import TypeVar
+
+T = TypeVar('T')
+
+def dec(f: T) -> T:
+    return f
+[out]
+__main__.f
+
+[case testSimpleDecoratedMethod]
+from a import dec
+class A:
+    @dec
+    def f(self) -> None:
+        self.g()
+    @dec
+    def g(self) -> None: pass
+[file next.py]
+from a import dec
+class A:
+    @dec
+    def f(self, x: int) -> None:
+        self.g()
+    @dec
+    def g(self) -> None: pass
+[file a.py]
+from typing import TypeVar
+
+T = TypeVar('T')
+
+def dec(f: T) -> T:
+    return f
+[out]
+__main__.A.f
+
+[case testTypeVarBound]
+from typing import TypeVar
+T = TypeVar('T')
+S = TypeVar('S')
+[file next.py]
+from typing import TypeVar
+T = TypeVar('T', bound=int)
+S = TypeVar('S')
+[out]
+__main__.T
+
+[case testTypeVarVariance]
+from typing import TypeVar
+A = TypeVar('A', covariant=True)
+B = TypeVar('B', covariant=True)
+C = TypeVar('C', covariant=True)
+[file next.py]
+from typing import TypeVar
+A = TypeVar('A', covariant=True)
+B = TypeVar('B', contravariant=True)
+C = TypeVar('C')
+[out]
+__main__.B
+__main__.C
+
+[case testTypeVarValues]
+from typing import TypeVar
+A = TypeVar('A', int, str)
+B = TypeVar('B', int, str)
+C = TypeVar('C', int, str)
+[file next.py]
+from typing import TypeVar
+A = TypeVar('A', int, str)
+B = TypeVar('B', int, str, object)
+C = TypeVar('C')
+[out]
+__main__.B
+__main__.C
+
+[case testGenericFunction]
+from typing import TypeVar
+T = TypeVar('T')
+S = TypeVar('S')
+def f(x: T) -> T: pass
+def g(x: S) -> S: pass
+[file next.py]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+S = TypeVar('S')
+def f(x: T) -> T: pass
+def g(x: S) -> S: pass
+[out]
+__main__.T
+__main__.f
+
+[case testGenericTypes]
+from typing import List
+x: List[int]
+y: List[int]
+[file next.py]
+from typing import List
+x: List[int]
+y: List[str]
+[builtins fixtures/list.pyi]
+[out]
+__main__.y
+
+[case testTypeAliasOfList]
+from typing import List
+X = List[int]
+Y = List[int]
+[file next.py]
+from typing import List
+X = List[str]
+Y = List[int]
+[builtins fixtures/list.pyi]
+[out]
+__main__.X
+
+[case testTypeAliasOfCallable]
+from typing import Callable
+A = Callable[[int], str]
+B = Callable[[int], str]
+C = Callable[[int], str]
+[file next.py]
+from typing import Callable
+A = Callable[[int], str]
+B = Callable[[], str]
+C = Callable[[int], int]
+[out]
+__main__.B
+__main__.C
+
+[case testGenericTypeAlias]
+from typing import Callable, TypeVar
+T = TypeVar('T')
+A = Callable[[T], T]
+B = Callable[[T], T]
+[file next.py]
+from typing import Callable, TypeVar
+T = TypeVar('T')
+S = TypeVar('S')
+A = Callable[[T], T]
+B = Callable[[T], S]
+[out]
+__main__.B
+__main__.S
+
+[case testDifferentListTypes]
+from typing import List
+A = List
+B = list
+C = List
+[file next.py]
+from typing import List
+A = List
+B = list
+C = list
+[builtins fixtures/list.pyi]
+[out]
+__main__.C
diff --git a/test-data/unit/fine-grained-blockers.test b/test-data/unit/fine-grained-blockers.test
new file mode 100644
index 0000000..f4af062
--- /dev/null
+++ b/test-data/unit/fine-grained-blockers.test
@@ -0,0 +1,368 @@
+-- Test cases for fine-grained incremental mode and blocking errors
+--
+-- The comments in fine-grained.test explain how these tests work.
+
+-- TODO:
+-- - blocking error while other existing errors as well (that get preserved)
+-- - differences in other modules + blocking error
+
+[case testParseError]
+import a
+a.f()
+[file a.py]
+def f() -> None: pass
+[file a.py.2]
+def f(x: int) ->
+[file a.py.3]
+def f(x: int) -> None: pass
+[file a.py.4]
+def f() -> None: pass
+[out]
+==
+a.py:1: error: invalid syntax
+==
+main:2: error: Too few arguments for "f"
+==
+
+[case testParseErrorMultipleTimes]
+import a
+a.f()
+[file a.py]
+def f() -> None: pass
+[file a.py.2]
+def f(x: int) ->
+[file a.py.3]
+def f(x: int
+      ) -> None
+[file a.py.4]
+def f(x: int) -> None: pass
+[out]
+==
+a.py:1: error: invalid syntax
+==
+a.py:2: error: invalid syntax
+==
+main:2: error: Too few arguments for "f"
+
+[case testSemanticAnalysisBlockingError]
+import a
+a.f()
+[file a.py]
+def f() -> None: pass
+[file a.py.2]
+def f() -> None: pass
+break
+[file a.py.3]
+def f(x: int) -> None: pass
+[out]
+==
+a.py:2: error: 'break' outside loop
+==
+main:2: error: Too few arguments for "f"
+
+[case testBlockingErrorWithPreviousError]
+import a
+import b
+a.f(1)
+def g() -> None:
+    b.f(1)
+[file a.py]
+def f() -> None: pass
+[file b.py]
+def f() -> None: pass
+[file a.py.2]
+def f() -> None
+[file a.py.3]
+def f() -> None: pass
+[out]
+main:3: error: Too many arguments for "f"
+main:5: error: Too many arguments for "f"
+==
+a.py:1: error: invalid syntax
+==
+main:3: error: Too many arguments for "f"
+main:5: error: Too many arguments for "f"
+
+[case testUpdateClassReferenceAcrossBlockingError]
+import a
+c: a.C
+
+def f() -> None:
+    c.f()
+[file a.py]
+class C:
+    def f(self) -> None: pass
+[file a.py.2]
+error error
+[file a.py.3]
+class C:
+    def f(self, x: int) -> None: pass
+[out]
+==
+a.py:1: error: invalid syntax
+==
+main:5: error: Too few arguments for "f" of "C"
+
+[case testAddFileWithBlockingError]
+import a
+a.f(1)
+[file a.py.2]
+x x
+[file a.py.3]
+def f() -> None: pass
+[out]
+main:1: error: Cannot find module named 'a'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+==
+a.py:1: error: invalid syntax
+==
+main:2: error: Too many arguments for "f"
+
+[case testModifyTwoFilesOneWithBlockingError1]
+import a
+[file a.py]
+import b
+def f() -> None: pass
+b.g()
+[file b.py]
+import a
+a.f()
+def g() -> None: pass
+[file a.py.2]
+import b  # Dummy edit
+def f() -> None: pass
+b.g()
+[file b.py.2]
+import a a  # Syntax error
+a.f()
+def g() -> None: pass
+[file b.py.3]
+import a
+a.f()
+def g() -> None: pass
+[out]
+==
+b.py:1: error: invalid syntax
+==
+
+[case testModifyTwoFilesOneWithBlockingError2]
+import a
+[file a.py]
+import b
+def f() -> None: pass
+b.g()
+[file b.py]
+import a
+a.f()
+def g() -> None: pass
+[file a.py.2]
+import b b
+def f() -> None: pass
+b.g()
+[file b.py.2]
+import a  # Dummy edit
+a.f()
+def g() -> None: pass
+[file a.py.3]
+import b
+def f() -> None: pass
+b.g()
+[out]
+==
+a.py:1: error: invalid syntax
+==
+
+[case testBlockingErrorRemainsUnfixed]
+import a
+[file a.py]
+import b
+b.f()
+[file b.py]
+def f() -> None: pass
+[file a.py.2]
+x x
+[file b.py.3]
+def f(x: int) -> None: pass
+[file a.py.4]
+import b
+b.f()
+[out]
+==
+a.py:1: error: invalid syntax
+==
+a.py:1: error: invalid syntax
+==
+a.py:2: error: Too few arguments for "f"
+
+[case testModifyTwoFilesIntroduceTwoBlockingErrors]
+import a
+[file a.py]
+import b
+def f() -> None: pass
+b.g()
+[file b.py]
+import a
+a.f()
+def g() -> None: pass
+[file a.py.2]
+import b b
+def f() -> None: pass
+b.g()
+[file b.py.2]
+import a a
+a.f()
+def g() -> None: pass
+[file a.py.3]
+import b b
+def f() -> None: pass
+b.g()
+[file b.py.3]
+import a a
+a.f()
+def g() -> None: pass
+[file a.py.4]
+import b
+def f() -> None: pass
+b.g(1)
+[file b.py.4]
+import a
+def g() -> None: pass
+a.f(1)
+[out]
+==
+a.py:1: error: invalid syntax
+==
+a.py:1: error: invalid syntax
+==
+b.py:3: error: Too many arguments for "f"
+a.py:3: error: Too many arguments for "g"
+
+[case testDeleteFileWithBlockingError]
+import a
+import b
+[file a.py]
+def f() -> None: pass
+[file b.py]
+import a
+a.f()
+[file a.py.2]
+x x
+[delete a.py.3]
+[out]
+==
+a.py:1: error: invalid syntax
+==
+main:1: error: Cannot find module named 'a'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+b.py:1: error: Cannot find module named 'a'
+-- TODO: Remove redundant errors
+main:1: error: Cannot find module named 'a'
+b.py:1: error: Cannot find module named 'a'
+
+[case testModifyFileWhileBlockingErrorElsewhere]
+import a
+import b
+[file a.py]
+[file b.py]
+import a
+[file a.py.2]
+x x
+[file b.py.3]
+import a
+a.f()
+1()
+[file a.py.4]
+[builtins fixtures/module.pyi]
+[out]
+==
+a.py:1: error: invalid syntax
+==
+a.py:1: error: invalid syntax
+==
+b.py:2: error: Module has no attribute "f"
+b.py:3: error: "int" not callable
+
+[case testImportBringsAnotherFileWithBlockingError1]
+import a
+[file a.py]
+[file a.py.2]
+import blocker
+1()
+[file a.py.3]
+1()
+def f() -> None: pass
+[out]
+==
+<ROOT>/test-data/unit/lib-stub/blocker.pyi:2: error: invalid syntax
+==
+a.py:1: error: "int" not callable
+
+[case testImportBringsAnotherFileWithSemanticAnalysisBlockingError]
+import a
+[file a.py]
+[file a.py.2]
+import blocker2
+1()
+[file a.py.3]
+1()
+[out]
+==
+<ROOT>/test-data/unit/lib-stub/blocker2.pyi:2: error: 'continue' outside loop
+==
+a.py:1: error: "int" not callable
+
+[case testFixingBlockingErrorTriggersDeletion1]
+import a
+
+def g(x: a.A) -> None:
+    x.f()
+[file a.py]
+class A:
+    def f(self) -> None: pass
+[delete a.py.2]
+[file a.py.3]
+class A: pass
+[builtins fixtures/module.pyi]
+[out]
+==
+main:1: error: Cannot find module named 'a'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+==
+main:4: error: "A" has no attribute "f"
+
+[case testFixingBlockingErrorTriggersDeletion2]
+from a import A
+
+def g(x: A) -> None:
+    x.f()
+[file a.py]
+class A:
+    def f(self) -> None: pass
+[delete a.py.2]
+[file a.py.3]
+[builtins fixtures/module.pyi]
+[out]
+==
+main:1: error: Cannot find module named 'a'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+==
+main:1: error: Module 'a' has no attribute 'A'
+
+[case testFixingBlockingErrorBringsInAnotherModuleWithBlocker]
+import a
+[file a.py]
+[file a.py.2]
+x y
+[file a.py.3]
+import blocker
+1()
+[file a.py.4]
+import sys
+1()
+[out]
+==
+a.py:1: error: invalid syntax
+==
+<ROOT>/test-data/unit/lib-stub/blocker.pyi:2: error: invalid syntax
+==
+a.py:2: error: "int" not callable
diff --git a/test-data/unit/fine-grained-cycles.test b/test-data/unit/fine-grained-cycles.test
new file mode 100644
index 0000000..eeac0b4
--- /dev/null
+++ b/test-data/unit/fine-grained-cycles.test
@@ -0,0 +1,218 @@
+-- Test cases for fine-grained incremental checking and import cycles
+--
+-- The comment at the top of fine-grained.test explains how these tests
+-- work.
+
+[case testFunctionSelfReferenceThroughImportCycle]
+import a
+[file a.py]
+from b import f
+[file b.py]
+import a
+
+def f() -> None:
+    a.f()
+[file b.py.2]
+import a
+
+def f(x: int) -> None:
+    a.f()
+[out]
+==
+b.py:4: error: Too few arguments for "f"
+
+[case testClassSelfReferenceThroughImportCycle]
+import a
+[file a.py]
+from b import A
+[file b.py]
+import a
+
+class A:
+    def g(self) -> None: pass
+
+def f() -> None:
+    a.A().g()
+[file b.py.2]
+import a
+
+class A:
+    def g(self, x: int) -> None: pass
+
+def f() -> None:
+    a.A().g()
+[out]
+==
+b.py:7: error: Too few arguments for "g" of "A"
+
+[case testAnnotationSelfReferenceThroughImportCycle]
+import a
+[file a.py]
+from b import A
+[file b.py]
+import a
+
+x: a.A
+
+class A:
+    def g(self) -> None: pass
+
+def f() -> None:
+    x.g()
+[file b.py.2]
+import a
+
+x: a.A
+
+class A:
+    def g(self, x: int) -> None: pass
+
+def f() -> None:
+    x.g()
+[out]
+==
+b.py:9: error: Too few arguments for "g" of "A"
+
+[case testModuleSelfReferenceThroughImportCycle]
+import a
+[file a.py]
+import b
+[file b.py]
+import a
+
+def f() -> None:
+    a.b.f()
+[file b.py.2]
+import a
+
+def f(x: int) -> None:
+    a.b.f()
+[out]
+==
+b.py:4: error: Too few arguments for "f"
+
+[case testVariableSelfReferenceThroughImportCycle]
+import a
+[file a.py]
+from b import x
+[file b.py]
+import a
+
+x: int
+
+def f() -> None:
+    a.x = 1
+[file b.py.2]
+import a
+
+x: str
+
+def f() -> None:
+    a.x = 1
+[out]
+==
+b.py:6: error: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testReferenceToTypeThroughCycle]
+import a
+
+[file a.py]
+from b import C
+
+def f() -> C: pass
+
+[file b.py]
+import a
+
+class C:
+    def g(self) -> None: pass
+
+def h() -> None:
+    c = a.f()
+    c.g()
+
+[file b.py.2]
+import a
+
+class C:
+    def g(self, x: int) -> None: pass
+
+def h() -> None:
+    c = a.f()
+    c.g()
+
+[out]
+==
+b.py:8: error: Too few arguments for "g" of "C"
+
+[case testReferenceToTypeThroughCycleAndDeleteType]
+import a
+
+[file a.py]
+from b import C
+
+def f() -> C: pass
+
+[file b.py]
+import a
+
+class C:
+    def g(self) -> None: pass
+
+def h() -> None:
+    c = a.f()
+    c.g()
+
+[file b.py.2]
+import a
+
+def h() -> None:
+    c = a.f()
+    c.g()
+
+[out]
+==
+a.py:1: error: Module 'b' has no attribute 'C'
+
+[case testReferenceToTypeThroughCycleAndReplaceWithFunction]
+import a
+
+[file a.py]
+from b import C
+
+def f() -> C: pass
+
+[file b.py]
+import a
+
+class C:
+    def g(self) -> None: pass
+
+def h() -> None:
+    c = a.f()
+    c.g()
+
+[file b.py.2]
+import a
+
+def C() -> int: pass
+
+def h() -> None:
+    c = a.f()
+    c.g()
+
+[out]
+==
+a.py:3: error: Invalid type "b.C"
+
+-- TODO: More import cycle:
+--
+-- * "from x import y" through cycle
+-- * "from x import *" through cycle
+-- * "Cls.module" through cycle
+-- * TypeVar
+-- * type alias
+-- * all kinds of reference deleted
+-- * all kinds of reference rebound to different kind
+--
+-- etc.
diff --git a/test-data/unit/fine-grained-modules.test b/test-data/unit/fine-grained-modules.test
new file mode 100644
index 0000000..a4d7bc1
--- /dev/null
+++ b/test-data/unit/fine-grained-modules.test
@@ -0,0 +1,534 @@
+-- Test cases for fine-grained incremental mode related to modules
+--
+-- Covers adding and deleting modules, changes to multiple modules, and
+-- changes to import graph.
+--
+-- The comments in fine-grained.test explain how these tests work.
+
+
+-- Add file
+-- --------
+
+
+[case testAddFile]
+import b
+[file b.py]
+[file a.py.2]
+def f() -> None: pass
+[file b.py.3]
+import a
+a.f(1)
+[out]
+==
+==
+b.py:2: error: Too many arguments for "f"
+
+[case testAddFileWithErrors]
+import b
+[file b.py]
+[file a.py.2]
+def f() -> str:
+    return 1
+[file b.py.3]
+import a
+a.f(1)
+[file a.py.4]
+def f(x: int) -> None: pass
+[out]
+==
+a.py:2: error: Incompatible return value type (got "int", expected "str")
+==
+b.py:2: error: Too many arguments for "f"
+a.py:2: error: Incompatible return value type (got "int", expected "str")
+==
+
+[case testAddFileFixesError]
+import b
+[file b.py]
+[file b.py.2]
+from a import f
+f()
+[file a.py.3]
+def f() -> None: pass
+[out]
+==
+b.py:1: error: Cannot find module named 'a'
+b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+==
+
+[case testAddFileFixesAndGeneratesError]
+import b
+[file b.py]
+[file b.py.2]
+from a import f
+[file b.py.3]
+from a import f
+f(1)
+[file a.py.4]
+def f() -> None: pass
+[out]
+==
+b.py:1: error: Cannot find module named 'a'
+b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+==
+b.py:1: error: Cannot find module named 'a'
+b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+==
+b.py:2: error: Too many arguments for "f"
+
+[case testAddFilePreservesError1]
+import b
+[file b.py]
+[file b.py.2]
+from a import f
+f(1)
+[file x.py.3]
+# unrelated change
+[out]
+==
+b.py:1: error: Cannot find module named 'a'
+b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+==
+b.py:1: error: Cannot find module named 'a'
+b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testAddFilePreservesError2]
+import b
+[file b.py]
+f()
+[file a.py.2]
+[out]
+b.py:1: error: Name 'f' is not defined
+==
+b.py:1: error: Name 'f' is not defined
+
+[case testImportLineNumber1]
+import b
+[file b.py]
+[file b.py.2]
+x = 1
+import a
+[out]
+==
+b.py:2: error: Cannot find module named 'a'
+b.py:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testImportLineNumber2]
+import b
+[file b.py]
+[file b.py.2]
+x = 1
+import a
+from c import f
+[file x.py.3]
+[out]
+==
+b.py:2: error: Cannot find module named 'a'
+b.py:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+b.py:3: error: Cannot find module named 'c'
+==
+b.py:2: error: Cannot find module named 'a'
+b.py:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+b.py:3: error: Cannot find module named 'c'
+
+
+-- Delete file
+-- -----------
+
+
+[case testDeleteBasic]
+import a
+[file a.py]
+import b
+[file b.py]
+def f() -> None: pass
+[file a.py.2]
+[delete b.py.3]
+[out]
+==
+==
+
+[case testDeletionTriggersImportFrom]
+import a
+[file a.py]
+from b import f
+
+def g() -> None:
+    f()
+[file b.py]
+def f() -> None: pass
+[delete b.py.2]
+[file b.py.3]
+def f(x: int) -> None: pass
+[out]
+==
+a.py:1: error: Cannot find module named 'b'
+a.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+==
+a.py:4: error: Too few arguments for "f"
+
+[case testDeletionTriggersImport]
+import a
+[file a.py]
+def f() -> None: pass
+[delete a.py.2]
+[file a.py.3]
+def f() -> None: pass
+[out]
+==
+main:1: error: Cannot find module named 'a'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+==
+
+[case testDeletionOfSubmoduleTriggersImportFrom1]
+from p import q
+[file p/__init__.py]
+[file p/q.py]
+[delete p/q.py.2]
+[file p/q.py.3]
+[out]
+==
+main:1: error: Cannot find module named 'p.q'
+-- TODO: The following messages are different compared to non-incremental mode
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:1: error: Module 'p' has no attribute 'q'
+==
+
+[case testDeletionOfSubmoduleTriggersImportFrom2]
+from p.q import f
+f()
+[file p/__init__.py]
+[file p/q.py]
+def f() -> None: pass
+[delete p/q.py.2]
+[file p/q.py.3]
+def f(x: int) -> None: pass
+[out]
+==
+main:1: error: Cannot find module named 'p.q'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+==
+main:2: error: Too few arguments for "f"
+
+[case testDeletionOfSubmoduleTriggersImport]
+import p.q
+[file p/__init__.py]
+[file p/q.py]
+def f() -> None: pass
+[delete p/q.py.2]
+[file p/q.py.3]
+def f(x: int) -> None: pass
+[out]
+==
+main:1: error: Cannot find module named 'p.q'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+==
+
+[case testDeleteModuleWithError]
+import a
+[file a.py]
+def f() -> int:
+    return 1
+[file a.py.2]
+def f() -> str:
+    return 1
+[delete a.py.3]
+def f() -> str:
+    return 1
+[out]
+==
+a.py:2: error: Incompatible return value type (got "int", expected "str")
+==
+main:1: error: Cannot find module named 'a'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testDeleteModuleWithErrorInsidePackage]
+import a.b
+[file a/__init__.py]
+[file a/b.py]
+def f() -> int:
+    return ''
+[delete a/b.py.2]
+def f() -> str:
+    return 1
+[out]
+a/b.py:2: error: Incompatible return value type (got "str", expected "int")
+==
+main:1: error: Cannot find module named 'a.b'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testModifyTwoFilesNoError1]
+import a
+[file a.py]
+import b
+b.f()
+[file b.py]
+def f() -> None: pass
+[file a.py.2]
+import b
+b.f(1)
+[file b.py.2]
+def f(x: int) -> None: pass
+[out]
+==
+
+[case testModifyTwoFilesNoError2]
+import a
+[file a.py]
+from b import g
+def f() -> None: pass
+[file b.py]
+import a
+def g() -> None: pass
+a.f()
+[file a.py.2]
+from b import g
+def f(x: int) -> None: pass
+[file b.py.2]
+import a
+def g() -> None: pass
+a.f(1)
+[out]
+==
+
+[case testModifyTwoFilesErrorsElsewhere]
+import a
+import b
+a.f()
+b.g(1)
+[file a.py]
+def f() -> None: pass
+[file b.py]
+def g(x: int) -> None: pass
+[file a.py.2]
+def f(x: int) -> None: pass
+[file b.py.2]
+def g() -> None: pass
+[out]
+==
+main:3: error: Too few arguments for "f"
+main:4: error: Too many arguments for "g"
+
+[case testModifyTwoFilesErrorsInBoth]
+import a
+[file a.py]
+import b
+def f() -> None: pass
+b.g(1)
+[file b.py]
+import a
+def g(x: int) -> None: pass
+a.f()
+[file a.py.2]
+import b
+def f(x: int) -> None: pass
+b.g(1)
+[file b.py.2]
+import a
+def g() -> None: pass
+a.f()
+[out]
+==
+b.py:3: error: Too few arguments for "f"
+a.py:3: error: Too many arguments for "g"
+
+[case testModifyTwoFilesFixErrorsInBoth]
+import a
+[file a.py]
+import b
+def f(x: int) -> None: pass
+b.g(1)
+[file b.py]
+import a
+def g() -> None: pass
+a.f()
+[file a.py.2]
+import b
+def f() -> None: pass
+b.g(1)
+[file b.py.2]
+import a
+def g(x: int) -> None: pass
+a.f()
+[out]
+b.py:3: error: Too few arguments for "f"
+a.py:3: error: Too many arguments for "g"
+==
+
+[case testAddTwoFilesNoError]
+import a
+[file a.py]
+import b
+import c
+b.f()
+c.g()
+[file b.py.2]
+import c
+def f() -> None: pass
+c.g()
+[file c.py.2]
+import b
+def g() -> None: pass
+b.f()
+[out]
+a.py:1: error: Cannot find module named 'b'
+a.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+a.py:2: error: Cannot find module named 'c'
+==
+
+[case testAddTwoFilesErrorsInBoth]
+import a
+[file a.py]
+import b
+import c
+b.f()
+c.g()
+[file b.py.2]
+import c
+def f() -> None: pass
+c.g(1)
+[file c.py.2]
+import b
+def g() -> None: pass
+b.f(1)
+[out]
+a.py:1: error: Cannot find module named 'b'
+a.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+a.py:2: error: Cannot find module named 'c'
+==
+b.py:3: error: Too many arguments for "g"
+c.py:3: error: Too many arguments for "f"
+
+[case testAddTwoFilesErrorsElsewhere]
+import a
+import b
+a.f(1)
+b.g(1)
+[file a.py.2]
+def f() -> None: pass
+[file b.py.2]
+def g() -> None: pass
+[out]
+main:1: error: Cannot find module named 'a'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Cannot find module named 'b'
+==
+main:3: error: Too many arguments for "f"
+main:4: error: Too many arguments for "g"
+
+[case testDeleteTwoFilesErrorsElsewhere]
+import a
+import b
+a.f()
+b.g()
+[file a.py]
+def f() -> None: pass
+[file b.py]
+def g() -> None: pass
+[delete a.py.2]
+[delete b.py.2]
+[out]
+==
+main:1: error: Cannot find module named 'a'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+-- TODO: Remove redundant error message
+main:1: error: Cannot find module named 'b'
+main:2: error: Cannot find module named 'b'
+
+[case testDeleteTwoFilesNoErrors]
+import a
+[file a.py]
+import b
+import c
+b.f()
+c.g()
+[file b.py]
+def f() -> None: pass
+[file c.py]
+def g() -> None: pass
+[file a.py.2]
+[delete b.py.3]
+[delete c.py.3]
+[out]
+==
+==
+
+[case testDeleteTwoFilesFixErrors]
+import a
+import b
+a.f()
+b.g()
+[file a.py]
+import b
+def f() -> None: pass
+b.g(1)
+[file b.py]
+import a
+def g() -> None: pass
+a.f(1)
+[delete a.py.2]
+[delete b.py.2]
+[out]
+b.py:3: error: Too many arguments for "f"
+a.py:3: error: Too many arguments for "g"
+==
+main:1: error: Cannot find module named 'a'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+-- TODO: Remove redundant error message
+main:1: error: Cannot find module named 'b'
+main:2: error: Cannot find module named 'b'
+
+[case testAddFileWhichImportsLibModule]
+import a
+a.x = 0
+[file a.py.2]
+import sys
+x = sys.platform
+[out]
+main:1: error: Cannot find module named 'a'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+==
+main:2: error: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testAddFileWhichImportsLibModuleWithErrors]
+import a
+a.x = 0
+[file a.py.2]
+import broken
+x = broken.x
+z
+[out]
+main:1: error: Cannot find module named 'a'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+==
+a.py:3: error: Name 'z' is not defined
+<ROOT>/test-data/unit/lib-stub/broken.pyi:2: error: Name 'y' is not defined
+
+[case testRenameModule]
+import a
+[file a.py]
+import b
+b.f()
+[file b.py]
+def f() -> None: pass
+[file a.py.2]
+import c
+c.f()
+[file c.py.2]
+def f() -> None: pass
+[file a.py.3]
+import c
+c.f(1)
+[out]
+==
+==
+a.py:2: error: Too many arguments for "f"
+
+-- TODO:
+-- - add one file which imports another new file, blocking error in new file
+-- - arbitrary blocking errors
+-- - packages
+--   - add two files that form a package
+--   - delete two files that form a package
+-- - order of processing makes a difference
+-- - mix of modify, add and delete in one iteration
diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test
index 17d126f..76c0248 100644
--- a/test-data/unit/fine-grained.test
+++ b/test-data/unit/fine-grained.test
@@ -870,3 +870,129 @@ class C:
 main:8: error: "B" has no attribute "x"
 ==
 main:4: error: Name 'm.C' is not defined
+
+[case testImportQualifiedModuleName]
+import a
+[file a.py]
+import b.c
+b.c.f()
+[file a.py.2]
+import b.c
+b.c.f() # dummy change
+[file b/__init__.py]
+[file b/c.py]
+def f() -> None: pass
+[out]
+==
+
+[case testTypeAliasRefresh]
+from typing import Callable
+from a import f
+C = Callable[[int], str]
+[file a.py]
+def f() -> None: pass
+[file a.py.2]
+[out]
+==
+main:2: error: Module 'a' has no attribute 'f'
+
+[case testTypeVarRefresh]
+from typing import TypeVar
+from a import f
+T = TypeVar('T')
+[file a.py]
+def f() -> None: pass
+[file a.py.2]
+[out]
+==
+main:2: error: Module 'a' has no attribute 'f'
+
+[case testNamedTupleRefresh]
+from typing import NamedTuple
+from a import f
+N = NamedTuple('N', [('x', int)])
+[file a.py]
+def f() -> None: pass
+[file a.py.2]
+[out]
+==
+main:2: error: Module 'a' has no attribute 'f'
+
+[case testModuleLevelAttributeRefresh]
+from typing import Callable
+from a import f
+x = 1
+y = ''  # type: str
+[file a.py]
+def f() -> None: pass
+[file a.py.2]
+[out]
+==
+main:2: error: Module 'a' has no attribute 'f'
+
+[case testClassBodyRefresh]
+from a import f
+class A:
+    x = 1
+    y = '' # type: str
+
+    def f(self) -> None:
+        self.x = 1
+[file a.py]
+f = 1
+[file a.py.2]
+[out]
+==
+main:1: error: Module 'a' has no attribute 'f'
+
+[case testDecoratedMethodRefresh]
+from typing import Iterator, Callable, List
+from a import f
+import a
+
+def dec(f: Callable[['A'], Iterator[int]]) -> Callable[[int], int]: pass
+
+class A:
+    @dec
+    def f(self) -> Iterator[int]:
+        self.x = a.g()  # type: int
+        return None
+[builtins fixtures/list.pyi]
+[file a.py]
+f = 1
+def g() -> int: pass
+[file a.py.2]
+def f() -> None: pass
+def g() -> int: pass
+[file a.py.3]
+def f() -> None: pass
+def g() -> str: pass
+[out]
+==
+==
+main:10: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testTwoPassTypeChecking]
+import a
+[file a.py]
+[file a.py.2]
+class A:
+    def __init__(self, b: B) -> None:
+        self.a = b.a
+
+class B:
+    def __init__(self) -> None:
+        self.a = int()
+[file a.py.3]
+class A:
+    def __init__(self, b: B) -> None:
+        self.a = b.a
+        reveal_type(self.a) # E
+
+class B:
+    def __init__(self) -> None:
+        self.a = int()
+[out]
+==
+==
+a.py:4: error: Revealed type is 'builtins.int'
diff --git a/test-data/unit/fixtures/async_await.pyi b/test-data/unit/fixtures/async_await.pyi
index dcd25de..3cb45cb 100644
--- a/test-data/unit/fixtures/async_await.pyi
+++ b/test-data/unit/fixtures/async_await.pyi
@@ -5,11 +5,12 @@ U = typing.TypeVar('U')
 class list(typing.Sequence[T]): pass
 
 class object:
-    def __init__(self): pass
+    def __init__(self) -> None: pass
 class type: pass
 class function: pass
 class int: pass
 class str: pass
+class bool: pass
 class dict(typing.Generic[T, U]): pass
 class set(typing.Generic[T]): pass
 class tuple(typing.Generic[T]): pass
diff --git a/test-data/unit/fixtures/tuple.pyi b/test-data/unit/fixtures/tuple.pyi
index e231900..ea522fa 100644
--- a/test-data/unit/fixtures/tuple.pyi
+++ b/test-data/unit/fixtures/tuple.pyi
@@ -16,6 +16,7 @@ class tuple(Sequence[Tco], Generic[Tco]):
     def __getitem__(self, x: int) -> Tco: pass
     def count(self, obj: Any) -> int: pass
 class function: pass
+class ellipsis: pass
 
 # We need int and slice for indexing tuples.
 class int: pass
diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi
index fb6b1d3..5acba07 100644
--- a/test-data/unit/fixtures/typing-full.pyi
+++ b/test-data/unit/fixtures/typing-full.pyi
@@ -140,6 +140,7 @@ def runtime(cls: T) -> T:
 
 class ContextManager(Generic[T]):
     def __enter__(self) -> T: pass
-    def __exit__(self, exc_type, exc_value, traceback): pass
+    # Use Any because not all the precise types are in the fixtures.
+    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> Any: pass
 
 TYPE_CHECKING = 1
diff --git a/test-data/unit/lib-stub/blocker.pyi b/test-data/unit/lib-stub/blocker.pyi
new file mode 100644
index 0000000..01bd734
--- /dev/null
+++ b/test-data/unit/lib-stub/blocker.pyi
@@ -0,0 +1,2 @@
+# Stub file that generates a blocking parse error
+x y
diff --git a/test-data/unit/lib-stub/blocker2.pyi b/test-data/unit/lib-stub/blocker2.pyi
new file mode 100644
index 0000000..dfacc34
--- /dev/null
+++ b/test-data/unit/lib-stub/blocker2.pyi
@@ -0,0 +1,2 @@
+# Stub file that generates a blocking semantic analysis error
+continue
diff --git a/test-data/unit/lib-stub/broken.pyi b/test-data/unit/lib-stub/broken.pyi
new file mode 100644
index 0000000..22cfc72
--- /dev/null
+++ b/test-data/unit/lib-stub/broken.pyi
@@ -0,0 +1,2 @@
+# Stub file that generates an error
+x = y
diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test
index a6d2a42..dc51f9d 100644
--- a/test-data/unit/merge.test
+++ b/test-data/unit/merge.test
@@ -1,4 +1,26 @@
--- Test cases for AST merge (user for fine-grained incremental checking)
+-- Test cases for AST merge (used for fine-grained incremental checking)
+--
+-- Each test case has two versions of the module 'target' (target.py and
+-- target.py.next). A test case type checks both of them, merges the ASTs,
+-- and finally dumps certain parts of the ASTs for both versions (==>
+-- separates the first and second versions). A test case passes if the
+-- dumped output is as expected.
+--
+-- The dumped output uses <N> to denote identities of objects. Objects
+-- suffixed by the same <N> refer to the same object; <N> and <M> (if
+-- N != M) refer to different objects. The objective of these test cases
+-- is to verify that identities of publicly visible AST nodes are
+-- preserved across merge. Other AST nodes may get new identities.
+--
+-- Each test case dumps one of four kinds of information:
+--
+-- 1) ASTs (test case has no magic suffix)
+-- 2) Symbol tables (_symtable test case name suffix)
+-- 3) TypeInfos (_typeinfo suffix)
+-- 4) Inferred types (_types suffix)
+--
+-- If you need to dump multiple different kinds of information, write
+-- multiple test cases.
 
 [case testFunction]
 import target
@@ -10,6 +32,7 @@ def f() -> int:
     pass
 [out]
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -20,6 +43,7 @@ MypyFile:1<1>(
       PassStmt:2<5>())))
 ==>
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -41,6 +65,7 @@ class A:
         pass
 [out]
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -56,6 +81,7 @@ MypyFile:1<1>(
         PassStmt:3<8>()))))
 ==>
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -112,6 +138,7 @@ class A:
         return B()
 [out]
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -132,6 +159,7 @@ MypyFile:1<1>(
     PassStmt:4<10>()))
 ==>
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -165,6 +193,7 @@ class A:
         self.f()
 [out]
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -184,6 +213,7 @@ MypyFile:1<1>(
             Args()))))))
 ==>
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -216,6 +246,7 @@ class A:
         self.x
 [out]
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -238,6 +269,7 @@ MypyFile:1<1>(
             x))))))
 ==>
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -344,6 +376,7 @@ A()
 B()
 [out]
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -356,6 +389,7 @@ MypyFile:1<1>(
       Args())))
 ==>
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -468,6 +502,7 @@ def f() -> int: pass
 def g() -> int: pass
 [out]
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -478,6 +513,7 @@ MypyFile:1<1>(
       PassStmt:1<5>())))
 ==>
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -553,6 +589,7 @@ x = 1
 x = 2
 [out]
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -562,6 +599,7 @@ MypyFile:1<1>(
     builtins.int<4>))
 ==>
 MypyFile:1<0>(
+  tmp/main
   Import:1(target))
 MypyFile:1<1>(
   tmp/target.py
@@ -606,3 +644,394 @@ TypeInfo<2>(
   Mro(target.A.B<2>, builtins.object<1>)
   Names(
     f<3>))
+
+[case testNamedTuple_typeinfo]
+import target
+[file target.py]
+from typing import NamedTuple
+class A: pass
+N = NamedTuple('N', [('x', A)])
+[file target.py.next]
+from typing import NamedTuple
+class A: pass
+N = NamedTuple('N', [('x', A), ('y', A)])
+[out]
+TypeInfo<0>(
+  Name(target.A)
+  Bases(builtins.object<1>)
+  Mro(target.A<0>, builtins.object<1>)
+  Names())
+TypeInfo<2>(
+  Name(target.N)
+  Bases(builtins.tuple[target.A<0>]<3>)
+  Mro(target.N<2>, builtins.tuple<3>, builtins.object<1>)
+  Names(
+    __annotations__<4> (builtins.object<1>)
+    __doc__<5> (builtins.str<6>)
+    __init__<7>
+    _asdict<8>
+    _field_defaults<9> (builtins.object<1>)
+    _field_types<10> (builtins.object<1>)
+    _fields<11> (Tuple[builtins.str<6>])
+    _make<12>
+    _replace<13>
+    _source<14> (builtins.str<6>)
+    x<15> (target.A<0>)))
+==>
+TypeInfo<0>(
+  Name(target.A)
+  Bases(builtins.object<1>)
+  Mro(target.A<0>, builtins.object<1>)
+  Names())
+TypeInfo<2>(
+  Name(target.N)
+  Bases(builtins.tuple[target.A<0>]<3>)
+  Mro(target.N<2>, builtins.tuple<3>, builtins.object<1>)
+  Names(
+    __annotations__<4> (builtins.object<1>)
+    __doc__<5> (builtins.str<6>)
+    __init__<7>
+    _asdict<8>
+    _field_defaults<9> (builtins.object<1>)
+    _field_types<10> (builtins.object<1>)
+    _fields<11> (Tuple[builtins.str<6>, builtins.str<6>])
+    _make<12>
+    _replace<13>
+    _source<14> (builtins.str<6>)
+    x<15> (target.A<0>)
+    y<16> (target.A<0>)))
+
+[case testUnionType_types]
+import target
+[file target.py]
+from typing import Union
+class A: pass
+a: A
+[file target.py.next]
+from typing import Union
+class A: pass
+a: Union[A, int]
+[out]
+## target
+TempNode:-1: Any
+NameExpr:3: target.A<0>
+==>
+## target
+TempNode:-1: Any
+NameExpr:3: Union[target.A<0>, builtins.int<1>]
+
+[case testTypeType_types]
+import target
+[file target.py]
+from typing import Type
+class A: pass
+a: Type[A]
+[file target.py.next]
+from typing import Type
+class A: pass
+a: Type[A]
+[out]
+## target
+TempNode:-1: Any
+NameExpr:3: Type[target.A<0>]
+==>
+## target
+TempNode:-1: Any
+NameExpr:3: Type[target.A<0>]
+
+[case testTypeVar_types]
+import target
+[file target.py]
+from typing import TypeVar, Generic
+T = TypeVar('T', bound=int)
+class A(Generic[T]):
+    x: T
+[file target.py.next]
+from typing import TypeVar
+T = TypeVar('T', bound='A')
+class A(Generic[T]):
+    x: T
+[out]
+## target
+TempNode:-1: Any
+CallExpr:2: Any
+NameExpr:2: Any
+TypeVarExpr:2: Any
+NameExpr:4: T`1(upper_bound=builtins.int<0>)
+==>
+## target
+TempNode:-1: Any
+CallExpr:2: Any
+NameExpr:2: Any
+TypeVarExpr:2: Any
+NameExpr:4: T`1(upper_bound=target.A[Any]<1>)
+
+[case testUnboundType_types]
+import target
+[file target.py]
+from typing import TypeVar, Generic
+class A: pass
+foo: int
+x: foo[A]
+[file target.py.next]
+from typing import TypeVar, Generic
+class A: pass
+foo: int
+x: foo[A]
+[out]
+tmp/target.py:4: error: Invalid type "target.foo"
+## target
+TempNode:-1: Any
+TempNode:-1: Any
+NameExpr:3: builtins.int<0>
+NameExpr:4: foo?[A?]
+==>
+## target
+TempNode:-1: Any
+TempNode:-1: Any
+NameExpr:3: builtins.int<0>
+NameExpr:4: foo?[A?]
+
+[case testOverloaded_types]
+import target
+[file target.py]
+from typing import overload
+class A: pass
+
+@overload
+def f(x: A) -> A: pass
+@overload
+def f(x: int) -> int: pass
+
+def f(x): pass
+
+g = f
+[file target.py.next]
+from typing import overload
+
+class A: pass
+
+@overload
+def f(x: A) -> A: pass
+@overload
+def f(x: str) -> str: pass
+
+def f(x): pass
+
+g = f
+[out]
+-- TODO: It is unclear why this works correctly...
+## target
+NameExpr:11: Overload(def (x: target.A<0>) -> target.A<0>, def (x: builtins.int<1>) -> builtins.int<1>)
+NameExpr:11: Overload(def (x: target.A<0>) -> target.A<0>, def (x: builtins.int<1>) -> builtins.int<1>)
+==>
+## target
+NameExpr:12: Overload(def (x: target.A<0>) -> target.A<0>, def (x: builtins.str<2>) -> builtins.str<2>)
+NameExpr:12: Overload(def (x: target.A<0>) -> target.A<0>, def (x: builtins.str<2>) -> builtins.str<2>)
+
+[case testTypeVar_symtable]
+import target
+
+[file target.py]
+from typing import TypeVar
+T = TypeVar('T')
+
+[file target.py.next]
+from typing import TypeVar
+T = TypeVar('T', bound=int)
+
+[out]
+__main__:
+    target: MypyFile<0>
+target:
+    T: TypeVarExpr<1>
+    TypeVar: Var<2>
+==>
+__main__:
+    target: MypyFile<0>
+target:
+    T: TypeVarExpr<1>
+    TypeVar: Var<2>
+
+[case testTypeAlias_symtable]
+import target
+
+[file target.py]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]): pass
+X = A[int]
+
+[file target.py.next]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]): pass
+X = A[str]
+
+[out]
+__main__:
+    target: MypyFile<0>
+target:
+    A: TypeInfo<1>
+    Generic: Var<2>
+    T: TypeVarExpr<3>
+    TypeVar: Var<4>
+    X: Var<5>(type_override=target.A[builtins.int<6>]<1>)
+==>
+__main__:
+    target: MypyFile<0>
+target:
+    A: TypeInfo<1>
+    Generic: Var<2>
+    T: TypeVarExpr<3>
+    TypeVar: Var<4>
+    X: Var<5>(type_override=target.A[builtins.str<7>]<1>)
+
+[case testGenericFunction_types]
+import target
+
+[file target.py]
+from typing import TypeVar
+class A: pass
+T = TypeVar('T', bound=A)
+def f(x: T) -> T: pass
+f
+
+[file target.py.next]
+from typing import TypeVar
+class A: pass
+T = TypeVar('T', bound=A)
+def f(x: T, y: A) -> T: pass
+f
+
+[out]
+## target
+CallExpr:3: Any
+NameExpr:3: Any
+TypeVarExpr:3: Any
+NameExpr:5: def [T <: target.A<0>] (x: T`-1(upper_bound=target.A<0>)) -> T`-1(upper_bound=target.A<0>)
+==>
+## target
+CallExpr:3: Any
+NameExpr:3: Any
+TypeVarExpr:3: Any
+NameExpr:5: def [T <: target.A<0>] (x: T`-1(upper_bound=target.A<0>), y: target.A<0>) -> T`-1(upper_bound=target.A<0>)
+
+[case testMergeOverloaded_types]
+import target
+[file target.py]
+from _x import A
+a: A
+[file target.py.next]
+from _x import A
+a: A
+[file _x.pyi]
+from typing import Generic, TypeVar, overload
+
+T = TypeVar('T')
+
+class C(Generic[T]):
+    @overload
+    def __init__(self) -> None: pass
+    @overload
+    def __init__(self, x: int) -> None: pass
+A = C[int]
+[out]
+## target
+TempNode:-1: Any
+NameExpr:2: _x.C[builtins.int<0>]<1>
+==>
+## target
+TempNode:-1: Any
+NameExpr:2: _x.C[builtins.int<0>]<1>
+
+[case testRefreshVar_symtable]
+from typing import TypeVar
+from target import f
+x = 1
+y = '' # type: str
+[file target.py]
+f = 1
+[file target.py.next]
+[out]
+__main__:
+    TypeVar: Var<0>
+    f: Var<1>
+    x: Var<2>
+    y: Var<3>
+target:
+    f: Var<1>
+==>
+__main__:
+    TypeVar: Var<0>
+    f: Var<4>
+    x: Var<2>
+    y: Var<3>
+target:
+
+[case testRefreshTypeVar_symtable]
+from typing import TypeVar
+from target import f
+T = TypeVar('T')
+[file target.py]
+f = 1
+[file target.py.next]
+[out]
+__main__:
+    T: TypeVarExpr<0>
+    TypeVar: Var<1>
+    f: Var<2>
+target:
+    f: Var<2>
+==>
+__main__:
+    T: TypeVarExpr<0>
+    TypeVar: Var<1>
+    f: Var<3>
+target:
+
+[case testRefreshNamedTuple_symtable]
+from typing import NamedTuple
+from target import f
+N = NamedTuple('N', [('x', int)])
+[file target.py]
+f = 1
+[file target.py.next]
+[out]
+__main__:
+    N: TypeInfo<0>
+    NamedTuple: Var<1>
+    f: Var<2>
+target:
+    f: Var<2>
+==>
+__main__:
+    N: TypeInfo<0>
+    NamedTuple: Var<1>
+    f: Var<3>
+target:
+
+[case testRefreshAttributeDefinedInClassBody_typeinfo]
+from target import f
+class A:
+    a = 1
+    b = '' # type: str
+[file target.py]
+f = 1
+[file target.py.next]
+[out]
+TypeInfo<0>(
+  Name(__main__.A)
+  Bases(builtins.object<1>)
+  Mro(__main__.A<0>, builtins.object<1>)
+  Names(
+    a<2> (builtins.int<3>)
+    b<4> (builtins.str<5>)))
+==>
+TypeInfo<0>(
+  Name(__main__.A)
+  Bases(builtins.object<1>)
+  Mro(__main__.A<0>, builtins.object<1>)
+  Names(
+    a<2> (builtins.int<3>)
+    b<4> (builtins.str<5>)))
diff --git a/test-data/unit/parse-errors.test b/test-data/unit/parse-errors.test
index 22a3c5c..5988617 100644
--- a/test-data/unit/parse-errors.test
+++ b/test-data/unit/parse-errors.test
@@ -169,6 +169,7 @@ def f(): # type: x
   pass
 [out]
 file:1: error: syntax error in type comment
+file:1: note: Suggestion: wrap argument types in parentheses
 
 [case testInvalidSignatureInComment2]
 def f(): # type:
diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test
index 8f9ffd8..6737496 100644
--- a/test-data/unit/pythoneval.test
+++ b/test-data/unit/pythoneval.test
@@ -1402,6 +1402,11 @@ o: object = p
 it2: Iterable[int] = p
 [out]
 _testCanConvertTypedDictToAnySuperclassOfMapping.py:11: error: Incompatible types in assignment (expression has type "Point", variable has type "Iterable[int]")
+_testCanConvertTypedDictToAnySuperclassOfMapping.py:11: note: Following member(s) of "Point" have conflicts:
+_testCanConvertTypedDictToAnySuperclassOfMapping.py:11: note:     Expected:
+_testCanConvertTypedDictToAnySuperclassOfMapping.py:11: note:         def __iter__(self) -> Iterator[int]
+_testCanConvertTypedDictToAnySuperclassOfMapping.py:11: note:     Got:
+_testCanConvertTypedDictToAnySuperclassOfMapping.py:11: note:         def __iter__(self) -> Iterator[str]
 
 [case testAsyncioGatherPreciseType]
 import asyncio
@@ -1418,6 +1423,17 @@ async def main() -> None:
 _testAsyncioGatherPreciseType.py:9: error: Revealed type is 'builtins.str'
 _testAsyncioGatherPreciseType.py:10: error: Revealed type is 'builtins.str'
 
+[case testMultipleInheritanceWorksWithTupleTypeGeneric]
+from typing import SupportsAbs, NamedTuple
+
+class Point(NamedTuple('Point', [('x', int), ('y', int)]), SupportsAbs[int]):
+    def __abs__(p) -> int:
+        return abs(p.x) + abs(p.y)
+
+def test(a: Point) -> bool:
+    return abs(a) == 2
+[out]
+
 [case testNoCrashOnGenericUnionUnpacking]
 from typing import Union, Dict
 
diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test
index ccd13f1..5d77871 100644
--- a/test-data/unit/semanal-errors.test
+++ b/test-data/unit/semanal-errors.test
@@ -819,8 +819,8 @@ y = None # type: Callable[int]
 z = None # type: Callable[int, int, int]
 [out]
 main:2: error: The first argument to Callable must be a list of types or "..."
-main:3: error: Invalid function type
-main:4: error: Invalid function type
+main:3: error: Please use "Callable[[<parameters>], <return type>]" or "Callable"
+main:4: error: Please use "Callable[[<parameters>], <return type>]" or "Callable"
 
 [case testAbstractGlobalFunction]
 import typing
diff --git a/test-data/unit/semanal-symtable.test b/test-data/unit/semanal-symtable.test
index 9f7a6d9..6baa57a 100644
--- a/test-data/unit/semanal-symtable.test
+++ b/test-data/unit/semanal-symtable.test
@@ -64,3 +64,37 @@ m:
   SymbolTable(
     x : Gdef/TypeInfo (m.x)
     y : Gdef/Var (m.y))
+
+[case testFailingImports]
+from sys import non_existing1  # type: ignore
+from xyz import non_existing2  # type: ignore
+if int():
+    from sys import non_existing3  # type: ignore
+import non_existing4  # type: ignore
+[out]
+__main__:
+  SymbolTable(
+    non_existing1 : Gdef/Var (__main__.non_existing1) : Any
+    non_existing2 : Gdef/Var (__main__.non_existing2) : Any
+    non_existing3 : Gdef/Var (__main__.non_existing3) : Any
+    non_existing4 : Gdef/Var (__main__.non_existing4) : Any)
+sys:
+  SymbolTable(
+    platform : Gdef/Var (sys.platform)
+    version_info : Gdef/Var (sys.version_info))
+
+[case testDecorator]
+from typing import Callable
+
+def dec(f: Callable[[], None]) -> Callable[[], None]:
+    return f
+
+ at dec
+def g() -> None:
+    pass
+[out]
+__main__:
+  SymbolTable(
+    Callable : Gdef/Var (typing.Callable)
+    dec : Gdef/FuncDef (__main__.dec) : def (f: def ()) -> def ()
+    g : Gdef/Decorator (__main__.g) : def ())
diff --git a/typeshed/stdlib/2/SocketServer.pyi b/typeshed/stdlib/2/SocketServer.pyi
index 8d03b8f..91f781d 100644
--- a/typeshed/stdlib/2/SocketServer.pyi
+++ b/typeshed/stdlib/2/SocketServer.pyi
@@ -21,7 +21,8 @@ class BaseServer:
     def serve_forever(self, poll_interval: float = ...) -> None: ...
     def shutdown(self) -> None: ...
     def server_close(self) -> None: ...
-    def finish_request(self) -> None: ...
+    def finish_request(self, request: bytes,
+                       client_address: Tuple[str, int]) -> None: ...
     def get_request(self) -> None: ...
     def handle_error(self, request: bytes,
                      client_address: Tuple[str, int]) -> None: ...
diff --git a/typeshed/stdlib/2/__builtin__.pyi b/typeshed/stdlib/2/__builtin__.pyi
index 0b8dfb1..0c476ce 100644
--- a/typeshed/stdlib/2/__builtin__.pyi
+++ b/typeshed/stdlib/2/__builtin__.pyi
@@ -28,6 +28,7 @@ _TT = TypeVar('_TT', bound='type')
 class object:
     __doc__ = ...  # type: Optional[str]
     __class__ = ...  # type: type
+    __dict__ = ...  # type: Dict[str, Any]
     __slots__ = ...  # type: Optional[Union[str, unicode, Iterable[Union[str, unicode]]]]
     __module__ = ...  # type: str
 
@@ -64,7 +65,6 @@ class type(object):
     __bases__ = ...  # type: Tuple[type, ...]
     __name__ = ...  # type: str
     __module__ = ...  # type: str
-    __dict__ = ...  # type: Dict[unicode, Any]
 
     @overload
     def __init__(self, o: object) -> None: ...
@@ -86,7 +86,7 @@ class type(object):
     def __instancecheck__(self, instance: Any) -> bool: ...
     def __subclasscheck__(self, subclass: type) -> bool: ...
 
-class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
+class int:
     @overload
     def __init__(self, x: SupportsInt = ...) -> None: ...
     @overload
@@ -138,7 +138,7 @@ class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
     def __hash__(self) -> int: ...
     def __nonzero__(self) -> bool: ...
 
-class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
+class float:
     def __init__(self, x: Union[SupportsFloat, str, unicode, bytearray] = ...) -> None: ...
     def as_integer_ratio(self) -> Tuple[int, int]: ...
     def hex(self) -> str: ...
@@ -179,9 +179,9 @@ class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
     def __hash__(self) -> int: ...
     def __nonzero__(self) -> bool: ...
 
-class complex(SupportsAbs[float]):
+class complex:
     @overload
-    def __init__(self, re: float = 0.0, im: float = 0.0) -> None: ...
+    def __init__(self, re: float = ..., im: float = ...) -> None: ...
     @overload
     def __init__(self, s: str) -> None: ...
 
@@ -231,17 +231,17 @@ class unicode(basestring, Sequence[unicode]):
     @overload
     def __init__(self, o: str, encoding: unicode = ..., errors: unicode = ...) -> None: ...
     def capitalize(self) -> unicode: ...
-    def center(self, width: int, fillchar: unicode = u' ') -> unicode: ...
+    def center(self, width: int, fillchar: unicode = ...) -> unicode: ...
     def count(self, x: unicode) -> int: ...
     def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ...
     def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ...
-    def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]], start: int = 0,
+    def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]], start: int = ...,
                  end: int = ...) -> bool: ...
-    def expandtabs(self, tabsize: int = 8) -> unicode: ...
-    def find(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def expandtabs(self, tabsize: int = ...) -> unicode: ...
+    def find(self, sub: unicode, start: int = ..., end: int = ...) -> int: ...
     def format(self, *args: Any, **kwargs: Any) -> unicode: ...
     def format_map(self, map: Mapping[unicode, Any]) -> unicode: ...
-    def index(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def index(self, sub: unicode, start: int = ..., end: int = ...) -> int: ...
     def isalnum(self) -> bool: ...
     def isalpha(self) -> bool: ...
     def isdecimal(self) -> bool: ...
@@ -254,20 +254,20 @@ class unicode(basestring, Sequence[unicode]):
     def istitle(self) -> bool: ...
     def isupper(self) -> bool: ...
     def join(self, iterable: Iterable[unicode]) -> unicode: ...
-    def ljust(self, width: int, fillchar: unicode = u' ') -> unicode: ...
+    def ljust(self, width: int, fillchar: unicode = ...) -> unicode: ...
     def lower(self) -> unicode: ...
     def lstrip(self, chars: unicode = ...) -> unicode: ...
     def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
     def replace(self, old: unicode, new: unicode, count: int = ...) -> unicode: ...
-    def rfind(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
-    def rindex(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
-    def rjust(self, width: int, fillchar: unicode = u' ') -> unicode: ...
+    def rfind(self, sub: unicode, start: int = ..., end: int = ...) -> int: ...
+    def rindex(self, sub: unicode, start: int = ..., end: int = ...) -> int: ...
+    def rjust(self, width: int, fillchar: unicode = ...) -> unicode: ...
     def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
     def rsplit(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ...
     def rstrip(self, chars: unicode = ...) -> unicode: ...
     def split(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ...
     def splitlines(self, keepends: bool = ...) -> List[unicode]: ...
-    def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]], start: int = 0,
+    def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]], start: int = ...,
                    end: int = ...) -> bool: ...
     def strip(self, chars: unicode = ...) -> unicode: ...
     def swapcase(self) -> unicode: ...
@@ -283,6 +283,7 @@ class unicode(basestring, Sequence[unicode]):
     def __getslice__(self, start: int, stop: int) -> unicode: ...
     def __add__(self, s: unicode) -> unicode: ...
     def __mul__(self, n: int) -> unicode: ...
+    def __rmul__(self, n: int) -> unicode: ...
     def __mod__(self, x: Any) -> unicode: ...
     def __eq__(self, x: object) -> bool: ...
     def __ne__(self, x: object) -> bool: ...
@@ -308,7 +309,7 @@ class str(basestring, Sequence[str]):
     def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ...
     def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ...
     def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]]) -> bool: ...
-    def expandtabs(self, tabsize: int = 8) -> str: ...
+    def expandtabs(self, tabsize: int = ...) -> str: ...
     def find(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
     def format(self, *args: Any, **kwargs: Any) -> str: ...
     def index(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
@@ -405,9 +406,9 @@ class bytearray(MutableSequence[int]):
     def count(self, x: str) -> int: ...
     def decode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ...
     def endswith(self, suffix: Union[str, Tuple[str, ...]]) -> bool: ...
-    def expandtabs(self, tabsize: int = 8) -> bytearray: ...
-    def find(self, sub: str, start: int = 0, end: int = ...) -> int: ...
-    def index(self, sub: str, start: int = 0, end: int = ...) -> int: ...
+    def expandtabs(self, tabsize: int = ...) -> bytearray: ...
+    def find(self, sub: str, start: int = ..., end: int = ...) -> int: ...
+    def index(self, sub: str, start: int = ..., end: int = ...) -> int: ...
     def insert(self, index: int, object: int) -> None: ...
     def isalnum(self) -> bool: ...
     def isalpha(self) -> bool: ...
@@ -422,8 +423,8 @@ class bytearray(MutableSequence[int]):
     def lstrip(self, chars: str = ...) -> bytearray: ...
     def partition(self, sep: str) -> Tuple[bytearray, bytearray, bytearray]: ...
     def replace(self, old: str, new: str, count: int = ...) -> bytearray: ...
-    def rfind(self, sub: str, start: int = 0, end: int = ...) -> int: ...
-    def rindex(self, sub: str, start: int = 0, end: int = ...) -> int: ...
+    def rfind(self, sub: str, start: int = ..., end: int = ...) -> int: ...
+    def rindex(self, sub: str, start: int = ..., end: int = ...) -> int: ...
     def rjust(self, width: int, fillchar: str = ...) -> bytearray: ...
     def rpartition(self, sep: str) -> Tuple[bytearray, bytearray, bytearray]: ...
     def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[bytearray]: ...
@@ -469,7 +470,7 @@ class bytearray(MutableSequence[int]):
     def __gt__(self, x: str) -> bool: ...
     def __ge__(self, x: str) -> bool: ...
 
-class bool(int, SupportsInt, SupportsFloat):
+class bool(int):
     def __init__(self, o: object = ...) -> None: ...
 
 class slice(object):
@@ -479,7 +480,7 @@ class slice(object):
     @overload
     def __init__(self, stop: Optional[int]) -> None: ...
     @overload
-    def __init__(self, start: Optional[int], stop: Optional[int], step: Optional[int] = None) -> None: ...
+    def __init__(self, start: Optional[int], stop: Optional[int], step: Optional[int] = ...) -> None: ...
     def indices(self, len: int) -> Tuple[int, int, int]: ...
 
 class tuple(Sequence[_T_co], Generic[_T_co]):
@@ -513,8 +514,8 @@ class list(MutableSequence[_T], Generic[_T]):
     def __init__(self, iterable: Iterable[_T]) -> None: ...
     def append(self, object: _T) -> None: ...
     def extend(self, iterable: Iterable[_T]) -> None: ...
-    def pop(self, index: int = -1) -> _T: ...
-    def index(self, object: _T, start: int = 0, stop: int = ...) -> int: ...
+    def pop(self, index: int = ...) -> _T: ...
+    def index(self, object: _T, start: int = ..., stop: int = ...) -> int: ...
     def count(self, object: _T) -> int: ...
     def insert(self, index: int, object: _T) -> None: ...
     def remove(self, object: _T) -> None: ...
@@ -654,7 +655,7 @@ class frozenset(AbstractSet[_T], Generic[_T]):
     def __gt__(self, s: AbstractSet[object]) -> bool: ...
 
 class enumerate(Iterator[Tuple[int, _T]], Generic[_T]):
-    def __init__(self, iterable: Iterable[_T], start: int = 0) -> None: ...
+    def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ...
     def __iter__(self) -> Iterator[Tuple[int, _T]]: ...
     def next(self) -> Tuple[int, _T]: ...
     # TODO __getattribute__
@@ -663,20 +664,20 @@ class xrange(Sized, Iterable[int], Reversible[int]):
     @overload
     def __init__(self, stop: int) -> None: ...
     @overload
-    def __init__(self, start: int, stop: int, step: int = 1) -> None: ...
+    def __init__(self, start: int, stop: int, step: int = ...) -> None: ...
     def __len__(self) -> int: ...
     def __iter__(self) -> Iterator[int]: ...
     def __getitem__(self, i: int) -> int: ...
     def __reversed__(self) -> Iterator[int]: ...
 
 class property(object):
-    def __init__(self, fget: Optional[Callable[[Any], Any]] = None,
-                 fset: Optional[Callable[[Any, Any], None]] = None,
-                 fdel: Optional[Callable[[Any], None]] = None, doc: Optional[str] = None) -> None: ...
+    def __init__(self, fget: Optional[Callable[[Any], Any]] = ...,
+                 fset: Optional[Callable[[Any, Any], None]] = ...,
+                 fdel: Optional[Callable[[Any], None]] = ..., doc: Optional[str] = ...) -> None: ...
     def getter(self, fget: Callable[[Any], Any]) -> property: ...
     def setter(self, fset: Callable[[Any, Any], None]) -> property: ...
     def deleter(self, fdel: Callable[[Any], None]) -> property: ...
-    def __get__(self, obj: Any, type: Optional[type] = None) -> Any: ...
+    def __get__(self, obj: Any, type: Optional[type] = ...) -> Any: ...
     def __set__(self, obj: Any, value: Any) -> None: ...
     def __delete__(self, obj: Any) -> None: ...
     def fget(self) -> Any: ...
@@ -694,8 +695,8 @@ def any(i: Iterable[object]) -> bool: ...
 def bin(number: int) -> str: ...
 def callable(o: object) -> bool: ...
 def chr(code: int) -> str: ...
-def compile(source: Any, filename: unicode, mode: str, flags: int = 0,
-            dont_inherit: int = 0) -> Any: ...
+def compile(source: Any, filename: unicode, mode: str, flags: int = ...,
+            dont_inherit: int = ...) -> Any: ...
 def delattr(o: Any, name: unicode) -> None: ...
 def dir(o: object = ...) -> List[str]: ...
 @overload
@@ -709,8 +710,8 @@ def filter(function: Callable[[_T], Any],
 @overload
 def filter(function: None,
            iterable: Iterable[Optional[_T]]) -> List[_T]: ...
-def format(o: object, format_spec: str = '') -> str: ...  # TODO unicode
-def getattr(o: Any, name: unicode, default: Optional[Any] = None) -> Any: ...
+def format(o: object, format_spec: str = ...) -> str: ...  # TODO unicode
+def getattr(o: Any, name: unicode, default: Optional[Any] = ...) -> Any: ...
 def hasattr(o: Any, name: unicode) -> bool: ...
 def hash(o: object) -> int: ...
 def hex(i: int) -> str: ...  # TODO __index__
@@ -731,6 +732,12 @@ def map(func: Callable[[_T1, _T2], _S],
         iter1: Iterable[_T1],
         iter2: Iterable[_T2]) -> List[_S]: ...  # TODO more than two iterables
 @overload
+def map(func: None, iter1: Iterable[_T1]) -> List[_T1]: ...
+ at overload
+def map(func: None,
+        iter1: Iterable[_T1],
+        iter2: Iterable[_T2]) -> List[Tuple[_T1, _T2]]: ...  # TODO more than two iterables
+ at overload
 def max(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
 def max(iterable: Iterable[_T], key: Callable[[_T], Any] = ...) -> _T: ...
@@ -873,7 +880,10 @@ class memoryview(Sized, Container[bytes]):
 class BaseException(object):
     args = ...  # type: Tuple[Any, ...]
     message = ...  # type: str
-    def __init__(self, *args: Any) -> None: ...
+    def __init__(self, *args: object, **kwargs: object) -> None: ...
+    def __getitem__(self, i: int) -> Any: ...
+    def __getslice__(self, start: int, stop: int) -> Tuple[Any, ...]: ...
+
 class GeneratorExit(BaseException): ...
 class KeyboardInterrupt(BaseException): ...
 class SystemExit(BaseException):
@@ -938,24 +948,24 @@ class ResourceWarning(Warning): ...
 
 def eval(s: Union[str, unicode], globals: Dict[str, Any] = ..., locals: Dict[str, Any] = ...) -> Any: ...
 def exec(object: str,
-         globals: Optional[Dict[str, Any]] = None,
-         locals: Optional[Dict[str, Any]] = None) -> Any: ...  # TODO code object as source
+         globals: Optional[Dict[str, Any]] = ...,
+         locals: Optional[Dict[str, Any]] = ...) -> Any: ...  # TODO code object as source
 
 def cmp(x: Any, y: Any) -> int: ...
 
-def execfile(filename: str, globals: Optional[Dict[str, Any]] = None, locals: Optional[Dict[str, Any]] = None) -> None: ...
+def execfile(filename: str, globals: Optional[Dict[str, Any]] = ..., locals: Optional[Dict[str, Any]] = ...) -> None: ...
 
 class file(BinaryIO):
     @overload
-    def __init__(self, file: str, mode: str = 'r', buffering: int = ...) -> None: ...
+    def __init__(self, file: str, mode: str = ..., buffering: int = ...) -> None: ...
     @overload
-    def __init__(self, file: unicode, mode: str = 'r', buffering: int = ...) -> None: ...
+    def __init__(self, file: unicode, mode: str = ..., buffering: int = ...) -> None: ...
     @overload
-    def __init__(self, file: int, mode: str = 'r', buffering: int = ...) -> None: ...
+    def __init__(self, file: int, mode: str = ..., buffering: int = ...) -> None: ...
     def __iter__(self) -> Iterator[str]: ...
     def read(self, n: int = ...) -> str: ...
     def __enter__(self) -> BinaryIO: ...
-    def __exit__(self, t: Optional[type] = None, exc: Optional[BaseException] = None, tb: Optional[Any] = None) -> bool: ...
+    def __exit__(self, t: Optional[type] = ..., exc: Optional[BaseException] = ..., tb: Optional[Any] = ...) -> bool: ...
     def flush(self) -> None: ...
     def fileno(self) -> int: ...
     def isatty(self) -> bool: ...
@@ -973,6 +983,6 @@ class file(BinaryIO):
     def truncate(self, pos: Optional[int] = ...) -> int: ...
 
 # Very old builtins
-def apply(func: Callable[..., _T], args: Optional[Sequence[Any]] = None, kwds: Optional[Mapping[str, Any]] = None) -> _T: ...
+def apply(func: Callable[..., _T], args: Optional[Sequence[Any]] = ..., kwds: Optional[Mapping[str, Any]] = ...) -> _T: ...
 _N = TypeVar('_N', bool, int, float, complex)
 def coerce(x: _N, y: _N) -> Tuple[_N, _N]: ...
diff --git a/typeshed/stdlib/2/array.pyi b/typeshed/stdlib/2/array.pyi
deleted file mode 100644
index 508eadd..0000000
--- a/typeshed/stdlib/2/array.pyi
+++ /dev/null
@@ -1,56 +0,0 @@
-"""Stub file for the 'array' module."""
-
-from typing import (Any, Generic, IO, Iterable, Sequence, TypeVar,
-                    Union, overload, Iterator, Tuple, BinaryIO, List)
-
-_T = TypeVar('_T')
-
-class array(Generic[_T]):
-    def __init__(self, typecode: str, init: Iterable[_T] = ...) -> None: ...
-    def __add__(self, y: "array[_T]") -> "array[_T]": ...
-    def __contains__(self, y: Any) -> bool: ...
-    def __copy__(self) -> "array[_T]": ...
-    def __deepcopy__(self) -> "array": ...
-    def __delitem__(self, y: Union[slice, int]) -> None: ...
-    def __delslice__(self, i: int, j: int) -> None: ...
-    @overload
-    def __getitem__(self, i: int) -> Any: ...
-    @overload
-    def __getitem__(self, s: slice) -> "array": ...
-    def __iadd__(self, y: "array[_T]") -> "array[_T]": ...
-    def __imul__(self, y: int) -> "array[_T]": ...
-    def __iter__(self) -> Iterator[_T]: ...
-    def __len__(self) -> int: ...
-    def __mul__(self, n: int) -> "array[_T]": ...
-    def __rmul__(self, n: int) -> "array[_T]": ...
-    @overload
-    def __setitem__(self, i: int, y: _T) -> None: ...
-    @overload
-    def __setitem__(self, i: slice, y: "array[_T]") -> None: ...
-
-    def append(self, x: _T) -> None: ...
-    def buffer_info(self) -> Tuple[int, int]: ...
-    def byteswap(self) -> None:
-        raise RuntimeError()
-    def count(self) -> int: ...
-    def extend(self, x: Sequence[_T]) -> None: ...
-    def fromlist(self, list: List[_T]) -> None:
-        raise EOFError()
-        raise IOError()
-    def fromfile(self, f: BinaryIO, n: int) -> None: ...
-    def fromstring(self, s: str) -> None: ...
-    def fromunicode(self, u: unicode) -> None: ...
-    def index(self, x: _T) -> int: ...
-    def insert(self, i: int, x: _T) -> None: ...
-    def pop(self, i: int = ...) -> _T: ...
-    def read(self, f: IO[str], n: int) -> None:
-        raise DeprecationWarning()
-    def remove(self, x: _T) -> None: ...
-    def reverse(self) -> None: ...
-    def tofile(self, f: BinaryIO) -> None:
-        raise IOError()
-    def tolist(self) -> List[_T]: ...
-    def tostring(self) -> str: ...
-    def tounicode(self) -> unicode: ...
-    def write(self, f: IO[str]) -> None:
-        raise DeprecationWarning()
diff --git a/typeshed/stdlib/2/builtins.pyi b/typeshed/stdlib/2/builtins.pyi
index 0b8dfb1..0c476ce 100644
--- a/typeshed/stdlib/2/builtins.pyi
+++ b/typeshed/stdlib/2/builtins.pyi
@@ -28,6 +28,7 @@ _TT = TypeVar('_TT', bound='type')
 class object:
     __doc__ = ...  # type: Optional[str]
     __class__ = ...  # type: type
+    __dict__ = ...  # type: Dict[str, Any]
     __slots__ = ...  # type: Optional[Union[str, unicode, Iterable[Union[str, unicode]]]]
     __module__ = ...  # type: str
 
@@ -64,7 +65,6 @@ class type(object):
     __bases__ = ...  # type: Tuple[type, ...]
     __name__ = ...  # type: str
     __module__ = ...  # type: str
-    __dict__ = ...  # type: Dict[unicode, Any]
 
     @overload
     def __init__(self, o: object) -> None: ...
@@ -86,7 +86,7 @@ class type(object):
     def __instancecheck__(self, instance: Any) -> bool: ...
     def __subclasscheck__(self, subclass: type) -> bool: ...
 
-class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
+class int:
     @overload
     def __init__(self, x: SupportsInt = ...) -> None: ...
     @overload
@@ -138,7 +138,7 @@ class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
     def __hash__(self) -> int: ...
     def __nonzero__(self) -> bool: ...
 
-class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
+class float:
     def __init__(self, x: Union[SupportsFloat, str, unicode, bytearray] = ...) -> None: ...
     def as_integer_ratio(self) -> Tuple[int, int]: ...
     def hex(self) -> str: ...
@@ -179,9 +179,9 @@ class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
     def __hash__(self) -> int: ...
     def __nonzero__(self) -> bool: ...
 
-class complex(SupportsAbs[float]):
+class complex:
     @overload
-    def __init__(self, re: float = 0.0, im: float = 0.0) -> None: ...
+    def __init__(self, re: float = ..., im: float = ...) -> None: ...
     @overload
     def __init__(self, s: str) -> None: ...
 
@@ -231,17 +231,17 @@ class unicode(basestring, Sequence[unicode]):
     @overload
     def __init__(self, o: str, encoding: unicode = ..., errors: unicode = ...) -> None: ...
     def capitalize(self) -> unicode: ...
-    def center(self, width: int, fillchar: unicode = u' ') -> unicode: ...
+    def center(self, width: int, fillchar: unicode = ...) -> unicode: ...
     def count(self, x: unicode) -> int: ...
     def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ...
     def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ...
-    def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]], start: int = 0,
+    def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]], start: int = ...,
                  end: int = ...) -> bool: ...
-    def expandtabs(self, tabsize: int = 8) -> unicode: ...
-    def find(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def expandtabs(self, tabsize: int = ...) -> unicode: ...
+    def find(self, sub: unicode, start: int = ..., end: int = ...) -> int: ...
     def format(self, *args: Any, **kwargs: Any) -> unicode: ...
     def format_map(self, map: Mapping[unicode, Any]) -> unicode: ...
-    def index(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+    def index(self, sub: unicode, start: int = ..., end: int = ...) -> int: ...
     def isalnum(self) -> bool: ...
     def isalpha(self) -> bool: ...
     def isdecimal(self) -> bool: ...
@@ -254,20 +254,20 @@ class unicode(basestring, Sequence[unicode]):
     def istitle(self) -> bool: ...
     def isupper(self) -> bool: ...
     def join(self, iterable: Iterable[unicode]) -> unicode: ...
-    def ljust(self, width: int, fillchar: unicode = u' ') -> unicode: ...
+    def ljust(self, width: int, fillchar: unicode = ...) -> unicode: ...
     def lower(self) -> unicode: ...
     def lstrip(self, chars: unicode = ...) -> unicode: ...
     def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
     def replace(self, old: unicode, new: unicode, count: int = ...) -> unicode: ...
-    def rfind(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
-    def rindex(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
-    def rjust(self, width: int, fillchar: unicode = u' ') -> unicode: ...
+    def rfind(self, sub: unicode, start: int = ..., end: int = ...) -> int: ...
+    def rindex(self, sub: unicode, start: int = ..., end: int = ...) -> int: ...
+    def rjust(self, width: int, fillchar: unicode = ...) -> unicode: ...
     def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
     def rsplit(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ...
     def rstrip(self, chars: unicode = ...) -> unicode: ...
     def split(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ...
     def splitlines(self, keepends: bool = ...) -> List[unicode]: ...
-    def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]], start: int = 0,
+    def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]], start: int = ...,
                    end: int = ...) -> bool: ...
     def strip(self, chars: unicode = ...) -> unicode: ...
     def swapcase(self) -> unicode: ...
@@ -283,6 +283,7 @@ class unicode(basestring, Sequence[unicode]):
     def __getslice__(self, start: int, stop: int) -> unicode: ...
     def __add__(self, s: unicode) -> unicode: ...
     def __mul__(self, n: int) -> unicode: ...
+    def __rmul__(self, n: int) -> unicode: ...
     def __mod__(self, x: Any) -> unicode: ...
     def __eq__(self, x: object) -> bool: ...
     def __ne__(self, x: object) -> bool: ...
@@ -308,7 +309,7 @@ class str(basestring, Sequence[str]):
     def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ...
     def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ...
     def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]]) -> bool: ...
-    def expandtabs(self, tabsize: int = 8) -> str: ...
+    def expandtabs(self, tabsize: int = ...) -> str: ...
     def find(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
     def format(self, *args: Any, **kwargs: Any) -> str: ...
     def index(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
@@ -405,9 +406,9 @@ class bytearray(MutableSequence[int]):
     def count(self, x: str) -> int: ...
     def decode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ...
     def endswith(self, suffix: Union[str, Tuple[str, ...]]) -> bool: ...
-    def expandtabs(self, tabsize: int = 8) -> bytearray: ...
-    def find(self, sub: str, start: int = 0, end: int = ...) -> int: ...
-    def index(self, sub: str, start: int = 0, end: int = ...) -> int: ...
+    def expandtabs(self, tabsize: int = ...) -> bytearray: ...
+    def find(self, sub: str, start: int = ..., end: int = ...) -> int: ...
+    def index(self, sub: str, start: int = ..., end: int = ...) -> int: ...
     def insert(self, index: int, object: int) -> None: ...
     def isalnum(self) -> bool: ...
     def isalpha(self) -> bool: ...
@@ -422,8 +423,8 @@ class bytearray(MutableSequence[int]):
     def lstrip(self, chars: str = ...) -> bytearray: ...
     def partition(self, sep: str) -> Tuple[bytearray, bytearray, bytearray]: ...
     def replace(self, old: str, new: str, count: int = ...) -> bytearray: ...
-    def rfind(self, sub: str, start: int = 0, end: int = ...) -> int: ...
-    def rindex(self, sub: str, start: int = 0, end: int = ...) -> int: ...
+    def rfind(self, sub: str, start: int = ..., end: int = ...) -> int: ...
+    def rindex(self, sub: str, start: int = ..., end: int = ...) -> int: ...
     def rjust(self, width: int, fillchar: str = ...) -> bytearray: ...
     def rpartition(self, sep: str) -> Tuple[bytearray, bytearray, bytearray]: ...
     def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[bytearray]: ...
@@ -469,7 +470,7 @@ class bytearray(MutableSequence[int]):
     def __gt__(self, x: str) -> bool: ...
     def __ge__(self, x: str) -> bool: ...
 
-class bool(int, SupportsInt, SupportsFloat):
+class bool(int):
     def __init__(self, o: object = ...) -> None: ...
 
 class slice(object):
@@ -479,7 +480,7 @@ class slice(object):
     @overload
     def __init__(self, stop: Optional[int]) -> None: ...
     @overload
-    def __init__(self, start: Optional[int], stop: Optional[int], step: Optional[int] = None) -> None: ...
+    def __init__(self, start: Optional[int], stop: Optional[int], step: Optional[int] = ...) -> None: ...
     def indices(self, len: int) -> Tuple[int, int, int]: ...
 
 class tuple(Sequence[_T_co], Generic[_T_co]):
@@ -513,8 +514,8 @@ class list(MutableSequence[_T], Generic[_T]):
     def __init__(self, iterable: Iterable[_T]) -> None: ...
     def append(self, object: _T) -> None: ...
     def extend(self, iterable: Iterable[_T]) -> None: ...
-    def pop(self, index: int = -1) -> _T: ...
-    def index(self, object: _T, start: int = 0, stop: int = ...) -> int: ...
+    def pop(self, index: int = ...) -> _T: ...
+    def index(self, object: _T, start: int = ..., stop: int = ...) -> int: ...
     def count(self, object: _T) -> int: ...
     def insert(self, index: int, object: _T) -> None: ...
     def remove(self, object: _T) -> None: ...
@@ -654,7 +655,7 @@ class frozenset(AbstractSet[_T], Generic[_T]):
     def __gt__(self, s: AbstractSet[object]) -> bool: ...
 
 class enumerate(Iterator[Tuple[int, _T]], Generic[_T]):
-    def __init__(self, iterable: Iterable[_T], start: int = 0) -> None: ...
+    def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ...
     def __iter__(self) -> Iterator[Tuple[int, _T]]: ...
     def next(self) -> Tuple[int, _T]: ...
     # TODO __getattribute__
@@ -663,20 +664,20 @@ class xrange(Sized, Iterable[int], Reversible[int]):
     @overload
     def __init__(self, stop: int) -> None: ...
     @overload
-    def __init__(self, start: int, stop: int, step: int = 1) -> None: ...
+    def __init__(self, start: int, stop: int, step: int = ...) -> None: ...
     def __len__(self) -> int: ...
     def __iter__(self) -> Iterator[int]: ...
     def __getitem__(self, i: int) -> int: ...
     def __reversed__(self) -> Iterator[int]: ...
 
 class property(object):
-    def __init__(self, fget: Optional[Callable[[Any], Any]] = None,
-                 fset: Optional[Callable[[Any, Any], None]] = None,
-                 fdel: Optional[Callable[[Any], None]] = None, doc: Optional[str] = None) -> None: ...
+    def __init__(self, fget: Optional[Callable[[Any], Any]] = ...,
+                 fset: Optional[Callable[[Any, Any], None]] = ...,
+                 fdel: Optional[Callable[[Any], None]] = ..., doc: Optional[str] = ...) -> None: ...
     def getter(self, fget: Callable[[Any], Any]) -> property: ...
     def setter(self, fset: Callable[[Any, Any], None]) -> property: ...
     def deleter(self, fdel: Callable[[Any], None]) -> property: ...
-    def __get__(self, obj: Any, type: Optional[type] = None) -> Any: ...
+    def __get__(self, obj: Any, type: Optional[type] = ...) -> Any: ...
     def __set__(self, obj: Any, value: Any) -> None: ...
     def __delete__(self, obj: Any) -> None: ...
     def fget(self) -> Any: ...
@@ -694,8 +695,8 @@ def any(i: Iterable[object]) -> bool: ...
 def bin(number: int) -> str: ...
 def callable(o: object) -> bool: ...
 def chr(code: int) -> str: ...
-def compile(source: Any, filename: unicode, mode: str, flags: int = 0,
-            dont_inherit: int = 0) -> Any: ...
+def compile(source: Any, filename: unicode, mode: str, flags: int = ...,
+            dont_inherit: int = ...) -> Any: ...
 def delattr(o: Any, name: unicode) -> None: ...
 def dir(o: object = ...) -> List[str]: ...
 @overload
@@ -709,8 +710,8 @@ def filter(function: Callable[[_T], Any],
 @overload
 def filter(function: None,
            iterable: Iterable[Optional[_T]]) -> List[_T]: ...
-def format(o: object, format_spec: str = '') -> str: ...  # TODO unicode
-def getattr(o: Any, name: unicode, default: Optional[Any] = None) -> Any: ...
+def format(o: object, format_spec: str = ...) -> str: ...  # TODO unicode
+def getattr(o: Any, name: unicode, default: Optional[Any] = ...) -> Any: ...
 def hasattr(o: Any, name: unicode) -> bool: ...
 def hash(o: object) -> int: ...
 def hex(i: int) -> str: ...  # TODO __index__
@@ -731,6 +732,12 @@ def map(func: Callable[[_T1, _T2], _S],
         iter1: Iterable[_T1],
         iter2: Iterable[_T2]) -> List[_S]: ...  # TODO more than two iterables
 @overload
+def map(func: None, iter1: Iterable[_T1]) -> List[_T1]: ...
+@overload
+def map(func: None,
+        iter1: Iterable[_T1],
+        iter2: Iterable[_T2]) -> List[Tuple[_T1, _T2]]: ...  # TODO more than two iterables
+@overload
 def max(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
 def max(iterable: Iterable[_T], key: Callable[[_T], Any] = ...) -> _T: ...
@@ -873,7 +880,10 @@ class memoryview(Sized, Container[bytes]):
 class BaseException(object):
     args = ...  # type: Tuple[Any, ...]
     message = ...  # type: str
-    def __init__(self, *args: Any) -> None: ...
+    def __init__(self, *args: object, **kwargs: object) -> None: ...
+    def __getitem__(self, i: int) -> Any: ...
+    def __getslice__(self, start: int, stop: int) -> Tuple[Any, ...]: ...
+
 class GeneratorExit(BaseException): ...
 class KeyboardInterrupt(BaseException): ...
 class SystemExit(BaseException):
@@ -938,24 +948,24 @@ class ResourceWarning(Warning): ...
 
 def eval(s: Union[str, unicode], globals: Dict[str, Any] = ..., locals: Dict[str, Any] = ...) -> Any: ...
 def exec(object: str,
-         globals: Optional[Dict[str, Any]] = None,
-         locals: Optional[Dict[str, Any]] = None) -> Any: ...  # TODO code object as source
+         globals: Optional[Dict[str, Any]] = ...,
+         locals: Optional[Dict[str, Any]] = ...) -> Any: ...  # TODO code object as source
 
 def cmp(x: Any, y: Any) -> int: ...
 
-def execfile(filename: str, globals: Optional[Dict[str, Any]] = None, locals: Optional[Dict[str, Any]] = None) -> None: ...
+def execfile(filename: str, globals: Optional[Dict[str, Any]] = ..., locals: Optional[Dict[str, Any]] = ...) -> None: ...
 
 class file(BinaryIO):
     @overload
-    def __init__(self, file: str, mode: str = 'r', buffering: int = ...) -> None: ...
+    def __init__(self, file: str, mode: str = ..., buffering: int = ...) -> None: ...
     @overload
-    def __init__(self, file: unicode, mode: str = 'r', buffering: int = ...) -> None: ...
+    def __init__(self, file: unicode, mode: str = ..., buffering: int = ...) -> None: ...
     @overload
-    def __init__(self, file: int, mode: str = 'r', buffering: int = ...) -> None: ...
+    def __init__(self, file: int, mode: str = ..., buffering: int = ...) -> None: ...
     def __iter__(self) -> Iterator[str]: ...
     def read(self, n: int = ...) -> str: ...
     def __enter__(self) -> BinaryIO: ...
-    def __exit__(self, t: Optional[type] = None, exc: Optional[BaseException] = None, tb: Optional[Any] = None) -> bool: ...
+    def __exit__(self, t: Optional[type] = ..., exc: Optional[BaseException] = ..., tb: Optional[Any] = ...) -> bool: ...
     def flush(self) -> None: ...
     def fileno(self) -> int: ...
     def isatty(self) -> bool: ...
@@ -973,6 +983,6 @@ class file(BinaryIO):
     def truncate(self, pos: Optional[int] = ...) -> int: ...
 
 # Very old builtins
-def apply(func: Callable[..., _T], args: Optional[Sequence[Any]] = None, kwds: Optional[Mapping[str, Any]] = None) -> _T: ...
+def apply(func: Callable[..., _T], args: Optional[Sequence[Any]] = ..., kwds: Optional[Mapping[str, Any]] = ...) -> _T: ...
 _N = TypeVar('_N', bool, int, float, complex)
 def coerce(x: _N, y: _N) -> Tuple[_N, _N]: ...
diff --git a/typeshed/stdlib/2/commands.pyi b/typeshed/stdlib/2/commands.pyi
index 864dbf4..e321f08 100644
--- a/typeshed/stdlib/2/commands.pyi
+++ b/typeshed/stdlib/2/commands.pyi
@@ -1,5 +1,12 @@
-from typing import Tuple
+from typing import overload, AnyStr, Text, Tuple
 
-def getstatus(file: str) -> str: ...
-def getoutput(cmd: str) -> str: ...
-def getstatusoutput(cmd: str) -> Tuple[int, str]: ...
+def getstatus(file: Text) -> str: ...
+def getoutput(cmd: Text) -> str: ...
+def getstatusoutput(cmd: Text) -> Tuple[int, str]: ...
+
+@overload
+def mk2arg(head: bytes, x: bytes) -> bytes: ...
+@overload
+def mk2arg(head: Text, x: Text) -> Text: ...
+
+def mkarg(x: AnyStr) -> AnyStr: ...
diff --git a/typeshed/stdlib/2/decimal.pyi b/typeshed/stdlib/2/decimal.pyi
index 7d51c8f..98d7541 100644
--- a/typeshed/stdlib/2/decimal.pyi
+++ b/typeshed/stdlib/2/decimal.pyi
@@ -51,7 +51,7 @@ class Underflow(Inexact, Rounded, Subnormal): ...
 
 def setcontext(context: Context): ...
 def getcontext() -> Context: ...
-def localcontext(ctx: Optional[Context] = None) -> _ContextManager: ...
+def localcontext(ctx: Optional[Context] = ...) -> _ContextManager: ...
 
 class Decimal(SupportsAbs[Decimal], SupportsFloat, SupportsInt):
     def __init__(cls, value: Union[_Decimal, float, str, unicode,
@@ -72,7 +72,7 @@ class Decimal(SupportsAbs[Decimal], SupportsFloat, SupportsInt):
     def to_eng_string(self, context: Context = ...) -> str: ...
     def __neg__(self) -> Decimal: ...
     def __pos__(self) -> Decimal: ...
-    def __abs__(self, round: bool = True) -> Decimal: ...
+    def __abs__(self, round: bool = ...) -> Decimal: ...
     def __add__(self, other: _Decimal) -> Decimal: ...
     def __radd__(self, other: int) -> Decimal: ...
     def __sub__(self, other: _Decimal) -> Decimal: ...
diff --git a/typeshed/stdlib/2/shelve.pyi b/typeshed/stdlib/2/shelve.pyi
index f5a92b9..d7d9b8c 100644
--- a/typeshed/stdlib/2/shelve.pyi
+++ b/typeshed/stdlib/2/shelve.pyi
@@ -3,13 +3,13 @@ import collections
 
 
 class Shelf(collections.MutableMapping):
-    def __init__(self, dict: Dict[Any, Any], protocol: Optional[int] = None, writeback: bool = ..., keyencoding: str = 'utf-8') -> None: ...
+    def __init__(self, dict: Dict[Any, Any], protocol: Optional[int] = ..., writeback: bool = ..., keyencoding: str = ...) -> None: ...
     def __iter__(self) -> Iterator[str]: ...
     def keys(self) -> List[Any]: ...
     def __len__(self) -> int: ...
     def has_key(self, key: Any) -> bool: ...
     def __contains__(self, key: Any) -> bool: ...
-    def get(self, key: Any, default: Any = None) -> Any: ...
+    def get(self, key: Any, default: Any = ...) -> Any: ...
     def __getitem__(self, key: Any) -> Any: ...
     def __setitem__(self, key: Any, value: Any) -> None: ...
     def __delitem__(self, key: Any) -> None: ...
@@ -20,7 +20,7 @@ class Shelf(collections.MutableMapping):
     def sync(self) -> None: ...
 
 class BsdDbShelf(Shelf):
-    def __init__(self, dict: Dict[Any, Any], protocol: Optional[int] = None, writeback: bool = ..., keyencoding: str = 'utf-8') -> None: ...
+    def __init__(self, dict: Dict[Any, Any], protocol: Optional[int] = ..., writeback: bool = ..., keyencoding: str = ...) -> None: ...
     def set_location(self, key: Any) -> Tuple[str, Any]: ...
     def next(self) -> Tuple[str, Any]: ...
     def previous(self) -> Tuple[str, Any]: ...
@@ -28,6 +28,6 @@ class BsdDbShelf(Shelf):
     def last(self) -> Tuple[str, Any]: ...
 
 class DbfilenameShelf(Shelf):
-    def __init__(self, filename: str, flag: str = 'c', protocol: Optional[int] = None, writeback: bool = ...) -> None: ...
+    def __init__(self, filename: str, flag: str = ..., protocol: Optional[int] = ..., writeback: bool = ...) -> None: ...
 
-def open(filename: str, flag: str = 'c', protocol: Optional[int] = None, writeback: bool = ...) -> DbfilenameShelf: ...
+def open(filename: str, flag: str = ..., protocol: Optional[int] = ..., writeback: bool = ...) -> DbfilenameShelf: ...
diff --git a/typeshed/stdlib/2/typing.pyi b/typeshed/stdlib/2/typing.pyi
index d8c49a4..957b320 100644
--- a/typeshed/stdlib/2/typing.pyi
+++ b/typeshed/stdlib/2/typing.pyi
@@ -18,6 +18,7 @@ class _SpecialForm(object):
 
 Tuple: _SpecialForm = ...
 Generic: _SpecialForm = ...
+Protocol: _SpecialForm = ...
 Callable: _SpecialForm = ...
 Type: _SpecialForm = ...
 ClassVar: _SpecialForm = ...
@@ -61,47 +62,60 @@ _V_co = TypeVar('_V_co', covariant=True)  # Any type covariant containers.
 _KT_co = TypeVar('_KT_co', covariant=True)  # Key type covariant containers.
 _VT_co = TypeVar('_VT_co', covariant=True)  # Value type covariant containers.
 _T_contra = TypeVar('_T_contra', contravariant=True)  # Ditto contravariant.
+_TC = TypeVar('_TC', bound=Type[object])
 
-class SupportsInt(metaclass=ABCMeta):
+def runtime(cls: _TC) -> _TC: ...
+
+@runtime
+class SupportsInt(Protocol, metaclass=ABCMeta):
     @abstractmethod
     def __int__(self) -> int: ...
 
-class SupportsFloat(metaclass=ABCMeta):
+@runtime
+class SupportsFloat(Protocol, metaclass=ABCMeta):
     @abstractmethod
     def __float__(self) -> float: ...
 
-class SupportsComplex(metaclass=ABCMeta):
+@runtime
+class SupportsComplex(Protocol, metaclass=ABCMeta):
     @abstractmethod
     def __complex__(self) -> complex: ...
 
-class SupportsAbs(Generic[_T]):
+@runtime
+class SupportsAbs(Protocol[_T_co]):
     @abstractmethod
-    def __abs__(self) -> _T: ...
+    def __abs__(self) -> _T_co: ...
 
-class SupportsRound(Generic[_T]):
+@runtime
+class SupportsRound(Protocol[_T_co]):
     @abstractmethod
-    def __round__(self, ndigits: int = ...) -> _T: ...
+    def __round__(self, ndigits: int = ...) -> _T_co: ...
 
-class Reversible(Generic[_T_co]):
+@runtime
+class Reversible(Protocol[_T_co]):
     @abstractmethod
     def __reversed__(self) -> Iterator[_T_co]: ...
 
-class Sized(metaclass=ABCMeta):
+@runtime
+class Sized(Protocol, metaclass=ABCMeta):
     @abstractmethod
     def __len__(self) -> int: ...
 
-class Hashable(metaclass=ABCMeta):
+@runtime
+class Hashable(Protocol, metaclass=ABCMeta):
     # TODO: This is special, in that a subclass of a hashable class may not be hashable
     #   (for example, list vs. object). It's not obvious how to represent this. This class
     #   is currently mostly useless for static checking.
     @abstractmethod
     def __hash__(self) -> int: ...
 
-class Iterable(Generic[_T_co]):
+@runtime
+class Iterable(Protocol[_T_co]):
     @abstractmethod
     def __iter__(self) -> Iterator[_T_co]: ...
 
-class Iterator(Iterable[_T_co], Generic[_T_co]):
+@runtime
+class Iterator(Iterable[_T_co], Protocol[_T_co]):
     @abstractmethod
     def next(self) -> _T_co: ...
 
@@ -113,9 +127,9 @@ class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]):
     def send(self, value: _T_contra) -> _T_co: ...
 
     @abstractmethod
-    def throw(self, typ: Type[BaseException], val: Optional[BaseException] = None,
+    def throw(self, typ: Type[BaseException], val: Optional[BaseException] = ...,
               # TODO: tb should be TracebackType but that's defined in types
-              tb: Any = None) -> None: ...
+              tb: Any = ...) -> _T_co: ...
 
     @abstractmethod
     def close(self) -> None: ...
@@ -124,7 +138,8 @@ class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]):
     gi_frame = ...  # type: FrameType
     gi_running = ...  # type: bool
 
-class Container(Generic[_T_co]):
+@runtime
+class Container(Protocol[_T_co]):
     @abstractmethod
     def __contains__(self, x: object) -> bool: ...
 
@@ -209,7 +224,8 @@ class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]):
     def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_VT_co]: ...
 
-class ContextManager(Generic[_T_co]):
+@runtime
+class ContextManager(Protocol[_T_co]):
     def __enter__(self) -> _T_co: ...
     def __exit__(self, exc_type: Optional[Type[BaseException]],
                  exc_value: Optional[BaseException],
diff --git a/typeshed/stdlib/2/unittest.pyi b/typeshed/stdlib/2/unittest.pyi
index 1a7a069..bcd13e1 100644
--- a/typeshed/stdlib/2/unittest.pyi
+++ b/typeshed/stdlib/2/unittest.pyi
@@ -55,6 +55,10 @@ class TestCase(Testable):
     def __init__(self, methodName: str = ...) -> None: ...
     def setUp(self) -> None: ...
     def tearDown(self) -> None: ...
+    @classmethod
+    def setUpClass(cls) -> None: ...
+    @classmethod
+    def tearDownClass(cls) -> None: ...
     def run(self, result: TestResult = ...) -> None: ...
     def debug(self) -> None: ...
     def assert_(self, expr: Any, msg: object = ...) -> None: ...
diff --git a/typeshed/stdlib/2/urllib2.pyi b/typeshed/stdlib/2/urllib2.pyi
index cfa399e..64ee202 100644
--- a/typeshed/stdlib/2/urllib2.pyi
+++ b/typeshed/stdlib/2/urllib2.pyi
@@ -41,7 +41,7 @@ class Request(object):
     def add_header(self, key: str, val: str) -> None: ...
     def add_unredirected_header(self, key: str, val: str) -> None: ...
     def has_header(self, header_name: str) -> bool: ...
-    def get_header(self, header_name: str, default: Optional[str] = None) -> str: ...
+    def get_header(self, header_name: str, default: Optional[str] = ...) -> str: ...
     def header_items(self): ...
 
 class OpenerDirector(object):
@@ -128,7 +128,7 @@ class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
     def http_error_407(self, req, fp, code, msg, headers): ...
 
 class AbstractHTTPHandler(BaseHandler):
-    def __init__(self, debuglevel: int=0) -> None: ...
+    def __init__(self, debuglevel: int = ...) -> None: ...
     def do_request_(self, request): ...
     def do_open(self, http_class, req): ...
 
diff --git a/typeshed/stdlib/2/wsgiref/types.pyi b/typeshed/stdlib/2/wsgiref/types.pyi
index b7bd533..977ec83 100644
--- a/typeshed/stdlib/2/wsgiref/types.pyi
+++ b/typeshed/stdlib/2/wsgiref/types.pyi
@@ -15,16 +15,17 @@
 # you need to use 'WSGIApplication' and not simply WSGIApplication when type
 # hinting your code.  Otherwise Python will raise NameErrors.
 
-from typing import Callable, Dict, Iterable, List, Optional, Tuple, Type, Union
+from typing import Callable, Dict, Iterable, List, Optional, Tuple, Type, Union, Any
 from types import TracebackType
 
 _exc_info = Tuple[Optional[Type[BaseException]],
                   Optional[BaseException],
                   Optional[TracebackType]]
 _Text = Union[unicode, str]
+WSGIEnvironment = Dict[_Text, Any]
 WSGIApplication = Callable[
     [
-        Dict[_Text, _Text],
+        WSGIEnvironment,
         Union[
             Callable[[_Text, List[Tuple[_Text, _Text]]], Callable[[_Text], None]],
             Callable[[_Text, List[Tuple[_Text, _Text]], _exc_info], Callable[[_Text], None]]
diff --git a/typeshed/stdlib/3/array.pyi b/typeshed/stdlib/2and3/array.pyi
similarity index 75%
rename from typeshed/stdlib/3/array.pyi
rename to typeshed/stdlib/2and3/array.pyi
index c740556..84e7e5e 100644
--- a/typeshed/stdlib/3/array.pyi
+++ b/typeshed/stdlib/2and3/array.pyi
@@ -1,13 +1,15 @@
 # Stubs for array
 
-# Based on http://docs.python.org/3.2/library/array.html
+# Based on http://docs.python.org/3.6/library/array.html
 
+import sys
 from typing import (Any, BinaryIO, Generic, Iterable, Iterator, List, MutableSequence,
                     overload, Text, Tuple, TypeVar, Union)
 
 _T = TypeVar('_T', int, float, Text)
 
-typecodes = ...  # type: str
+if sys.version_info >= (3,):
+    typecodes = ...  # type: str
 
 class array(MutableSequence[_T], Generic[_T]):
     typecode = ...  # type: str
@@ -19,7 +21,8 @@ class array(MutableSequence[_T], Generic[_T]):
     def byteswap(self) -> None: ...
     def count(self, x: Any) -> int: ...
     def extend(self, iterable: Iterable[_T]) -> None: ...
-    def frombytes(self, s: bytes) -> None: ...
+    if sys.version_info >= (3, 2):
+        def frombytes(self, s: bytes) -> None: ...
     def fromfile(self, f: BinaryIO, n: int) -> None: ...
     def fromlist(self, list: List[_T]) -> None: ...
     def fromstring(self, s: bytes) -> None: ...
@@ -27,13 +30,18 @@ class array(MutableSequence[_T], Generic[_T]):
     def index(self, x: _T) -> int: ...  # type: ignore  # Overrides Sequence
     def insert(self, i: int, x: _T) -> None: ...
     def pop(self, i: int = ...) -> _T: ...
+    if sys.version_info < (3,):
+        def read(self, f: BinaryIO, n: int) -> None: ...
     def remove(self, x: Any) -> None: ...
     def reverse(self) -> None: ...
-    def tobytes(self) -> bytes: ...
+    if sys.version_info >= (3, 2):
+        def tobytes(self) -> bytes: ...
     def tofile(self, f: BinaryIO) -> None: ...
     def tolist(self) -> List[_T]: ...
     def tostring(self) -> bytes: ...
     def tounicode(self) -> str: ...
+    if sys.version_info < (3,):
+        def write(self, f: BinaryIO) -> None: ...
 
     def __len__(self) -> int: ...
 
@@ -57,3 +65,9 @@ class array(MutableSequence[_T], Generic[_T]):
     def __lt__(self, other: array[_T]) -> bool: ...
     def __mul__(self, n: int) -> array[_T]: ...
     def __rmul__(self, n: int) -> array[_T]: ...
+    if sys.version_info < (3,):
+        def __delslice__(self, i: int, j: int) -> None: ...
+        def __getslice__(self, i: int, j: int) -> array[_T]: ...
+        def __setslice__(self, i: int, j: int, y: array[_T]) -> None: ...
+
+ArrayType = array
diff --git a/typeshed/stdlib/2and3/asynchat.pyi b/typeshed/stdlib/2and3/asynchat.pyi
index 7195f80..0e6ca7e 100644
--- a/typeshed/stdlib/2and3/asynchat.pyi
+++ b/typeshed/stdlib/2and3/asynchat.pyi
@@ -12,7 +12,7 @@ class simple_producer:
 class async_chat(asyncore.dispatcher):
     ac_in_buffer_size = ...  # type: int
     ac_out_buffer_size = ...  # type: int
-    def __init__(self, sock: Optional[socket.socket] = None, map: Optional[asyncore._maptype] = None) -> None: ...
+    def __init__(self, sock: Optional[socket.socket] = ..., map: Optional[asyncore._maptype] = ...) -> None: ...
 
     @abstractmethod
     def collect_incoming_data(self, data: bytes) -> None: ...
diff --git a/typeshed/stdlib/2and3/asyncore.pyi b/typeshed/stdlib/2and3/asyncore.pyi
index 0753197..983db7d 100644
--- a/typeshed/stdlib/2and3/asyncore.pyi
+++ b/typeshed/stdlib/2and3/asyncore.pyi
@@ -25,7 +25,7 @@ def poll2(timeout: float = ..., map: _maptype = ...) -> None: ...
 
 poll3 = poll2
 
-def loop(timeout: float = ..., use_poll: bool = ..., map: _maptype = ..., count: Optional[int] = None) -> None: ...
+def loop(timeout: float = ..., use_poll: bool = ..., map: _maptype = ..., count: Optional[int] = ...) -> None: ...
 
 
 # Not really subclass of socket.socket; it's only delegation.
@@ -39,7 +39,7 @@ class dispatcher:
     closing = ...  # type: bool
     ignore_log_types = ...  # type: frozenset[str]
 
-    def __init__(self, sock: Optional[socket.socket] = None, map: _maptype = ...) -> None: ...
+    def __init__(self, sock: Optional[socket.socket] = ..., map: _maptype = ...) -> None: ...
     def add_channel(self, map: _maptype = ...) -> None: ...
     def del_channel(self, map: _maptype = ...) -> None: ...
     def create_socket(self, family: int, type: int) -> None: ...
diff --git a/typeshed/stdlib/2and3/bz2.pyi b/typeshed/stdlib/2and3/bz2.pyi
index 7c92e43..12cb9cc 100644
--- a/typeshed/stdlib/2and3/bz2.pyi
+++ b/typeshed/stdlib/2and3/bz2.pyi
@@ -1,33 +1,47 @@
-# Stubs for bz2
+import sys
+from typing import Any, BinaryIO, IO, Optional, Union
 
-from typing import Any, BinaryIO, TextIO, IO, Optional, Union
+if sys.version_info >= (3, 6):
+    from os import PathLike
+    _PathOrFile = Union[str, bytes, IO[Any], PathLike[Any]]
+elif sys.version_info >= (3, 3):
+    _PathOrFile = Union[str, bytes, IO[Any]]
+else:
+    _PathOrFile = str
 
 def compress(data: bytes, compresslevel: int = ...) -> bytes: ...
 def decompress(data: bytes) -> bytes: ...
 
-def open(filename: Union[str, bytes, IO[Any]],
-         mode: str = 'rb',
-         encoding: Optional[str] = None,
-         errors: Optional[str] = None,
-         newline: Optional[str] = None) -> IO[Any]: ...
+if sys.version_info >= (3, 3):
+    def open(filename: _PathOrFile,
+             mode: str = ...,
+             compresslevel: int = ...,
+             encoding: Optional[str] = ...,
+             errors: Optional[str] = ...,
+             newline: Optional[str] = ...) -> IO[Any]: ...
 
 class BZ2File(BinaryIO):
     def __init__(self,
-                 filename: Union[str, bytes, IO[Any]],
-                 mode: str = "r",
-                 buffering: Optional[Any] = None,
-                 compresslevel: int = 9) -> None: ...
+                 filename: _PathOrFile,
+                 mode: str = ...,
+                 buffering: Optional[Any] = ...,
+                 compresslevel: int = ...) -> None: ...
 
 class BZ2Compressor(object):
-    def __init__(self, compresslevel: int = 9) -> None: ...
+    def __init__(self, compresslevel: int = ...) -> None: ...
     def compress(self, data: bytes) -> bytes: ...
     def flush(self) -> bytes: ...
 
 class BZ2Decompressor(object):
-    def decompress(self, data: bytes) -> bytes: ...
-    @property
-    def eof(self) -> bool: ...
-    @property
-    def needs_input(self) -> bool: ...
+    if sys.version_info >= (3, 5):
+        def decompress(self, data: bytes, max_length: int = ...) -> bytes: ...
+    else:
+        def decompress(self, data: bytes) -> bytes: ...
+    if sys.version_info >= (3, 3):
+        @property
+        def eof(self) -> bool: ...
+    if sys.version_info >= (3, 5):
+        @property
+        def needs_input(self) -> bool: ...
     @property
     def unused_data(self) -> bytes: ...
diff --git a/typeshed/stdlib/2and3/calendar.pyi b/typeshed/stdlib/2and3/calendar.pyi
index 6b59fd0..99a24a7 100644
--- a/typeshed/stdlib/2and3/calendar.pyi
+++ b/typeshed/stdlib/2and3/calendar.pyi
@@ -70,12 +70,12 @@ if sys.version_info < (3, 0):
     class TimeEncoding:
         def __init__(self, locale: _LocaleType) -> None: ...
         def __enter__(self) -> _LocaleType: ...
-        def __exit__(self, *args) -> None: ...
+        def __exit__(self, *args: Any) -> None: ...
 else:
     class different_locale:
         def __init__(self, locale: _LocaleType) -> None: ...
         def __enter__(self) -> _LocaleType: ...
-        def __exit__(self, *args) -> None: ...
+        def __exit__(self, *args: Any) -> None: ...
 
 class LocaleTextCalendar(TextCalendar):
     def __init__(self, firstweekday: int = ..., locale: Optional[_LocaleType] = ...) -> None: ...
diff --git a/typeshed/stdlib/2and3/distutils/cmd.pyi b/typeshed/stdlib/2and3/distutils/cmd.pyi
index 126ef15..07e4989 100644
--- a/typeshed/stdlib/2and3/distutils/cmd.pyi
+++ b/typeshed/stdlib/2and3/distutils/cmd.pyi
@@ -1,6 +1,6 @@
 # Stubs for distutils.cmd
 
-from typing import Callable, List, Tuple, Union
+from typing import Callable, List, Tuple, Union, Optional, Iterable, Any, Text
 from abc import abstractmethod
 from distutils.dist import Distribution
 
@@ -13,3 +13,28 @@ class Command:
     def finalize_options(self) -> None: ...
     @abstractmethod
     def run(self) -> None: ...
+
+    def announce(self, msg: Text, level: int = ...) -> None: ...
+    def debug_print(self, msg: Text) -> None: ...
+
+    def ensure_string(self, option: str, default: Optional[str] = ...) -> None: ...
+    def ensure_string_list(self, option: Union[str, List[str]]) -> None: ...
+    def ensure_filename(self, option: str) -> None: ...
+    def ensure_dirname(self, option: str) -> None: ...
+
+    def get_command_name(self) -> str: ...
+    def set_undefined_options(self, src_cmd: Text, *option_pairs: Tuple[str, str]) -> None: ...
+    def get_finalized_command(self, command: Text, create: int = ...) -> Command: ...
+    def reinitialize_command(self, command: Union[Command, Text], reinit_subcommands: int = ...) -> Command: ...
+    def run_command(self, command: Text) -> None: ...
+    def get_sub_commands(self) -> List[str]: ...
+
+    def warn(self, msg: Text) -> None: ...
+    def execute(self, func: Callable[..., Any], args: Iterable[Any], msg: Optional[Text] = ..., level: int = ...) -> None: ...
+    def mkpath(self, name: str, mode: int = ...) -> None: ...
+    def copy_file(self, infile: str, outfile: str, preserve_mode: int = ..., preserve_times: int = ..., link: Optional[str] = ..., level: Any = ...) -> Tuple[str, bool]: ...  # level is not used
+    def copy_tree(self, infile: str, outfile: str, preserve_mode: int = ..., preserve_times: int = ..., preserve_symlinks: int = ..., level: Any = ...) -> List[str]: ...  # level is not used
+    def move_file(self, src: str, dest: str, level: Any = ...) -> str: ...  # level is not used
+    def spawn(self, cmd: Iterable[str], search_path: int = ..., level: Any = ...) -> None: ...  # level is not used
+    def make_archive(self, base_name: str, format: str, root_dir: Optional[str] = ..., base_dir: Optional[str] = ..., owner: Optional[str] = ..., group: Optional[str] = ...) -> str: ...
+    def make_file(self, infiles: Union[str, List[str], Tuple[str]], outfile: str, func: Callable[..., Any], args: List[Any], exec_msg: Optional[str] = ..., skip_msg: Optional[str] = ..., level: Any = ...) -> None: ...  # level is not used
diff --git a/typeshed/stdlib/2and3/distutils/core.pyi b/typeshed/stdlib/2and3/distutils/core.pyi
index 9d41daf..125b799 100644
--- a/typeshed/stdlib/2and3/distutils/core.pyi
+++ b/typeshed/stdlib/2and3/distutils/core.pyi
@@ -27,7 +27,7 @@ def setup(name: str = ...,
           license: str = ...,
           keywords: Union[List[str], str] = ...,
           platforms: Union[List[str], str] = ...,
-          cmdclass: Mapping[str, Command] = ...,
+          cmdclass: Mapping[str, Type[Command]] = ...,
           data_files: List[Tuple[str, List[str]]] = ...,
           package_dir: Mapping[str, str] = ...,
           obsoletes: List[str] = ...,
diff --git a/typeshed/stdlib/2and3/distutils/log.pyi b/typeshed/stdlib/2and3/distutils/log.pyi
index e69de29..6c37cc5 100644
--- a/typeshed/stdlib/2and3/distutils/log.pyi
+++ b/typeshed/stdlib/2and3/distutils/log.pyi
@@ -0,0 +1,28 @@
+from typing import Any, Callable, Iterable, Text
+
+DEBUG: int
+INFO: int
+WARN: int
+ERROR: int
+FATAL: int
+
+class Log:
+    def __init__(self, threshold: int = ...) -> None: ...
+    def log(self, level: int, msg: Text, *args: Any) -> None: ...
+    def debug(self, msg: Text, *args: Any) -> None: ...
+    def info(self, msg: Text, *args: Any) -> None: ...
+    def warn(self, msg: Text, *args: Any) -> None: ...
+    def error(self, msg: Text, *args: Any) -> None: ...
+    def fatal(self, msg: Text, *args: Any) -> None: ...
+
+_LogFunc = Callable[[Text, Iterable[Any]], None]
+
+log: Callable[[int, Text, Iterable[Any]], None]
+debug: _LogFunc
+info: _LogFunc
+warn: _LogFunc
+error: _LogFunc
+fatal: _LogFunc
+
+def set_threshold(level: int) -> int: ...
+def set_verbosity(v: int) -> None: ...
diff --git a/typeshed/stdlib/2and3/fractions.pyi b/typeshed/stdlib/2and3/fractions.pyi
index 90771a6..e02b5e7 100644
--- a/typeshed/stdlib/2and3/fractions.pyi
+++ b/typeshed/stdlib/2and3/fractions.pyi
@@ -25,22 +25,22 @@ def gcd(a: Integral, b: Integral) -> Integral: ...
 class Fraction(Rational):
     @overload
     def __init__(self,
-                 numerator: Union[int, Rational] = 0,
-                 denominator: Optional[Union[int, Rational]] = 0,
+                 numerator: Union[int, Rational] = ...,
+                 denominator: Optional[Union[int, Rational]] = ...,
                  *,
-                 _normalize: bool = True) -> None: ...
+                 _normalize: bool = ...) -> None: ...
     @overload
-    def __init__(self, value: float, *, _normalize: bool = True) -> None: ...
+    def __init__(self, value: float, *, _normalize: bool = ...) -> None: ...
     @overload
-    def __init__(self, value: Decimal, *, _normalize: bool = True) -> None: ...
+    def __init__(self, value: Decimal, *, _normalize: bool = ...) -> None: ...
     @overload
-    def __init__(self, value: str, *, _normalize: bool = True) -> None: ...
+    def __init__(self, value: str, *, _normalize: bool = ...) -> None: ...
 
     @classmethod
     def from_float(cls, f: float) -> 'Fraction': ...
     @classmethod
     def from_decimal(cls, dec: Decimal) -> 'Fraction': ...
-    def limit_denominator(self, max_denominator: int = 1000000) -> 'Fraction': ...
+    def limit_denominator(self, max_denominator: int = ...) -> 'Fraction': ...
 
     @property
     def numerator(self) -> int: ...
diff --git a/typeshed/stdlib/2and3/imaplib.pyi b/typeshed/stdlib/2and3/imaplib.pyi
new file mode 100644
index 0000000..4f53146
--- /dev/null
+++ b/typeshed/stdlib/2and3/imaplib.pyi
@@ -0,0 +1,133 @@
+# Stubs for imaplib (Python 2 and 3)
+
+import imaplib
+import subprocess
+import sys
+import time
+from socket import socket as _socket
+from ssl import SSLSocket
+from typing import Any, Callable, Dict, IO, List, Optional, Pattern, Text, Tuple, Type, Union
+
+CommandResults = Tuple[str, List[Any]]
+
+
+class IMAP4:
+    error: Type[Exception]
+    abort: Type[Exception]
+    readonly: Type[Exception]
+    mustquote: Pattern[Text] = ...
+    debug: int = ...
+    state: str = ...
+    literal: Optional[Text] = ...
+    tagged_commands: Dict[str, str] = ...
+    untagged_responses: Dict[str, str] = ...
+    continuation_response: str = ...
+    is_readonly: bool = ...
+    tagnum: int = ...
+    tagpre: str = ...
+    tagre: Pattern[Text] = ...
+    welcome: bytes = ...
+    capabilities: Tuple[str, ...] = ...
+    PROTOCOL_VERSION: str = ...
+    def __init__(self, host: str, port: int) -> None: ...
+    def __getattr__(self, attr: str) -> Any: ...
+    host: str = ...
+    port: int = ...
+    sock: _socket = ...
+    file: Union[IO[Text], IO[bytes]] = ...
+    def open(self, host: str = ..., port: int = ...) -> None: ...
+    def read(self, size: int) -> bytes: ...
+    def readline(self) -> bytes: ...
+    def send(self, data: bytes) -> None: ...
+    def shutdown(self) -> None: ...
+    def socket(self) -> _socket: ...
+    def recent(self) -> CommandResults: ...
+    def response(self, code: str) -> CommandResults: ...
+    def append(self, mailbox: str, flags: str, date_time: str, message: str) -> str: ...
+    def authenticate(self, mechanism: str, authobject: Callable) -> Tuple[str, str]: ...
+    def capability(self) -> CommandResults: ...
+    def check(self) -> CommandResults: ...
+    def close(self) -> CommandResults: ...
+    def copy(self, message_set: str, new_mailbox: str) -> CommandResults: ...
+    def create(self, mailbox: str) -> CommandResults: ...
+    def delete(self, mailbox: str) -> CommandResults: ...
+    def deleteacl(self, mailbox: str, who: str) -> CommandResults: ...
+    def expunge(self) -> CommandResults: ...
+    def fetch(self, message_set: str, message_parts: str) -> CommandResults: ...
+    def getacl(self, mailbox: str) -> CommandResults: ...
+    def getannotation(self, mailbox: str, entry: str, attribute: str) -> CommandResults: ...
+    def getquota(self, root: str) -> CommandResults: ...
+    def getquotaroot(self, mailbox: str) -> CommandResults: ...
+    def list(self, directory: str = ..., pattern: str = ...) -> CommandResults: ...
+    def login(self, user: str, password: str) -> CommandResults: ...
+    def login_cram_md5(self, user: str, password: str) -> CommandResults: ...
+    def logout(self) -> CommandResults: ...
+    def lsub(self, directory: str = ..., pattern: str = ...) -> CommandResults: ...
+    def myrights(self, mailbox: str) -> CommandResults: ...
+    def namespace(self) -> CommandResults: ...
+    def noop(self) -> CommandResults: ...
+    def partial(self, message_num: str, message_part: str, start: str, length: str) -> CommandResults: ...
+    def proxyauth(self, user: str) -> CommandResults: ...
+    def rename(self, oldmailbox: str, newmailbox: str) -> CommandResults: ...
+    def search(self, charset: str, *criteria: str) -> CommandResults: ...
+    def select(self, mailbox: str = ..., readonly: bool = ...) -> CommandResults: ...
+    def setacl(self, mailbox: str, who: str, what: str) -> CommandResults: ...
+    def setannotation(self, *args: List[str]) -> CommandResults: ...
+    def setquota(self, root: str, limits: str) -> CommandResults: ...
+    def sort(self, sort_criteria: str, charset: str, *search_criteria: List[str]) -> CommandResults: ...
+    if sys.version_info >= (3,):
+        def starttls(self, ssl_context: Optional[Any] = ...) -> CommandResults: ...
+    def status(self, mailbox: str, names: str) -> CommandResults: ...
+    def store(self, message_set: str, command: str, flags: str) -> CommandResults: ...
+    def subscribe(self, mailbox: str) -> CommandResults: ...
+    def thread(self, threading_algorithm: str, charset: str, *search_criteria: List[str]) -> CommandResults: ...
+    def uid(self, command: str, *args: List[str]) -> CommandResults: ...
+    def unsubscribe(self, mailbox: str) -> CommandResults: ...
+    def xatom(self, name: str, *args: List[str]) -> CommandResults: ...
+    def print_log(self) -> None: ...
+
+class IMAP4_SSL(IMAP4):
+    keyfile: str = ...
+    certfile: str = ...
+    def __init__(self, host: str = ..., port: int = ..., keyfile: Optional[str] = ..., certfile: Optional[str] = ...) -> None: ...
+    host: str = ...
+    port: int = ...
+    sock: _socket = ...
+    sslobj: SSLSocket = ...
+    file: IO[Any] = ...
+    def open(self, host: str = ..., port: Optional[int] = ...) -> None: ...
+    def read(self, size: int) -> bytes: ...
+    def readline(self) -> bytes: ...
+    def send(self, data: bytes) -> None: ...
+    def shutdown(self) -> None: ...
+    def socket(self) -> _socket: ...
+    def ssl(self) -> SSLSocket: ...
+
+
+class IMAP4_stream(IMAP4):
+    command: str = ...
+    def __init__(self, command: str) -> None: ...
+    host: str = ...
+    port: int = ...
+    sock: _socket = ...
+    file: IO[Any] = ...
+    process: subprocess.Popen = ...
+    writefile: IO[Any] = ...
+    readfile: IO[Any] = ...
+    def open(self, host: str = ..., port: Optional[int] = ...) -> None: ...
+    def read(self, size: int) -> bytes: ...
+    def readline(self) -> bytes: ...
+    def send(self, data: bytes) -> None: ...
+    def shutdown(self) -> None: ...
+
+class _Authenticator:
+    mech: Callable = ...
+    def __init__(self, mechinst: Callable) -> None: ...
+    def process(self, data: str) -> str: ...
+    def encode(self, inp: bytes) -> str: ...
+    def decode(self, inp: str) -> bytes: ...
+
+def Internaldate2tuple(resp: str) -> time.struct_time: ...
+def Int2AP(num: int) -> str: ...
+def ParseFlags(resp: str) -> Tuple[str, ...]: ...
+def Time2Internaldate(date_time: Union[float, time.struct_time, str]) -> str: ...
diff --git a/typeshed/stdlib/2and3/logging/__init__.pyi b/typeshed/stdlib/2and3/logging/__init__.pyi
index 2b60503..9175fff 100644
--- a/typeshed/stdlib/2and3/logging/__init__.pyi
+++ b/typeshed/stdlib/2and3/logging/__init__.pyi
@@ -366,7 +366,7 @@ if sys.version_info >= (3,):
 
 
 class StreamHandler(Handler):
-    stream = ...  # type IO[str]
+    stream = ...  # type: IO[str]
     if sys.version_info >= (3,):
         terminator = ...  # type: str
     def __init__(self, stream: Optional[IO[str]] = ...) -> None: ...
diff --git a/typeshed/stdlib/2and3/numbers.pyi b/typeshed/stdlib/2and3/numbers.pyi
index 2a3f493..38943a8 100644
--- a/typeshed/stdlib/2and3/numbers.pyi
+++ b/typeshed/stdlib/2and3/numbers.pyi
@@ -70,7 +70,7 @@ class Real(Complex, SupportsFloat):
         @abstractmethod
         def __ceil__(self) -> int: ...
         @abstractmethod
-        def __round__(self, ndigits: Optional[int] = None): ...
+        def __round__(self, ndigits: Optional[int] = ...): ...
     def __divmod__(self, other): ...
     def __rdivmod__(self, other): ...
     @abstractmethod
diff --git a/typeshed/stdlib/2and3/pdb.pyi b/typeshed/stdlib/2and3/pdb.pyi
index 226436b..83f2c54 100644
--- a/typeshed/stdlib/2and3/pdb.pyi
+++ b/typeshed/stdlib/2and3/pdb.pyi
@@ -3,13 +3,13 @@
 from typing import Any, Dict, Optional
 
 def run(statement: str,
-        globals: Optional[Dict[str, Any]] = None,
-        locals: Optional[Dict[str, Any]] = None) -> None:
+        globals: Optional[Dict[str, Any]] = ...,
+        locals: Optional[Dict[str, Any]] = ...) -> None:
     ...
 
 def runeval(expression: str,
-            globals: Optional[Dict[str, Any]] = None,
-            locals: Optional[Dict[str, Any]] = None) -> Any:
+            globals: Optional[Dict[str, Any]] = ...,
+            locals: Optional[Dict[str, Any]] = ...) -> Any:
     ...
 
 def runctx(statement: str,
@@ -23,7 +23,7 @@ def runcall(*args: Any, **kwds: Any) -> Any:
 def set_trace() -> None:
     ...
 
-def post_mortem(t: Optional[Any] = None) -> None:
+def post_mortem(t: Optional[Any] = ...) -> None:
     ...
 
 def pm() -> None:
diff --git a/typeshed/stdlib/2and3/pkgutil.pyi b/typeshed/stdlib/2and3/pkgutil.pyi
index e6d4672..3f49036 100644
--- a/typeshed/stdlib/2and3/pkgutil.pyi
+++ b/typeshed/stdlib/2and3/pkgutil.pyi
@@ -26,6 +26,6 @@ def get_loader(module_or_name: str) -> Loader: ...
 def iter_importers(fullname: str = ...) -> Generator[Any, None, None]: ...  # TODO precise type
 def iter_modules(path: Optional[List[str]] = ...,
                  prefix: str = ...) -> _YMFNI: ...  # TODO precise type
-def walk_packages(path: Optional[str] = ..., prefix: str = ...,
+def walk_packages(path: Optional[List[str]] = ..., prefix: str = ...,
                   onerror: Optional[Callable[[str], None]] = ...) -> _YMFNI: ...
-def get_data(package: str, resource: str) -> bytes: ...
+def get_data(package: str, resource: str) -> Optional[bytes]: ...
diff --git a/typeshed/stdlib/2and3/plistlib.pyi b/typeshed/stdlib/2and3/plistlib.pyi
index 40a3f97..6b65496 100644
--- a/typeshed/stdlib/2and3/plistlib.pyi
+++ b/typeshed/stdlib/2and3/plistlib.pyi
@@ -5,8 +5,15 @@ from typing import (
     Type, TypeVar,
 )
 from typing import Dict as DictT
-from enum import Enum
 import sys
+if sys.version_info >= (3,):
+    from enum import Enum
+
+    class PlistFormat(Enum):
+        FMT_XML = ...  # type: PlistFormat
+        FMT_BINARY = ...  # type: PlistFormat
+    FMT_XML = PlistFormat.FMT_XML
+    FMT_BINARY = PlistFormat.FMT_BINARY
 
 mm = MutableMapping[str, Any]
 _D = TypeVar('_D', bound=mm)
@@ -15,15 +22,9 @@ if sys.version_info >= (3,):
 else:
     _Path = Union[str, unicode]
 
-
-if sys.version_info >= (3,):
-    class PlistFormat(Enum):
-        FMT_XML = ...  # type: PlistFormat
-        FMT_BINARY = ...  # type: PlistFormat
-
 if sys.version_info >= (3, 4):
     def load(fp: IO[bytes], *, fmt: Optional[PlistFormat] = ...,
-             use_builtin_types: bool, dict_type: Type[_D] =...) -> _D: ...
+             use_builtin_types: bool = ..., dict_type: Type[_D] = ...) -> _D: ...
     def loads(data: bytes, *, fmt: Optional[PlistFormat] = ...,
               use_builtin_types: bool = ..., dict_type: Type[_D] = ...) -> _D: ...
     def dump(value: Mapping[str, Any], fp: IO[bytes], *,
@@ -54,7 +55,3 @@ if sys.version_info >= (3,):
 class Data:
     data = ...  # type: bytes
     def __init__(self, data: bytes) -> None: ...
-
-if sys.version_info >= (3,):
-    FMT_XML = PlistFormat.FMT_XML
-    FMT_BINARY = PlistFormat.FMT_BINARY
diff --git a/typeshed/stdlib/2and3/py_compile.pyi b/typeshed/stdlib/2and3/py_compile.pyi
index cdd51ec..ad8e8c8 100644
--- a/typeshed/stdlib/2and3/py_compile.pyi
+++ b/typeshed/stdlib/2and3/py_compile.pyi
@@ -17,4 +17,4 @@ if sys.version_info >= (3, 2):
 else:
     def compile(file: _EitherStr, cfile: Optional[_EitherStr] = ..., dfile: Optional[_EitherStr] = ..., doraise: bool = ...) -> None: ...
 
-def main(args: Optional[List[Text]] = ...): ...
+def main(args: Optional[List[Text]] = ...) -> int: ...
diff --git a/typeshed/stdlib/2and3/threading.pyi b/typeshed/stdlib/2and3/threading.pyi
index efd941f..2bcf8a7 100644
--- a/typeshed/stdlib/2and3/threading.pyi
+++ b/typeshed/stdlib/2and3/threading.pyi
@@ -54,14 +54,14 @@ class Thread:
         def __init__(self, group: None = ...,
                      target: Optional[Callable[..., None]] = ...,
                      name: Optional[str] = ...,
-                     args: Tuple[Any, ...] = ...,
+                     args: Iterable = ...,
                      kwargs: Mapping[str, Any] = ...,
                      *, daemon: Optional[bool] = ...) -> None: ...
     else:
         def __init__(self, group: None = ...,
                      target: Optional[Callable[..., None]] = ...,
                      name: Optional[str] = ...,
-                     args: Tuple[Any, ...] = ...,
+                     args: Iterable = ...,
                      kwargs: Mapping[str, Any] = ...) -> None: ...
     def start(self) -> None: ...
     def run(self) -> None: ...
@@ -123,7 +123,7 @@ class Condition:
     def wait(self, timeout: Optional[float] = ...) -> bool: ...
     if sys.version_info >= (3,):
         def wait_for(self, predicate: Callable[[], _T],
-                     timeout: Optional[float]) -> _T: ...
+                     timeout: Optional[float] = ...) -> _T: ...
     def notify(self, n: int = ...) -> None: ...
     def notify_all(self) -> None: ...
     def notifyAll(self) -> None: ...
diff --git a/typeshed/stdlib/2and3/traceback.pyi b/typeshed/stdlib/2and3/traceback.pyi
index d53adfe..07f158d 100644
--- a/typeshed/stdlib/2and3/traceback.pyi
+++ b/typeshed/stdlib/2and3/traceback.pyi
@@ -1,6 +1,6 @@
 # Stubs for traceback
 
-from typing import Any, Dict, Generator, IO, Iterator, List, Mapping, Optional, Tuple, Type
+from typing import Any, Dict, Generator, IO, Iterator, List, Mapping, Optional, Tuple, Type, Iterable
 from types import FrameType, TracebackType
 import sys
 
@@ -91,7 +91,7 @@ if sys.version_info >= (3, 5):
 
 
 if sys.version_info >= (3, 5):
-    class FrameSummary:
+    class FrameSummary(Iterable):
         filename: str
         lineno: int
         name: str
diff --git a/typeshed/stdlib/2and3/xml/sax/__init__.pyi b/typeshed/stdlib/2and3/xml/sax/__init__.pyi
index 93a0f9a..ca34cfd 100644
--- a/typeshed/stdlib/2and3/xml/sax/__init__.pyi
+++ b/typeshed/stdlib/2and3/xml/sax/__init__.pyi
@@ -1,4 +1,4 @@
-from typing import Any, List, Optional, Text, Union
+from typing import Any, List, Optional, Text, Union, IO
 from mypy_extensions import NoReturn
 
 import xml.sax
@@ -26,7 +26,7 @@ default_parser_list = ...  # type: List[str]
 
 def make_parser(parser_list: List[str] = ...) -> xml.sax.xmlreader.XMLReader: ...
 
-def parse(source: str, handler: xml.sax.handler.ContentHandler,
+def parse(source: Union[str, IO[str]], handler: xml.sax.handler.ContentHandler,
           errorHandler: xml.sax.handler.ErrorHandler = ...) -> None: ...
 
 def parseString(string: Union[bytes, Text], handler: xml.sax.handler.ContentHandler,
diff --git a/typeshed/stdlib/3.4/asyncio/futures.pyi b/typeshed/stdlib/3.4/asyncio/futures.pyi
index 28dbefb..f0c2553 100644
--- a/typeshed/stdlib/3.4/asyncio/futures.pyi
+++ b/typeshed/stdlib/3.4/asyncio/futures.pyi
@@ -1,5 +1,5 @@
 import sys
-from typing import Any, Union, Callable, TypeVar, List, Generic, Iterable, Generator, Awaitable
+from typing import Any, Union, Callable, TypeVar, Type, List, Generic, Iterable, Generator, Awaitable
 from .events import AbstractEventLoop
 from concurrent.futures import (
     CancelledError as CancelledError,
@@ -31,7 +31,7 @@ class Future(Iterable[_T], Awaitable[_T], Generic[_T]):
     _exception = ...  # type: BaseException
     _blocking = False
     _log_traceback = False
-    _tb_logger = _TracebackLogger
+    _tb_logger = ...  # type: Type[_TracebackLogger]
     def __init__(self, *, loop: AbstractEventLoop = ...) -> None: ...
     def __repr__(self) -> str: ...
     def __del__(self) -> None: ...
diff --git a/typeshed/stdlib/3.4/asyncio/locks.pyi b/typeshed/stdlib/3.4/asyncio/locks.pyi
index 837c50c..56b8a67 100644
--- a/typeshed/stdlib/3.4/asyncio/locks.pyi
+++ b/typeshed/stdlib/3.4/asyncio/locks.pyi
@@ -37,7 +37,7 @@ class Event:
     def wait(self) -> Generator[Any, None, bool]: ...
 
 class Condition(_ContextManagerMixin):
-    def __init__(self, lock: Optional[Lock] = None, *, loop: Optional[AbstractEventLoop] = ...) -> None: ...
+    def __init__(self, lock: Optional[Lock] = ..., *, loop: Optional[AbstractEventLoop] = ...) -> None: ...
     def locked(self) -> bool: ...
     @coroutine
     def acquire(self) -> Generator[Any, None, bool]: ...
@@ -46,7 +46,7 @@ class Condition(_ContextManagerMixin):
     def wait(self) -> Generator[Any, None, bool]: ...
     @coroutine
     def wait_for(self, predicate: Callable[[], _T]) -> Generator[Any, None, _T]: ...
-    def notify(self, n: int = 1) -> None: ...
+    def notify(self, n: int = ...) -> None: ...
     def notify_all(self) -> None: ...
 
 class Semaphore(_ContextManagerMixin):
diff --git a/typeshed/stdlib/3.4/asyncio/protocols.pyi b/typeshed/stdlib/3.4/asyncio/protocols.pyi
index 118659c..9fcff65 100644
--- a/typeshed/stdlib/3.4/asyncio/protocols.pyi
+++ b/typeshed/stdlib/3.4/asyncio/protocols.pyi
@@ -1,5 +1,5 @@
 from asyncio import transports
-from typing import AnyStr, List
+from typing import AnyStr, List, Tuple
 
 __all__: List[str]
 
@@ -15,7 +15,7 @@ class Protocol(BaseProtocol):
     def eof_received(self) -> bool: ...
 
 class DatagramProtocol(BaseProtocol):
-    def datagram_received(self, data: AnyStr, addr: str) -> None: ...
+    def datagram_received(self, data: AnyStr, addr: Tuple[str, int]) -> None: ...
     def error_received(self, exc: Exception) -> None: ...
 
 class SubprocessProtocol(BaseProtocol):
diff --git a/typeshed/stdlib/3.4/asyncio/tasks.pyi b/typeshed/stdlib/3.4/asyncio/tasks.pyi
index b17c321..d44fe6b 100644
--- a/typeshed/stdlib/3.4/asyncio/tasks.pyi
+++ b/typeshed/stdlib/3.4/asyncio/tasks.pyi
@@ -26,26 +26,26 @@ def ensure_future(coro_or_future: _FutureT[_T],
 async = ensure_future
 @overload
 def gather(coro_or_future1: _FutureT[_T1],
-           *, loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[Tuple[_T1]]: ...
+           *, loop: AbstractEventLoop = ..., return_exceptions: bool = ...) -> Future[Tuple[_T1]]: ...
 @overload
 def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2],
-           *, loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[Tuple[_T1, _T2]]: ...
+           *, loop: AbstractEventLoop = ..., return_exceptions: bool = ...) -> Future[Tuple[_T1, _T2]]: ...
 @overload
 def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], coro_or_future3: _FutureT[_T3],
-           *, loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[Tuple[_T1, _T2, _T3]]: ...
+           *, loop: AbstractEventLoop = ..., return_exceptions: bool = ...) -> Future[Tuple[_T1, _T2, _T3]]: ...
 @overload
 def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], coro_or_future3: _FutureT[_T3],
            coro_or_future4: _FutureT[_T4],
-           *, loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[Tuple[_T1, _T2, _T3, _T4]]: ...
+           *, loop: AbstractEventLoop = ..., return_exceptions: bool = ...) -> Future[Tuple[_T1, _T2, _T3, _T4]]: ...
 @overload
 def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], coro_or_future3: _FutureT[_T3],
            coro_or_future4: _FutureT[_T4], coro_or_future5: _FutureT[_T5],
-           *, loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[Tuple[_T1, _T2, _T3, _T4, _T5]]: ...
+           *, loop: AbstractEventLoop = ..., return_exceptions: bool = ...) -> Future[Tuple[_T1, _T2, _T3, _T4, _T5]]: ...
 @overload
 def gather(coro_or_future1: _FutureT[Any], coro_or_future2: _FutureT[Any], coro_or_future3: _FutureT[Any],
            coro_or_future4: _FutureT[Any], coro_or_future5: _FutureT[Any], coro_or_future6: _FutureT[Any],
            *coros_or_futures: _FutureT[Any],
-           loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[Tuple[Any, ...]]: ...
+           loop: AbstractEventLoop = ..., return_exceptions: bool = ...) -> Future[Tuple[Any, ...]]: ...
 def run_coroutine_threadsafe(coro: _FutureT[_T],
                              loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ...
 def shield(arg: _FutureT[_T], *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
diff --git a/typeshed/stdlib/3.4/enum.pyi b/typeshed/stdlib/3.4/enum.pyi
index 1501be2..001f6e9 100644
--- a/typeshed/stdlib/3.4/enum.pyi
+++ b/typeshed/stdlib/3.4/enum.pyi
@@ -2,7 +2,7 @@ import sys
 from typing import List, Any, TypeVar, Union, Iterable, Iterator, TypeVar, Generic, Type, Sized, Reversible, Container, Mapping
 from abc import ABCMeta
 
-_T = TypeVar('_T', bound=Enum)
+_T = TypeVar('_T')
 _S = TypeVar('_S', bound=Type[Enum])
 
 # Note: EnumMeta actually subclasses type directly, not ABCMeta.
diff --git a/typeshed/stdlib/3/_importlib_modulespec.pyi b/typeshed/stdlib/3/_importlib_modulespec.pyi
index 33c30d5..4ce6278 100644
--- a/typeshed/stdlib/3/_importlib_modulespec.pyi
+++ b/typeshed/stdlib/3/_importlib_modulespec.pyi
@@ -7,13 +7,13 @@
 
 from abc import ABCMeta
 import sys
-from typing import Dict, Any, Optional
+from typing import Any, Dict, List, Optional
 
 if sys.version_info >= (3, 4):
     class ModuleSpec:
         def __init__(self, name: str, loader: Optional['Loader'], *,
-                     origin: Optional[str] = None, loader_state: Any = None,
-                     is_package: Optional[bool] = None) -> None: ...
+                     origin: Optional[str] = ..., loader_state: Any = ...,
+                     is_package: Optional[bool] = ...) -> None: ...
         name = ...  # type: str
         loader = ...  # type: Optional[Loader]
         origin = ...  # type: Optional[str]
diff --git a/typeshed/stdlib/3/builtins.pyi b/typeshed/stdlib/3/builtins.pyi
index 7780fcc..f2cea0f 100644
--- a/typeshed/stdlib/3/builtins.pyi
+++ b/typeshed/stdlib/3/builtins.pyi
@@ -103,7 +103,7 @@ class super:
     @overload
     def __init__(self) -> None: ...
 
-class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
+class int:
     @overload
     def __init__(self, x: Union[str, bytes, SupportsInt] = ...) -> None: ...
     @overload
@@ -157,7 +157,7 @@ class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
     def __hash__(self) -> int: ...
     def __bool__(self) -> bool: ...
 
-class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
+class float:
     def __init__(self, x: Union[SupportsFloat, str, bytes] = ...) -> None: ...
     def as_integer_ratio(self) -> Tuple[int, int]: ...
     def hex(self) -> str: ...
@@ -196,9 +196,9 @@ class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
     def __hash__(self) -> int: ...
     def __bool__(self) -> bool: ...
 
-class complex(SupportsAbs[float]):
+class complex:
     @overload
-    def __init__(self, re: float = 0.0, im: float = 0.0) -> None: ...
+    def __init__(self, re: float = ..., im: float = ...) -> None: ...
     @overload
     def __init__(self, s: str) -> None: ...
 
@@ -238,12 +238,12 @@ class str(Sequence[str]):
 
     def capitalize(self) -> str: ...
     def casefold(self) -> str: ...
-    def center(self, width: int, fillchar: str = ' ') -> str: ...
+    def center(self, width: int, fillchar: str = ...) -> str: ...
     def count(self, x: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
-    def encode(self, encoding: str = 'utf-8', errors: str = 'strict') -> bytes: ...
-    def endswith(self, suffix: Union[str, Tuple[str, ...]], start: Optional[int] = None,
-                 end: Optional[int] = None) -> bool: ...
-    def expandtabs(self, tabsize: int = 8) -> str: ...
+    def encode(self, encoding: str = ..., errors: str = ...) -> bytes: ...
+    def endswith(self, suffix: Union[str, Tuple[str, ...]], start: Optional[int] = ...,
+                 end: Optional[int] = ...) -> bool: ...
+    def expandtabs(self, tabsize: int = ...) -> str: ...
     def find(self, sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
     def format(self, *args: Any, **kwargs: Any) -> str: ...
     def format_map(self, map: Mapping[str, Any]) -> str: ...
@@ -260,33 +260,33 @@ class str(Sequence[str]):
     def istitle(self) -> bool: ...
     def isupper(self) -> bool: ...
     def join(self, iterable: Iterable[str]) -> str: ...
-    def ljust(self, width: int, fillchar: str = ' ') -> str: ...
+    def ljust(self, width: int, fillchar: str = ...) -> str: ...
     def lower(self) -> str: ...
-    def lstrip(self, chars: Optional[str] = None) -> str: ...
+    def lstrip(self, chars: Optional[str] = ...) -> str: ...
     def partition(self, sep: str) -> Tuple[str, str, str]: ...
-    def replace(self, old: str, new: str, count: int = -1) -> str: ...
+    def replace(self, old: str, new: str, count: int = ...) -> str: ...
     def rfind(self, sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
     def rindex(self, sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
-    def rjust(self, width: int, fillchar: str = ' ') -> str: ...
+    def rjust(self, width: int, fillchar: str = ...) -> str: ...
     def rpartition(self, sep: str) -> Tuple[str, str, str]: ...
-    def rsplit(self, sep: Optional[str] = None, maxsplit: int = -1) -> List[str]: ...
-    def rstrip(self, chars: Optional[str] = None) -> str: ...
-    def split(self, sep: Optional[str] = None, maxsplit: int = -1) -> List[str]: ...
+    def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ...
+    def rstrip(self, chars: Optional[str] = ...) -> str: ...
+    def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ...
     def splitlines(self, keepends: bool = ...) -> List[str]: ...
-    def startswith(self, prefix: Union[str, Tuple[str, ...]], start: Optional[int] = None,
-                   end: Optional[int] = None) -> bool: ...
-    def strip(self, chars: Optional[str] = None) -> str: ...
+    def startswith(self, prefix: Union[str, Tuple[str, ...]], start: Optional[int] = ...,
+                   end: Optional[int] = ...) -> bool: ...
+    def strip(self, chars: Optional[str] = ...) -> str: ...
     def swapcase(self) -> str: ...
     def title(self) -> str: ...
-    def translate(self, table: Dict[int, Any]) -> str: ...
+    def translate(self, table: Union[Mapping[int, Union[int, str, None]], Sequence[Union[int, str, None]]]) -> str: ...
     def upper(self) -> str: ...
     def zfill(self, width: int) -> str: ...
     @staticmethod
     @overload
-    def maketrans(x: Union[Dict[int, Any], Dict[str, Any]]) -> Dict[int, Any]: ...
+    def maketrans(x: Union[Dict[int, _T], Dict[str, _T], Dict[Union[str, int], _T]]) -> Dict[int, _T]: ...
     @staticmethod
     @overload
-    def maketrans(x: str, y: str, z: str = ...) -> Dict[int, Any]: ...
+    def maketrans(x: str, y: str, z: str = ...) -> Dict[int, Union[int, None]]: ...
 
     def __getitem__(self, i: Union[int, slice]) -> str: ...
     def __add__(self, s: str) -> str: ...
@@ -312,7 +312,7 @@ class bytes(ByteString):
     def __init__(self, ints: Iterable[int]) -> None: ...
     @overload
     def __init__(self, string: str, encoding: str,
-                 errors: str = 'strict') -> None: ...
+                 errors: str = ...) -> None: ...
     @overload
     def __init__(self, length: int) -> None: ...
     @overload
@@ -322,22 +322,22 @@ class bytes(ByteString):
     def capitalize(self) -> bytes: ...
     def center(self, width: int, fillchar: bytes = ...) -> bytes: ...
     if sys.version_info >= (3, 3):
-        def count(self, sub: Union[bytes, int], start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def count(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     else:
-        def count(self, sub: bytes, start: Optional[int] = None, end: Optional[int] = None) -> int: ...
-    def decode(self, encoding: str = 'utf-8', errors: str = 'strict') -> str: ...
+        def count(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
+    def decode(self, encoding: str = ..., errors: str = ...) -> str: ...
     def endswith(self, suffix: Union[bytes, Tuple[bytes, ...]]) -> bool: ...
-    def expandtabs(self, tabsize: int = 8) -> bytes: ...
+    def expandtabs(self, tabsize: int = ...) -> bytes: ...
     if sys.version_info >= (3, 3):
-        def find(self, sub: Union[bytes, int], start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def find(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     else:
-        def find(self, sub: bytes, start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def find(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     if sys.version_info >= (3, 5):
         def hex(self) -> str: ...
     if sys.version_info >= (3, 3):
-        def index(self, sub: Union[bytes, int], start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def index(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     else:
-        def index(self, sub: bytes, start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def index(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     def isalnum(self) -> bool: ...
     def isalpha(self) -> bool: ...
     def isdigit(self) -> bool: ...
@@ -348,25 +348,25 @@ class bytes(ByteString):
     def join(self, iterable: Iterable[bytes]) -> bytes: ...
     def ljust(self, width: int, fillchar: bytes = ...) -> bytes: ...
     def lower(self) -> bytes: ...
-    def lstrip(self, chars: Optional[bytes] = None) -> bytes: ...
+    def lstrip(self, chars: Optional[bytes] = ...) -> bytes: ...
     def partition(self, sep: bytes) -> Tuple[bytes, bytes, bytes]: ...
-    def replace(self, old: bytes, new: bytes, count: int = -1) -> bytes: ...
+    def replace(self, old: bytes, new: bytes, count: int = ...) -> bytes: ...
     if sys.version_info >= (3, 3):
-        def rfind(self, sub: Union[bytes, int], start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def rfind(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     else:
-        def rfind(self, sub: bytes, start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def rfind(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     if sys.version_info >= (3, 3):
-        def rindex(self, sub: Union[bytes, int], start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def rindex(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     else:
-        def rindex(self, sub: bytes, start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def rindex(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     def rjust(self, width: int, fillchar: bytes = ...) -> bytes: ...
     def rpartition(self, sep: bytes) -> Tuple[bytes, bytes, bytes]: ...
-    def rsplit(self, sep: Optional[bytes] = None, maxsplit: int = -1) -> List[bytes]: ...
-    def rstrip(self, chars: Optional[bytes] = None) -> bytes: ...
-    def split(self, sep: Optional[bytes] = None, maxsplit: int = -1) -> List[bytes]: ...
+    def rsplit(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytes]: ...
+    def rstrip(self, chars: Optional[bytes] = ...) -> bytes: ...
+    def split(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytes]: ...
     def splitlines(self, keepends: bool = ...) -> List[bytes]: ...
     def startswith(self, prefix: Union[bytes, Tuple[bytes, ...]]) -> bool: ...
-    def strip(self, chars: Optional[bytes] = None) -> bytes: ...
+    def strip(self, chars: Optional[bytes] = ...) -> bytes: ...
     def swapcase(self) -> bytes: ...
     def title(self) -> bytes: ...
     def translate(self, table: Optional[bytes], delete: bytes = ...) -> bytes: ...
@@ -405,7 +405,7 @@ class bytearray(MutableSequence[int], ByteString):
     @overload
     def __init__(self, ints: Iterable[int]) -> None: ...
     @overload
-    def __init__(self, string: str, encoding: str, errors: str = 'strict') -> None: ...
+    def __init__(self, string: str, encoding: str, errors: str = ...) -> None: ...
     @overload
     def __init__(self, length: int) -> None: ...
     @overload
@@ -413,22 +413,22 @@ class bytearray(MutableSequence[int], ByteString):
     def capitalize(self) -> bytearray: ...
     def center(self, width: int, fillchar: bytes = ...) -> bytearray: ...
     if sys.version_info >= (3, 3):
-        def count(self, sub: Union[bytes, int], start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def count(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     else:
-        def count(self, sub: bytes, start: Optional[int] = None, end: Optional[int] = None) -> int: ...
-    def decode(self, encoding: str = 'utf-8', errors: str = 'strict') -> str: ...
+        def count(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
+    def decode(self, encoding: str = ..., errors: str = ...) -> str: ...
     def endswith(self, suffix: bytes) -> bool: ...
-    def expandtabs(self, tabsize: int = 8) -> bytearray: ...
+    def expandtabs(self, tabsize: int = ...) -> bytearray: ...
     if sys.version_info >= (3, 3):
-        def find(self, sub: Union[bytes, int], start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def find(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     else:
-        def find(self, sub: bytes, start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def find(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     if sys.version_info >= (3, 5):
         def hex(self) -> str: ...
     if sys.version_info >= (3, 3):
-        def index(self, sub: Union[bytes, int], start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def index(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     else:
-        def index(self, sub: bytes, start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def index(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     def insert(self, index: int, object: int) -> None: ...
     def isalnum(self) -> bool: ...
     def isalpha(self) -> bool: ...
@@ -440,25 +440,25 @@ class bytearray(MutableSequence[int], ByteString):
     def join(self, iterable: Iterable[bytes]) -> bytearray: ...
     def ljust(self, width: int, fillchar: bytes = ...) -> bytearray: ...
     def lower(self) -> bytearray: ...
-    def lstrip(self, chars: Optional[bytes] = None) -> bytearray: ...
+    def lstrip(self, chars: Optional[bytes] = ...) -> bytearray: ...
     def partition(self, sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ...
-    def replace(self, old: bytes, new: bytes, count: int = -1) -> bytearray: ...
+    def replace(self, old: bytes, new: bytes, count: int = ...) -> bytearray: ...
     if sys.version_info >= (3, 3):
-        def rfind(self, sub: Union[bytes, int], start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def rfind(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     else:
-        def rfind(self, sub: bytes, start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def rfind(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     if sys.version_info >= (3, 3):
-        def rindex(self, sub: Union[bytes, int], start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def rindex(self, sub: Union[bytes, int], start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     else:
-        def rindex(self, sub: bytes, start: Optional[int] = None, end: Optional[int] = None) -> int: ...
+        def rindex(self, sub: bytes, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...
     def rjust(self, width: int, fillchar: bytes = ...) -> bytearray: ...
     def rpartition(self, sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ...
-    def rsplit(self, sep: Optional[bytes] = None, maxsplit: int = -1) -> List[bytearray]: ...
-    def rstrip(self, chars: Optional[bytes] = None) -> bytearray: ...
-    def split(self, sep: Optional[bytes] = None, maxsplit: int = -1) -> List[bytearray]: ...
+    def rsplit(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytearray]: ...
+    def rstrip(self, chars: Optional[bytes] = ...) -> bytearray: ...
+    def split(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytearray]: ...
     def splitlines(self, keepends: bool = ...) -> List[bytearray]: ...
     def startswith(self, prefix: bytes) -> bool: ...
-    def strip(self, chars: Optional[bytes] = None) -> bytearray: ...
+    def strip(self, chars: Optional[bytes] = ...) -> bytearray: ...
     def swapcase(self) -> bytearray: ...
     def title(self) -> bytearray: ...
     def translate(self, table: Optional[bytes], delete: bytes = ...) -> bytearray: ...
@@ -535,7 +535,7 @@ class memoryview(Sized, Container[bytes]):
         def hex(self) -> str: ...
 
 
-class bool(int, SupportsInt, SupportsFloat):
+class bool(int):
     def __init__(self, o: object = ...) -> None: ...
 
 class slice:
@@ -545,7 +545,7 @@ class slice:
     @overload
     def __init__(self, stop: Optional[int]) -> None: ...
     @overload
-    def __init__(self, start: Optional[int], stop: Optional[int], step: Optional[int] = None) -> None: ...
+    def __init__(self, start: Optional[int], stop: Optional[int], step: Optional[int] = ...) -> None: ...
     def indices(self, len: int) -> Tuple[int, int, int]: ...
 
 class tuple(Sequence[_T_co], Generic[_T_co]):
@@ -566,7 +566,7 @@ class tuple(Sequence[_T_co], Generic[_T_co]):
     def __rmul__(self, n: int) -> Tuple[_T_co, ...]: ...
     def count(self, x: Any) -> int: ...
     if sys.version_info >= (3, 5):
-        def index(self, x: Any, start: int = 0, end: int = 0) -> int: ...
+        def index(self, x: Any, start: int = ..., end: int = ...) -> int: ...
     else:
         def index(self, x: Any) -> int: ...
 
@@ -587,13 +587,13 @@ class list(MutableSequence[_T], Generic[_T]):
     def copy(self) -> List[_T]: ...
     def append(self, object: _T) -> None: ...
     def extend(self, iterable: Iterable[_T]) -> None: ...
-    def pop(self, index: int = -1) -> _T: ...
-    def index(self, object: _T, start: int = 0, stop: int = ...) -> int: ...
+    def pop(self, index: int = ...) -> _T: ...
+    def index(self, object: _T, start: int = ..., stop: int = ...) -> int: ...
     def count(self, object: _T) -> int: ...
     def insert(self, index: int, object: _T) -> None: ...
     def remove(self, object: _T) -> None: ...
     def reverse(self) -> None: ...
-    def sort(self, *, key: Optional[Callable[[_T], Any]] = None, reverse: bool = ...) -> None: ...
+    def sort(self, *, key: Optional[Callable[[_T], Any]] = ..., reverse: bool = ...) -> None: ...
 
     def __len__(self) -> int: ...
     def __iter__(self) -> Iterator[_T]: ...
@@ -635,7 +635,7 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def clear(self) -> None: ...
     def copy(self) -> Dict[_KT, _VT]: ...
     def popitem(self) -> Tuple[_KT, _VT]: ...
-    def setdefault(self, k: _KT, default: Optional[_VT] = None) -> _VT: ...
+    def setdefault(self, k: _KT, default: Optional[_VT] = ...) -> _VT: ...
     @overload
     def update(self, __m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
     @overload
@@ -719,7 +719,7 @@ class frozenset(AbstractSet[_T], Generic[_T]):
     def __gt__(self, s: AbstractSet[object]) -> bool: ...
 
 class enumerate(Iterator[Tuple[int, _T]], Generic[_T]):
-    def __init__(self, iterable: Iterable[_T], start: int = 0) -> None: ...
+    def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ...
     def __iter__(self) -> Iterator[Tuple[int, _T]]: ...
     def __next__(self) -> Tuple[int, _T]: ...
 
@@ -730,9 +730,9 @@ class range(Sequence[int]):
     @overload
     def __init__(self, stop: int) -> None: ...
     @overload
-    def __init__(self, start: int, stop: int, step: int = 1) -> None: ...
+    def __init__(self, start: int, stop: int, step: int = ...) -> None: ...
     def count(self, value: int) -> int: ...
-    def index(self, value: int, start: int = 0, stop: Optional[int] = None) -> int: ...
+    def index(self, value: int, start: int = ..., stop: Optional[int] = ...) -> int: ...
     def __len__(self) -> int: ...
     def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[int]: ...
@@ -744,14 +744,14 @@ class range(Sequence[int]):
     def __reversed__(self) -> Iterator[int]: ...
 
 class property:
-    def __init__(self, fget: Optional[Callable[[Any], Any]] = None,
-                 fset: Optional[Callable[[Any, Any], None]] = None,
-                 fdel: Optional[Callable[[Any], None]] = None,
-                 doc: Optional[str] = None) -> None: ...
+    def __init__(self, fget: Optional[Callable[[Any], Any]] = ...,
+                 fset: Optional[Callable[[Any, Any], None]] = ...,
+                 fdel: Optional[Callable[[Any], None]] = ...,
+                 doc: Optional[str] = ...) -> None: ...
     def getter(self, fget: Callable[[Any], Any]) -> property: ...
     def setter(self, fset: Callable[[Any, Any], None]) -> property: ...
     def deleter(self, fdel: Callable[[Any], None]) -> property: ...
-    def __get__(self, obj: Any, type: Optional[type] = None) -> Any: ...
+    def __get__(self, obj: Any, type: Optional[type] = ...) -> Any: ...
     def __set__(self, obj: Any, value: Any) -> None: ...
     def __delete__(self, obj: Any) -> None: ...
     def fget(self) -> Any: ...
@@ -767,22 +767,22 @@ def ascii(o: object) -> str: ...
 def bin(number: int) -> str: ...
 def callable(o: object) -> bool: ...
 def chr(code: int) -> str: ...
-def compile(source: Any, filename: Union[str, bytes], mode: str, flags: int = 0, dont_inherit: int = 0) -> CodeType: ...
+def compile(source: Any, filename: Union[str, bytes], mode: str, flags: int = ..., dont_inherit: int = ...) -> CodeType: ...
 def copyright() -> None: ...
 def credits() -> None: ...
 def delattr(o: Any, name: str) -> None: ...
 def dir(o: object = ...) -> List[str]: ...
 _N = TypeVar('_N', int, float)
 def divmod(a: _N, b: _N) -> Tuple[_N, _N]: ...
-def eval(source: Union[str, bytes, CodeType], globals: Optional[Dict[str, Any]] = None, locals: Optional[Mapping[str, Any]] = None) -> Any: ...
-def exec(object: Union[str, bytes, CodeType], globals: Optional[Dict[str, Any]] = None, locals: Optional[Mapping[str, Any]] = None) -> Any: ...
+def eval(source: Union[str, bytes, CodeType], globals: Optional[Dict[str, Any]] = ..., locals: Optional[Mapping[str, Any]] = ...) -> Any: ...
+def exec(object: Union[str, bytes, CodeType], globals: Optional[Dict[str, Any]] = ..., locals: Optional[Mapping[str, Any]] = ...) -> Any: ...
 def exit(code: Any = ...) -> NoReturn: ...
 @overload
 def filter(function: Optional[Callable[[_T], Any]],
            iterable: Iterable[_T]) -> Iterator[_T]: ...
 @overload
 def filter(function: None, iterable: Iterable[Optional[_T]]) -> Iterator[_T]: ...
-def format(o: object, format_spec: str = '') -> str: ...
+def format(o: object, format_spec: str = ...) -> str: ...
 def getattr(o: Any, name: str, default: Any = ...) -> Any: ...
 def globals() -> Dict[str, Any]: ...
 def hasattr(o: Any, name: str) -> bool: ...
@@ -790,7 +790,7 @@ def hash(o: object) -> int: ...
 def help(*args: Any, **kwds: Any) -> None: ...
 def hex(i: int) -> str: ...  # TODO __index__
 def id(o: object) -> int: ...
-def input(prompt: Optional[Any] = None) -> str: ...
+def input(prompt: Optional[Any] = ...) -> str: ...
 @overload
 def iter(iterable: Iterable[_T]) -> Iterator[_T]: ...
 @overload
@@ -826,15 +826,15 @@ if sys.version_info >= (3, 6):
     class _PathLike(Generic[AnyStr]):
         def __fspath__(self) -> AnyStr: ...
 
-    def open(file: Union[str, bytes, int, _PathLike], mode: str = 'r', buffering: int = -1, encoding: Optional[str] = None,
-             errors: Optional[str] = None, newline: Optional[str] = None, closefd: bool = ...) -> IO[Any]: ...
+    def open(file: Union[str, bytes, int, _PathLike], mode: str = ..., buffering: int = ..., encoding: Optional[str] = ...,
+             errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ...) -> IO[Any]: ...
 else:
-    def open(file: Union[str, bytes, int], mode: str = 'r', buffering: int = -1, encoding: Optional[str] = None,
-             errors: Optional[str] = None, newline: Optional[str] = None, closefd: bool = ...) -> IO[Any]: ...
+    def open(file: Union[str, bytes, int], mode: str = ..., buffering: int = ..., encoding: Optional[str] = ...,
+             errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ...) -> IO[Any]: ...
 
 def ord(c: Union[str, bytes, bytearray]) -> int: ...
 # TODO: in Python 3.2, print() does not support flush
-def print(*values: Any, sep: str = ' ', end: str = '\n', file: Optional[IO[str]] = None, flush: bool = False) -> None: ...
+def print(*values: Any, sep: str = ..., end: str = ..., file: Optional[IO[str]] = ..., flush: bool = ...) -> None: ...
 @overload
 def pow(x: int, y: int) -> Any: ...  # The return type can be int or float, depending on y
 @overload
@@ -843,7 +843,7 @@ def pow(x: int, y: int, z: int) -> Any: ...
 def pow(x: float, y: float) -> float: ...
 @overload
 def pow(x: float, y: float, z: float) -> float: ...
-def quit(code: Optional[int] = None) -> None: ...
+def quit(code: Optional[int] = ...) -> None: ...
 @overload
 def reversed(object: Reversible[_T]) -> Iterator[_T]: ...
 @overload
@@ -859,8 +859,8 @@ def round(number: SupportsRound[_T]) -> _T: ...
 def round(number: SupportsRound[_T], ndigits: int) -> _T: ...
 def setattr(object: Any, name: str, value: Any) -> None: ...
 def sorted(iterable: Iterable[_T], *,
-           key: Optional[Callable[[_T], Any]] = None,
-           reverse: bool = False) -> List[_T]: ...
+           key: Optional[Callable[[_T], Any]] = ...,
+           reverse: bool = ...) -> List[_T]: ...
 @overload
 def sum(iterable: Iterable[_T]) -> Union[_T, int]: ...
 @overload
@@ -886,7 +886,7 @@ def zip(iter1: Iterable[Any], iter2: Iterable[Any], iter3: Iterable[Any],
         iter4: Iterable[Any], iter5: Iterable[Any], iter6: Iterable[Any],
         *iterables: Iterable[Any]) -> Iterator[Tuple[Any, ...]]: ...
 def __import__(name: str, globals: Dict[str, Any] = ..., locals: Dict[str, Any] = ...,
-               fromlist: List[str] = ..., level: int = -1) -> Any: ...
+               fromlist: List[str] = ..., level: int = ...) -> Any: ...
 
 # Ellipsis
 
@@ -902,7 +902,7 @@ class BaseException:
     __cause__ = ...  # type: BaseException
     __context__ = ...  # type: BaseException
     __traceback__ = ...  # type: TracebackType
-    def __init__(self, *args: object) -> None: ...
+    def __init__(self, *args: object, **kwargs: object) -> None: ...
     def with_traceback(self, tb: Any) -> BaseException: ...
 
 class GeneratorExit(BaseException): ...
diff --git a/typeshed/stdlib/3/collections/__init__.pyi b/typeshed/stdlib/3/collections/__init__.pyi
index d80cb2f..d5e8f34 100644
--- a/typeshed/stdlib/3/collections/__init__.pyi
+++ b/typeshed/stdlib/3/collections/__init__.pyi
@@ -159,10 +159,10 @@ class UserString(Sequence[str]):
     if sys.version_info >= (3, 5):
         @staticmethod
         @overload
-        def maketrans(x: Union[Dict[int, Any], Dict[str, Any]]) -> Dict[int, Any]: ...
+        def maketrans(x: Union[Dict[int, _T], Dict[str, _T], Dict[Union[str, int], _T]]) -> Dict[int, _T]: ...
         @staticmethod
         @overload
-        def maketrans(x: str, y: str, z: str = ...) -> Dict[int, Any]: ...
+        def maketrans(x: str, y: str, z: str = ...) -> Dict[int, Union[int, None]]: ...
     def partition(self, sep: str) -> Tuple[str, str, str]: ...
     def replace(self: _UserStringT, old: Union[str, UserString], new: Union[str, UserString], maxsplit: int = ...) -> _UserStringT: ...
     def rfind(self, sub: Union[str, UserString], start: int = ..., end: int = ...) -> int: ...
@@ -215,15 +215,18 @@ class deque(MutableSequence[_T], Generic[_T]):
     @overload
     def __getitem__(self, index: int) -> _T: ...
     @overload
-    def __getitem__(self, s: slice) -> Sequence[_T]: raise TypeError
+    def __getitem__(self, s: slice) -> Sequence[_T]:
+        raise TypeError
     @overload
     def __setitem__(self, i: int, x: _T) -> None: ...
     @overload
-    def __setitem__(self, s: slice, o: Iterable[_T]) -> None: raise TypeError
+    def __setitem__(self, s: slice, o: Iterable[_T]) -> None:
+        raise TypeError
     @overload
     def __delitem__(self, i: int) -> None: ...
     @overload
-    def __delitem__(self, s: slice) -> None: raise TypeError
+    def __delitem__(self, s: slice) -> None:
+        raise TypeError
 
     def __contains__(self, o: object) -> bool: ...
     def __reversed__(self) -> Iterator[_T]: ...
diff --git a/typeshed/stdlib/3/concurrent/futures/_base.pyi b/typeshed/stdlib/3/concurrent/futures/_base.pyi
index 510b7c0..56b119b 100644
--- a/typeshed/stdlib/3/concurrent/futures/_base.pyi
+++ b/typeshed/stdlib/3/concurrent/futures/_base.pyi
@@ -1,5 +1,4 @@
-from typing import TypeVar, Generic, Any, Iterable, Iterator, Callable, Tuple, Optional, Set
-from collections import namedtuple
+from typing import TypeVar, Generic, Any, Iterable, Iterator, Callable, Tuple, Optional, Set, NamedTuple
 
 FIRST_COMPLETED = ...  # type: str
 FIRST_EXCEPTION = ...  # type: str
@@ -15,7 +14,7 @@ class Error(Exception): ...
 class CancelledError(Error): ...
 class TimeoutError(Error): ...
 
-DoneAndNotDoneFutures = namedtuple('DoneAndNotDoneFutures', 'done not_done')
+DoneAndNotDoneFutures = NamedTuple('DoneAndNotDoneFutures', [('done', Future[Any]), ('not_done', Future[Any])])
 
 _T = TypeVar('_T')
 
@@ -36,7 +35,7 @@ class Executor:
     def submit(self, fn: Callable[..., _T], *args: Any, **kwargs: Any) -> Future[_T]: ...
     def map(self, func: Callable[..., _T], *iterables: Iterable[Any], timeout: Optional[float] = ..., chunksize: int = ...) -> Iterator[_T]: ...
     def shutdown(self, wait: bool = ...) -> None: ...
-    def __enter__(self) -> Executor: ...
+    def __enter__(self: _T) -> _T: ...
     def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> bool: ...
 
 def as_completed(fs: Iterable[Future[_T]], timeout: Optional[float] = ...) -> Iterator[Future[_T]]: ...
diff --git a/typeshed/stdlib/3/configparser.pyi b/typeshed/stdlib/3/configparser.pyi
index 33d6fce..5659862 100644
--- a/typeshed/stdlib/3/configparser.pyi
+++ b/typeshed/stdlib/3/configparser.pyi
@@ -91,9 +91,9 @@ class RawConfigParser(_parser):
     def has_option(self, section: str, option: str) -> bool: ...
 
     def read(self, filenames: Union[_Path, Iterable[_Path]],
-             encoding: Optional[str] = None) -> List[str]: ...
+             encoding: Optional[str] = ...) -> List[str]: ...
 
-    def read_file(self, f: Iterable[str], source: Optional[str] = None) -> None: ...
+    def read_file(self, f: Iterable[str], source: Optional[str] = ...) -> None: ...
 
     def read_string(self, string: str, source: str = ...) -> None: ...
 
@@ -124,7 +124,7 @@ class RawConfigParser(_parser):
 
     def write(self,
               fileobject: IO[str],
-              space_around_delimiters: bool = True) -> None: ...
+              space_around_delimiters: bool = ...) -> None: ...
 
     def remove_option(self, section: str, option: str) -> bool: ...
 
@@ -135,16 +135,16 @@ class RawConfigParser(_parser):
 
 class ConfigParser(RawConfigParser):
     def __init__(self,
-                 defaults: Optional[_section] = None,
+                 defaults: Optional[_section] = ...,
                  dict_type: Mapping[str, str] = ...,
                  allow_no_value: bool = ...,
                  delimiters: Sequence[str] = ...,
                  comment_prefixes: Sequence[str] = ...,
-                 inline_comment_prefixes: Optional[Sequence[str]] = None,
+                 inline_comment_prefixes: Optional[Sequence[str]] = ...,
                  strict: bool = ...,
                  empty_lines_in_values: bool = ...,
                  default_section: str = ...,
-                 interpolation: Optional[Interpolation] = None,
+                 interpolation: Optional[Interpolation] = ...,
                  converters: _converters = ...) -> None: ...
 
 class SafeConfigParser(ConfigParser): ...
diff --git a/typeshed/stdlib/3/curses/__init__.pyi b/typeshed/stdlib/3/curses/__init__.pyi
index 078a6cd..ef9f420 100644
--- a/typeshed/stdlib/3/curses/__init__.pyi
+++ b/typeshed/stdlib/3/curses/__init__.pyi
@@ -1,6 +1,9 @@
 import _curses
 from _curses import *  # noqa: F403
 
+LINES: int
+COLS: int
+
 def initscr() -> _curses._CursesWindow: ...
 def start_color(): ...
 def wrapper(func, *args, **kwds): ...
diff --git a/typeshed/stdlib/3/datetime.pyi b/typeshed/stdlib/3/datetime.pyi
index 6ff3ada..44adfe5 100644
--- a/typeshed/stdlib/3/datetime.pyi
+++ b/typeshed/stdlib/3/datetime.pyi
@@ -95,7 +95,7 @@ class time:
     def tzname(self) -> Optional[str]: ...
     def dst(self) -> Optional[int]: ...
     def replace(self, hour: int = ..., minute: int = ..., second: int = ...,
-                microsecond: int = ..., tzinfo: Optional[_tzinfo] = None) -> time: ...
+                microsecond: int = ..., tzinfo: Optional[_tzinfo] = ...) -> time: ...
 
 _date = date
 _time = time
@@ -195,7 +195,7 @@ class datetime:
     def timetz(self) -> _time: ...
     def replace(self, year: int = ..., month: int = ..., day: int = ..., hour: int = ...,
                 minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo:
-                Optional[_tzinfo] = None) -> datetime: ...
+                Optional[_tzinfo] = ...) -> datetime: ...
     def astimezone(self, tz: Optional[_tzinfo] = ...) -> datetime: ...
     def ctime(self) -> str: ...
     if sys.version_info >= (3, 6):
diff --git a/typeshed/stdlib/3/email/message.pyi b/typeshed/stdlib/3/email/message.pyi
index 8244939..6e30176 100644
--- a/typeshed/stdlib/3/email/message.pyi
+++ b/typeshed/stdlib/3/email/message.pyi
@@ -6,6 +6,7 @@ from typing import (
 import sys
 from email.charset import Charset
 from email.errors import MessageDefect
+from email.header import Header
 if sys.version_info >= (3, 3):
     from email.policy import Policy
 if sys.version_info >= (3, 4):
@@ -19,6 +20,7 @@ _PayloadType = Union[List[Message], str, bytes]
 _CharsetType = Union[Charset, str, None]
 _ParamsType = Union[str, None, Tuple[str, Optional[str], str]]
 _ParamType = Union[str, Tuple[Optional[str], Optional[str], str]]
+_HeaderType = Union[str, Header]
 
 class Message:
     preamble = ...  # type: Optional[str]
@@ -36,16 +38,16 @@ class Message:
     def get_charset(self) -> _CharsetType: ...
     def __len__(self) -> int: ...
     def __contains__(self, name: str) -> bool: ...
-    def __getitem__(self, name: str) -> Optional[str]: ...
-    def __setitem__(self, name: str, val: str) -> None: ...
+    def __getitem__(self, name: str) -> Optional[_HeaderType]: ...
+    def __setitem__(self, name: str, val: _HeaderType) -> None: ...
     def __delitem__(self, name: str) -> None: ...
     def keys(self) -> List[str]: ...
-    def values(self) -> List[str]: ...
-    def items(self) -> List[Tuple[str, str]]: ...
-    def get(self, name: str, failobj: _T = ...) -> Union[str, _T]: ...
-    def get_all(self, name: str, failobj: _T = ...) -> Union[List[str], _T]: ...
+    def values(self) -> List[_HeaderType]: ...
+    def items(self) -> List[Tuple[str, _HeaderType]]: ...
+    def get(self, name: str, failobj: _T = ...) -> Union[_HeaderType, _T]: ...
+    def get_all(self, name: str, failobj: _T = ...) -> Union[List[_HeaderType], _T]: ...
     def add_header(self, _name: str, _value: str, **_params: _ParamsType) -> None: ...
-    def replace_header(self, _name: str, _value: str) -> None: ...
+    def replace_header(self, _name: str, _value: _HeaderType) -> None: ...
     def get_content_type(self) -> str: ...
     def get_content_maintype(self) -> str: ...
     def get_content_subtype(self) -> str: ...
diff --git a/typeshed/stdlib/3/getpass.pyi b/typeshed/stdlib/3/getpass.pyi
index f55023c..71df376 100644
--- a/typeshed/stdlib/3/getpass.pyi
+++ b/typeshed/stdlib/3/getpass.pyi
@@ -3,7 +3,7 @@
 from typing import Optional, TextIO
 
 
-def getpass(prompt: str = ..., stream: Optional[TextIO] = None) -> str: ...
+def getpass(prompt: str = ..., stream: Optional[TextIO] = ...) -> str: ...
 
 
 def getuser() -> str: ...
diff --git a/typeshed/stdlib/3/gzip.pyi b/typeshed/stdlib/3/gzip.pyi
index 8a6f43e..024413b 100644
--- a/typeshed/stdlib/3/gzip.pyi
+++ b/typeshed/stdlib/3/gzip.pyi
@@ -1,47 +1,49 @@
-from typing import Any, Optional
+from typing import Any, IO, Optional
+from os.path import _PathType
 import _compression
+import zlib
 
-def open(filename, mode: str = ..., compresslevel: int = ..., encoding=None, errors=None, newline=None): ...
+def open(filename, mode: str = ..., compresslevel: int = ..., encoding: Optional[str] = ..., errors: Optional[str] = ..., newline: Optional[str] = ...) -> IO[Any]: ...
 
 class _PaddedFile:
-    file = ...  # type: Any
-    def __init__(self, f, prepend: bytes = ...) -> None: ...
-    def read(self, size): ...
-    def prepend(self, prepend: bytes = ...): ...
-    def seek(self, off): ...
-    def seekable(self): ...
+    file: IO[bytes]
+    def __init__(self, f: IO[bytes], prepend: bytes = ...) -> None: ...
+    def read(self, size: int) -> bytes: ...
+    def prepend(self, prepend: bytes = ...) -> None: ...
+    def seek(self, off: int) -> int: ...
+    def seekable(self) -> bool: ...
 
 class GzipFile(_compression.BaseStream):
-    myfileobj = ...  # type: Any
-    mode = ...  # type: Any
-    name = ...  # type: Any
-    compress = ...  # type: Any
-    fileobj = ...  # type: Any
-    def __init__(self, filename=None, mode=None, compresslevel: int = ..., fileobj=None, mtime=None) -> None: ...
+    myfileobj: Optional[IO[bytes]]
+    mode: str
+    name: str
+    compress: zlib._Compress
+    fileobj: IO[bytes]
+    def __init__(self, filename: Optional[_PathType] = ..., mode: Optional[str] = ..., compresslevel: int = ..., fileobj: Optional[IO[bytes]] = ..., mtime: Optional[float] = ...) -> None: ...
     @property
-    def filename(self): ...
+    def filename(self) -> str: ...
     @property
     def mtime(self): ...
-    crc = ...  # type: Any
-    def write(self, data): ...
-    def read(self, size: Optional[int] = ...): ...
-    def read1(self, size: int = ...): ...
-    def peek(self, n): ...
+    crc: int
+    def write(self, data: bytes) -> int: ...
+    def read(self, size: Optional[int] = ...) -> bytes: ...
+    def read1(self, size: int = ...) -> bytes: ...
+    def peek(self, n: int) -> bytes: ...
     @property
-    def closed(self): ...
-    def close(self): ...
-    def flush(self, zlib_mode=...): ...
-    def fileno(self): ...
-    def rewind(self): ...
-    def readable(self): ...
-    def writable(self): ...
-    def seekable(self): ...
-    def seek(self, offset, whence=...): ...
-    def readline(self, size: int = ...): ...
+    def closed(self) -> bool: ...
+    def close(self) -> None: ...
+    def flush(self, zlib_mode: int = ...) -> None: ...
+    def fileno(self) -> int: ...
+    def rewind(self) -> None: ...
+    def readable(self) -> bool: ...
+    def writable(self) -> bool: ...
+    def seekable(self) -> bool: ...
+    def seek(self, offset: int, whence: int = ...) -> int: ...
+    def readline(self, size: int = ...) -> bytes: ...
 
 class _GzipReader(_compression.DecompressReader):
-    def __init__(self, fp) -> None: ...
-    def read(self, size: int = ...): ...
+    def __init__(self, fp: IO[bytes]) -> None: ...
+    def read(self, size: int = ...) -> bytes: ...
 
-def compress(data, compresslevel: int = ...): ...
-def decompress(data): ...
+def compress(data, compresslevel: int = ...) -> bytes: ...
+def decompress(data: bytes) -> bytes: ...
diff --git a/typeshed/stdlib/3/hashlib.pyi b/typeshed/stdlib/3/hashlib.pyi
index 64ef707..850205f 100644
--- a/typeshed/stdlib/3/hashlib.pyi
+++ b/typeshed/stdlib/3/hashlib.pyi
@@ -1,12 +1,11 @@
 # Stubs for hashlib
 
 import sys
-from abc import abstractmethod, ABCMeta
 from typing import AbstractSet, Optional, Union
 
 _DataType = Union[bytes, bytearray, memoryview]
 
-class _Hash(metaclass=ABCMeta):
+class _Hash(object):
     digest_size = ...  # type: int
     block_size = ...  # type: int
 
@@ -15,14 +14,12 @@ class _Hash(metaclass=ABCMeta):
     # formally specified, so may not exist on some platforms
     name = ...  # type: str
 
-    @abstractmethod
-    def update(self, arg: _DataType) -> None: ...
-    @abstractmethod
+    def __init__(self, data: _DataType = ...) -> None: ...
+
+    def copy(self) -> _Hash: ...
     def digest(self) -> bytes: ...
-    @abstractmethod
     def hexdigest(self) -> str: ...
-    @abstractmethod
-    def copy(self) -> _Hash: ...
+    def update(self, arg: _DataType) -> None: ...
 
 def md5(arg: _DataType = ...) -> _Hash: ...
 def sha1(arg: _DataType = ...) -> _Hash: ...
@@ -42,27 +39,24 @@ if sys.version_info >= (3, 4):
     def pbkdf2_hmac(hash_name: str, password: _DataType, salt: _DataType, iterations: int, dklen: Optional[int] = ...) -> bytes: ...
 
 if sys.version_info >= (3, 6):
-    class _VarLenHash(metaclass=ABCMeta):
+    class _VarLenHash(object):
         digest_size = ...  # type: int
         block_size = ...  # type: int
         name = ...  # type: str
 
-        @abstractmethod
+        def __init__(self, data: _DataType = ...) -> None: ...
+
+        def copy(self) -> _VarLenHash: ...
         def digest(self, length: int) -> bytes: ...
-        @abstractmethod
         def hexdigest(self, length: int) -> str: ...
-        @abstractmethod
         def update(self, arg: _DataType) -> None: ...
-        @abstractmethod
-        def copy(self) -> _VarLenHash: ...
-
-    def sha3_224(arg: _DataType = ...) -> _Hash: ...
-    def sha3_256(arg: _DataType = ...) -> _Hash: ...
-    def sha3_384(arg: _DataType = ...) -> _Hash: ...
-    def sha3_512(arg: _DataType = ...) -> _Hash: ...
 
-    def shake_128(arg: _DataType = ...) -> _VarLenHash: ...
-    def shake_256(arg: _DataType = ...) -> _VarLenHash: ...
+    sha3_224 = _Hash
+    sha3_256 = _Hash
+    sha3_384 = _Hash
+    sha3_512 = _Hash
+    shake_128 = _VarLenHash
+    shake_256 = _VarLenHash
 
     def scrypt(password: _DataType, *, salt: _DataType, n: int, r: int, p: int, maxmem: int = ..., dklen: int = ...) -> bytes: ...
 
@@ -72,7 +66,7 @@ if sys.version_info >= (3, 6):
         PERSON_SIZE = ...  # type: int
         SALT_SIZE = ...  # type: int
 
-        def __init__(self, data: _DataType, digest_size: int = ..., key: _DataType = ..., salt: _DataType = ..., person: _DataType = ..., fanout: int = ..., depth: int = ..., leaf_size: int = ..., node_offset: int = ..., node_depth: int = ..., inner_size: int = ..., last_node: bool = ...) -> None: ...
+        def __init__(self, data: _DataType = ..., digest_size: int = ..., key: _DataType = ..., salt: _DataType = ..., person: _DataType = ..., fanout: int = ..., depth: int = ..., leaf_size: int = ..., node_offset: int = ..., node_depth: int = ..., inner_size: int = ..., last_node: bool = ...) -> None: ...
 
     blake2b = _BlakeHash
     blake2s = _BlakeHash
diff --git a/typeshed/stdlib/3/http/__init__.pyi b/typeshed/stdlib/3/http/__init__.pyi
index a72ccba..345d8f2 100644
--- a/typeshed/stdlib/3/http/__init__.pyi
+++ b/typeshed/stdlib/3/http/__init__.pyi
@@ -5,9 +5,10 @@ from enum import IntEnum
 if sys.version_info >= (3, 5):
     class HTTPStatus(IntEnum):
 
-        def __init__(self, *a) -> None:
-            self.phrase = ...  # type: str
-            self.description = ...  # type: str
+        def __init__(self, *a) -> None: ...
+
+        phrase = ...  # type: str
+        description = ...  # type: str
 
         CONTINUE = ...  # type: HTTPStatus
         SWITCHING_PROTOCOLS = ...  # type: HTTPStatus
diff --git a/typeshed/stdlib/3/importlib/__init__.pyi b/typeshed/stdlib/3/importlib/__init__.pyi
index 08b3a81..c3533f5 100644
--- a/typeshed/stdlib/3/importlib/__init__.pyi
+++ b/typeshed/stdlib/3/importlib/__init__.pyi
@@ -4,15 +4,15 @@ import sys
 import types
 from typing import Any, Mapping, Optional, Sequence
 
-def __import__(name: str, globals: Optional[Mapping[str, Any]] = None,
-               locals: Optional[Mapping[str, Any]] = None,
+def __import__(name: str, globals: Optional[Mapping[str, Any]] = ...,
+               locals: Optional[Mapping[str, Any]] = ...,
                fromlist: Sequence[str] = ...,
                level: int = ...) -> types.ModuleType: ...
 
-def import_module(name: str, package: Optional[str] = None) -> types.ModuleType: ...
+def import_module(name: str, package: Optional[str] = ...) -> types.ModuleType: ...
 
 if sys.version_info >= (3, 3):
-    def find_loader(name: str, path: Optional[str] = None) -> Optional[Loader]: ...
+    def find_loader(name: str, path: Optional[str] = ...) -> Optional[Loader]: ...
 
     def invalidate_caches() -> None: ...
 
diff --git a/typeshed/stdlib/3/importlib/abc.pyi b/typeshed/stdlib/3/importlib/abc.pyi
index f7cd81d..7c49ede 100644
--- a/typeshed/stdlib/3/importlib/abc.pyi
+++ b/typeshed/stdlib/3/importlib/abc.pyi
@@ -19,7 +19,7 @@ class Finder(metaclass=ABCMeta):
     # easier to simply ignore that this method exists.
     # @abstractmethod
     # def find_module(self, fullname: str,
-    #                 path: Optional[Sequence[_Path]] = None) -> Optional[Loader]: ...
+    #                 path: Optional[Sequence[_Path]] = ...) -> Optional[Loader]: ...
 
 class ResourceLoader(Loader):
     @abstractmethod
@@ -64,7 +64,7 @@ if sys.version_info >= (3, 3):
             # Not defined on the actual class, but expected to exist.
             def find_spec(
                 self, fullname: str, path: Optional[Sequence[_Path]],
-                target: Optional[types.ModuleType] = None
+                target: Optional[types.ModuleType] = ...
             ) -> Optional[ModuleSpec]:
                 ...
 
@@ -78,7 +78,7 @@ if sys.version_info >= (3, 3):
             # Not defined on the actual class, but expected to exist.
             def find_spec(
                 self, fullname: str,
-                target: Optional[types.ModuleType] = None
+                target: Optional[types.ModuleType] = ...
             ) -> Optional[ModuleSpec]: ...
 
     class FileLoader(ResourceLoader, ExecutionLoader):
diff --git a/typeshed/stdlib/3/importlib/machinery.pyi b/typeshed/stdlib/3/importlib/machinery.pyi
index f9e1907..d4ed8c3 100644
--- a/typeshed/stdlib/3/importlib/machinery.pyi
+++ b/typeshed/stdlib/3/importlib/machinery.pyi
@@ -22,7 +22,7 @@ if sys.version_info >= (3, 3):
             @classmethod
             def find_spec(cls, fullname: str,
                           path: Optional[Sequence[importlib.abc._Path]],
-                          target: Optional[types.ModuleType] = None) -> Optional[ModuleSpec]:
+                          target: Optional[types.ModuleType] = ...) -> Optional[ModuleSpec]:
                 ...
         # InspectLoader
         @classmethod
@@ -79,7 +79,7 @@ if sys.version_info >= (3, 3):
             @classmethod
             def find_spec(cls, fullname: str,
                           path: Optional[Sequence[importlib.abc._Path]],
-                          target: Optional[types.ModuleType] = None) -> Optional[ModuleSpec]:
+                          target: Optional[types.ModuleType] = ...) -> Optional[ModuleSpec]:
                 ...
         # InspectLoader
         @classmethod
@@ -136,7 +136,7 @@ if sys.version_info >= (3, 3):
             @classmethod
             def find_spec(cls, fullname: str,
                           path: Optional[Sequence[importlib.abc._Path]],
-                          target: Optional[types.ModuleType] = None) -> Optional[ModuleSpec]:
+                          target: Optional[types.ModuleType] = ...) -> Optional[ModuleSpec]:
                 ...
 else:
     class WindowsRegisteryFinder:
diff --git a/typeshed/stdlib/3/importlib/util.pyi b/typeshed/stdlib/3/importlib/util.pyi
index 0a56b9c..3daff27 100644
--- a/typeshed/stdlib/3/importlib/util.pyi
+++ b/typeshed/stdlib/3/importlib/util.pyi
@@ -20,22 +20,22 @@ if sys.version_info >= (3, 3):
 if sys.version_info >= (3, 4):
     MAGIC_NUMBER = ...  # type: bytes
 
-    def cache_from_source(path: str, debug_override: Optional[bool] = None, *,
-                          optimization: Optional[Any] = None) -> str: ...
+    def cache_from_source(path: str, debug_override: Optional[bool] = ..., *,
+                          optimization: Optional[Any] = ...) -> str: ...
     def source_from_cache(path: str) -> str: ...
     def decode_source(source_bytes: bytes) -> str: ...
     def find_spec(
-        name: str, package: Optional[str] = None
+        name: str, package: Optional[str] = ...
     ) -> importlib.machinery.ModuleSpec: ...
     def spec_from_loader(
         name: str, loader: Optional[importlib.abc.Loader], *,
-        origin: Optional[str] = None, loader_state: Optional[Any] = None,
-        is_package: Optional[bool] = None
+        origin: Optional[str] = ..., loader_state: Optional[Any] = ...,
+        is_package: Optional[bool] = ...
     ) -> importlib.machinery.ModuleSpec: ...
     def spec_from_file_location(
         name: str, location: str, *,
-        loader: Optional[importlib.abc.Loader] = None,
-        submodule_search_locations: Optional[List[str]] = None
+        loader: Optional[importlib.abc.Loader] = ...,
+        submodule_search_locations: Optional[List[str]] = ...
     ) -> importlib.machinery.ModuleSpec: ...
 
 if sys.version_info >= (3, 5):
diff --git a/typeshed/stdlib/3/inspect.pyi b/typeshed/stdlib/3/inspect.pyi
index 6b90ad9..6482e69 100644
--- a/typeshed/stdlib/3/inspect.pyi
+++ b/typeshed/stdlib/3/inspect.pyi
@@ -66,7 +66,7 @@ def cleandoc(doc: str) -> str: ...
 #
 def signature(callable: Callable[..., Any],
               *,
-              follow_wrapped: bool = True) -> 'Signature': ...
+              follow_wrapped: bool = ...) -> 'Signature': ...
 
 class Signature:
     def __init__(self,
@@ -93,7 +93,7 @@ class Signature:
     def from_callable(cls,
                       obj: Callable[..., Any],
                       *,
-                      follow_wrapped: bool = True) -> 'Signature': ...
+                      follow_wrapped: bool = ...) -> 'Signature': ...
 
 # The name is the same as the enum's name in CPython
 class _ParameterKind: ...
@@ -229,13 +229,13 @@ FrameInfo = NamedTuple('FrameInfo', [('frame', FrameType),
                                      ])
 
 # TODO make the frame type more specific
-def getframeinfo(frame: Any, context: int = 1) -> FrameInfo: ...
-def getouterframes(frame: Any, context: int = 1) -> List[FrameInfo]: ...
-def getinnerframes(traceback: TracebackType, context: int = 1) -> List[FrameInfo]:
+def getframeinfo(frame: Any, context: int = ...) -> FrameInfo: ...
+def getouterframes(frame: Any, context: int = ...) -> List[FrameInfo]: ...
+def getinnerframes(traceback: TracebackType, context: int = ...) -> List[FrameInfo]:
     ...
 def currentframe() -> Optional[FrameType]: ...
-def stack(context: int = 1) -> List[FrameInfo]: ...
-def trace(context: int = 1) -> List[FrameInfo]: ...
+def stack(context: int = ...) -> List[FrameInfo]: ...
+def trace(context: int = ...) -> List[FrameInfo]: ...
 
 #
 # Fetching attributes statically
diff --git a/typeshed/stdlib/3/io.pyi b/typeshed/stdlib/3/io.pyi
index 20c63ef..ea2752d 100644
--- a/typeshed/stdlib/3/io.pyi
+++ b/typeshed/stdlib/3/io.pyi
@@ -49,7 +49,8 @@ class IOBase:
     else:
         def readline(self, limit: int = ...) -> bytes: ...
     if sys.version_info >= (3, 2):
-        closed = ...  # type: bool
+        @property
+        def closed(self) -> bool: ...
     else:
         def closed(self) -> bool: ...
 
@@ -101,8 +102,8 @@ class BytesIO(BinaryIO):
     def __iter__(self) -> Iterator[bytes]: ...
     def __next__(self) -> bytes: ...
     def __enter__(self) -> 'BytesIO': ...
-    def __exit__(self, t: Optional[type] = None, value: Optional[BaseException] = None,
-                 traceback: Optional[Any] = None) -> bool: ...
+    def __exit__(self, t: Optional[type] = ..., value: Optional[BaseException] = ...,
+                 traceback: Optional[Any] = ...) -> bool: ...
     def close(self) -> None: ...
     def fileno(self) -> int: ...
     def flush(self) -> None: ...
@@ -204,8 +205,8 @@ class TextIOWrapper(TextIO):
         write_through: bool = ...
     ) -> None: ...
     # copied from IOBase
-    def __exit__(self, t: Optional[type] = None, value: Optional[BaseException] = None,
-                 traceback: Optional[Any] = None) -> bool: ...
+    def __exit__(self, t: Optional[type] = ..., value: Optional[BaseException] = ...,
+                 traceback: Optional[Any] = ...) -> bool: ...
     def close(self) -> None: ...
     def fileno(self) -> int: ...
     def flush(self) -> None: ...
diff --git a/typeshed/stdlib/3/json/decoder.pyi b/typeshed/stdlib/3/json/decoder.pyi
index 164fcad..d5c7c04 100644
--- a/typeshed/stdlib/3/json/decoder.pyi
+++ b/typeshed/stdlib/3/json/decoder.pyi
@@ -18,11 +18,11 @@ class JSONDecoder:
     strict = ...  # type: bool
     object_pairs_hook = ...  # type: Callable[[List[Tuple[str, Any]]], Any]
 
-    def __init__(self, object_hook: Optional[Callable[[Dict[str, Any]], Any]] = None,
-            parse_float: Optional[Callable[[str], Any]] = None,
-            parse_int: Optional[Callable[[str], Any]] = None,
-            parse_constant: Optional[Callable[[str], Any]] = None,
-            strict: bool = True,
-            object_pairs_hook: Optional[Callable[[List[Tuple[str, Any]]], Any]] = None) -> None: ...
+    def __init__(self, object_hook: Optional[Callable[[Dict[str, Any]], Any]] = ...,
+            parse_float: Optional[Callable[[str], Any]] = ...,
+            parse_int: Optional[Callable[[str], Any]] = ...,
+            parse_constant: Optional[Callable[[str], Any]] = ...,
+            strict: bool = ...,
+            object_pairs_hook: Optional[Callable[[List[Tuple[str, Any]]], Any]] = ...) -> None: ...
     def decode(self, s: str) -> Any: ...
     def raw_decode(self, s: str, idx: int = ...) -> Tuple[Any, int]: ...
diff --git a/typeshed/stdlib/3/json/encoder.pyi b/typeshed/stdlib/3/json/encoder.pyi
index ced7168..cee377b 100644
--- a/typeshed/stdlib/3/json/encoder.pyi
+++ b/typeshed/stdlib/3/json/encoder.pyi
@@ -13,8 +13,8 @@ class JSONEncoder:
 
     def __init__(self, skipkeys: bool = ..., ensure_ascii: bool = ...,
             check_circular: bool = ..., allow_nan: bool = ..., sort_keys: bool = ...,
-            indent: Optional[int] = None, separators: Optional[Tuple[str, str]] = None, default: Optional[Callable] = None) -> None: ...
+            indent: Optional[int] = ..., separators: Optional[Tuple[str, str]] = ..., default: Optional[Callable] = ...) -> None: ...
 
     def default(self, o: Any) -> Any: ...
     def encode(self, o: Any) -> str: ...
-    def iterencode(self, o: Any, _one_shot: bool = False) -> Iterator[str]: ...
+    def iterencode(self, o: Any, _one_shot: bool = ...) -> Iterator[str]: ...
diff --git a/typeshed/stdlib/3/multiprocessing/__init__.pyi b/typeshed/stdlib/3/multiprocessing/__init__.pyi
index 59b82da..a8a3260 100644
--- a/typeshed/stdlib/3/multiprocessing/__init__.pyi
+++ b/typeshed/stdlib/3/multiprocessing/__init__.pyi
@@ -2,34 +2,39 @@
 
 from typing import (
     Any, Callable, ContextManager, Iterable, Mapping, Optional, Dict, List,
-    Union, TypeVar,
+    Union, TypeVar, Sequence, Tuple
 )
 
 from logging import Logger
-from multiprocessing import pool
-from multiprocessing.context import BaseContext
+from multiprocessing import connection, pool, synchronize
+from multiprocessing.context import (
+    BaseContext,
+    ProcessError, BufferTooShort, TimeoutError, AuthenticationError)
 from multiprocessing.managers import SyncManager
-from multiprocessing.pool import AsyncResult
 from multiprocessing.process import current_process as current_process
-import sys
 import queue
+import sys
 
 _T = TypeVar('_T')
 
-class Lock(ContextManager[Lock]):
-    def acquire(self, block: bool = ..., timeout: int = ...) -> None: ...
-    def release(self) -> None: ...
+# N.B. The functions below are generated at runtime by partially applying
+# multiprocessing.context.BaseContext's methods, so the two signatures should
+# be identical (modulo self).
+
+# Synchronization primitives
+_LockLike = Union[synchronize.Lock, synchronize.RLock]
+def Barrier(parties: int,
+            action: Optional[Callable] = ...,
+            timeout: Optional[float] = ...) -> synchronize.Barrier: ...
+def BoundedSemaphore(value: int = ...) -> synchronize.BoundedSemaphore: ...
+def Condition(lock: Optional[_LockLike] = ...) -> synchronize.Condition: ...
+def Event(lock: Optional[_LockLike] = ...) -> synchronize.Event: ...
+def Lock() -> synchronize.Lock: ...
+def RLock() -> synchronize.RLock: ...
+def Semaphore(value: int = ...) -> synchronize.Semaphore: ...
 
-class Event(object):
-    def __init__(self, *, ctx: BaseContext) -> None: ...
-    def is_set(self) -> bool: ...
-    def set(self) -> None: ...
-    def clear(self) -> None: ...
-    def wait(self, timeout: Optional[int] = ...) -> bool: ...
+def Pipe(duplex: bool = ...) -> Tuple[connection.Connection, connection.Connection]: ...
 
-# N.B. This is generated at runtime by partially applying
-# multiprocessing.context.BaseContext.Pool, so the two signatures should be
-# identical (modulo self).
 def Pool(processes: Optional[int] = ...,
          initializer: Optional[Callable[..., Any]] = ...,
          initargs: Iterable[Any] = ...,
diff --git a/typeshed/stdlib/3/multiprocessing/connection.pyi b/typeshed/stdlib/3/multiprocessing/connection.pyi
new file mode 100644
index 0000000..2bad309
--- /dev/null
+++ b/typeshed/stdlib/3/multiprocessing/connection.pyi
@@ -0,0 +1,39 @@
+from typing import Any, Iterable, List, Optional, Tuple, Type, Union
+import socket
+import sys
+import types
+
+# https://docs.python.org/3/library/multiprocessing.html#address-formats
+_Address = Union[str, Tuple[str, int]]
+
+def deliver_challenge(connection: Connection, authkey: bytes) -> None: ...
+def answer_challenge(connection: Connection, authkey: bytes) -> None: ...
+if sys.version_info >= (3, 3):
+    def wait(object_list: Iterable[Union[Connection, socket.socket, int]], timeout: Optional[float] = ...) -> List[Union[Connection, socket.socket, int]]: ...
+def Client(address: _Address, family: Optional[str] = ..., authkey: Optional[bytes] = ...) -> Connection: ...
+def Pipe(duplex: bool = ...) -> Tuple[Connection, Connection]: ...
+
+class Listener:
+    def __init__(self, address: Optional[_Address] = ..., family: Optional[str] = ..., backlog: int = ..., authkey: Optional[bytes] = ...) -> None: ...
+    def accept(self) -> Connection: ...
+    def close(self) -> None: ...
+    @property
+    def address(self) -> _Address: ...
+    @property
+    def last_accepted(self) -> Optional[_Address]: ...
+    if sys.version_info >= (3, 3):
+        def __enter__(self) -> Listener: ...
+        def __exit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], exc_tb: Optional[types.TracebackType]) -> None: ...
+
+class Connection:
+    def close(self) -> None: ...
+    def fileno(self) -> int: ...
+    def poll(self, timeout: float = ...) -> bool: ...
+    def recv(self) -> Any: ...
+    def recv_bytes(self, maxlength: Optional[int] = ...) -> bytes: ...
+    def recv_bytes_into(self, buf: Any, offset: int = ...) -> int: ...
+    def send(self, obj: Any) -> None: ...
+    def send_bytes(self,
+                   buf: bytes,
+                   offset: int = ...,
+                   size: Optional[int] = ...) -> None: ...
diff --git a/typeshed/stdlib/3/multiprocessing/context.pyi b/typeshed/stdlib/3/multiprocessing/context.pyi
index ab095c9..859a6c3 100644
--- a/typeshed/stdlib/3/multiprocessing/context.pyi
+++ b/typeshed/stdlib/3/multiprocessing/context.pyi
@@ -2,11 +2,15 @@
 
 from logging import Logger
 import multiprocessing
+from multiprocessing import synchronize
 import sys
 from typing import (
-    Any, Callable, Iterable, Optional, List, Mapping, Sequence, Tuple, Type, Union,
+    Any, Callable, Iterable, Optional, List, Mapping, Sequence, Tuple, Type,
+    Union,
 )
 
+_LockLike = Union[synchronize.Lock, synchronize.RLock]
+
 class ProcessError(Exception): ...
 
 class BufferTooShort(ProcessError): ...
@@ -21,6 +25,9 @@ class BaseContext(object):
     TimeoutError = ...  # type: Type[Exception]
     AuthenticationError = ...  # type: Type[Exception]
 
+    # N.B. The methods below are applied at runtime to generate
+    # multiprocessing.*, so the signatures should be identical (modulo self).
+
     @staticmethod
     def current_process() -> multiprocessing.Process: ...
     @staticmethod
@@ -30,30 +37,26 @@ class BaseContext(object):
     def Manager(self) -> Any: ...
     # TODO: change return to Pipe once a stub exists in multiprocessing.connection
     def Pipe(self, duplex: bool) -> Any: ...
-    # TODO: change return to Lock once a stub exists in multiprocessing.synchronize
-    def Lock(self) -> Any: ...
-    # TODO: change return to RLock once a stub exists in multiprocessing.synchronize
-    def RLock(self) -> Any: ...
-    # TODO: change lock param to Optional[Union[Lock, RLock]] when stubs exists in multiprocessing.synchronize
-    # TODO: change return to Condition once a stub exists in multiprocessing.synchronize
-    def Condition(self, lock: Optional[Any] = ...) -> Any: ...
-    # TODO: change return to Semaphore once a stub exists in multiprocessing.synchronize
-    def Semaphore(self, value: int = ...) -> Any: ...
-    # TODO: change return to BoundedSemaphore once a stub exists in multiprocessing.synchronize
-    def BoundedSemaphore(self, value: int = ...) -> Any: ...
-    # TODO: change return to Event once a stub exists in multiprocessing.synchronize
-    def Event(self) -> Any: ...
-    # TODO: change return to Barrier once a stub exists in multiprocessing.synchronize
-    def Barrier(self, parties: int, action: Optional[Callable[..., Any]] = ..., timeout: Optional[int] = ...) -> Any: ...
+
+    def Barrier(self,
+                parties: int,
+                action: Optional[Callable] = ...,
+                timeout: Optional[float] = ...) -> synchronize.Barrier: ...
+    def BoundedSemaphore(self,
+                         value: int = ...) -> synchronize.BoundedSemaphore: ...
+    def Condition(self,
+                  lock: Optional[_LockLike] = ...) -> synchronize.Condition: ...
+    def Event(self, lock: Optional[_LockLike] = ...) -> synchronize.Event: ...
+    def Lock(self) -> synchronize.Lock: ...
+    def RLock(self) -> synchronize.RLock: ...
+    def Semaphore(self, value: int = ...) -> synchronize.Semaphore: ...
+
     # TODO: change return to Queue once a stub exists in multiprocessing.queues
     def Queue(self, maxsize: int = ...) -> Any: ...
     # TODO: change return to Queue once a stub exists in multiprocessing.queues
     def JoinableQueue(self, maxsize: int = ...) -> Any: ...
     # TODO: change return to SimpleQueue once a stub exists in multiprocessing.queues
     def SimpleQueue(self) -> Any: ...
-    # N.B. This method is partially applied at runtime to generate
-    # multiprocessing.Pool, so the two signatures should be identical (modulo
-    # self).
     def Pool(
         self,
         processes: Optional[int] = ...,
diff --git a/typeshed/stdlib/3/multiprocessing/pool.pyi b/typeshed/stdlib/3/multiprocessing/pool.pyi
index bdfd476..779961a 100644
--- a/typeshed/stdlib/3/multiprocessing/pool.pyi
+++ b/typeshed/stdlib/3/multiprocessing/pool.pyi
@@ -20,7 +20,7 @@ class Pool(ContextManager[Pool]):
                  initializer: Optional[Callable[..., None]] = ...,
                  initargs: Iterable[Any] = ...,
                  maxtasksperchild: Optional[int] = ...,
-                 context: Optional[Any] = None) -> None: ...
+                 context: Optional[Any] = ...) -> None: ...
     def apply(self,
               func: Callable[..., Any],
               args: Iterable[Any] = ...,
@@ -29,35 +29,35 @@ class Pool(ContextManager[Pool]):
                 func: Callable[..., Any],
                 args: Iterable[Any] = ...,
                 kwds: Dict[str, Any] = ...,
-                callback: Optional[Callable[..., None]] = None,
-                error_callback: Optional[Callable[[BaseException], None]] = None) -> AsyncResult: ...
+                callback: Optional[Callable[..., None]] = ...,
+                error_callback: Optional[Callable[[BaseException], None]] = ...) -> AsyncResult: ...
     def map(self,
             func: Callable[..., Any],
             iterable: Iterable[Any] = ...,
-            chunksize: Optional[int] = None) -> List[Any]: ...
+            chunksize: Optional[int] = ...) -> List[Any]: ...
     def map_async(self, func: Callable[..., Any],
                   iterable: Iterable[Any] = ...,
-                  chunksize: Optional[int] = None,
-                  callback: Optional[Callable[..., None]] = None,
-                  error_callback: Optional[Callable[[BaseException], None]] = None) -> AsyncResult: ...
+                  chunksize: Optional[int] = ...,
+                  callback: Optional[Callable[..., None]] = ...,
+                  error_callback: Optional[Callable[[BaseException], None]] = ...) -> AsyncResult: ...
     def imap(self,
              func: Callable[..., Any],
              iterable: Iterable[Any] = ...,
-             chunksize: Optional[int] = None) -> Iterable[Any]: ...
+             chunksize: Optional[int] = ...) -> Iterable[Any]: ...
     def imap_unordered(self,
                        func: Callable[..., Any],
                        iterable: Iterable[Any] = ...,
-                       chunksize: Optional[int] = None) -> Iterable[Any]: ...
+                       chunksize: Optional[int] = ...) -> Iterable[Any]: ...
     def starmap(self,
                 func: Callable[..., Any],
                 iterable: Iterable[Iterable[Any]] = ...,
-                chunksize: Optional[int] = None) -> List[Any]: ...
+                chunksize: Optional[int] = ...) -> List[Any]: ...
     def starmap_async(self,
                       func: Callable[..., Any],
                       iterable: Iterable[Iterable[Any]] = ...,
-                      chunksize: Optional[int] = None,
-                      callback: Optional[Callable[..., None]] = None,
-                      error_callback: Optional[Callable[[BaseException], None]] = None) -> AsyncResult: ...
+                      chunksize: Optional[int] = ...,
+                      callback: Optional[Callable[..., None]] = ...,
+                      error_callback: Optional[Callable[[BaseException], None]] = ...) -> AsyncResult: ...
     def close(self) -> None: ...
     def terminate(self) -> None: ...
     def join(self) -> None: ...
@@ -66,6 +66,6 @@ class Pool(ContextManager[Pool]):
 
 class ThreadPool(Pool, ContextManager[ThreadPool]):
 
-    def __init__(self, processes: Optional[int] = None,
-                 initializer: Optional[Callable[..., Any]] = None,
+    def __init__(self, processes: Optional[int] = ...,
+                 initializer: Optional[Callable[..., Any]] = ...,
                  initargs: Iterable[Any] = ...) -> None: ...
diff --git a/typeshed/stdlib/3/multiprocessing/synchronize.pyi b/typeshed/stdlib/3/multiprocessing/synchronize.pyi
new file mode 100644
index 0000000..3b8f0fd
--- /dev/null
+++ b/typeshed/stdlib/3/multiprocessing/synchronize.pyi
@@ -0,0 +1,64 @@
+from typing import Callable, ContextManager, Optional, Union
+
+from multiprocessing.context import BaseContext
+import threading
+import sys
+
+_LockLike = Union[Lock, RLock]
+
+class Barrier(threading.Barrier):
+    def __init__(self,
+                 parties: int,
+                 action: Optional[Callable] = ...,
+                 timeout: Optional[float] = ...,
+                 *,
+                 ctx: BaseContext) -> None: ...
+
+class BoundedSemaphore(Semaphore):
+    def __init__(self, value: int = ..., *, ctx: BaseContext) -> None: ...
+
+class Condition(ContextManager[bool]):
+    def __init__(self,
+                 lock: Optional[_LockLike] = ...,
+                 *,
+                 ctx: BaseContext) -> None: ...
+    if sys.version_info >= (3, 7):
+        def notify(self, n: int = ...) -> None: ...
+    else:
+        def notify(self) -> None: ...
+    def notify_all(self) -> None: ...
+    def wait(self, timeout: Optional[float] = ...) -> bool: ...
+    if sys.version_info >= (3, 3):
+        def wait_for(self,
+                     predicate: Callable[[], bool],
+                     timeout: Optional[float] = ...) -> bool: ...
+    def acquire(self,
+                block: bool = ...,
+                timeout: Optional[float] = ...) -> bool: ...
+    def release(self) -> None: ...
+
+class Event(ContextManager[bool]):
+    def __init__(self,
+                 lock: Optional[_LockLike] = ...,
+                 *,
+                 ctx: BaseContext) -> None: ...
+    def is_set(self) -> bool: ...
+    def set(self) -> None: ...
+    def clear(self) -> None: ...
+    def wait(self, timeout: Optional[float] = ...) -> bool: ...
+
+class Lock(SemLock):
+    def __init__(self, *, ctx: BaseContext) -> None: ...
+
+class RLock(SemLock):
+    def __init__(self, *, ctx: BaseContext) -> None: ...
+
+class Semaphore(SemLock):
+    def __init__(self, value: int = ..., *, ctx: BaseContext) -> None: ...
+
+# Not part of public API
+class SemLock(ContextManager[bool]):
+    def acquire(self,
+                block: bool = ...,
+                timeout: Optional[float] = ...) -> bool: ...
+    def release(self) -> None: ...
diff --git a/typeshed/stdlib/3/shelve.pyi b/typeshed/stdlib/3/shelve.pyi
index ab6b2d9..4a33969 100644
--- a/typeshed/stdlib/3/shelve.pyi
+++ b/typeshed/stdlib/3/shelve.pyi
@@ -3,11 +3,11 @@ import collections
 
 
 class Shelf(collections.MutableMapping):
-    def __init__(self, dict: Dict[bytes, Any], protocol: Optional[int] = None, writeback: bool = ..., keyencoding: str = 'utf-8') -> None: ...
+    def __init__(self, dict: Dict[bytes, Any], protocol: Optional[int] = ..., writeback: bool = ..., keyencoding: str = ...) -> None: ...
     def __iter__(self) -> Iterator[str]: ...
     def __len__(self) -> int: ...
     def __contains__(self, key: Any) -> bool: ...  # key should be str, but it would conflict with superclass's type signature
-    def get(self, key: str, default: Any = None) -> Any: ...
+    def get(self, key: str, default: Any = ...) -> Any: ...
     def __getitem__(self, key: str) -> Any: ...
     def __setitem__(self, key: str, value: Any) -> None: ...
     def __delitem__(self, key: str) -> None: ...
@@ -18,7 +18,7 @@ class Shelf(collections.MutableMapping):
     def sync(self) -> None: ...
 
 class BsdDbShelf(Shelf):
-    def __init__(self, dict: Dict[bytes, Any], protocol: Optional[int] = None, writeback: bool = ..., keyencoding: str = 'utf-8') -> None: ...
+    def __init__(self, dict: Dict[bytes, Any], protocol: Optional[int] = ..., writeback: bool = ..., keyencoding: str = ...) -> None: ...
     def set_location(self, key: Any) -> Tuple[str, Any]: ...
     def next(self) -> Tuple[str, Any]: ...
     def previous(self) -> Tuple[str, Any]: ...
@@ -26,6 +26,6 @@ class BsdDbShelf(Shelf):
     def last(self) -> Tuple[str, Any]: ...
 
 class DbfilenameShelf(Shelf):
-    def __init__(self, filename: str, flag: str = 'c', protocol: Optional[int] = None, writeback: bool = ...) -> None: ...
+    def __init__(self, filename: str, flag: str = ..., protocol: Optional[int] = ..., writeback: bool = ...) -> None: ...
 
-def open(filename: str, flag: str = 'c', protocol: Optional[int] = None, writeback: bool = ...) -> DbfilenameShelf: ...
+def open(filename: str, flag: str = ..., protocol: Optional[int] = ..., writeback: bool = ...) -> DbfilenameShelf: ...
diff --git a/typeshed/stdlib/3/socketserver.pyi b/typeshed/stdlib/3/socketserver.pyi
index 8d03b8f..91f781d 100644
--- a/typeshed/stdlib/3/socketserver.pyi
+++ b/typeshed/stdlib/3/socketserver.pyi
@@ -21,7 +21,8 @@ class BaseServer:
     def serve_forever(self, poll_interval: float = ...) -> None: ...
     def shutdown(self) -> None: ...
     def server_close(self) -> None: ...
-    def finish_request(self) -> None: ...
+    def finish_request(self, request: bytes,
+                       client_address: Tuple[str, int]) -> None: ...
     def get_request(self) -> None: ...
     def handle_error(self, request: bytes,
                      client_address: Tuple[str, int]) -> None: ...
diff --git a/typeshed/stdlib/3/sre_constants.pyi b/typeshed/stdlib/3/sre_constants.pyi
index c1dba3a..10e393c 100644
--- a/typeshed/stdlib/3/sre_constants.pyi
+++ b/typeshed/stdlib/3/sre_constants.pyi
@@ -38,3 +38,77 @@ SRE_FLAG_ASCII = ...  # type: int
 SRE_INFO_PREFIX = ...  # type: int
 SRE_INFO_LITERAL = ...  # type: int
 SRE_INFO_CHARSET = ...  # type: int
+
+
+# Stubgen above; manually defined constants below (dynamic at runtime)
+
+# from OPCODES
+FAILURE = ...  # type: _NamedIntConstant
+SUCCESS = ...  # type: _NamedIntConstant
+ANY = ...  # type: _NamedIntConstant
+ANY_ALL = ...  # type: _NamedIntConstant
+ASSERT = ...  # type: _NamedIntConstant
+ASSERT_NOT = ...  # type: _NamedIntConstant
+AT = ...  # type: _NamedIntConstant
+BRANCH = ...  # type: _NamedIntConstant
+CALL = ...  # type: _NamedIntConstant
+CATEGORY = ...  # type: _NamedIntConstant
+CHARSET = ...  # type: _NamedIntConstant
+BIGCHARSET = ...  # type: _NamedIntConstant
+GROUPREF = ...  # type: _NamedIntConstant
+GROUPREF_EXISTS = ...  # type: _NamedIntConstant
+GROUPREF_IGNORE = ...  # type: _NamedIntConstant
+IN = ...  # type: _NamedIntConstant
+IN_IGNORE = ...  # type: _NamedIntConstant
+INFO = ...  # type: _NamedIntConstant
+JUMP = ...  # type: _NamedIntConstant
+LITERAL = ...  # type: _NamedIntConstant
+LITERAL_IGNORE = ...  # type: _NamedIntConstant
+MARK = ...  # type: _NamedIntConstant
+MAX_UNTIL = ...  # type: _NamedIntConstant
+MIN_UNTIL = ...  # type: _NamedIntConstant
+NOT_LITERAL = ...  # type: _NamedIntConstant
+NOT_LITERAL_IGNORE = ...  # type: _NamedIntConstant
+NEGATE = ...  # type: _NamedIntConstant
+RANGE = ...  # type: _NamedIntConstant
+REPEAT = ...  # type: _NamedIntConstant
+REPEAT_ONE = ...  # type: _NamedIntConstant
+SUBPATTERN = ...  # type: _NamedIntConstant
+MIN_REPEAT_ONE = ...  # type: _NamedIntConstant
+RANGE_IGNORE = ...  # type: _NamedIntConstant
+MIN_REPEAT = ...  # type: _NamedIntConstant
+MAX_REPEAT = ...  # type: _NamedIntConstant
+
+# from ATCODES
+AT_BEGINNING = ...  # type: _NamedIntConstant
+AT_BEGINNING_LINE = ...  # type: _NamedIntConstant
+AT_BEGINNING_STRING = ...  # type: _NamedIntConstant
+AT_BOUNDARY = ...  # type: _NamedIntConstant
+AT_NON_BOUNDARY = ...  # type: _NamedIntConstant
+AT_END = ...  # type: _NamedIntConstant
+AT_END_LINE = ...  # type: _NamedIntConstant
+AT_END_STRING = ...  # type: _NamedIntConstant
+AT_LOC_BOUNDARY = ...  # type: _NamedIntConstant
+AT_LOC_NON_BOUNDARY = ...  # type: _NamedIntConstant
+AT_UNI_BOUNDARY = ...  # type: _NamedIntConstant
+AT_UNI_NON_BOUNDARY = ...  # type: _NamedIntConstant
+
+# from CHCODES
+CATEGORY_DIGIT = ...  # type: _NamedIntConstant
+CATEGORY_NOT_DIGIT = ...  # type: _NamedIntConstant
+CATEGORY_SPACE = ...  # type: _NamedIntConstant
+CATEGORY_NOT_SPACE = ...  # type: _NamedIntConstant
+CATEGORY_WORD = ...  # type: _NamedIntConstant
+CATEGORY_NOT_WORD = ...  # type: _NamedIntConstant
+CATEGORY_LINEBREAK = ...  # type: _NamedIntConstant
+CATEGORY_NOT_LINEBREAK = ...  # type: _NamedIntConstant
+CATEGORY_LOC_WORD = ...  # type: _NamedIntConstant
+CATEGORY_LOC_NOT_WORD = ...  # type: _NamedIntConstant
+CATEGORY_UNI_DIGIT = ...  # type: _NamedIntConstant
+CATEGORY_UNI_NOT_DIGIT = ...  # type: _NamedIntConstant
+CATEGORY_UNI_SPACE = ...  # type: _NamedIntConstant
+CATEGORY_UNI_NOT_SPACE = ...  # type: _NamedIntConstant
+CATEGORY_UNI_WORD = ...  # type: _NamedIntConstant
+CATEGORY_UNI_NOT_WORD = ...  # type: _NamedIntConstant
+CATEGORY_UNI_LINEBREAK = ...  # type: _NamedIntConstant
+CATEGORY_UNI_NOT_LINEBREAK = ...  # type: _NamedIntConstant
diff --git a/typeshed/stdlib/3/subprocess.pyi b/typeshed/stdlib/3/subprocess.pyi
index c0a8613..8bb75ab 100644
--- a/typeshed/stdlib/3/subprocess.pyi
+++ b/typeshed/stdlib/3/subprocess.pyi
@@ -7,7 +7,14 @@ from types import TracebackType
 
 _FILE = Union[None, int, IO[Any]]
 _TXT = Union[bytes, Text]
-_CMD = Union[_TXT, Sequence[_TXT]]
+if sys.version_info >= (3, 6):
+    from builtins import _PathLike
+    _PATH = Union[bytes, Text, _PathLike]
+else:
+    _PATH = Union[bytes, Text]
+# Python 3.6 doesn't support _CMD being a single PathLike.
+# See: https://bugs.python.org/issue31961
+_CMD = Union[_TXT, Sequence[_PATH]]
 _ENV = Union[Mapping[bytes, _TXT], Mapping[Text, _TXT]]
 
 if sys.version_info >= (3, 5):
@@ -31,14 +38,14 @@ if sys.version_info >= (3, 5):
                 input: Optional[_TXT] = ...,
                 check: bool = ...,
                 bufsize: int = ...,
-                executable: _TXT = ...,
+                executable: _PATH = ...,
                 stdin: _FILE = ...,
                 stdout: _FILE = ...,
                 stderr: _FILE = ...,
                 preexec_fn: Callable[[], Any] = ...,
                 close_fds: bool = ...,
                 shell: bool = ...,
-                cwd: Optional[_TXT] = ...,
+                cwd: Optional[_PATH] = ...,
                 env: Optional[_ENV] = ...,
                 universal_newlines: bool = ...,
                 startupinfo: Any = ...,
@@ -56,14 +63,14 @@ if sys.version_info >= (3, 5):
                 input: Optional[_TXT] = ...,
                 check: bool = ...,
                 bufsize: int = ...,
-                executable: _TXT = ...,
+                executable: _PATH = ...,
                 stdin: _FILE = ...,
                 stdout: _FILE = ...,
                 stderr: _FILE = ...,
                 preexec_fn: Callable[[], Any] = ...,
                 close_fds: bool = ...,
                 shell: bool = ...,
-                cwd: Optional[_TXT] = ...,
+                cwd: Optional[_PATH] = ...,
                 env: Optional[_ENV] = ...,
                 universal_newlines: bool = ...,
                 startupinfo: Any = ...,
@@ -77,14 +84,14 @@ if sys.version_info >= (3, 3):
     # 3.3 added timeout
     def call(args: _CMD,
              bufsize: int = ...,
-             executable: _TXT = ...,
+             executable: _PATH = ...,
              stdin: _FILE = ...,
              stdout: _FILE = ...,
              stderr: _FILE = ...,
              preexec_fn: Callable[[], Any] = ...,
              close_fds: bool = ...,
              shell: bool = ...,
-             cwd: Optional[_TXT] = ...,
+             cwd: Optional[_PATH] = ...,
              env: Optional[_ENV] = ...,
              universal_newlines: bool = ...,
              startupinfo: Any = ...,
@@ -96,14 +103,14 @@ if sys.version_info >= (3, 3):
 else:
     def call(args: _CMD,
              bufsize: int = ...,
-             executable: _TXT = ...,
+             executable: _PATH = ...,
              stdin: _FILE = ...,
              stdout: _FILE = ...,
              stderr: _FILE = ...,
              preexec_fn: Callable[[], Any] = ...,
              close_fds: bool = ...,
              shell: bool = ...,
-             cwd: Optional[_TXT] = ...,
+             cwd: Optional[_PATH] = ...,
              env: Optional[_ENV] = ...,
              universal_newlines: bool = ...,
              startupinfo: Any = ...,
@@ -117,14 +124,14 @@ if sys.version_info >= (3, 3):
     # 3.3 added timeout
     def check_call(args: _CMD,
                    bufsize: int = ...,
-                   executable: _TXT = ...,
+                   executable: _PATH = ...,
                    stdin: _FILE = ...,
                    stdout: _FILE = ...,
                    stderr: _FILE = ...,
                    preexec_fn: Callable[[], Any] = ...,
                    close_fds: bool = ...,
                    shell: bool = ...,
-                   cwd: Optional[_TXT] = ...,
+                   cwd: Optional[_PATH] = ...,
                    env: Optional[_ENV] = ...,
                    universal_newlines: bool = ...,
                    startupinfo: Any = ...,
@@ -136,14 +143,14 @@ if sys.version_info >= (3, 3):
 else:
     def check_call(args: _CMD,
                    bufsize: int = ...,
-                   executable: _TXT = ...,
+                   executable: _PATH = ...,
                    stdin: _FILE = ...,
                    stdout: _FILE = ...,
                    stderr: _FILE = ...,
                    preexec_fn: Callable[[], Any] = ...,
                    close_fds: bool = ...,
                    shell: bool = ...,
-                   cwd: Optional[_TXT] = ...,
+                   cwd: Optional[_PATH] = ...,
                    env: Optional[_ENV] = ...,
                    universal_newlines: bool = ...,
                    startupinfo: Any = ...,
@@ -156,13 +163,13 @@ if sys.version_info >= (3, 6):
     # 3.6 added encoding and errors
     def check_output(args: _CMD,
                      bufsize: int = ...,
-                     executable: _TXT = ...,
+                     executable: _PATH = ...,
                      stdin: _FILE = ...,
                      stderr: _FILE = ...,
                      preexec_fn: Callable[[], Any] = ...,
                      close_fds: bool = ...,
                      shell: bool = ...,
-                     cwd: Optional[_TXT] = ...,
+                     cwd: Optional[_PATH] = ...,
                      env: Optional[_ENV] = ...,
                      universal_newlines: bool = ...,
                      startupinfo: Any = ...,
@@ -180,13 +187,13 @@ elif sys.version_info >= (3, 4):
     # 3.4 added input
     def check_output(args: _CMD,
                      bufsize: int = ...,
-                     executable: _TXT = ...,
+                     executable: _PATH = ...,
                      stdin: _FILE = ...,
                      stderr: _FILE = ...,
                      preexec_fn: Callable[[], Any] = ...,
                      close_fds: bool = ...,
                      shell: bool = ...,
-                     cwd: Optional[_TXT] = ...,
+                     cwd: Optional[_PATH] = ...,
                      env: Optional[_ENV] = ...,
                      universal_newlines: bool = ...,
                      startupinfo: Any = ...,
@@ -201,13 +208,13 @@ elif sys.version_info >= (3, 3):
     # 3.3 added timeout
     def check_output(args: _CMD,
                      bufsize: int = ...,
-                     executable: _TXT = ...,
+                     executable: _PATH = ...,
                      stdin: _FILE = ...,
                      stderr: _FILE = ...,
                      preexec_fn: Callable[[], Any] = ...,
                      close_fds: bool = ...,
                      shell: bool = ...,
-                     cwd: Optional[_TXT] = ...,
+                     cwd: Optional[_PATH] = ...,
                      env: Optional[_ENV] = ...,
                      universal_newlines: bool = ...,
                      startupinfo: Any = ...,
@@ -221,13 +228,13 @@ else:
     # Same args as Popen.__init__, except for stdout
     def check_output(args: _CMD,
                      bufsize: int = ...,
-                     executable: _TXT = ...,
+                     executable: _PATH = ...,
                      stdin: _FILE = ...,
                      stderr: _FILE = ...,
                      preexec_fn: Callable[[], Any] = ...,
                      close_fds: bool = ...,
                      shell: bool = ...,
-                     cwd: Optional[_TXT] = ...,
+                     cwd: Optional[_PATH] = ...,
                      env: Optional[_ENV] = ...,
                      universal_newlines: bool = ...,
                      startupinfo: Any = ...,
@@ -277,14 +284,14 @@ class Popen:
         def __init__(self,
                      args: _CMD,
                      bufsize: int = ...,
-                     executable: Optional[_TXT] = ...,
+                     executable: Optional[_PATH] = ...,
                      stdin: Optional[_FILE] = ...,
                      stdout: Optional[_FILE] = ...,
                      stderr: Optional[_FILE] = ...,
                      preexec_fn: Optional[Callable[[], Any]] = ...,
                      close_fds: bool = ...,
                      shell: bool = ...,
-                     cwd: Optional[_TXT] = ...,
+                     cwd: Optional[_PATH] = ...,
                      env: Optional[_ENV] = ...,
                      universal_newlines: bool = ...,
                      startupinfo: Optional[Any] = ...,
@@ -299,14 +306,14 @@ class Popen:
         def __init__(self,
                      args: _CMD,
                      bufsize: int = ...,
-                     executable: Optional[_TXT] = ...,
+                     executable: Optional[_PATH] = ...,
                      stdin: Optional[_FILE] = ...,
                      stdout: Optional[_FILE] = ...,
                      stderr: Optional[_FILE] = ...,
                      preexec_fn: Optional[Callable[[], Any]] = ...,
                      close_fds: bool = ...,
                      shell: bool = ...,
-                     cwd: Optional[_TXT] = ...,
+                     cwd: Optional[_PATH] = ...,
                      env: Optional[_ENV] = ...,
                      universal_newlines: bool = ...,
                      startupinfo: Optional[Any] = ...,
diff --git a/typeshed/stdlib/3/sys.pyi b/typeshed/stdlib/3/sys.pyi
index 76e4cd6..9e574ff 100644
--- a/typeshed/stdlib/3/sys.pyi
+++ b/typeshed/stdlib/3/sys.pyi
@@ -143,7 +143,7 @@ def _getframe(depth: int) -> FrameType: ...
 
 _ProfileFunc = Callable[[FrameType, str, Any], Any]
 def getprofile() -> Optional[_ProfileFunc]: ...
-def setprofile(profilefunc: _ProfileFunc) -> None: ...
+def setprofile(profilefunc: Optional[_ProfileFunc]) -> None: ...
 
 _TraceFunc = Callable[[FrameType, str, Any], Optional[Callable[[FrameType, str, Any], Any]]]
 def gettrace() -> Optional[_TraceFunc]: ...
diff --git a/typeshed/stdlib/3/types.pyi b/typeshed/stdlib/3/types.pyi
index f9f46c5..c65369a 100644
--- a/typeshed/stdlib/3/types.pyi
+++ b/typeshed/stdlib/3/types.pyi
@@ -29,6 +29,7 @@ class FunctionType:
     __dict__ = ...  # type: Dict[str, Any]
     __globals__ = ...  # type: Dict[str, Any]
     __name__ = ...  # type: str
+    __qualname__ = ...  # type: str
     __annotations__ = ...  # type: Dict[str, Any]
     __kwdefaults__ = ...  # type: Dict[str, Any]
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
@@ -140,10 +141,13 @@ class MethodType:
     __func__ = ...  # type: _StaticFunctionType
     __self__ = ...  # type: object
     __name__ = ...  # type: str
+    __qualname__ = ...  # type: str
     def __init__(self, func: Callable, obj: object) -> None: ...
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
 class BuiltinFunctionType:
     __self__ = ...  # type: Union[object, ModuleType]
+    __name__ = ...  # type: str
+    __qualname__ = ...  # type: str
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
 BuiltinMethodType = BuiltinFunctionType
 
diff --git a/typeshed/stdlib/3/typing.pyi b/typeshed/stdlib/3/typing.pyi
index 71f97ce..83471ac 100644
--- a/typeshed/stdlib/3/typing.pyi
+++ b/typeshed/stdlib/3/typing.pyi
@@ -19,6 +19,7 @@ class _SpecialForm:
 
 Tuple: _SpecialForm = ...
 Generic: _SpecialForm = ...
+Protocol: _SpecialForm = ...
 Callable: _SpecialForm = ...
 Type: _SpecialForm = ...
 ClassVar: _SpecialForm = ...
@@ -64,51 +65,65 @@ _V_co = TypeVar('_V_co', covariant=True)  # Any type covariant containers.
 _KT_co = TypeVar('_KT_co', covariant=True)  # Key type covariant containers.
 _VT_co = TypeVar('_VT_co', covariant=True)  # Value type covariant containers.
 _T_contra = TypeVar('_T_contra', contravariant=True)  # Ditto contravariant.
+_TC = TypeVar('_TC', bound=Type[object])
 
-class SupportsInt(metaclass=ABCMeta):
+def runtime(cls: _TC) -> _TC: ...
+
+ at runtime
+class SupportsInt(Protocol, metaclass=ABCMeta):
     @abstractmethod
     def __int__(self) -> int: ...
 
-class SupportsFloat(metaclass=ABCMeta):
+ at runtime
+class SupportsFloat(Protocol, metaclass=ABCMeta):
     @abstractmethod
     def __float__(self) -> float: ...
 
-class SupportsComplex(metaclass=ABCMeta):
+ at runtime
+class SupportsComplex(Protocol, metaclass=ABCMeta):
     @abstractmethod
     def __complex__(self) -> complex: ...
 
-class SupportsBytes(metaclass=ABCMeta):
+ at runtime
+class SupportsBytes(Protocol, metaclass=ABCMeta):
     @abstractmethod
     def __bytes__(self) -> bytes: ...
 
-class SupportsAbs(Generic[_T]):
+ at runtime
+class SupportsAbs(Protocol[_T_co]):
     @abstractmethod
-    def __abs__(self) -> _T: ...
+    def __abs__(self) -> _T_co: ...
 
-class SupportsRound(Generic[_T]):
+ at runtime
+class SupportsRound(Protocol[_T_co]):
     @abstractmethod
-    def __round__(self, ndigits: int = ...) -> _T: ...
+    def __round__(self, ndigits: int = ...) -> _T_co: ...
 
-class Reversible(Generic[_T_co]):
+ at runtime
+class Reversible(Protocol[_T_co]):
     @abstractmethod
     def __reversed__(self) -> Iterator[_T_co]: ...
 
-class Sized(metaclass=ABCMeta):
+ at runtime
+class Sized(Protocol, metaclass=ABCMeta):
     @abstractmethod
     def __len__(self) -> int: ...
 
-class Hashable(metaclass=ABCMeta):
+ at runtime
+class Hashable(Protocol, metaclass=ABCMeta):
     # TODO: This is special, in that a subclass of a hashable class may not be hashable
     #   (for example, list vs. object). It's not obvious how to represent this. This class
     #   is currently mostly useless for static checking.
     @abstractmethod
     def __hash__(self) -> int: ...
 
-class Iterable(Generic[_T_co]):
+ at runtime
+class Iterable(Protocol[_T_co]):
     @abstractmethod
     def __iter__(self) -> Iterator[_T_co]: ...
 
-class Iterator(Iterable[_T_co], Generic[_T_co]):
+ at runtime
+class Iterator(Iterable[_T_co], Protocol[_T_co]):
     @abstractmethod
     def __next__(self) -> _T_co: ...
     def __iter__(self) -> 'Iterator[_T_co]': ...
@@ -122,7 +137,7 @@ class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]):
 
     @abstractmethod
     def throw(self, typ: Type[BaseException], val: Optional[BaseException] = ...,
-              tb: Optional[TracebackType] = ...) -> None: ...
+              tb: Optional[TracebackType] = ...) -> _T_co: ...
 
     @abstractmethod
     def close(self) -> None: ...
@@ -139,7 +154,8 @@ class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]):
 # Awaitable, AsyncIterator, AsyncIterable, Coroutine, Collection.
 # See https: //github.com/python/typeshed/issues/655 for why this is not easy.
 
-class Awaitable(Generic[_T_co]):
+ at runtime
+class Awaitable(Protocol[_T_co]):
     @abstractmethod
     def __await__(self) -> Generator[Any, None, _T_co]: ...
 
@@ -149,7 +165,7 @@ class Coroutine(Awaitable[_V_co], Generic[_T_co, _T_contra, _V_co]):
 
     @abstractmethod
     def throw(self, typ: Type[BaseException], val: Optional[BaseException] = ...,
-              tb: Optional[TracebackType] = ...) -> None: ...
+              tb: Optional[TracebackType] = ...) -> _T_co: ...
 
     @abstractmethod
     def close(self) -> None: ...
@@ -161,12 +177,14 @@ class AwaitableGenerator(Generator[_T_co, _T_contra, _V_co], Awaitable[_V_co],
                          Generic[_T_co, _T_contra, _V_co, _S]):
     pass
 
-class AsyncIterable(Generic[_T_co]):
+ at runtime
+class AsyncIterable(Protocol[_T_co]):
     @abstractmethod
     def __aiter__(self) -> 'AsyncIterator[_T_co]': ...
 
+ at runtime
 class AsyncIterator(AsyncIterable[_T_co],
-                    Generic[_T_co]):
+                    Protocol[_T_co]):
     @abstractmethod
     def __anext__(self) -> Awaitable[_T_co]: ...
     def __aiter__(self) -> 'AsyncIterator[_T_co]': ...
@@ -180,8 +198,8 @@ if sys.version_info >= (3, 6):
         def asend(self, value: _T_contra) -> Awaitable[_T_co]: ...
 
         @abstractmethod
-        def athrow(self, typ: Type[BaseException], val: Optional[BaseException] = None,
-                   tb: Any = None) -> Awaitable[_T_co]: ...
+        def athrow(self, typ: Type[BaseException], val: Optional[BaseException] = ...,
+                   tb: Any = ...) -> Awaitable[_T_co]: ...
 
         @abstractmethod
         def aclose(self) -> Awaitable[_T_co]: ...
@@ -194,16 +212,19 @@ if sys.version_info >= (3, 6):
         ag_frame = ...  # type: FrameType
         ag_running = ...  # type: bool
 
-class Container(Generic[_T_co]):
+ at runtime
+class Container(Protocol[_T_co]):
     @abstractmethod
     def __contains__(self, x: object) -> bool: ...
 
 
 if sys.version_info >= (3, 6):
-    class Collection(Sized, Iterable[_T_co], Container[_T_co], Generic[_T_co]): ...
+    @runtime
+    class Collection(Sized, Iterable[_T_co], Container[_T_co], Protocol[_T_co]): ...
     _Collection = Collection
 else:
-    class _Collection(Sized, Iterable[_T_co], Container[_T_co], Generic[_T_co]): ...
+    @runtime
+    class _Collection(Sized, Iterable[_T_co], Container[_T_co], Protocol[_T_co]): ...
 
 class Sequence(_Collection[_T_co], Reversible[_T_co], Generic[_T_co]):
     @overload
@@ -214,7 +235,7 @@ class Sequence(_Collection[_T_co], Reversible[_T_co], Generic[_T_co]):
     def __getitem__(self, s: slice) -> Sequence[_T_co]: ...
     # Mixin methods
     if sys.version_info >= (3, 5):
-        def index(self, x: Any, start: int = 0, end: int = 0) -> int: ...
+        def index(self, x: Any, start: int = ..., end: int = ...) -> int: ...
     else:
         def index(self, x: Any) -> int: ...
     def count(self, x: Any) -> int: ...
@@ -289,14 +310,16 @@ class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]):
     def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_VT_co]: ...
 
-class ContextManager(Generic[_T_co]):
+ at runtime
+class ContextManager(Protocol[_T_co]):
     def __enter__(self) -> _T_co: ...
     def __exit__(self, exc_type: Optional[Type[BaseException]],
                  exc_value: Optional[BaseException],
                  traceback: Optional[TracebackType]) -> Optional[bool]: ...
 
 if sys.version_info >= (3, 5):
-    class AsyncContextManager(Generic[_T_co]):
+    @runtime
+    class AsyncContextManager(Protocol[_T_co]):
         def __aenter__(self) -> Awaitable[_T_co]: ...
         def __aexit__(self, exc_type: Optional[Type[BaseException]],
                       exc_value: Optional[BaseException],
@@ -520,7 +543,10 @@ class NamedTuple(tuple):
     @classmethod
     def _make(cls, iterable: Iterable[Any]) -> NamedTuple: ...
 
-    def _asdict(self) -> dict: ...
+    if sys.version_info >= (3, 1):
+        def _asdict(self) -> collections.OrderedDict[str, Any]: ...
+    else:
+        def _asdict(self) -> Dict[str, Any]: ...
     def _replace(self, **kwargs: Any) -> NamedTuple: ...
 
 def NewType(name: str, tp: Type[_T]) -> Type[_T]: ...
diff --git a/typeshed/stdlib/3/unittest/mock.pyi b/typeshed/stdlib/3/unittest/mock.pyi
index 2002f89..4809325 100644
--- a/typeshed/stdlib/3/unittest/mock.pyi
+++ b/typeshed/stdlib/3/unittest/mock.pyi
@@ -38,9 +38,9 @@ if sys.version_info >= (3, 3):
     # expecting other classes (as is Mock's purpose)
     class NonCallableMock(Any):  # type: ignore
         def __new__(cls, *args: Any, **kw: Any) -> Any: ...
-        def __init__(self, spec: Optional[Any] = None, wraps: Optional[Any] = None, name: Optional[Any] = None, spec_set: Optional[Any] = None, parent: Optional[Any] = None, _spec_state: Optional[Any] = None, _new_name: Any = ..., _new_parent: Optional[Any] = None, _spec_as_instance: Any = False, _eat_self: Optional[Any] = None, unsafe: Any = False, **kwargs: Any) -> None: ...
+        def __init__(self, spec: Optional[Any] = ..., wraps: Optional[Any] = ..., name: Optional[Any] = ..., spec_set: Optional[Any] = ..., parent: Optional[Any] = ..., _spec_state: Optional[Any] = ..., _new_name: Any = ..., _new_parent: Optional[Any] = ..., _spec_as_instance: Any = ..., _eat_self: Optional[Any] = ..., unsafe: Any = ..., **kwargs: Any) -> None: ...
         def attach_mock(self, mock: Any, attribute: Any) -> Any: ...
-        def mock_add_spec(self, spec: Any, spec_set: Any = False) -> Any: ...
+        def mock_add_spec(self, spec: Any, spec_set: Any = ...) -> Any: ...
         return_value = ...  # type: Any
         __class__ = ...  # type: type
         called = ...  # type: Any
@@ -50,7 +50,7 @@ if sys.version_info >= (3, 3):
         mock_calls = ...  # type: Any
         side_effect = ...  # type: Any
         method_calls = ...  # type: Any
-        def reset_mock(self, visited: Optional[bool] = None) -> None: ...
+        def reset_mock(self, visited: Optional[bool] = ...) -> None: ...
         def configure_mock(self, **kwargs: Any) -> None: ...
         def __getattr__(self, name: Any) -> Any: ...
         def __dir__(self) -> Any: ...
@@ -59,12 +59,12 @@ if sys.version_info >= (3, 3):
         def assert_not_called(_mock_self) -> None: ...
         def assert_called_with(_mock_self, *args: Any, **kwargs: Any) -> None: ...
         def assert_called_once_with(_mock_self, *args: Any, **kwargs: Any) -> None: ...
-        def assert_has_calls(self, calls: Any, any_order: bool = False) -> None: ...
+        def assert_has_calls(self, calls: Any, any_order: bool = ...) -> None: ...
         def assert_any_call(self, *args: Any, **kwargs: Any) -> None: ...
 
     class CallableMixin(Base):
         side_effect = ...  # type: Any
-        def __init__(self, spec: Optional[Any] = None, side_effect: Optional[Any] = None, return_value: Any = ..., wraps: Optional[Any] = None, name: Optional[Any] = None, spec_set: Optional[Any] = None, parent: Optional[Any] = None, _spec_state: Optional[Any] = None, _new_name: Any = ..., _new_parent: Optional[Any] = None, **kwargs: Any) -> None: ...
+        def __init__(self, spec: Optional[Any] = ..., side_effect: Optional[Any] = ..., return_value: Any = ..., wraps: Optional[Any] = ..., name: Optional[Any] = ..., spec_set: Optional[Any] = ..., parent: Optional[Any] = ..., _spec_state: Optional[Any] = ..., _new_name: Any = ..., _new_parent: Optional[Any] = ..., **kwargs: Any) -> None: ...
         def __call__(_mock_self, *args: Any, **kwargs: Any) -> Any: ...
 
     class Mock(CallableMixin, NonCallableMock):
@@ -77,7 +77,7 @@ if sys.version_info >= (3, 3):
         new = ...  # type: Any
         new_callable = ...  # type: Any
         spec = ...  # type: Any
-        create = ...  # type: Any
+        create = ...  # type: bool
         has_local = ...  # type: Any
         spec_set = ...  # type: Any
         autospec = ...  # type: Any
@@ -98,8 +98,9 @@ if sys.version_info >= (3, 3):
         def stop(self) -> Any: ...
 
     class _patcher:
-        def __call__(self, target: Any, new: Optional[Any] = None, spec: Optional[Any] = None, create: Any = False, spec_set: Optional[Any] = None, autospec: Optional[Any] = None, new_callable: Optional[Any] = None, **kwargs: Any) -> Any: ...
-        def object(self, target: Any, attribute: str, new: Optional[Any] = None, spec: Optional[Any] = None, create: Any = False, spec_set: Optional[Any] = None, autospec: Optional[Any] = None, new_callable: Optional[Any] = None, **kwargs: Any) -> _patch: ...
+        def __call__(self, target: Any, new: Optional[Any] = ..., spec: Optional[Any] = ..., create: bool = ..., spec_set: Optional[Any] = ..., autospec: Optional[Any] = ..., new_callable: Optional[Any] = ..., **kwargs: Any) -> Any: ...
+        def object(self, target: Any, attribute: str, new: Optional[Any] = ..., spec: Optional[Any] = ..., create: bool = ..., spec_set: Optional[Any] = ..., autospec: Optional[Any] = ..., new_callable: Optional[Any] = ..., **kwargs: Any) -> _patch: ...
+        def multiple(self, target: Any, spec: Optional[Any] = ..., create: bool = ..., spec_set: Optional[Any] = ..., autospec: Optional[Any] = ..., new_callable: Optional[Any] = ..., **kwargs: Any) -> Any: ...
 
     patch = ...  # type: _patcher
 
@@ -107,7 +108,7 @@ if sys.version_info >= (3, 3):
         in_dict = ...  # type: Any
         values = ...  # type: Any
         clear = ...  # type: Any
-        def __init__(self, in_dict: Any, values: Any = ..., clear: Any = False, **kwargs: Any) -> None: ...
+        def __init__(self, in_dict: Any, values: Any = ..., clear: Any = ..., **kwargs: Any) -> None: ...
         def __call__(self, f: Any) -> Any: ...
         def decorate_class(self, klass: Any) -> Any: ...
         def __enter__(self) -> Any: ...
@@ -120,11 +121,11 @@ if sys.version_info >= (3, 3):
 
     class NonCallableMagicMock(MagicMixin, NonCallableMock):
         def __init__(self) -> None: ...
-        def mock_add_spec(self, spec: Any, spec_set: Any = False) -> Any: ...
+        def mock_add_spec(self, spec: Any, spec_set: Any = ...) -> Any: ...
 
     class MagicMock(MagicMixin, Mock):
         def __init__(self, spec: Any = ..., spec_set: Any = ..., side_effect: Any = ..., return_value: Any = ..., wraps: Any = ..., name: Any = ..., **kwargs: Any) -> None: ...
-        def mock_add_spec(self, spec: Any, spec_set: Any = False) -> Any: ...
+        def mock_add_spec(self, spec: Any, spec_set: Any = ...) -> Any: ...
 
     class MagicProxy:
         name = ...  # type: Any
@@ -132,7 +133,7 @@ if sys.version_info >= (3, 3):
         def __init__(self, name: Any, parent: Any) -> None: ...
         def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
         def create_mock(self) -> Any: ...
-        def __get__(self, obj: Any, _type: Optional[Any] = None) -> Any: ...
+        def __get__(self, obj: Any, _type: Optional[Any] = ...) -> Any: ...
 
     class _ANY:
         def __eq__(self, other: Any) -> bool: ...
@@ -141,11 +142,11 @@ if sys.version_info >= (3, 3):
     ANY = ...  # type: Any
 
     class _Call(tuple):
-        def __new__(cls, value: Any = ..., name: Optional[Any] = None, parent: Optional[Any] = None, two: bool = False, from_kall: bool = True) -> Any: ...
+        def __new__(cls, value: Any = ..., name: Optional[Any] = ..., parent: Optional[Any] = ..., two: bool = ..., from_kall: bool = ...) -> Any: ...
         name = ...  # type: Any
         parent = ...  # type: Any
         from_kall = ...  # type: Any
-        def __init__(self, value: Any = ..., name: Optional[Any] = None, parent: Optional[Any] = None, two: bool = False, from_kall: bool = True) -> None: ...
+        def __init__(self, value: Any = ..., name: Optional[Any] = ..., parent: Optional[Any] = ..., two: bool = ..., from_kall: bool = ...) -> None: ...
         def __eq__(self, other: Any) -> bool: ...
         __ne__ = ...  # type: Any
         def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
@@ -156,7 +157,7 @@ if sys.version_info >= (3, 3):
 
     call = ...  # type: Any
 
-    def create_autospec(spec: Any, spec_set: Any = False, instance: Any = False, _parent: Optional[Any] = None, _name: Optional[Any] = None, **kwargs: Any) -> Any: ...
+    def create_autospec(spec: Any, spec_set: Any = ..., instance: Any = ..., _parent: Optional[Any] = ..., _name: Optional[Any] = ..., **kwargs: Any) -> Any: ...
 
     class _SpecState:
         spec = ...  # type: Any
@@ -165,9 +166,9 @@ if sys.version_info >= (3, 3):
         parent = ...  # type: Any
         instance = ...  # type: Any
         name = ...  # type: Any
-        def __init__(self, spec: Any, spec_set: Any = False, parent: Optional[Any] = None, name: Optional[Any] = None, ids: Optional[Any] = None, instance: Any = False) -> None: ...
+        def __init__(self, spec: Any, spec_set: Any = ..., parent: Optional[Any] = ..., name: Optional[Any] = ..., ids: Optional[Any] = ..., instance: Any = ...) -> None: ...
 
-    def mock_open(mock: Optional[Any] = None, read_data: Any = ...) -> Any: ...
+    def mock_open(mock: Optional[Any] = ..., read_data: Any = ...) -> Any: ...
 
     class PropertyMock(Mock):
         def __get__(self, obj: Any, obj_type: Any) -> Any: ...
diff --git a/typeshed/stdlib/3/urllib/request.pyi b/typeshed/stdlib/3/urllib/request.pyi
index 8548a88..8383642 100644
--- a/typeshed/stdlib/3/urllib/request.pyi
+++ b/typeshed/stdlib/3/urllib/request.pyi
@@ -68,7 +68,7 @@ class Request:
 
 class OpenerDirector:
     def add_handler(self, handler: BaseHandler) -> None: ...
-    def open(self, url: Union[str, Request], data: Optional[bytes] = None,
+    def open(self, url: Union[str, Request], data: Optional[bytes] = ...,
              timeout: float = ...) -> _UrlopenRet: ...
     def error(self, proto: str, *args: Any) -> _UrlopenRet: ...
 
@@ -159,7 +159,7 @@ class HTTPHandler(BaseHandler):
     def http_open(self, req: Request) -> _UrlopenRet: ...
 
 class HTTPSHandler(BaseHandler):
-    def __init__(self, debuglevel: int = 0,
+    def __init__(self, debuglevel: int = ...,
                  context: Optional[ssl.SSLContext] = ...,
                  check_hostname: bool = ...) -> None: ...
     def https_open(self, req: Request) -> _UrlopenRet: ...
diff --git a/typeshed/stdlib/3/wsgiref/types.pyi b/typeshed/stdlib/3/wsgiref/types.pyi
index f6acc2e..1c0d350 100644
--- a/typeshed/stdlib/3/wsgiref/types.pyi
+++ b/typeshed/stdlib/3/wsgiref/types.pyi
@@ -15,19 +15,20 @@
 # you need to use 'WSGIApplication' and not simply WSGIApplication when type
 # hinting your code.  Otherwise Python will raise NameErrors.
 
-from typing import Callable, Dict, Iterable, List, Optional, Tuple, Type, Union
+from typing import Callable, Dict, Iterable, List, Optional, Tuple, Type, Union, Any
 from types import TracebackType
 
 _exc_info = Tuple[Optional[Type[BaseException]],
                   Optional[BaseException],
                   Optional[TracebackType]]
+WSGIEnvironment = Dict[str, Any]
 WSGIApplication = Callable[
     [
-        Dict[str, str],
+        WSGIEnvironment,
         Union[
             Callable[[str, List[Tuple[str, str]]], Callable[[Union[bytes, str]], None]],
             Callable[[str, List[Tuple[str, str]], _exc_info], Callable[[Union[bytes, str]], None]]
         ]
     ],
-    Iterable[Union[bytes, str]],
+    Iterable[Union[bytes, str]]
 ]
diff --git a/typeshed/tests/pytype_test.py b/typeshed/tests/pytype_test.py
index 3d6f363..f90c481 100755
--- a/typeshed/tests/pytype_test.py
+++ b/typeshed/tests/pytype_test.py
@@ -4,7 +4,15 @@
 Depends on mypy and pytype being installed.
 
 If pytype is installed:
-    1. For every pyi, run "pytd <foo.pyi>" in a separate process
+    1. For every pyi, do nothing if it is in pytype_blacklist.txt.
+    2. If the blacklist line has a "# parse only" comment run
+      "pytd <foo.pyi>" in a separate process.
+    3. If the file is not in the blacklist, run
+      "pytype --typeshed-location=typeshed_location --module-name=foo \
+      --convert-to-pickle=tmp_file <foo.pyi>".
+Option two will parse the file, mostly checking syntactical correctness.
+Option three will load the file and all the builtins and typeshed
+dependencies. This will also discover incorrect usage of imported modules.
 """
 
 import os
@@ -25,32 +33,44 @@ def main():
     code, runs = pytype_test(args)
 
     if code:
-        print("--- exit status %d ---" % code)
+        print('--- exit status %d ---' % code)
         sys.exit(code)
     if not runs:
-        print("--- nothing to do; exit 1 ---")
+        print('--- nothing to do; exit 1 ---')
         sys.exit(1)
 
 
 def load_blacklist():
     filename = os.path.join(os.path.dirname(__file__), "pytype_blacklist.txt")
-    regex = r"^\s*([^\s#]+)\s*(?:#.*)?$"
+    skip_re = re.compile(r'^\s*([^\s#]+)\s*(?:#.*)?$')
+    parse_only_re = re.compile(r'^\s*([^\s#]+)\s*#\s*parse only\s*')
+    skip = []
+    parse_only = []
 
     with open(filename) as f:
-        return re.findall(regex, f.read(), flags=re.M)
+        for line in f:
+            parse_only_match = parse_only_re.match(line)
+            skip_match = skip_re.match(line)
+            if parse_only_match:
+                parse_only.append(parse_only_match.group(1))
+            elif skip_match:
+                skip.append(skip_match.group(1))
 
+    return skip, parse_only
 
-class PytdRun(object):
+
+class BinaryRun(object):
     def __init__(self, args, dry_run=False):
         self.args = args
+
         self.dry_run = dry_run
         self.results = None
 
         if dry_run:
-            self.results = (0, "", "")
+            self.results = (0, '', '')
         else:
             self.proc = subprocess.Popen(
-                ["pytd"] + args,
+                self.args,
                 stdout=subprocess.PIPE,
                 stderr=subprocess.PIPE)
 
@@ -63,29 +83,54 @@ class PytdRun(object):
         return self.results
 
 
+def _get_module_name(filename):
+    """Converts a filename stdblib/m.n/module/foo to module.foo."""
+    return '.'.join(filename.split(os.path.sep)[2:]).replace(
+        '.pyi', '').replace('.__init__', '')
+
+
 def pytype_test(args):
     try:
-        PytdRun(["-h"]).communicate()
+        BinaryRun(['pytd', '-h']).communicate()
     except OSError:
-        print("Cannot run pytd. Did you install pytype?")
+        print('Cannot run pytd. Did you install pytype?')
         return 0, 0
 
-    wanted = re.compile(r"stdlib/.*\.pyi$")
-    skipped = re.compile("(%s)$" % "|".join(load_blacklist()))
-    files = []
+    skip, parse_only = load_blacklist()
+    wanted = re.compile(r'stdlib/.*\.pyi$')
+    skipped = re.compile('(%s)$' % '|'.join(skip))
+    parse_only = re.compile('(%s)$' % '|'.join(parse_only))
+
+    pytype_run = []
+    pytd_run = []
 
-    for root, _, filenames in os.walk("stdlib"):
+    for root, _, filenames in os.walk('stdlib'):
         for f in sorted(filenames):
             f = os.path.join(root, f)
-            if wanted.search(f) and not skipped.search(f):
-                files.append(f)
+            if wanted.search(f):
+                if parse_only.search(f):
+                    pytd_run.append(f)
+                elif not skipped.search(f):
+                    pytype_run.append(f)
 
     running_tests = collections.deque()
     max_code, runs, errors = 0, 0, 0
-    print("Running pytype tests...")
+    files = pytype_run + pytd_run
     while 1:
         while files and len(running_tests) < args.num_parallel:
-            test_run = PytdRun([files.pop()], dry_run=args.dry_run)
+            f = files.pop()
+            if f in pytype_run:
+                test_run = BinaryRun(
+                    ['pytype',
+                     '--typeshed-location=%s' % os.getcwd(),
+                     '--module-name=%s' % _get_module_name(f),
+                     '--convert-to-pickle=%s' % os.devnull,
+                     f],
+                    dry_run=args.dry_run)
+            elif f in pytd_run:
+                test_run = BinaryRun(['pytd', f], dry_run=args.dry_run)
+            else:
+                raise ValueError('Unknown action for file: %s' % f)
             running_tests.append(test_run)
 
         if not running_tests:
@@ -97,13 +142,11 @@ def pytype_test(args):
         runs += 1
 
         if code:
-            print("pytd error processing \"%s\":" % test_run.args[0])
             print(stderr)
             errors += 1
 
-    print("Ran pytype with %d pyis, got %d errors." % (runs, errors))
+    print('Ran pytype with %d pyis, got %d errors.' % (runs, errors))
     return max_code, runs
 
-
 if __name__ == '__main__':
     main()
diff --git a/typeshed/third_party/2/croniter.pyi b/typeshed/third_party/2/croniter.pyi
deleted file mode 100644
index c164213..0000000
--- a/typeshed/third_party/2/croniter.pyi
+++ /dev/null
@@ -1,23 +0,0 @@
-from typing import Any
-
-class croniter:
-    MONTHS_IN_YEAR = ...  # type: Any
-    RANGES = ...  # type: Any
-    DAYS = ...  # type: Any
-    ALPHACONV = ...  # type: Any
-    LOWMAP = ...  # type: Any
-    bad_length = ...  # type: Any
-    tzinfo = ...  # type: Any
-    cur = ...  # type: Any
-    exprs = ...  # type: Any
-    expanded = ...  # type: Any
-    def __init__(self, expr_format, start_time=..., ret_type=...) -> None: ...
-    def get_next(self, ret_type=...): ...
-    def get_prev(self, ret_type=...): ...
-    def get_current(self, ret_type=...): ...
-    def __iter__(self): ...
-    __next__ = ...  # type: Any
-    def all_next(self, ret_type=...): ...
-    def all_prev(self, ret_type=...): ...
-    iter = ...  # type: Any
-    def is_leap(self, year): ...
diff --git a/typeshed/third_party/2/dateutil/parser.pyi b/typeshed/third_party/2/dateutil/parser.pyi
index 855538b..f7b90b0 100644
--- a/typeshed/third_party/2/dateutil/parser.pyi
+++ b/typeshed/third_party/2/dateutil/parser.pyi
@@ -1,8 +1,9 @@
-from typing import List, Tuple, Optional, Callable, Union, IO, Any, Dict
+from typing import List, Tuple, Optional, Callable, Union, IO, Any, Dict, Mapping, Text
 from datetime import datetime, tzinfo
 
-__all__ = ...  # type: List[str]
+_FileOrStr = Union[bytes, Text, IO[str], IO[Any]]
 
+__all__ = ...  # type: List[str]
 
 class parserinfo(object):
     JUMP = ...  # type: List[str]
@@ -15,25 +16,30 @@ class parserinfo(object):
     TZOFFSET = ...  # type: Dict[str, int]
 
     def __init__(self, dayfirst: bool=..., yearfirst: bool=...) -> None: ...
-    def jump(self, name: unicode) -> bool: ...
-    def weekday(self, name: unicode) -> Union[int, None]: ...
-    def month(self, name: unicode) -> Union[int, None]: ...
-    def hms(self, name: unicode) -> Union[int, None]: ...
-    def ampm(self, name: unicode) -> Union[int, None]: ...
-    def pertain(self, name: unicode) -> bool: ...
-    def utczone(self, name: unicode) -> bool: ...
-    def tzoffset(self, name: unicode) -> Union[int, None]: ...
+    def jump(self, name: Text) -> bool: ...
+    def weekday(self, name: Text) -> Optional[int]: ...
+    def month(self, name: Text) -> Optional[int]: ...
+    def hms(self, name: Text) -> Optional[int]: ...
+    def ampm(self, name: Text) -> Optional[int]: ...
+    def pertain(self, name: Text) -> bool: ...
+    def utczone(self, name: Text) -> bool: ...
+    def tzoffset(self, name: Text) -> Optional[int]: ...
     def convertyear(self, year: int) -> int: ...
     def validate(self, res: datetime) -> bool: ...
 
 class parser(object):
-    def __init__(self, info: Optional[parserinfo] = None) -> None: ...
-    def parse(self, timestr: Union[str, unicode, IO[unicode]],
-              default: Optional[datetime] = None,
-              ignoretz: bool = ..., tzinfos: Optional[Dict[Union[str, unicode], tzinfo]] = None,
+    def __init__(self, info: Optional[parserinfo] = ...) -> None: ...
+    def parse(self, timestr: _FileOrStr,
+              default: Optional[datetime] = ...,
+              ignoretz: bool = ..., tzinfos: Optional[Mapping[Text, tzinfo]] = ...,
               **kwargs: Any) -> datetime: ...
 
 DEFAULTPARSER = ...  # type: parser
-def parse(timestr: Union[str, unicode, IO[unicode]],
-          parserinfo: Optional[parserinfo] = None,
-          **kwargs: Any) -> datetime: ...
+def parse(timestr: _FileOrStr, parserinfo: Optional[parserinfo] = ..., **kwargs: Any) -> datetime: ...
+class _tzparser: ...
+
+DEFAULTTZPARSER = ...  # type: _tzparser
+
+class InvalidDatetimeError(ValueError): ...
+class InvalidDateError(InvalidDatetimeError): ...
+class InvalidTimeError(InvalidDatetimeError): ...
diff --git a/typeshed/third_party/2/dateutil/relativedelta.pyi b/typeshed/third_party/2/dateutil/relativedelta.pyi
index ea6592b..71b3aee 100644
--- a/typeshed/third_party/2/dateutil/relativedelta.pyi
+++ b/typeshed/third_party/2/dateutil/relativedelta.pyi
@@ -45,15 +45,13 @@ class relativedelta(object):
                  hour: Optional[int]=..., minute: Optional[int]=...,
                  second: Optional[int]=...,
                  microsecond: Optional[int]=...) -> None: ...
-
     @property
     def weeks(self) -> int: ...
-
     @weeks.setter
     def weeks(self, value: int) -> None: ...
-
     def normalized(self: _SelfT) -> _SelfT: ...
-
+    # TODO: use Union when mypy will handle it properly in overloaded operator
+    # methods (#2129, #1442, #1264 in mypy)
     @overload
     def __add__(self: _SelfT, other: relativedelta) -> _SelfT: ...
     @overload
@@ -61,31 +59,25 @@ class relativedelta(object):
     @overload
     def __add__(self, other: _DateT) -> _DateT: ...
     @overload
+    def __radd__(self: _SelfT, other: relativedelta) -> _SelfT: ...
+    @overload
     def __radd__(self: _SelfT, other: timedelta) -> _SelfT: ...
     @overload
     def __radd__(self, other: _DateT) -> _DateT: ...
     @overload
+    def __rsub__(self: _SelfT, other: relativedelta) -> _SelfT: ...
+    @overload
     def __rsub__(self: _SelfT, other: timedelta) -> _SelfT: ...
     @overload
     def __rsub__(self, other: _DateT) -> _DateT: ...
     def __sub__(self: _SelfT, other: relativedelta) -> _SelfT: ...
-
     def __neg__(self: _SelfT) -> _SelfT: ...
-
     def __bool__(self) -> bool: ...
-
     def __nonzero__(self) -> bool: ...
-
     def __mul__(self: _SelfT, other: SupportsFloat) -> _SelfT: ...
-
     def __rmul__(self: _SelfT, other: SupportsFloat) -> _SelfT: ...
-
     def __eq__(self, other) -> bool: ...
-
     def __ne__(self, other: object) -> bool: ...
-
     def __div__(self: _SelfT, other: SupportsFloat) -> _SelfT: ...
-
     def __truediv__(self: _SelfT, other: SupportsFloat) -> _SelfT: ...
-
     def __repr__(self) -> str: ...
diff --git a/typeshed/third_party/2/dateutil/tz/__init__.pyi b/typeshed/third_party/2/dateutil/tz/__init__.pyi
index f618e63..68cfb9e 100644
--- a/typeshed/third_party/2/dateutil/tz/__init__.pyi
+++ b/typeshed/third_party/2/dateutil/tz/__init__.pyi
@@ -1,5 +1,5 @@
 from .tz import (
-    tzutc as tzutz,
+    tzutc as tzutc,
     tzoffset as tzoffset,
     tzlocal as tzlocal,
     tzfile as tzfile,
diff --git a/typeshed/third_party/2/dateutil/tz/_common.pyi b/typeshed/third_party/2/dateutil/tz/_common.pyi
index 8a684e3..383218d 100644
--- a/typeshed/third_party/2/dateutil/tz/_common.pyi
+++ b/typeshed/third_party/2/dateutil/tz/_common.pyi
@@ -2,7 +2,7 @@ from typing import Any, Optional
 from datetime import datetime, tzinfo, timedelta
 
 def tzname_in_python2(namefunc): ...
-def enfold(dt, fold: int = ...): ...
+def enfold(dt: datetime, fold: int = ...): ...
 
 class _DatetimeWithFold(datetime):
     @property
diff --git a/typeshed/third_party/2/dateutil/tz/tz.pyi b/typeshed/third_party/2/dateutil/tz/tz.pyi
index 5fcd6d8..5fbc932 100644
--- a/typeshed/third_party/2/dateutil/tz/tz.pyi
+++ b/typeshed/third_party/2/dateutil/tz/tz.pyi
@@ -1,9 +1,11 @@
-from typing import Any, IO, List, Optional, Tuple, Union
+from typing import Any, Optional, Union, IO, Text, Tuple, List
 import datetime
 from ._common import tzname_in_python2 as tzname_in_python2, _tzinfo as _tzinfo
 from ._common import tzrangebase as tzrangebase, enfold as enfold
 from ..relativedelta import relativedelta
 
+_FileObj = Union[str, Text, IO[str], IO[Text]]
+
 ZERO = ...  # type: datetime.timedelta
 EPOCH = ...  # type: datetime.datetime
 EPOCHORDINAL = ...  # type: int
@@ -47,7 +49,7 @@ class _ttinfo:
     def __ne__(self, other): ...
 
 class tzfile(_tzinfo):
-    def __init__(self, fileobj: Union[unicode, str, IO[str], IO[unicode]], filename: Union[str, unicode, None] = ...) -> None: ...
+    def __init__(self, fileobj: _FileObj, filename: Optional[Text] = ...) -> None: ...
     def is_ambiguous(self, dt: Optional[datetime.datetime], idx: Optional[int] = ...) -> bool: ...
     def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
     def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
@@ -60,22 +62,22 @@ class tzfile(_tzinfo):
 
 class tzrange(tzrangebase):
     hasdst = ...  # type: bool
-    def __init__(self, stdabbr: Union[str, unicode], stdoffset: Union[int, datetime.timedelta, None] = ..., dstabbr: Union[str, unicode, None] = ..., dstoffset: Union[int, datetime.timedelta, None] = ..., start: Optional[relativedelta] = ..., end: Optional[relativedelta] = ...) -> None: ...
+    def __init__(self, stdabbr: Text, stdoffset: Union[int, datetime.timedelta, None] = ..., dstabbr: Optional[Text] = ..., dstoffset: Union[int, datetime.timedelta, None] = ..., start: Optional[relativedelta] = ..., end: Optional[relativedelta] = ...) -> None: ...
     def transitions(self, year: int) -> Tuple[datetime.datetime, datetime.datetime]: ...
     def __eq__(self, other): ...
 
 class tzstr(tzrange):
     hasdst = ...  # type: bool
-    def __init__(self, s, posix_offset: bool = ...) -> None: ...
+    def __init__(self, s: Union[bytes, _FileObj], posix_offset: bool = ...) -> None: ...
 
 class tzical:
-    def __init__(self, fileobj: Union[unicode, str, IO[str], IO[unicode]]) -> None: ...
+    def __init__(self, fileobj: _FileObj) -> None: ...
     def keys(self): ...
     def get(self, tzid: Optional[Any] = ...): ...
 
 TZFILES = ...  # type: List[str]
 TZPATHS = ...  # type: List[str]
 
-def gettz(name: Union[str, unicode, None] = ...) -> Optional[datetime.tzinfo]: ...
+def gettz(name: Optional[Text] = ...) -> Optional[datetime.tzinfo]: ...
 def datetime_exists(dt: datetime.datetime, tz: Optional[datetime.tzinfo] = ...) -> bool: ...
 def datetime_ambiguous(dt: datetime.datetime, tz: Optional[datetime.tzinfo] = ...) -> bool: ...
diff --git a/typeshed/third_party/2/enum.pyi b/typeshed/third_party/2/enum.pyi
index ae8a91c..2bab1a8 100644
--- a/typeshed/third_party/2/enum.pyi
+++ b/typeshed/third_party/2/enum.pyi
@@ -1,8 +1,20 @@
-from typing import List, Any, TypeVar, Type, Iterable, Iterator
+from typing import List, Any, TypeVar, Union, Iterable, Iterator, TypeVar, Generic, Type, Sized, Reversible, Container, Mapping
+from abc import ABCMeta
 
 _T = TypeVar('_T', bound=Enum)
-class EnumMeta(type, Iterable[Enum]):
-    def __iter__(self: Type[_T]) -> Iterator[_T]: ...  # type: ignore
+_S = TypeVar('_S', bound=Type[Enum])
+
+# Note: EnumMeta actually subclasses type directly, not ABCMeta.
+# This is a temporary workaround to allow multiple creation of enums with builtins
+# such as str as mixins, which due to the handling of ABCs of builtin types, cause
+# spurious inconsistent metaclass structure. See #1595.
+class EnumMeta(ABCMeta, Iterable[Enum], Sized, Reversible[Enum], Container[Enum]):
+    def __iter__(self: Type[_T]) -> Iterator[_T]: ...
+    def __reversed__(self: Type[_T]) -> Iterator[_T]: ...
+    def __contains__(self, member: Any) -> bool: ...
+    def __getitem__(self: Type[_T], name: str) -> _T: ...
+    @property
+    def __members__(self: Type[_T]) -> Mapping[str, _T]: ...
 
 class Enum(metaclass=EnumMeta):
     def __new__(cls: Type[_T], value: Any) -> _T: ...
@@ -18,4 +30,4 @@ class Enum(metaclass=EnumMeta):
 
 class IntEnum(int, Enum): ...
 
-def unique(enumeration: _T) -> _T: ...
+def unique(enumeration: _S) -> _S: ...
diff --git a/typeshed/third_party/2/google/protobuf/message_factory.pyi b/typeshed/third_party/2/google/protobuf/message_factory.pyi
index 24758ff..fc26f2c 100644
--- a/typeshed/third_party/2/google/protobuf/message_factory.pyi
+++ b/typeshed/third_party/2/google/protobuf/message_factory.pyi
@@ -6,7 +6,7 @@ from .descriptor_pool import DescriptorPool
 
 class MessageFactory:
     pool = ...  # type: Any
-    def __init__(self, pool: Optional[DescriptorPool] = None) -> None: ...
+    def __init__(self, pool: Optional[DescriptorPool] = ...) -> None: ...
     def GetPrototype(self, descriptor: Descriptor) -> Type[Message]: ...
     def GetMessages(self, files: Iterable[str]) -> Dict[str, Type[Message]]: ...
 
diff --git a/typeshed/third_party/2/itsdangerous.pyi b/typeshed/third_party/2/itsdangerous.pyi
index 075a2e4..0f65636 100644
--- a/typeshed/third_party/2/itsdangerous.pyi
+++ b/typeshed/third_party/2/itsdangerous.pyi
@@ -26,15 +26,15 @@ class BadData(Exception):
 
 class BadPayload(BadData):
     original_error = ...  # type: Optional[Exception]
-    def __init__(self, message: str, original_error: Optional[Exception]=None) -> None: ...
+    def __init__(self, message: str, original_error: Optional[Exception] = ...) -> None: ...
 
 class BadSignature(BadData):
     payload = ...  # type: Optional[Any]
-    def __init__(self, message: str, payload: Optional[Any]=None) -> None: ...
+    def __init__(self, message: str, payload: Optional[Any] = ...) -> None: ...
 
 class BadTimeSignature(BadSignature):
     date_signed = ...  # type: Optional[int]
-    def __init__(self, message, payload: Optional[Any]=None, date_signed: Optional[int]=None) -> None: ...
+    def __init__(self, message, payload: Optional[Any] = ..., date_signed: Optional[int] = ...) -> None: ...
 
 class BadHeader(BadSignature):
     header = ...  # type: Any
@@ -58,7 +58,7 @@ class NoneAlgorithm(SigningAlgorithm):
 class HMACAlgorithm(SigningAlgorithm):
     default_digest_method = ...  # type: Callable
     digest_method = ...  # type: Callable
-    def __init__(self, digest_method: Optional[Callable]=None) -> None: ...
+    def __init__(self, digest_method: Optional[Callable] = ...) -> None: ...
     def get_signature(self, key: bytes_like, value: bytes_like) -> str: ...
 
 class Signer:
@@ -70,10 +70,10 @@ class Signer:
     key_derivation = ...  # type: str
     digest_method = ...  # type: Callable
     algorithm = ...  # type: SigningAlgorithm
-    def __init__(self, secret_key: bytes_like, salt: Optional[bytes_like]=None, sep: Optional[str]='',
-                 key_derivation: Optional[str]=None,
-                 digest_method: Optional[Callable]=None,
-                 algorithm: Optional[SigningAlgorithm]=None) -> None: ...
+    def __init__(self, secret_key: bytes_like, salt: Optional[bytes_like] = ..., sep: Optional[str]='',
+                 key_derivation: Optional[str] = ...,
+                 digest_method: Optional[Callable] = ...,
+                 algorithm: Optional[SigningAlgorithm] = ...) -> None: ...
     def derive_key(self) -> str: ...
     def get_signature(self, value: bytes_like) -> str: ...
     def sign(self, value: bytes_like) -> str: ...
@@ -85,8 +85,8 @@ class TimestampSigner(Signer):
     def get_timestamp(self) -> int: ...
     def timestamp_to_datetime(self, ts: int) -> datetime: ...
     def sign(self, value: bytes_like) -> str: ...
-    def unsign(self, value: str, max_age: Optional[int]=None, return_timestamp=False) -> Any: ...
-    def validate(self, signed_value: str, max_age: Optional[int]=None) -> bool: ...
+    def unsign(self, value: str, max_age: Optional[int] = ..., return_timestamp=False) -> Any: ...
+    def validate(self, signed_value: str, max_age: Optional[int] = ...) -> bool: ...
 
 class Serializer:
     default_serializer = ...  # type: Any
@@ -97,21 +97,21 @@ class Serializer:
     is_text_serializer = ...  # type: bool
     signer = ...  # type: Signer
     signer_kwargs = ...  # type: MutableMapping
-    def __init__(self, secret_key: bytes_like, salt: Optional[bytes_like]=b'', serializer=None, signer: Optional[Callable[..., Signer]]=None, signer_kwargs: Optional[MutableMapping]=None) -> None: ...
+    def __init__(self, secret_key: bytes_like, salt: Optional[bytes_like]=b'', serializer=None, signer: Optional[Callable[..., Signer]] = ..., signer_kwargs: Optional[MutableMapping] = ...) -> None: ...
     def load_payload(self, payload: Any, serializer=None) -> Any: ...
     def dump_payload(self, *args, **kwargs) -> str: ...
-    def make_signer(self, salt: Optional[bytes_like]=None) -> Signer: ...
-    def dumps(self, obj: Any, salt: Optional[bytes_like]=None) -> str: ...
-    def dump(self, obj: Any, f: IO[str], salt: Optional[bytes_like]=None) -> None: ...
-    def loads(self, s: str, salt: Optional[bytes_like]=None) -> Any: ...
-    def load(self, f: IO[str], salt: Optional[bytes_like]=None): ...
-    def loads_unsafe(self, s, salt: Optional[bytes_like]=None) -> Tuple[bool, Any]: ...
+    def make_signer(self, salt: Optional[bytes_like] = ...) -> Signer: ...
+    def dumps(self, obj: Any, salt: Optional[bytes_like] = ...) -> str: ...
+    def dump(self, obj: Any, f: IO[str], salt: Optional[bytes_like] = ...) -> None: ...
+    def loads(self, s: str, salt: Optional[bytes_like] = ...) -> Any: ...
+    def load(self, f: IO[str], salt: Optional[bytes_like] = ...): ...
+    def loads_unsafe(self, s, salt: Optional[bytes_like] = ...) -> Tuple[bool, Any]: ...
     def load_unsafe(self, f: IO[str], *args, **kwargs) -> Tuple[bool, Any]: ...
 
 class TimedSerializer(Serializer):
     default_signer = ...  # type: Callable[..., TimestampSigner]
-    def loads(self, s: str, salt: Optional[bytes_like]=None, max_age: Optional[int]=None, return_timestamp=False) -> Any: ...
-    def loads_unsafe(self, s: str, salt: Optional[bytes_like]=None, max_age: Optional[int]=None) -> Tuple[bool, Any]: ...
+    def loads(self, s: str, salt: Optional[bytes_like] = ..., max_age: Optional[int] = ..., return_timestamp=False) -> Any: ...
+    def loads_unsafe(self, s: str, salt: Optional[bytes_like] = ..., max_age: Optional[int] = ...) -> Tuple[bool, Any]: ...
 
 class JSONWebSignatureSerializer(Serializer):
     jws_algorithms = ...  # type: MutableMapping[str, SigningAlgorithm]
@@ -119,22 +119,22 @@ class JSONWebSignatureSerializer(Serializer):
     default_serializer = ...  # type: Any
     algorithm_name = ...  # type: str
     algorithm = ...  # type: Any
-    def __init__(self, secret_key: bytes_like, salt: Optional[bytes_like]=None, serializer=None, signer: Optional[Callable[..., Signer]]=None, signer_kwargs: Optional[MutableMapping]=None, algorithm_name: Optional[str]=None) -> None: ...
+    def __init__(self, secret_key: bytes_like, salt: Optional[bytes_like] = ..., serializer=None, signer: Optional[Callable[..., Signer]] = ..., signer_kwargs: Optional[MutableMapping] = ..., algorithm_name: Optional[str] = ...) -> None: ...
     def load_payload(self, payload: Any, return_header=False) -> Any: ...
     def dump_payload(self, *args, **kwargs) -> str: ...
     def make_algorithm(self, algorithm_name: str) -> SigningAlgorithm: ...
-    def make_signer(self, salt: Optional[bytes_like]=None, algorithm_name: Optional[str]=None) -> Signer: ...
+    def make_signer(self, salt: Optional[bytes_like] = ..., algorithm_name: Optional[str] = ...) -> Signer: ...
     def make_header(self, header_fields=Optional[MutableMapping]) -> MutableMapping: ...
-    def dumps(self, obj: Any, salt: Optional[bytes_like]=None, header_fields=Optional[MutableMapping]) -> str: ...
-    def loads(self, s: str, salt: Optional[bytes_like]=None, return_header=False) -> Any: ...
-    def loads_unsafe(self, s, salt: Optional[bytes_like]=None, return_header=False) -> Tuple[bool, Any]: ...
+    def dumps(self, obj: Any, salt: Optional[bytes_like] = ..., header_fields=Optional[MutableMapping]) -> str: ...
+    def loads(self, s: str, salt: Optional[bytes_like] = ..., return_header=False) -> Any: ...
+    def loads_unsafe(self, s, salt: Optional[bytes_like] = ..., return_header=False) -> Tuple[bool, Any]: ...
 
 class TimedJSONWebSignatureSerializer(JSONWebSignatureSerializer):
     DEFAULT_EXPIRES_IN = ...  # type: int
     expires_in = ...  # type: int
-    def __init__(self, secret_key: bytes_like, expires_in: Optional[int]=None, **kwargs) -> None: ...
+    def __init__(self, secret_key: bytes_like, expires_in: Optional[int] = ..., **kwargs) -> None: ...
     def make_header(self, header_fields=Optional[MutableMapping]) -> MutableMapping: ...
-    def loads(self, s: str, salt: Optional[bytes_like]=None, return_header=False) -> Any: ...
+    def loads(self, s: str, salt: Optional[bytes_like] = ..., return_header=False) -> Any: ...
     def get_issue_date(self, header: MutableMapping) -> Optional[datetime]: ...
     def now(self) -> int: ...
 
diff --git a/typeshed/third_party/2/selenium/webdriver/remote/webdriver.pyi b/typeshed/third_party/2/selenium/webdriver/remote/webdriver.pyi
index c8e565c..66c6e39 100644
--- a/typeshed/third_party/2/selenium/webdriver/remote/webdriver.pyi
+++ b/typeshed/third_party/2/selenium/webdriver/remote/webdriver.pyi
@@ -18,7 +18,7 @@ class WebDriver:
                  desired_capabilities: Capabilities=None,
                  browser_profile=None,
                  proxy=None,
-                 keep_alive: bool=False
+                 keep_alive: bool = ...
                  ) -> None: ...
     @property
     def mobile(self) -> Mobile: ...
@@ -29,7 +29,7 @@ class WebDriver:
     w3c = ...  # type: Any
     def start_session(self, desired_capabilities, browser_profile=None): ...
     def create_web_element(self, element_id: basestring) -> WebElement: ...
-    def execute(self, driver_command: basestring, params: Optional[Dict[basestring, Any]]=None) -> ExecuteResult: ...
+    def execute(self, driver_command: basestring, params: Optional[Dict[basestring, Any]] = ...) -> ExecuteResult: ...
     def get(self, url: basestring) -> None: ...
     @property
     def title(self) -> basestring: ...
diff --git a/typeshed/third_party/2/selenium/webdriver/remote/webelement.pyi b/typeshed/third_party/2/selenium/webdriver/remote/webelement.pyi
index fd94c56..6cc6d72 100644
--- a/typeshed/third_party/2/selenium/webdriver/remote/webelement.pyi
+++ b/typeshed/third_party/2/selenium/webdriver/remote/webelement.pyi
@@ -5,7 +5,7 @@ SizeDict = Dict[str, int]  # containing "height", "width"
 PointDict = Dict[str, int]  # containing "x", "y"
 
 class WebElement:
-    def __init__(self, parent: WebDriver, id_: Optional[basestring], w3c: bool=False) -> None: ...
+    def __init__(self, parent: WebDriver, id_: Optional[basestring], w3c: bool = ...) -> None: ...
     @property
     def tag_name(self) -> basestring: ...
     @property
diff --git a/typeshed/third_party/2/six/__init__.pyi b/typeshed/third_party/2/six/__init__.pyi
index bd8d97a..990e1d1 100644
--- a/typeshed/third_party/2/six/__init__.pyi
+++ b/typeshed/third_party/2/six/__init__.pyi
@@ -15,6 +15,7 @@ from mypy_extensions import NoReturn
 from __builtin__ import unichr as unichr
 from StringIO import StringIO as StringIO, StringIO as BytesIO
 from functools import wraps as wraps
+from . import moves
 
 
 _T = TypeVar('_T')
diff --git a/typeshed/third_party/2/werkzeug/wrappers.pyi b/typeshed/third_party/2/werkzeug/wrappers.pyi
index 1fb5668..98b2582 100644
--- a/typeshed/third_party/2/werkzeug/wrappers.pyi
+++ b/typeshed/third_party/2/werkzeug/wrappers.pyi
@@ -20,7 +20,7 @@ class BaseRequest:
     disable_data_descriptor = ...  # type: Any
     environ = ...  # type: Mapping[str, object]
     shallow = ...  # type: Any
-    def __init__(self, environ: Mapping[basestring, object], populate_request: bool=True, shallow: bool=False) -> None: ...
+    def __init__(self, environ: Mapping[basestring, object], populate_request: bool = ..., shallow: bool = ...) -> None: ...
     @property
     def url_charset(self) -> str: ...
     @classmethod
@@ -37,7 +37,7 @@ class BaseRequest:
     input_stream = ...  # type: Any
     args = ...  # type: ImmutableMultiDict
     def data(self): ...
-    def get_data(self, cache: bool=True, as_text: bool=False, parse_form_data: bool=False) -> str: ...
+    def get_data(self, cache: bool = ..., as_text: bool = ..., parse_form_data: bool = ...) -> str: ...
     form = ...  # type: ImmutableMultiDict
     values = ...  # type: CombinedMultiDict
     files = ...  # type: MultiDict
@@ -77,14 +77,14 @@ class BaseResponse:
     direct_passthrough = ...  # type: bool
     response = ...  # type: Iterable[str]
     def __init__(self,
-                 response: Optional[Union[Iterable[str], str]] = None,
-                 status: Optional[Union[basestring, int]] = None,
+                 response: Optional[Union[Iterable[str], str]] = ...,
+                 status: Optional[Union[basestring, int]] = ...,
                  headers: Optional[Union[Headers,
                                          Mapping[basestring, basestring],
                                          Sequence[Tuple[basestring, basestring]]]] = None,
-                 mimetype: Optional[basestring] = None,
-                 content_type: Optional[basestring] = None,
-                 direct_passthrough: Optional[bool] = False) -> None: ...
+                 mimetype: Optional[basestring] = ...,
+                 content_type: Optional[basestring] = ...,
+                 direct_passthrough: Optional[bool] = ...) -> None: ...
     def call_on_close(self, func): ...
     @classmethod
     def force_type(cls, response, environ=None): ...
diff --git a/typeshed/third_party/2and3/atomicwrites/__init__.pyi b/typeshed/third_party/2and3/atomicwrites/__init__.pyi
index ba6ae22..07edff6 100644
--- a/typeshed/third_party/2and3/atomicwrites/__init__.pyi
+++ b/typeshed/third_party/2and3/atomicwrites/__init__.pyi
@@ -1,16 +1,13 @@
-import os
-import sys
-import tempfile
-from typing import Any, AnyStr, Callable, ContextManager, IO, Iterator, Optional, Text
+from typing import AnyStr, Callable, ContextManager, IO, Optional, Text, Type
 
 def replace_atomic(src: AnyStr, dst: AnyStr) -> None: ...
 def move_atomic(src: AnyStr, dst: AnyStr) -> None: ...
 class AtomicWriter(object):
-    def __init__(self, path: AnyStr, mode: Text='w', overwrite: bool=False) -> None: ...
+    def __init__(self, path: AnyStr, mode: Text = ..., overwrite: bool = ...) -> None: ...
     def open(self) -> ContextManager[IO]: ...
     def _open(self, get_fileobject: Callable) -> ContextManager[IO]: ...
-    def get_fileobject(self, dir: Optional[AnyStr] = None, **kwargs) -> IO: ...
+    def get_fileobject(self, dir: Optional[AnyStr] = ..., **kwargs) -> IO: ...
     def sync(self, f: IO) -> None: ...
     def commit(self, f: IO) -> None: ...
     def rollback(self, f: IO) -> None: ...
-def atomic_write(path: AnyStr, writer_cls: type=AtomicWriter, **cls_kwargs) -> ContextManager[IO]: ...
+def atomic_write(path: AnyStr, writer_cls: Type[AtomicWriter] = ..., **cls_kwargs: object) -> ContextManager[IO]: ...
diff --git a/typeshed/third_party/2and3/boto/s3/__init__.pyi b/typeshed/third_party/2and3/boto/s3/__init__.pyi
index 2dcb333..d88955e 100644
--- a/typeshed/third_party/2and3/boto/s3/__init__.pyi
+++ b/typeshed/third_party/2and3/boto/s3/__init__.pyi
@@ -8,7 +8,7 @@ from boto.regioninfo import RegionInfo
 from typing import List, Type, Text
 
 class S3RegionInfo(RegionInfo):
-    def connect(self, name: Optional[Text] = None, endpoint: Optional[str] = None, connection_cls: Optional[Type[AWSAuthConnection]] = None, **kw_params) -> S3Connection: ...
+    def connect(self, name: Optional[Text] = ..., endpoint: Optional[str] = ..., connection_cls: Optional[Type[AWSAuthConnection]] = ..., **kw_params) -> S3Connection: ...
 
 def regions() -> List[S3RegionInfo]: ...
 def connect_to_region(region_name: Text, **kw_params): ...
diff --git a/typeshed/third_party/2and3/characteristic/__init__.pyi b/typeshed/third_party/2and3/characteristic/__init__.pyi
index f2903b9..20bd6e5 100644
--- a/typeshed/third_party/2and3/characteristic/__init__.pyi
+++ b/typeshed/third_party/2and3/characteristic/__init__.pyi
@@ -13,22 +13,22 @@ _T = TypeVar('_T')
 
 def attributes(
     attrs: Sequence[Union[AnyStr, Attribute]],
-    apply_with_cmp: bool = True,
-    apply_with_init: bool = True,
-    apply_with_repr: bool = True,
-    apply_immutable: bool = False,
-    store_attributes: Optional[Callable[[type, Attribute], Any]] = None,
+    apply_with_cmp: bool = ...,
+    apply_with_init: bool = ...,
+    apply_with_repr: bool = ...,
+    apply_immutable: bool = ...,
+    store_attributes: Optional[Callable[[type, Attribute], Any]] = ...,
     **kw: Optional[dict]) -> Callable[[Type[_T]], Type[_T]]: ...
 
 class Attribute:
     def __init__(
         self,
         name: AnyStr,
-        exclude_from_cmp: bool = False,
-        exclude_from_init: bool = False,
-        exclude_from_repr: bool = False,
-        exclude_from_immutable: bool = False,
-        default_value: Any = NOTHING,
-        default_factory: Optional[Callable[[None], Any]] = None,
-        instance_of: Optional[Any] = None,
-        init_aliaser: Optional[Callable[[AnyStr], AnyStr]] = strip_leading_underscores) -> None: ...
+        exclude_from_cmp: bool = ...,
+        exclude_from_init: bool = ...,
+        exclude_from_repr: bool = ...,
+        exclude_from_immutable: bool = ...,
+        default_value: Any = ...,
+        default_factory: Optional[Callable[[None], Any]] = ...,
+        instance_of: Optional[Any] = ...,
+        init_aliaser: Optional[Callable[[AnyStr], AnyStr]] = ...) -> None: ...
diff --git a/typeshed/third_party/2and3/click/core.pyi b/typeshed/third_party/2and3/click/core.pyi
index c0e054d..417a986 100644
--- a/typeshed/third_party/2and3/click/core.pyi
+++ b/typeshed/third_party/2and3/click/core.pyi
@@ -30,7 +30,7 @@ def invoke_param_callback(
 
 @contextmanager
 def augment_usage_errors(
-    ctx: 'Context', param: Optional['Parameter'] = None
+    ctx: 'Context', param: Optional['Parameter'] = ...
 ) -> Generator[None, None, None]:
     ...
 
@@ -73,25 +73,25 @@ class Context:
     def __init__(
         self,
         command: 'Command',
-        parent: Optional['Context'] = None,
-        info_name: Optional[str] = None,
-        obj: Optional[Any] = None,
-        auto_envvar_prefix: Optional[str] = None,
-        default_map: Optional[Mapping[str, Any]] = None,
-        terminal_width: Optional[int] = None,
-        max_content_width: Optional[int] = None,
-        resilient_parsing: bool = False,
-        allow_extra_args: Optional[bool] = None,
-        allow_interspersed_args: Optional[bool] = None,
-        ignore_unknown_options: Optional[bool] = None,
-        help_option_names: Optional[List[str]] = None,
-        token_normalize_func: Optional[Callable[[str], str]] = None,
-        color: Optional[bool] = None
+        parent: Optional['Context'] = ...,
+        info_name: Optional[str] = ...,
+        obj: Optional[Any] = ...,
+        auto_envvar_prefix: Optional[str] = ...,
+        default_map: Optional[Mapping[str, Any]] = ...,
+        terminal_width: Optional[int] = ...,
+        max_content_width: Optional[int] = ...,
+        resilient_parsing: bool = ...,
+        allow_extra_args: Optional[bool] = ...,
+        allow_interspersed_args: Optional[bool] = ...,
+        ignore_unknown_options: Optional[bool] = ...,
+        help_option_names: Optional[List[str]] = ...,
+        token_normalize_func: Optional[Callable[[str], str]] = ...,
+        color: Optional[bool] = ...
     ) -> None:
         ...
 
     @contextmanager
-    def scope(self, cleanup: bool = True) -> Generator['Context', None, None]:
+    def scope(self, cleanup: bool = ...) -> Generator['Context', None, None]:
         ...
 
     def make_formatter(self) -> HelpFormatter:
@@ -121,7 +121,7 @@ class Context:
     def abort(self) -> None:
         ...
 
-    def exit(self, code: Union[int, str] = 0) -> None:
+    def exit(self, code: Union[int, str] = ...) -> None:
         ...
 
     def get_usage(self) -> str:
@@ -147,7 +147,7 @@ class BaseCommand:
     name: str
     context_settings: Dict
 
-    def __init__(self, name: str, context_settings: Optional[Dict] = None) -> None:
+    def __init__(self, name: str, context_settings: Optional[Dict] = ...) -> None:
         ...
 
     def get_usage(self, ctx: Context) -> str:
@@ -157,7 +157,7 @@ class BaseCommand:
         ...
 
     def make_context(
-        self, info_name: str, args: List[str], parent: Optional[Context] = None, **extra
+        self, info_name: str, args: List[str], parent: Optional[Context] = ..., **extra
     ) -> Context:
         ...
 
@@ -169,10 +169,10 @@ class BaseCommand:
 
     def main(
         self,
-        args: Optional[List[str]] = None,
-        prog_name: Optional[str] = None,
-        complete_var: Optional[str] = None,
-        standalone_mode: bool = True,
+        args: Optional[List[str]] = ...,
+        prog_name: Optional[str] = ...,
+        complete_var: Optional[str] = ...,
+        standalone_mode: bool = ...,
         **extra
     ) -> Any:
         ...
@@ -193,14 +193,14 @@ class Command(BaseCommand):
     def __init__(
         self,
         name: str,
-        context_settings: Optional[Dict] = None,
-        callback: Optional[Callable] = None,
-        params: Optional[List['Parameter']] = None,
-        help: Optional[str] = None,
-        epilog: Optional[str] = None,
-        short_help: Optional[str] = None,
-        options_metavar: str = '[OPTIONS]',
-        add_help_option: bool = True
+        context_settings: Optional[Dict] = ...,
+        callback: Optional[Callable] = ...,
+        params: Optional[List['Parameter']] = ...,
+        help: Optional[str] = ...,
+        epilog: Optional[str] = ...,
+        short_help: Optional[str] = ...,
+        options_metavar: str = ...,
+        add_help_option: bool = ...
     ) -> None:
         ...
 
@@ -252,18 +252,18 @@ class MultiCommand(Command):
 
     def __init__(
         self,
-        name: Optional[str] = None,
-        invoke_without_command: bool = False,
-        no_args_is_help: Optional[bool] = None,
-        subcommand_metavar: Optional[str] = None,
-        chain: bool = False,
-        result_callback: Optional[Callable] = None,
+        name: Optional[str] = ...,
+        invoke_without_command: bool = ...,
+        no_args_is_help: Optional[bool] = ...,
+        subcommand_metavar: Optional[str] = ...,
+        chain: bool = ...,
+        result_callback: Optional[Callable] = ...,
         **attrs
     ) -> None:
         ...
 
     def resultcallback(
-        self, replace: bool = False
+        self, replace: bool = ...
     ) -> _Decorator:
         ...
 
@@ -286,11 +286,11 @@ class Group(MultiCommand):
     commands: Dict[str, Command]
 
     def __init__(
-        self, name: Optional[str] = None, commands: Optional[Dict[str, Command]] = None, **attrs
+        self, name: Optional[str] = ..., commands: Optional[Dict[str, Command]] = ..., **attrs
     ) -> None:
         ...
 
-    def add_command(self, cmd: Command, name: Optional[str] = None):
+    def add_command(self, cmd: Command, name: Optional[str] = ...):
         ...
 
     def command(self, *args, **kwargs) -> _Decorator:
@@ -304,7 +304,7 @@ class CommandCollection(MultiCommand):
     sources: List[MultiCommand]
 
     def __init__(
-        self, name: Optional[str] = None, sources: Optional[List[MultiCommand]] = None, **attrs
+        self, name: Optional[str] = ..., sources: Optional[List[MultiCommand]] = ..., **attrs
     ) -> None:
         ...
 
@@ -332,16 +332,16 @@ class Parameter:
 
     def __init__(
         self,
-        param_decls: Optional[List[str]] = None,
-        type: Optional[Union[type, 'ParamType']] = None,
-        required: bool = False,
-        default: Optional[Any] = None,
-        callback: Optional[Callable[[Context, 'Parameter', str], Any]] = None,
-        nargs: Optional[int] = None,
-        metavar: Optional[str] = None,
-        expose_value: bool = True,
-        is_eager: bool = False,
-        envvar: Optional[Union[str, List[str]]] = None
+        param_decls: Optional[List[str]] = ...,
+        type: Optional[Union[type, 'ParamType']] = ...,
+        required: bool = ...,
+        default: Optional[Any] = ...,
+        callback: Optional[Callable[[Context, 'Parameter', str], Any]] = ...,
+        nargs: Optional[int] = ...,
+        metavar: Optional[str] = ...,
+        expose_value: bool = ...,
+        is_eager: bool = ...,
+        envvar: Optional[Union[str, List[str]]] = ...
     ) -> None:
         ...
 
@@ -402,18 +402,18 @@ class Option(Parameter):
 
     def __init__(
         self,
-        param_decls: Optional[List[str]] = None,
-        show_default: bool = False,
-        prompt: Union[bool, str] = False,
-        confirmation_prompt: bool = False,
-        hide_input: bool = False,
-        is_flag: Optional[bool] = None,
-        flag_value: Optional[Any] = None,
-        multiple: bool = False,
-        count: bool = False,
-        allow_from_autoenv: bool = True,
-        type: Optional[Union[type, 'ParamType']] = None,
-        help: Optional[str] = None,
+        param_decls: Optional[List[str]] = ...,
+        show_default: bool = ...,
+        prompt: Union[bool, str] = ...,
+        confirmation_prompt: bool = ...,
+        hide_input: bool = ...,
+        is_flag: Optional[bool] = ...,
+        flag_value: Optional[Any] = ...,
+        multiple: bool = ...,
+        count: bool = ...,
+        allow_from_autoenv: bool = ...,
+        type: Optional[Union[type, 'ParamType']] = ...,
+        help: Optional[str] = ...,
         **attrs
     ) -> None:
         ...
@@ -425,8 +425,8 @@ class Option(Parameter):
 class Argument(Parameter):
     def __init__(
         self,
-        param_decls: Optional[List[str]] = None,
-        required: Optional[bool] = None,
+        param_decls: Optional[List[str]] = ...,
+        required: Optional[bool] = ...,
         **attrs
     ) -> None:
         ...
diff --git a/typeshed/third_party/2and3/click/decorators.pyi b/typeshed/third_party/2and3/click/decorators.pyi
index b9d6faf..6f6e523 100644
--- a/typeshed/third_party/2and3/click/decorators.pyi
+++ b/typeshed/third_party/2and3/click/decorators.pyi
@@ -1,5 +1,5 @@
 from distutils.version import Version
-from typing import Any, Callable, Dict, List, Optional, TypeVar, Union
+from typing import Any, Callable, Dict, List, Optional, Type, TypeVar, Union, Text
 
 from click.core import Command, Group, Argument, Option, Parameter, Context
 from click.types import ParamType
@@ -21,7 +21,7 @@ def pass_obj(_T) -> _T:
 
 
 def make_pass_decorator(
-    object_type: type, ensure: bool = False
+    object_type: type, ensure: bool = ...
 ) -> Callable[[_T], _T]:
     ...
 
@@ -30,15 +30,15 @@ def make_pass_decorator(
 # arguments from core.pyi to help with type checking.
 
 def command(
-    name: Optional[str] = None,
-    cls: type = Command,
+    name: Optional[str] = ...,
+    cls: Optional[Type[Command]] = ...,
     # Command
     context_settings: Optional[Dict] = ...,
-    help: Optional[str] = None,
-    epilog: Optional[str] = None,
-    short_help: Optional[str] = None,
-    options_metavar: str = '[OPTIONS]',
-    add_help_option: bool = True,
+    help: Optional[str] = ...,
+    epilog: Optional[str] = ...,
+    short_help: Optional[str] = ...,
+    options_metavar: str = ...,
+    add_help_option: bool = ...,
 ) -> _Decorator:
     ...
 
@@ -46,22 +46,22 @@ def command(
 # This inherits attrs from Group, MultiCommand and Command.
 
 def group(
-    name: Optional[str] = None,
-    cls: type = Group,
+    name: Optional[str] = ...,
+    cls: Type[Command] = ...,
     # Group
-    commands: Optional[Dict[str, Command]] = None,
+    commands: Optional[Dict[str, Command]] = ...,
     # MultiCommand
-    invoke_without_command: bool = False,
-    no_args_is_help: Optional[bool] = None,
-    subcommand_metavar: Optional[str] = None,
-    chain: bool = False,
-    result_callback: Optional[Callable] = None,
+    invoke_without_command: bool = ...,
+    no_args_is_help: Optional[bool] = ...,
+    subcommand_metavar: Optional[str] = ...,
+    chain: bool = ...,
+    result_callback: Optional[Callable] = ...,
     # Command
-    help: Optional[str] = None,
-    epilog: Optional[str] = None,
-    short_help: Optional[str] = None,
-    options_metavar: str = '[OPTIONS]',
-    add_help_option: bool = True,
+    help: Optional[str] = ...,
+    epilog: Optional[str] = ...,
+    short_help: Optional[str] = ...,
+    options_metavar: str = ...,
+    add_help_option: bool = ...,
     # User-defined
     **kwargs: Any,
 ) -> _Decorator:
@@ -70,159 +70,155 @@ def group(
 
 def argument(
     *param_decls: str,
-    cls: type = Argument,
+    cls: Type[Argument] = ...,
     # Argument
-    required: Optional[bool] = None,
+    required: Optional[bool] = ...,
     # Parameter
-    type: Optional[Union[type, ParamType]] = None,
-    default: Optional[Any] = None,
+    type: Optional[Union[type, ParamType]] = ...,
+    default: Optional[Any] = ...,
     callback: Optional[_Callback] = ...,
-    nargs: Optional[int] = None,
-    metavar: Optional[str] = None,
-    expose_value: bool = True,
-    is_eager: bool = False,
-    envvar: Optional[Union[str, List[str]]] = None
+    nargs: Optional[int] = ...,
+    metavar: Optional[str] = ...,
+    expose_value: bool = ...,
+    is_eager: bool = ...,
+    envvar: Optional[Union[str, List[str]]] = ...
 ) -> _Decorator:
     ...
 
 
 def option(
     *param_decls: str,
-    cls: type = Option,
+    cls: Type[Option] = ...,
     # Option
-    show_default: bool = False,
-    prompt: bool = False,
-    confirmation_prompt: bool = False,
-    hide_input: bool = False,
-    is_flag: Optional[bool] = None,
-    flag_value: Optional[Any] = None,
-    multiple: bool = False,
-    count: bool = False,
-    allow_from_autoenv: bool = True,
-    type: Optional[Union[type, ParamType]] = None,
-    help: Optional[str] = None,
+    show_default: bool = ...,
+    prompt: Union[bool, Text] = ...,
+    confirmation_prompt: bool = ...,
+    hide_input: bool = ...,
+    is_flag: Optional[bool] = ...,
+    flag_value: Optional[Any] = ...,
+    multiple: bool = ...,
+    count: bool = ...,
+    allow_from_autoenv: bool = ...,
+    type: Optional[Union[type, ParamType]] = ...,
+    help: Optional[str] = ...,
     # Parameter
-    default: Optional[Any] = None,
-    required: bool = False,
+    default: Optional[Any] = ...,
+    required: bool = ...,
     callback: Optional[_Callback] = ...,
-    nargs: Optional[int] = None,
-    metavar: Optional[str] = None,
-    expose_value: bool = True,
-    is_eager: bool = False,
-    envvar: Optional[Union[str, List[str]]] = None
+    nargs: Optional[int] = ...,
+    metavar: Optional[str] = ...,
+    expose_value: bool = ...,
+    is_eager: bool = ...,
+    envvar: Optional[Union[str, List[str]]] = ...
 ) -> _Decorator:
     ...
 
 
-# Defaults copied from the decorator body.
 def confirmation_option(
     *param_decls: str,
-    cls: type = Option,
+    cls: Type[Option] = ...,
     # Option
-    show_default: bool = False,
-    prompt: str = 'Do you want to continue?',
-    confirmation_prompt: bool = False,
-    hide_input: bool = False,
-    is_flag: bool = True,
-    flag_value: Optional[Any] = None,
-    multiple: bool = False,
-    count: bool = False,
-    allow_from_autoenv: bool = True,
-    type: Optional[Union[type, ParamType]] = None,
-    help: str = 'Confirm the action without prompting.',
+    show_default: bool = ...,
+    prompt: Union[bool, Text] = ...,
+    confirmation_prompt: bool = ...,
+    hide_input: bool = ...,
+    is_flag: bool = ...,
+    flag_value: Optional[Any] = ...,
+    multiple: bool = ...,
+    count: bool = ...,
+    allow_from_autoenv: bool = ...,
+    type: Optional[Union[type, ParamType]] = ...,
+    help: str = ...,
     # Parameter
-    default: Optional[Any] = None,
+    default: Optional[Any] = ...,
     callback: Optional[_Callback] = ...,
-    nargs: Optional[int] = None,
-    metavar: Optional[str] = None,
-    expose_value: bool = False,
-    is_eager: bool = False,
-    envvar: Optional[Union[str, List[str]]] = None
+    nargs: Optional[int] = ...,
+    metavar: Optional[str] = ...,
+    expose_value: bool = ...,
+    is_eager: bool = ...,
+    envvar: Optional[Union[str, List[str]]] = ...
 ) -> _Decorator:
     ...
 
 
-# Defaults copied from the decorator body.
 def password_option(
     *param_decls: str,
-    cls: type = Option,
+    cls: Type[Option] = ...,
     # Option
-    show_default: bool = False,
-    prompt: bool = True,
-    confirmation_prompt: bool = True,
-    hide_input: bool = True,
-    is_flag: Optional[bool] = None,
-    flag_value: Optional[Any] = None,
-    multiple: bool = False,
-    count: bool = False,
-    allow_from_autoenv: bool = True,
-    type: Optional[Union[type, ParamType]] = None,
-    help: Optional[str] = None,
+    show_default: bool = ...,
+    prompt: Union[bool, Text] = ...,
+    confirmation_prompt: bool = ...,
+    hide_input: bool = ...,
+    is_flag: Optional[bool] = ...,
+    flag_value: Optional[Any] = ...,
+    multiple: bool = ...,
+    count: bool = ...,
+    allow_from_autoenv: bool = ...,
+    type: Optional[Union[type, ParamType]] = ...,
+    help: Optional[str] = ...,
     # Parameter
-    default: Optional[Any] = None,
+    default: Optional[Any] = ...,
     callback: Optional[_Callback] = ...,
-    nargs: Optional[int] = None,
-    metavar: Optional[str] = None,
-    expose_value: bool = True,
-    is_eager: bool = False,
-    envvar: Optional[Union[str, List[str]]] = None
+    nargs: Optional[int] = ...,
+    metavar: Optional[str] = ...,
+    expose_value: bool = ...,
+    is_eager: bool = ...,
+    envvar: Optional[Union[str, List[str]]] = ...
 ) -> _Decorator:
     ...
 
 
-# Defaults copied from the decorator body.
 def version_option(
-    version: Optional[Union[str, Version]] = None,
+    version: Optional[Union[str, Version]] = ...,
     *param_decls: str,
-    cls: type = Option,
+    cls: Type[Option] = ...,
     # Option
-    prog_name: Optional[str] = None,
-    show_default: bool = False,
-    prompt: bool = False,
-    confirmation_prompt: bool = False,
-    hide_input: bool = False,
-    is_flag: bool = True,
-    flag_value: Optional[Any] = None,
-    multiple: bool = False,
-    count: bool = False,
-    allow_from_autoenv: bool = True,
-    type: Optional[Union[type, ParamType]] = None,
-    help: str = 'Show the version and exit.',
+    prog_name: Optional[str] = ...,
+    show_default: bool = ...,
+    prompt: Union[bool, Text] = ...,
+    confirmation_prompt: bool = ...,
+    hide_input: bool = ...,
+    is_flag: bool = ...,
+    flag_value: Optional[Any] = ...,
+    multiple: bool = ...,
+    count: bool = ...,
+    allow_from_autoenv: bool = ...,
+    type: Optional[Union[type, ParamType]] = ...,
+    help: str = ...,
     # Parameter
-    default: Optional[Any] = None,
+    default: Optional[Any] = ...,
     callback: Optional[_Callback] = ...,
-    nargs: Optional[int] = None,
-    metavar: Optional[str] = None,
-    expose_value: bool = False,
-    is_eager: bool = True,
-    envvar: Optional[Union[str, List[str]]] = None
+    nargs: Optional[int] = ...,
+    metavar: Optional[str] = ...,
+    expose_value: bool = ...,
+    is_eager: bool = ...,
+    envvar: Optional[Union[str, List[str]]] = ...
 ) -> _Decorator:
     ...
 
 
-# Defaults copied from the decorator body.
 def help_option(
     *param_decls: str,
-    cls: type = Option,
+    cls: Type[Option] = ...,
     # Option
-    show_default: bool = False,
-    prompt: bool = False,
-    confirmation_prompt: bool = False,
-    hide_input: bool = False,
-    is_flag: bool = True,
-    flag_value: Optional[Any] = None,
-    multiple: bool = False,
-    count: bool = False,
-    allow_from_autoenv: bool = True,
-    type: Optional[Union[type, ParamType]] = None,
-    help: str = 'Show this message and exit.',
+    show_default: bool = ...,
+    prompt: Union[bool, Text] = ...,
+    confirmation_prompt: bool = ...,
+    hide_input: bool = ...,
+    is_flag: bool = ...,
+    flag_value: Optional[Any] = ...,
+    multiple: bool = ...,
+    count: bool = ...,
+    allow_from_autoenv: bool = ...,
+    type: Optional[Union[type, ParamType]] = ...,
+    help: str = ...,
     # Parameter
-    default: Optional[Any] = None,
+    default: Optional[Any] = ...,
     callback: Optional[_Callback] = ...,
-    nargs: Optional[int] = None,
-    metavar: Optional[str] = None,
-    expose_value: bool = False,
-    is_eager: bool = True,
-    envvar: Optional[Union[str, List[str]]] = None
+    nargs: Optional[int] = ...,
+    metavar: Optional[str] = ...,
+    expose_value: bool = ...,
+    is_eager: bool = ...,
+    envvar: Optional[Union[str, List[str]]] = ...
 ) -> _Decorator:
     ...
diff --git a/typeshed/third_party/2and3/click/exceptions.pyi b/typeshed/third_party/2and3/click/exceptions.pyi
index 3517624..6f08d7a 100644
--- a/typeshed/third_party/2and3/click/exceptions.pyi
+++ b/typeshed/third_party/2and3/click/exceptions.pyi
@@ -20,10 +20,10 @@ class ClickException(Exception):
 class UsageError(ClickException):
     ctx: Optional[Context]
 
-    def __init__(self, message: str, ctx: Optional[Context] = None) -> None:
+    def __init__(self, message: str, ctx: Optional[Context] = ...) -> None:
         ...
 
-    def show(self, file: Optional[IO] = None) -> None:
+    def show(self, file: Optional[IO] = ...) -> None:
         ...
 
 
@@ -34,9 +34,9 @@ class BadParameter(UsageError):
     def __init__(
         self,
         message: str,
-        ctx: Optional[Context] = None,
-        param: Optional[Parameter] = None,
-        param_hint: Optional[str] = None
+        ctx: Optional[Context] = ...,
+        param: Optional[Parameter] = ...,
+        param_hint: Optional[str] = ...
     ) -> None:
         ...
 
@@ -46,11 +46,11 @@ class MissingParameter(BadParameter):
 
     def __init__(
         self,
-        message: Optional[str] = None,
-        ctx: Optional[Context] = None,
-        param: Optional[Parameter] = None,
-        param_hint: Optional[str] = None,
-        param_type: Optional[str] = None
+        message: Optional[str] = ...,
+        ctx: Optional[Context] = ...,
+        param: Optional[Parameter] = ...,
+        param_hint: Optional[str] = ...,
+        param_type: Optional[str] = ...
     ) -> None:
         ...
 
@@ -62,20 +62,20 @@ class NoSuchOption(UsageError):
     def __init__(
         self,
         option_name: str,
-        message: Optional[str] = None,
-        possibilities: Optional[List[str]] = None,
-        ctx: Optional[Context] = None
+        message: Optional[str] = ...,
+        possibilities: Optional[List[str]] = ...,
+        ctx: Optional[Context] = ...
     ) -> None:
         ...
 
 
 class BadOptionUsage(UsageError):
-    def __init__(self, message: str, ctx: Optional[Context] = None) -> None:
+    def __init__(self, message: str, ctx: Optional[Context] = ...) -> None:
         ...
 
 
 class BadArgumentUsage(UsageError):
-    def __init__(self, message: str, ctx: Optional[Context] = None) -> None:
+    def __init__(self, message: str, ctx: Optional[Context] = ...) -> None:
         ...
 
 
@@ -83,7 +83,7 @@ class FileError(ClickException):
     ui_filename: str
     filename: str
 
-    def __init__(self, filename: str, hint: Optional[str] = None) -> None:
+    def __init__(self, filename: str, hint: Optional[str] = ...) -> None:
         ...
 
 
diff --git a/typeshed/third_party/2and3/click/formatting.pyi b/typeshed/third_party/2and3/click/formatting.pyi
index 52b81ce..f4fe249 100644
--- a/typeshed/third_party/2and3/click/formatting.pyi
+++ b/typeshed/third_party/2and3/click/formatting.pyi
@@ -17,10 +17,10 @@ def iter_rows(
 
 def wrap_text(
     text: str,
-    width: int = 78,
-    initial_indent: str = '',
-    subsequent_indent: str = '',
-    preserve_paragraphs: bool = False
+    width: int = ...,
+    initial_indent: str = ...,
+    subsequent_indent: str = ...,
+    preserve_paragraphs: bool = ...
 ) -> str:
     ...
 
@@ -33,9 +33,9 @@ class HelpFormatter:
 
     def __init__(
         self,
-        indent_increment: int = 2,
-        width: Optional[int] = None,
-        max_width: Optional[int] = None,
+        indent_increment: int = ...,
+        width: Optional[int] = ...,
+        max_width: Optional[int] = ...,
     ) -> None:
         ...
 
@@ -51,8 +51,8 @@ class HelpFormatter:
     def write_usage(
         self,
         prog: str,
-        args: str = '',
-        prefix: str = 'Usage: ',
+        args: str = ...,
+        prefix: str = ...,
     ):
         ...
 
@@ -68,8 +68,8 @@ class HelpFormatter:
     def write_dl(
         self,
         rows: Iterable[Iterable[str]],
-        col_max: int = 30,
-        col_spacing: int = 2,
+        col_max: int = ...,
+        col_spacing: int = ...,
     ) -> None:
         ...
 
diff --git a/typeshed/third_party/2and3/click/globals.pyi b/typeshed/third_party/2and3/click/globals.pyi
index 816d393..11adce3 100644
--- a/typeshed/third_party/2and3/click/globals.pyi
+++ b/typeshed/third_party/2and3/click/globals.pyi
@@ -2,7 +2,7 @@ from click.core import Context
 from typing import Optional
 
 
-def get_current_context(silent: bool = False) -> Context:
+def get_current_context(silent: bool = ...) -> Context:
     ...
 
 
@@ -14,5 +14,5 @@ def pop_context() -> None:
     ...
 
 
-def resolve_color_default(color: Optional[bool] = None) -> Optional[bool]:
+def resolve_color_default(color: Optional[bool] = ...) -> Optional[bool]:
     ...
diff --git a/typeshed/third_party/2and3/click/parser.pyi b/typeshed/third_party/2and3/click/parser.pyi
index 21833d6..5184da5 100644
--- a/typeshed/third_party/2and3/click/parser.pyi
+++ b/typeshed/third_party/2and3/click/parser.pyi
@@ -37,10 +37,10 @@ class Option:
         self,
         opts: Iterable[str],
         dest: str,
-        action: Optional[str] = None,
-        nargs: int = 1,
-        const: Optional[Any] = None,
-        obj: Optional[Any] = None
+        action: Optional[str] = ...,
+        nargs: int = ...,
+        const: Optional[Any] = ...,
+        obj: Optional[Any] = ...
     ) -> None:
         ...
 
@@ -53,7 +53,7 @@ class Argument:
     nargs: int
     obj: Any
 
-    def __init__(self, dest: str, nargs: int = 1, obj: Optional[Any] = None) -> None:
+    def __init__(self, dest: str, nargs: int = ..., obj: Optional[Any] = ...) -> None:
         ...
 
     def process(self, value: Any, state: 'ParsingState') -> None:
@@ -79,21 +79,21 @@ class OptionParser:
     _opt_prefixes: Set[str]
     _args: List[Argument]
 
-    def __init__(self, ctx: Optional[Context] = None) -> None:
+    def __init__(self, ctx: Optional[Context] = ...) -> None:
         ...
 
     def add_option(
         self,
         opts: Iterable[str],
         dest: str,
-        action: Optional[str] = None,
-        nargs: int = 1,
-        const: Optional[Any] = None,
-        obj: Optional[Any] = None
+        action: Optional[str] = ...,
+        nargs: int = ...,
+        const: Optional[Any] = ...,
+        obj: Optional[Any] = ...
     ) -> None:
         ...
 
-    def add_argument(self, dest: str, nargs: int = 1, obj: Optional[Any] = None) -> None:
+    def add_argument(self, dest: str, nargs: int = ..., obj: Optional[Any] = ...) -> None:
         ...
 
     def parse_args(
diff --git a/typeshed/third_party/2and3/click/termui.pyi b/typeshed/third_party/2and3/click/termui.pyi
index bffb3fa..c99a685 100644
--- a/typeshed/third_party/2and3/click/termui.pyi
+++ b/typeshed/third_party/2and3/click/termui.pyi
@@ -19,33 +19,33 @@ def hidden_prompt_func(prompt: str) -> str:
 def _build_prompt(
     text: str,
     suffix: str,
-    show_default: bool = False,
-    default: Optional[str] = None,
+    show_default: bool = ...,
+    default: Optional[str] = ...,
 ) -> str:
     ...
 
 
 def prompt(
     text: str,
-    default: Optional[str] = None,
-    hide_input: bool = False,
-    confirmation_prompt: bool = False,
-    type: Optional[Any] = None,
-    value_proc: Optional[Callable[[Optional[str]], Any]] = None,
-    prompt_suffix: str = ': ',
-    show_default: bool = True,
-    err: bool = False,
+    default: Optional[str] = ...,
+    hide_input: bool = ...,
+    confirmation_prompt: bool = ...,
+    type: Optional[Any] = ...,
+    value_proc: Optional[Callable[[Optional[str]], Any]] = ...,
+    prompt_suffix: str = ...,
+    show_default: bool = ...,
+    err: bool = ...,
 ) -> Any:
     ...
 
 
 def confirm(
     text: str,
-    default: bool = False,
-    abort: bool = False,
-    prompt_suffix: str = ': ',
-    show_default: bool = True,
-    err: bool = False,
+    default: bool = ...,
+    abort: bool = ...,
+    prompt_suffix: str = ...,
+    show_default: bool = ...,
+    err: bool = ...,
 ) -> bool:
     ...
 
@@ -54,7 +54,7 @@ def get_terminal_size() -> Tuple[int, int]:
     ...
 
 
-def echo_via_pager(text: str, color: Optional[bool] = None) -> None:
+def echo_via_pager(text: str, color: Optional[bool] = ...) -> None:
     ...
 
 
@@ -63,20 +63,20 @@ _T = TypeVar('_T')
 
 @contextmanager
 def progressbar(
-    iterable: Optional[Iterable[_T]] = None,
-    length: Optional[int] = None,
-    label: Optional[str] = None,
-    show_eta: bool = True,
-    show_percent: Optional[bool] = None,
-    show_pos: bool = False,
-    item_show_func: Optional[Callable[[_T], str]] = None,
-    fill_char: str = '#',
-    empty_char: str = '-',
-    bar_template: str = '%(label)s  [%(bar)s]  %(info)s',
-    info_sep: str = '  ',
-    width: int = 36,
-    file: Optional[IO] = None,
-    color: Optional[bool] = None,
+    iterable: Optional[Iterable[_T]] = ...,
+    length: Optional[int] = ...,
+    label: Optional[str] = ...,
+    show_eta: bool = ...,
+    show_percent: Optional[bool] = ...,
+    show_pos: bool = ...,
+    item_show_func: Optional[Callable[[_T], str]] = ...,
+    fill_char: str = ...,
+    empty_char: str = ...,
+    bar_template: str = ...,
+    info_sep: str = ...,
+    width: int = ...,
+    file: Optional[IO] = ...,
+    color: Optional[bool] = ...,
 ) -> Generator[_T, None, None]:
     ...
 
@@ -87,14 +87,14 @@ def clear() -> None:
 
 def style(
     text: str,
-    fg: Optional[str] = None,
-    bg: Optional[str] = None,
-    bold: Optional[bool] = None,
-    dim: Optional[bool] = None,
-    underline: Optional[bool] = None,
-    blink: Optional[bool] = None,
-    reverse: Optional[bool] = None,
-    reset: bool = True,
+    fg: Optional[str] = ...,
+    bg: Optional[str] = ...,
+    bold: Optional[bool] = ...,
+    dim: Optional[bool] = ...,
+    underline: Optional[bool] = ...,
+    blink: Optional[bool] = ...,
+    reverse: Optional[bool] = ...,
+    reset: bool = ...,
 ):
     ...
 
@@ -106,42 +106,42 @@ def unstyle(text: str) -> str:
 # Styling options copied from style() for nicer type checking.
 def secho(
     text: str,
-    file: Optional[IO] = None,
-    nl: bool =True,
-    err: bool = False,
-    color: Optional[bool] = None,
-    fg: Optional[str] = None,
-    bg: Optional[str] = None,
-    bold: Optional[bool] = None,
-    dim: Optional[bool] = None,
-    underline: Optional[bool] = None,
-    blink: Optional[bool] = None,
-    reverse: Optional[bool] = None,
-    reset: bool = True,
+    file: Optional[IO] = ...,
+    nl: bool = ...,
+    err: bool = ...,
+    color: Optional[bool] = ...,
+    fg: Optional[str] = ...,
+    bg: Optional[str] = ...,
+    bold: Optional[bool] = ...,
+    dim: Optional[bool] = ...,
+    underline: Optional[bool] = ...,
+    blink: Optional[bool] = ...,
+    reverse: Optional[bool] = ...,
+    reset: bool = ...,
 ):
     ...
 
 
 def edit(
-    text: Optional[str] = None,
-    editor: Optional[str] = None,
-    env: Optional[str] = None,
-    require_save: bool = True,
-    extension: str = '.txt',
-    filename: Optional[str] = None,
+    text: Optional[str] = ...,
+    editor: Optional[str] = ...,
+    env: Optional[str] = ...,
+    require_save: bool = ...,
+    extension: str = ...,
+    filename: Optional[str] = ...,
 ) -> str:
     ...
 
 
-def launch(url: str, wait: bool = False, locate: bool = False) -> int:
+def launch(url: str, wait: bool = ..., locate: bool = ...) -> int:
     ...
 
 
-def getchar(echo: bool = False) -> str:
+def getchar(echo: bool = ...) -> str:
     ...
 
 
 def pause(
-    info: str ='Press any key to continue ...', err: bool = False
+    info: str = ..., err: bool = ...
 ) -> None:
     ...
diff --git a/typeshed/third_party/2and3/click/types.pyi b/typeshed/third_party/2and3/click/types.pyi
index 7b6431e..525e2f3 100644
--- a/typeshed/third_party/2and3/click/types.pyi
+++ b/typeshed/third_party/2and3/click/types.pyi
@@ -12,8 +12,8 @@ class ParamType:
     def __call__(
         self,
         value: Optional[str],
-        param: Optional[Parameter] = None,
-        ctx: Optional[Context] = None,
+        param: Optional[Parameter] = ...,
+        ctx: Optional[Context] = ...,
     ) -> Any:
         ...
 
@@ -34,7 +34,7 @@ class ParamType:
     def split_envvar_value(self, rv: str) -> List[str]:
         ...
 
-    def fail(self, message: str, param: Optional[Parameter] = None, ctx: Optional[Context] = None) -> None:
+    def fail(self, message: str, param: Optional[Parameter] = ..., ctx: Optional[Context] = ...) -> None:
         ...
 
 
@@ -42,8 +42,8 @@ class BoolParamType(ParamType):
     def __call__(
         self,
         value: Optional[str],
-        param: Optional[Parameter] = None,
-        ctx: Optional[Context] = None,
+        param: Optional[Parameter] = ...,
+        ctx: Optional[Context] = ...,
     ) -> bool:
         ...
 
@@ -70,8 +70,8 @@ class FloatParamType(ParamType):
     def __call__(
         self,
         value: Optional[str],
-        param: Optional[Parameter] = None,
-        ctx: Optional[Context] = None,
+        param: Optional[Parameter] = ...,
+        ctx: Optional[Context] = ...,
     ) -> float:
         ...
 
@@ -91,19 +91,19 @@ class FloatRange(FloatParamType):
 class File(ParamType):
     def __init__(
         self,
-        mode: str = 'r',
-        encoding: Optional[str] = None,
-        errors: Optional[str] = None,
-        lazy: Optional[bool] = None,
-        atomic: Optional[bool] = None,
+        mode: str = ...,
+        encoding: Optional[str] = ...,
+        errors: Optional[str] = ...,
+        lazy: Optional[bool] = ...,
+        atomic: Optional[bool] = ...,
     ) -> None:
         ...
 
     def __call__(
         self,
         value: Optional[str],
-        param: Optional[Parameter] = None,
-        ctx: Optional[Context] = None,
+        param: Optional[Parameter] = ...,
+        ctx: Optional[Context] = ...,
     ) -> IO:
         ...
 
@@ -132,8 +132,8 @@ class FuncParamType(ParamType):
     def __call__(
         self,
         value: Optional[str],
-        param: Optional[Parameter] = None,
-        ctx: Optional[Context] = None,
+        param: Optional[Parameter] = ...,
+        ctx: Optional[Context] = ...,
     ) -> _F:
         ...
 
@@ -150,8 +150,8 @@ class IntParamType(ParamType):
     def __call__(
         self,
         value: Optional[str],
-        param: Optional[Parameter] = None,
-        ctx: Optional[Context] = None,
+        param: Optional[Parameter] = ...,
+        ctx: Optional[Context] = ...,
     ) -> int:
         ...
 
@@ -166,7 +166,7 @@ class IntParamType(ParamType):
 
 class IntRange(IntParamType):
     def __init__(
-        self, min: Optional[int] = None, max: Optional[int] = None, clamp: bool = False
+        self, min: Optional[int] = ..., max: Optional[int] = ..., clamp: bool = ...
     ) -> None:
         ...
 
@@ -177,14 +177,14 @@ _PathType = TypeVar('_PathType', str, bytes)
 class Path(ParamType):
     def __init__(
         self,
-        exists: bool = False,
-        file_okay: bool = True,
-        dir_okay: bool = True,
-        writable: bool = False,
-        readable: bool = True,
-        resolve_path: bool = False,
-        allow_dash: bool = False,
-        path_type: Optional[_PathType] = None,
+        exists: bool = ...,
+        file_okay: bool = ...,
+        dir_okay: bool = ...,
+        writable: bool = ...,
+        readable: bool = ...,
+        resolve_path: bool = ...,
+        allow_dash: bool = ...,
+        path_type: Optional[_PathType] = ...,
     ) -> None:
         ...
 
@@ -194,8 +194,8 @@ class Path(ParamType):
     def __call__(
         self,
         value: Optional[str],
-        param: Optional[Parameter] = None,
-        ctx: Optional[Context] = None,
+        param: Optional[Parameter] = ...,
+        ctx: Optional[Context] = ...,
     ) -> _PathType:
         ...
 
@@ -211,8 +211,8 @@ class StringParamType(ParamType):
     def __call__(
         self,
         value: Optional[str],
-        param: Optional[Parameter] = None,
-        ctx: Optional[Context] = None,
+        param: Optional[Parameter] = ...,
+        ctx: Optional[Context] = ...,
     ) -> str:
         ...
 
@@ -234,8 +234,8 @@ class Tuple(CompositeParamType):
     def __call__(
         self,
         value: Optional[str],
-        param: Optional[Parameter] = None,
-        ctx: Optional[Context] = None,
+        param: Optional[Parameter] = ...,
+        ctx: Optional[Context] = ...,
     ) -> Tuple:
         ...
 
@@ -256,8 +256,8 @@ class UUIDParameterType(ParamType):
     def __call__(
         self,
         value: Optional[str],
-        param: Optional[Parameter] = None,
-        ctx: Optional[Context] = None,
+        param: Optional[Parameter] = ...,
+        ctx: Optional[Context] = ...,
     ) -> uuid.UUID:
         ...
 
@@ -270,7 +270,7 @@ class UUIDParameterType(ParamType):
         ...
 
 
-def convert_type(ty: Any, default: Optional[Any] = None) -> ParamType:
+def convert_type(ty: Any, default: Optional[Any] = ...) -> ParamType:
     ...
 
 # parameter type shortcuts
diff --git a/typeshed/third_party/2and3/click/utils.pyi b/typeshed/third_party/2and3/click/utils.pyi
index cce7f20..1a6b962 100644
--- a/typeshed/third_party/2and3/click/utils.pyi
+++ b/typeshed/third_party/2and3/click/utils.pyi
@@ -1,5 +1,4 @@
-from typing import Any, Callable, Iterator, IO, List, Optional, TypeVar, Union
-
+from typing import Any, Callable, Iterator, IO, List, Optional, TypeVar, Union, Text
 
 _T = TypeVar('_T')
 _Decorator = Callable[[_T], _T]
@@ -17,7 +16,7 @@ def make_str(value: Any) -> str:
     ...
 
 
-def make_default_short_help(help: str, max_length: int = 45):
+def make_default_short_help(help: str, max_length: int = ...):
     ...
 
 
@@ -31,10 +30,10 @@ class LazyFile:
     def __init__(
         self,
         filename: str,
-        mode: str = 'r',
-        encoding: Optional[str] = None,
-        errors: str = 'strict',
-        atomic: bool = False
+        mode: str = ...,
+        encoding: Optional[str] = ...,
+        errors: str = ...,
+        atomic: bool = ...
     ) -> None:
         ...
 
@@ -74,11 +73,11 @@ class KeepOpenFile:
 
 
 def echo(
-    message: Optional[str] = None,
-    file: Optional[IO] = None,
-    nl: bool = True,
-    err: bool = False,
-    color: Optional[bool] = None,
+    message: Optional[Union[bytes, Text]] = ...,
+    file: Optional[IO] = ...,
+    nl: bool = ...,
+    err: bool = ...,
+    color: Optional[bool] = ...,
 ) -> None:
     ...
 
@@ -88,18 +87,18 @@ def get_binary_stream(name: str) -> IO[bytes]:
 
 
 def get_text_stream(
-    name: str, encoding: Optional[str] = None, errors: str = 'strict'
+    name: str, encoding: Optional[str] = ..., errors: str = ...
 ) -> IO[str]:
     ...
 
 
 def open_file(
     filename: str,
-    mode: str = 'r',
-    encoding: Optional[str] = None,
-    errors: str = 'strict',
-    lazy: bool = False,
-    atomic: bool = False
+    mode: str = ...,
+    encoding: Optional[str] = ...,
+    errors: str = ...,
+    lazy: bool = ...,
+    atomic: bool = ...
 ) -> Union[IO, LazyFile, KeepOpenFile]:
     ...
 
@@ -108,11 +107,11 @@ def get_os_args() -> List[str]:
     ...
 
 
-def format_filename(filename: str, shorten: bool = False) -> str:
+def format_filename(filename: str, shorten: bool = ...) -> str:
     ...
 
 
 def get_app_dir(
-    app_name: str, roaming: bool = True, force_posix: bool = False
+    app_name: str, roaming: bool = ..., force_posix: bool = ...
 ) -> str:
     ...
diff --git a/typeshed/third_party/2and3/croniter.pyi b/typeshed/third_party/2and3/croniter.pyi
new file mode 100644
index 0000000..0d01b7e
--- /dev/null
+++ b/typeshed/third_party/2and3/croniter.pyi
@@ -0,0 +1,41 @@
+import datetime
+from typing import Any, Dict, Iterator, List, Optional, Text, Tuple, Type, TypeVar, Union
+
+_RetType = Union[Type[float], Type[datetime.datetime]]
+_SelfT = TypeVar('_SelfT', bound=croniter)
+
+class CroniterError(ValueError): ...
+class CroniterBadCronError(CroniterError): ...
+class CroniterBadDateError(CroniterError): ...
+class CroniterNotAlphaError(CroniterError): ...
+
+class croniter(Iterator[Any]):
+    MONTHS_IN_YEAR: int
+    RANGES: Tuple[Tuple[int, int], ...]
+    DAYS: Tuple[int, ...]
+    ALPHACONV: Tuple[Dict[str, Any], ...]
+    LOWMAP: Tuple[Dict[int, Any], ...]
+    bad_length: str
+    tzinfo: Optional[datetime.tzinfo]
+    cur: float
+    expanded: List[List[str]]
+    start_time: float
+    dst_start_time: float
+    nth_weekday_of_month: Dict[str, Any]
+    def __init__(self, expr_format: Text, start_time: Optional[Union[float, datetime.datetime]] = ..., ret_type: Optional[_RetType] = ...) -> None: ...
+    # Most return values depend on ret_type, which can be passed in both as a method argument and as
+    # a constructor argument.
+    def get_next(self, ret_type: Optional[_RetType] = ...) -> Any: ...
+    def get_prev(self, ret_type: Optional[_RetType] = ...) -> Any: ...
+    def get_current(self, ret_type: Optional[_RetType] = ...) -> Any: ...
+    def __iter__(self: _SelfT) -> _SelfT: ...
+    def __next__(self, ret_type: Optional[_RetType] = ...) -> Any: ...
+    def next(self, ret_type: Optional[_RetType] = ...) -> Any: ...
+    def all_next(self, ret_type: Optional[_RetType] = ...) -> Iterator[Any]: ...
+    def all_prev(self, ret_type: Optional[_RetType] = ...) -> Iterator[Any]: ...
+    def iter(self, ret_type: Optional[_RetType] = ...) -> Iterator[Any]: ...
+    def is_leap(self, year: int) -> bool: ...
+    @classmethod
+    def expand(cls, expr_format: Text) -> Tuple[List[List[str]], Dict[str, Any]]: ...
+    @classmethod
+    def is_valid(cls, expression: Text) -> bool: ...
diff --git a/typeshed/third_party/2and3/markupsafe/__init__.pyi b/typeshed/third_party/2and3/markupsafe/__init__.pyi
index 7961654..21878ed 100644
--- a/typeshed/third_party/2and3/markupsafe/__init__.pyi
+++ b/typeshed/third_party/2and3/markupsafe/__init__.pyi
@@ -1,6 +1,6 @@
 import sys
 
-from typing import Any, Callable, Dict, Iterable, List, Optional, Text, Tuple, Union
+from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Text, Tuple, Union
 from collections import Mapping
 from markupsafe._compat import text_type
 import string
@@ -16,8 +16,8 @@ class Markup(text_type):
     def __rmul__(self, num: int) -> Markup: ...
     def __mod__(self, *args: Any) -> Markup: ...
     def join(self, seq: Iterable[text_type]): ...
-    def split(self, sep: Optional[text_type] = None, maxsplit: int = -1) -> List[text_type]: ...
-    def rsplit(self, sep: Optional[text_type] = None, maxsplit: int = -1) -> List[text_type]: ...
+    def split(self, sep: Optional[text_type] = ..., maxsplit: int = ...) -> List[text_type]: ...
+    def rsplit(self, sep: Optional[text_type] = ..., maxsplit: int = ...) -> List[text_type]: ...
     def splitlines(self, keepends: bool = ...) -> List[text_type]: ...
     def unescape(self) -> Text: ...
     def striptags(self) -> Text: ...
@@ -34,16 +34,16 @@ class Markup(text_type):
     def lower(self) -> Markup: ...
     def upper(self) -> Markup: ...
     def swapcase(self) -> Markup: ...
-    def replace(self, old: text_type, new: text_type, count: int = -1) -> Markup: ...
+    def replace(self, old: text_type, new: text_type, count: int = ...) -> Markup: ...
     def ljust(self, width: int, fillchar: text_type = ...) -> Markup: ...
     def rjust(self, width: int, fillchar: text_type = ...) -> Markup: ...
-    def lstrip(self, chars: Optional[text_type] = None) -> Markup: ...
-    def rstrip(self, chars: Optional[text_type] = None) -> Markup: ...
-    def strip(self, chars: Optional[text_type] = None) -> Markup: ...
+    def lstrip(self, chars: Optional[text_type] = ...) -> Markup: ...
+    def rstrip(self, chars: Optional[text_type] = ...) -> Markup: ...
+    def strip(self, chars: Optional[text_type] = ...) -> Markup: ...
     def center(self, width: int, fillchar: text_type = ...) -> Markup: ...
     def zfill(self, width: int) -> Markup: ...
-    def translate(self, table: Union[Dict[int, Any], text_type]) -> Markup: ...
-    def expandtabs(self, tabsize: int = 8) -> Markup: ...
+    def translate(self, table: Union[Mapping[int, Union[int, text_type, None]], Sequence[Union[int, text_type, None]]]) -> Markup: ...
+    def expandtabs(self, tabsize: int = ...) -> Markup: ...
 
 class EscapeFormatter(string.Formatter):
     escape = ...  # type: Callable[[text_type], Markup]
diff --git a/typeshed/third_party/2and3/pymysql/connections.pyi b/typeshed/third_party/2and3/pymysql/connections.pyi
index dd8b5e0..098c177 100644
--- a/typeshed/third_party/2and3/pymysql/connections.pyi
+++ b/typeshed/third_party/2and3/pymysql/connections.pyi
@@ -96,7 +96,7 @@ class Connection:
     def rollback(self): ...
     def escape(self, obj): ...
     def literal(self, obj): ...
-    def cursor(self, cursor: Optional[Type[Cursor]]=None): ...
+    def cursor(self, cursor: Optional[Type[Cursor]] = ...): ...
     def __enter__(self): ...
     def __exit__(self, exc, value, traceback): ...
     def query(self, sql): ...
diff --git a/typeshed/third_party/2and3/pymysql/cursors.pyi b/typeshed/third_party/2and3/pymysql/cursors.pyi
index 357f205..be8cb90 100644
--- a/typeshed/third_party/2and3/pymysql/cursors.pyi
+++ b/typeshed/third_party/2and3/pymysql/cursors.pyi
@@ -22,7 +22,7 @@ class Cursor:
     def executemany(self, query: str, args) -> int: ...
     def callproc(self, procname, args=...): ...
     def fetchone(self) -> Optional[Gen]: ...
-    def fetchmany(self, size: Optional[int] = None) -> Optional[Gen]: ...
+    def fetchmany(self, size: Optional[int] = ...) -> Optional[Gen]: ...
     def fetchall(self) -> Optional[Tuple[Gen, ...]]: ...
     def scroll(self, value, mode=''): ...
     def __iter__(self): ...
diff --git a/typeshed/third_party/2and3/requests/__init__.pyi b/typeshed/third_party/2and3/requests/__init__.pyi
index a89d4a0..97b5405 100644
--- a/typeshed/third_party/2and3/requests/__init__.pyi
+++ b/typeshed/third_party/2and3/requests/__init__.pyi
@@ -6,6 +6,7 @@ from . import api
 from . import sessions
 from . import status_codes
 from . import exceptions
+from . import packages
 import logging
 
 __title__ = ...  # type: Any
diff --git a/typeshed/third_party/2and3/requests/adapters.pyi b/typeshed/third_party/2and3/requests/adapters.pyi
index 7e6167c..fe7e116 100644
--- a/typeshed/third_party/2and3/requests/adapters.pyi
+++ b/typeshed/third_party/2and3/requests/adapters.pyi
@@ -44,10 +44,10 @@ DEFAULT_RETRIES = ...  # type: Any
 
 class BaseAdapter:
     def __init__(self) -> None: ...
-    def send(self, request: PreparedRequest, stream: bool=False,
-             timeout: Union[None, float, Tuple[float, float]]=None,
-             verify: bool=False,
-             cert: Union[None, Union[bytes, Text], Container[Union[bytes, Text]]]=None
+    def send(self, request: PreparedRequest, stream: bool = ...,
+             timeout: Union[None, float, Tuple[float, float]] = ...,
+             verify: bool = ...,
+             cert: Union[None, Union[bytes, Text], Container[Union[bytes, Text]]] = ...
              ) -> Response: ...
     def close(self) -> None: ...
 class HTTPAdapter(BaseAdapter):
diff --git a/typeshed/third_party/2and3/requests/api.pyi b/typeshed/third_party/2and3/requests/api.pyi
index e453475..98fdc5a 100644
--- a/typeshed/third_party/2and3/requests/api.pyi
+++ b/typeshed/third_party/2and3/requests/api.pyi
@@ -16,7 +16,7 @@ def get(url: Union[Text, bytes],
                   Mapping[Text, _ParamsMappingValueType],
                   Mapping[bytes, _ParamsMappingValueType],
                   Mapping[int, _ParamsMappingValueType],
-                  Mapping[float, _ParamsMappingValueType]]] = None,
+                  Mapping[float, _ParamsMappingValueType]]] = ...,
         **kwargs) -> Response: ...
 def options(url: Union[str, Text], **kwargs) -> Response: ...
 def head(url: Union[str, Text], **kwargs) -> Response: ...
diff --git a/typeshed/third_party/2and3/requests/models.pyi b/typeshed/third_party/2and3/requests/models.pyi
index 18b1689..b3f9040 100644
--- a/typeshed/third_party/2and3/requests/models.pyi
+++ b/typeshed/third_party/2and3/requests/models.pyi
@@ -2,6 +2,7 @@
 
 from typing import Any, List, MutableMapping, Iterator, Dict, Text
 import datetime
+import types
 
 from . import hooks
 from . import structures
@@ -114,6 +115,8 @@ class Response:
     def __bool__(self) -> bool: ...
     def __nonzero__(self) -> bool: ...
     def __iter__(self) -> Iterator[bytes]: ...
+    def __enter__(self) -> Response: ...
+    def __exit__(self, *args: Any) -> None: ...
     @property
     def ok(self) -> bool: ...
     @property
diff --git a/typeshed/third_party/2and3/requests/sessions.pyi b/typeshed/third_party/2and3/requests/sessions.pyi
index 252f7ee..9f5c0d8 100644
--- a/typeshed/third_party/2and3/requests/sessions.pyi
+++ b/typeshed/third_party/2and3/requests/sessions.pyi
@@ -56,20 +56,21 @@ class SessionRedirectMixin:
     def rebuild_proxies(self, prepared_request, proxies): ...
 
 _Data = Union[None, bytes, MutableMapping[Text, Text], IO]
+_Hooks = MutableMapping[Text, Callable[[Response], Any]]
 
 class Session(SessionRedirectMixin):
     __attrs__ = ...  # type: Any
-    headers = ...  # type: Optional[MutableMapping[Text, Text]]
+    headers = ...  # type: MutableMapping[Text, Text]
     auth = ...  # type: Union[None, Tuple[Text, Text], Callable[[Request], Request]]
-    proxies = ...  # type: Optional[MutableMapping[Text, Text]]
-    hooks = ...  # type: Optional[MutableMapping[Text, Callable[[Request], Any]]]
-    params = ...  # type: Union[None, bytes, MutableMapping[Text, Text]]
+    proxies = ...  # type: MutableMapping[Text, Text]
+    hooks = ...  # type: _Hooks
+    params = ...  # type: Union[bytes, MutableMapping[Text, Text]]
     stream = ...  # type: bool
     verify = ...  # type: bool
     cert = ...  # type: Union[None, Text, Tuple[Text, Text]]
     max_redirects = ...  # type: int
     trust_env = ...  # type: bool
-    cookies = ...  # type: Union[None, RequestsCookieJar, MutableMapping[Text, Text]]
+    cookies = ...  # type: Union[RequestsCookieJar, MutableMapping[Text, Text]]
     adapters = ...  # type: MutableMapping
     redirect_cache = ...  # type: RecentlyUsedContainer
     def __init__(self) -> None: ...
@@ -86,7 +87,7 @@ class Session(SessionRedirectMixin):
                 timeout: Union[None, float, Tuple[float, float]] = ...,
                 allow_redirects: Optional[bool] = ...,
                 proxies: Optional[MutableMapping[Text, Text]] = ...,
-                hooks: Optional[MutableMapping[Text, Callable[[Request], Any]]] = ...,
+                hooks: Optional[_Hooks] = ...,
                 stream: Optional[bool] = ...,
                 verify: Optional[bool] = ...,
                 cert: Union[Text, Tuple[Text, Text], None] = ...,
diff --git a/typeshed/third_party/2/thrift/Thrift.pyi b/typeshed/third_party/2and3/thrift/Thrift.pyi
similarity index 100%
rename from typeshed/third_party/2/thrift/Thrift.pyi
rename to typeshed/third_party/2and3/thrift/Thrift.pyi
diff --git a/typeshed/third_party/2/thrift/__init__.pyi b/typeshed/third_party/2and3/thrift/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2/thrift/__init__.pyi
rename to typeshed/third_party/2and3/thrift/__init__.pyi
diff --git a/typeshed/third_party/2/thrift/protocol/TBinaryProtocol.pyi b/typeshed/third_party/2and3/thrift/protocol/TBinaryProtocol.pyi
similarity index 100%
rename from typeshed/third_party/2/thrift/protocol/TBinaryProtocol.pyi
rename to typeshed/third_party/2and3/thrift/protocol/TBinaryProtocol.pyi
diff --git a/typeshed/third_party/2/thrift/protocol/TProtocol.pyi b/typeshed/third_party/2and3/thrift/protocol/TProtocol.pyi
similarity index 100%
rename from typeshed/third_party/2/thrift/protocol/TProtocol.pyi
rename to typeshed/third_party/2and3/thrift/protocol/TProtocol.pyi
diff --git a/typeshed/third_party/2/thrift/protocol/__init__.pyi b/typeshed/third_party/2and3/thrift/protocol/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2/thrift/protocol/__init__.pyi
rename to typeshed/third_party/2and3/thrift/protocol/__init__.pyi
diff --git a/typeshed/third_party/2/thrift/transport/TSocket.pyi b/typeshed/third_party/2and3/thrift/transport/TSocket.pyi
similarity index 100%
rename from typeshed/third_party/2/thrift/transport/TSocket.pyi
rename to typeshed/third_party/2and3/thrift/transport/TSocket.pyi
diff --git a/typeshed/third_party/2/thrift/transport/TTransport.pyi b/typeshed/third_party/2and3/thrift/transport/TTransport.pyi
similarity index 100%
rename from typeshed/third_party/2/thrift/transport/TTransport.pyi
rename to typeshed/third_party/2and3/thrift/transport/TTransport.pyi
diff --git a/typeshed/third_party/2/thrift/transport/__init__.pyi b/typeshed/third_party/2and3/thrift/transport/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2/thrift/transport/__init__.pyi
rename to typeshed/third_party/2and3/thrift/transport/__init__.pyi
diff --git a/typeshed/third_party/2and3/yaml/__init__.pyi b/typeshed/third_party/2and3/yaml/__init__.pyi
index 8f2edec..9c6c197 100644
--- a/typeshed/third_party/2and3/yaml/__init__.pyi
+++ b/typeshed/third_party/2and3/yaml/__init__.pyi
@@ -1,10 +1,11 @@
-from typing import Any
+from typing import Any, Iterator, Union, IO
 from yaml.error import *  # noqa: F403
 from yaml.tokens import *  # noqa: F403
 from yaml.events import *  # noqa: F403
 from yaml.nodes import *  # noqa: F403
 from yaml.loader import *  # noqa: F403
 from yaml.dumper import *  # noqa: F403
+from . import resolver  # Help mypy a bit; this is implied by loader and dumper
 # TODO: stubs for cyaml?
 # from cyaml import *
 
@@ -14,10 +15,10 @@ def scan(stream, Loader=...): ...
 def parse(stream, Loader=...): ...
 def compose(stream, Loader=...): ...
 def compose_all(stream, Loader=...): ...
-def load(stream, Loader=...): ...
-def load_all(stream, Loader=...): ...
-def safe_load(stream): ...
-def safe_load_all(stream): ...
+def load(stream: Union[str, IO[str]], Loader=...) -> Any: ...
+def load_all(stream: Union[str, IO[str]], Loader=...) -> Iterator[Any]: ...
+def safe_load(stream: Union[str, IO[str]]) -> Any: ...
+def safe_load_all(stream: Union[str, IO[str]]) -> Iterator[Any]: ...
 def emit(events, stream=..., Dumper=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=...): ...
 def serialize_all(nodes, stream=..., Dumper=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=..., encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...): ...
 def serialize(node, stream=..., Dumper=..., **kwds): ...
diff --git a/typeshed/third_party/3/dateutil/parser.pyi b/typeshed/third_party/3/dateutil/parser.pyi
index fcd9775..f7b90b0 100644
--- a/typeshed/third_party/3/dateutil/parser.pyi
+++ b/typeshed/third_party/3/dateutil/parser.pyi
@@ -1,52 +1,45 @@
-from typing import List, Tuple, Optional, Callable, Union, IO, Any, Dict
-from datetime import datetime
+from typing import List, Tuple, Optional, Callable, Union, IO, Any, Dict, Mapping, Text
+from datetime import datetime, tzinfo
 
-__all__ = ...  # type: List[str]
+_FileOrStr = Union[bytes, Text, IO[str], IO[Any]]
 
+__all__ = ...  # type: List[str]
 
-class parserinfo:
+class parserinfo(object):
     JUMP = ...  # type: List[str]
     WEEKDAYS = ...  # type: List[Tuple[str, str]]
     MONTHS = ...  # type: List[Tuple[str, str]]
     HMS = ...  # type: List[Tuple[str, str, str]]
-    AMPM = ...  # type: List[Tuple[str, str, str]]
+    AMPM = ...  # type: List[Tuple[str, str]]
     UTCZONE = ...  # type: List[str]
     PERTAIN = ...  # type: List[str]
     TZOFFSET = ...  # type: Dict[str, int]
 
     def __init__(self, dayfirst: bool=..., yearfirst: bool=...) -> None: ...
-    def jump(self, name: str) -> bool: ...
-    def weekday(self, name: str) -> str: ...
-    def month(self, name: str) -> str: ...
-    def hms(self, name: str) -> str: ...
-    def ampm(self, name: str) -> str: ...
-    def pertain(self, name: str) -> bool: ...
-    def utczone(self, name: str) -> bool: ...
-    def tzoffset(self, name: str) -> int: ...
+    def jump(self, name: Text) -> bool: ...
+    def weekday(self, name: Text) -> Optional[int]: ...
+    def month(self, name: Text) -> Optional[int]: ...
+    def hms(self, name: Text) -> Optional[int]: ...
+    def ampm(self, name: Text) -> Optional[int]: ...
+    def pertain(self, name: Text) -> bool: ...
+    def utczone(self, name: Text) -> bool: ...
+    def tzoffset(self, name: Text) -> Optional[int]: ...
     def convertyear(self, year: int) -> int: ...
-    def validate(self, year: datetime) -> bool: ...
-
+    def validate(self, res: datetime) -> bool: ...
 
-class parser:
-    def __init__(self, info: parserinfo=...) -> None: ...
-
-    def parse(
-        self,
-        timestr: Union[str, bytes, IO[Any]],
-        default: Optional[datetime],
-        ignoretz: bool=...,
-        tzinfos: Any =...,
-    ) -> datetime: ...
+class parser(object):
+    def __init__(self, info: Optional[parserinfo] = ...) -> None: ...
+    def parse(self, timestr: _FileOrStr,
+              default: Optional[datetime] = ...,
+              ignoretz: bool = ..., tzinfos: Optional[Mapping[Text, tzinfo]] = ...,
+              **kwargs: Any) -> datetime: ...
 
 DEFAULTPARSER = ...  # type: parser
-
-
-def parse(timestr: Union[str, bytes, IO[Any]], parserinfo: parserinfo=..., **kwargs) -> datetime:
-    ...
-
-
-class _tzparser:
-    ...
-
+def parse(timestr: _FileOrStr, parserinfo: Optional[parserinfo] = ..., **kwargs: Any) -> datetime: ...
+class _tzparser: ...
 
 DEFAULTTZPARSER = ...  # type: _tzparser
+
+class InvalidDatetimeError(ValueError): ...
+class InvalidDateError(InvalidDatetimeError): ...
+class InvalidTimeError(InvalidDatetimeError): ...
diff --git a/typeshed/third_party/3/dateutil/relativedelta.pyi b/typeshed/third_party/3/dateutil/relativedelta.pyi
index f221a20..71b3aee 100644
--- a/typeshed/third_party/3/dateutil/relativedelta.pyi
+++ b/typeshed/third_party/3/dateutil/relativedelta.pyi
@@ -1,10 +1,13 @@
-from typing import Optional, overload, Union, List
+from typing import overload, Any, List, Optional, SupportsFloat, TypeVar, Union
 from datetime import date, datetime, timedelta
 
 __all__ = ...  # type: List[str]
 
+_SelfT = TypeVar('_SelfT', bound=relativedelta)
+_DateT = TypeVar('_DateT', date, datetime)
 
-class weekday:
+
+class weekday(object):
     def __init__(self, weekday: int, n: Optional[int]=...) -> None: ...
 
     def __call__(self, n: int) -> 'weekday': ...
@@ -25,7 +28,7 @@ SA = ...  # type: weekday
 SU = ...  # type: weekday
 
 
-class relativedelta:
+class relativedelta(object):
     def __init__(self,
                  dt1: Optional[date]=...,
                  dt2: Optional[date]=...,
@@ -42,71 +45,39 @@ class relativedelta:
                  hour: Optional[int]=..., minute: Optional[int]=...,
                  second: Optional[int]=...,
                  microsecond: Optional[int]=...) -> None: ...
-
     @property
     def weeks(self) -> int: ...
-
     @weeks.setter
     def weeks(self, value: int) -> None: ...
-
-    def normalized(self) -> 'relativedelta': ...
-
+    def normalized(self: _SelfT) -> _SelfT: ...
     # TODO: use Union when mypy will handle it properly in overloaded operator
     # methods (#2129, #1442, #1264 in mypy)
     @overload
-    def __add__(self, other: 'relativedelta') -> 'relativedelta': ...
-
+    def __add__(self: _SelfT, other: relativedelta) -> _SelfT: ...
     @overload
-    def __add__(self, other: timedelta) -> 'relativedelta': ...
-
+    def __add__(self: _SelfT, other: timedelta) -> _SelfT: ...
     @overload
-    def __add__(self, other: date) -> date: ...
-
+    def __add__(self, other: _DateT) -> _DateT: ...
     @overload
-    def __add__(self, other: datetime) -> datetime: ...
-
-    @overload
-    def __radd__(self, other: 'relativedelta') -> 'relativedelta': ...
-
-    @overload
-    def __radd__(self, other: timedelta) -> 'relativedelta': ...
-
-    @overload
-    def __radd__(self, other: date) -> date: ...
-
+    def __radd__(self: _SelfT, other: relativedelta) -> _SelfT: ...
     @overload
-    def __radd__(self, other: datetime) -> datetime: ...
-
+    def __radd__(self: _SelfT, other: timedelta) -> _SelfT: ...
     @overload
-    def __rsub__(self, other: 'relativedelta') -> 'relativedelta': ...
-
+    def __radd__(self, other: _DateT) -> _DateT: ...
     @overload
-    def __rsub__(self, other: timedelta) -> 'relativedelta': ...
-
+    def __rsub__(self: _SelfT, other: relativedelta) -> _SelfT: ...
     @overload
-    def __rsub__(self, other: date) -> date: ...
-
+    def __rsub__(self: _SelfT, other: timedelta) -> _SelfT: ...
     @overload
-    def __rsub__(self, other: datetime) -> datetime: ...
-
-    def __sub__(self, other: 'relativedelta') -> 'relativedelta': ...
-
-    def __neg__(self) -> 'relativedelta': ...
-
+    def __rsub__(self, other: _DateT) -> _DateT: ...
+    def __sub__(self: _SelfT, other: relativedelta) -> _SelfT: ...
+    def __neg__(self: _SelfT) -> _SelfT: ...
     def __bool__(self) -> bool: ...
-
     def __nonzero__(self) -> bool: ...
-
-    def __mul__(self, other: float) -> 'relativedelta': ...
-
-    def __rmul__(self, other: float) -> 'relativedelta': ...
-
+    def __mul__(self: _SelfT, other: SupportsFloat) -> _SelfT: ...
+    def __rmul__(self: _SelfT, other: SupportsFloat) -> _SelfT: ...
     def __eq__(self, other) -> bool: ...
-
     def __ne__(self, other: object) -> bool: ...
-
-    def __div__(self, other: float) -> 'relativedelta': ...
-
-    def __truediv__(self, other: float) -> 'relativedelta': ...
-
+    def __div__(self: _SelfT, other: SupportsFloat) -> _SelfT: ...
+    def __truediv__(self: _SelfT, other: SupportsFloat) -> _SelfT: ...
     def __repr__(self) -> str: ...
diff --git a/typeshed/third_party/3/dateutil/tz/__init__.pyi b/typeshed/third_party/3/dateutil/tz/__init__.pyi
index 4c8a6d2..68cfb9e 100644
--- a/typeshed/third_party/3/dateutil/tz/__init__.pyi
+++ b/typeshed/third_party/3/dateutil/tz/__init__.pyi
@@ -1 +1,12 @@
-from .tz import tzutc, tzoffset, tzlocal, tzfile, tzrange, tzstr, tzical, gettz, datetime_exists, datetime_ambiguous
+from .tz import (
+    tzutc as tzutc,
+    tzoffset as tzoffset,
+    tzlocal as tzlocal,
+    tzfile as tzfile,
+    tzrange as tzrange,
+    tzstr as tzstr,
+    tzical as tzical,
+    gettz as gettz,
+    datetime_exists as datetime_exists,
+    datetime_ambiguous as datetime_ambiguous,
+)
diff --git a/typeshed/third_party/3/dateutil/tz/tz.pyi b/typeshed/third_party/3/dateutil/tz/tz.pyi
index e693db3..5fbc932 100644
--- a/typeshed/third_party/3/dateutil/tz/tz.pyi
+++ b/typeshed/third_party/3/dateutil/tz/tz.pyi
@@ -1,9 +1,11 @@
-from typing import Any, Optional, Union, IO, Tuple, List
+from typing import Any, Optional, Union, IO, Text, Tuple, List
 import datetime
 from ._common import tzname_in_python2 as tzname_in_python2, _tzinfo as _tzinfo
 from ._common import tzrangebase as tzrangebase, enfold as enfold
 from ..relativedelta import relativedelta
 
+_FileObj = Union[str, Text, IO[str], IO[Text]]
+
 ZERO = ...  # type: datetime.timedelta
 EPOCH = ...  # type: datetime.datetime
 EPOCHORDINAL = ...  # type: int
@@ -47,7 +49,7 @@ class _ttinfo:
     def __ne__(self, other): ...
 
 class tzfile(_tzinfo):
-    def __init__(self, fileobj: Union[str, IO[str]], filename: Optional[str] = ...) -> None: ...
+    def __init__(self, fileobj: _FileObj, filename: Optional[Text] = ...) -> None: ...
     def is_ambiguous(self, dt: Optional[datetime.datetime], idx: Optional[int] = ...) -> bool: ...
     def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
     def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
@@ -60,22 +62,22 @@ class tzfile(_tzinfo):
 
 class tzrange(tzrangebase):
     hasdst = ...  # type: bool
-    def __init__(self, stdabbr: str, stdoffset: Union[int, datetime.timedelta, None] = ..., dstabbr: Optional[str] = ..., dstoffset: Union[int, datetime.timedelta, None] = ..., start: Optional[relativedelta] = ..., end: Optional[relativedelta] = ...) -> None: ...
+    def __init__(self, stdabbr: Text, stdoffset: Union[int, datetime.timedelta, None] = ..., dstabbr: Optional[Text] = ..., dstoffset: Union[int, datetime.timedelta, None] = ..., start: Optional[relativedelta] = ..., end: Optional[relativedelta] = ...) -> None: ...
     def transitions(self, year: int) -> Tuple[datetime.datetime, datetime.datetime]: ...
     def __eq__(self, other): ...
 
 class tzstr(tzrange):
     hasdst = ...  # type: bool
-    def __init__(self, s: Union[bytes, str, IO[str]], posix_offset: bool = ...) -> None: ...
+    def __init__(self, s: Union[bytes, _FileObj], posix_offset: bool = ...) -> None: ...
 
 class tzical:
-    def __init__(self, fileobj: Union[str, IO[str]]) -> None: ...
+    def __init__(self, fileobj: _FileObj) -> None: ...
     def keys(self): ...
     def get(self, tzid: Optional[Any] = ...): ...
 
 TZFILES = ...  # type: List[str]
 TZPATHS = ...  # type: List[str]
 
-def gettz(name: Optional[str] = ...) -> Optional[datetime.tzinfo]: ...
+def gettz(name: Optional[Text] = ...) -> Optional[datetime.tzinfo]: ...
 def datetime_exists(dt: datetime.datetime, tz: Optional[datetime.tzinfo] = ...) -> bool: ...
 def datetime_ambiguous(dt: datetime.datetime, tz: Optional[datetime.tzinfo] = ...) -> bool: ...
diff --git a/typeshed/third_party/3/enum.pyi b/typeshed/third_party/3/enum.pyi
index 1501be2..001f6e9 100644
--- a/typeshed/third_party/3/enum.pyi
+++ b/typeshed/third_party/3/enum.pyi
@@ -2,7 +2,7 @@ import sys
 from typing import List, Any, TypeVar, Union, Iterable, Iterator, TypeVar, Generic, Type, Sized, Reversible, Container, Mapping
 from abc import ABCMeta
 
-_T = TypeVar('_T', bound=Enum)
+_T = TypeVar('_T')
 _S = TypeVar('_S', bound=Type[Enum])
 
 # Note: EnumMeta actually subclasses type directly, not ABCMeta.
diff --git a/typeshed/third_party/3/itsdangerous.pyi b/typeshed/third_party/3/itsdangerous.pyi
index 4ed0402..097f983 100644
--- a/typeshed/third_party/3/itsdangerous.pyi
+++ b/typeshed/third_party/3/itsdangerous.pyi
@@ -30,15 +30,15 @@ class BadData(Exception):
 
 class BadPayload(BadData):
     original_error = ...  # type: Optional[Exception]
-    def __init__(self, message: str, original_error: Optional[Exception]=None) -> None: ...
+    def __init__(self, message: str, original_error: Optional[Exception] = ...) -> None: ...
 
 class BadSignature(BadData):
     payload = ...  # type: Optional[Any]
-    def __init__(self, message: str, payload: Optional[Any]=None) -> None: ...
+    def __init__(self, message: str, payload: Optional[Any] = ...) -> None: ...
 
 class BadTimeSignature(BadSignature):
     date_signed = ...  # type: Optional[int]
-    def __init__(self, message: str, payload: Optional[Any]=None, date_signed: Optional[int]=None) -> None: ...
+    def __init__(self, message: str, payload: Optional[Any] = ..., date_signed: Optional[int] = ...) -> None: ...
 
 class BadHeader(BadSignature):
     header = ...  # type: Any
@@ -62,7 +62,7 @@ class NoneAlgorithm(SigningAlgorithm):
 class HMACAlgorithm(SigningAlgorithm):
     default_digest_method = ...  # type: Callable
     digest_method = ...  # type: Callable
-    def __init__(self, digest_method: Optional[Callable]=None) -> None: ...
+    def __init__(self, digest_method: Optional[Callable] = ...) -> None: ...
     def get_signature(self, key: _bytes_like, value: _bytes_like) -> bytes: ...
 
 class Signer:
@@ -74,10 +74,10 @@ class Signer:
     key_derivation = ...  # type: str
     digest_method = ...  # type: Callable
     algorithm = ...  # type: SigningAlgorithm
-    def __init__(self, secret_key: _can_become_bytes, salt: Optional[_can_become_bytes]=None, sep: Optional[_can_become_bytes]='',
-                 key_derivation: Optional[str]=None,
-                 digest_method: Optional[Callable]=None,
-                 algorithm: Optional[SigningAlgorithm]=None) -> None: ...
+    def __init__(self, secret_key: _can_become_bytes, salt: Optional[_can_become_bytes] = ..., sep: Optional[_can_become_bytes]='',
+                 key_derivation: Optional[str] = ...,
+                 digest_method: Optional[Callable] = ...,
+                 algorithm: Optional[SigningAlgorithm] = ...) -> None: ...
     def derive_key(self) -> bytes: ...
     def get_signature(self, value: _bytes_like) -> bytes: ...
     def sign(self, value: _bytes_like) -> bytes: ...
@@ -89,8 +89,8 @@ class TimestampSigner(Signer):
     def get_timestamp(self) -> int: ...
     def timestamp_to_datetime(self, ts: int) -> datetime: ...
     def sign(self, value: _bytes_like) -> bytes: ...
-    def unsign(self, value: _can_become_bytes, max_age: Optional[int]=None, return_timestamp: bool=False) -> Any: ...
-    def validate(self, signed_value: _can_become_bytes, max_age: Optional[int]=None) -> bool: ...
+    def unsign(self, value: _can_become_bytes, max_age: Optional[int] = ..., return_timestamp: bool = ...) -> Any: ...
+    def validate(self, signed_value: _can_become_bytes, max_age: Optional[int] = ...) -> bool: ...
 
 class Serializer:
     default_serializer = ...  # type: _serializer
@@ -101,21 +101,21 @@ class Serializer:
     is_text_serializer = ...  # type: bool
     signer = ...  # type: Signer
     signer_kwargs = ...  # type: MutableMapping
-    def __init__(self, secret_key: _can_become_bytes, salt: Optional[_can_become_bytes]=b'', serializer: _serializer=None, signer: Optional[Callable[..., Signer]]=None, signer_kwargs: Optional[MutableMapping]=None) -> None: ...
+    def __init__(self, secret_key: _can_become_bytes, salt: Optional[_can_become_bytes]=b'', serializer: _serializer=None, signer: Optional[Callable[..., Signer]] = ..., signer_kwargs: Optional[MutableMapping] = ...) -> None: ...
     def load_payload(self, payload: Any, serializer: _serializer=None) -> Any: ...
     def dump_payload(self, *args, **kwargs) -> bytes: ...
-    def make_signer(self, salt: Optional[_can_become_bytes]=None) -> Signer: ...
-    def dumps(self, obj: Any, salt: Optional[_can_become_bytes]=None) -> _str_like: ...
-    def dump(self, obj: Any, f: IO, salt: Optional[_can_become_bytes]=None) -> None: ...
-    def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes]=None) -> Any: ...
-    def load(self, f: IO, salt: Optional[_can_become_bytes]=None): ...
-    def loads_unsafe(self, s: _can_become_bytes, salt: Optional[_can_become_bytes]=None) -> Tuple[bool, Any]: ...
+    def make_signer(self, salt: Optional[_can_become_bytes] = ...) -> Signer: ...
+    def dumps(self, obj: Any, salt: Optional[_can_become_bytes] = ...) -> _str_like: ...
+    def dump(self, obj: Any, f: IO, salt: Optional[_can_become_bytes] = ...) -> None: ...
+    def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes] = ...) -> Any: ...
+    def load(self, f: IO, salt: Optional[_can_become_bytes] = ...): ...
+    def loads_unsafe(self, s: _can_become_bytes, salt: Optional[_can_become_bytes] = ...) -> Tuple[bool, Any]: ...
     def load_unsafe(self, f: IO, *args, **kwargs) -> Tuple[bool, Any]: ...
 
 class TimedSerializer(Serializer):
     default_signer = ...  # type: Callable[..., TimestampSigner]
-    def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes]=None, max_age: Optional[int]=None, return_timestamp: bool=False) -> Any: ...
-    def loads_unsafe(self, s: _can_become_bytes, salt: Optional[_can_become_bytes]=None, max_age: Optional[int]=None) -> Tuple[bool, Any]: ...
+    def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes] = ..., max_age: Optional[int] = ..., return_timestamp: bool = ...) -> Any: ...
+    def loads_unsafe(self, s: _can_become_bytes, salt: Optional[_can_become_bytes] = ..., max_age: Optional[int] = ...) -> Tuple[bool, Any]: ...
 
 class JSONWebSignatureSerializer(Serializer):
     jws_algorithms = ...  # type: MutableMapping[str, SigningAlgorithm]
@@ -123,22 +123,22 @@ class JSONWebSignatureSerializer(Serializer):
     default_serializer = ...  # type: Any
     algorithm_name = ...  # type: str
     algorithm = ...  # type: Any
-    def __init__(self, secret_key: _can_become_bytes, salt: Optional[_can_become_bytes]=None, serializer: _serializer=None, signer: Optional[Callable[..., Signer]]=None, signer_kwargs: Optional[MutableMapping]=None, algorithm_name: Optional[str]=None) -> None: ...
-    def load_payload(self, payload: Any, serializer: _serializer = None, return_header: bool=False) -> Any: ...
+    def __init__(self, secret_key: _can_become_bytes, salt: Optional[_can_become_bytes] = ..., serializer: _serializer=None, signer: Optional[Callable[..., Signer]] = ..., signer_kwargs: Optional[MutableMapping] = ..., algorithm_name: Optional[str] = ...) -> None: ...
+    def load_payload(self, payload: Any, serializer: _serializer = None, return_header: bool = ...) -> Any: ...
     def dump_payload(self, *args, **kwargs) -> bytes: ...
     def make_algorithm(self, algorithm_name: str) -> SigningAlgorithm: ...
-    def make_signer(self, salt: Optional[_can_become_bytes]=None, algorithm_name: Optional[str]=None) -> Signer: ...
+    def make_signer(self, salt: Optional[_can_become_bytes] = ..., algorithm_name: Optional[str] = ...) -> Signer: ...
     def make_header(self, header_fields=Optional[MutableMapping]) -> MutableMapping: ...
-    def dumps(self, obj: Any, salt: Optional[_can_become_bytes]=None, header_fields: Optional[MutableMapping]=...) -> str: ...
-    def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes]=None, return_header: bool=False) -> Any: ...
-    def loads_unsafe(self, s: _can_become_bytes, salt: Optional[_can_become_bytes]=None, return_header: bool=False) -> Tuple[bool, Any]: ...
+    def dumps(self, obj: Any, salt: Optional[_can_become_bytes] = ..., header_fields: Optional[MutableMapping]=...) -> str: ...
+    def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes] = ..., return_header: bool = ...) -> Any: ...
+    def loads_unsafe(self, s: _can_become_bytes, salt: Optional[_can_become_bytes] = ..., return_header: bool = ...) -> Tuple[bool, Any]: ...
 
 class TimedJSONWebSignatureSerializer(JSONWebSignatureSerializer):
     DEFAULT_EXPIRES_IN = ...  # type: int
     expires_in = ...  # type: int
-    def __init__(self, secret_key: _can_become_bytes, expires_in: Optional[int]=None, **kwargs) -> None: ...
+    def __init__(self, secret_key: _can_become_bytes, expires_in: Optional[int] = ..., **kwargs) -> None: ...
     def make_header(self, header_fields=Optional[MutableMapping]) -> MutableMapping: ...
-    def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes]=None, return_header: bool=False) -> Any: ...
+    def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes] = ..., return_header: bool = ...) -> Any: ...
     def get_issue_date(self, header: MutableMapping) -> Optional[datetime]: ...
     def now(self) -> int: ...
 
diff --git a/typeshed/third_party/3/lxml/__init__.pyi b/typeshed/third_party/3/lxml/__init__.pyi
deleted file mode 100644
index e69de29..0000000
diff --git a/typeshed/third_party/3/lxml/etree.pyi b/typeshed/third_party/3/lxml/etree.pyi
deleted file mode 100644
index 958c82a..0000000
--- a/typeshed/third_party/3/lxml/etree.pyi
+++ /dev/null
@@ -1,134 +0,0 @@
-# Hand-written stub for lxml.etree as used by mypy.report.
-# This is *far* from complete, and the stubgen-generated ones crash mypy.
-# Any use of `Any` below means I couldn't figure out the type.
-
-import typing
-from typing import Any, Dict, List, MutableMapping, Tuple, Union, Optional
-from typing import Iterable, Iterator, SupportsBytes
-
-
-# We do *not* want `typing.AnyStr` because it is a `TypeVar`, which is an
-# unnecessary constraint. It seems reasonable to constrain each
-# List/Dict argument to use one type consistently, though, and it is
-# necessary in order to keep these brief.
-_AnyStr = Union[str, bytes]
-_ListAnyStr = Union[List[str], List[bytes]]
-_DictAnyStr = Union[Dict[str, str], Dict[bytes, bytes]]
-_Dict_Tuple2AnyStr_Any = Union[Dict[Tuple[str, str], Any], Tuple[bytes, bytes], Any]
-
-
-class ElementChildIterator(Iterator['_Element']):
-    def __iter__(self) -> 'ElementChildIterator': ...
-    def __next__(self) -> '_Element': ...
-
-class _Element(Iterable['_Element']):
-    def addprevious(self, element: '_Element') -> None: ...
-    def addnext(self, element: '_Element') -> None: ...
-    def clear(self) -> None: ...
-    def get(self, key: _AnyStr, default: Optional[_AnyStr] = ...) -> _AnyStr: ...
-    def xpath(self, _path: _AnyStr, namespaces: Optional[_DictAnyStr] = ..., extensions: Any = ..., smart_strings: bool = ..., **_variables: Any) -> Any: ...
-    # indeed returns a Union[bool, float, _AnyStr, List[Union[ElementBase, _AnyStr, Tuple[]]]]: ...
-    # http://lxml.de/xpathxslt.html#xpath-return-values
-    attrib = ...  # type: MutableMapping[str, str]
-    text = ...  # type: _AnyStr
-    tag = ...  # type: str
-    def append(self, element: '_Element') -> '_Element': ...
-    def __iter__(self) -> ElementChildIterator: ...
-
-class ElementBase(_Element): ...
-
-class _ElementTree:
-    def write(self,
-              file: Union[_AnyStr, typing.IO],
-              encoding: _AnyStr = ...,
-              method: _AnyStr = ...,
-              pretty_print: bool = ...,
-              xml_declaration: Any = ...,
-              with_tail: Any = ...,
-              standalone: bool = ...,
-              compression: int = ...,
-              exclusive: bool = ...,
-              with_comments: bool = ...,
-              inclusive_ns_prefixes: _ListAnyStr = ...) -> None: ...
-    def xpath(self, _path: _AnyStr, namespaces: Optional[_DictAnyStr] = ..., extensions: Any = ..., smart_strings: bool = ..., **_variables: Any) -> Any: ...
-
-class _XSLTResultTree(SupportsBytes): ...
-
-class _XSLTQuotedStringParam: ...
-
-class XMLParser: ...
-
-class XMLSchema:
-    def __init__(self,
-                 etree: Union[_Element, _ElementTree] = ...,
-                 file: Union[_AnyStr, typing.IO] = ...) -> None: ...
-    def assertValid(self, etree: Union[_Element, _ElementTree]) -> None: ...
-
-class XSLTAccessControl: ...
-
-class XSLT:
-    def __init__(self,
-                 xslt_input: Union[_Element, _ElementTree],
-                 extensions: _Dict_Tuple2AnyStr_Any = ...,
-                 regexp: bool = ...,
-                 access_control: XSLTAccessControl = ...) -> None: ...
-    def __call__(self,
-                 _input: Union[_Element, _ElementTree],
-                 profile_run: bool = ...,
-                 **kwargs: Union[_AnyStr, _XSLTQuotedStringParam]) -> _XSLTResultTree: ...
-    @staticmethod
-    def strparam(s: _AnyStr) -> _XSLTQuotedStringParam: ...
-
-def Element(_tag: _AnyStr,
-            attrib: _DictAnyStr = ...,
-            nsmap: _DictAnyStr = ...,
-            **extra: _AnyStr) -> _Element: ...
-def SubElement(_parent: _Element, _tag: _AnyStr,
-               attrib: _DictAnyStr = ...,
-               nsmap: _DictAnyStr = ...,
-               **extra: _AnyStr) -> _Element: ...
-def ElementTree(element: _Element = ...,
-                file: Union[_AnyStr, typing.IO] = ...,
-                parser: XMLParser = ...) -> _ElementTree: ...
-def ProcessingInstruction(target: _AnyStr, text: _AnyStr = ...) -> _Element: ...
-def parse(source: Union[_AnyStr, typing.IO],
-          parser: XMLParser = ...,
-          base_url: _AnyStr = ...) -> _ElementTree: ...
-def fromstring(text: _AnyStr,
-               parser: XMLParser = ...,
-               *,
-               base_url: _AnyStr = ...) -> _Element: ...
-def tostring(element_or_tree: Union[_Element, _ElementTree],
-             encoding: Union[str, type] = ...,
-             method: str = ...,
-             xml_declaration: bool = ...,
-             pretty_print: bool = ...,
-             with_tail: bool = ...,
-             standalone: bool = ...,
-             doctype: str = ...,
-             exclusive: bool = ...,
-             with_comments: bool = ...,
-             inclusive_ns_prefixes: Any = ...) -> _AnyStr: ...
-
-class _ErrorLog: ...
-
-class Error(Exception): ...
-
-class LxmlError(Error):
-    def __init__(self, message: Any, error_log: _ErrorLog = ...) -> None: ...
-    error_log = ...  # type: _ErrorLog
-
-class DocumentInvalid(LxmlError): ...
-class LxmlSyntaxError(LxmlError, SyntaxError): ...
-class ParseError(LxmlSyntaxError): ...
-class XMLSyntaxError(ParseError): ...
-
-class _Validator: ...
-
-class DTD(_Validator):
-    def __init__(self,
-                 file: Union[_AnyStr, typing.IO] = ...,
-                 *,
-                 external_id: Any = ...) -> None: ...
-
-    def assertValid(self, etree: _Element) -> None: ...
diff --git a/typeshed/third_party/3/lxml/objectify.pyi b/typeshed/third_party/3/lxml/objectify.pyi
deleted file mode 100644
index 74293c4..0000000
--- a/typeshed/third_party/3/lxml/objectify.pyi
+++ /dev/null
@@ -1,13 +0,0 @@
-# Hand-written stub, incomplete
-
-from typing import Union
-
-from lxml.etree import ElementBase, XMLParser
-
-class ObjectifiedElement(ElementBase):
-    pass
-
-def fromstring(text: Union[bytes, str],
-               parser: XMLParser = ...,
-               *,
-               base_url: Union[bytes, str] = ...) -> ObjectifiedElement: ...
diff --git a/typeshed/third_party/3/six/__init__.pyi b/typeshed/third_party/3/six/__init__.pyi
index 0631a69..4691daa 100644
--- a/typeshed/third_party/3/six/__init__.pyi
+++ b/typeshed/third_party/3/six/__init__.pyi
@@ -29,6 +29,7 @@ from mypy_extensions import NoReturn
 from io import StringIO as StringIO, BytesIO as BytesIO
 from builtins import next as next
 from functools import wraps as wraps
+from . import moves
 
 _T = TypeVar('_T')
 _K = TypeVar('_K')
@@ -84,16 +85,16 @@ def byte2int(bs: binary_type) -> int: ...
 def indexbytes(buf: binary_type, i: int) -> int: ...
 def iterbytes(buf: binary_type) -> typing.Iterator[int]: ...
 
-def assertCountEqual(self: unittest.TestCase, first: Iterable[_T], second: Iterable[_T], msg: Optional[str] = None) -> None: ...
+def assertCountEqual(self: unittest.TestCase, first: Iterable[_T], second: Iterable[_T], msg: Optional[str] = ...) -> None: ...
 @overload
-def assertRaisesRegex(self: unittest.TestCase, msg: Optional[str] = None) -> Any: ...
+def assertRaisesRegex(self: unittest.TestCase, msg: Optional[str] = ...) -> Any: ...
 @overload
 def assertRaisesRegex(self: unittest.TestCase, callable_obj: Callable[..., Any], *args: Any, **kwargs: Any) -> Any: ...
-def assertRegex(self: unittest.TestCase, text: AnyStr, expected_regex: Union[AnyStr, Pattern[AnyStr]], msg: Optional[str] = None) -> None: ...
+def assertRegex(self: unittest.TestCase, text: AnyStr, expected_regex: Union[AnyStr, Pattern[AnyStr]], msg: Optional[str] = ...) -> None: ...
 
 exec_ = exec
 
-def reraise(tp: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[types.TracebackType] = None) -> NoReturn: ...
+def reraise(tp: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[types.TracebackType] = ...) -> NoReturn: ...
 def raise_from(value: BaseException, from_value: Optional[BaseException]) -> NoReturn: ...
 
 print_ = print
diff --git a/typeshed/third_party/3/werkzeug/wrappers.pyi b/typeshed/third_party/3/werkzeug/wrappers.pyi
index 031e377..676d1be 100644
--- a/typeshed/third_party/3/werkzeug/wrappers.pyi
+++ b/typeshed/third_party/3/werkzeug/wrappers.pyi
@@ -20,7 +20,7 @@ class BaseRequest:
     disable_data_descriptor = ...  # type: Any
     environ = ...  # type: Mapping[str, object]
     shallow = ...  # type: Any
-    def __init__(self, environ: Mapping[str, object], populate_request: bool=True, shallow: bool=False) -> None: ...
+    def __init__(self, environ: Mapping[str, object], populate_request: bool = ..., shallow: bool = ...) -> None: ...
     @property
     def url_charset(self) -> str: ...
     @classmethod
@@ -37,7 +37,7 @@ class BaseRequest:
     input_stream = ...  # type: Any
     args = ...  # type: ImmutableMultiDict
     def data(self): ...
-    def get_data(self, cache: bool=True, as_text: bool=False, parse_form_data: bool=False) -> bytes: ...
+    def get_data(self, cache: bool = ..., as_text: bool = ..., parse_form_data: bool = ...) -> bytes: ...
     form = ...  # type: ImmutableMultiDict
     values = ...  # type: CombinedMultiDict
     files = ...  # type: MultiDict
@@ -76,14 +76,14 @@ class BaseResponse:
     status = ...  # type: str
     direct_passthrough = ...  # type: bool
     response = ...  # type: Iterable[bytes]
-    def __init__(self, response: Optional[Union[Iterable[bytes], bytes]]=None,
-                 status: Optional[Union[str, int]]=None,
+    def __init__(self, response: Optional[Union[Iterable[bytes], bytes]] = ...,
+                 status: Optional[Union[str, int]] = ...,
                  headers: Optional[Union[Headers,
                                          Mapping[str, str],
                                          Sequence[Tuple[str, str]]]]=None,
-                 mimetype: Optional[str] = None,
-                 content_type: Optional[str] = None,
-                 direct_passthrough: bool=False) -> None: ...
+                 mimetype: Optional[str] = ...,
+                 content_type: Optional[str] = ...,
+                 direct_passthrough: bool = ...) -> None: ...
     def call_on_close(self, func): ...
     @classmethod
     def force_type(cls, response, environ=None): ...

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/mypy.git



More information about the debian-med-commit mailing list