[med-svn] [mypy] 01/03: Imported Upstream version 0.4.3

Michael Crusoe misterc-guest at moszumanska.debian.org
Tue Aug 2 14:35:46 UTC 2016


This is an automated email from the git hooks/post-receive script.

misterc-guest pushed a commit to branch master
in repository mypy.

commit 8e12ae414acee36fc2eebbbe4f6d34bf0832643c
Author: Michael R. Crusoe <crusoe at ucdavis.edu>
Date:   Tue Aug 2 07:10:49 2016 -0700

    Imported Upstream version 0.4.3
---
 PKG-INFO                                           |    2 +-
 mypy/applytype.py                                  |    4 +-
 mypy/binder.py                                     |  264 +++++
 mypy/build.py                                      |  375 ++++---
 mypy/checker.py                                    | 1032 +++++++++-----------
 mypy/checkexpr.py                                  |  205 ++--
 mypy/checkmember.py                                |  109 +--
 mypy/checkstrformat.py                             |    6 +-
 mypy/constraints.py                                |   13 +-
 mypy/defaults.py                                   |    1 +
 mypy/docstring.py                                  |    6 +-
 mypy/erasetype.py                                  |   33 +-
 mypy/errors.py                                     |   66 +-
 mypy/expandtype.py                                 |   22 +-
 mypy/experiments.py                                |    1 +
 mypy/fastparse.py                                  |   41 +-
 mypy/fixup.py                                      |    9 +-
 mypy/git.py                                        |    6 +-
 mypy/infer.py                                      |    4 +-
 mypy/join.py                                       |   42 +-
 mypy/lex.py                                        |    2 +-
 mypy/main.py                                       |  215 ++--
 mypy/maptype.py                                    |    7 +-
 mypy/meet.py                                       |   74 +-
 mypy/messages.py                                   |   24 +-
 mypy/nodes.py                                      |  434 ++++----
 mypy/options.py                                    |   67 ++
 mypy/parse.py                                      |   70 +-
 mypy/replacetvars.py                               |   45 -
 mypy/report.py                                     |    2 +-
 mypy/sametypes.py                                  |   14 +-
 mypy/semanal.py                                    |  183 ++--
 mypy/solve.py                                      |   15 +-
 mypy/stats.py                                      |    5 +-
 mypy/strconv.py                                    |   23 +-
 mypy/stubgen.py                                    |    5 +-
 mypy/subtypes.py                                   |   17 +-
 mypy/traverser.py                                  |   85 +-
 mypy/treetransform.py                              |    3 -
 mypy/typeanal.py                                   |   44 +-
 mypy/typefixture.py                                |   25 +-
 mypy/types.py                                      |  238 ++++-
 mypy/version.py                                    |    2 +-
 setup.cfg                                          |    2 +-
 typeshed/stdlib/2.7/ConfigParser.pyi               |   10 +-
 .../{3/socketserver.pyi => 2.7/SocketServer.pyi}   |   13 +-
 typeshed/stdlib/2.7/StringIO.pyi                   |    1 +
 typeshed/stdlib/2.7/__builtin__.pyi                |   35 +-
 typeshed/stdlib/2.7/__future__.pyi                 |   14 +-
 typeshed/stdlib/2.7/argparse.pyi                   |  192 ----
 typeshed/stdlib/2.7/builtins.pyi                   |   35 +-
 typeshed/stdlib/2.7/collections.pyi                |   24 +-
 typeshed/stdlib/2.7/csv.pyi                        |   17 +-
 typeshed/stdlib/2.7/datetime.pyi                   |    2 +-
 typeshed/stdlib/2.7/difflib.pyi                    |   13 +-
 typeshed/stdlib/2.7/gettext.pyi                    |    2 +-
 typeshed/stdlib/2.7/json.pyi                       |   46 +-
 typeshed/stdlib/2.7/logging/__init__.pyi           |  239 -----
 typeshed/stdlib/2.7/logging/handlers.pyi           |  202 ----
 typeshed/stdlib/2.7/os/__init__.pyi                |   18 +-
 typeshed/stdlib/2.7/re.pyi                         |   38 +-
 typeshed/stdlib/2.7/socket.pyi                     |    4 +-
 typeshed/stdlib/2.7/stat.pyi                       |   14 +-
 typeshed/stdlib/2.7/subprocess.pyi                 |   59 +-
 typeshed/stdlib/2.7/sys.pyi                        |    6 +-
 typeshed/stdlib/2.7/tarfile.pyi                    |    2 +
 typeshed/stdlib/2.7/token.pyi                      |    2 +-
 typeshed/stdlib/2.7/traceback.pyi                  |    5 +-
 typeshed/stdlib/2.7/typing.pyi                     |   16 +-
 typeshed/stdlib/2.7/unittest.pyi                   |   18 +-
 typeshed/stdlib/2.7/urllib2.pyi                    |    4 +
 typeshed/stdlib/2.7/urlparse.pyi                   |   12 +-
 typeshed/stdlib/2.7/xml/etree/ElementTree.pyi      |    6 +-
 typeshed/stdlib/2and3/argparse.pyi                 |  154 +++
 typeshed/stdlib/2and3/logging/__init__.pyi         |  355 +++++++
 typeshed/stdlib/2and3/logging/config.pyi           |   29 +
 typeshed/stdlib/2and3/logging/handlers.pyi         |  223 +++++
 typeshed/stdlib/2and3/warnings.pyi                 |   56 +-
 typeshed/stdlib/3.2/xml/etree/ElementTree.pyi      |    6 +-
 typeshed/stdlib/3.3/xml/etree/ElementTree.pyi      |    6 +-
 typeshed/stdlib/3.4/asyncio/events.pyi             |    8 +-
 typeshed/stdlib/3.4/asyncio/futures.pyi            |    4 +-
 typeshed/stdlib/3.4/asyncio/tasks.pyi              |    4 +-
 typeshed/stdlib/3.4/enum.pyi                       |    2 +-
 typeshed/stdlib/3.4/xml/etree/ElementTree.pyi      |    6 +-
 typeshed/stdlib/3.5/xml/etree/ElementTree.pyi      |    6 +-
 typeshed/stdlib/3/_importlib_modulespec.pyi        |   44 +
 typeshed/stdlib/3/abc.pyi                          |    5 +
 typeshed/stdlib/3/argparse.pyi                     |  163 ----
 typeshed/stdlib/3/builtins.pyi                     |   19 +-
 typeshed/stdlib/3/collections/__init__.pyi         |   16 +-
 typeshed/stdlib/3/concurrent/futures/_base.pyi     |    2 +-
 typeshed/stdlib/3/concurrent/futures/process.pyi   |    2 +-
 typeshed/stdlib/3/configparser.pyi                 |    4 +-
 typeshed/stdlib/3/datetime.pyi                     |    2 +-
 typeshed/stdlib/3/difflib.pyi                      |   13 +-
 typeshed/stdlib/3/dis.pyi                          |   72 ++
 typeshed/stdlib/3/email/__init__.pyi               |   10 +-
 typeshed/stdlib/3/email/header.pyi                 |    2 +-
 typeshed/stdlib/3/email/message.pyi                |    5 +-
 typeshed/stdlib/3/heapq.pyi                        |    7 +-
 typeshed/stdlib/3/http/client.pyi                  |  266 +++--
 typeshed/stdlib/3/http/cookiejar.pyi               |  213 ++--
 typeshed/stdlib/3/http/cookies.pyi                 |   63 +-
 typeshed/stdlib/3/http/server.pyi                  |   63 ++
 typeshed/stdlib/3/importlib/__init__.pyi           |   18 +
 typeshed/stdlib/3/importlib/abc.pyi                |   84 ++
 typeshed/stdlib/3/importlib/machinery.pyi          |  124 +++
 typeshed/stdlib/3/importlib/util.pyi               |   47 +
 typeshed/stdlib/3/io.pyi                           |    2 +-
 typeshed/stdlib/3/json.pyi                         |    2 +-
 typeshed/stdlib/3/logging/__init__.pyi             |  239 -----
 typeshed/stdlib/3/logging/handlers.pyi             |  200 ----
 typeshed/stdlib/3/multiprocessing/__init__.pyi     |   36 +-
 typeshed/stdlib/3/opcode.pyi                       |   18 +
 typeshed/stdlib/3/os/__init__.pyi                  |   35 +-
 typeshed/stdlib/3/queue.pyi                        |    2 +-
 typeshed/stdlib/3/re.pyi                           |    6 +-
 typeshed/stdlib/3/socketserver.pyi                 |   13 +-
 typeshed/stdlib/3/subprocess.pyi                   |   66 +-
 typeshed/stdlib/3/sys.pyi                          |    5 +-
 typeshed/stdlib/3/token.pyi                        |    2 +-
 typeshed/stdlib/3/traceback.pyi                    |    4 +-
 typeshed/stdlib/3/types.pyi                        |   29 +-
 typeshed/stdlib/3/typing.pyi                       |    4 +-
 typeshed/stdlib/3/unittest.pyi                     |  432 +++++---
 typeshed/stdlib/3/urllib/parse.pyi                 |   24 +-
 typeshed/stdlib/3/xml/etree/ElementTree.pyi        |    6 +-
 typeshed/{runtests.py => tests/mypy_test.py}       |    0
 typeshed/tests/pytype_test.py                      |  109 +++
 typeshed/third_party/2.7/boto/exception.pyi        |  149 +++
 typeshed/third_party/2.7/dateutil/__init__.pyi     |    0
 typeshed/third_party/2.7/dateutil/parser.pyi       |   39 +
 typeshed/third_party/2.7/enum.pyi                  |    2 +-
 typeshed/third_party/2.7/gflags.pyi                |   10 +-
 typeshed/third_party/2.7/pymssql.pyi               |   48 +
 typeshed/third_party/2.7/six/moves/__init__.pyi    |    2 +
 .../third_party/2.7/sqlalchemy/engine/__init__.pyi |    5 +
 .../third_party/2.7/sqlalchemy/engine/base.pyi     |   18 +
 .../third_party/2.7/sqlalchemy/sql/operators.pyi   |    2 +-
 typeshed/third_party/2.7/sqlalchemy/sql/schema.pyi |   11 +-
 typeshed/third_party/3/enum.pyi                    |    2 +-
 typeshed/third_party/3/six/moves/__init__.pyi      |    1 +
 143 files changed, 5019 insertions(+), 3498 deletions(-)

diff --git a/PKG-INFO b/PKG-INFO
index 9596a38..0a75168 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: mypy-lang
-Version: 0.4.2
+Version: 0.4.3
 Summary: Optional static typing for Python
 Home-page: http://www.mypy-lang.org/
 Author: Jukka Lehtosalo
diff --git a/mypy/applytype.py b/mypy/applytype.py
index 29f2287..5f066e5 100644
--- a/mypy/applytype.py
+++ b/mypy/applytype.py
@@ -3,7 +3,7 @@ from typing import List, Dict
 import mypy.subtypes
 from mypy.sametypes import is_same_type
 from mypy.expandtype import expand_type
-from mypy.types import Type, TypeVarType, CallableType, AnyType, Void
+from mypy.types import Type, TypeVarId, TypeVarType, CallableType, AnyType, Void
 from mypy.messages import MessageBuilder
 from mypy.nodes import Context
 
@@ -48,7 +48,7 @@ def apply_generic_arguments(callable: CallableType, types: List[Type],
             msg.incompatible_typevar_value(callable, i + 1, type, context)
 
     # Create a map from type variable id to target type.
-    id_to_type = {}  # type: Dict[int, Type]
+    id_to_type = {}  # type: Dict[TypeVarId, Type]
     for i, tv in enumerate(tvars):
         if types[i]:
             id_to_type[tv.id] = types[i]
diff --git a/mypy/binder.py b/mypy/binder.py
new file mode 100644
index 0000000..2a98751
--- /dev/null
+++ b/mypy/binder.py
@@ -0,0 +1,264 @@
+from typing import (Any, Dict, List, Set, Iterator)
+from contextlib import contextmanager
+
+from mypy.types import Type, AnyType, PartialType
+from mypy.nodes import (Node, Var)
+
+from mypy.subtypes import is_subtype
+from mypy.join import join_simple
+from mypy.sametypes import is_same_type
+
+
+class Frame(Dict[Any, Type]):
+    pass
+
+
+class Key(AnyType):
+    pass
+
+
+class ConditionalTypeBinder:
+    """Keep track of conditional types of variables.
+
+    NB: Variables are tracked by literal expression, so it is possible
+    to confuse the binder; for example,
+
+    ```
+    class A:
+        a = None          # type: Union[int, str]
+    x = A()
+    lst = [x]
+    reveal_type(x.a)      # Union[int, str]
+    x.a = 1
+    reveal_type(x.a)      # int
+    reveal_type(lst[0].a) # Union[int, str]
+    lst[0].a = 'a'
+    reveal_type(x.a)      # int
+    reveal_type(lst[0].a) # str
+    ```
+    """
+
+    def __init__(self) -> None:
+        # The set of frames currently used.  These map
+        # expr.literal_hash -- literals like 'foo.bar' --
+        # to types.
+        self.frames = [Frame()]
+
+        # For frames higher in the stack, we record the set of
+        # Frames that can escape there
+        self.options_on_return = []  # type: List[List[Frame]]
+
+        # Maps expr.literal_hash to get_declaration(expr)
+        # for every expr stored in the binder
+        self.declarations = Frame()
+        # Set of other keys to invalidate if a key is changed, e.g. x -> {x.a, x[0]}
+        # Whenever a new key (e.g. x.a.b) is added, we update this
+        self.dependencies = {}  # type: Dict[Key, Set[Key]]
+
+        # breaking_out is set to True on return/break/continue/raise
+        # It is cleared on pop_frame() and placed in last_pop_breaking_out
+        # Lines of code after breaking_out = True are unreachable and not
+        # typechecked.
+        self.breaking_out = False
+
+        # Whether the last pop changed the new top frame on exit
+        self.last_pop_changed = False
+        # Whether the last pop was necessarily breaking out, and couldn't fall through
+        self.last_pop_breaking_out = False
+
+        self.try_frames = set()  # type: Set[int]
+        self.loop_frames = []  # type: List[int]
+
+    def _add_dependencies(self, key: Key, value: Key = None) -> None:
+        if value is None:
+            value = key
+        else:
+            self.dependencies.setdefault(key, set()).add(value)
+        if isinstance(key, tuple):
+            for elt in key:
+                self._add_dependencies(elt, value)
+
+    def push_frame(self) -> Frame:
+        """Push a new frame into the binder."""
+        f = Frame()
+        self.frames.append(f)
+        self.options_on_return.append([])
+        return f
+
+    def _push(self, key: Key, type: Type, index: int=-1) -> None:
+        self.frames[index][key] = type
+
+    def _get(self, key: Key, index: int=-1) -> Type:
+        if index < 0:
+            index += len(self.frames)
+        for i in range(index, -1, -1):
+            if key in self.frames[i]:
+                return self.frames[i][key]
+        return None
+
+    def push(self, expr: Node, typ: Type) -> None:
+        if not expr.literal:
+            return
+        key = expr.literal_hash
+        if key not in self.declarations:
+            self.declarations[key] = self.get_declaration(expr)
+            self._add_dependencies(key)
+        self._push(key, typ)
+
+    def get(self, expr: Node) -> Type:
+        return self._get(expr.literal_hash)
+
+    def cleanse(self, expr: Node) -> None:
+        """Remove all references to a Node from the binder."""
+        self._cleanse_key(expr.literal_hash)
+
+    def _cleanse_key(self, key: Key) -> None:
+        """Remove all references to a key from the binder."""
+        for frame in self.frames:
+            if key in frame:
+                del frame[key]
+
+    def update_from_options(self, frames: List[Frame]) -> bool:
+        """Update the frame to reflect that each key will be updated
+        as in one of the frames.  Return whether any item changes.
+
+        If a key is declared as AnyType, only update it if all the
+        options are the same.
+        """
+
+        changed = False
+        keys = set(key for f in frames for key in f)
+
+        for key in keys:
+            current_value = self._get(key)
+            resulting_values = [f.get(key, current_value) for f in frames]
+            if any(x is None for x in resulting_values):
+                continue
+
+            if isinstance(self.declarations.get(key), AnyType):
+                type = resulting_values[0]
+                if not all(is_same_type(type, t) for t in resulting_values[1:]):
+                    type = AnyType()
+            else:
+                type = resulting_values[0]
+                for other in resulting_values[1:]:
+                    type = join_simple(self.declarations[key], type, other)
+            if not is_same_type(type, current_value):
+                self._push(key, type)
+                changed = True
+
+        return changed
+
+    def pop_frame(self, fall_through: int = 0) -> Frame:
+        """Pop a frame and return it.
+
+        See frame_context() for documentation of fall_through.
+        """
+        if fall_through and not self.breaking_out:
+            self.allow_jump(-fall_through)
+
+        result = self.frames.pop()
+        options = self.options_on_return.pop()
+
+        self.last_pop_changed = self.update_from_options(options)
+        self.last_pop_breaking_out = self.breaking_out
+
+        return result
+
+    def get_declaration(self, expr: Any) -> Type:
+        if hasattr(expr, 'node') and isinstance(expr.node, Var):
+            type = expr.node.type
+            if isinstance(type, PartialType):
+                return None
+            return type
+        else:
+            return None
+
+    def assign_type(self, expr: Node,
+                    type: Type,
+                    declared_type: Type,
+                    restrict_any: bool = False) -> None:
+        if not expr.literal:
+            return
+        self.invalidate_dependencies(expr)
+
+        if declared_type is None:
+            # Not sure why this happens.  It seems to mainly happen in
+            # member initialization.
+            return
+        if not is_subtype(type, declared_type):
+            # Pretty sure this only happens when there's a type error.
+
+            # Ideally this function wouldn't be called if the
+            # expression has a type error, though -- do other kinds of
+            # errors cause this function to get called at invalid
+            # times?
+            return
+
+        # If x is Any and y is int, after x = y we do not infer that x is int.
+        # This could be changed.
+        # Eric: I'm changing it in weak typing mode, since Any is so common.
+
+        if (isinstance(self.most_recent_enclosing_type(expr, type), AnyType)
+                and not restrict_any):
+            pass
+        elif isinstance(type, AnyType):
+            self.push(expr, declared_type)
+        else:
+            self.push(expr, type)
+
+        for i in self.try_frames:
+            # XXX This should probably not copy the entire frame, but
+            # just copy this variable into a single stored frame.
+            self.allow_jump(i)
+
+    def invalidate_dependencies(self, expr: Node) -> None:
+        """Invalidate knowledge of types that include expr, but not expr itself.
+
+        For example, when expr is foo.bar, invalidate foo.bar.baz.
+
+        It is overly conservative: it invalidates globally, including
+        in code paths unreachable from here.
+        """
+        for dep in self.dependencies.get(expr.literal_hash, set()):
+            self._cleanse_key(dep)
+
+    def most_recent_enclosing_type(self, expr: Node, type: Type) -> Type:
+        if isinstance(type, AnyType):
+            return self.get_declaration(expr)
+        key = expr.literal_hash
+        enclosers = ([self.get_declaration(expr)] +
+                     [f[key] for f in self.frames
+                      if key in f and is_subtype(type, f[key])])
+        return enclosers[-1]
+
+    def allow_jump(self, index: int) -> None:
+        # self.frames and self.options_on_return have different lengths
+        # so make sure the index is positive
+        if index < 0:
+            index += len(self.options_on_return)
+        frame = Frame()
+        for f in self.frames[index + 1:]:
+            frame.update(f)
+        self.options_on_return[index].append(frame)
+
+    def push_loop_frame(self) -> None:
+        self.loop_frames.append(len(self.frames) - 1)
+
+    def pop_loop_frame(self) -> None:
+        self.loop_frames.pop()
+
+    @contextmanager
+    def frame_context(self, fall_through: int = 0) -> Iterator[Frame]:
+        """Return a context manager that pushes/pops frames on enter/exit.
+
+        If fall_through > 0, then it will allow the frame to escape to
+        its ancestor `fall_through` levels higher.
+
+        A simple 'with binder.frame_context(): pass' will change the
+        last_pop_* flags but nothing else.
+        """
+        was_breaking_out = self.breaking_out
+        yield self.push_frame()
+        self.pop_frame(fall_through)
+        self.breaking_out = was_breaking_out
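
The ConditionalTypeBinder added above keeps narrowed types in a stack of frames: push() records a narrowing for an expression's literal_hash in the innermost frame, _get() searches the stack from the inside out, and frame_context()/pop_frame() end a frame when a branch finishes. A minimal standalone sketch of that frame-stack idea follows; it uses toy type strings and a plain dict stack rather than mypy's Frame/Node classes, and it simply discards a popped frame instead of doing the options_on_return merging the real class performs.

```
from contextlib import contextmanager
from typing import Dict, Iterator, List, Optional


class ToyBinder:
    """Toy frame stack for narrowed types (illustration only, not mypy's API)."""

    def __init__(self) -> None:
        self.frames = [{}]  # type: List[Dict[str, str]]  # innermost frame is last

    def push(self, key: str, typ: str) -> None:
        self.frames[-1][key] = typ  # record a narrowing in the innermost frame

    def get(self, key: str) -> Optional[str]:
        for frame in reversed(self.frames):  # innermost narrowing wins
            if key in frame:
                return frame[key]
        return None

    @contextmanager
    def frame(self) -> Iterator[None]:
        self.frames.append({})  # narrowings made inside are local to this frame
        try:
            yield
        finally:
            self.frames.pop()  # the toy simply discards them on exit


binder = ToyBinder()
binder.push("x", "Union[int, str]")
with binder.frame():
    binder.push("x", "int")  # e.g. after `if isinstance(x, int):`
    assert binder.get("x") == "int"
assert binder.get("x") == "Union[int, str]"  # the narrowing is gone outside the frame
```

The real binder additionally records frames that can escape to an ancestor in options_on_return, so that update_from_options() can join the types contributed by every branch that may fall through.
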
diff --git a/mypy/build.py b/mypy/build.py
index ec2d29a..c4b9459 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -21,7 +21,7 @@ import time
 from os.path import dirname, basename
 
 from typing import (AbstractSet, Dict, Iterable, Iterator, List,
-                    NamedTuple, Optional, Set, Tuple, Union)
+                    NamedTuple, Optional, Set, Tuple, Union, Mapping)
 
 from mypy.types import Type
 from mypy.nodes import (MypyFile, Node, Import, ImportFrom, ImportAll,
@@ -35,6 +35,7 @@ from mypy import defaults
 from mypy import moduleinfo
 from mypy import util
 from mypy.fixup import fixup_module_pass_one, fixup_module_pass_two
+from mypy.options import Options
 from mypy.parse import parse
 from mypy.stats import dump_type_stats
 
@@ -43,32 +44,6 @@ from mypy.stats import dump_type_stats
 # until Python 3.4, __file__ is relative.
 __file__ = os.path.realpath(__file__)
 
-
-# Build targets (for selecting compiler passes)
-SEMANTIC_ANALYSIS = 0   # Semantic analysis only
-TYPE_CHECK = 1          # Type check
-
-
-# Build flags
-VERBOSE = 'verbose'              # More verbose messages (for troubleshooting)
-MODULE = 'module'                # Build module as a script
-PROGRAM_TEXT = 'program-text'    # Build command-line argument as a script
-TEST_BUILTINS = 'test-builtins'  # Use stub builtins to speed up tests
-DUMP_TYPE_STATS = 'dump-type-stats'
-DUMP_INFER_STATS = 'dump-infer-stats'
-SILENT_IMPORTS = 'silent-imports'  # Silence imports of .py files
-ALMOST_SILENT = 'almost-silent'  # If SILENT_IMPORTS: report silenced imports as errors
-INCREMENTAL = 'incremental'      # Incremental mode: use the cache
-FAST_PARSER = 'fast-parser'      # Use experimental fast parser
-# Disallow calling untyped functions from typed ones
-DISALLOW_UNTYPED_CALLS = 'disallow-untyped-calls'
-# Disallow defining untyped (or incompletely typed) functions
-DISALLOW_UNTYPED_DEFS = 'disallow-untyped-defs'
-# Type check unannotated functions
-CHECK_UNTYPED_DEFS = 'check-untyped-defs'
-# Also check typeshed for missing annotations
-WARN_INCOMPLETE_STUB = 'warn-incomplete-stub'
-
 PYTHON_EXTENSIONS = ['.pyi', '.py']
 
 
@@ -130,14 +105,9 @@ class BuildSourceSet:
 
 
 def build(sources: List[BuildSource],
-          target: int,
+          options: Options,
           alt_lib_path: str = None,
-          bin_dir: str = None,
-          pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
-          custom_typing_module: str = None,
-          report_dirs: Dict[str, str] = None,
-          flags: List[str] = None,
-          python_path: bool = False) -> BuildResult:
+          bin_dir: str = None) -> BuildResult:
     """Analyze a program.
 
     A single call to build performs parsing, semantic analysis and optionally
@@ -147,30 +117,27 @@ def build(sources: List[BuildSource],
     otherwise raise CompileError.
 
     Args:
-      target: select passes to perform (a build target constant, e.g. C)
       sources: list of sources to build
+      options: build options
       alt_lib_path: an additional directory for looking up library modules
         (takes precedence over other directories)
       bin_dir: directory containing the mypy script, used for finding data
         directories; if omitted, use '.' as the data directory
-      pyversion: Python version (major, minor)
-      custom_typing_module: if not None, use this module id as an alias for typing
-      flags: list of build options (e.g. COMPILE_ONLY)
     """
-    report_dirs = report_dirs or {}
-    flags = flags or []
 
     data_dir = default_data_dir(bin_dir)
 
     find_module_clear_caches()
 
     # Determine the default module search path.
-    lib_path = default_lib_path(data_dir, pyversion, python_path)
+    lib_path = default_lib_path(data_dir, options.python_version)
 
-    if TEST_BUILTINS in flags:
+    if options.use_builtins_fixtures:
         # Use stub builtins (to speed up test cases and to make them easier to
-        # debug).
-        lib_path.insert(0, os.path.join(os.path.dirname(__file__), 'test', 'data', 'lib-stub'))
+        # debug).  This is a test-only feature, so assume our files are laid out
+        # as in the source tree.
+        root_dir = os.path.dirname(os.path.dirname(__file__))
+        lib_path.insert(0, os.path.join(root_dir, 'test-data', 'unit', 'lib-stub'))
     else:
         for source in sources:
             if source.path:
@@ -193,19 +160,18 @@ def build(sources: List[BuildSource],
     if alt_lib_path:
         lib_path.insert(0, alt_lib_path)
 
-    reports = Reports(data_dir, report_dirs)
+    reports = Reports(data_dir, options.report_dirs)
 
     source_set = BuildSourceSet(sources)
 
     # Construct a build manager object to hold state during the build.
     #
     # Ignore current directory prefix in error messages.
-    manager = BuildManager(data_dir, lib_path, target,
-                           pyversion=pyversion, flags=flags,
+    manager = BuildManager(data_dir, lib_path,
                            ignore_prefix=os.getcwd(),
-                           custom_typing_module=custom_typing_module,
                            source_set=source_set,
-                           reports=reports)
+                           reports=reports,
+                           options=options)
 
     try:
         dispatch(sources, manager)
@@ -219,7 +185,7 @@ def build(sources: List[BuildSource],
         reports.finish()
 
 
-def default_data_dir(bin_dir: str) -> str:
+def default_data_dir(bin_dir: Optional[str]) -> str:
     """Returns directory containing typeshed directory
 
     Args:
@@ -278,8 +244,7 @@ def mypy_path() -> List[str]:
     return path_env.split(os.pathsep)
 
 
-def default_lib_path(data_dir: str, pyversion: Tuple[int, int],
-        python_path: bool) -> List[str]:
+def default_lib_path(data_dir: str, pyversion: Tuple[int, int]) -> List[str]:
     """Return default standard library search paths."""
     # IDEA: Make this more portable.
     path = []  # type: List[str]
@@ -304,12 +269,6 @@ def default_lib_path(data_dir: str, pyversion: Tuple[int, int],
     if sys.platform != 'win32':
         path.append('/usr/local/lib/mypy')
 
-    # Contents of Python's sys.path go last, to prefer the stubs
-    # TODO: To more closely model what Python actually does, builtins should
-    #       go first, then sys.path, then anything in stdlib and third_party.
-    if python_path:
-        path.extend(sys.path)
-
     return path
 
 
@@ -322,13 +281,23 @@ CacheMeta = NamedTuple('CacheMeta',
                         ('data_mtime', float),  # mtime of data_json
                         ('data_json', str),  # path of <id>.data.json
                         ('suppressed', List[str]),  # dependencies that weren't imported
-                        ('flags', Optional[List[str]]),  # build flags
+                        ('options', Optional[Dict[str, bool]]),  # build options
+                        ('dep_prios', List[int]),
                         ])
-# NOTE: dependencies + suppressed == all unreachable imports;
+# NOTE: dependencies + suppressed == all reachable imports;
 # suppressed contains those reachable imports that were prevented by
 # --silent-imports or simply not found.
 
 
+# Priorities used for imports.  (Here, top-level includes inside a class.)
+# These are used to determine a more predictable order in which the
+# nodes in an import cycle are processed.
+PRI_HIGH = 5  # top-level "from X import blah"
+PRI_MED = 10  # top-level "import X"
+PRI_LOW = 20  # either form inside a function
+PRI_ALL = 99  # include all priorities
+
+
 class BuildManager:
     """This class holds shared state for building a mypy program.
 
@@ -338,7 +307,6 @@ class BuildManager:
 
     Attributes:
       data_dir:        Mypy data directory (contains stubs)
-      target:          Build target; selects which passes to perform
       lib_path:        Library path for looking up modules
       modules:         Mapping of module ID to MypyFile (shared by the passes)
       semantic_analyzer:
@@ -347,53 +315,40 @@ class BuildManager:
                        Semantic analyzer, pass 3
       type_checker:    Type checker
       errors:          Used for reporting all errors
-      pyversion:       Python version (major, minor)
-      flags:           Build options
+      options:         Build options
       missing_modules: Set of modules that could not be imported encountered so far
+      stale_modules:   Set of modules that needed to be rechecked
     """
 
     def __init__(self, data_dir: str,
                  lib_path: List[str],
-                 target: int,
-                 pyversion: Tuple[int, int],
-                 flags: List[str],
                  ignore_prefix: str,
-                 custom_typing_module: str,
                  source_set: BuildSourceSet,
-                 reports: Reports) -> None:
+                 reports: Reports,
+                 options: Options) -> None:
         self.start_time = time.time()
         self.data_dir = data_dir
         self.errors = Errors()
         self.errors.set_ignore_prefix(ignore_prefix)
         self.lib_path = tuple(lib_path)
-        self.target = target
-        self.pyversion = pyversion
-        self.flags = flags
-        self.custom_typing_module = custom_typing_module
         self.source_set = source_set
         self.reports = reports
-        check_untyped_defs = CHECK_UNTYPED_DEFS in self.flags
-        self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors,
-                                                  pyversion=pyversion,
-                                                  check_untyped_defs=check_untyped_defs)
+        self.options = options
+        self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors, options=options)
         self.modules = self.semantic_analyzer.modules
         self.semantic_analyzer_pass3 = ThirdPass(self.modules, self.errors)
-        self.type_checker = TypeChecker(self.errors,
-                                        self.modules,
-                                        self.pyversion,
-                                        DISALLOW_UNTYPED_CALLS in self.flags,
-                                        DISALLOW_UNTYPED_DEFS in self.flags,
-                                        check_untyped_defs,
-                                        WARN_INCOMPLETE_STUB in self.flags)
+        self.type_checker = TypeChecker(self.errors, self.modules, options=options)
         self.missing_modules = set()  # type: Set[str]
+        self.stale_modules = set()  # type: Set[str]
 
     def all_imported_modules_in_file(self,
-                                     file: MypyFile) -> List[Tuple[str, int]]:
+                                     file: MypyFile) -> List[Tuple[int, str, int]]:
         """Find all reachable import statements in a file.
 
-        Return list of tuples (module id, import line number) for all modules
-        imported in file.
+        Return list of tuples (priority, module id, import line number)
+        for all modules imported in file; lower numbers == higher priority.
         """
+
         def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str:
             """Function to correct for relative imports."""
             file_id = file.fullname()
@@ -408,21 +363,28 @@ class BuildManager:
 
             return new_id
 
-        res = []  # type: List[Tuple[str, int]]
+        res = []  # type: List[Tuple[int, str, int]]
         for imp in file.imports:
             if not imp.is_unreachable:
                 if isinstance(imp, Import):
+                    pri = PRI_MED if imp.is_top_level else PRI_LOW
                     for id, _ in imp.ids:
-                        res.append((id, imp.line))
+                        ancestor_parts = id.split(".")[:-1]
+                        ancestors = []
+                        for part in ancestor_parts:
+                            ancestors.append(part)
+                            res.append((PRI_LOW, ".".join(ancestors), imp.line))
+                        res.append((pri, id, imp.line))
                 elif isinstance(imp, ImportFrom):
                     cur_id = correct_rel_imp(imp)
                     pos = len(res)
                     all_are_submodules = True
                     # Also add any imported names that are submodules.
+                    pri = PRI_MED if imp.is_top_level else PRI_LOW
                     for name, __ in imp.names:
                         sub_id = cur_id + '.' + name
                         if self.is_module(sub_id):
-                            res.append((sub_id, imp.line))
+                            res.append((pri, sub_id, imp.line))
                         else:
                             all_are_submodules = False
                     # If all imported names are submodules, don't add
@@ -431,9 +393,12 @@ class BuildManager:
                     # cur_id is also a dependency, and we should
                     # insert it *before* any submodules.
                     if not all_are_submodules:
-                        res.insert(pos, ((cur_id, imp.line)))
+                        pri = PRI_HIGH if imp.is_top_level else PRI_LOW
+                        res.insert(pos, ((pri, cur_id, imp.line)))
                 elif isinstance(imp, ImportAll):
-                    res.append((correct_rel_imp(imp), imp.line))
+                    pri = PRI_HIGH if imp.is_top_level else PRI_LOW
+                    res.append((pri, correct_rel_imp(imp), imp.line))
+
         return res
 
     def is_module(self, id: str) -> bool:
@@ -446,21 +411,35 @@ class BuildManager:
         Raise CompileError if there is a parse error.
         """
         num_errs = self.errors.num_messages()
-        tree = parse(source, path, self.errors,
-                     pyversion=self.pyversion,
-                     custom_typing_module=self.custom_typing_module,
-                     fast_parser=FAST_PARSER in self.flags)
+        tree = parse(source, path, self.errors, options=self.options)
         tree._fullname = id
+
+        # We don't want to warn about 'type: ignore' comments on
+        # imports, but we're about to modify tree.imports, so grab
+        # these first.
+        import_lines = set(node.line for node in tree.imports)
+
+        # Skip imports that have been ignored (so that we can ignore a C extension module without
+        # stub, for example), except for 'from x import *', because we wouldn't be able to
+        # determine which names should be defined unless we process the module. We can still
+        # ignore errors such as redefinitions when using the latter form.
+        imports = [node for node in tree.imports
+                   if node.line not in tree.ignored_lines or isinstance(node, ImportAll)]
+        tree.imports = imports
+
         if self.errors.num_messages() != num_errs:
             self.log("Bailing due to parse errors")
             self.errors.raise_error()
+
+        self.errors.set_file_ignored_lines(path, tree.ignored_lines)
+        self.errors.mark_file_ignored_lines_used(path, import_lines)
         return tree
 
     def module_not_found(self, path: str, line: int, id: str) -> None:
         self.errors.set_file(path)
         stub_msg = "(Stub files are from https://github.com/python/typeshed)"
-        if ((self.pyversion[0] == 2 and moduleinfo.is_py2_std_lib_module(id)) or
-                (self.pyversion[0] >= 3 and moduleinfo.is_py3_std_lib_module(id))):
+        if ((self.options.python_version[0] == 2 and moduleinfo.is_py2_std_lib_module(id)) or
+                (self.options.python_version[0] >= 3 and moduleinfo.is_py3_std_lib_module(id))):
             self.errors.report(
                 line, "No library stub file for standard library module '{}'".format(id))
             self.errors.report(line, stub_msg, severity='note', only_once=True)
@@ -478,12 +457,12 @@ class BuildManager:
             self.reports.file(file, type_map=self.type_checker.type_map)
 
     def log(self, *message: str) -> None:
-        if VERBOSE in self.flags:
+        if self.options.verbosity >= 1:
             print('%.3f:LOG: ' % (time.time() - self.start_time), *message, file=sys.stderr)
             sys.stderr.flush()
 
     def trace(self, *message: str) -> None:
-        if self.flags.count(VERBOSE) >= 2:
+        if self.options.verbosity >= 2:
             print('%.3f:TRACE:' % (time.time() - self.start_time), *message, file=sys.stderr)
             sys.stderr.flush()
 
@@ -533,7 +512,7 @@ find_module_dir_cache = {}  # type: Dict[Tuple[str, Tuple[str, ...]], List[str]]
 find_module_listdir_cache = {}  # type: Dict[str, Optional[List[str]]]
 
 
-def find_module_clear_caches():
+def find_module_clear_caches() -> None:
     find_module_cache.clear()
     find_module_dir_cache.clear()
     find_module_listdir_cache.clear()
@@ -572,12 +551,11 @@ def is_file(path: str) -> bool:
     return os.path.isfile(path)
 
 
-def find_module(id: str, lib_path: Iterable[str]) -> str:
+def find_module(id: str, lib_path_arg: Iterable[str]) -> str:
     """Return the path of the module source file, or None if not found."""
-    if not isinstance(lib_path, tuple):
-        lib_path = tuple(lib_path)
+    lib_path = tuple(lib_path_arg)
 
-    def find():
+    def find() -> Optional[str]:
         # If we're looking for a module like 'foo.bar.baz', it's likely that most of the
         # many elements of lib_path don't even have a subdirectory 'foo/bar'.  Discover
         # that only once and cache it for when we look for modules like 'foo.bar.blah'
@@ -693,22 +671,21 @@ def read_with_python_encoding(path: str, pyversion: Tuple[int, int]) -> str:
         return source_bytearray.decode(encoding)
 
 
-MYPY_CACHE = '.mypy_cache'
-
-
-def get_cache_names(id: str, path: str, pyversion: Tuple[int, int]) -> Tuple[str, str]:
+def get_cache_names(id: str, path: str, cache_dir: str,
+                    pyversion: Tuple[int, int]) -> Tuple[str, str]:
     """Return the file names for the cache files.
 
     Args:
       id: module ID
       path: module path (used to recognize packages)
+      cache_dir: cache directory
       pyversion: Python version (major, minor)
 
     Returns:
       A tuple with the file names to be used for the meta JSON and the
       data JSON, respectively.
     """
-    prefix = os.path.join(MYPY_CACHE, '%d.%d' % pyversion, *id.split('.'))
+    prefix = os.path.join(cache_dir, '%d.%d' % pyversion, *id.split('.'))
     is_package = os.path.basename(path).startswith('__init__.py')
     if is_package:
         prefix = os.path.join(prefix, '__init__')
@@ -728,7 +705,8 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
       valid; otherwise None.
     """
     # TODO: May need to take more build options into account
-    meta_json, data_json = get_cache_names(id, path, manager.pyversion)
+    meta_json, data_json = get_cache_names(
+        id, path, manager.options.cache_dir, manager.options.python_version)
     manager.trace('Looking for {} {}'.format(id, data_json))
     if not os.path.exists(meta_json):
         return None
@@ -748,20 +726,22 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
         meta.get('data_mtime'),
         data_json,
         meta.get('suppressed', []),
-        meta.get('flags'),
+        meta.get('options'),
+        meta.get('dep_prios', []),
     )
     if (m.id != id or m.path != path or
             m.mtime is None or m.size is None or
             m.dependencies is None or m.data_mtime is None):
         return None
 
-    # Metadata generated by older mypy version and no flags were saved
-    if m.flags is None:
+    # Ignore cache if generated by an older mypy version.
+    if m.options is None or len(m.dependencies) != len(m.dep_prios):
         return None
 
-    cached_flags = select_flags_affecting_cache(m.flags)
-    current_flags = select_flags_affecting_cache(manager.flags)
-    if cached_flags != current_flags:
+    # Ignore cache if (relevant) options aren't the same.
+    cached_options = m.options
+    current_options = select_options_affecting_cache(manager.options)
+    if cached_options != current_options:
         return None
 
     # TODO: Share stat() outcome with find_module()
@@ -778,25 +758,26 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
     return m
 
 
-def select_flags_affecting_cache(flags: Iterable[str]) -> AbstractSet[str]:
-    return set(flags).intersection(FLAGS_AFFECTING_CACHE)
+def select_options_affecting_cache(options: Options) -> Mapping[str, bool]:
+    return {opt: getattr(options, opt) for opt in OPTIONS_AFFECTING_CACHE}
 
 
-FLAGS_AFFECTING_CACHE = set([
-    SILENT_IMPORTS,
-    ALMOST_SILENT,
-    DISALLOW_UNTYPED_CALLS,
-    DISALLOW_UNTYPED_DEFS,
-    CHECK_UNTYPED_DEFS,
-])
+OPTIONS_AFFECTING_CACHE = [
+    "silent_imports",
+    "almost_silent",
+    "disallow_untyped_calls",
+    "disallow_untyped_defs",
+    "check_untyped_defs",
+]
 
 
-def random_string():
+def random_string() -> str:
     return binascii.hexlify(os.urandom(8)).decode('ascii')
 
 
 def write_cache(id: str, path: str, tree: MypyFile,
                 dependencies: List[str], suppressed: List[str],
+                dep_prios: List[int],
                 manager: BuildManager) -> None:
     """Write cache files for a module.
 
@@ -806,6 +787,7 @@ def write_cache(id: str, path: str, tree: MypyFile,
       tree: the fully checked module data
       dependencies: module IDs on which this module depends
       suppressed: module IDs which were suppressed as dependencies
+      dep_prios: priorities (parallel array to dependencies)
       manager: the build manager (for pyversion, log/trace)
     """
     path = os.path.abspath(path)
@@ -813,7 +795,8 @@ def write_cache(id: str, path: str, tree: MypyFile,
     st = os.stat(path)  # TODO: Errors
     mtime = st.st_mtime
     size = st.st_size
-    meta_json, data_json = get_cache_names(id, path, manager.pyversion)
+    meta_json, data_json = get_cache_names(
+        id, path, manager.options.cache_dir, manager.options.python_version)
     manager.log('Writing {} {} {}'.format(id, meta_json, data_json))
     data = tree.serialize()
     parent = os.path.dirname(data_json)
@@ -834,19 +817,14 @@ def write_cache(id: str, path: str, tree: MypyFile,
             'data_mtime': data_mtime,
             'dependencies': dependencies,
             'suppressed': suppressed,
-            'flags': manager.flags,
+            'options': select_options_affecting_cache(manager.options),
+            'dep_prios': dep_prios,
             }
     with open(meta_json_tmp, 'w') as f:
         json.dump(meta, f, sort_keys=True)
         f.write('\n')
-    # TODO: This is a temporary change until Python 3.2 support is dropped, see #1504
-    # os.rename will raise an exception rather than replace files on Windows
-    try:
-        replace = os.replace
-    except AttributeError:
-        replace = os.rename
-    replace(data_json_tmp, data_json)
-    replace(meta_json_tmp, meta_json)
+    os.replace(data_json_tmp, data_json)
+    os.replace(meta_json_tmp, meta_json)
 
 
 """Dependency manager.
@@ -1013,6 +991,7 @@ class State:
     tree = None  # type: Optional[MypyFile]
     dependencies = None  # type: List[str]
     suppressed = None  # type: List[str]  # Suppressed/missing dependencies
+    priorities = None  # type: Dict[str, int]
 
     # Map each dependency to the line number where it is first imported
     dep_line_map = None  # type: Dict[str, int]
@@ -1052,7 +1031,7 @@ class State:
         self.id = id or '__main__'
         if not path and source is None:
             file_id = id
-            if id == 'builtins' and manager.pyversion[0] == 2:
+            if id == 'builtins' and manager.options.python_version[0] == 2:
                 # The __builtin__ module is called internally by mypy
                 # 'builtins' in Python 2 mode (similar to Python 3),
                 # but the stub file is __builtin__.pyi.  The reason is
@@ -1065,7 +1044,7 @@ class State:
             path = find_module(file_id, manager.lib_path)
             if path:
                 # In silent mode, don't import .py files, except from stubs.
-                if (SILENT_IMPORTS in manager.flags and
+                if (manager.options.silent_imports and
                         path.endswith('.py') and (caller_state or ancestor_for)):
                     # (Never silence builtins, even if it's a .py file;
                     # this can happen in tests!)
@@ -1073,7 +1052,7 @@ class State:
                         not ((caller_state and
                               caller_state.tree and
                               caller_state.tree.is_stub))):
-                        if ALMOST_SILENT in manager.flags:
+                        if manager.options.almost_silent:
                             if ancestor_for:
                                 self.skipping_ancestor(id, path, ancestor_for)
                             else:
@@ -1086,11 +1065,10 @@ class State:
                 # misspelled module name, missing stub, module not in
                 # search path or the module has not been installed.
                 if caller_state:
-                    suppress_message = ((SILENT_IMPORTS in manager.flags and
-                                        ALMOST_SILENT not in manager.flags) or
+                    suppress_message = ((manager.options.silent_imports and
+                                        not manager.options.almost_silent) or
                                         (caller_state.tree is not None and
-                                         (caller_line in caller_state.tree.ignored_lines or
-                                          'import' in caller_state.tree.weak_opts)))
+                                         'import' in caller_state.tree.weak_opts))
                     if not suppress_message:
                         save_import_context = manager.errors.import_context()
                         manager.errors.set_import_context(caller_state.import_context)
@@ -1106,7 +1084,7 @@ class State:
         self.path = path
         self.xpath = path or '<string>'
         self.source = source
-        if path and source is None and INCREMENTAL in manager.flags:
+        if path and source is None and manager.options.incremental:
             self.meta = find_cache_meta(self.id, self.path, manager)
             # TODO: Get mtime if not cached.
         self.add_ancestors()
@@ -1115,6 +1093,9 @@ class State:
             # compare them to the originals later.
             self.dependencies = list(self.meta.dependencies)
             self.suppressed = list(self.meta.suppressed)
+            assert len(self.meta.dependencies) == len(self.meta.dep_prios)
+            self.priorities = {id: pri
+                               for id, pri in zip(self.meta.dependencies, self.meta.dep_prios)}
             self.dep_line_map = {}
         else:
             # Parse the file (and then some) to get the dependencies.
@@ -1171,6 +1152,7 @@ class State:
     def mark_stale(self) -> None:
         """Throw away the cache data for this file, marking it as stale."""
         self.meta = None
+        self.manager.stale_modules.add(self.id)
 
     def check_blockers(self) -> None:
         """Raise CompileError if a blocking error is detected."""
@@ -1222,7 +1204,7 @@ class State:
             self.source = None  # We won't need it again.
             if self.path and source is None:
                 try:
-                    source = read_with_python_encoding(self.path, manager.pyversion)
+                    source = read_with_python_encoding(self.path, manager.options.python_version)
                 except IOError as ioerr:
                     raise CompileError([
                         "mypy: can't read file '{}': {}".format(self.path, ioerr.strerror)])
@@ -1250,8 +1232,10 @@ class State:
         # Also keep track of each dependency's source line.
         dependencies = []
         suppressed = []
+        priorities = {}  # type: Dict[str, int]  # id -> priority
         dep_line_map = {}  # type: Dict[str, int]  # id -> line
-        for id, line in manager.all_imported_modules_in_file(self.tree):
+        for pri, id, line in manager.all_imported_modules_in_file(self.tree):
+            priorities[id] = min(pri, priorities.get(id, PRI_ALL))
             if id == self.id:
                 continue
             # Omit missing modules, as otherwise we could not type-check
@@ -1282,6 +1266,7 @@ class State:
         # for differences (e.g. --silent-imports).
         self.dependencies = dependencies
         self.suppressed = suppressed
+        self.priorities = priorities
         self.dep_line_map = dep_line_map
         self.check_blockers()
 
@@ -1305,24 +1290,26 @@ class State:
     def semantic_analysis_pass_three(self) -> None:
         with self.wrap_context():
             self.manager.semantic_analyzer_pass3.visit_file(self.tree, self.xpath)
-            if DUMP_TYPE_STATS in self.manager.flags:
+            if self.manager.options.dump_type_stats:
                 dump_type_stats(self.tree, self.xpath)
 
     def type_check(self) -> None:
         manager = self.manager
-        if manager.target < TYPE_CHECK:
+        if manager.options.semantic_analysis_only:
             return
         with self.wrap_context():
             manager.type_checker.visit_file(self.tree, self.xpath)
-            if DUMP_INFER_STATS in manager.flags:
+            if manager.options.dump_inference_stats:
                 dump_type_stats(self.tree, self.xpath, inferred=True,
                                 typemap=manager.type_checker.type_map)
             manager.report_file(self.tree)
 
     def write_cache(self) -> None:
-        if self.path and INCREMENTAL in self.manager.flags and not self.manager.errors.is_errors():
+        if self.path and self.manager.options.incremental and not self.manager.errors.is_errors():
+            dep_prios = [self.priorities.get(dep, PRI_HIGH) for dep in self.dependencies]
             write_cache(self.id, self.path, self.tree,
                         list(self.dependencies), list(self.suppressed),
+                        dep_prios,
                         self.manager)
 
 
@@ -1334,6 +1321,8 @@ def dispatch(sources: List[BuildSource], manager: BuildManager) -> None:
     graph = load_graph(sources, manager)
     manager.log("Loaded graph with %d nodes" % len(graph))
     process_graph(graph, manager)
+    if manager.options.warn_unused_ignores:
+        manager.errors.generate_unused_ignore_notes()
 
 
 def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
@@ -1351,7 +1340,7 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
             continue
         if st.id in graph:
             manager.errors.set_file(st.xpath)
-            manager.errors.report(1, "Duplicate module named '%s'" % st.id)
+            manager.errors.report(-1, "Duplicate module named '%s'" % st.id)
             manager.errors.raise_error()
         graph[st.id] = st
         new.append(st)
@@ -1389,10 +1378,9 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
     # dependencies) to roots (those from which everything else can be
     # reached).
     for ascc in sccs:
-        # Sort the SCC's nodes in *reverse* order or encounter.
-        # This is a heuristic for handling import cycles.
+        # Order the SCC's nodes using a heuristic.
         # Note that ascc is a set, and scc is a list.
-        scc = sorted(ascc, key=lambda id: -graph[id].order)
+        scc = order_ascc(graph, ascc)
         # If builtins is in the list, move it last.  (This is a bit of
         # a hack, but it's necessary because the builtins module is
         # part of a small cycle involving at least {builtins, abc,
@@ -1401,6 +1389,12 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
         if 'builtins' in ascc:
             scc.remove('builtins')
             scc.append('builtins')
+        if manager.options.verbosity >= 2:
+            for id in scc:
+                manager.trace("Priorities for %s:" % id,
+                              " ".join("%s:%d" % (x, graph[id].priorities[x])
+                                       for x in graph[id].dependencies
+                                       if x in ascc and x in graph[id].priorities))
         # Because the SCCs are presented in topological sort order, we
         # don't need to look at dependencies recursively for staleness
         # -- the immediate dependencies are sufficient.
@@ -1427,7 +1421,7 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
             # cache file is newer than any scc node's cache file.
             oldest_in_scc = min(graph[id].meta.data_mtime for id in scc)
             newest_in_deps = 0 if not deps else max(graph[dep].meta.data_mtime for dep in deps)
-            if manager.flags.count(VERBOSE) >= 2:  # Dump all mtimes for extreme debugging.
+            if manager.options.verbosity >= 3:  # Dump all mtimes for extreme debugging.
                 all_ids = sorted(ascc | deps, key=lambda id: graph[id].meta.data_mtime)
                 for id in all_ids:
                     if id in scc:
@@ -1467,6 +1461,53 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
             process_stale_scc(graph, scc)
 
 
+def order_ascc(graph: Graph, ascc: AbstractSet[str], pri_max: int = PRI_ALL) -> List[str]:
+    """Come up with the ideal processing order within an SCC.
+
+    Using the priorities assigned by all_imported_modules_in_file(),
+    try to reduce the cycle to a DAG, by omitting arcs representing
+    dependencies of lower priority.
+
+    In the simplest case, if we have A <--> B where A has a top-level
+    "import B" (medium priority) but B only has the reverse "import A"
+    inside a function (low priority), we turn the cycle into a DAG by
+    dropping the B --> A arc, which leaves only A --> B.
+
+    If all arcs have the same priority, we fall back to sorting by
+    reverse global order (the order in which modules were first
+    encountered).
+
+    The algorithm is recursive, as follows: when arcs of different
+    priorities are present, drop all arcs of the lowest priority,
+    identify SCCs in the resulting graph, and apply the algorithm to
+    each SCC thus found.  The recursion is bounded because at each
+    recursion the spread in priorities is (at least) one less.
+
+    In practice there are only a few priority levels (currently
+    N=3) and in the worst case we just carry out the same algorithm
+    for finding SCCs N times.  Thus the complexity is no worse than
+    the complexity of the original SCC-finding algorithm -- see
+    strongly_connected_components() below for a reference.
+    """
+    if len(ascc) == 1:
+        return [s for s in ascc]
+    pri_spread = set()
+    for id in ascc:
+        state = graph[id]
+        for dep in state.dependencies:
+            if dep in ascc:
+                pri = state.priorities.get(dep, PRI_HIGH)
+                if pri < pri_max:
+                    pri_spread.add(pri)
+    if len(pri_spread) == 1:
+        # Filtered dependencies are uniform -- order by global order.
+        return sorted(ascc, key=lambda id: -graph[id].order)
+    pri_max = max(pri_spread)
+    sccs = sorted_components(graph, ascc, pri_max)
+    # The recursion is bounded by the len(pri_spread) check above.
+    return [s for ss in sccs for s in order_ascc(graph, ss, pri_max)]
+
+
 def process_fresh_scc(graph: Graph, scc: List[str]) -> None:
     """Process the modules in one SCC from their cached data."""
     for id in scc:
@@ -1498,7 +1539,9 @@ def process_stale_scc(graph: Graph, scc: List[str]) -> None:
         graph[id].write_cache()
 
 
-def sorted_components(graph: Graph) -> List[AbstractSet[str]]:
+def sorted_components(graph: Graph,
+                      vertices: Optional[AbstractSet[str]] = None,
+                      pri_max: int = PRI_ALL) -> List[AbstractSet[str]]:
     """Return the graph's SCCs, topologically sorted by dependencies.
 
     The sort order is from leaves (nodes without dependencies) to
@@ -1508,9 +1551,9 @@ def sorted_components(graph: Graph) -> List[AbstractSet[str]]:
     dependencies that aren't present in graph.keys() are ignored.
     """
     # Compute SCCs.
-    vertices = set(graph)
-    edges = {id: [dep for dep in st.dependencies if dep in graph]
-             for id, st in graph.items()}
+    if vertices is None:
+        vertices = set(graph)
+    edges = {id: deps_filtered(graph, vertices, id, pri_max) for id in vertices}
     sccs = list(strongly_connected_components(vertices, edges))
     # Topsort.
     sccsmap = {id: frozenset(scc) for scc in sccs for id in scc}
@@ -1518,7 +1561,7 @@ def sorted_components(graph: Graph) -> List[AbstractSet[str]]:
     for scc in sccs:
         deps = set()  # type: Set[AbstractSet[str]]
         for id in scc:
-            deps.update(sccsmap[x] for x in graph[id].dependencies if x in graph)
+            deps.update(sccsmap[x] for x in deps_filtered(graph, vertices, id, pri_max))
         data[frozenset(scc)] = deps
     res = []
     for ready in topsort(data):
@@ -1535,7 +1578,17 @@ def sorted_components(graph: Graph) -> List[AbstractSet[str]]:
     return res
 
 
-def strongly_connected_components(vertices: Set[str],
+def deps_filtered(graph: Graph, vertices: AbstractSet[str], id: str, pri_max: int) -> List[str]:
+    """Filter dependencies for id with pri < pri_max."""
+    if id not in vertices:
+        return []
+    state = graph[id]
+    return [dep
+            for dep in state.dependencies
+            if dep in vertices and state.priorities.get(dep, PRI_HIGH) < pri_max]
+
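
As a small standalone illustration of the filtering above (SimpleState and the priority values are hypothetical stand-ins for mypy's State objects and build constants):

    PRI_HIGH, PRI_MED, PRI_ALL = 5, 10, 99      # hypothetical levels

    class SimpleState:
        def __init__(self, dependencies, priorities):
            self.dependencies = dependencies    # list of module ids
            self.priorities = priorities        # module id -> priority

    graph = {
        'a': SimpleState(['b', 'c'], {'b': PRI_MED, 'c': PRI_HIGH}),
        'b': SimpleState(['a'], {'a': PRI_MED}),
        'c': SimpleState([], {}),
    }

    def deps_filtered(graph, vertices, id, pri_max):
        if id not in vertices:
            return []
        state = graph[id]
        return [dep for dep in state.dependencies
                if dep in vertices and state.priorities.get(dep, PRI_HIGH) < pri_max]

    print(deps_filtered(graph, {'a', 'b'}, 'a', PRI_ALL))
    # ['b']: 'c' is outside the vertex set
    print(deps_filtered(graph, {'a', 'b', 'c'}, 'a', PRI_MED))
    # ['c']: the medium-priority a -> b arc is filtered out
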
+
+def strongly_connected_components(vertices: AbstractSet[str],
                                   edges: Dict[str, List[str]]) -> Iterator[Set[str]]:
     """Compute Strongly Connected Components of a directed graph.
 
diff --git a/mypy/checker.py b/mypy/checker.py
index 335344a..5bacc9d 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -32,7 +32,7 @@ from mypy import nodes
 from mypy.types import (
     Type, AnyType, CallableType, Void, FunctionLike, Overloaded, TupleType,
     Instance, NoneTyp, ErrorType, strip_type,
-    UnionType, TypeVarType, PartialType, DeletedType
+    UnionType, TypeVarId, TypeVarType, PartialType, DeletedType, UninhabitedType
 )
 from mypy.sametypes import is_same_type
 from mypy.messages import MessageBuilder
@@ -49,271 +49,16 @@ from mypy.semanal import self_type, set_callable_name, refers_to_fullname
 from mypy.erasetype import erase_typevars
 from mypy.expandtype import expand_type_by_instance, expand_type
 from mypy.visitor import NodeVisitor
-from mypy.join import join_simple, join_types
+from mypy.join import join_types
 from mypy.treetransform import TransformVisitor
 from mypy.meet import meet_simple, nearest_builtin_ancestor, is_overlapping_types
+from mypy.binder import ConditionalTypeBinder
+from mypy.options import Options
 
+from mypy import experiments
 
-T = TypeVar('T')
-
-
-def min_with_None_large(x: T, y: T) -> T:
-    """Return min(x, y) but with  a < None for all variables a that are not None"""
-    if x is None:
-        return y
-    return min(x, x if y is None else y)
-
-
-class Frame(Dict[Any, Type]):
-    pass
-
-
-class Key(AnyType):
-    pass
-
-
-class ConditionalTypeBinder:
-    """Keep track of conditional types of variables."""
-
-    def __init__(self) -> None:
-        self.frames = []  # type: List[Frame]
-        # The first frame is special: it's the declared types of variables.
-        self.frames.append(Frame())
-        # Set of other keys to invalidate if a key is changed.
-        self.dependencies = {}  # type: Dict[Key, Set[Key]]
-        # Set of keys with dependencies added already.
-        self._added_dependencies = set()  # type: Set[Key]
-
-        self.frames_on_escape = {}  # type: Dict[int, List[Frame]]
-
-        self.try_frames = set()  # type: Set[int]
-        self.loop_frames = []  # type: List[int]
-
-    def _add_dependencies(self, key: Key, value: Key = None) -> None:
-        if value is None:
-            value = key
-            if value in self._added_dependencies:
-                return
-            self._added_dependencies.add(value)
-        if isinstance(key, tuple):
-            key = cast(Any, key)   # XXX sad
-            if key != value:
-                self.dependencies[key] = set()
-                self.dependencies.setdefault(key, set()).add(value)
-            for elt in cast(Any, key):
-                self._add_dependencies(elt, value)
-
-    def push_frame(self) -> Frame:
-        d = Frame()
-        self.frames.append(d)
-        return d
-
-    def _push(self, key: Key, type: Type, index: int=-1) -> None:
-        self._add_dependencies(key)
-        self.frames[index][key] = type
-
-    def _get(self, key: Key, index: int=-1) -> Type:
-        if index < 0:
-            index += len(self.frames)
-        for i in range(index, -1, -1):
-            if key in self.frames[i]:
-                return self.frames[i][key]
-        return None
-
-    def push(self, expr: Node, typ: Type) -> None:
-        if not expr.literal:
-            return
-        key = expr.literal_hash
-        self.frames[0][key] = self.get_declaration(expr)
-        self._push(key, typ)
-
-    def get(self, expr: Node) -> Type:
-        return self._get(expr.literal_hash)
-
-    def cleanse(self, expr: Node) -> None:
-        """Remove all references to a Node from the binder."""
-        key = expr.literal_hash
-        for frame in self.frames:
-            if key in frame:
-                del frame[key]
-
-    def update_from_options(self, frames: List[Frame]) -> bool:
-        """Update the frame to reflect that each key will be updated
-        as in one of the frames.  Return whether any item changes.
-
-        If a key is declared as AnyType, only update it if all the
-        options are the same.
-        """
-
-        changed = False
-        keys = set(key for f in frames for key in f)
-
-        for key in keys:
-            current_value = self._get(key)
-            resulting_values = [f.get(key, current_value) for f in frames]
-            if any(x is None for x in resulting_values):
-                continue
-
-            if isinstance(self.frames[0].get(key), AnyType):
-                type = resulting_values[0]
-                if not all(is_same_type(type, t) for t in resulting_values[1:]):
-                    type = AnyType()
-            else:
-                type = resulting_values[0]
-                for other in resulting_values[1:]:
-                    type = join_simple(self.frames[0][key], type, other)
-            if not is_same_type(type, current_value):
-                self._push(key, type)
-                changed = True
-
-        return changed
-
-    def update_expand(self, frame: Frame, index: int = -1) -> bool:
-        """Update frame to include another one, if that other one is larger than the current value.
-
-        Return whether anything changed."""
-        result = False
-
-        for key in frame:
-            old_type = self._get(key, index)
-            if old_type is None:
-                continue
-            replacement = join_simple(self.frames[0][key], old_type, frame[key])
-
-            if not is_same_type(replacement, old_type):
-                self._push(key, replacement, index)
-                result = True
-        return result
-
-    def pop_frame(self, canskip=True, fallthrough=False) -> Tuple[bool, Frame]:
-        """Pop a frame.
 
-        If canskip, then allow types to skip all the inner frame
-        blocks.  That is, changes that happened in the inner frames
-        are not necessarily reflected in the outer frame (for example,
-        an if block that may be skipped).
-
-        If fallthrough, then allow types to escape from the inner
-        frame to the resulting frame.  That is, the state of types at
-        the end of the last frame are allowed to fall through into the
-        enclosing frame.
-
-        Return whether the newly innermost frame was modified since it
-        was last on top, and what it would be if the block had run to
-        completion.
-        """
-        result = self.frames.pop()
-
-        options = self.frames_on_escape.pop(len(self.frames) - 1, [])  # type: List[Frame]
-        if canskip:
-            options.append(self.frames[-1])
-        if fallthrough:
-            options.append(result)
-
-        changed = self.update_from_options(options)
-
-        return (changed, result)
-
-    def get_declaration(self, expr: Any) -> Type:
-        if hasattr(expr, 'node') and isinstance(expr.node, Var):
-            type = expr.node.type
-            if isinstance(type, PartialType):
-                return None
-            return type
-        else:
-            return self.frames[0].get(expr.literal_hash)
-
-    def assign_type(self, expr: Node, type: Type,
-                    restrict_any: bool = False) -> None:
-        if not expr.literal:
-            return
-        self.invalidate_dependencies(expr)
-
-        declared_type = self.get_declaration(expr)
-
-        if declared_type is None:
-            # Not sure why this happens.  It seems to mainly happen in
-            # member initialization.
-            return
-        if not is_subtype(type, declared_type):
-            # Pretty sure this is only happens when there's a type error.
-
-            # Ideally this function wouldn't be called if the
-            # expression has a type error, though -- do other kinds of
-            # errors cause this function to get called at invalid
-            # times?
-            return
-
-        # If x is Any and y is int, after x = y we do not infer that x is int.
-        # This could be changed.
-        # Eric: I'm changing it in weak typing mode, since Any is so common.
-
-        if (isinstance(self.most_recent_enclosing_type(expr, type), AnyType)
-                and not restrict_any):
-            pass
-        elif isinstance(type, AnyType):
-            self.push(expr, declared_type)
-        else:
-            self.push(expr, type)
-
-        for i in self.try_frames:
-            # XXX This should probably not copy the entire frame, but
-            # just copy this variable into a single stored frame.
-            self.allow_jump(i)
-
-    def invalidate_dependencies(self, expr: Node) -> None:
-        """Invalidate knowledge of types that include expr, but not expr itself.
-
-        For example, when expr is foo.bar, invalidate foo.bar.baz and
-        foo.bar[0].
-
-        It is overly conservative: it invalidates globally, including
-        in code paths unreachable from here.
-        """
-        for dep in self.dependencies.get(expr.literal_hash, set()):
-            for f in self.frames:
-                if dep in f:
-                    del f[dep]
-
-    def most_recent_enclosing_type(self, expr: Node, type: Type) -> Type:
-        if isinstance(type, AnyType):
-            return self.get_declaration(expr)
-        key = expr.literal_hash
-        enclosers = ([self.get_declaration(expr)] +
-                     [f[key] for f in self.frames
-                      if key in f and is_subtype(type, f[key])])
-        return enclosers[-1]
-
-    def allow_jump(self, index: int) -> None:
-        new_frame = Frame()
-        for f in self.frames[index + 1:]:
-            for k in f:
-                new_frame[k] = f[k]
-
-        self.frames_on_escape.setdefault(index, []).append(new_frame)
-
-    def push_loop_frame(self):
-        self.loop_frames.append(len(self.frames) - 1)
-
-    def pop_loop_frame(self):
-        self.loop_frames.pop()
-
-    def __enter__(self) -> None:
-        self.push_frame()
-
-    def __exit__(self, *args: Any) -> None:
-        self.pop_frame()
-
-
-def meet_frames(*frames: Frame) -> Frame:
-    answer = Frame()
-    for f in frames:
-        for key in f:
-            if key in answer:
-                answer[key] = meet_simple(answer[key], f[key])
-            else:
-                answer[key] = f[key]
-    return answer
+T = TypeVar('T')
 
 
 # A node which is postponed to be type checked during the next pass.
@@ -331,8 +76,6 @@ class TypeChecker(NodeVisitor[Type]):
     Type check mypy source files that have been semantically analyzed.
     """
 
-    # Target Python version
-    pyversion = defaults.PYTHON3_VERSION
     # Are we type checking a stub?
     is_stub = False
     # Error message reporter
@@ -355,8 +98,6 @@ class TypeChecker(NodeVisitor[Type]):
     dynamic_funcs = None  # type: List[bool]
     # Stack of functions being type checked
     function_stack = None  # type: List[FuncItem]
-    # Set to True on return/break/raise, False on blocks that can block any of them
-    breaking_out = False
     # Do weak type checking in this file
     weak_opts = set()        # type: Set[str]
     # Stack of collections of variables with partial types
@@ -371,30 +112,20 @@ class TypeChecker(NodeVisitor[Type]):
     # Have we deferred the current function? If yes, don't infer additional
     # types during this pass within the function.
     current_node_deferred = False
-    # This makes it an error to call an untyped function from a typed one
-    disallow_untyped_calls = False
-    # This makes it an error to define an untyped or partially-typed function
-    disallow_untyped_defs = False
-    # Should we check untyped function defs?
-    check_untyped_defs = False
-    warn_incomplete_stub = False
     is_typeshed_stub = False
+    options = None  # type: Options
 
-    def __init__(self, errors: Errors, modules: Dict[str, MypyFile],
-                 pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
-                 disallow_untyped_calls=False, disallow_untyped_defs=False,
-                 check_untyped_defs=False, warn_incomplete_stub=False) -> None:
+    def __init__(self, errors: Errors, modules: Dict[str, MypyFile], options: Options) -> None:
         """Construct a type checker.
 
         Use errors to report type check errors.
         """
         self.errors = errors
         self.modules = modules
-        self.pyversion = pyversion
+        self.options = options
         self.msg = MessageBuilder(errors, modules)
         self.type_map = {}
         self.binder = ConditionalTypeBinder()
-        self.binder.push_frame()
         self.expr_checker = mypy.checkexpr.ExpressionChecker(self, self.msg)
         self.return_types = []
         self.type_context = []
@@ -405,17 +136,12 @@ class TypeChecker(NodeVisitor[Type]):
         self.deferred_nodes = []
         self.pass_num = 0
         self.current_node_deferred = False
-        self.disallow_untyped_calls = disallow_untyped_calls
-        self.disallow_untyped_defs = disallow_untyped_defs
-        self.check_untyped_defs = check_untyped_defs
-        self.warn_incomplete_stub = warn_incomplete_stub
 
     def visit_file(self, file_node: MypyFile, path: str) -> None:
         """Type check a mypy file with the given path."""
         self.pass_num = 0
         self.is_stub = file_node.is_stub
         self.errors.set_file(path)
-        self.errors.set_ignored_lines(file_node.ignored_lines)
         self.globals = file_node.names
         self.weak_opts = file_node.weak_opts
         self.enter_partial_types()
@@ -430,7 +156,6 @@ class TypeChecker(NodeVisitor[Type]):
         if self.deferred_nodes:
             self.check_second_pass()
 
-        self.errors.set_ignored_lines(set())
         self.current_node_deferred = False
 
         all_ = self.globals.get('__all__')
@@ -442,7 +167,7 @@ class TypeChecker(NodeVisitor[Type]):
                 self.fail(messages.ALL_MUST_BE_SEQ_STR.format(str_seq_s, all_s),
                           all_.node)
 
-    def check_second_pass(self):
+    def check_second_pass(self) -> None:
         """Run second pass of type checking which goes through deferred nodes."""
         self.pass_num = 1
         for node, type_name in self.deferred_nodes:
@@ -483,16 +208,24 @@ class TypeChecker(NodeVisitor[Type]):
         else:
             return typ
 
-    def accept_in_frame(self, node: Node, type_context: Type = None,
-                        repeat_till_fixed: bool = False) -> Type:
-        """Type check a node in the given type context in a new frame of inferred types."""
-        while True:
-            self.binder.push_frame()
-            answer = self.accept(node, type_context)
-            changed, _ = self.binder.pop_frame(True, True)
-            self.breaking_out = False
-            if not repeat_till_fixed or not changed:
-                return answer
+    def accept_loop(self, body: Node, else_body: Node = None) -> Type:
+        """Repeatedly type check a loop body until the frame doesn't change.
+
+        Then check the else_body.
+        """
+        # The outer frame accumulates the results of all iterations
+        with self.binder.frame_context(1) as outer_frame:
+            self.binder.push_loop_frame()
+            while True:
+                with self.binder.frame_context(1):
+                    # We may skip each iteration
+                    self.binder.options_on_return[-1].append(outer_frame)
+                    self.accept(body)
+                if not self.binder.last_pop_changed:
+                    break
+            self.binder.pop_loop_frame()
+            if else_body:
+                self.accept(else_body)
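
The loop handling above amounts to a fixed-point iteration over the binder frame; a minimal sketch of that idea, where the check_body callback and the narrowed-types dict are hypothetical stand-ins for one type-checking pass and the binder state:

    def analyze_loop(check_body, narrowed):
        """Re-check the loop body until the narrowed-types map stops changing."""
        while True:
            updated = check_body(dict(narrowed))   # one pass may widen some types
            if updated == narrowed:                # reached a fixed point
                return narrowed
            narrowed = updated

    def body(state):
        # the loop may reassign x to a str, so join the possibilities
        state['x'] = 'Union[int, str]'
        return state

    print(analyze_loop(body, {'x': 'int'}))   # {'x': 'Union[int, str]'} after two passes
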
 
     #
     # Definitions
@@ -601,6 +334,11 @@ class TypeChecker(NodeVisitor[Type]):
             else:
                 # Function definition overrides a variable initialized via assignment.
                 orig_type = defn.original_def.type
+                if orig_type is None:
+                    # XXX This can be None, as happens in
+                    # test_testcheck_TypeCheckSuite.testRedefinedFunctionInTryWithElse
+                    self.msg.note("Internal mypy error checking function redefinition.", defn)
+                    return None
                 if isinstance(orig_type, PartialType):
                     if orig_type.type is None:
                         # Ah this is a partial type. Give it the type of the function.
@@ -663,97 +401,95 @@ class TypeChecker(NodeVisitor[Type]):
         for item, typ in self.expand_typevars(defn, typ):
             old_binder = self.binder
             self.binder = ConditionalTypeBinder()
-            self.binder.push_frame()
-            defn.expanded.append(item)
-
-            # We may be checking a function definition or an anonymous
-            # function. In the first case, set up another reference with the
-            # precise type.
-            if isinstance(item, FuncDef):
-                fdef = item
-            else:
-                fdef = None
-
-            if fdef:
-                # Check if __init__ has an invalid, non-None return type.
-                if (fdef.info and fdef.name() == '__init__' and
-                        not isinstance(typ.ret_type, Void) and
-                        not self.dynamic_funcs[-1]):
-                    self.fail(messages.INIT_MUST_HAVE_NONE_RETURN_TYPE,
-                              item.type)
-
-                show_untyped = not self.is_typeshed_stub or self.warn_incomplete_stub
-                if self.disallow_untyped_defs and show_untyped:
-                    # Check for functions with unspecified/not fully specified types.
-                    def is_implicit_any(t: Type) -> bool:
-                        return isinstance(t, AnyType) and t.implicit
-
-                    if fdef.type is None:
-                        self.fail(messages.FUNCTION_TYPE_EXPECTED, fdef)
-                    elif isinstance(fdef.type, CallableType):
-                        if is_implicit_any(fdef.type.ret_type):
-                            self.fail(messages.RETURN_TYPE_EXPECTED, fdef)
-                        if any(is_implicit_any(t) for t in fdef.type.arg_types):
-                            self.fail(messages.ARGUMENT_TYPE_EXPECTED, fdef)
-
-            if name in nodes.reverse_op_method_set:
-                self.check_reverse_op_method(item, typ, name)
-            elif name == '__getattr__':
-                self.check_getattr_method(typ, defn)
-
-            # Refuse contravariant return type variable
-            if isinstance(typ.ret_type, TypeVarType):
-                if typ.ret_type.variance == CONTRAVARIANT:
-                    self.fail(messages.RETURN_TYPE_CANNOT_BE_CONTRAVARIANT,
-                         typ.ret_type)
-
-            # Check that Generator functions have the appropriate return type.
-            if defn.is_generator:
-                if not self.is_generator_return_type(typ.ret_type):
-                    self.fail(messages.INVALID_RETURN_TYPE_FOR_GENERATOR, typ)
-
-                # Python 2 generators aren't allowed to return values.
-                if (self.pyversion[0] == 2 and
-                        isinstance(typ.ret_type, Instance) and
-                        typ.ret_type.type.fullname() == 'typing.Generator'):
-                    if not (isinstance(typ.ret_type.args[2], Void)
-                            or isinstance(typ.ret_type.args[2], AnyType)):
-                        self.fail(messages.INVALID_GENERATOR_RETURN_ITEM_TYPE, typ)
-
-            # Push return type.
-            self.return_types.append(typ.ret_type)
-
-            # Store argument types.
-            for i in range(len(typ.arg_types)):
-                arg_type = typ.arg_types[i]
-
-                # Refuse covariant parameter type variables
-                if isinstance(arg_type, TypeVarType):
-                    if arg_type.variance == COVARIANT:
-                        self.fail(messages.FUNCTION_PARAMETER_CANNOT_BE_COVARIANT,
-                                  arg_type)
-
-                if typ.arg_kinds[i] == nodes.ARG_STAR:
-                    # builtins.tuple[T] is typing.Tuple[T, ...]
-                    arg_type = self.named_generic_type('builtins.tuple',
-                                                       [arg_type])
-                elif typ.arg_kinds[i] == nodes.ARG_STAR2:
-                    arg_type = self.named_generic_type('builtins.dict',
-                                                       [self.str_type(),
-                                                        arg_type])
-                item.arguments[i].variable.type = arg_type
-
-            # Type check initialization expressions.
-            for arg in item.arguments:
-                init = arg.initialization_statement
-                if init:
-                    self.accept(init)
-
-            # Clear out the default assignments from the binder
-            self.binder.pop_frame()
-            self.binder.push_frame()
+            with self.binder.frame_context():
+                defn.expanded.append(item)
+
+                # We may be checking a function definition or an anonymous
+                # function. In the first case, set up another reference with the
+                # precise type.
+                if isinstance(item, FuncDef):
+                    fdef = item
+                else:
+                    fdef = None
+
+                if fdef:
+                    # Check if __init__ has an invalid, non-None return type.
+                    if (fdef.info and fdef.name() == '__init__' and
+                            not isinstance(typ.ret_type, Void) and
+                            not self.dynamic_funcs[-1]):
+                        self.fail(messages.INIT_MUST_HAVE_NONE_RETURN_TYPE,
+                                  item.type)
+
+                    show_untyped = not self.is_typeshed_stub or self.options.warn_incomplete_stub
+                    if self.options.disallow_untyped_defs and show_untyped:
+                        # Check for functions with unspecified/not fully specified types.
+                        def is_implicit_any(t: Type) -> bool:
+                            return isinstance(t, AnyType) and t.implicit
+
+                        if fdef.type is None:
+                            self.fail(messages.FUNCTION_TYPE_EXPECTED, fdef)
+                        elif isinstance(fdef.type, CallableType):
+                            if is_implicit_any(fdef.type.ret_type):
+                                self.fail(messages.RETURN_TYPE_EXPECTED, fdef)
+                            if any(is_implicit_any(t) for t in fdef.type.arg_types):
+                                self.fail(messages.ARGUMENT_TYPE_EXPECTED, fdef)
+
+                if name in nodes.reverse_op_method_set:
+                    self.check_reverse_op_method(item, typ, name)
+                elif name == '__getattr__':
+                    self.check_getattr_method(typ, defn)
+
+                # Refuse contravariant return type variable
+                if isinstance(typ.ret_type, TypeVarType):
+                    if typ.ret_type.variance == CONTRAVARIANT:
+                        self.fail(messages.RETURN_TYPE_CANNOT_BE_CONTRAVARIANT,
+                             typ.ret_type)
+
+                # Check that Generator functions have the appropriate return type.
+                if defn.is_generator:
+                    if not self.is_generator_return_type(typ.ret_type):
+                        self.fail(messages.INVALID_RETURN_TYPE_FOR_GENERATOR, typ)
+
+                    # Python 2 generators aren't allowed to return values.
+                    if (self.options.python_version[0] == 2 and
+                            isinstance(typ.ret_type, Instance) and
+                            typ.ret_type.type.fullname() == 'typing.Generator'):
+                        if not (isinstance(typ.ret_type.args[2], Void)
+                                or isinstance(typ.ret_type.args[2], AnyType)):
+                            self.fail(messages.INVALID_GENERATOR_RETURN_ITEM_TYPE, typ)
+
+                # Push return type.
+                self.return_types.append(typ.ret_type)
+
+                # Store argument types.
+                for i in range(len(typ.arg_types)):
+                    arg_type = typ.arg_types[i]
+
+                    # Refuse covariant parameter type variables
+                    if isinstance(arg_type, TypeVarType):
+                        if arg_type.variance == COVARIANT:
+                            self.fail(messages.FUNCTION_PARAMETER_CANNOT_BE_COVARIANT,
+                                      arg_type)
+
+                    if typ.arg_kinds[i] == nodes.ARG_STAR:
+                        # builtins.tuple[T] is typing.Tuple[T, ...]
+                        arg_type = self.named_generic_type('builtins.tuple',
+                                                           [arg_type])
+                    elif typ.arg_kinds[i] == nodes.ARG_STAR2:
+                        arg_type = self.named_generic_type('builtins.dict',
+                                                           [self.str_type(),
+                                                            arg_type])
+                    item.arguments[i].variable.type = arg_type
+
+                # Type check initialization expressions.
+                for arg in item.arguments:
+                    init = arg.initialization_statement
+                    if init:
+                        self.accept(init)
+
             # Type check body in a new scope.
-            self.accept_in_frame(item.body)
+            with self.binder.frame_context():
+                self.accept(item.body)
 
             self.return_types.pop()
 
@@ -918,7 +654,7 @@ class TypeChecker(NodeVisitor[Type]):
     def expand_typevars(self, defn: FuncItem,
                         typ: CallableType) -> List[Tuple[FuncItem, CallableType]]:
         # TODO use generator
-        subst = []  # type: List[List[Tuple[int, Type]]]
+        subst = []  # type: List[List[Tuple[TypeVarId, Type]]]
         tvars = typ.variables or []
         tvars = tvars[:]
         if defn.info:
@@ -1062,8 +798,8 @@ class TypeChecker(NodeVisitor[Type]):
         self.enter_partial_types()
         old_binder = self.binder
         self.binder = ConditionalTypeBinder()
-        self.binder.push_frame()
-        self.accept(defn.defs)
+        with self.binder.frame_context():
+            self.accept(defn.defs)
         self.binder = old_binder
         self.check_multiple_inheritance(typ)
         self.leave_partial_types()
@@ -1159,7 +895,7 @@ class TypeChecker(NodeVisitor[Type]):
             return None
         for s in b.body:
             self.accept(s)
-            if self.breaking_out:
+            if self.binder.breaking_out:
                 break
 
     def visit_assignment_stmt(self, s: AssignmentStmt) -> Type:
@@ -1181,9 +917,7 @@ class TypeChecker(NodeVisitor[Type]):
     def check_assignment(self, lvalue: Node, rvalue: Node, infer_lvalue_type: bool = True) -> None:
         """Type check a single assignment: lvalue = rvalue."""
         if isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr):
-            ltuple = cast(Union[TupleExpr, ListExpr], lvalue)
-
-            self.check_assignment_to_multiple_lvalues(ltuple.items, rvalue, lvalue,
+            self.check_assignment_to_multiple_lvalues(lvalue.items, rvalue, lvalue,
                                                       infer_lvalue_type)
         else:
             lvalue_type, index_lvalue, inferred = self.check_lvalue(lvalue)
@@ -1200,7 +934,11 @@ class TypeChecker(NodeVisitor[Type]):
                         partial_types = self.find_partial_types(var)
                         if partial_types is not None:
                             if not self.current_node_deferred:
-                                var.type = rvalue_type
+                                if experiments.STRICT_OPTIONAL:
+                                    var.type = UnionType.make_simplified_union(
+                                        [rvalue_type, NoneTyp()])
+                                else:
+                                    var.type = rvalue_type
                             else:
                                 var.type = None
                             del partial_types[var]
@@ -1208,10 +946,19 @@ class TypeChecker(NodeVisitor[Type]):
                     # an error will be reported elsewhere.
                     self.infer_partial_type(lvalue_type.var, lvalue, rvalue_type)
                     return
-                rvalue_type = self.check_simple_assignment(lvalue_type, rvalue, lvalue)
+                if (is_literal_none(rvalue) and
+                        isinstance(lvalue, NameExpr) and
+                        isinstance(lvalue.node, Var) and
+                        lvalue.node.is_initialized_in_class):
+                    # Allow None's to be assigned to class variables with non-Optional types.
+                    rvalue_type = lvalue_type
+                else:
+                    rvalue_type = self.check_simple_assignment(lvalue_type, rvalue, lvalue)
 
                 if rvalue_type and infer_lvalue_type:
-                    self.binder.assign_type(lvalue, rvalue_type,
+                    self.binder.assign_type(lvalue,
+                                            rvalue_type,
+                                            lvalue_type,
                                             self.typing_mode_weak())
             elif index_lvalue:
                 self.check_indexed_assignment(index_lvalue, rvalue, rvalue)
@@ -1229,7 +976,7 @@ class TypeChecker(NodeVisitor[Type]):
             # control in cases like: a, b = [int, str] where rhs would get
             # type List[object]
 
-            rvalues = cast(Union[TupleExpr, ListExpr], rvalue).items
+            rvalues = rvalue.items
 
             if self.check_rvalue_count_in_assignment(lvalues, len(rvalues), context):
                 star_index = next((i for i, lv in enumerate(lvalues) if
@@ -1415,8 +1162,7 @@ class TypeChecker(NodeVisitor[Type]):
             lvalue_type = self.expr_checker.analyze_ref_expr(lvalue, lvalue=True)
             self.store_type(lvalue, lvalue_type)
         elif isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr):
-            lv = cast(Union[TupleExpr, ListExpr], lvalue)
-            types = [self.check_lvalue(sub_expr)[0] for sub_expr in lv.items]
+            types = [self.check_lvalue(sub_expr)[0] for sub_expr in lvalue.items]
             lvalue_type = TupleType(types, self.named_type('builtins.tuple'))
         else:
             lvalue_type = self.accept(lvalue)
@@ -1444,7 +1190,7 @@ class TypeChecker(NodeVisitor[Type]):
         """Infer the type of initialized variables from initializer type."""
         if self.typing_mode_weak():
             self.set_inferred_type(name, lvalue, AnyType())
-            self.binder.assign_type(lvalue, init_type, True)
+            self.binder.assign_type(lvalue, init_type, self.binder.get_declaration(lvalue), True)
         elif isinstance(init_type, Void):
             self.check_not_void(init_type, context)
             self.set_inference_error_fallback_type(name, lvalue, init_type, context)
@@ -1467,16 +1213,16 @@ class TypeChecker(NodeVisitor[Type]):
             self.set_inferred_type(name, lvalue, init_type)
 
     def infer_partial_type(self, name: Var, lvalue: Node, init_type: Type) -> bool:
-        if isinstance(init_type, NoneTyp):
-            partial_type = PartialType(None, name)
+        if isinstance(init_type, (NoneTyp, UninhabitedType)):
+            partial_type = PartialType(None, name, [init_type])
         elif isinstance(init_type, Instance):
             fullname = init_type.type.fullname()
-            if ((fullname == 'builtins.list' or fullname == 'builtins.set' or
-                 fullname == 'builtins.dict')
-                    and isinstance(init_type.args[0], NoneTyp)
-                    and (fullname != 'builtins.dict' or isinstance(init_type.args[1], NoneTyp))
-                    and isinstance(lvalue, NameExpr)):
-                partial_type = PartialType(init_type.type, name)
+            if (isinstance(lvalue, NameExpr) and
+                    (fullname == 'builtins.list' or
+                     fullname == 'builtins.set' or
+                     fullname == 'builtins.dict') and
+                    all(isinstance(t, (NoneTyp, UninhabitedType)) for t in init_type.args)):
+                partial_type = PartialType(init_type.type, name, init_type.args)
             else:
                 return False
         else:
@@ -1507,7 +1253,7 @@ class TypeChecker(NodeVisitor[Type]):
 
         We implement this here by giving x a valid type (Any).
         """
-        if context.get_line() in self.errors.ignored_lines:
+        if context.get_line() in self.errors.ignored_lines[self.errors.file]:
             self.set_inferred_type(var, lvalue, AnyType())
 
     def narrow_type_from_binder(self, expr: Node, known_type: Type) -> Type:
@@ -1559,8 +1305,8 @@ class TypeChecker(NodeVisitor[Type]):
             self, lvalue: IndexExpr, rvalue: Node) -> None:
         # TODO: Should we share some of this with try_infer_partial_type?
         if isinstance(lvalue.base, RefExpr) and isinstance(lvalue.base.node, Var):
-            var = cast(Var, lvalue.base.node)
-            if var is not None and isinstance(var.type, PartialType):
+            var = lvalue.base.node
+            if isinstance(var.type, PartialType):
                 type_type = var.type.type
                 if type_type is None:
                     return  # The partial type is None.
@@ -1572,10 +1318,15 @@ class TypeChecker(NodeVisitor[Type]):
                     # TODO: Don't infer things twice.
                     key_type = self.accept(lvalue.index)
                     value_type = self.accept(rvalue)
-                    if is_valid_inferred_type(key_type) and is_valid_inferred_type(value_type):
+                    full_key_type = UnionType.make_simplified_union(
+                        [key_type, var.type.inner_types[0]])
+                    full_value_type = UnionType.make_simplified_union(
+                        [value_type, var.type.inner_types[1]])
+                    if (is_valid_inferred_type(full_key_type) and
+                            is_valid_inferred_type(full_value_type)):
                         if not self.current_node_deferred:
                             var.type = self.named_generic_type('builtins.dict',
-                                                               [key_type, value_type])
+                                                               [full_key_type, full_value_type])
                         del partial_types[var]
 
     def visit_expression_stmt(self, s: ExpressionStmt) -> Type:
@@ -1583,7 +1334,7 @@ class TypeChecker(NodeVisitor[Type]):
 
     def visit_return_stmt(self, s: ReturnStmt) -> Type:
         """Type check a return statement."""
-        self.breaking_out = True
+        self.binder.breaking_out = True
         if self.is_within_function():
             if self.function_stack[-1].is_generator:
                 return_type = self.get_generator_return_type(self.return_types[-1])
@@ -1651,77 +1402,52 @@ class TypeChecker(NodeVisitor[Type]):
 
     def visit_if_stmt(self, s: IfStmt) -> Type:
         """Type check an if statement."""
-        broken = True
-        ending_frames = []  # type: List[Frame]
-        clauses_frame = self.binder.push_frame()
-        for e, b in zip(s.expr, s.body):
-            t = self.accept(e)
-            self.check_not_void(t, e)
-            if_map, else_map = find_isinstance_check(
-                e, self.type_map,
-                self.typing_mode_weak()
-            )
-            if if_map is None:
-                # The condition is always false
-                # XXX should issue a warning?
-                pass
-            else:
-                # Only type check body if the if condition can be true.
-                self.binder.push_frame()
-                if if_map:
-                    for var, type in if_map.items():
-                        self.binder.push(var, type)
-
-                self.accept(b)
-                _, frame = self.binder.pop_frame()
-                if not self.breaking_out:
-                    broken = False
-                    ending_frames.append(meet_frames(clauses_frame, frame))
-
-                self.breaking_out = False
-
-                if else_map:
-                    for var, type in else_map.items():
-                        self.binder.push(var, type)
-            if else_map is None:
-                # The condition is always true => remaining elif/else blocks
-                # can never be reached.
-
-                # Might also want to issue a warning
-                # print("Warning: isinstance always true")
-                if broken:
-                    self.binder.pop_frame()
-                    self.breaking_out = True
-                    return None
-                break
-        else:
-            if s.else_body:
-                self.accept(s.else_body)
-
-                if self.breaking_out and broken:
-                    self.binder.pop_frame()
-                    return None
-
-                if not self.breaking_out:
-                    ending_frames.append(clauses_frame)
-
-                self.breaking_out = False
-            else:
-                ending_frames.append(clauses_frame)
-
-        self.binder.pop_frame()
-        self.binder.update_from_options(ending_frames)
+        breaking_out = True
+        # This frame records the knowledge from previous if/elif clauses not being taken.
+        with self.binder.frame_context():
+            for e, b in zip(s.expr, s.body):
+                t = self.accept(e)
+                self.check_not_void(t, e)
+                if_map, else_map = find_isinstance_check(
+                    e, self.type_map,
+                    self.typing_mode_weak()
+                )
+                if if_map is None:
+                    # The condition is always false
+                    # XXX should issue a warning?
+                    pass
+                else:
+                    # Only type check body if the if condition can be true.
+                    with self.binder.frame_context(2):
+                        if if_map:
+                            for var, type in if_map.items():
+                                self.binder.push(var, type)
+
+                        self.accept(b)
+                    breaking_out = breaking_out and self.binder.last_pop_breaking_out
+
+                    if else_map:
+                        for var, type in else_map.items():
+                            self.binder.push(var, type)
+                if else_map is None:
+                    # The condition is always true => remaining elif/else blocks
+                    # can never be reached.
+
+                    # Might also want to issue a warning
+                    # print("Warning: isinstance always true")
+                    break
+            else:  # Didn't break => can't prove one of the conditions is always true
+                with self.binder.frame_context(2):
+                    if s.else_body:
+                        self.accept(s.else_body)
+                breaking_out = breaking_out and self.binder.last_pop_breaking_out
+        if breaking_out:
+            self.binder.breaking_out = True
+        return None
 
     def visit_while_stmt(self, s: WhileStmt) -> Type:
         """Type check a while statement."""
-        self.binder.push_frame()
-        self.binder.push_loop_frame()
-        self.accept_in_frame(IfStmt([s.expr], [s.body], None),
-                             repeat_till_fixed=True)
-        self.binder.pop_loop_frame()
-        if s.else_body:
-            self.accept(s.else_body)
-        self.binder.pop_frame(False, True)
+        self.accept_loop(IfStmt([s.expr], [s.body], None), s.else_body)
 
     def visit_operator_assignment_stmt(self,
                                        s: OperatorAssignmentStmt) -> Type:
@@ -1752,7 +1478,7 @@ class TypeChecker(NodeVisitor[Type]):
 
     def visit_raise_stmt(self, s: RaiseStmt) -> Type:
         """Type check a raise statement."""
-        self.breaking_out = True
+        self.binder.breaking_out = True
         if s.expr:
             self.type_check_raise(s.expr, s)
         if s.from_expr:
@@ -1769,7 +1495,7 @@ class TypeChecker(NodeVisitor[Type]):
                     # Good!
                     return
                 # Else fall back to the checks below (which will fail).
-        if isinstance(typ, TupleType) and self.pyversion[0] == 2:
+        if isinstance(typ, TupleType) and self.options.python_version[0] == 2:
             # allow `raise type, value, traceback`
             # https://docs.python.org/2/reference/simple_stmts.html#the-raise-statement
             # TODO: Also check tuple item types.
@@ -1784,57 +1510,70 @@ class TypeChecker(NodeVisitor[Type]):
 
     def visit_try_stmt(self, s: TryStmt) -> Type:
         """Type check a try statement."""
-        completed_frames = []  # type: List[Frame]
-        self.binder.push_frame()
-        self.binder.try_frames.add(len(self.binder.frames) - 2)
-        self.accept(s.body)
-        self.binder.try_frames.remove(len(self.binder.frames) - 2)
-        self.breaking_out = False
-        changed, frame_on_completion = self.binder.pop_frame()
-        completed_frames.append(frame_on_completion)
-
-        for i in range(len(s.handlers)):
-            self.binder.push_frame()
-            if s.types[i]:
-                t = self.visit_except_handler_test(s.types[i])
-                if s.vars[i]:
-                    # To support local variables, we make this a definition line,
-                    # causing assignment to set the variable's type.
-                    s.vars[i].is_def = True
-                    self.check_assignment(s.vars[i], self.temp_node(t, s.vars[i]))
-            self.accept(s.handlers[i])
-            if s.vars[i]:
-                # Exception variables are deleted in python 3 but not python 2.
-                # But, since it's bad form in python 2 and the type checking
-                # wouldn't work very well, we delete it anyway.
-
-                # Unfortunately, this doesn't let us detect usage before the
-                # try/except block.
-                if self.pyversion[0] >= 3:
-                    source = s.vars[i].name
-                else:
-                    source = ('(exception variable "{}", which we do not accept '
-                              'outside except: blocks even in python 2)'.format(s.vars[i].name))
-                var = cast(Var, s.vars[i].node)
-                var.type = DeletedType(source=source)
-                self.binder.cleanse(s.vars[i])
-
-            self.breaking_out = False
-            changed, frame_on_completion = self.binder.pop_frame()
-            completed_frames.append(frame_on_completion)
-
-        # Do the else block similar to the way we do except blocks.
-        if s.else_body:
-            self.binder.push_frame()
-            self.accept(s.else_body)
-            self.breaking_out = False
-            changed, frame_on_completion = self.binder.pop_frame()
-            completed_frames.append(frame_on_completion)
-
-        self.binder.update_from_options(completed_frames)
-
-        if s.finally_body:
+        # Our enclosing frame will get the result if the try/except falls through.
+        # This one gets all possible intermediate states
+        with self.binder.frame_context():
+            if s.finally_body:
+                self.binder.try_frames.add(len(self.binder.frames) - 1)
+                breaking_out = self.visit_try_without_finally(s)
+                self.binder.try_frames.remove(len(self.binder.frames) - 1)
+                # First we check that finally_body is type safe for all intermediate frames
+                self.accept(s.finally_body)
+                breaking_out = breaking_out or self.binder.breaking_out
+            else:
+                breaking_out = self.visit_try_without_finally(s)
+
+        if not breaking_out and s.finally_body:
+            # Then we try again for the more restricted set of options that can fall through
             self.accept(s.finally_body)
+        self.binder.breaking_out = breaking_out
+        return None
+
+    def visit_try_without_finally(self, s: TryStmt) -> bool:
+        """Type check a try statement, ignoring the finally block.
+
+        Return whether we are guaranteed to be breaking out.
+        Otherwise, it will place the possible resulting frames that
+        don't break out into self.binder.frames[-2].
+        """
+        breaking_out = True
+        # This frame records the possible states that exceptions can leave variables in
+        # during the try: block
+        with self.binder.frame_context():
+            with self.binder.frame_context(3):
+                self.binder.try_frames.add(len(self.binder.frames) - 2)
+                self.accept(s.body)
+                self.binder.try_frames.remove(len(self.binder.frames) - 2)
+                if s.else_body:
+                    self.accept(s.else_body)
+            breaking_out = breaking_out and self.binder.last_pop_breaking_out
+            for i in range(len(s.handlers)):
+                with self.binder.frame_context(3):
+                    if s.types[i]:
+                        t = self.visit_except_handler_test(s.types[i])
+                        if s.vars[i]:
+                            # To support local variables, we make this a definition line,
+                            # causing assignment to set the variable's type.
+                            s.vars[i].is_def = True
+                            self.check_assignment(s.vars[i], self.temp_node(t, s.vars[i]))
+                    self.accept(s.handlers[i])
+                    if s.vars[i]:
+                        # Exception variables are deleted in python 3 but not python 2.
+                        # But, since it's bad form in python 2 and the type checking
+                        # wouldn't work very well, we delete it anyway.
+
+                        # Unfortunately, this doesn't let us detect usage before the
+                        # try/except block.
+                        if self.options.python_version[0] >= 3:
+                            source = s.vars[i].name
+                        else:
+                            source = ('(exception variable "{}", which we do not accept outside '
+                                      'except: blocks even in python 2)'.format(s.vars[i].name))
+                        var = cast(Var, s.vars[i].node)
+                        var.type = DeletedType(source=source)
+                        self.binder.cleanse(s.vars[i])
+                breaking_out = breaking_out and self.binder.last_pop_breaking_out
+        return breaking_out
 
     def visit_except_handler_test(self, n: Node) -> Type:
         """Type check an exception handler test clause."""
@@ -1867,13 +1606,7 @@ class TypeChecker(NodeVisitor[Type]):
         """Type check a for statement."""
         item_type = self.analyze_iterable_item_type(s.expr)
         self.analyze_index_variables(s.index, item_type, s)
-        self.binder.push_frame()
-        self.binder.push_loop_frame()
-        self.accept_in_frame(s.body, repeat_till_fixed=True)
-        self.binder.pop_loop_frame()
-        if s.else_body:
-            self.accept(s.else_body)
-        self.binder.pop_frame(False, True)
+        self.accept_loop(s.body, s.else_body)
 
     def analyze_iterable_item_type(self, expr: Node) -> Type:
         """Analyse iterable expression and return iterator item type."""
@@ -1881,7 +1614,10 @@ class TypeChecker(NodeVisitor[Type]):
 
         self.check_not_void(iterable, expr)
         if isinstance(iterable, TupleType):
-            joined = NoneTyp()  # type: Type
+            if experiments.STRICT_OPTIONAL:
+                joined = UninhabitedType()  # type: Type
+            else:
+                joined = NoneTyp()
             for item in iterable.items:
                 joined = join_types(joined, item)
             if isinstance(joined, ErrorType):
@@ -1899,7 +1635,7 @@ class TypeChecker(NodeVisitor[Type]):
             method = echk.analyze_external_member_access('__iter__', iterable,
                                                          expr)
             iterator = echk.check_call(method, [], [], expr)[0]
-            if self.pyversion[0] >= 3:
+            if self.options.python_version[0] >= 3:
                 nextmethod = '__next__'
             else:
                 nextmethod = 'next'
@@ -1924,7 +1660,6 @@ class TypeChecker(NodeVisitor[Type]):
             def flatten(t: Node) -> List[Node]:
                 """Flatten a nested sequence of tuples/lists into one list of nodes."""
                 if isinstance(t, TupleExpr) or isinstance(t, ListExpr):
-                    t = cast(Union[TupleExpr, ListExpr], t)
                     return [b for a in t.items for b in flatten(a)]
                 else:
                     return [t]
@@ -1932,7 +1667,9 @@ class TypeChecker(NodeVisitor[Type]):
             s.expr.accept(self)
             for elt in flatten(s.expr):
                 if isinstance(elt, NameExpr):
-                    self.binder.assign_type(elt, DeletedType(source=elt.name),
+                    self.binder.assign_type(elt,
+                                            DeletedType(source=elt.name),
+                                            self.binder.get_declaration(elt),
                                             self.typing_mode_weak())
             return None
 
@@ -2053,12 +1790,12 @@ class TypeChecker(NodeVisitor[Type]):
         return self.expr_checker.visit_member_expr(e)
 
     def visit_break_stmt(self, s: BreakStmt) -> Type:
-        self.breaking_out = True
+        self.binder.breaking_out = True
         self.binder.allow_jump(self.binder.loop_frames[-1] - 1)
         return None
 
     def visit_continue_stmt(self, s: ContinueStmt) -> Type:
-        self.breaking_out = True
+        self.binder.breaking_out = True
         self.binder.allow_jump(self.binder.loop_frames[-1])
         return None
 
@@ -2248,7 +1985,7 @@ class TypeChecker(NodeVisitor[Type]):
         self.type_map[node] = typ
 
     def typing_mode_none(self) -> bool:
-        if self.is_dynamic_function() and not self.check_untyped_defs:
+        if self.is_dynamic_function() and not self.options.check_untyped_defs:
             return not self.weak_opts
         elif self.function_stack:
             return False
@@ -2256,7 +1993,7 @@ class TypeChecker(NodeVisitor[Type]):
             return False
 
     def typing_mode_weak(self) -> bool:
-        if self.is_dynamic_function() and not self.check_untyped_defs:
+        if self.is_dynamic_function() and not self.options.check_untyped_defs:
             return bool(self.weak_opts)
         elif self.function_stack:
             return False
@@ -2264,7 +2001,7 @@ class TypeChecker(NodeVisitor[Type]):
             return 'global' in self.weak_opts
 
     def typing_mode_full(self) -> bool:
-        if self.is_dynamic_function() and not self.check_untyped_defs:
+        if self.is_dynamic_function() and not self.options.check_untyped_defs:
             return False
         elif self.function_stack:
             return True
@@ -2311,8 +2048,12 @@ class TypeChecker(NodeVisitor[Type]):
         partial_types = self.partial_types.pop()
         if not self.current_node_deferred:
             for var, context in partial_types.items():
-                self.msg.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
-                var.type = AnyType()
+                if experiments.STRICT_OPTIONAL and cast(PartialType, var.type).type is None:
+                    # None partial type: assume variable is intended to have type None
+                    var.type = NoneTyp()
+                else:
+                    self.msg.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
+                    var.type = AnyType()
 
     def find_partial_types(self, var: Var) -> Optional[Dict[Var, Context]]:
         for partial_types in reversed(self.partial_types):
@@ -2356,11 +2097,111 @@ class TypeChecker(NodeVisitor[Type]):
         return method_type_with_fallback(func, self.named_type('builtins.function'))
 
 
+# Data structure returned by find_isinstance_check representing
+# information learned from the truth or falsehood of a condition.  The
+# dict maps nodes representing expressions like 'a[0].x' to their
+# refined types under the assumption that the condition has a
+# particular truth value. A value of None means that the condition can
+# never have that truth value.
+
+# NB: The keys of this dict are nodes in the original source program,
+# which are compared by reference equality--effectively, being *the
+# same* expression of the program, not just two identical expressions
+# (such as two references to the same variable). TODO: it would
+# probably be better to have the dict keyed by the nodes' literal_hash
+# field instead.
+
+# NB: This should be `TypeMap = Optional[Dict[Node, Type]]`!
+# But see https://github.com/python/mypy/issues/1637
+TypeMap = Dict[Node, Type]
+
+
+def conditional_type_map(expr: Node,
+                         current_type: Optional[Type],
+                         proposed_type: Optional[Type],
+                         *,
+                         weak: bool = False
+                         ) -> Tuple[TypeMap, TypeMap]:
+    """Takes in an expression, the current type of the expression, and a
+    proposed type of that expression.
+
+    Returns a 2-tuple: The first element is a map from the expression to
+    the proposed type, if the expression can be the proposed type.  The
+    second element is a map from the expression to the type it would hold
+    if it were not the proposed type, if any."""
+    if proposed_type:
+        if current_type:
+            if is_proper_subtype(current_type, proposed_type):
+                return {expr: proposed_type}, None
+            elif not is_overlapping_types(current_type, proposed_type):
+                return None, {expr: current_type}
+            else:
+                remaining_type = restrict_subtype_away(current_type, proposed_type)
+                return {expr: proposed_type}, {expr: remaining_type}
+        else:
+            return {expr: proposed_type}, {}
+    else:
+        # An isinstance check, but we don't understand the type
+        if weak:
+            return {expr: AnyType()}, {expr: current_type}
+        else:
+            return {}, {}
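
A toy standalone model of the three cases handled above when both a current and a proposed type are known, treating a type as a set of possible runtime classes (the real function works on mypy Type objects, so this is only an analogy):

    def toy_conditional_type_map(expr, current, proposed):
        if current <= proposed:            # current is already a subtype
            return {expr: proposed}, None  # the negative branch is unreachable
        if not (current & proposed):       # the types cannot overlap
            return None, {expr: current}   # the positive branch is unreachable
        # Overlapping: narrow to the proposed type, and to what remains otherwise.
        return {expr: proposed}, {expr: current - proposed}

    INT, STR = frozenset({'int'}), frozenset({'str'})
    print(toy_conditional_type_map('x', INT | STR, INT))
    # ({'x': frozenset({'int'})}, {'x': frozenset({'str'})})
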
+
+
+def is_literal_none(n: Node) -> bool:
+    return isinstance(n, NameExpr) and n.fullname == 'builtins.None'
+
+
+def and_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap:
+    """Calculate what information we can learn from the truth of (e1 and e2)
+    in terms of the information that we can learn from the truth of e1 and
+    the truth of e2.
+    """
+
+    if m1 is None or m2 is None:
+        # One of the conditions can never be true.
+        return None
+    # Both conditions can be true; combine the information. Anything
+    # we learn from either condition's truth is valid. If the same
+    # expression's type is refined by both conditions, we somewhat
+    # arbitrarily give precedence to m2. (In the future, we could use
+    # an intersection type.)
+    result = m2.copy()
+    m2_keys = set(n2.literal_hash for n2 in m2)
+    for n1 in m1:
+        if n1.literal_hash not in m2_keys:
+            result[n1] = m1[n1]
+    return result
+
+
+def or_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap:
+    """Calculate what information we can learn from the truth of (e1 or e2)
+    in terms of the information that we can learn from the truth of e1 and
+    the truth of e2.
+    """
+
+    if m1 is None:
+        return m2
+    if m2 is None:
+        return m1
+    # Both conditions can be true. Combine information about
+    # expressions whose type is refined by both conditions. (We do not
+    # learn anything about expressions whose type is refined by only
+    # one condition.)
+    result = {}
+    for n1 in m1:
+        for n2 in m2:
+            if n1.literal_hash == n2.literal_hash:
+                result[n1] = UnionType.make_simplified_union([m1[n1], m2[n2]])
+    return result
+
+
 def find_isinstance_check(node: Node,
                           type_map: Dict[Node, Type],
-                          weak: bool=False) \
-        -> Tuple[Optional[Dict[Node, Type]], Optional[Dict[Node, Type]]]:
-    """Find any isinstance checks (within a chain of ands).
+                          weak: bool=False
+                          ) -> Tuple[TypeMap, TypeMap]:
+    """Find any isinstance checks (within a chain of ands).  Includes
+    implicit and explicit checks for None.
 
     Return value is a map of variables to their types if the condition
     is true and a map of variables to their types if the condition is false.
@@ -2376,22 +2217,33 @@ def find_isinstance_check(node: Node,
             if expr.literal == LITERAL_TYPE:
                 vartype = type_map[expr]
                 type = get_isinstance_type(node.args[1], type_map)
-                if type:
-                    elsetype = vartype
-                    if vartype:
-                        if is_proper_subtype(vartype, type):
-                            return {expr: type}, None
-                        elif not is_overlapping_types(vartype, type):
-                            return None, {expr: elsetype}
-                        else:
-                            elsetype = restrict_subtype_away(vartype, type)
-                    return {expr: type}, {expr: elsetype}
-                else:
-                    # An isinstance check, but we don't understand the type
-                    if weak:
-                        return {expr: AnyType()}, {expr: vartype}
+                return conditional_type_map(expr, vartype, type, weak=weak)
+    elif (isinstance(node, ComparisonExpr) and any(is_literal_none(n) for n in node.operands) and
+          experiments.STRICT_OPTIONAL):
+        # Check for `x is None` and `x is not None`.
+        is_not = node.operators == ['is not']
+        if is_not or node.operators == ['is']:
+            if_vars = {}  # type: Dict[Node, Type]
+            else_vars = {}  # type: Dict[Node, Type]
+            for expr in node.operands:
+                if expr.literal == LITERAL_TYPE and not is_literal_none(expr) and expr in type_map:
+                    # This should only be true at most once: there should be
+                    # two elements in node.operands, and at least one of them
+                    # should represent a None.
+                    vartype = type_map[expr]
+                    if_vars, else_vars = conditional_type_map(expr, vartype, NoneTyp(), weak=weak)
+                    break
+
+            if is_not:
+                if_vars, else_vars = else_vars, if_vars
+            return if_vars, else_vars
+    elif isinstance(node, RefExpr) and experiments.STRICT_OPTIONAL:
+        # The type could be falsy, so we can't deduce anything new about the else branch
+        vartype = type_map[node]
+        _, if_vars = conditional_type_map(node, vartype, NoneTyp(), weak=weak)
+        return if_vars, {}
     elif isinstance(node, OpExpr) and node.op == 'and':
-        left_if_vars, right_else_vars = find_isinstance_check(
+        left_if_vars, left_else_vars = find_isinstance_check(
             node.left,
             type_map,
             weak,
@@ -2402,16 +2254,28 @@ def find_isinstance_check(node: Node,
             type_map,
             weak,
         )
-        if left_if_vars:
-            if right_if_vars is not None:
-                left_if_vars.update(right_if_vars)
-            else:
-                left_if_vars = None
-        else:
-            left_if_vars = right_if_vars
 
-        # Make no claim about the types in else
-        return left_if_vars, {}
+        # (e1 and e2) is true if both e1 and e2 are true,
+        # and false if at least one of e1 and e2 is false.
+        return (and_conditional_maps(left_if_vars, right_if_vars),
+                or_conditional_maps(left_else_vars, right_else_vars))
+    elif isinstance(node, OpExpr) and node.op == 'or':
+        left_if_vars, left_else_vars = find_isinstance_check(
+            node.left,
+            type_map,
+            weak,
+        )
+
+        right_if_vars, right_else_vars = find_isinstance_check(
+            node.right,
+            type_map,
+            weak,
+        )
+
+        # (e1 or e2) is true if at least one of e1 or e2 is true,
+        # and false if both e1 and e2 are false.
+        return (or_conditional_maps(left_if_vars, right_if_vars),
+                and_conditional_maps(left_else_vars, right_else_vars))
     elif isinstance(node, UnaryExpr) and node.op == 'not':
         left, right = find_isinstance_check(node.expr, type_map, weak)
         return right, left
@@ -2447,17 +2311,17 @@ def get_isinstance_type(node: Node, type_map: Dict[Node, Type]) -> Type:
         return UnionType(types)
 
 
-def expand_node(defn: Node, map: Dict[int, Type]) -> Node:
+def expand_node(defn: Node, map: Dict[TypeVarId, Type]) -> Node:
     visitor = TypeTransformVisitor(map)
     return defn.accept(visitor)
 
 
-def expand_func(defn: FuncItem, map: Dict[int, Type]) -> FuncItem:
+def expand_func(defn: FuncItem, map: Dict[TypeVarId, Type]) -> FuncItem:
     return cast(FuncItem, expand_node(defn, map))
 
 
 class TypeTransformVisitor(TransformVisitor):
-    def __init__(self, map: Dict[int, Type]) -> None:
+    def __init__(self, map: Dict[TypeVarId, Type]) -> None:
         super().__init__()
         self.map = map
 
@@ -2571,6 +2435,12 @@ def is_valid_inferred_type(typ: Type) -> bool:
     Examples of invalid types include the None type or a type with a None component.
     """
     if is_same_type(typ, NoneTyp()):
+        # With strict Optional checking, we *may* eventually infer NoneTyp, but
+        # we only do that if we can't infer a specific Optional type.  This
+        # resolution happens in leave_partial_types when we pop a partial types
+        # scope.
+        return False
+    if is_same_type(typ, UninhabitedType()):
         return False
     elif isinstance(typ, Instance):
         for arg in typ.args:
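
A minimal sketch of the narrowing that the new TypeMap helpers (conditional_type_map,
and_conditional_maps, or_conditional_maps) are intended to support. This is hypothetical
user code, not part of the patch, and the None checks assume strict Optional checking
(experiments.STRICT_OPTIONAL) is enabled:

    from typing import Optional

    def f(x: Optional[int], y: Optional[str]) -> None:
        if x is not None and y is not None:
            # both operands are narrowed inside the 'and' branch
            print(x + 1, y.upper())
        if isinstance(x, int) or y is None:
            pass
        else:
            # the else branch combines the negations: here x is None and y is str
            print(y.upper())
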
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 64263ce..599d67a 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -4,8 +4,8 @@ from typing import cast, Dict, List, Tuple, Callable, Union, Optional
 
 from mypy.types import (
     Type, AnyType, CallableType, Overloaded, NoneTyp, Void, TypeVarDef,
-    TupleType, Instance, TypeVarType, ErasedType, UnionType,
-    PartialType, DeletedType, UnboundType, TypeType
+    TupleType, Instance, TypeVarId, TypeVarType, ErasedType, UnionType,
+    PartialType, DeletedType, UnboundType, UninhabitedType, TypeType
 )
 from mypy.nodes import (
     NameExpr, RefExpr, Var, FuncDef, OverloadedFuncDef, TypeInfo, CallExpr,
@@ -22,7 +22,7 @@ from mypy import nodes
 import mypy.checker
 from mypy import types
 from mypy.sametypes import is_same_type
-from mypy.replacetvars import replace_func_type_vars
+from mypy.erasetype import replace_meta_vars
 from mypy.messages import MessageBuilder
 from mypy import messages
 from mypy.infer import infer_type_arguments, infer_function_type_arguments
@@ -34,7 +34,10 @@ from mypy.checkmember import analyze_member_access, type_object_type
 from mypy.semanal import self_type
 from mypy.constraints import get_actual_type
 from mypy.checkstrformat import StringFormatterChecker
+from mypy.expandtype import expand_type
+import mypy.checkexpr
 
+from mypy import experiments
 
 # Type of callback used for checking individual function arguments. See
 # check_args() below for details.
@@ -133,7 +136,7 @@ class ExpressionChecker:
             return self.accept(e.analyzed, self.chk.type_context[-1])
         self.try_infer_partial_type(e)
         callee_type = self.accept(e.callee)
-        if (self.chk.disallow_untyped_calls and
+        if (self.chk.options.disallow_untyped_calls and
                 self.chk.typing_mode_full() and
                 isinstance(callee_type, CallableType)
                 and callee_type.implicit):
@@ -154,19 +157,21 @@ class ExpressionChecker:
             var = cast(Var, e.callee.expr.node)
             partial_types = self.chk.find_partial_types(var)
             if partial_types is not None and not self.chk.current_node_deferred:
-                partial_type_type = cast(PartialType, var.type).type
-                if partial_type_type is None:
+                partial_type = cast(PartialType, var.type)
+                if partial_type is None or partial_type.type is None:
                     # A partial None type -> can't infer anything.
                     return
-                typename = partial_type_type.fullname()
+                typename = partial_type.type.fullname()
                 methodname = e.callee.name
                 # Sometimes we can infer a full type for a partial List, Dict or Set type.
                 # TODO: Don't infer argument expression twice.
                 if (typename in self.item_args and methodname in self.item_args[typename]
                         and e.arg_kinds == [ARG_POS]):
                     item_type = self.accept(e.args[0])
-                    if mypy.checker.is_valid_inferred_type(item_type):
-                        var.type = self.chk.named_generic_type(typename, [item_type])
+                    full_item_type = UnionType.make_simplified_union(
+                        [item_type, partial_type.inner_types[0]])
+                    if mypy.checker.is_valid_inferred_type(full_item_type):
+                        var.type = self.chk.named_generic_type(typename, [full_item_type])
                         del partial_types[var]
                 elif (typename in self.container_args
                       and methodname in self.container_args[typename]
@@ -175,10 +180,15 @@ class ExpressionChecker:
                     if isinstance(arg_type, Instance):
                         arg_typename = arg_type.type.fullname()
                         if arg_typename in self.container_args[typename][methodname]:
+                            full_item_types = [
+                                UnionType.make_simplified_union([item_type, prev_type])
+                                for item_type, prev_type
+                                in zip(arg_type.args, partial_type.inner_types)
+                            ]
                             if all(mypy.checker.is_valid_inferred_type(item_type)
-                                   for item_type in arg_type.args):
+                                   for item_type in full_item_types):
                                 var.type = self.chk.named_generic_type(typename,
-                                                                       list(arg_type.args))
+                                                                       list(full_item_types))
                                 del partial_types[var]
 
     def check_call_expr_with_callee_type(self, callee_type: Type,
@@ -226,6 +236,7 @@ class ExpressionChecker:
                 lambda i: self.accept(args[i]))
 
             if callee.is_generic():
+                callee = freshen_generic_callable(callee)
                 callee = self.infer_function_type_arguments_using_context(
                     callee, context)
                 callee = self.infer_function_type_arguments(
@@ -240,6 +251,11 @@ class ExpressionChecker:
             self.check_argument_types(arg_types, arg_kinds, callee,
                                       formal_to_actual, context,
                                       messages=arg_messages)
+
+            if (callee.is_type_obj() and (len(arg_types) == 1)
+                    and is_equivalent(callee.ret_type, self.named_type('builtins.type'))):
+                callee = callee.copy_modified(ret_type=TypeType(arg_types[0]))
+
             if callable_node:
                 # Store the inferred callable type.
                 self.chk.store_type(callable_node, callee)
@@ -270,8 +286,8 @@ class ExpressionChecker:
                     callee)
         elif isinstance(callee, Instance):
             call_function = analyze_member_access('__call__', callee, context,
-                                         False, False, self.named_type, self.not_ready_callback,
-                                         self.msg)
+                                         False, False, False, self.named_type,
+                                         self.not_ready_callback, self.msg)
             return self.check_call(call_function, args, arg_kinds, context, arg_names,
                                    callable_node, arg_messages)
         elif isinstance(callee, TypeVarType):
@@ -320,7 +336,7 @@ class ExpressionChecker:
         self.msg.unsupported_type_type(item, context)
         return AnyType()
 
-    def infer_arg_types_in_context(self, callee: CallableType,
+    def infer_arg_types_in_context(self, callee: Optional[CallableType],
                                    args: List[Node]) -> List[Type]:
         """Infer argument expression types using a callable type as context.
 
@@ -386,16 +402,16 @@ class ExpressionChecker:
         ctx = self.chk.type_context[-1]
         if not ctx:
             return callable
-        # The return type may have references to function type variables that
+        # The return type may have references to type metavariables that
         # we are inferring right now. We must consider them as indeterminate
         # and they are not potential results; thus we replace them with the
         # special ErasedType type. On the other hand, class type variables are
         # valid results.
-        erased_ctx = replace_func_type_vars(ctx, ErasedType())
+        erased_ctx = replace_meta_vars(ctx, ErasedType())
         ret_type = callable.ret_type
         if isinstance(ret_type, TypeVarType):
             if ret_type.values or (not isinstance(ctx, Instance) or
-                                   not cast(Instance, ctx).args):
+                                   not ctx.args):
                 # The return type is a type variable. If it has values, we can't easily restrict
                 # type inference to conform to the valid values. If it's unrestricted, we could
                 # infer a too general type for the type variable if we use context, and this could
@@ -411,7 +427,7 @@ class ExpressionChecker:
         # Only substitute non-None and non-erased types.
         new_args = []  # type: List[Type]
         for arg in args:
-            if isinstance(arg, NoneTyp) or has_erased_component(arg):
+            if isinstance(arg, (NoneTyp, UninhabitedType)) or has_erased_component(arg):
                 new_args.append(None)
             else:
                 new_args.append(arg)
@@ -470,7 +486,7 @@ class ExpressionChecker:
                 #       if they shuffle type variables around, as we assume that there is a 1-1
                 #       correspondence with dict type variables. This is a marginal issue and
                 #       a little tricky to fix so it's left unfixed for now.
-                if isinstance(inferred_args[0], NoneTyp):
+                if isinstance(inferred_args[0], (NoneTyp, UninhabitedType)):
                     inferred_args[0] = self.named_type('builtins.str')
                 elif not is_subtype(self.named_type('builtins.str'), inferred_args[0]):
                     self.msg.fail(messages.KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE,
@@ -504,7 +520,7 @@ class ExpressionChecker:
         # information to infer the argument. Replace them with None values so
         # that they are not applied yet below.
         for i, arg in enumerate(inferred_args):
-            if isinstance(arg, NoneTyp) or has_erased_component(arg):
+            if isinstance(arg, (NoneTyp, UninhabitedType)) or has_erased_component(arg):
                 inferred_args[i] = None
 
         callee_type = cast(CallableType, self.apply_generic_arguments(
@@ -845,7 +861,7 @@ class ExpressionChecker:
         else:
             # This is a reference to a non-module attribute.
             return analyze_member_access(e.name, self.accept(e.expr), e,
-                                         is_lvalue, False,
+                                         is_lvalue, False, False,
                                          self.named_type, self.not_ready_callback, self.msg)
 
     def analyze_external_member_access(self, member: str, base_type: Type,
@@ -854,7 +870,7 @@ class ExpressionChecker:
         refer to private definitions. Return the result type.
         """
         # TODO remove; no private definitions in mypy
-        return analyze_member_access(member, base_type, context, False, False,
+        return analyze_member_access(member, base_type, context, False, False, False,
                                      self.named_type, self.not_ready_callback, self.msg)
 
     def visit_int_expr(self, e: IntExpr) -> Type:
@@ -883,7 +899,7 @@ class ExpressionChecker:
 
     def visit_ellipsis(self, e: EllipsisExpr) -> Type:
         """Type check '...'."""
-        if self.chk.pyversion[0] >= 3:
+        if self.chk.options.python_version[0] >= 3:
             return self.named_type('builtins.ellipsis')
         else:
             # '...' is not valid in normal Python 2 code, but it can
@@ -980,7 +996,7 @@ class ExpressionChecker:
         return result
 
     def get_operator_method(self, op: str) -> str:
-        if op == '/' and self.chk.pyversion[0] == 2:
+        if op == '/' and self.chk.options.python_version[0] == 2:
             # TODO also check for "from __future__ import division"
             return '__div__'
         else:
@@ -992,7 +1008,7 @@ class ExpressionChecker:
 
         Return tuple (result type, inferred operator method type).
         """
-        method_type = analyze_member_access(method, base_type, context, False, False,
+        method_type = analyze_member_access(method, base_type, context, False, False, True,
                                             self.named_type, self.not_ready_callback, local_errors)
         return self.check_call(method_type, [arg], [nodes.ARG_POS],
                                context, arg_messages=local_errors)
@@ -1052,7 +1068,7 @@ class ExpressionChecker:
                                            self.msg)
 
     def get_reverse_op_method(self, method: str) -> str:
-        if method == '__div__' and self.chk.pyversion[0] == 2:
+        if method == '__div__' and self.chk.options.python_version[0] == 2:
             return '__rdiv__'
         else:
             return nodes.reverse_op_methods[method]
@@ -1070,24 +1086,38 @@ class ExpressionChecker:
         left_type = self.accept(e.left, ctx)
 
         if e.op == 'and':
-            # else_map unused
-            if_map, else_map = \
+            right_map, left_map = \
+                mypy.checker.find_isinstance_check(e.left, self.chk.type_map,
+                                                   self.chk.typing_mode_weak())
+        elif e.op == 'or':
+            left_map, right_map = \
                 mypy.checker.find_isinstance_check(e.left, self.chk.type_map,
                                                    self.chk.typing_mode_weak())
         else:
-            if_map = None
+            left_map = None
+            right_map = None
 
-        self.chk.binder.push_frame()
-        if if_map:
-            for var, type in if_map.items():
-                self.chk.binder.push(var, type)
+        if left_map and e.left in left_map:
+            # The type of expressions in left_map is the type they'll have if
+            # the left operand is the result of the operator.
+            left_type = left_map[e.left]
 
-        right_type = self.accept(e.right, left_type)
+        with self.chk.binder.frame_context():
+            if right_map:
+                for var, type in right_map.items():
+                    self.chk.binder.push(var, type)
 
-        self.chk.binder.pop_frame()
+            right_type = self.accept(e.right, left_type)
 
         self.check_not_void(left_type, context)
         self.check_not_void(right_type, context)
+
+        # If either of the type maps is None, that means the corresponding result can never happen.
+        # If both of the type maps are None we just have no information.
+        if left_map is not None and right_map is None:
+            return left_type
+        elif left_map is None and right_map is not None:
+            return right_type
         return UnionType.make_simplified_union([left_type, right_type])
 
     def check_list_multiply(self, e: OpExpr) -> Type:
@@ -1175,7 +1205,7 @@ class ExpressionChecker:
             e.method_type = method_type
             return result
 
-    def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr):
+    def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Type:
         begin = None  # type: int
         end = None  # type: int
         stride = None  # type:int
@@ -1221,6 +1251,8 @@ class ExpressionChecker:
         """Type check a cast expression."""
         source_type = self.accept(expr.expr, context=AnyType())
         target_type = expr.type
+        if self.chk.options.warn_redundant_casts and is_same_type(source_type, target_type):
+            self.msg.redundant_cast(target_type, expr)
         if not self.is_valid_cast(source_type, target_type):
             self.msg.invalid_cast(target_type, source_type, expr)
         return target_type
@@ -1256,7 +1288,8 @@ class ExpressionChecker:
     def check_list_or_set_expr(self, items: List[Node], fullname: str,
                                tag: str, context: Context) -> Type:
         # Translate into type checking a generic function call.
-        tv = TypeVarType('T', -1, [], self.chk.object_type())
+        tvdef = TypeVarDef('T', -1, [], self.chk.object_type())
+        tv = TypeVarType(tvdef)
         constructor = CallableType(
             [tv],
             [nodes.ARG_STAR],
@@ -1264,7 +1297,7 @@ class ExpressionChecker:
             self.chk.named_generic_type(fullname, [tv]),
             self.named_type('builtins.function'),
             name=tag,
-            variables=[TypeVarDef('T', -1, None, self.chk.object_type())])
+            variables=[tvdef])
         return self.check_call(constructor,
                                items,
                                [nodes.ARG_POS] * len(items), context)[0]
@@ -1293,20 +1326,21 @@ class ExpressionChecker:
 
     def visit_dict_expr(self, e: DictExpr) -> Type:
         # Translate into type checking a generic function call.
-        tv1 = TypeVarType('KT', -1, [], self.chk.object_type())
-        tv2 = TypeVarType('VT', -2, [], self.chk.object_type())
+        ktdef = TypeVarDef('KT', -1, [], self.chk.object_type())
+        vtdef = TypeVarDef('VT', -2, [], self.chk.object_type())
+        kt = TypeVarType(ktdef)
+        vt = TypeVarType(vtdef)
         # The callable type represents a function like this:
         #
         #   def <unnamed>(*v: Tuple[kt, vt]) -> Dict[kt, vt]: ...
         constructor = CallableType(
-            [TupleType([tv1, tv2], self.named_type('builtins.tuple'))],
+            [TupleType([kt, vt], self.named_type('builtins.tuple'))],
             [nodes.ARG_STAR],
             [None],
-            self.chk.named_generic_type('builtins.dict', [tv1, tv2]),
+            self.chk.named_generic_type('builtins.dict', [kt, vt]),
             self.named_type('builtins.function'),
             name='<list>',
-            variables=[TypeVarDef('KT', -1, None, self.chk.object_type()),
-                       TypeVarDef('VT', -2, None, self.chk.object_type())])
+            variables=[ktdef, vtdef])
         # Synthesize function arguments.
         args = []  # type: List[Node]
         for key, value in e.items:
@@ -1335,6 +1369,11 @@ class ExpressionChecker:
             if e.expr() not in self.chk.type_map:
                 self.accept(e.expr())
             ret_type = self.chk.type_map[e.expr()]
+            if isinstance(ret_type, NoneTyp):
+                # For "lambda ...: None", just use type from the context.
+                # Important when the context is Callable[..., None] which
+                # really means Void. See #1425.
+                return inferred_type
             return replace_callable_return_type(inferred_type, ret_type)
 
     def infer_lambda_type_using_context(self, e: FuncExpr) -> CallableType:
@@ -1352,7 +1391,7 @@ class ExpressionChecker:
         # they must be considered as indeterminate. We use ErasedType since it
         # does not affect type inference results (it is for purposes like this
         # only).
-        ctx = replace_func_type_vars(ctx, ErasedType())
+        ctx = replace_meta_vars(ctx, ErasedType())
 
         callable_ctx = cast(CallableType, ctx)
 
@@ -1395,7 +1434,7 @@ class ExpressionChecker:
                     if not self.chk.typing_mode_full():
                         return AnyType()
                     return analyze_member_access(e.name, self_type(e.info), e,
-                                                 is_lvalue, True,
+                                                 is_lvalue, True, False,
                                                  self.named_type, self.not_ready_callback,
                                                  self.msg, base)
         else:
@@ -1430,7 +1469,8 @@ class ExpressionChecker:
 
         # Infer the type of the list comprehension by using a synthetic generic
         # callable type.
-        tv = TypeVarType('T', -1, [], self.chk.object_type())
+        tvdef = TypeVarDef('T', -1, [], self.chk.object_type())
+        tv = TypeVarType(tvdef)
         constructor = CallableType(
             [tv],
             [nodes.ARG_POS],
@@ -1438,27 +1478,28 @@ class ExpressionChecker:
             self.chk.named_generic_type(type_name, [tv]),
             self.chk.named_type('builtins.function'),
             name=id_for_messages,
-            variables=[TypeVarDef('T', -1, None, self.chk.object_type())])
+            variables=[tvdef])
         return self.check_call(constructor,
                                [gen.left_expr], [nodes.ARG_POS], gen)[0]
 
-    def visit_dictionary_comprehension(self, e: DictionaryComprehension):
+    def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> Type:
         """Type check a dictionary comprehension."""
         self.check_for_comp(e)
 
         # Infer the type of the list comprehension by using a synthetic generic
         # callable type.
-        key_tv = TypeVarType('KT', -1, [], self.chk.object_type())
-        value_tv = TypeVarType('VT', -2, [], self.chk.object_type())
+        ktdef = TypeVarDef('KT', -1, [], self.chk.object_type())
+        vtdef = TypeVarDef('VT', -2, [], self.chk.object_type())
+        kt = TypeVarType(ktdef)
+        vt = TypeVarType(vtdef)
         constructor = CallableType(
-            [key_tv, value_tv],
+            [kt, vt],
             [nodes.ARG_POS, nodes.ARG_POS],
             [None, None],
-            self.chk.named_generic_type('builtins.dict', [key_tv, value_tv]),
+            self.chk.named_generic_type('builtins.dict', [kt, vt]),
             self.chk.named_type('builtins.function'),
             name='<dictionary-comprehension>',
-            variables=[TypeVarDef('KT', -1, None, self.chk.object_type()),
-                       TypeVarDef('VT', -2, None, self.chk.object_type())])
+            variables=[ktdef, vtdef])
         return self.check_call(constructor,
                                [e.key, e.value], [nodes.ARG_POS, nodes.ARG_POS], e)[0]
 
@@ -1466,14 +1507,13 @@ class ExpressionChecker:
         """Check the for_comp part of comprehensions. That is the part from 'for':
         ... for x in y if z
         """
-        self.chk.binder.push_frame()
-        for index, sequence, conditions in zip(e.indices, e.sequences,
-                                               e.condlists):
-            sequence_type = self.chk.analyze_iterable_item_type(sequence)
-            self.chk.analyze_index_variables(index, sequence_type, e)
-            for condition in conditions:
-                self.accept(condition)
-        self.chk.binder.pop_frame()
+        with self.chk.binder.frame_context():
+            for index, sequence, conditions in zip(e.indices, e.sequences,
+                                                   e.condlists):
+                sequence_type = self.chk.analyze_iterable_item_type(sequence)
+                self.chk.analyze_index_variables(index, sequence_type, e)
+                for condition in conditions:
+                    self.accept(condition)
 
     def visit_conditional_expr(self, e: ConditionalExpr) -> Type:
         cond_type = self.accept(e.cond)
@@ -1512,7 +1552,7 @@ class ExpressionChecker:
 
     def analyze_cond_branch(self, map: Optional[Dict[Node, Type]],
                             node: Node, context: Optional[Type]) -> Type:
-        with self.chk.binder:
+        with self.chk.binder.frame_context():
             if map:
                 for var, type in map.items():
                     self.chk.binder.push(var, type)
@@ -1730,17 +1770,36 @@ def overload_arg_similarity(actual: Type, formal: Type) -> int:
         actual = actual.erase_to_union_or_bound()
     if isinstance(formal, TypeVarType):
         formal = formal.erase_to_union_or_bound()
-    if (isinstance(actual, NoneTyp) or isinstance(actual, AnyType) or
+    if (isinstance(actual, UninhabitedType) or isinstance(actual, AnyType) or
             isinstance(formal, AnyType) or isinstance(formal, CallableType) or
             (isinstance(actual, Instance) and actual.type.fallback_to_any)):
         # These could match anything at runtime.
         return 2
+    if isinstance(actual, NoneTyp):
+        if not experiments.STRICT_OPTIONAL:
+            # NoneTyp matches anything if we're not doing strict Optional checking
+            return 2
+        else:
+            # NoneType is a subtype of object
+            if isinstance(formal, Instance) and formal.type.fullname() == "builtins.object":
+                return 2
     if isinstance(actual, UnionType):
         return max(overload_arg_similarity(item, formal)
                    for item in actual.items)
     if isinstance(formal, UnionType):
         return max(overload_arg_similarity(actual, item)
                    for item in formal.items)
+    if isinstance(formal, TypeType):
+        if isinstance(actual, TypeType):
+            # Since Type[T] is covariant, check if actual = Type[A] is
+            # a subtype of formal = Type[F].
+            return overload_arg_similarity(actual.item, formal.item)
+        elif isinstance(actual, CallableType) and actual.is_type_obj():
+            # Check if the actual is a constructor of some sort.
+            # Note that this is unsound, since we don't check the __init__ signature.
+            return overload_arg_similarity(actual.ret_type, formal.item)
+        else:
+            return 0
     if isinstance(formal, Instance):
         if isinstance(actual, CallableType):
             actual = actual.fallback
@@ -1757,6 +1816,11 @@ def overload_arg_similarity(actual: Type, formal: Type) -> int:
                 return 1
             else:
                 return 0
+        elif isinstance(actual, TypeType):
+            if formal.type.fullname() in {"builtins.object", "builtins.type"}:
+                return 2
+            else:
+                return 0
         else:
             return 0
     if isinstance(actual, UnboundType) or isinstance(formal, UnboundType):
@@ -1764,3 +1828,14 @@ def overload_arg_similarity(actual: Type, formal: Type) -> int:
         return 2
     # Fall back to a conservative equality check for the remaining kinds of type.
     return 2 if is_same_type(erasetype.erase_type(actual), erasetype.erase_type(formal)) else 0
+
+
+def freshen_generic_callable(callee: CallableType) -> CallableType:
+    tvdefs = []
+    tvmap = {}  # type: Dict[TypeVarId, Type]
+    for v in callee.variables:
+        tvdef = TypeVarDef.new_unification_variable(v)
+        tvdefs.append(tvdef)
+        tvmap[v.id] = TypeVarType(tvdef)
+
+    return cast(CallableType, expand_type(callee, tvmap)).copy_modified(variables=tvdefs)
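
A rough illustration (hypothetical user code, not part of the patch) of what the
checkexpr changes aim at: completing a partial container type from a method argument,
and narrowing the operands of 'or' (the latter assumes strict Optional checking):

    from typing import Optional

    def g(s: Optional[str]) -> None:
        items = []              # partial list type: element type not yet known
        items.append(1)         # completes the partial type; items is List[int]
        label = s or 'default'  # with strict Optional, the left operand is narrowed, so label is str
        print(items, label)
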
diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index e3a692c..36768e0 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -1,14 +1,15 @@
 """Type checking of attribute access"""
 
-from typing import cast, Callable, List, Optional
+from typing import cast, Callable, List, Dict, Optional
 
 from mypy.types import (
-    Type, Instance, AnyType, TupleType, CallableType, FunctionLike, TypeVarDef,
+    Type, Instance, AnyType, TupleType, CallableType, FunctionLike, TypeVarId, TypeVarDef,
     Overloaded, TypeVarType, TypeTranslator, UnionType, PartialType,
     DeletedType, NoneTyp, TypeType
 )
 from mypy.nodes import TypeInfo, FuncBase, Var, FuncDef, SymbolNode, Context
-from mypy.nodes import ARG_POS, ARG_STAR, ARG_STAR2, function_type, Decorator, OverloadedFuncDef
+from mypy.nodes import ARG_POS, ARG_STAR, ARG_STAR2, OpExpr, ComparisonExpr
+from mypy.nodes import function_type, Decorator, OverloadedFuncDef
 from mypy.messages import MessageBuilder
 from mypy.maptype import map_instance_to_supertype
 from mypy.expandtype import expand_type_by_instance
@@ -18,11 +19,16 @@ from mypy import messages
 from mypy import subtypes
 
 
-def analyze_member_access(name: str, typ: Type, node: Context, is_lvalue: bool,
+def analyze_member_access(name: str,
+                          typ: Type,
+                          node: Context,
+                          is_lvalue: bool,
                           is_super: bool,
+                          is_operator: bool,
                           builtin_type: Callable[[str], Instance],
                           not_ready_callback: Callable[[str, Context], None],
-                          msg: MessageBuilder, override_info: TypeInfo = None,
+                          msg: MessageBuilder,
+                          override_info: TypeInfo = None,
                           report_type: Type = None) -> Type:
     """Analyse attribute access.
 
@@ -51,7 +57,6 @@ def analyze_member_access(name: str, typ: Type, node: Context, is_lvalue: bool,
         if method:
             if method.is_property:
                 assert isinstance(method, OverloadedFuncDef)
-                method = cast(OverloadedFuncDef, method)
                 return analyze_var(name, method.items[0].var, typ, info, node, is_lvalue, msg,
                                    not_ready_callback)
             if is_lvalue:
@@ -73,18 +78,23 @@ def analyze_member_access(name: str, typ: Type, node: Context, is_lvalue: bool,
     elif isinstance(typ, AnyType):
         # The base object has dynamic type.
         return AnyType()
+    elif isinstance(typ, NoneTyp):
+        # The only attributes NoneType has are those it inherits from object
+        return analyze_member_access(name, builtin_type('builtins.object'), node, is_lvalue,
+                                     is_super, is_operator, builtin_type, not_ready_callback, msg,
+                                     report_type=report_type)
     elif isinstance(typ, UnionType):
         # The base object has dynamic type.
         msg.disable_type_names += 1
-        results = [analyze_member_access(name, subtype, node, is_lvalue,
-                                         is_super, builtin_type, not_ready_callback, msg)
+        results = [analyze_member_access(name, subtype, node, is_lvalue, is_super,
+                                         is_operator, builtin_type, not_ready_callback, msg)
                    for subtype in typ.items]
         msg.disable_type_names -= 1
         return UnionType.make_simplified_union(results)
     elif isinstance(typ, TupleType):
         # Actually look up from the fallback instance type.
-        return analyze_member_access(name, typ.fallback, node, is_lvalue,
-                                     is_super, builtin_type, not_ready_callback, msg)
+        return analyze_member_access(name, typ.fallback, node, is_lvalue, is_super,
+                                     is_operator, builtin_type, not_ready_callback, msg)
     elif isinstance(typ, FunctionLike) and typ.is_type_obj():
         # Class attribute.
         # TODO super?
@@ -92,24 +102,38 @@ def analyze_member_access(name: str, typ: Type, node: Context, is_lvalue: bool,
         if isinstance(ret_type, TupleType):
             ret_type = ret_type.fallback
         if isinstance(ret_type, Instance):
-            result = analyze_class_attribute_access(ret_type, name, node, is_lvalue,
-                                                    builtin_type, not_ready_callback, msg)
-            if result:
-                return result
+            if not is_operator:
+                # When Python sees an operator (e.g. `3 == 4`), it automatically translates that
+                # into something like `int.__eq__(3, 4)` instead of `(3).__eq__(4)` as an
+                # optimization.
+                #
+                # While it normally doesn't matter which of the two versions is used, it
+                # does cause inconsistencies when working with classes. For example, translating
+                # `int == int` to `int.__eq__(int)` would not work since `int.__eq__` is meant to
+                # compare two int _instances_. What we really want is `type(int).__eq__`, which
+                # is meant to compare two types or classes.
+                #
+                # This check makes sure that when we encounter an operator, we skip looking up
+                # the corresponding method in the current instance to avoid this edge case.
+                # See https://github.com/python/mypy/pull/1787 for more info.
+                result = analyze_class_attribute_access(ret_type, name, node, is_lvalue,
+                                                        builtin_type, not_ready_callback, msg)
+                if result:
+                    return result
             # Look up from the 'type' type.
             return analyze_member_access(name, typ.fallback, node, is_lvalue, is_super,
-                                         builtin_type, not_ready_callback, msg,
+                                         is_operator, builtin_type, not_ready_callback, msg,
                                          report_type=report_type)
         else:
             assert False, 'Unexpected type {}'.format(repr(ret_type))
     elif isinstance(typ, FunctionLike):
         # Look up from the 'function' type.
         return analyze_member_access(name, typ.fallback, node, is_lvalue, is_super,
-                                     builtin_type, not_ready_callback, msg,
+                                     is_operator, builtin_type, not_ready_callback, msg,
                                      report_type=report_type)
     elif isinstance(typ, TypeVarType):
         return analyze_member_access(name, typ.upper_bound, node, is_lvalue, is_super,
-                                     builtin_type, not_ready_callback, msg,
+                                     is_operator, builtin_type, not_ready_callback, msg,
                                      report_type=report_type)
     elif isinstance(typ, DeletedType):
         msg.deleted_as_rvalue(typ, node)
@@ -122,14 +146,15 @@ def analyze_member_access(name: str, typ: Type, node: Context, is_lvalue: bool,
         elif isinstance(typ.item, TypeVarType):
             if isinstance(typ.item.upper_bound, Instance):
                 item = typ.item.upper_bound
-        if item:
+        if item and not is_operator:
+            # See comment above for why operators are skipped
             result = analyze_class_attribute_access(item, name, node, is_lvalue,
                                                     builtin_type, not_ready_callback, msg)
             if result:
                 return result
         fallback = builtin_type('builtins.type')
         return analyze_member_access(name, fallback, node, is_lvalue, is_super,
-                                     builtin_type, not_ready_callback, msg,
+                                     is_operator, builtin_type, not_ready_callback, msg,
                                      report_type=report_type)
     return msg.has_no_attr(report_type, name, node)
 
@@ -204,7 +229,7 @@ def analyze_var(name: str, var: Var, itype: Instance, info: TypeInfo, node: Cont
                 # Class-level function objects and classmethods become bound
                 # methods: the former to the instance, the latter to the
                 # class.
-                functype = cast(FunctionLike, t)
+                functype = t
                 check_method_type(functype, itype, var.is_classmethod, node, msg)
                 signature = method_type(functype)
                 if var.is_property:
@@ -414,51 +439,15 @@ def class_callable(init_type: CallableType, info: TypeInfo, type_type: Instance,
                    special_sig: Optional[str]) -> CallableType:
     """Create a type object type based on the signature of __init__."""
     variables = []  # type: List[TypeVarDef]
-    for i, tvar in enumerate(info.defn.type_vars):
-        variables.append(TypeVarDef(tvar.name, i + 1, tvar.values, tvar.upper_bound,
-                                    tvar.variance))
-
-    initvars = init_type.variables
-    variables.extend(initvars)
+    variables.extend(info.defn.type_vars)
+    variables.extend(init_type.variables)
 
     callable_type = init_type.copy_modified(
         ret_type=self_type(info), fallback=type_type, name=None, variables=variables,
         special_sig=special_sig)
     c = callable_type.with_name('"{}"'.format(info.name()))
-    cc = convert_class_tvars_to_func_tvars(c, len(initvars))
-    cc.is_classmethod_class = True
-    return cc
-
-
-def convert_class_tvars_to_func_tvars(callable: CallableType,
-                                      num_func_tvars: int) -> CallableType:
-    return cast(CallableType, callable.accept(TvarTranslator(num_func_tvars)))
-
-
-class TvarTranslator(TypeTranslator):
-    def __init__(self, num_func_tvars: int) -> None:
-        super().__init__()
-        self.num_func_tvars = num_func_tvars
-
-    def visit_type_var(self, t: TypeVarType) -> Type:
-        if t.id < 0:
-            return t
-        else:
-            return TypeVarType(t.name, -t.id - self.num_func_tvars, t.values, t.upper_bound,
-                               t.variance)
-
-    def translate_variables(self,
-                            variables: List[TypeVarDef]) -> List[TypeVarDef]:
-        if not variables:
-            return variables
-        items = []  # type: List[TypeVarDef]
-        for v in variables:
-            if v.id > 0:
-                items.append(TypeVarDef(v.name, -v.id - self.num_func_tvars,
-                                        v.values, v.upper_bound, v.variance))
-            else:
-                items.append(v)
-        return items
+    c.is_classmethod_class = True
+    return c
 
 
 def map_type_from_supertype(typ: Type, sub_info: TypeInfo,
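
A small, hypothetical example of the operator edge case that the new is_operator flag
is meant to handle (user code, not part of the patch):

    # Comparing two classes: Python effectively evaluates type(int).__eq__(int, float),
    # so member lookup must not bind the instance-level int.__eq__ here.
    same_class = (int == float)   # False at runtime; should type-check without error
    print(same_class)
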
diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py
index 368bfae..0ff81fb 100644
--- a/mypy/checkstrformat.py
+++ b/mypy/checkstrformat.py
@@ -25,10 +25,10 @@ class ConversionSpecifier:
         self.precision = precision
         self.type = type
 
-    def has_key(self):
+    def has_key(self) -> bool:
         return self.key is not None
 
-    def has_star(self):
+    def has_star(self) -> bool:
         return self.width == '*' or self.precision == '*'
 
 
@@ -137,7 +137,7 @@ class StringFormatterChecker:
                                        replacements: Node) -> None:
         dict_with_only_str_literal_keys = (isinstance(replacements, DictExpr) and
                                           all(isinstance(k, (StrExpr, BytesExpr))
-                                              for k, v in cast(DictExpr, replacements).items))
+                                              for k, v in replacements.items))
         if dict_with_only_str_literal_keys:
             mapping = {}  # type: Dict[str, Type]
             for k, v in cast(DictExpr, replacements).items:
diff --git a/mypy/constraints.py b/mypy/constraints.py
index 8e5fdee..e9f0402 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -5,7 +5,7 @@ from typing import List, Optional, cast
 from mypy.types import (
     CallableType, Type, TypeVisitor, UnboundType, AnyType, Void, NoneTyp, TypeVarType,
     Instance, TupleType, UnionType, Overloaded, ErasedType, PartialType, DeletedType,
-    TypeType, is_named_instance
+    UninhabitedType, TypeType, TypeVarId, is_named_instance
 )
 from mypy.maptype import map_instance_to_supertype
 from mypy import nodes
@@ -23,11 +23,11 @@ class Constraint:
     It can be either T <: type or T :> type (T is a type variable).
     """
 
-    type_var = 0   # Type variable id
-    op = 0         # SUBTYPE_OF or SUPERTYPE_OF
-    target = None  # type: Type
+    type_var = None  # Type variable id
+    op = 0           # SUBTYPE_OF or SUPERTYPE_OF
+    target = None    # type: Type
 
-    def __init__(self, type_var: int, op: int, target: Type) -> None:
+    def __init__(self, type_var: TypeVarId, op: int, target: Type) -> None:
         self.type_var = type_var
         self.op = op
         self.target = target
@@ -222,6 +222,9 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
     def visit_none_type(self, template: NoneTyp) -> List[Constraint]:
         return []
 
+    def visit_uninhabited_type(self, template: UninhabitedType) -> List[Constraint]:
+        return []
+
     def visit_erased_type(self, template: ErasedType) -> List[Constraint]:
         return []
 
diff --git a/mypy/defaults.py b/mypy/defaults.py
index 5a0875f..9ce210b 100644
--- a/mypy/defaults.py
+++ b/mypy/defaults.py
@@ -1,2 +1,3 @@
 PYTHON2_VERSION = (2, 7)
 PYTHON3_VERSION = (3, 5)
+MYPY_CACHE = '.mypy_cache'
diff --git a/mypy/docstring.py b/mypy/docstring.py
index 4ee14bd..d78a81d 100644
--- a/mypy/docstring.py
+++ b/mypy/docstring.py
@@ -83,7 +83,7 @@ known_patterns = [
 
 
 class DocstringTypes(object):
-    def __init__(self):
+    def __init__(self) -> None:
         self.args = OrderedDict()  # type: Dict[str, Optional[str]]
         self.rettype = None  # type: Optional[str]
 
@@ -91,7 +91,7 @@ class DocstringTypes(object):
         return ('(' + ','.join([v or 'Any' for v in self.args.values()]) +
                 ') -> ' + (self.rettype or 'Any'))
 
-    def __str__(self):
+    def __str__(self) -> str:
         return repr({'args': self.args, 'return': self.rettype})
 
 
@@ -99,7 +99,7 @@ def wsprefix(s: str) -> str:
     return s[:len(s) - len(s.lstrip())]
 
 
-def scrubtype(typestr: Optional[str], only_known=False) -> Optional[str]:
+def scrubtype(typestr: Optional[str], only_known: bool = False) -> Optional[str]:
     if typestr is None:
         return typestr
 
diff --git a/mypy/erasetype.py b/mypy/erasetype.py
index e805f85..b2c1f76 100644
--- a/mypy/erasetype.py
+++ b/mypy/erasetype.py
@@ -1,9 +1,9 @@
-from typing import Optional, Container
+from typing import Optional, Container, Callable
 
 from mypy.types import (
-    Type, TypeVisitor, UnboundType, ErrorType, AnyType, Void, NoneTyp,
+    Type, TypeVisitor, UnboundType, ErrorType, AnyType, Void, NoneTyp, TypeVarId,
     Instance, TypeVarType, CallableType, TupleType, UnionType, Overloaded, ErasedType,
-    PartialType, DeletedType, TypeTranslator, TypeList, TypeType
+    PartialType, DeletedType, TypeTranslator, TypeList, UninhabitedType, TypeType
 )
 
 
@@ -43,6 +43,9 @@ class EraseTypeVisitor(TypeVisitor[Type]):
     def visit_none_type(self, t: NoneTyp) -> Type:
         return t
 
+    def visit_uninhabited_type(self, t: UninhabitedType) -> Type:
+        return t
+
     def visit_erased_type(self, t: ErasedType) -> Type:
         # Should not get here.
         raise RuntimeError()
@@ -102,20 +105,30 @@ class GenericTypeEraser(TypeTranslator):
         return Instance(t.type, [], t.line)
 
 
-def erase_typevars(t: Type, ids_to_erase: Optional[Container[int]] = None) -> Type:
+def erase_typevars(t: Type, ids_to_erase: Optional[Container[TypeVarId]] = None) -> Type:
     """Replace all type variables in a type with any,
     or just the ones in the provided collection.
     """
-    return t.accept(TypeVarEraser(ids_to_erase))
+    def erase_id(id: TypeVarId) -> bool:
+        if ids_to_erase is None:
+            return True
+        return id in ids_to_erase
+    return t.accept(TypeVarEraser(erase_id, AnyType()))
+
+
+def replace_meta_vars(t: Type, target_type: Type) -> Type:
+    """Replace unification variables in a type with the target type."""
+    return t.accept(TypeVarEraser(lambda id: id.is_meta_var(), target_type))
 
 
 class TypeVarEraser(TypeTranslator):
     """Implementation of type erasure"""
 
-    def __init__(self, ids_to_erase: Optional[Container[int]]) -> None:
-        self.ids_to_erase = ids_to_erase
+    def __init__(self, erase_id: Callable[[TypeVarId], bool], replacement: Type) -> None:
+        self.erase_id = erase_id
+        self.replacement = replacement
 
     def visit_type_var(self, t: TypeVarType) -> Type:
-        if self.ids_to_erase is not None and t.id not in self.ids_to_erase:
-            return t
-        return AnyType()
+        if self.erase_id(t.id):
+            return self.replacement
+        return t
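
A brief usage sketch of the reworked erasure helpers, using only the signatures shown
in the diff above; the variable t stands for an arbitrary mypy Type and is assumed to
be defined elsewhere:

    from mypy.erasetype import erase_typevars, replace_meta_vars
    from mypy.types import ErasedType

    erased = erase_typevars(t)                # every type variable in t is replaced with Any
    ctx = replace_meta_vars(t, ErasedType())  # only unification (meta) variables are replaced
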
diff --git a/mypy/errors.py b/mypy/errors.py
index b0a2c62..25f7ed0 100644
--- a/mypy/errors.py
+++ b/mypy/errors.py
@@ -2,8 +2,9 @@ import os
 import os.path
 import sys
 import traceback
+from collections import OrderedDict, defaultdict
 
-from typing import Tuple, List, TypeVar, Set
+from typing import Tuple, List, TypeVar, Set, Dict, Optional
 
 
 T = TypeVar('T')
@@ -79,8 +80,11 @@ class Errors:
     # Stack of short names of current functions or members (or None).
     function_or_member = None  # type: List[str]
 
-    # Ignore errors on these lines.
-    ignored_lines = None  # type: Set[int]
+    # Ignore errors on these lines of each file.
+    ignored_lines = None  # type: Dict[str, Set[int]]
+
+    # Lines on which an error was actually ignored.
+    used_ignored_lines = None  # type: Dict[str, Set[int]]
 
     # Collection of reported only_once messages.
     only_once_messages = None  # type: Set[str]
@@ -90,7 +94,8 @@ class Errors:
         self.import_ctx = []
         self.type_name = [None]
         self.function_or_member = [None]
-        self.ignored_lines = set()
+        self.ignored_lines = OrderedDict()
+        self.used_ignored_lines = defaultdict(set)
         self.only_once_messages = set()
 
     def copy(self) -> 'Errors':
@@ -109,13 +114,26 @@ class Errors:
             prefix += os.sep
         self.ignore_prefix = prefix
 
-    def set_file(self, file: str) -> None:
-        """Set the path of the current file."""
+    def simplify_path(self, file: str) -> str:
         file = os.path.normpath(file)
-        self.file = remove_path_prefix(file, self.ignore_prefix)
+        return remove_path_prefix(file, self.ignore_prefix)
+
+    def set_file(self, file: str, ignored_lines: Set[int] = None) -> None:
+        """Set the path of the current file."""
+        # The path will be simplified later, in render_messages. That way
+        #  * 'file' is always a key that uniquely identifies a source file
+        #    that mypy read (simplified paths might not be unique); and
+        #  * we only have to simplify in one place, while still supporting
+        #    reporting errors for files other than the one currently being
+        #    processed.
+        self.file = file
+
+    def set_file_ignored_lines(self, file: str, ignored_lines: Set[int] = None) -> None:
+        self.ignored_lines[file] = ignored_lines
 
-    def set_ignored_lines(self, ignored_lines: Set[int]) -> None:
-        self.ignored_lines = ignored_lines
+    def mark_file_ignored_lines_used(self, file: str, used_ignored_lines: Set[int] = None
+                                     ) -> None:
+        self.used_ignored_lines[file] |= used_ignored_lines
 
     def push_function(self, name: str) -> None:
         """Set the current function or member short name (it can be None)."""
@@ -170,8 +188,11 @@ class Errors:
         self.add_error_info(info)
 
     def add_error_info(self, info: ErrorInfo) -> None:
-        if info.line in self.ignored_lines:
+        if (info.file in self.ignored_lines and
+                info.line in self.ignored_lines[info.file] and
+                not info.blocker):
             # Annotation requests us to ignore all errors on this line.
+            self.used_ignored_lines[info.file].add(info.line)
             return
         if info.only_once:
             if info.message in self.only_once_messages:
@@ -179,6 +200,15 @@ class Errors:
             self.only_once_messages.add(info.message)
         self.error_info.append(info)
 
+    def generate_unused_ignore_notes(self) -> None:
+        for file, ignored_lines in self.ignored_lines.items():
+            for line in ignored_lines - self.used_ignored_lines[file]:
+                # Don't use report since add_error_info will ignore the error!
+                info = ErrorInfo(self.import_context(), file, None, None,
+                                 line, 'note', "unused 'type: ignore' comment",
+                                 False, False)
+                self.error_info.append(info)
+
     def num_messages(self) -> int:
         """Return the number of generated messages."""
         return len(self.error_info)
@@ -254,32 +284,34 @@ class Errors:
                     result.append((None, -1, 'note', fmt.format(path, line)))
                     i -= 1
 
+            file = self.simplify_path(e.file)
+
             # Report context within a source file.
             if (e.function_or_member != prev_function_or_member or
                     e.type != prev_type):
                 if e.function_or_member is None:
                     if e.type is None:
-                        result.append((e.file, -1, 'note', 'At top level:'))
+                        result.append((file, -1, 'note', 'At top level:'))
                     else:
-                        result.append((e.file, -1, 'note', 'In class "{}":'.format(
+                        result.append((file, -1, 'note', 'In class "{}":'.format(
                             e.type)))
                 else:
                     if e.type is None:
-                        result.append((e.file, -1, 'note',
+                        result.append((file, -1, 'note',
                                        'In function "{}":'.format(
                                            e.function_or_member)))
                     else:
-                        result.append((e.file, -1, 'note',
+                        result.append((file, -1, 'note',
                                        'In member "{}" of class "{}":'.format(
                                            e.function_or_member, e.type)))
             elif e.type != prev_type:
                 if e.type is None:
-                    result.append((e.file, -1, 'note', 'At top level:'))
+                    result.append((file, -1, 'note', 'At top level:'))
                 else:
-                    result.append((e.file, -1, 'note',
+                    result.append((file, -1, 'note',
                                    'In class "{}":'.format(e.type)))
 
-            result.append((e.file, e.line, e.severity, e.message))
+            result.append((file, e.line, e.severity, e.message))
 
             prev_import_context = e.import_ctx
             prev_function_or_member = e.function_or_member
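
The errors.py hunks above key `ignored_lines` by file, record which ignores actually suppressed an error, and add `generate_unused_ignore_notes()` to report the rest. A minimal standalone sketch of that bookkeeping (the file name and helper functions below are illustrative, not mypy's API):

    from collections import defaultdict
    from typing import Dict, List, Set, Tuple

    ignored_lines = {'demo.py': {3, 7}}    # type: Dict[str, Set[int]]  # lines carrying '# type: ignore'
    used_ignored_lines = defaultdict(set)  # type: Dict[str, Set[int]]

    def add_error(file: str, line: int, blocker: bool, errors: List[Tuple[str, int]]) -> None:
        # Mirror of add_error_info(): a matching ignore swallows the error
        # and is recorded as "used"; blockers are never swallowed.
        if file in ignored_lines and line in ignored_lines[file] and not blocker:
            used_ignored_lines[file].add(line)
            return
        errors.append((file, line))

    def unused_ignore_notes() -> List[Tuple[str, int, str]]:
        # Mirror of generate_unused_ignore_notes(): every ignore that never
        # suppressed anything gets a note.
        return [(file, line, "unused 'type: ignore' comment")
                for file, lines in ignored_lines.items()
                for line in lines - used_ignored_lines[file]]

    collected = []  # type: List[Tuple[str, int]]
    add_error('demo.py', 3, blocker=False, errors=collected)
    print(unused_ignore_notes())  # [('demo.py', 7, "unused 'type: ignore' comment")]

In the real checker this pass is presumably driven by the new --warn-unused-ignores flag added in the main.py hunk further down.
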
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index e25190f..87b1641 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -3,11 +3,11 @@ from typing import Dict, Tuple, List, cast
 from mypy.types import (
     Type, Instance, CallableType, TypeVisitor, UnboundType, ErrorType, AnyType,
     Void, NoneTyp, TypeVarType, Overloaded, TupleType, UnionType, ErasedType, TypeList,
-    PartialType, DeletedType, TypeType
+    PartialType, DeletedType, UninhabitedType, TypeType, TypeVarId
 )
 
 
-def expand_type(typ: Type, env: Dict[int, Type]) -> Type:
+def expand_type(typ: Type, env: Dict[TypeVarId, Type]) -> Type:
     """Substitute any type variable references in a type given by a type
     environment.
     """
@@ -16,23 +16,24 @@ def expand_type(typ: Type, env: Dict[int, Type]) -> Type:
 
 
 def expand_type_by_instance(typ: Type, instance: Instance) -> Type:
-    """Substitute type variables in type using values from an Instance."""
+    """Substitute type variables in type using values from an Instance.
+    Type variables are considered to be bound by the class declaration."""
 
     if instance.args == []:
         return typ
     else:
-        variables = {}  # type: Dict[int, Type]
-        for i in range(len(instance.args)):
-            variables[i + 1] = instance.args[i]
+        variables = {}  # type: Dict[TypeVarId, Type]
+        for binder, arg in zip(instance.type.defn.type_vars, instance.args):
+            variables[binder.id] = arg
         return expand_type(typ, variables)
 
 
 class ExpandTypeVisitor(TypeVisitor[Type]):
     """Visitor that substitutes type variables with values."""
 
-    variables = None  # type: Dict[int, Type]  # TypeVar id -> TypeVar value
+    variables = None  # type: Dict[TypeVarId, Type]  # TypeVar id -> TypeVar value
 
-    def __init__(self, variables: Dict[int, Type]) -> None:
+    def __init__(self, variables: Dict[TypeVarId, Type]) -> None:
         self.variables = variables
 
     def visit_unbound_type(self, t: UnboundType) -> Type:
@@ -53,6 +54,9 @@ class ExpandTypeVisitor(TypeVisitor[Type]):
     def visit_none_type(self, t: NoneTyp) -> Type:
         return t
 
+    def visit_uninhabited_type(self, t: UninhabitedType) -> Type:
+        return t
+
     def visit_deleted_type(self, t: DeletedType) -> Type:
         return t
 
@@ -67,7 +71,7 @@ class ExpandTypeVisitor(TypeVisitor[Type]):
     def visit_type_var(self, t: TypeVarType) -> Type:
         repl = self.variables.get(t.id, t)
         if isinstance(repl, Instance):
-            inst = cast(Instance, repl)
+            inst = repl
             # Return copy of instance with type erasure flag on.
             return Instance(inst.type, inst.args, inst.line, True)
         else:
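
With the change above, the substitution environment is keyed by TypeVarId objects taken from the class's own type-variable binders rather than by the positional ids 1, 2, .... A toy analogue of how expand_type_by_instance() now builds that mapping (the classes below are simplified stand-ins for mypy's, and "types" are plain strings):

    from typing import Dict, List

    class TypeVarId:
        def __init__(self, raw: int) -> None:
            self.raw = raw
        def __hash__(self) -> int:
            return hash(self.raw)
        def __eq__(self, other: object) -> bool:
            return isinstance(other, TypeVarId) and self.raw == other.raw

    class TypeVarDef:
        def __init__(self, name: str, id: TypeVarId) -> None:
            self.name = name
            self.id = id

    def build_env(binders: List[TypeVarDef], args: List[str]) -> Dict[TypeVarId, str]:
        # Pair each declared type variable with its type argument, exactly as
        # the new zip() over instance.type.defn.type_vars and instance.args does.
        return {binder.id: arg for binder, arg in zip(binders, args)}

    binders = [TypeVarDef('T', TypeVarId(1)), TypeVarDef('S', TypeVarId(2))]
    env = build_env(binders, ['int', 'str'])
    assert env[TypeVarId(1)] == 'int' and env[TypeVarId(2)] == 'str'
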
diff --git a/mypy/experiments.py b/mypy/experiments.py
new file mode 100644
index 0000000..a4684cc
--- /dev/null
+++ b/mypy/experiments.py
@@ -0,0 +1 @@
+STRICT_OPTIONAL = False
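
experiments.py introduces a single module-level switch that the fastparse, join and meet hunks below consult, and that main.py flips for --strict-optional. A minimal sketch of reading and toggling it (assumes this mypy checkout is importable; describe_mode() is illustrative):

    from mypy import experiments

    def describe_mode() -> str:
        return 'strict' if experiments.STRICT_OPTIONAL else 'lenient'

    assert describe_mode() == 'lenient'   # default, as defined above
    experiments.STRICT_OPTIONAL = True    # what process_options() does for --strict-optional
    assert describe_mode() == 'strict'
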
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index 6bcb12b..c76e8b9 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -18,6 +18,7 @@ from mypy.nodes import (
 )
 from mypy.types import Type, CallableType, AnyType, UnboundType, TupleType, TypeList, EllipsisType
 from mypy import defaults
+from mypy import experiments
 from mypy.errors import Errors
 
 try:
@@ -35,8 +36,9 @@ except ImportError:
               ' Python 3.3 and greater.')
     sys.exit(1)
 
-T = TypeVar('T')
-U = TypeVar('U')
+T = TypeVar('T', bound=Union[ast35.expr, ast35.stmt])
+U = TypeVar('U', bound=Node)
+V = TypeVar('V')
 
 TYPE_COMMENT_SYNTAX_ERROR = 'syntax error in type comment'
 TYPE_COMMENT_AST_ERROR = 'invalid type comment'
@@ -91,16 +93,16 @@ def parse_type_comment(type_comment: str, line: int) -> Type:
         return TypeConverter(line=line).visit(typ.body)
 
 
-def with_line(f: Callable[[Any, T], U]) -> Callable[[Any, T], U]:
+def with_line(f: Callable[['ASTConverter', T], U]) -> Callable[['ASTConverter', T], U]:
     @wraps(f)
-    def wrapper(self, ast):
+    def wrapper(self: 'ASTConverter', ast: T) -> U:
         node = f(self, ast)
         node.set_line(ast.lineno)
         return node
     return wrapper
 
 
-def find(f: Callable[[T], bool], seq: Sequence[T]) -> T:
+def find(f: Callable[[V], bool], seq: Sequence[V]) -> V:
     for item in seq:
         if f(item):
             return item
@@ -176,7 +178,7 @@ class ASTConverter(ast35.NodeTransformer):
     def as_block(self, stmts: List[ast35.stmt], lineno: int) -> Block:
         b = None
         if stmts:
-            b = Block(self.visit_list(stmts))
+            b = Block(self.fix_function_overloads(self.visit_list(stmts)))
             b.set_line(lineno)
         return b
 
@@ -254,7 +256,8 @@ class ASTConverter(ast35.NodeTransformer):
             # for ellipsis arg
             if (len(func_type_ast.argtypes) == 1 and
                     isinstance(func_type_ast.argtypes[0], ast35.Ellipsis)):
-                arg_types = [AnyType() for a in args]
+                arg_types = [a.type_annotation if a.type_annotation is not None else AnyType()
+                             for a in args]
             else:
                 arg_types = [a if a is not None else AnyType() for
                             a in TypeConverter(line=n.lineno).visit_list(func_type_ast.argtypes)]
@@ -267,6 +270,9 @@ class ASTConverter(ast35.NodeTransformer):
             arg_types = [a.type_annotation for a in args]
             return_type = TypeConverter(line=n.lineno).visit(n.returns)
 
+        if isinstance(return_type, UnboundType):
+            return_type.is_ret_type = True
+
         func_type = None
         if any(arg_types) or return_type:
             func_type = CallableType([a if a is not None else AnyType() for a in arg_types],
@@ -281,6 +287,7 @@ class ASTConverter(ast35.NodeTransformer):
                        func_type)
         if func_type is not None:
             func_type.definition = func_def
+            func_type.line = n.lineno
 
         if n.decorator_list:
             var = Var(func_def.name())
@@ -294,10 +301,20 @@ class ASTConverter(ast35.NodeTransformer):
         else:
             return func_def
 
+    def set_type_optional(self, type: Type, initializer: Node) -> None:
+        if not experiments.STRICT_OPTIONAL:
+            return
+        # Indicate that type should be wrapped in an Optional if arg is initialized to None.
+        optional = isinstance(initializer, NameExpr) and initializer.name == 'None'
+        if isinstance(type, UnboundType):
+            type.optional = optional
+
     def transform_args(self, args: ast35.arguments, line: int) -> List[Argument]:
-        def make_argument(arg, default, kind):
+        def make_argument(arg: ast35.arg, default: Optional[ast35.expr], kind: int) -> Argument:
             arg_type = TypeConverter(line=line).visit(arg.annotation)
-            return Argument(Var(arg.arg), arg_type, self.visit(default), kind)
+            converted_default = self.visit(default)
+            self.set_type_optional(arg_type, converted_default)
+            return Argument(Var(arg.arg), arg_type, converted_default, kind)
 
         new_args = []
         num_no_defaults = len(args.args) - len(args.defaults)
@@ -353,7 +370,7 @@ class ASTConverter(ast35.NodeTransformer):
             metaclass = self.stringify_name(metaclass_arg.value)
 
         cdef = ClassDef(n.name,
-                        Block(self.fix_function_overloads(self.visit_list(n.body))),
+                        self.as_block(n.body, n.lineno),
                         None,
                         self.visit_list(n.bases),
                         metaclass=metaclass)
@@ -516,7 +533,7 @@ class ASTConverter(ast35.NodeTransformer):
             raise RuntimeError('unknown BoolOp ' + str(type(n)))
 
         # potentially inefficient!
-        def group(vals):
+        def group(vals: List[Node]) -> Node:
             if len(vals) == 2:
                 return OpExpr(op, vals[0], vals[1])
             else:
@@ -634,7 +651,7 @@ class ASTConverter(ast35.NodeTransformer):
     # keyword = (identifier? arg, expr value)
     @with_line
     def visit_Call(self, n: ast35.Call) -> Node:
-        def is_star2arg(k):
+        def is_star2arg(k: ast35.keyword) -> bool:
             return k.arg is None
 
         arg_types = self.visit_list(
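
The new set_type_optional() marks an argument's declared type as optional when its default is the literal None, but only under strict Optional checking. A condensed, standalone version of that check (the tiny NameExpr/UnboundType classes stand in for mypy's real nodes):

    STRICT_OPTIONAL = True   # stands in for mypy.experiments.STRICT_OPTIONAL

    class NameExpr:
        def __init__(self, name: str) -> None:
            self.name = name

    class UnboundType:
        def __init__(self, name: str) -> None:
            self.name = name
            self.optional = False

    def set_type_optional(typ: object, initializer: object) -> None:
        if not STRICT_OPTIONAL:
            return
        # A default of the literal None marks the declared type as optional.
        optional = isinstance(initializer, NameExpr) and initializer.name == 'None'
        if isinstance(typ, UnboundType):
            typ.optional = optional

    arg_type = UnboundType('int')
    set_type_optional(arg_type, NameExpr('None'))
    assert arg_type.optional   # 'x: int = None' is later read as Optional[int]
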
diff --git a/mypy/fixup.py b/mypy/fixup.py
index 73fb227..929da24 100644
--- a/mypy/fixup.py
+++ b/mypy/fixup.py
@@ -8,7 +8,7 @@ from mypy.nodes import (MypyFile, SymbolNode, SymbolTable, SymbolTableNode,
                         LDEF, MDEF, GDEF, MODULE_REF)
 from mypy.types import (CallableType, EllipsisType, Instance, Overloaded, TupleType,
                         TypeList, TypeVarType, UnboundType, UnionType, TypeVisitor,
-                        TypeType)
+                        UninhabitedType, TypeType)
 from mypy.visitor import NodeVisitor
 
 
@@ -182,6 +182,9 @@ class TypeFixer(TypeVisitor[None]):
     def visit_none_type(self, o: Any) -> None:
         pass  # Nothing to descend into.
 
+    def visit_uninhabited_type(self, o: Any) -> None:
+        pass  # Nothing to descend into.
+
     def visit_partial_type(self, o: Any) -> None:
         raise RuntimeError("Shouldn't get here", o)
 
@@ -247,7 +250,7 @@ def lookup_qualified_stnode(modules: Dict[str, MypyFile], name: str) -> SymbolTa
             return stnode
         node = stnode.node
         assert isinstance(node, TypeInfo)
-        names = cast(TypeInfo, node).names
+        names = node.names
 
 
 def store_qualified(modules: Dict[str, MypyFile], name: str, info: SymbolNode) -> None:
@@ -275,4 +278,4 @@ def store_qualified(modules: Dict[str, MypyFile], name: str, info: SymbolNode) -
             stnode.node = info
             return
         assert isinstance(node, TypeInfo)
-        names = cast(TypeInfo, node).names
+        names = node.names
diff --git a/mypy/git.py b/mypy/git.py
index 508c8e9..355ef9b 100644
--- a/mypy/git.py
+++ b/mypy/git.py
@@ -70,7 +70,7 @@ def warn_no_git_executable() -> None:
           "git executable not in path.", file=sys.stderr)
 
 
-def warn_dirty(dir) -> None:
+def warn_dirty(dir: str) -> None:
     print("Warning: git module '{}' has uncommitted changes.".format(dir),
           file=sys.stderr)
     print("Go to the directory", file=sys.stderr)
@@ -78,7 +78,7 @@ def warn_dirty(dir) -> None:
     print("and commit or reset your changes", file=sys.stderr)
 
 
-def warn_extra_files(dir) -> None:
+def warn_extra_files(dir: str) -> None:
     print("Warning: git module '{}' has untracked files.".format(dir),
           file=sys.stderr)
     print("Go to the directory", file=sys.stderr)
@@ -86,7 +86,7 @@ def warn_extra_files(dir) -> None:
     print("and add & commit your new files.", file=sys.stderr)
 
 
-def chdir_prefix(dir) -> str:
+def chdir_prefix(dir: str) -> str:
     """Return the command to change to the target directory, plus '&&'."""
     if os.path.relpath(dir) != ".":
         return "cd " + pipes.quote(dir) + " && "
diff --git a/mypy/infer.py b/mypy/infer.py
index 3ba66ef..0047fe4 100644
--- a/mypy/infer.py
+++ b/mypy/infer.py
@@ -3,7 +3,7 @@
 from typing import List, Optional
 
 from mypy.constraints import infer_constraints, infer_constraints_for_callable
-from mypy.types import Type, CallableType
+from mypy.types import Type, TypeVarId, CallableType
 from mypy.solve import solve_constraints
 from mypy.constraints import SUBTYPE_OF
 
@@ -35,7 +35,7 @@ def infer_function_type_arguments(callee_type: CallableType,
     return solve_constraints(type_vars, constraints, strict)
 
 
-def infer_type_arguments(type_var_ids: List[int],
+def infer_type_arguments(type_var_ids: List[TypeVarId],
                          template: Type, actual: Type) -> List[Type]:
     # Like infer_function_type_arguments, but only match a single type
     # against a generic type.
diff --git a/mypy/join.py b/mypy/join.py
index 0c42b64..d63388d 100644
--- a/mypy/join.py
+++ b/mypy/join.py
@@ -6,11 +6,13 @@ from mypy.types import (
     Type, AnyType, NoneTyp, Void, TypeVisitor, Instance, UnboundType,
     ErrorType, TypeVarType, CallableType, TupleType, ErasedType, TypeList,
     UnionType, FunctionLike, Overloaded, PartialType, DeletedType,
-    TypeType
+    UninhabitedType, TypeType
 )
 from mypy.maptype import map_instance_to_supertype
 from mypy.subtypes import is_subtype, is_equivalent, is_subtype_ignoring_tvars
 
+from mypy import experiments
+
 
 def join_simple(declaration: Type, s: Type, t: Type) -> Type:
     """Return a simple least upper bound given the declared type."""
@@ -18,9 +20,6 @@ def join_simple(declaration: Type, s: Type, t: Type) -> Type:
     if isinstance(s, AnyType):
         return s
 
-    if isinstance(s, NoneTyp) and not isinstance(t, Void):
-        return t
-
     if isinstance(s, ErasedType):
         return t
 
@@ -33,6 +32,12 @@ def join_simple(declaration: Type, s: Type, t: Type) -> Type:
     if isinstance(declaration, UnionType):
         return UnionType.make_simplified_union([s, t])
 
+    if isinstance(s, NoneTyp) and not isinstance(t, NoneTyp):
+        s, t = t, s
+
+    if isinstance(s, UninhabitedType) and not isinstance(t, UninhabitedType):
+        s, t = t, s
+
     value = t.accept(TypeJoinVisitor(s))
 
     if value is None:
@@ -58,12 +63,12 @@ def join_types(s: Type, t: Type) -> Type:
     if isinstance(s, AnyType):
         return s
 
-    if isinstance(s, NoneTyp) and not isinstance(t, Void):
-        return t
-
     if isinstance(s, ErasedType):
         return t
 
+    if isinstance(s, NoneTyp) and not isinstance(t, NoneTyp):
+        s, t = t, s
+
     # Use a visitor to handle non-trivial cases.
     return t.accept(TypeJoinVisitor(s))
 
@@ -106,6 +111,18 @@ class TypeJoinVisitor(TypeVisitor[Type]):
             return ErrorType()
 
     def visit_none_type(self, t: NoneTyp) -> Type:
+        if experiments.STRICT_OPTIONAL:
+            if isinstance(self.s, (NoneTyp, UninhabitedType)):
+                return t
+            else:
+                return self.default(self.s)
+        else:
+            if not isinstance(self.s, Void):
+                return self.s
+            else:
+                return self.default(self.s)
+
+    def visit_uninhabited_type(self, t: UninhabitedType) -> Type:
         if not isinstance(self.s, Void):
             return self.s
         else:
@@ -238,9 +255,6 @@ class TypeJoinVisitor(TypeVisitor[Type]):
 def join_instances(t: Instance, s: Instance) -> Type:
     """Calculate the join of two instance types.
 
-    If allow_interfaces is True, also consider interface-type results for
-    non-interface types.
-
     Return ErrorType if the result is ambiguous.
     """
     if t.type == s.type:
@@ -333,8 +347,12 @@ def join_type_list(types: List[Type]) -> Type:
     if not types:
         # This is a little arbitrary but reasonable. Any empty tuple should be compatible
         # with all variable length tuples, and this makes it possible. A better approach
-        # would be to use a special bottom type.
-        return NoneTyp()
+        # would be to use a special bottom type, which we do when strict Optional
+        # checking is enabled.
+        if experiments.STRICT_OPTIONAL:
+            return UninhabitedType()
+        else:
+            return NoneTyp()
     joined = types[0]
     for t in types[1:]:
         joined = join_types(joined, t)
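
join_type_list() now seeds its fold with the new bottom type (UninhabitedType) under strict Optional checking instead of NoneTyp, which is what keeps an empty tuple compatible with all variable-length tuples. A toy lattice showing why a bottom element is the natural seed (here a "type" is just a set of value names and join is set union):

    from functools import reduce
    from typing import FrozenSet, List

    ToyType = FrozenSet[str]
    BOTTOM = frozenset()   # type: ToyType  # inhabited by nothing; join(BOTTOM, T) == T

    def join(s: ToyType, t: ToyType) -> ToyType:
        return s | t

    def join_type_list(types: List[ToyType]) -> ToyType:
        return reduce(join, types, BOTTOM)

    assert join_type_list([]) == BOTTOM
    assert join_type_list([frozenset({'int'}), frozenset({'str'})]) == frozenset({'int', 'str'})
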
diff --git a/mypy/lex.py b/mypy/lex.py
index 27c18b5..41f4262 100644
--- a/mypy/lex.py
+++ b/mypy/lex.py
@@ -142,7 +142,7 @@ class LexError(Token):
         self.type = type
         self.message = message
 
-    def __str__(self):
+    def __str__(self) -> str:
         if self.message:
             return 'LexError(%s)' % self.message
         else:
diff --git a/mypy/main.py b/mypy/main.py
index d405dd9..80ae44d 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -5,34 +5,21 @@ import os
 import re
 import sys
 
-from typing import Optional, Dict, List, Set, Tuple
+from typing import Any, Dict, List, Optional, Set, Tuple
 
 from mypy import build
 from mypy import defaults
 from mypy import git
+from mypy import experiments
 from mypy.build import BuildSource, BuildResult, PYTHON_EXTENSIONS
 from mypy.errors import CompileError, set_drop_into_pdb
+from mypy.options import Options, BuildType
 
 from mypy.version import __version__
 
 PY_EXTENSIONS = tuple(PYTHON_EXTENSIONS)
 
 
-class Options:
-    """Options collected from flags."""
-
-    def __init__(self) -> None:
-        # Set default options.
-        self.target = build.TYPE_CHECK
-        self.build_flags = []  # type: List[str]
-        self.pyversion = defaults.PYTHON3_VERSION
-        self.custom_typing_module = None  # type: str
-        self.report_dirs = {}  # type: Dict[str, str]
-        self.python_path = False
-        self.dirty_stubs = False
-        self.pdb = False
-
-
 def main(script_path: str) -> None:
     """Main entry point to the type checker.
 
@@ -43,18 +30,15 @@ def main(script_path: str) -> None:
         bin_dir = find_bin_directory(script_path)
     else:
         bin_dir = None
-    sources, options = process_options()
+    sources, options = process_options(sys.argv[1:])
     if options.pdb:
         set_drop_into_pdb(True)
     if not options.dirty_stubs:
         git.verify_git_integrity_or_abort(build.default_data_dir(bin_dir))
     f = sys.stdout
     try:
-        if options.target == build.TYPE_CHECK:
-            res = type_check_only(sources, bin_dir, options)
-            a = res.errors
-        else:
-            raise RuntimeError('unsupported target %d' % options.target)
+        res = type_check_only(sources, bin_dir, options)
+        a = res.errors
     except CompileError as e:
         a = e.messages
         if not e.use_stdout:
@@ -93,20 +77,37 @@ def type_check_only(sources: List[BuildSource],
         bin_dir: str, options: Options) -> BuildResult:
     # Type-check the program and dependencies and translate to Python.
     return build.build(sources=sources,
-                       target=build.TYPE_CHECK,
                        bin_dir=bin_dir,
-                       pyversion=options.pyversion,
-                       custom_typing_module=options.custom_typing_module,
-                       report_dirs=options.report_dirs,
-                       flags=options.build_flags,
-                       python_path=options.python_path)
+                       options=options)
 
 
 FOOTER = """environment variables:
 MYPYPATH     additional module search path"""
 
 
-def process_options() -> Tuple[List[BuildSource], Options]:
+class SplitNamespace(argparse.Namespace):
+    def __init__(self, standard_namespace: object, alt_namespace: object, alt_prefix: str) -> None:
+        self.__dict__['_standard_namespace'] = standard_namespace
+        self.__dict__['_alt_namespace'] = alt_namespace
+        self.__dict__['_alt_prefix'] = alt_prefix
+
+    def _get(self) -> Tuple[Any, Any]:
+        return (self._standard_namespace, self._alt_namespace)
+
+    def __setattr__(self, name: str, value: Any) -> None:
+        if name.startswith(self._alt_prefix):
+            setattr(self._alt_namespace, name[len(self._alt_prefix):], value)
+        else:
+            setattr(self._standard_namespace, name, value)
+
+    def __getattr__(self, name: str) -> Any:
+        if name.startswith(self._alt_prefix):
+            return getattr(self._alt_namespace, name[len(self._alt_prefix):])
+        else:
+            return getattr(self._standard_namespace, name)
+
+
+def process_options(args: List[str]) -> Tuple[List[BuildSource], Options]:
     """Process command line arguments.
 
     Return (mypy program path (or None),
@@ -120,7 +121,7 @@ def process_options() -> Tuple[List[BuildSource], Options]:
     parser = argparse.ArgumentParser(prog='mypy', epilog=FOOTER,
                                      formatter_class=help_factory)
 
-    def parse_version(v):
+    def parse_version(v: str) -> Tuple[int, int]:
         m = re.match(r'\A(\d)\.(\d+)\Z', v)
         if m:
             return int(m.group(1)), int(m.group(2))
@@ -128,8 +129,13 @@ def process_options() -> Tuple[List[BuildSource], Options]:
             raise argparse.ArgumentTypeError(
                 "Invalid python version '{}' (expected format: 'x.y')".format(v))
 
-    parser.add_argument('-v', '--verbose', action='count', help="more verbose messages")
-    parser.add_argument('-V', '--version', action='version',  # type: ignore # see typeshed#124
+    # Unless otherwise specified, arguments will be parsed directly onto an
+    # Options object.  Options that require further processing should have
+    # their `dest` prefixed with `special-opts:`, which will cause them to be
+    # parsed into the separate special_opts namespace object.
+    parser.add_argument('-v', '--verbose', action='count', dest='verbosity',
+                        help="more verbose messages")
+    parser.add_argument('-V', '--version', action='version',
                         version='%(prog)s ' + __version__)
     parser.add_argument('--python-version', type=parse_version, metavar='x.y',
                         help='use Python x.y')
@@ -137,7 +143,7 @@ def process_options() -> Tuple[List[BuildSource], Options]:
                         const=defaults.PYTHON2_VERSION, help="use Python 2 mode")
     parser.add_argument('-s', '--silent-imports', action='store_true',
                         help="don't follow imports to .py files")
-    parser.add_argument('--silent', action='store_true',
+    parser.add_argument('--silent', action='store_true', dest='special-opts:silent',
                         help="deprecated name for --silent-imports")
     parser.add_argument('--almost-silent', action='store_true',
                         help="like --silent-imports but reports the imports as errors")
@@ -152,137 +158,124 @@ def process_options() -> Tuple[List[BuildSource], Options]:
     parser.add_argument('--warn-incomplete-stub', action='store_true',
                         help="warn if missing type annotation in typeshed, only relevant with"
                         " --check-untyped-defs enabled")
+    parser.add_argument('--warn-redundant-casts', action='store_true',
+                        help="warn about casting an expression to its inferred type")
+    parser.add_argument('--warn-unused-ignores', action='store_true',
+                        help="warn about unneeded '# type: ignore' comments")
     parser.add_argument('--fast-parser', action='store_true',
                         help="enable experimental fast parser")
     parser.add_argument('-i', '--incremental', action='store_true',
                         help="enable experimental module cache")
+    parser.add_argument('--cache-dir', action='store', metavar='DIR',
+                        help="store module cache info in the given folder in incremental mode "
+                        "(defaults to '{}')".format(defaults.MYPY_CACHE))
+    parser.add_argument('--strict-optional', action='store_true',
+                        dest='special-opts:strict_optional',
+                        help="enable experimental strict Optional checks")
     parser.add_argument('-f', '--dirty-stubs', action='store_true',
                         help="don't warn if typeshed is out of sync")
     parser.add_argument('--pdb', action='store_true', help="invoke pdb on fatal error")
     parser.add_argument('--use-python-path', action='store_true',
+                        dest='special-opts:use_python_path',
                         help="an anti-pattern")
-    parser.add_argument('--stats', action='store_true', help="dump stats")
-    parser.add_argument('--inferstats', action='store_true', help="dump type inference stats")
-    parser.add_argument('--custom-typing', metavar='MODULE', help="use a custom typing module")
+    parser.add_argument('--stats', action='store_true', dest='dump_type_stats', help="dump stats")
+    parser.add_argument('--inferstats', action='store_true', dest='dump_inference_stats',
+                        help="dump type inference stats")
+    parser.add_argument('--custom-typing', metavar='MODULE', dest='custom_typing_module',
+                        help="use a custom typing module")
 
     report_group = parser.add_argument_group(
         title='report generation',
         description='Generate a report in the specified format.')
-    report_group.add_argument('--html-report', metavar='DIR')
-    report_group.add_argument('--old-html-report', metavar='DIR')
-    report_group.add_argument('--xslt-html-report', metavar='DIR')
-    report_group.add_argument('--xml-report', metavar='DIR')
-    report_group.add_argument('--txt-report', metavar='DIR')
-    report_group.add_argument('--xslt-txt-report', metavar='DIR')
-    report_group.add_argument('--linecount-report', metavar='DIR')
+    report_group.add_argument('--html-report', metavar='DIR',
+                              dest='special-opts:html_report')
+    report_group.add_argument('--old-html-report', metavar='DIR',
+                              dest='special-opts:old_html_report')
+    report_group.add_argument('--xslt-html-report', metavar='DIR',
+                              dest='special-opts:xslt_html_report')
+    report_group.add_argument('--xml-report', metavar='DIR',
+                              dest='special-opts:xml_report')
+    report_group.add_argument('--txt-report', metavar='DIR',
+                              dest='special-opts:txt_report')
+    report_group.add_argument('--xslt-txt-report', metavar='DIR',
+                              dest='special-opts:xslt_txt_report')
+    report_group.add_argument('--linecount-report', metavar='DIR',
+                              dest='special-opts:linecount_report')
 
     code_group = parser.add_argument_group(title='How to specify the code to type check')
-    code_group.add_argument('-m', '--module', action='append', dest='modules',
+    code_group.add_argument('-m', '--module', action='append', metavar='MODULE',
+                            dest='special-opts:modules',
                             help="type-check module; can repeat for more modules")
     # TODO: `mypy -c A -c B` and `mypy -p A -p B` currently silently
     # ignore A (last option wins).  Perhaps -c, -m and -p could just
     # be command-line flags that modify how we interpret self.files?
-    code_group.add_argument('-c', '--command', help="type-check program passed in as string")
-    code_group.add_argument('-p', '--package', help="type-check all files in a directory")
-    code_group.add_argument('files', nargs='*', help="type-check given files or directories")
+    code_group.add_argument('-c', '--command', metavar='PROGRAM_TEXT', dest='special-opts:command',
+                            help="type-check program passed in as string")
+    code_group.add_argument('-p', '--package', metavar='PACKAGE', dest='special-opts:package',
+                            help="type-check all files in a directory")
+    code_group.add_argument(metavar='files', nargs='*', dest='special-opts:files',
+                            help="type-check given files or directories")
 
-    args = parser.parse_args()
+    options = Options()
+    special_opts = argparse.Namespace()
+    parser.parse_args(args, SplitNamespace(options, special_opts, 'special-opts:'))
 
     # --use-python-path is no longer supported; explain why.
-    if args.use_python_path:
+    if special_opts.use_python_path:
         parser.error("Sorry, --use-python-path is no longer supported.\n"
                      "If you are trying this because your code depends on a library module,\n"
                      "you should really investigate how to obtain stubs for that module.\n"
                      "See https://github.com/python/mypy/issues/1411 for more discussion."
                      )
+
     # --silent is deprecated; warn about this.
-    if args.silent:
+    if special_opts.silent:
         print("Warning: --silent is deprecated; use --silent-imports",
               file=sys.stderr)
+        options.silent_imports = True
 
     # Check for invalid argument combinations.
-    code_methods = sum(bool(c) for c in [args.modules, args.command, args.package, args.files])
+    code_methods = sum(bool(c) for c in [special_opts.modules,
+                                         special_opts.command,
+                                         special_opts.package,
+                                         special_opts.files])
     if code_methods == 0:
         parser.error("Missing target module, package, files, or command.")
     elif code_methods > 1:
         parser.error("May only specify one of: module, package, files, or command.")
 
-    if args.use_python_path and args.python_version and args.python_version[0] == 2:
-        parser.error('Python version 2 (or --py2) specified, '
-                     'but --use-python-path will search in sys.path of Python 3')
-
-    # Set options.
-    options = Options()
-    options.dirty_stubs = args.dirty_stubs
-    options.python_path = args.use_python_path
-    options.pdb = args.pdb
-    options.custom_typing_module = args.custom_typing
-
     # Set build flags.
-    if args.python_version is not None:
-        options.pyversion = args.python_version
-
-    if args.verbose:
-        options.build_flags.extend(args.verbose * [build.VERBOSE])
-
-    if args.stats:
-        options.build_flags.append(build.DUMP_TYPE_STATS)
-
-    if args.inferstats:
-        options.build_flags.append(build.DUMP_INFER_STATS)
-
-    if args.silent_imports or args.silent:
-        options.build_flags.append(build.SILENT_IMPORTS)
-    if args.almost_silent:
-        options.build_flags.append(build.SILENT_IMPORTS)
-        options.build_flags.append(build.ALMOST_SILENT)
-
-    if args.disallow_untyped_calls:
-        options.build_flags.append(build.DISALLOW_UNTYPED_CALLS)
-
-    if args.disallow_untyped_defs:
-        options.build_flags.append(build.DISALLOW_UNTYPED_DEFS)
-
-    if args.check_untyped_defs:
-        options.build_flags.append(build.CHECK_UNTYPED_DEFS)
-
-    if args.warn_incomplete_stub:
-        options.build_flags.append(build.WARN_INCOMPLETE_STUB)
-
-    # experimental
-    if args.fast_parser:
-        options.build_flags.append(build.FAST_PARSER)
-    if args.incremental:
-        options.build_flags.append(build.INCREMENTAL)
+    if special_opts.strict_optional:
+        experiments.STRICT_OPTIONAL = True
 
     # Set reports.
-    for flag, val in vars(args).items():
+    for flag, val in vars(special_opts).items():
         if flag.endswith('_report') and val is not None:
             report_type = flag[:-7].replace('_', '-')
             report_dir = val
             options.report_dirs[report_type] = report_dir
 
     # Set target.
-    if args.modules:
-        options.build_flags.append(build.MODULE)
-        targets = [BuildSource(None, m, None) for m in args.modules]
+    if special_opts.modules:
+        options.build_type = BuildType.MODULE
+        targets = [BuildSource(None, m, None) for m in special_opts.modules]
         return targets, options
-    elif args.package:
-        if os.sep in args.package or os.altsep and os.altsep in args.package:
+    elif special_opts.package:
+        if os.sep in special_opts.package or os.altsep and os.altsep in special_opts.package:
             fail("Package name '{}' cannot have a slash in it."
-                 .format(args.package))
-        options.build_flags.append(build.MODULE)
+                 .format(special_opts.package))
+        options.build_type = BuildType.MODULE
         lib_path = [os.getcwd()] + build.mypy_path()
-        targets = build.find_modules_recursive(args.package, lib_path)
+        targets = build.find_modules_recursive(special_opts.package, lib_path)
         if not targets:
-            fail("Can't find package '{}'".format(args.package))
+            fail("Can't find package '{}'".format(special_opts.package))
         return targets, options
-    elif args.command:
-        options.build_flags.append(build.PROGRAM_TEXT)
-        return [BuildSource(None, None, args.command)], options
+    elif special_opts.command:
+        options.build_type = BuildType.PROGRAM_TEXT
+        return [BuildSource(None, None, special_opts.command)], options
     else:
         targets = []
-        for f in args.files:
+        for f in special_opts.files:
             if f.endswith(PY_EXTENSIONS):
                 targets.append(BuildSource(f, crawl_up(f)[1], None))
             elif os.path.isdir(f):
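
process_options() now parses most flags directly onto an Options object, while any option whose dest carries the 'special-opts:' prefix is routed into a separate namespace for post-processing; SplitNamespace does the routing. A self-contained usage demo with a throwaway parser and made-up flag names (the class body is copied from the hunk above, minus the _get() helper that the demo does not need):

    import argparse
    from typing import Any

    class SplitNamespace(argparse.Namespace):
        def __init__(self, standard_namespace: object, alt_namespace: object, alt_prefix: str) -> None:
            self.__dict__['_standard_namespace'] = standard_namespace
            self.__dict__['_alt_namespace'] = alt_namespace
            self.__dict__['_alt_prefix'] = alt_prefix

        def __setattr__(self, name: str, value: Any) -> None:
            if name.startswith(self._alt_prefix):
                setattr(self._alt_namespace, name[len(self._alt_prefix):], value)
            else:
                setattr(self._standard_namespace, name, value)

        def __getattr__(self, name: str) -> Any:
            if name.startswith(self._alt_prefix):
                return getattr(self._alt_namespace, name[len(self._alt_prefix):])
            else:
                return getattr(self._standard_namespace, name)

    class Options:
        verbosity = 0   # toy stand-in for mypy.options.Options

    parser = argparse.ArgumentParser()
    parser.add_argument('-v', action='count', dest='verbosity')
    parser.add_argument('--target', dest='special-opts:target')

    options = Options()
    special_opts = argparse.Namespace()
    parser.parse_args(['-v', '--target', 'pkg'],
                      SplitNamespace(options, special_opts, 'special-opts:'))
    assert options.verbosity == 1          # ordinary dest went to Options
    assert special_opts.target == 'pkg'    # prefixed dest went to special_opts
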
diff --git a/mypy/maptype.py b/mypy/maptype.py
index 5eb6a0d..dc8e7b2 100644
--- a/mypy/maptype.py
+++ b/mypy/maptype.py
@@ -2,7 +2,7 @@ from typing import Dict, List, cast
 
 from mypy.expandtype import expand_type
 from mypy.nodes import TypeInfo
-from mypy.types import Type, Instance, AnyType
+from mypy.types import Type, TypeVarId, Instance, AnyType
 
 
 def map_instance_to_supertype(instance: Instance,
@@ -82,7 +82,7 @@ def map_instance_to_direct_supertypes(instance: Instance,
         return [Instance(supertype, [AnyType()] * len(supertype.type_vars))]
 
 
-def instance_to_type_environment(instance: Instance) -> Dict[int, Type]:
+def instance_to_type_environment(instance: Instance) -> Dict[TypeVarId, Type]:
     """Given an Instance, produce the resulting type environment for type
     variables bound by the Instance's class definition.
 
@@ -92,5 +92,4 @@ def instance_to_type_environment(instance: Instance) -> Dict[int, Type]:
     arguments.  The type variables are mapped by their `id`.
 
     """
-    # Type variables bound by a class have `id` of 1, 2, etc.
-    return {i + 1: instance.args[i] for i in range(len(instance.args))}
+    return {binder.id: arg for binder, arg in zip(instance.type.defn.type_vars, instance.args)}
diff --git a/mypy/meet.py b/mypy/meet.py
index eff2eed..63e3aae 100644
--- a/mypy/meet.py
+++ b/mypy/meet.py
@@ -3,12 +3,14 @@ from typing import cast, List
 from mypy.join import is_similar_callables, combine_similar_callables
 from mypy.types import (
     Type, AnyType, TypeVisitor, UnboundType, Void, ErrorType, NoneTyp, TypeVarType,
-    Instance, CallableType, TupleType, ErasedType, TypeList,
-    UnionType, PartialType, DeletedType, TypeType
+    Instance, CallableType, TupleType, ErasedType, TypeList, UnionType, PartialType,
+    DeletedType, UninhabitedType, TypeType
 )
 from mypy.subtypes import is_subtype
 from mypy.nodes import TypeInfo
 
+from mypy import experiments
+
 # TODO Describe this module.
 
 
@@ -29,7 +31,10 @@ def meet_simple(s: Type, t: Type, default_right: bool = True) -> Type:
     if isinstance(s, UnionType):
         return UnionType.make_simplified_union([meet_types(x, t) for x in s.items])
     elif not is_overlapping_types(s, t, use_promotions=True):
-        return NoneTyp()
+        if experiments.STRICT_OPTIONAL:
+            return UninhabitedType()
+        else:
+            return NoneTyp()
     else:
         if default_right:
             return t
@@ -72,7 +77,7 @@ def is_overlapping_types(t: Type, s: Type, use_promotions: bool = False) -> bool
         s = s.erase_to_union_or_bound()
     if isinstance(t, Instance):
         if isinstance(s, Instance):
-            # Only consider two classes non-disjoint if one is included in the mro
+            # Consider two classes non-disjoint if one is included in the mro
             # of another.
             if use_promotions:
                 # Consider cases like int vs float to be overlapping where
@@ -89,8 +94,23 @@ def is_overlapping_types(t: Type, s: Type, use_promotions: bool = False) -> bool
     if isinstance(s, UnionType):
         return any(is_overlapping_types(t, item)
                    for item in s.items)
-    # We conservatively assume that non-instance, non-union types can overlap any other
-    # types.
+    if isinstance(t, TypeType) and isinstance(s, TypeType):
+        # If both types are TypeType, compare their inner types.
+        return is_overlapping_types(t.item, s.item, use_promotions)
+    elif isinstance(t, TypeType) or isinstance(s, TypeType):
+        # If exactly only one of t or s is a TypeType, check if one of them
+        # is an `object` or a `type` and otherwise assume no overlap.
+        other = s if isinstance(t, TypeType) else t
+        if isinstance(other, Instance):
+            return other.type.fullname() in {'builtins.object', 'builtins.type'}
+        else:
+            return False
+    if experiments.STRICT_OPTIONAL:
+        if isinstance(t, NoneTyp) != isinstance(s, NoneTyp):
+            # NoneTyp does not overlap with other non-Union types under strict Optional checking
+            return False
+    # We conservatively assume that non-instance, non-union, and non-TypeType types can overlap
+    # any other types.
     return True
 
 
@@ -110,6 +130,11 @@ class TypeMeetVisitor(TypeVisitor[Type]):
         if isinstance(self.s, Void) or isinstance(self.s, ErrorType):
             return ErrorType()
         elif isinstance(self.s, NoneTyp):
+            if experiments.STRICT_OPTIONAL:
+                return AnyType()
+            else:
+                return self.s
+        elif isinstance(self.s, UninhabitedType):
             return self.s
         else:
             return AnyType()
@@ -141,6 +166,19 @@ class TypeMeetVisitor(TypeVisitor[Type]):
             return ErrorType()
 
     def visit_none_type(self, t: NoneTyp) -> Type:
+        if experiments.STRICT_OPTIONAL:
+            if isinstance(self.s, NoneTyp) or (isinstance(self.s, Instance) and
+                                               self.s.type.fullname() == 'builtins.object'):
+                return t
+            else:
+                return UninhabitedType()
+        else:
+            if not isinstance(self.s, Void) and not isinstance(self.s, ErrorType):
+                return t
+            else:
+                return ErrorType()
+
+    def visit_uninhabited_type(self, t: UninhabitedType) -> Type:
         if not isinstance(self.s, Void) and not isinstance(self.s, ErrorType):
             return t
         else:
@@ -149,6 +187,11 @@ class TypeMeetVisitor(TypeVisitor[Type]):
     def visit_deleted_type(self, t: DeletedType) -> Type:
         if not isinstance(self.s, Void) and not isinstance(self.s, ErrorType):
             if isinstance(self.s, NoneTyp):
+                if experiments.STRICT_OPTIONAL:
+                    return t
+                else:
+                    return self.s
+            elif isinstance(self.s, UninhabitedType):
                 return self.s
             else:
                 return t
@@ -176,7 +219,10 @@ class TypeMeetVisitor(TypeVisitor[Type]):
                         args.append(self.meet(t.args[i], si.args[i]))
                     return Instance(t.type, args)
                 else:
-                    return NoneTyp()
+                    if experiments.STRICT_OPTIONAL:
+                        return UninhabitedType()
+                    else:
+                        return NoneTyp()
             else:
                 if is_subtype(t, self.s):
                     return t
@@ -184,7 +230,10 @@ class TypeMeetVisitor(TypeVisitor[Type]):
                     # See also above comment.
                     return self.s
                 else:
-                    return NoneTyp()
+                    if experiments.STRICT_OPTIONAL:
+                        return UninhabitedType()
+                    else:
+                        return NoneTyp()
         elif isinstance(self.s, TypeType):
             return meet_types(t, self.s)
         else:
@@ -221,13 +270,16 @@ class TypeMeetVisitor(TypeVisitor[Type]):
         else:
             return self.default(self.s)
 
-    def meet(self, s, t):
+    def meet(self, s: Type, t: Type) -> Type:
         return meet_types(s, t)
 
-    def default(self, typ):
+    def default(self, typ: Type) -> Type:
         if isinstance(typ, UnboundType):
             return AnyType()
         elif isinstance(typ, Void) or isinstance(typ, ErrorType):
             return ErrorType()
         else:
-            return NoneTyp()
+            if experiments.STRICT_OPTIONAL:
+                return UninhabitedType()
+            else:
+                return NoneTyp()
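
Under strict Optional checking, is_overlapping_types() now treats None as disjoint from every non-None, non-Union type. A toy version of just that rule, with types modelled as strings and every case the real code handles earlier (instances, unions, TypeType) left out:

    STRICT_OPTIONAL = True

    def overlaps(t: str, s: str) -> bool:
        if STRICT_OPTIONAL and (t == 'None') != (s == 'None'):
            # NoneTyp does not overlap with other non-Union types under
            # strict Optional checking.
            return False
        return True   # conservative fallback for everything not modelled here

    assert overlaps('None', 'None')
    assert not overlaps('None', 'int')
    assert not overlaps('int', 'None')
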
diff --git a/mypy/messages.py b/mypy/messages.py
index 2946910..2090107 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -249,14 +249,21 @@ class MessageBuilder:
             else:
                 return 'tuple(length {})'.format(len(items))
         elif isinstance(typ, UnionType):
-            items = []
-            for t in typ.items:
-                items.append(strip_quotes(self.format(t)))
-            s = '"Union[{}]"'.format(', '.join(items))
-            if len(s) < 40:
-                return s
+            # Only print Unions as Optionals if the Optional wouldn't have to contain another Union
+            print_as_optional = (len(typ.items) -
+                                 sum(isinstance(t, NoneTyp) for t in typ.items) == 1)
+            if print_as_optional:
+                rest = [t for t in typ.items if not isinstance(t, NoneTyp)]
+                return '"Optional[{}]"'.format(strip_quotes(self.format(rest[0])))
             else:
-                return 'union type ({} items)'.format(len(items))
+                items = []
+                for t in typ.items:
+                    items.append(strip_quotes(self.format(t)))
+                s = '"Union[{}]"'.format(', '.join(items))
+                if len(s) < 40:
+                    return s
+                else:
+                    return 'union type ({} items)'.format(len(items))
         elif isinstance(typ, Void):
             return 'None'
         elif isinstance(typ, NoneTyp):
@@ -834,6 +841,9 @@ class MessageBuilder:
     def unsupported_type_type(self, item: Type, context: Context) -> None:
         self.fail('Unsupported type Type[{}]'.format(self.format(item)), context)
 
+    def redundant_cast(self, typ: Type, context: Context) -> None:
+        self.note('Redundant cast to {}'.format(self.format(typ)), context)
+
 
 def capitalize(s: str) -> str:
     """Capitalize the first character of a string."""
diff --git a/mypy/nodes.py b/mypy/nodes.py
index 33613f3..fe4da0d 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -56,6 +56,9 @@ LITERAL_YES = 2
 LITERAL_TYPE = 1
 LITERAL_NO = 0
 
+# Hard coded name of Enum baseclass.
+ENUM_BASECLASS = "enum.Enum"
+
 node_kinds = {
     LDEF: 'Ldef',
     GDEF: 'Gdef',
@@ -113,20 +116,12 @@ class Node(Context):
     def accept(self, visitor: NodeVisitor[T]) -> T:
         raise RuntimeError('Not implemented')
 
-    # NOTE: Can't use @abstractmethod, since many subclasses of Node
-    # don't implement serialize().
-    def serialize(self) -> Any:
-        raise NotImplementedError('Cannot serialize {} instance'.format(self.__class__.__name__))
 
-    @classmethod
-    def deserialize(cls, data: JsonDict) -> 'Node':
-        classname = data['.class']
-        glo = globals()
-        if classname in glo:
-            cl = glo[classname]
-            if issubclass(cl, cls) and 'deserialize' in cl.__dict__:
-                return cl.deserialize(data)
-        raise NotImplementedError('unexpected .class {}'.format(classname))
+# These are placeholders for a future refactoring; see #1783.
+# For now they serve as (unchecked) documentation of what various
+# fields of Node subtypes are expected to contain.
+Statement = Node
+Expression = Node
 
 
 class SymbolNode(Node):
@@ -140,12 +135,23 @@ class SymbolNode(Node):
     @abstractmethod
     def fullname(self) -> str: pass
 
+    # NOTE: Can't use @abstractmethod, since many subclasses of Node
+    # don't implement serialize().
+    def serialize(self) -> Any:
+        raise NotImplementedError('Cannot serialize {} instance'.format(self.__class__.__name__))
+
     @classmethod
     def deserialize(cls, data: JsonDict) -> 'SymbolNode':
-        return cast(SymbolNode, super().deserialize(data))
+        classname = data['.class']
+        glo = globals()
+        if classname in glo:
+            cl = glo[classname]
+            if issubclass(cl, cls) and 'deserialize' in cl.__dict__:
+                return cl.deserialize(data)
+        raise NotImplementedError('unexpected .class {}'.format(classname))
 
 
-class MypyFile(SymbolNode):
+class MypyFile(SymbolNode, Statement):
     """The abstract syntax tree of a single source file."""
 
     # Module name ('__main__' for initial file)
@@ -155,7 +161,7 @@ class MypyFile(SymbolNode):
     # Path to the file (None if not known)
     path = ''
     # Top-level definitions and statements
-    defs = None  # type: List[Node]
+    defs = None  # type: List[Statement]
     # Is there a UTF-8 BOM at the start?
     is_bom = False
     names = None  # type: SymbolTable
@@ -169,7 +175,7 @@ class MypyFile(SymbolNode):
     weak_opts = None  # type: Set[str]
 
     def __init__(self,
-                 defs: List[Node],
+                 defs: List[Statement],
                  imports: List['ImportBase'],
                  is_bom: bool = False,
                  ignored_lines: Set[int] = None,
@@ -218,9 +224,10 @@ class MypyFile(SymbolNode):
         return tree
 
 
-class ImportBase(Node):
+class ImportBase(Statement):
     """Base class for all import statements."""
     is_unreachable = False
+    is_top_level = False  # Set by semanal.FirstPass
     # If an import replaces existing definitions, we construct dummy assignment
     # statements that assign the imported names to the names in the current scope,
     # for type checking purposes. Example:
@@ -294,7 +301,7 @@ class FuncBase(SymbolNode):
         return bool(self.info)
 
 
-class OverloadedFuncDef(FuncBase):
+class OverloadedFuncDef(FuncBase, Statement):
     """A logical node representing all the variants of an overloaded function.
 
     This node has no explicit representation in the source program.
@@ -336,8 +343,14 @@ class OverloadedFuncDef(FuncBase):
 class Argument(Node):
     """A single argument in a FuncItem."""
 
+    variable = None  # type: Var
+    type_annotation = None  # type: Optional[mypy.types.Type]
+    initializer = None  # type: Optional[Expression]
+    kind = None  # type: int
+    initialization_statement = None  # type: Optional[AssignmentStmt]
+
     def __init__(self, variable: 'Var', type_annotation: 'Optional[mypy.types.Type]',
-            initializer: Optional[Node], kind: int,
+            initializer: Optional[Expression], kind: int,
             initialization_statement: Optional['AssignmentStmt'] = None) -> None:
         self.variable = variable
 
@@ -431,11 +444,11 @@ class FuncItem(FuncBase):
             arg.set_line(self.line)
         return self
 
-    def is_dynamic(self):
+    def is_dynamic(self) -> bool:
         return self.type is None
 
 
-class FuncDef(FuncItem):
+class FuncDef(FuncItem, Statement):
     """Function definition.
 
     This is a non-lambda function defined using 'def'.
@@ -503,18 +516,18 @@ class FuncDef(FuncItem):
         return ret
 
 
-class Decorator(SymbolNode):
+class Decorator(SymbolNode, Statement):
     """A decorated function.
 
     A single Decorator object can include any number of function decorators.
     """
 
-    func = None  # type: FuncDef           # Decorated function
-    decorators = None  # type: List[Node]  # Decorators, at least one  # XXX Not true
-    var = None  # type: Var              # Represents the decorated function obj
+    func = None  # type: FuncDef                # Decorated function
+    decorators = None  # type: List[Expression] # Decorators, at least one  # XXX Not true
+    var = None  # type: Var                     # Represents the decorated function obj
     is_overload = False
 
-    def __init__(self, func: FuncDef, decorators: List[Node],
+    def __init__(self, func: FuncDef, decorators: List[Expression],
                  var: 'Var') -> None:
         self.func = func
         self.decorators = decorators
@@ -547,7 +560,7 @@ class Decorator(SymbolNode):
         return dec
 
 
-class Var(SymbolNode):
+class Var(SymbolNode, Statement):
     """A variable.
 
     It can refer to global/local variable or a data attribute.
@@ -615,7 +628,7 @@ class Var(SymbolNode):
         return v
 
 
-class ClassDef(Node):
+class ClassDef(Statement):
     """Class definition"""
 
     name = None  # type: str       # Name of the class without module prefix
@@ -623,12 +636,10 @@ class ClassDef(Node):
     defs = None  # type: Block
     type_vars = None  # type: List[mypy.types.TypeVarDef]
     # Base class expressions (not semantically analyzed -- can be arbitrary expressions)
-    base_type_exprs = None  # type: List[Node]
-    # Semantically analyzed base types, derived from base_type_exprs during semantic analysis
-    base_types = None  # type: List[mypy.types.Instance]
+    base_type_exprs = None  # type: List[Expression]
     info = None  # type: TypeInfo  # Related TypeInfo
     metaclass = ''
-    decorators = None  # type: List[Node]
+    decorators = None  # type: List[Expression]
     # Built-in/extension class? (single implementation inheritance only)
     is_builtinclass = False
 
@@ -636,13 +647,12 @@ class ClassDef(Node):
                  name: str,
                  defs: 'Block',
                  type_vars: List['mypy.types.TypeVarDef'] = None,
-                 base_type_exprs: List[Node] = None,
+                 base_type_exprs: List[Expression] = None,
                  metaclass: str = None) -> None:
         self.name = name
         self.defs = defs
         self.type_vars = type_vars or []
         self.base_type_exprs = base_type_exprs or []
-        self.base_types = []  # Not yet semantically analyzed --> don't know base types
         self.metaclass = metaclass
         self.decorators = []
 
@@ -658,7 +668,6 @@ class ClassDef(Node):
                 'name': self.name,
                 'fullname': self.fullname,
                 'type_vars': [v.serialize() for v in self.type_vars],
-                'base_types': [t.serialize() for t in self.base_types],
                 'metaclass': self.metaclass,
                 'is_builtinclass': self.is_builtinclass,
                 }
@@ -672,12 +681,11 @@ class ClassDef(Node):
                        metaclass=data['metaclass'],
                        )
         res.fullname = data['fullname']
-        res.base_types = [mypy.types.Instance.deserialize(t) for t in data['base_types']]
         res.is_builtinclass = data['is_builtinclass']
         return res
 
 
-class GlobalDecl(Node):
+class GlobalDecl(Statement):
     """Declaration global x, y, ..."""
 
     names = None  # type: List[str]
@@ -689,7 +697,7 @@ class GlobalDecl(Node):
         return visitor.visit_global_decl(self)
 
 
-class NonlocalDecl(Node):
+class NonlocalDecl(Statement):
     """Declaration nonlocal x, y, ..."""
 
     names = None  # type: List[str]
@@ -701,14 +709,14 @@ class NonlocalDecl(Node):
         return visitor.visit_nonlocal_decl(self)
 
 
-class Block(Node):
-    body = None  # type: List[Node]
+class Block(Statement):
+    body = None  # type: List[Statement]
     # True if we can determine that this block is not executed. For example,
     # this applies to blocks that are protected by something like "if PY3:"
     # when using Python 2.
     is_unreachable = False
 
-    def __init__(self, body: List[Node]) -> None:
+    def __init__(self, body: List[Statement]) -> None:
         self.body = body
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
@@ -718,18 +726,18 @@ class Block(Node):
 # Statements
 
 
-class ExpressionStmt(Node):
+class ExpressionStmt(Statement):
     """An expression as a statement, such as print(s)."""
-    expr = None  # type: Node
+    expr = None  # type: Expression
 
-    def __init__(self, expr: Node) -> None:
+    def __init__(self, expr: Expression) -> None:
         self.expr = expr
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_expression_stmt(self)
 
 
-class AssignmentStmt(Node):
+class AssignmentStmt(Statement):
     """Assignment statement
     The same node class is used for single assignment, multiple assignment
     (e.g. x, y = z) and chained assignment (e.g. x = y = z), assignments
@@ -738,12 +746,12 @@ class AssignmentStmt(Node):
     An lvalue can be NameExpr, TupleExpr, ListExpr, MemberExpr, IndexExpr.
     """
 
-    lvalues = None  # type: List[Node]
-    rvalue = None  # type: Node
+    lvalues = None  # type: List[Expression]
+    rvalue = None  # type: Expression
     # Declared type in a comment, may be None.
     type = None  # type: mypy.types.Type
 
-    def __init__(self, lvalues: List[Node], rvalue: Node,
+    def __init__(self, lvalues: List[Expression], rvalue: Expression,
                  type: 'mypy.types.Type' = None) -> None:
         self.lvalues = lvalues
         self.rvalue = rvalue
@@ -753,14 +761,14 @@ class AssignmentStmt(Node):
         return visitor.visit_assignment_stmt(self)
 
 
-class OperatorAssignmentStmt(Node):
+class OperatorAssignmentStmt(Statement):
     """Operator assignment statement such as x += 1"""
 
     op = ''
-    lvalue = None  # type: Node
-    rvalue = None  # type: Node
+    lvalue = None  # type: Expression
+    rvalue = None  # type: Expression
 
-    def __init__(self, op: str, lvalue: Node, rvalue: Node) -> None:
+    def __init__(self, op: str, lvalue: Expression, rvalue: Expression) -> None:
         self.op = op
         self.lvalue = lvalue
         self.rvalue = rvalue
@@ -769,12 +777,12 @@ class OperatorAssignmentStmt(Node):
         return visitor.visit_operator_assignment_stmt(self)
 
 
-class WhileStmt(Node):
-    expr = None  # type: Node
+class WhileStmt(Statement):
+    expr = None  # type: Expression
     body = None  # type: Block
     else_body = None  # type: Block
 
-    def __init__(self, expr: Node, body: Block, else_body: Block) -> None:
+    def __init__(self, expr: Expression, body: Block, else_body: Block) -> None:
         self.expr = expr
         self.body = body
         self.else_body = else_body
@@ -783,15 +791,15 @@ class WhileStmt(Node):
         return visitor.visit_while_stmt(self)
 
 
-class ForStmt(Node):
+class ForStmt(Statement):
     # Index variables
-    index = None  # type: Node
+    index = None  # type: Expression
     # Expression to iterate
-    expr = None  # type: Node
+    expr = None  # type: Expression
     body = None  # type: Block
     else_body = None  # type: Block
 
-    def __init__(self, index: Node, expr: Node, body: Block,
+    def __init__(self, index: Expression, expr: Expression, body: Block,
                  else_body: Block) -> None:
         self.index = index
         self.expr = expr
@@ -802,57 +810,57 @@ class ForStmt(Node):
         return visitor.visit_for_stmt(self)
 
 
-class ReturnStmt(Node):
-    expr = None  # type: Node   # Expression or None
+class ReturnStmt(Statement):
+    expr = None  # type: Optional[Expression]
 
-    def __init__(self, expr: Node) -> None:
+    def __init__(self, expr: Optional[Expression]) -> None:
         self.expr = expr
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_return_stmt(self)
 
 
-class AssertStmt(Node):
-    expr = None  # type: Node
+class AssertStmt(Statement):
+    expr = None  # type: Expression
 
-    def __init__(self, expr: Node) -> None:
+    def __init__(self, expr: Expression) -> None:
         self.expr = expr
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_assert_stmt(self)
 
 
-class DelStmt(Node):
-    expr = None  # type: Node
+class DelStmt(Statement):
+    expr = None  # type: Expression
 
-    def __init__(self, expr: Node) -> None:
+    def __init__(self, expr: Expression) -> None:
         self.expr = expr
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_del_stmt(self)
 
 
-class BreakStmt(Node):
+class BreakStmt(Statement):
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_break_stmt(self)
 
 
-class ContinueStmt(Node):
+class ContinueStmt(Statement):
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_continue_stmt(self)
 
 
-class PassStmt(Node):
+class PassStmt(Statement):
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_pass_stmt(self)
 
 
-class IfStmt(Node):
-    expr = None  # type: List[Node]
+class IfStmt(Statement):
+    expr = None  # type: List[Expression]
     body = None  # type: List[Block]
     else_body = None  # type: Block
 
-    def __init__(self, expr: List[Node], body: List[Block],
+    def __init__(self, expr: List[Expression], body: List[Block],
                  else_body: Block) -> None:
         self.expr = expr
         self.body = body
@@ -862,11 +870,11 @@ class IfStmt(Node):
         return visitor.visit_if_stmt(self)
 
 
-class RaiseStmt(Node):
-    expr = None  # type: Node
-    from_expr = None  # type: Node
+class RaiseStmt(Statement):
+    expr = None  # type: Expression
+    from_expr = None  # type: Expression
 
-    def __init__(self, expr: Node, from_expr: Node = None) -> None:
+    def __init__(self, expr: Expression, from_expr: Expression = None) -> None:
         self.expr = expr
         self.from_expr = from_expr
 
@@ -874,15 +882,15 @@ class RaiseStmt(Node):
         return visitor.visit_raise_stmt(self)
 
 
-class TryStmt(Node):
+class TryStmt(Statement):
     body = None  # type: Block                # Try body
-    types = None  # type: List[Node]          # Except type expressions
+    types = None  # type: List[Expression]    # Except type expressions
     vars = None  # type: List[NameExpr]     # Except variable names
     handlers = None  # type: List[Block]      # Except bodies
     else_body = None  # type: Block
     finally_body = None  # type: Block
 
-    def __init__(self, body: Block, vars: List['NameExpr'], types: List[Node],
+    def __init__(self, body: Block, vars: List['NameExpr'], types: List[Expression],
                  handlers: List[Block], else_body: Block,
                  finally_body: Block) -> None:
         self.body = body
@@ -896,12 +904,12 @@ class TryStmt(Node):
         return visitor.visit_try_stmt(self)
 
 
-class WithStmt(Node):
-    expr = None  # type: List[Node]
-    target = None  # type: List[Node]
+class WithStmt(Statement):
+    expr = None  # type: List[Expression]
+    target = None  # type: List[Expression]
     body = None  # type: Block
 
-    def __init__(self, expr: List[Node], target: List[Node],
+    def __init__(self, expr: List[Expression], target: List[Expression],
                  body: Block) -> None:
         self.expr = expr
         self.target = target
@@ -911,15 +919,15 @@ class WithStmt(Node):
         return visitor.visit_with_stmt(self)
 
 
-class PrintStmt(Node):
+class PrintStmt(Statement):
     """Python 2 print statement"""
 
-    args = None  # type: List[Node]
+    args = None  # type: List[Expression]
     newline = False
     # The file-like target object (given using >>).
-    target = None  # type: Optional[Node]
+    target = None  # type: Optional[Expression]
 
-    def __init__(self, args: List[Node], newline: bool, target: Node = None) -> None:
+    def __init__(self, args: List[Expression], newline: bool, target: Expression = None) -> None:
         self.args = args
         self.newline = newline
         self.target = target
@@ -928,14 +936,16 @@ class PrintStmt(Node):
         return visitor.visit_print_stmt(self)
 
 
-class ExecStmt(Node):
+class ExecStmt(Statement):
     """Python 2 exec statement"""
 
-    expr = None  # type: Node
-    variables1 = None  # type: Optional[Node]
-    variables2 = None  # type: Optional[Node]
+    expr = None  # type: Expression
+    variables1 = None  # type: Optional[Expression]
+    variables2 = None  # type: Optional[Expression]
 
-    def __init__(self, expr: Node, variables1: Optional[Node], variables2: Optional[Node]) -> None:
+    def __init__(self, expr: Expression,
+                 variables1: Optional[Expression],
+                 variables2: Optional[Expression]) -> None:
         self.expr = expr
         self.variables1 = variables1
         self.variables2 = variables2
@@ -947,7 +957,7 @@ class ExecStmt(Node):
 # Expressions
 
 
-class IntExpr(Node):
+class IntExpr(Expression):
     """Integer literal"""
 
     value = 0
@@ -972,7 +982,7 @@ class IntExpr(Node):
 # 'x', u'x' -> StrExpr
 # UnicodeExpr is unused
 
-class StrExpr(Node):
+class StrExpr(Expression):
     """String literal"""
 
     value = ''
@@ -986,7 +996,7 @@ class StrExpr(Node):
         return visitor.visit_str_expr(self)
 
 
-class BytesExpr(Node):
+class BytesExpr(Expression):
     """Bytes literal"""
 
     value = ''  # TODO use bytes
@@ -1000,7 +1010,7 @@ class BytesExpr(Node):
         return visitor.visit_bytes_expr(self)
 
 
-class UnicodeExpr(Node):
+class UnicodeExpr(Expression):
     """Unicode literal (Python 2.x)"""
 
     value = ''  # TODO use bytes
@@ -1014,7 +1024,7 @@ class UnicodeExpr(Node):
         return visitor.visit_unicode_expr(self)
 
 
-class FloatExpr(Node):
+class FloatExpr(Expression):
     """Float literal"""
 
     value = 0.0
@@ -1028,7 +1038,7 @@ class FloatExpr(Node):
         return visitor.visit_float_expr(self)
 
 
-class ComplexExpr(Node):
+class ComplexExpr(Expression):
     """Complex literal"""
 
     value = 0.0j
@@ -1042,19 +1052,19 @@ class ComplexExpr(Node):
         return visitor.visit_complex_expr(self)
 
 
-class EllipsisExpr(Node):
+class EllipsisExpr(Expression):
     """Ellipsis (...)"""
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_ellipsis(self)
 
 
-class StarExpr(Node):
+class StarExpr(Expression):
     """Star expression"""
 
-    expr = None  # type: Node
+    expr = None  # type: Expression
 
-    def __init__(self, expr: Node) -> None:
+    def __init__(self, expr: Expression) -> None:
         self.expr = expr
         self.literal = self.expr.literal
         self.literal_hash = ('Star', expr.literal_hash,)
@@ -1066,11 +1076,11 @@ class StarExpr(Node):
         return visitor.visit_star_expr(self)
 
 
-class RefExpr(Node):
+class RefExpr(Expression):
     """Abstract base class for name-like constructs"""
 
     kind = None  # type: int      # LDEF/GDEF/MDEF/... (None if not available)
-    node = None  # type: Node        # Var, FuncDef or TypeInfo that describes this
+    node = None  # type: SymbolNode  # Var, FuncDef or TypeInfo that describes this
     fullname = None  # type: str  # Fully qualified name (or name if not global)
 
     # Does this define a new name with inferred type?
@@ -1116,7 +1126,7 @@ class NameExpr(RefExpr):
         assert data['.class'] == 'NameExpr'
         ret = NameExpr(data['name'])
         ret.kind = data['kind']
-        ret.node = None if data['node'] is None else Node.deserialize(data['node'])
+        ret.node = None if data['node'] is None else SymbolNode.deserialize(data['node'])
         ret.fullname = data['fullname']
         ret.is_def = data['is_def']
         ret.literal = data['literal']
@@ -1126,12 +1136,12 @@ class NameExpr(RefExpr):
 class MemberExpr(RefExpr):
     """Member access expression x.y"""
 
-    expr = None  # type: Node
+    expr = None  # type: Expression
     name = None  # type: str
     # The variable node related to a definition.
     def_var = None  # type: Var
 
-    def __init__(self, expr: Node, name: str) -> None:
+    def __init__(self, expr: Expression, name: str) -> None:
         self.expr = expr
         self.name = name
         self.literal = self.expr.literal
@@ -1155,24 +1165,24 @@ ARG_NAMED = 3  # type: int
 ARG_STAR2 = 4  # type: int
 
 
-class CallExpr(Node):
+class CallExpr(Expression):
     """Call expression.
 
     This can also represent several special forms that are syntactically calls
     such as cast(...) and None  # type: ....
     """
 
-    callee = None  # type: Node
-    args = None  # type: List[Node]
+    callee = None  # type: Expression
+    args = None  # type: List[Expression]
     arg_kinds = None  # type: List[int]  # ARG_ constants
     # Each name can be None if not a keyword argument.
     arg_names = None  # type: List[str]
     # If not None, the node that represents the meaning of the CallExpr. For
     # cast(...) this is a CastExpr.
-    analyzed = None  # type: Node
+    analyzed = None  # type: Optional[Expression]
 
-    def __init__(self, callee: Node, args: List[Node], arg_kinds: List[int],
-                 arg_names: List[str] = None, analyzed: Node = None) -> None:
+    def __init__(self, callee: Expression, args: List[Expression], arg_kinds: List[int],
+                 arg_names: List[str] = None, analyzed: Expression = None) -> None:
         if not arg_names:
             arg_names = [None] * len(args)
         self.callee = callee
@@ -1185,41 +1195,41 @@ class CallExpr(Node):
         return visitor.visit_call_expr(self)
 
 
-class YieldFromExpr(Node):
-    expr = None  # type: Node
+class YieldFromExpr(Expression):
+    expr = None  # type: Expression
 
-    def __init__(self, expr: Node) -> None:
+    def __init__(self, expr: Expression) -> None:
         self.expr = expr
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_yield_from_expr(self)
 
 
-class YieldExpr(Node):
-    expr = None  # type: Optional[Node]
+class YieldExpr(Expression):
+    expr = None  # type: Optional[Expression]
 
-    def __init__(self, expr: Optional[Node]) -> None:
+    def __init__(self, expr: Optional[Expression]) -> None:
         self.expr = expr
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_yield_expr(self)
 
 
-class IndexExpr(Node):
+class IndexExpr(Expression):
     """Index expression x[y].
 
     Also wraps type application such as List[int] as a special form.
     """
 
-    base = None  # type: Node
-    index = None  # type: Node
+    base = None  # type: Expression
+    index = None  # type: Expression
     # Inferred __getitem__ method type
     method_type = None  # type: mypy.types.Type
     # If not None, this is actually semantically a type application
     # Class[type, ...] or a type alias initializer.
     analyzed = None  # type: Union[TypeApplication, TypeAliasExpr]
 
-    def __init__(self, base: Node, index: Node) -> None:
+    def __init__(self, base: Expression, index: Expression) -> None:
         self.base = base
         self.index = index
         self.analyzed = None
@@ -1232,15 +1242,15 @@ class IndexExpr(Node):
         return visitor.visit_index_expr(self)
 
 
-class UnaryExpr(Node):
+class UnaryExpr(Expression):
     """Unary operation"""
 
     op = ''
-    expr = None  # type: Node
+    expr = None  # type: Expression
     # Inferred operator method type
     method_type = None  # type: mypy.types.Type
 
-    def __init__(self, op: str, expr: Node) -> None:
+    def __init__(self, op: str, expr: Expression) -> None:
         self.op = op
         self.expr = expr
         self.literal = self.expr.literal
@@ -1304,17 +1314,17 @@ normal_from_reverse_op = dict((m, n) for n, m in reverse_op_methods.items())
 reverse_op_method_set = set(reverse_op_methods.values())
 
 
-class OpExpr(Node):
+class OpExpr(Expression):
     """Binary operation (other than . or [] or comparison operators,
     which have specific nodes)."""
 
     op = ''
-    left = None  # type: Node
-    right = None  # type: Node
+    left = None  # type: Expression
+    right = None  # type: Expression
     # Inferred type for the operator method type (when relevant).
     method_type = None  # type: mypy.types.Type
 
-    def __init__(self, op: str, left: Node, right: Node) -> None:
+    def __init__(self, op: str, left: Expression, right: Expression) -> None:
         self.op = op
         self.left = left
         self.right = right
@@ -1325,15 +1335,15 @@ class OpExpr(Node):
         return visitor.visit_op_expr(self)
 
 
-class ComparisonExpr(Node):
+class ComparisonExpr(Expression):
     """Comparison expression (e.g. a < b > c < d)."""
 
     operators = None  # type: List[str]
-    operands = None  # type: List[Node]
+    operands = None  # type: List[Expression]
     # Inferred type for the operator methods (when relevant; None for 'is').
     method_types = None  # type: List[mypy.types.Type]
 
-    def __init__(self, operators: List[str], operands: List[Node]) -> None:
+    def __init__(self, operators: List[str], operands: List[Expression]) -> None:
         self.operators = operators
         self.operands = operands
         self.method_types = []
@@ -1345,18 +1355,19 @@ class ComparisonExpr(Node):
         return visitor.visit_comparison_expr(self)
 
 
-class SliceExpr(Node):
+class SliceExpr(Expression):
     """Slice expression (e.g. 'x:y', 'x:', '::2' or ':').
 
     This is only valid as an index in index expressions.
     """
 
-    begin_index = None  # type: Node  # May be None
-    end_index = None  # type: Node    # May be None
-    stride = None  # type: Node       # May be None
+    begin_index = None  # type: Optional[Expression]
+    end_index = None  # type: Optional[Expression]
+    stride = None  # type: Optional[Expression]
 
-    def __init__(self, begin_index: Node, end_index: Node,
-                 stride: Node) -> None:
+    def __init__(self, begin_index: Optional[Expression],
+                 end_index: Optional[Expression],
+                 stride: Optional[Expression]) -> None:
         self.begin_index = begin_index
         self.end_index = end_index
         self.stride = stride
@@ -1365,13 +1376,13 @@ class SliceExpr(Node):
         return visitor.visit_slice_expr(self)
 
 
-class CastExpr(Node):
+class CastExpr(Expression):
     """Cast expression cast(type, expr)."""
 
-    expr = None  # type: Node
+    expr = None  # type: Expression
     type = None  # type: mypy.types.Type
 
-    def __init__(self, expr: Node, typ: 'mypy.types.Type') -> None:
+    def __init__(self, expr: Expression, typ: 'mypy.types.Type') -> None:
         self.expr = expr
         self.type = typ
 
@@ -1379,19 +1390,19 @@ class CastExpr(Node):
         return visitor.visit_cast_expr(self)
 
 
-class RevealTypeExpr(Node):
+class RevealTypeExpr(Expression):
     """Reveal type expression reveal_type(expr)."""
 
-    expr = None  # type: Node
+    expr = None  # type: Expression
 
-    def __init__(self, expr: Node) -> None:
+    def __init__(self, expr: Expression) -> None:
         self.expr = expr
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_reveal_type_expr(self)
 
 
-class SuperExpr(Node):
+class SuperExpr(Expression):
     """Expression super().name"""
 
     name = ''
@@ -1404,13 +1415,13 @@ class SuperExpr(Node):
         return visitor.visit_super_expr(self)
 
 
-class FuncExpr(FuncItem):
+class FuncExpr(FuncItem, Expression):
     """Lambda expression"""
 
     def name(self) -> str:
         return '<lambda>'
 
-    def expr(self) -> Node:
+    def expr(self) -> Expression:
         """Return the expression (the body) of the lambda."""
         ret = cast(ReturnStmt, self.body.body[-1])
         return ret.expr
@@ -1419,12 +1430,12 @@ class FuncExpr(FuncItem):
         return visitor.visit_func_expr(self)
 
 
-class ListExpr(Node):
+class ListExpr(Expression):
     """List literal expression [...]."""
 
-    items = None  # type: List[Node]
+    items = None  # type: List[Expression]
 
-    def __init__(self, items: List[Node]) -> None:
+    def __init__(self, items: List[Expression]) -> None:
         self.items = items
         if all(x.literal == LITERAL_YES for x in items):
             self.literal = LITERAL_YES
@@ -1434,12 +1445,12 @@ class ListExpr(Node):
         return visitor.visit_list_expr(self)
 
 
-class DictExpr(Node):
+class DictExpr(Expression):
     """Dictionary literal expression {key: value, ...}."""
 
-    items = None  # type: List[Tuple[Node, Node]]
+    items = None  # type: List[Tuple[Expression, Expression]]
 
-    def __init__(self, items: List[Tuple[Node, Node]]) -> None:
+    def __init__(self, items: List[Tuple[Expression, Expression]]) -> None:
         self.items = items
         if all(x[0].literal == LITERAL_YES and x[1].literal == LITERAL_YES
                for x in items):
@@ -1451,12 +1462,12 @@ class DictExpr(Node):
         return visitor.visit_dict_expr(self)
 
 
-class TupleExpr(Node):
+class TupleExpr(Expression):
     """Tuple literal expression (..., ...)"""
 
-    items = None  # type: List[Node]
+    items = None  # type: List[Expression]
 
-    def __init__(self, items: List[Node]) -> None:
+    def __init__(self, items: List[Expression]) -> None:
         self.items = items
         if all(x.literal == LITERAL_YES for x in items):
             self.literal = LITERAL_YES
@@ -1466,12 +1477,12 @@ class TupleExpr(Node):
         return visitor.visit_tuple_expr(self)
 
 
-class SetExpr(Node):
+class SetExpr(Expression):
     """Set literal expression {value, ...}."""
 
-    items = None  # type: List[Node]
+    items = None  # type: List[Expression]
 
-    def __init__(self, items: List[Node]) -> None:
+    def __init__(self, items: List[Expression]) -> None:
         self.items = items
         if all(x.literal == LITERAL_YES for x in items):
             self.literal = LITERAL_YES
@@ -1481,16 +1492,16 @@ class SetExpr(Node):
         return visitor.visit_set_expr(self)
 
 
-class GeneratorExpr(Node):
+class GeneratorExpr(Expression):
     """Generator expression ... for ... in ... [ for ...  in ... ] [ if ... ]."""
 
-    left_expr = None  # type: Node
-    sequences_expr = None  # type: List[Node]
-    condlists = None  # type: List[List[Node]]
-    indices = None  # type: List[Node]
+    left_expr = None  # type: Expression
+    sequences_expr = None  # type: List[Expression]
+    condlists = None  # type: List[List[Expression]]
+    indices = None  # type: List[Expression]
 
-    def __init__(self, left_expr: Node, indices: List[Node],
-                 sequences: List[Node], condlists: List[List[Node]]) -> None:
+    def __init__(self, left_expr: Expression, indices: List[Expression],
+                 sequences: List[Expression], condlists: List[List[Expression]]) -> None:
         self.left_expr = left_expr
         self.sequences = sequences
         self.condlists = condlists
@@ -1500,7 +1511,7 @@ class GeneratorExpr(Node):
         return visitor.visit_generator_expr(self)
 
 
-class ListComprehension(Node):
+class ListComprehension(Expression):
     """List comprehension (e.g. [x + 1 for x in a])"""
 
     generator = None  # type: GeneratorExpr
@@ -1512,7 +1523,7 @@ class ListComprehension(Node):
         return visitor.visit_list_comprehension(self)
 
 
-class SetComprehension(Node):
+class SetComprehension(Expression):
     """Set comprehension (e.g. {x + 1 for x in a})"""
 
     generator = None  # type: GeneratorExpr
@@ -1524,17 +1535,17 @@ class SetComprehension(Node):
         return visitor.visit_set_comprehension(self)
 
 
-class DictionaryComprehension(Node):
+class DictionaryComprehension(Expression):
     """Dictionary comprehension (e.g. {k: v for k, v in a}"""
 
-    key = None  # type: Node
-    value = None  # type: Node
-    sequences_expr = None  # type: List[Node]
-    condlists = None  # type: List[List[Node]]
-    indices = None  # type: List[Node]
+    key = None  # type: Expression
+    value = None  # type: Expression
+    sequences_expr = None  # type: List[Expression]
+    condlists = None  # type: List[List[Expression]]
+    indices = None  # type: List[Expression]
 
-    def __init__(self, key: Node, value: Node, indices: List[Node],
-                 sequences: List[Node], condlists: List[List[Node]]) -> None:
+    def __init__(self, key: Expression, value: Expression, indices: List[Expression],
+                 sequences: List[Expression], condlists: List[List[Expression]]) -> None:
         self.key = key
         self.value = value
         self.sequences = sequences
@@ -1545,14 +1556,14 @@ class DictionaryComprehension(Node):
         return visitor.visit_dictionary_comprehension(self)
 
 
-class ConditionalExpr(Node):
+class ConditionalExpr(Expression):
     """Conditional expression (e.g. x if y else z)"""
 
-    cond = None  # type: Node
-    if_expr = None  # type: Node
-    else_expr = None  # type: Node
+    cond = None  # type: Expression
+    if_expr = None  # type: Expression
+    else_expr = None  # type: Expression
 
-    def __init__(self, cond: Node, if_expr: Node, else_expr: Node) -> None:
+    def __init__(self, cond: Expression, if_expr: Expression, else_expr: Expression) -> None:
         self.cond = cond
         self.if_expr = if_expr
         self.else_expr = else_expr
@@ -1561,25 +1572,25 @@ class ConditionalExpr(Node):
         return visitor.visit_conditional_expr(self)
 
 
-class BackquoteExpr(Node):
+class BackquoteExpr(Expression):
     """Python 2 expression `...`."""
 
-    expr = None  # type: Node
+    expr = None  # type: Expression
 
-    def __init__(self, expr: Node) -> None:
+    def __init__(self, expr: Expression) -> None:
         self.expr = expr
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_backquote_expr(self)
 
 
-class TypeApplication(Node):
+class TypeApplication(Expression):
     """Type application expr[type, ...]"""
 
-    expr = None  # type: Node
+    expr = None  # type: Expression
     types = None  # type: List[mypy.types.Type]
 
-    def __init__(self, expr: Node, types: List['mypy.types.Type']) -> None:
+    def __init__(self, expr: Expression, types: List['mypy.types.Type']) -> None:
         self.expr = expr
         self.types = types
 
@@ -1601,7 +1612,7 @@ COVARIANT = 1  # type: int
 CONTRAVARIANT = 2  # type: int
 
 
-class TypeVarExpr(SymbolNode):
+class TypeVarExpr(SymbolNode, Expression):
     """Type variable expression TypeVar(...)."""
 
     _name = ''
@@ -1656,7 +1667,7 @@ class TypeVarExpr(SymbolNode):
                            data['variance'])
 
 
-class TypeAliasExpr(Node):
+class TypeAliasExpr(Expression):
     """Type alias expression (rvalue)."""
 
     type = None  # type: mypy.types.Type
@@ -1668,7 +1679,7 @@ class TypeAliasExpr(Node):
         return visitor.visit_type_alias_expr(self)
 
 
-class NamedTupleExpr(Node):
+class NamedTupleExpr(Expression):
     """Named tuple expression namedtuple(...)."""
 
     # The class representation of this named tuple (its tuple_type attribute contains
@@ -1682,7 +1693,7 @@ class NamedTupleExpr(Node):
         return visitor.visit_namedtuple_expr(self)
 
 
-class PromoteExpr(Node):
+class PromoteExpr(Expression):
     """Ducktype class decorator expression _promote(...)."""
 
     type = None  # type: mypy.types.Type
@@ -1697,7 +1708,7 @@ class PromoteExpr(Node):
 # Constants
 
 
-class TempNode(Node):
+class TempNode(Expression):
     """Temporary dummy node used during type checking.
 
     This node is not present in the original program; it is just an artifact
@@ -1873,6 +1884,18 @@ class TypeInfo(SymbolNode):
         mro = linearize_hierarchy(self)
         assert mro, "Could not produce a MRO at all for %s" % (self,)
         self.mro = mro
+        self.is_enum = self._calculate_is_enum()
+
+    def _calculate_is_enum(self) -> bool:
+        """
+        If this is "enum.Enum" itself, then yes, it's an enum.
+        If the flag .is_enum has been set on anything in the MRO, it's an enum.
+        """
+        if self.fullname() == ENUM_BASECLASS:
+            return True
+        if self.mro:
+            return any(type_info.is_enum for type_info in self.mro)
+        return False
 
     def has_base(self, fullname: str) -> bool:
         """Return True if type has a base type with the specified name.
@@ -1973,8 +1996,8 @@ class SymbolTableNode:
     # AST node of definition (FuncDef/Var/TypeInfo/Decorator/TypeVarExpr,
     # or None for a bound type variable).
     node = None  # type: Optional[SymbolNode]
-    # Type variable id (for bound type variables only)
-    tvar_id = 0
+    # Type variable definition (for bound type variables only)
+    tvar_def = None  # type: Optional[mypy.types.TypeVarDef]
     # Module id (e.g. "foo.bar") or None
     mod_id = ''
     # If this not None, override the type of the 'node' attribute.
@@ -1987,13 +2010,14 @@ class SymbolTableNode:
     cross_ref = None  # type: Optional[str]
 
     def __init__(self, kind: int, node: Optional[SymbolNode], mod_id: str = None,
-                 typ: 'mypy.types.Type' = None, tvar_id: int = 0,
+                 typ: 'mypy.types.Type' = None,
+                 tvar_def: 'mypy.types.TypeVarDef' = None,
                  module_public: bool = True) -> None:
         self.kind = kind
         self.node = node
         self.type_override = typ
         self.mod_id = mod_id
-        self.tvar_id = tvar_id
+        self.tvar_def = tvar_def
         self.module_public = module_public
 
     @property
@@ -2036,8 +2060,8 @@ class SymbolTableNode:
         data = {'.class': 'SymbolTableNode',
                 'kind': node_kinds[self.kind],
                 }  # type: JsonDict
-        if self.tvar_id:
-            data['tvar_id'] = self.tvar_id
+        if self.tvar_def:
+            data['tvar_def'] = self.tvar_def.serialize()
         if not self.module_public:
             data['module_public'] = False
         if self.kind == MODULE_REF:
@@ -2079,8 +2103,8 @@ class SymbolTableNode:
             if 'type_override' in data:
                 typ = mypy.types.Type.deserialize(data['type_override'])
             stnode = SymbolTableNode(kind, node, typ=typ)
-        if 'tvar_id' in data:
-            stnode.tvar_id = data['tvar_id']
+        if 'tvar_def' in data:
+            stnode.tvar_def = mypy.types.TypeVarDef.deserialize(data['tvar_def'])
         if 'module_public' in data:
             stnode.module_public = data['module_public']
         return stnode
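
[Illustration, not part of the patch: the new tvar_def field follows the same '.class'-tagged serialize()/deserialize() convention used throughout this file. A minimal round-trip sketch with a made-up DemoTypeVarDef, not mypy.types.TypeVarDef.]

    from typing import Any, Dict

    JsonDict = Dict[str, Any]

    class DemoTypeVarDef:
        """Toy analogue of TypeVarDef, for showing the round trip only."""

        def __init__(self, name: str, raw_id: int) -> None:
            self.name = name
            self.raw_id = raw_id

        def serialize(self) -> JsonDict:
            # Flatten to a JSON-compatible dict tagged with '.class'.
            return {'.class': 'DemoTypeVarDef', 'name': self.name, 'id': self.raw_id}

        @classmethod
        def deserialize(cls, data: JsonDict) -> 'DemoTypeVarDef':
            assert data['.class'] == 'DemoTypeVarDef'
            return cls(data['name'], data['id'])

    restored = DemoTypeVarDef.deserialize(DemoTypeVarDef('T', -1).serialize())
    assert (restored.name, restored.raw_id) == ('T', -1)
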
@@ -2128,7 +2152,7 @@ class SymbolTable(Dict[str, SymbolTableNode]):
 def function_type(func: FuncBase, fallback: 'mypy.types.Instance') -> 'mypy.types.FunctionLike':
     if func.type:
         assert isinstance(func.type, mypy.types.FunctionLike)
-        return cast(mypy.types.FunctionLike, func.type)
+        return func.type
     else:
         # Implicit type signature with dynamic types.
         # Overloaded functions always have a signature, so func must be an ordinary function.
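
[Illustration, not part of the patch: the idiom behind the cast() removed above, and behind the similar cast() removals later in parse.py and semanal.py. After an assert isinstance(...), mypy narrows the variable's type, so the explicit cast is redundant. A small self-contained example:]

    from typing import Union

    def shout(value: Union[int, str]) -> str:
        assert isinstance(value, str)
        # 'value' is narrowed to str here, so cast(str, value) is unnecessary.
        return value.upper()

    print(shout('narrowed'))
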
diff --git a/mypy/options.py b/mypy/options.py
new file mode 100644
index 0000000..cf1340a
--- /dev/null
+++ b/mypy/options.py
@@ -0,0 +1,67 @@
+from mypy import defaults
+import pprint
+from typing import Any, Dict
+
+
+class BuildType:
+    STANDARD = 0
+    MODULE = 1
+    PROGRAM_TEXT = 2
+
+
+class Options:
+    """Options collected from flags."""
+
+    def __init__(self) -> None:
+        # -- build options --
+        self.build_type = BuildType.STANDARD
+        self.python_version = defaults.PYTHON3_VERSION
+        self.custom_typing_module = None  # type: str
+        self.report_dirs = {}  # type: Dict[str, str]
+        self.silent_imports = False
+        self.almost_silent = False
+
+        # Disallow calling untyped functions from typed ones
+        self.disallow_untyped_calls = False
+
+        # Disallow defining untyped (or incompletely typed) functions
+        self.disallow_untyped_defs = False
+
+        # Type check unannotated functions
+        self.check_untyped_defs = False
+
+        # Also check typeshed for missing annotations
+        self.warn_incomplete_stub = False
+
+        # Warn about casting an expression to its inferred type
+        self.warn_redundant_casts = False
+
+        # Warn about unused '# type: ignore' comments
+        self.warn_unused_ignores = False
+        # -- development options --
+        self.verbosity = 0  # More verbose messages (for troubleshooting)
+        self.pdb = False
+        self.dump_type_stats = False
+        self.dump_inference_stats = False
+        self.dirty_stubs = False
+
+        # -- test options --
+        # Stop after the semantic analysis phase
+        self.semantic_analysis_only = False
+
+        # Use stub builtins fixtures to speed up tests
+        self.use_builtins_fixtures = False
+
+        # -- experimental options --
+        self.fast_parser = False
+        self.incremental = False
+        self.cache_dir = defaults.MYPY_CACHE
+
+    def __eq__(self, other: object) -> bool:
+        return self.__class__ == other.__class__ and self.__dict__ == other.__dict__
+
+    def __ne__(self, other: object) -> bool:
+        return not self == other
+
+    def __repr__(self) -> str:
+        return 'Options({})'.format(pprint.pformat(self.__dict__))
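
[Usage sketch, not part of the patch: Options is a plain bag of flags with value-based equality, replacing the long lists of keyword arguments previously threaded through parse() and the semantic analyzer.]

    from mypy.options import Options

    default_opts = Options()
    py2_opts = Options()
    py2_opts.python_version = (2, 7)
    py2_opts.fast_parser = False

    assert default_opts == Options()   # equality compares the full flag dict
    assert default_opts != py2_opts    # differing python_version
    print(repr(py2_opts))              # Options({...}) rendered via pprint
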
diff --git a/mypy/parse.py b/mypy/parse.py
index 4cd3389..3007902 100644
--- a/mypy/parse.py
+++ b/mypy/parse.py
@@ -36,6 +36,9 @@ from mypy.types import Type, CallableType, AnyType, UnboundType
 from mypy.parsetype import (
     parse_type, parse_types, parse_signature, TypeParseError, parse_str_as_signature
 )
+from mypy.options import Options
+
+from mypy import experiments
 
 
 precedence = {
@@ -68,30 +71,30 @@ op_comp = set([
 none = Token('')  # Empty token
 
 
-def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
-          pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
-          custom_typing_module: str = None,
-          fast_parser: bool = False) -> MypyFile:
+def parse(source: Union[str, bytes],
+          fnam: str,
+          errors: Errors,
+          options: Options) -> MypyFile:
     """Parse a source file, without doing any semantic analysis.
 
     Return the parse tree. If errors is None, raise ParseError on
     failure. Otherwise, use the errors object to report parse errors.
 
-    The pyversion (major, minor) argument determines the Python syntax variant.
+    The python_version (major, minor) option determines the Python syntax variant.
     """
-    if fast_parser:
+    if options.fast_parser:
         import mypy.fastparse
         return mypy.fastparse.parse(source,
                                     fnam=fnam,
                                     errors=errors,
-                                    pyversion=pyversion,
-                                    custom_typing_module=custom_typing_module)
+                                    pyversion=options.python_version,
+                                    custom_typing_module=options.custom_typing_module)
 
     is_stub_file = bool(fnam) and fnam.endswith('.pyi')
     parser = Parser(fnam,
                     errors,
-                    pyversion,
-                    custom_typing_module,
+                    options.python_version,
+                    options.custom_typing_module,
                     is_stub_file=is_stub_file)
     tree = parser.parse(source)
     tree.path = fnam
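
[Usage sketch of the new parse() signature, not part of the patch; it mirrors the __main__ block further down in this file, where None is passed for errors and the Python version travels on the Options object.]

    from mypy.options import Options
    from mypy.parse import parse

    options = Options()
    options.python_version = (2, 7)   # formerly the pyversion=... argument
    options.fast_parser = False       # formerly the fast_parser=... argument

    source = "x = 1  # type: int\n"
    tree = parse(source, 'example.py', None, options=options)
    print(tree)   # dumps the parse tree, as in the __main__ block below
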
@@ -174,13 +177,7 @@ class Parser:
         defs = self.parse_defs()
         weak_opts = self.weak_opts()
         self.expect_type(Eof)
-        # Skip imports that have been ignored (so that we can ignore a C extension module without
-        # stub, for example), except for 'from x import *', because we wouldn't be able to
-        # determine which names should be defined unless we process the module. We can still
-        # ignore errors such as redefinitions when using the latter form.
-        imports = [node for node in self.imports
-                   if node.line not in self.ignored_lines or isinstance(node, ImportAll)]
-        node = MypyFile(defs, imports, is_bom, self.ignored_lines,
+        node = MypyFile(defs, self.imports, is_bom, self.ignored_lines,
                         weak_opts=weak_opts)
         return node
 
@@ -482,6 +479,9 @@ class Parser:
             if is_error:
                 return None
 
+            if typ and isinstance(typ.ret_type, UnboundType):
+                typ.ret_type.is_ret_type = True
+
             node = FuncDef(name, args, body, typ)
             node.set_line(def_tok)
             if typ is not None:
@@ -782,8 +782,18 @@ class Parser:
             else:
                 kind = nodes.ARG_POS
 
+        self.set_type_optional(type, initializer)
+
         return Argument(variable, type, initializer, kind), require_named
 
+    def set_type_optional(self, type: Type, initializer: Node) -> None:
+        if not experiments.STRICT_OPTIONAL:
+            return
+        # Indicate that type should be wrapped in an Optional if arg is initialized to None.
+        optional = isinstance(initializer, NameExpr) and initializer.name == 'None'
+        if isinstance(type, UnboundType):
+            type.optional = optional
+
     def parse_parameter_annotation(self) -> Node:
         if self.current_str() == ':':
             self.skip()
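
[Illustration, not part of the patch, of the behaviour set_type_optional enables when the STRICT_OPTIONAL experiment is on: an argument whose default is None has its declared type treated as wrapped in Optional, so the two signatures below are interchangeable. Plain Python, runnable as-is.]

    from typing import Optional

    def greet_implicit(name: str = None) -> str:
        # Under STRICT_OPTIONAL the parser marks this UnboundType as optional,
        # so the argument is effectively Optional[str].
        return 'hello' if name is None else 'hello ' + name

    def greet_explicit(name: Optional[str] = None) -> str:
        return 'hello' if name is None else 'hello ' + name

    print(greet_implicit(), greet_explicit('mypy'))
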
@@ -852,7 +862,7 @@ class Parser:
             if allow_type:
                 cur = self.current()
                 if type is None and isinstance(cur, StrLit):
-                    ds = docstring.parse_docstring(cast(StrLit, cur).parsed())
+                    ds = docstring.parse_docstring(cur.parsed())
                     if ds and False:  # TODO: Enable when this is working.
                         try:
                             type = parse_str_as_signature(ds.as_type_str(), cur.line)
@@ -879,15 +889,13 @@ class Parser:
 
     def try_combine_overloads(self, s: Node, stmt: List[Node]) -> bool:
         if isinstance(s, Decorator) and stmt:
-            fdef = cast(Decorator, s)
+            fdef = s
             n = fdef.func.name()
-            if (isinstance(stmt[-1], Decorator) and
-                    (cast(Decorator, stmt[-1])).func.name() == n):
-                stmt[-1] = OverloadedFuncDef([cast(Decorator, stmt[-1]), fdef])
+            if isinstance(stmt[-1], Decorator) and stmt[-1].func.name() == n:
+                stmt[-1] = OverloadedFuncDef([stmt[-1], fdef])
                 return True
-            elif (isinstance(stmt[-1], OverloadedFuncDef) and
-                    (cast(OverloadedFuncDef, stmt[-1])).name() == n):
-                (cast(OverloadedFuncDef, stmt[-1])).items.append(fdef)
+            elif isinstance(stmt[-1], OverloadedFuncDef) and stmt[-1].name() == n:
+                stmt[-1].items.append(fdef)
                 return True
         return False
 
@@ -1435,7 +1443,7 @@ class Parser:
             items[0] = self.parse_generator_expr(items[0])
         self.expect(']')
         if len(items) == 1 and isinstance(items[0], GeneratorExpr):
-            return ListComprehension(cast(GeneratorExpr, items[0]))
+            return ListComprehension(items[0])
         else:
             expr = ListExpr(items)
             return expr
@@ -1520,7 +1528,7 @@ class Parser:
         expr = SetExpr(items)
         return expr
 
-    def parse_set_comprehension(self, expr: Node):
+    def parse_set_comprehension(self, expr: Node) -> SetComprehension:
         gen = self.parse_generator_expr(expr)
         self.expect('}')
         set_comp = SetComprehension(gen)
@@ -1683,7 +1691,7 @@ class Parser:
         self.expect('.')
         name = self.expect_type(Name)
         if (isinstance(expr, CallExpr) and isinstance(expr.callee, NameExpr)
-                and cast(NameExpr, expr.callee).name == 'super'):
+                and expr.callee.name == 'super'):
             # super() expression
             node = SuperExpr(name.string)  # type: Node
         else:
@@ -1992,7 +2000,7 @@ if __name__ == '__main__':
     # Parse a file and dump the AST (or display errors).
     import sys
 
-    def usage():
+    def usage() -> None:
         print('Usage: parse.py [--py2] [--quiet] FILE [...]', file=sys.stderr)
         sys.exit(2)
 
@@ -2014,7 +2022,9 @@ if __name__ == '__main__':
         s = open(fnam, 'rb').read()
         errors = Errors()
         try:
-            tree = parse(s, fnam, pyversion=pyversion)
+            options = Options()
+            options.python_version = pyversion
+            tree = parse(s, fnam, None, options=options)
             if not quiet:
                 print(tree)
         except CompileError as e:
diff --git a/mypy/replacetvars.py b/mypy/replacetvars.py
deleted file mode 100644
index 1ea5f83..0000000
--- a/mypy/replacetvars.py
+++ /dev/null
@@ -1,45 +0,0 @@
-"""Type operations"""
-
-from mypy.types import Type, AnyType, TypeTranslator, TypeVarType
-
-
-def replace_type_vars(typ: Type, func_tvars: bool = True) -> Type:
-    """Replace type variable references in a type with the Any type. If
-    func_tvars is false, only replace instance type variables.
-    """
-    return typ.accept(ReplaceTypeVarsVisitor(func_tvars))
-
-
-class ReplaceTypeVarsVisitor(TypeTranslator):
-    # Only override type variable handling; otherwise perform an identity
-    # transformation.
-
-    func_tvars = False
-
-    def __init__(self, func_tvars: bool) -> None:
-        self.func_tvars = func_tvars
-
-    def visit_type_var(self, t: TypeVarType) -> Type:
-        if t.id > 0 or self.func_tvars:
-            if t.line is not None:
-                return AnyType(t.line)
-            else:
-                return AnyType()
-        else:
-            return t
-
-
-def replace_func_type_vars(typ: Type, target_type: Type) -> Type:
-    """Replace function type variables in a type with the target type."""
-    return typ.accept(ReplaceFuncTypeVarsVisitor(target_type))
-
-
-class ReplaceFuncTypeVarsVisitor(TypeTranslator):
-    def __init__(self, target_type: Type) -> None:
-        self.target_type = target_type
-
-    def visit_type_var(self, t: TypeVarType) -> Type:
-        if t.id < 0:
-            return self.target_type
-        else:
-            return t
diff --git a/mypy/report.py b/mypy/report.py
index a4a71bb..f506781 100644
--- a/mypy/report.py
+++ b/mypy/report.py
@@ -64,7 +64,7 @@ class FuncCounterVisitor(TraverserVisitor):
         super().__init__()
         self.counts = [0, 0]
 
-    def visit_func_def(self, defn: FuncDef):
+    def visit_func_def(self, defn: FuncDef) -> None:
         self.counts[defn.type is not None] += 1
 
 
diff --git a/mypy/sametypes.py b/mypy/sametypes.py
index 319af50..9e428ae 100644
--- a/mypy/sametypes.py
+++ b/mypy/sametypes.py
@@ -2,8 +2,8 @@ from typing import Sequence
 
 from mypy.types import (
     Type, UnboundType, ErrorType, AnyType, NoneTyp, Void, TupleType, UnionType, CallableType,
-    TypeVarType, Instance, TypeVisitor, ErasedType, TypeList,
-    Overloaded, PartialType, DeletedType, TypeType
+    TypeVarType, Instance, TypeVisitor, ErasedType, TypeList, Overloaded, PartialType,
+    DeletedType, UninhabitedType, TypeType
 )
 
 
@@ -70,9 +70,15 @@ class SameTypeVisitor(TypeVisitor[bool]):
     def visit_none_type(self, left: NoneTyp) -> bool:
         return isinstance(self.right, NoneTyp)
 
+    def visit_uninhabited_type(self, t: UninhabitedType) -> bool:
+        return isinstance(self.right, UninhabitedType)
+
     def visit_erased_type(self, left: ErasedType) -> bool:
-        # Should not get here.
-        raise RuntimeError()
+        # We can get here when isinstance is used inside a lambda
+        # whose type is being inferred. In any event, we have no reason
+        # to think that an ErasedType will end up being the same as
+        # any other type, even another ErasedType.
+        return False
 
     def visit_deleted_type(self, left: DeletedType) -> bool:
         return isinstance(self.right, DeletedType)
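
[Sketch, not part of the patch, of the kind of code the new visit_erased_type comment describes: isinstance() inside a lambda whose argument type is still being inferred. Whether this exact snippet triggered the old RuntimeError is not established by the patch; it only illustrates the scenario.]

    from typing import Callable

    # The lambda's parameter type comes from the declared Callable type; the
    # isinstance() check inside it is where erased types can end up compared.
    is_positive_int = lambda x: isinstance(x, int) and x > 0  # type: Callable[[object], bool]

    print(is_positive_int(3), is_positive_int('nope'))
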
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 414c0c3..c8ae5e2 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -63,6 +63,7 @@ from mypy.nodes import (
     YieldFromExpr, NamedTupleExpr, NonlocalDecl,
     SetComprehension, DictionaryComprehension, TYPE_ALIAS, TypeAliasExpr,
     YieldExpr, ExecStmt, Argument, BackquoteExpr, ImportBase, COVARIANT, CONTRAVARIANT,
+    IntExpr, FloatExpr, UnicodeExpr,
     INVARIANT, UNBOUND_IMPORTED
 )
 from mypy.visitor import NodeVisitor
@@ -81,6 +82,7 @@ from mypy.parsetype import parse_type
 from mypy.sametypes import is_same_type
 from mypy.erasetype import erase_typevars
 from mypy import defaults
+from mypy.options import Options
 
 
 T = TypeVar('T')
@@ -127,12 +129,6 @@ TYPE_PROMOTIONS_PYTHON2.update({
     'builtins.bytearray': 'builtins.str',
 })
 
-# Hard coded list of Enum baseclasses.
-ENUM_BASECLASSES = [
-    'enum.Enum',
-    'enum.IntEnum',
-]
-
 # When analyzing a function, should we analyze the whole function in one go, or
 # should we only perform one phase of the analysis? The latter is used for
 # nested functions. In the first phase we add the function to the symbol table
@@ -175,6 +171,8 @@ class SemanticAnalyzer(NodeVisitor):
     bound_tvars = None  # type: List[SymbolTableNode]
     # Stack of type variables that were bound by outer classes
     tvar_stack = None  # type: List[List[SymbolTableNode]]
+    # Weak typing options in effect for the current file
+    weak_opts = set()        # type: Set[str]
 
     # Stack of functions being analyzed
     function_stack = None  # type: List[FuncItem]
@@ -198,8 +196,7 @@ class SemanticAnalyzer(NodeVisitor):
     def __init__(self,
                  lib_path: List[str],
                  errors: Errors,
-                 pyversion: Tuple[int, int],
-                 check_untyped_defs: bool) -> None:
+                 options: Options) -> None:
         """Construct semantic analyzer.
 
         Use lib_path to search for modules, and report analysis errors
@@ -218,19 +215,18 @@ class SemanticAnalyzer(NodeVisitor):
         self.lib_path = lib_path
         self.errors = errors
         self.modules = {}
-        self.pyversion = pyversion
-        self.check_untyped_defs = check_untyped_defs
+        self.options = options
         self.postpone_nested_functions_stack = [FUNCTION_BOTH_PHASES]
         self.postponed_functions_stack = []
         self.all_exports = set()  # type: Set[str]
 
     def visit_file(self, file_node: MypyFile, fnam: str) -> None:
         self.errors.set_file(fnam)
-        self.errors.set_ignored_lines(file_node.ignored_lines)
         self.cur_mod_node = file_node
         self.cur_mod_id = file_node.fullname()
         self.is_stub_file = fnam.lower().endswith('.pyi')
         self.globals = file_node.names
+        self.weak_opts = file_node.weak_opts
 
         if 'builtins' in self.modules:
             self.globals['__builtins__'] = SymbolTableNode(
@@ -249,8 +245,6 @@ class SemanticAnalyzer(NodeVisitor):
         if self.cur_mod_id == 'builtins':
             remove_imported_names_from_symtable(self.globals, 'builtins')
 
-        self.errors.set_ignored_lines(set())
-
         if '__all__' in self.globals:
             for name, g in self.globals.items():
                 if name not in self.all_exports:
@@ -308,7 +302,7 @@ class SemanticAnalyzer(NodeVisitor):
                         original_def = symbol.node
                         if self.is_conditional_func(original_def, defn):
                             # Conditional function definition -- multiple defs are ok.
-                            defn.original_def = cast(FuncDef, original_def)
+                            defn.original_def = original_def
                         else:
                             # Report error.
                             self.check_no_global(defn.name(), defn, True)
@@ -465,7 +459,7 @@ class SemanticAnalyzer(NodeVisitor):
 
         tvarnodes = self.add_func_type_variables_to_symbol_table(defn)
         next_function_tvar_id = min([self.next_function_tvar_id()] +
-                                    [n.tvar_id - 1 for n in tvarnodes])
+                                    [n.tvar_def.id.raw_id - 1 for n in tvarnodes])
         self.next_function_tvar_id_stack.append(next_function_tvar_id)
 
         if defn.type:
@@ -522,7 +516,7 @@ class SemanticAnalyzer(NodeVisitor):
                 name = item.name
                 if name in names:
                     self.name_already_defined(name, defn)
-                node = self.bind_type_var(name, item.id, defn)
+                node = self.bind_type_var(name, item, defn)
                 nodes.append(node)
                 names.add(name)
         return nodes
@@ -533,19 +527,22 @@ class SemanticAnalyzer(NodeVisitor):
         else:
             return set(self.type.type_vars)
 
-    def bind_type_var(self, fullname: str, id: int,
+    def bind_type_var(self, fullname: str, tvar_def: TypeVarDef,
                      context: Context) -> SymbolTableNode:
         node = self.lookup_qualified(fullname, context)
         node.kind = BOUND_TVAR
-        node.tvar_id = id
+        node.tvar_def = tvar_def
         return node
 
     def check_function_signature(self, fdef: FuncItem) -> None:
         sig = cast(CallableType, fdef.type)
         if len(sig.arg_types) < len(fdef.arguments):
             self.fail('Type signature has too few arguments', fdef)
+            # Add dummy Any arguments to prevent crashes later.
+            extra_anys = [AnyType()] * (len(fdef.arguments) - len(sig.arg_types))
+            sig.arg_types.extend(extra_anys)
         elif len(sig.arg_types) > len(fdef.arguments):
-            self.fail('Type signature has too many arguments', fdef)
+            self.fail('Type signature has too many arguments', fdef, blocker=True)
 
     def visit_class_def(self, defn: ClassDef) -> None:
         self.clean_up_bases_and_infer_type_variables(defn)
@@ -608,7 +605,7 @@ class SemanticAnalyzer(NodeVisitor):
     def analyze_class_decorator(self, defn: ClassDef, decorator: Node) -> None:
         decorator.accept(self)
 
-    def setup_is_builtinclass(self, defn: ClassDef):
+    def setup_is_builtinclass(self, defn: ClassDef) -> None:
         for decorator in defn.decorators:
             if refers_to_fullname(decorator, 'typing.builtinclass'):
                 defn.is_builtinclass = True
@@ -657,7 +654,7 @@ class SemanticAnalyzer(NodeVisitor):
                     # _promote class decorator (undocumented feature).
                     promote_target = analyzed.type
         if not promote_target:
-            promotions = (TYPE_PROMOTIONS_PYTHON3 if self.pyversion[0] >= 3
+            promotions = (TYPE_PROMOTIONS_PYTHON3 if self.options.python_version[0] >= 3
                           else TYPE_PROMOTIONS_PYTHON2)
             if defn.fullname in promotions:
                 promote_target = self.named_type_or_none(promotions[defn.fullname])
@@ -701,7 +698,7 @@ class SemanticAnalyzer(NodeVisitor):
     def analyze_typevar_declaration(self, t: Type) -> Optional[List[Tuple[str, TypeVarExpr]]]:
         if not isinstance(t, UnboundType):
             return None
-        unbound = cast(UnboundType, t)
+        unbound = t
         sym = self.lookup_qualified(unbound.name, unbound)
         if sym is None or sym.node is None:
             return None
@@ -720,7 +717,7 @@ class SemanticAnalyzer(NodeVisitor):
     def analyze_unbound_tvar(self, t: Type) -> Tuple[str, TypeVarExpr]:
         if not isinstance(t, UnboundType):
             return None
-        unbound = cast(UnboundType, t)
+        unbound = t
         sym = self.lookup_qualified(unbound.name, unbound)
         if sym is not None and sym.kind == UNBOUND_TVAR:
             return unbound.name, cast(TypeVarExpr, sym.node)
@@ -738,40 +735,45 @@ class SemanticAnalyzer(NodeVisitor):
             self.add_symbol(defn.name, SymbolTableNode(kind, defn.info), defn)
 
     def analyze_base_classes(self, defn: ClassDef) -> None:
-        """Analyze and set up base classes."""
+        """Analyze and set up base classes.
+
+        This computes several attributes on the corresponding TypeInfo defn.info
+        related to the base classes: defn.info.bases, defn.info.mro, and
+        miscellaneous others (at least tuple_type, fallback_to_any, and is_enum).
+        """
+
+        base_types = []  # type: List[Instance]
         for base_expr in defn.base_type_exprs:
-            # The base class is originally an expression; convert it to a type.
             try:
                 base = self.expr_to_analyzed_type(base_expr)
             except TypeTranslationError:
                 self.fail('Invalid base class', base_expr)
-                defn.info.mro = []
-                return
+                defn.info.fallback_to_any = True
+                continue
+
             if isinstance(base, TupleType):
                 if defn.info.tuple_type:
                     self.fail("Class has two incompatible bases derived from tuple", defn)
-                defn.info.tuple_type = base
-                base = base.fallback
-                if (not self.is_stub_file and not defn.info.is_named_tuple and
-                        base.type.fullname() == 'builtins.tuple'):
+                if (not self.is_stub_file
+                        and not defn.info.is_named_tuple
+                        and base.fallback.type.fullname() == 'builtins.tuple'):
                     self.fail("Tuple[...] not supported as a base class outside a stub file", defn)
-            if isinstance(base, Instance):
-                defn.base_types.append(base)
-            elif isinstance(base, TupleType):
-                assert False, "Internal error: Unexpected TupleType base class"
+                defn.info.tuple_type = base
+                base_types.append(base.fallback)
+            elif isinstance(base, Instance):
+                base_types.append(base)
             elif isinstance(base, AnyType):
-                # We don't know anything about the base class. Make any unknown attributes
-                # have type 'Any'.
                 defn.info.fallback_to_any = True
-            elif not isinstance(base, UnboundType):
+            else:
                 self.fail('Invalid base class', base_expr)
-            if isinstance(base, Instance):
-                defn.info.is_enum = self.decide_is_enum(base)
+                defn.info.fallback_to_any = True
+
         # Add 'object' as implicit base if there is no other base class.
-        if (not defn.base_types and defn.fullname != 'builtins.object'):
-            obj = self.object_type()
-            defn.base_types.insert(0, obj)
-        defn.info.bases = defn.base_types
+        if (not base_types and defn.fullname != 'builtins.object'):
+            base_types.append(self.object_type())
+
+        defn.info.bases = base_types
+
         # Calculate the MRO. It might be incomplete at this point if
         # the bases of defn include classes imported from other
         # modules in an import loop. We'll recompute it in ThirdPass.
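
[Illustration, not part of the patch: the base-class shapes the reworked analyze_base_classes now classifies, written as ordinary user code; the comments name the TypeInfo attributes mentioned in the docstring above.]

    import enum
    from typing import NamedTuple

    class Color(enum.Enum):      # Instance base -> info.bases = [enum.Enum]; is_enum set via the MRO
        RED = 1

    Point = NamedTuple('Point', [('x', int), ('y', int)])

    class LabeledPoint(Point):   # TupleType base -> info.tuple_type set; its fallback goes into info.bases
        label = ''

    class Plain:                 # no explicit bases -> 'object' added implicitly
        pass
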
@@ -830,11 +832,6 @@ class SemanticAnalyzer(NodeVisitor):
                     visited.add(base.type)
         return False
 
-    def decide_is_enum(self, instance: Instance) -> bool:
-        """Decide if a TypeInfo should be marked as .is_enum=True"""
-        fullname = instance.type.fullname()
-        return fullname in ENUM_BASECLASSES
-
     def analyze_metaclass(self, defn: ClassDef) -> None:
         if defn.metaclass:
             sym = self.lookup_qualified(defn.metaclass, defn)
@@ -869,10 +866,9 @@ class SemanticAnalyzer(NodeVisitor):
 
     def bind_class_type_variables_in_symbol_table(
             self, info: TypeInfo) -> List[SymbolTableNode]:
-        vars = info.type_vars
         nodes = []  # type: List[SymbolTableNode]
-        for index, var in enumerate(vars, 1):
-            node = self.bind_type_var(var, index, info)
+        for var, binder in zip(info.type_vars, info.defn.type_vars):
+            node = self.bind_type_var(var, binder, info)
             nodes.append(node)
         return nodes
 
@@ -1053,15 +1049,18 @@ class SemanticAnalyzer(NodeVisitor):
             s.type = self.anal_type(s.type, allow_tuple_literal)
         else:
             # For simple assignments, allow binding type aliases.
+            # Also set the type if the rvalue is a simple literal.
             if (s.type is None and len(s.lvalues) == 1 and
                     isinstance(s.lvalues[0], NameExpr)):
+                if s.lvalues[0].is_def:
+                    s.type = self.analyze_simple_literal_type(s.rvalue)
                 res = analyze_type_alias(s.rvalue,
                                          self.lookup_qualified,
                                          self.lookup_fully_qualified,
                                          self.fail)
-                if res and (not isinstance(res, Instance) or cast(Instance, res).args):
+                if res and (not isinstance(res, Instance) or res.args):
                     # TODO: What if this gets reassigned?
-                    name = cast(NameExpr, s.lvalues[0])
+                    name = s.lvalues[0]
                     node = self.lookup(name.name, name)
                     node.kind = TYPE_ALIAS
                     node.type_override = res
@@ -1080,6 +1079,29 @@ class SemanticAnalyzer(NodeVisitor):
                 isinstance(s.rvalue, (ListExpr, TupleExpr))):
             self.add_exports(*s.rvalue.items)
 
+    def analyze_simple_literal_type(self, rvalue: Node) -> Optional[Type]:
+        """Return builtins.int if rvalue is an int literal, etc."""
+        if self.weak_opts or self.options.semantic_analysis_only or self.function_stack:
+            # Skip this if any weak options are set.
+            # Also skip if we're only doing the semantic analysis pass.
+            # This is mostly to avoid breaking unit tests.
+            # Also skip inside a function; this is to avoid confusing
+            # the code that handles dead code due to isinstance()
+            # inside type variables with value restrictions (like
+            # AnyStr).
+            return None
+        if isinstance(rvalue, IntExpr):
+            return self.named_type_or_none('builtins.int')
+        if isinstance(rvalue, FloatExpr):
+            return self.named_type_or_none('builtins.float')
+        if isinstance(rvalue, StrExpr):
+            return self.named_type_or_none('builtins.str')
+        if isinstance(rvalue, BytesExpr):
+            return self.named_type_or_none('builtins.bytes')
+        if isinstance(rvalue, UnicodeExpr):
+            return self.named_type_or_none('builtins.unicode')
+        return None
+
     def check_and_set_up_type_alias(self, s: AssignmentStmt) -> None:
         """Check if assignment creates a type alias and set it up as needed."""
         # For now, type aliases only work at the top level of a module.
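
[Illustration, not part of the patch, of what analyze_simple_literal_type adds: module-level assignments from simple literals now receive a precise builtin type already during semantic analysis; inside functions, and under weak options, the hook is skipped as its comment explains.]

    count = 1          # inferred as builtins.int
    ratio = 0.5        # inferred as builtins.float
    name = 'mypy'      # inferred as builtins.str
    blob = b'raw'      # inferred as builtins.bytes

    count = 'oops'     # mypy flags this as incompatible with the inferred int type
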
@@ -1219,8 +1241,8 @@ class SemanticAnalyzer(NodeVisitor):
         """Does memberexpr to refer to an attribute of self?"""
         if not isinstance(memberexpr.expr, NameExpr):
             return False
-        node = (cast(NameExpr, memberexpr.expr)).node
-        return isinstance(node, Var) and (cast(Var, node)).is_self
+        node = memberexpr.expr.node
+        return isinstance(node, Var) and node.is_self
 
     def check_lvalue_validity(self, node: Node, ctx: Context) -> None:
         if isinstance(node, (TypeInfo, TypeVarExpr)):
@@ -1314,10 +1336,10 @@ class SemanticAnalyzer(NodeVisitor):
             return None
         if not isinstance(s.rvalue, CallExpr):
             return None
-        call = cast(CallExpr, s.rvalue)
+        call = s.rvalue
         if not isinstance(call.callee, RefExpr):
             return None
-        callee = cast(RefExpr, call.callee)
+        callee = call.callee
         if callee.fullname != 'typing.TypeVar':
             return None
         return call
@@ -1388,7 +1410,7 @@ class SemanticAnalyzer(NodeVisitor):
         """Check if s defines a namedtuple; if yes, store the definition in symbol table."""
         if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], NameExpr):
             return
-        lvalue = cast(NameExpr, s.lvalues[0])
+        lvalue = s.lvalues[0]
         name = lvalue.name
         named_tuple = self.check_namedtuple(s.rvalue, name)
         if named_tuple is None:
@@ -1412,10 +1434,10 @@ class SemanticAnalyzer(NodeVisitor):
         """
         if not isinstance(node, CallExpr):
             return None
-        call = cast(CallExpr, node)
+        call = node
         if not isinstance(call.callee, RefExpr):
             return None
-        callee = cast(RefExpr, call.callee)
+        callee = call.callee
         fullname = callee.fullname
         if fullname not in ('collections.namedtuple', 'typing.NamedTuple'):
             return None
@@ -1658,14 +1680,14 @@ class SemanticAnalyzer(NodeVisitor):
 
     def visit_break_stmt(self, s: BreakStmt) -> None:
         if self.loop_depth == 0:
-            self.fail("'break' outside loop", s, True)
+            self.fail("'break' outside loop", s, True, blocker=True)
 
     def visit_continue_stmt(self, s: ContinueStmt) -> None:
         if self.loop_depth == 0:
-            self.fail("'continue' outside loop", s, True)
+            self.fail("'continue' outside loop", s, True, blocker=True)
 
     def visit_if_stmt(self, s: IfStmt) -> None:
-        infer_reachability_of_if_statement(s, pyversion=self.pyversion)
+        infer_reachability_of_if_statement(s, pyversion=self.options.python_version)
         for i in range(len(s.expr)):
             s.expr[i].accept(self)
             self.visit_block(s.body[i])
@@ -1756,7 +1778,7 @@ class SemanticAnalyzer(NodeVisitor):
                           "context".format(expr.name), expr)
             else:
                 expr.kind = n.kind
-                expr.node = (cast(Node, n.node))
+                expr.node = n.node
                 expr.fullname = n.fullname
 
     def visit_super_expr(self, expr: SuperExpr) -> None:
@@ -1790,7 +1812,7 @@ class SemanticAnalyzer(NodeVisitor):
 
     def visit_yield_from_expr(self, e: YieldFromExpr) -> None:
         if not self.is_func_scope():  # not sure
-            self.fail("'yield from' outside function", e)
+            self.fail("'yield from' outside function", e, True, blocker=True)
         else:
             self.function_stack[-1].is_generator = True
         if e.expr:
@@ -2043,7 +2065,7 @@ class SemanticAnalyzer(NodeVisitor):
 
     def visit_yield_expr(self, expr: YieldExpr) -> None:
         if not self.is_func_scope():
-            self.fail("'yield' outside function", expr)
+            self.fail("'yield' outside function", expr, True, blocker=True)
         else:
             self.function_stack[-1].is_generator = True
         if expr.expr:
@@ -2257,16 +2279,17 @@ class SemanticAnalyzer(NodeVisitor):
     def name_already_defined(self, name: str, ctx: Context) -> None:
         self.fail("Name '{}' already defined".format(name), ctx)
 
-    def fail(self, msg: str, ctx: Context, serious: bool = False) -> None:
+    def fail(self, msg: str, ctx: Context, serious: bool = False, *,
+             blocker: bool = False) -> None:
         if (not serious and
-                not self.check_untyped_defs and
+                not self.options.check_untyped_defs and
                 self.function_stack and
                 self.function_stack[-1].is_dynamic()):
             return
-        self.errors.report(ctx.get_line(), msg)
+        self.errors.report(ctx.get_line(), msg, blocker=blocker)
 
     def note(self, msg: str, ctx: Context) -> None:
-        if (not self.check_untyped_defs and
+        if (not self.options.check_untyped_defs and
                 self.function_stack and
                 self.function_stack[-1].is_dynamic()):
             return
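
The fail() helper now takes a keyword-only blocker flag and forwards it to Errors.report(), which is how the 'break'/'continue'/'yield'/'yield from' checks earlier in this file turn their messages into blocking errors. A minimal sketch of that forwarding, using a stand-in class (DummyErrors below is illustrative, not part of mypy):

    class DummyErrors:
        # Stand-in with the report() keyword shown in the hunk above.
        def report(self, line: int, msg: str, blocker: bool = False) -> None:
            kind = 'blocking error' if blocker else 'error'
            print('{} at line {}: {}'.format(kind, line, msg))

    errors = DummyErrors()
    errors.report(3, "'break' outside loop", blocker=True)
    errors.report(9, "Name 'x' already defined")  # default: non-blocking
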
@@ -2293,7 +2316,7 @@ class FirstPass(NodeVisitor):
 
     def __init__(self, sem: SemanticAnalyzer) -> None:
         self.sem = sem
-        self.pyversion = sem.pyversion
+        self.pyversion = sem.options.python_version
 
     def analyze(self, file: MypyFile, fnam: str, mod_id: str) -> None:
         """Perform the first analysis pass.
@@ -2420,12 +2443,14 @@ class FirstPass(NodeVisitor):
         # We can't bind module names during the first pass, as the target module might be
         # unprocessed. However, we add dummy unbound imported names to the symbol table so
         # that we at least know that the name refers to a module.
+        node.is_top_level = True
         for name, as_name in node.names:
             imported_name = as_name or name
             if imported_name not in self.sem.globals:
                 self.sem.add_symbol(imported_name, SymbolTableNode(UNBOUND_IMPORTED, None), node)
 
     def visit_import(self, node: Import) -> None:
+        node.is_top_level = True
         # This is similar to visit_import_from -- see the comment there.
         for id, as_id in node.ids:
             imported_id = as_id or id
@@ -2435,6 +2460,9 @@ class FirstPass(NodeVisitor):
                 # If the previous symbol is a variable, this should take precedence.
                 self.sem.globals[imported_id] = SymbolTableNode(UNBOUND_IMPORTED, None)
 
+    def visit_import_all(self, node: ImportAll) -> None:
+        node.is_top_level = True
+
     def visit_while_stmt(self, s: WhileStmt) -> None:
         s.body.accept(self)
         if s.else_body:
@@ -2470,7 +2498,7 @@ class FirstPass(NodeVisitor):
         self.sem.analyze_lvalue(lvalue, add_global=True, explicit_type=explicit_type)
 
 
-class ThirdPass(TraverserVisitor[None]):
+class ThirdPass(TraverserVisitor):
     """The third and final pass of semantic analysis.
 
     Check type argument counts and values of generic types, and perform some
@@ -2483,9 +2511,7 @@ class ThirdPass(TraverserVisitor[None]):
 
     def visit_file(self, file_node: MypyFile, fnam: str) -> None:
         self.errors.set_file(fnam)
-        self.errors.set_ignored_lines(file_node.ignored_lines)
         self.accept(file_node)
-        self.errors.set_ignored_lines(set())
 
     def accept(self, node: Node) -> None:
         try:
@@ -2595,7 +2621,7 @@ class ThirdPass(TraverserVisitor[None]):
         names = self.modules['builtins']
         sym = names.names[name]
         assert isinstance(sym.node, TypeInfo)
-        return Instance(cast(TypeInfo, sym.node), args or [])
+        return Instance(sym.node, args or [])
 
 
 def self_type(typ: TypeInfo) -> Union[Instance, TupleType]:
@@ -2604,10 +2630,7 @@ def self_type(typ: TypeInfo) -> Union[Instance, TupleType]:
     """
     tv = []  # type: List[Type]
     for i in range(len(typ.type_vars)):
-        tv.append(TypeVarType(typ.type_vars[i], i + 1,
-                          typ.defn.type_vars[i].values,
-                          typ.defn.type_vars[i].upper_bound,
-                          typ.defn.type_vars[i].variance))
+        tv.append(TypeVarType(typ.defn.type_vars[i]))
     inst = Instance(typ, tv)
     if typ.tuple_type is None:
         return inst
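
self_type() now builds each TypeVarType directly from the class's TypeVarDef instead of copying the name, id, values, bound and variance by hand. A hedged sketch of the new constructor pattern, using only the TypeVarDef/TypeVarType signatures introduced later in this patch (mypy/types.py); the variable names are illustrative:

    from mypy.types import AnyType, TypeVarDef, TypeVarType

    # One declared type variable T`1, no value restriction, upper bound Any.
    tvar_def = TypeVarDef('T', 1, [], AnyType())
    # The new TypeVarType simply wraps the definition:
    tvar = TypeVarType(tvar_def)
    assert tvar.name == 'T' and tvar.id == tvar_def.id
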
diff --git a/mypy/solve.py b/mypy/solve.py
index a2cfa28..1ebeb92 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -2,15 +2,17 @@
 
 from typing import List, Dict
 
-from mypy.types import Type, Void, NoneTyp, AnyType, ErrorType
+from mypy.types import Type, Void, NoneTyp, AnyType, ErrorType, UninhabitedType, TypeVarId
 from mypy.constraints import Constraint, SUPERTYPE_OF
 from mypy.join import join_types
 from mypy.meet import meet_types
 from mypy.subtypes import is_subtype
 
+from mypy import experiments
 
-def solve_constraints(vars: List[int], constraints: List[Constraint],
-                      strict=True) -> List[Type]:
+
+def solve_constraints(vars: List[TypeVarId], constraints: List[Constraint],
+                      strict: bool = True) -> List[Type]:
     """Solve type constraints.
 
     Return the best type(s) for type variables; each type can be None if the value of the variable
@@ -21,7 +23,7 @@ def solve_constraints(vars: List[int], constraints: List[Constraint],
     pick AnyType.
     """
     # Collect a list of constraints for each type variable.
-    cmap = {}  # type: Dict[int, List[Constraint]]
+    cmap = {}  # type: Dict[TypeVarId, List[Constraint]]
     for con in constraints:
         a = cmap.get(con.type_var, [])  # type: List[Constraint]
         a.append(con)
@@ -58,7 +60,10 @@ def solve_constraints(vars: List[int], constraints: List[Constraint],
             else:
                 # No constraints for type variable -- type 'None' is the most specific type.
                 if strict:
-                    candidate = NoneTyp()
+                    if experiments.STRICT_OPTIONAL:
+                        candidate = UninhabitedType()
+                    else:
+                        candidate = NoneTyp()
                 else:
                     candidate = AnyType()
         elif top is None:
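
With strict Optional checking enabled, an unconstrained type variable now solves to the new UninhabitedType rather than NoneTyp. A hedged sketch of the observable effect, assuming the experiments.STRICT_OPTIONAL flag can be toggled directly and that the solver returns the fallback unchanged for a variable with no constraints, as the branch above suggests:

    from mypy import experiments
    from mypy.solve import solve_constraints
    from mypy.types import TypeVarId, UninhabitedType

    experiments.STRICT_OPTIONAL = True
    (result,) = solve_constraints([TypeVarId(1)], [])  # no constraints at all
    assert isinstance(result, UninhabitedType)         # was NoneTyp before this change
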
diff --git a/mypy/stats.py b/mypy/stats.py
index 1a493d8..952dcd5 100644
--- a/mypy/stats.py
+++ b/mypy/stats.py
@@ -88,8 +88,7 @@ class StatisticsVisitor(TraverserVisitor):
     def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
         self.line = o.line
         if (isinstance(o.rvalue, nodes.CallExpr) and
-            isinstance(cast(nodes.CallExpr, o.rvalue).analyzed,
-                       nodes.TypeVarExpr)):
+                isinstance(o.rvalue.analyzed, nodes.TypeVarExpr)):
             # Type variable definition -- not a real assignment.
             return
         if o.type:
@@ -256,7 +255,7 @@ class HasAnyQuery2(HasAnyQuery):
 
 
 def is_generic(t: Type) -> bool:
-    return isinstance(t, Instance) and bool(cast(Instance, t).args)
+    return isinstance(t, Instance) and bool(t.args)
 
 
 def is_complex(t: Type) -> bool:
diff --git a/mypy/strconv.py b/mypy/strconv.py
index fe7937c..8d2c084 100644
--- a/mypy/strconv.py
+++ b/mypy/strconv.py
@@ -3,7 +3,7 @@
 import re
 import os
 
-import typing
+from typing import Any, List
 
 from mypy.util import dump_tagged, short_type
 import mypy.nodes
@@ -21,7 +21,7 @@ class StrConv(NodeVisitor[str]):
         ExpressionStmt:1(
           IntExpr(1)))
     """
-    def dump(self, nodes, obj):
+    def dump(self, nodes: List[Any], obj: 'mypy.nodes.Node') -> str:
         """Convert a list of items to a multiline pretty-printed string.
 
         The tag is produced from the type name of obj and its line
@@ -30,7 +30,7 @@ class StrConv(NodeVisitor[str]):
         """
         return dump_tagged(nodes, short_type(obj) + ':' + str(obj.line))
 
-    def func_helper(self, o):
+    def func_helper(self, o: 'mypy.nodes.FuncItem') -> List[Any]:
         """Return a list in a format suitable for dump() that represents the
         arguments and the body of a function. The caller can then decorate the
         array with information specific to methods, global functions or
@@ -50,7 +50,7 @@ class StrConv(NodeVisitor[str]):
                 extra.append(('VarArg', [o.arguments[i].variable]))
             elif kind == mypy.nodes.ARG_STAR2:
                 extra.append(('DictVarArg', [o.arguments[i].variable]))
-        a = []
+        a = []  # type: List[Any]
         if args:
             a.append(('Args', args))
         if o.type:
@@ -65,9 +65,9 @@ class StrConv(NodeVisitor[str]):
 
     # Top-level structures
 
-    def visit_mypy_file(self, o):
+    def visit_mypy_file(self, o: 'mypy.nodes.MypyFile') -> str:
         # Skip implicit definitions.
-        a = [o.defs]
+        a = [o.defs]  # type: List[Any]
         if o.is_bom:
             a.insert(0, 'BOM')
         # Omit path to special file with name "main". This is used to simplify
@@ -82,7 +82,7 @@ class StrConv(NodeVisitor[str]):
                                                     for line in sorted(o.ignored_lines)))
         return self.dump(a, o)
 
-    def visit_import(self, o):
+    def visit_import(self, o: 'mypy.nodes.Import') -> str:
         a = []
         for id, as_id in o.ids:
             if as_id is not None:
@@ -130,10 +130,11 @@ class StrConv(NodeVisitor[str]):
         a = [o.name, o.defs.body]
         # Display base types unless they are implicitly just builtins.object
         # (in this case base_type_exprs is empty).
-        if o.base_types and o.base_type_exprs:
-            a.insert(1, ('BaseType', o.base_types))
-        elif len(o.base_type_exprs) > 0:
-            a.insert(1, ('BaseTypeExpr', o.base_type_exprs))
+        if o.base_type_exprs:
+            if o.info and o.info.bases:
+                a.insert(1, ('BaseType', o.info.bases))
+            else:
+                a.insert(1, ('BaseTypeExpr', o.base_type_exprs))
         if o.type_vars:
             a.insert(1, ('TypeVars', o.type_vars))
         if o.metaclass:
diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index 972278f..b765105 100644
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -58,6 +58,7 @@ from mypy.nodes import (
 )
 from mypy.stubgenc import parse_all_signatures, find_unique_signatures, generate_stub_for_c_module
 from mypy.stubutil import is_c_module, write_header
+from mypy.options import Options as MypyOptions
 
 
 Options = NamedTuple('Options', [('pyversion', Tuple[int, int]),
@@ -165,8 +166,10 @@ def generate_stub(path: str, output_dir: str, _all_: Optional[List[str]] = None,
                   target: str = None, add_header: bool = False, module: str = None,
                   pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION) -> None:
     source = open(path, 'rb').read()
+    options = MypyOptions()
+    options.python_version = pyversion
     try:
-        ast = mypy.parse.parse(source, fnam=path, pyversion=pyversion)
+        ast = mypy.parse.parse(source, fnam=path, errors=None, options=options)
     except mypy.errors.CompileError as e:
         # Syntax error!
         for m in e.messages:
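
generate_stub() now builds a mypy Options object and passes it to the parser instead of a bare pyversion tuple. A hedged usage sketch of the new call shape, mirroring the keyword arguments visible in the hunk above; the source string and file name are illustrative:

    import mypy.parse
    from mypy import defaults
    from mypy.options import Options

    options = Options()
    options.python_version = defaults.PYTHON3_VERSION  # replaces the old pyversion= argument
    ast = mypy.parse.parse(b'x = 1\n', fnam='example.py', errors=None, options=options)
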
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index ea47c3f..3d9df0c 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -3,7 +3,7 @@ from typing import cast, List, Dict, Callable
 from mypy.types import (
     Type, AnyType, UnboundType, TypeVisitor, ErrorType, Void, NoneTyp,
     Instance, TypeVarType, CallableType, TupleType, UnionType, Overloaded, ErasedType, TypeList,
-    PartialType, DeletedType, TypeType, is_named_instance
+    PartialType, DeletedType, UninhabitedType, TypeType, is_named_instance
 )
 import mypy.applytype
 import mypy.constraints
@@ -13,6 +13,8 @@ from mypy import messages, sametypes
 from mypy.nodes import CONTRAVARIANT, COVARIANT
 from mypy.maptype import map_instance_to_supertype
 
+from mypy import experiments
+
 
 TypeParameterChecker = Callable[[Type, Type, int], bool]
 
@@ -44,7 +46,7 @@ def is_subtype(left: Type, right: Type,
         return True
     elif isinstance(right, UnionType) and not isinstance(left, UnionType):
         return any(is_subtype(left, item, type_parameter_checker)
-                   for item in cast(UnionType, right).items)
+                   for item in right.items)
     else:
         return left.accept(SubtypeVisitor(right, type_parameter_checker))
 
@@ -56,7 +58,7 @@ def is_subtype_ignoring_tvars(left: Type, right: Type) -> bool:
 
 
 def is_equivalent(a: Type, b: Type,
-                  type_parameter_checker=check_type_parameter) -> bool:
+                  type_parameter_checker: TypeParameterChecker = check_type_parameter) -> bool:
     return is_subtype(a, b, type_parameter_checker) and is_subtype(b, a, type_parameter_checker)
 
 
@@ -99,6 +101,13 @@ class SubtypeVisitor(TypeVisitor[bool]):
         return isinstance(self.right, Void)
 
     def visit_none_type(self, left: NoneTyp) -> bool:
+        if experiments.STRICT_OPTIONAL:
+            return (isinstance(self.right, NoneTyp) or
+                    is_named_instance(self.right, 'builtins.object'))
+        else:
+            return not isinstance(self.right, Void)
+
+    def visit_uninhabited_type(self, left: UninhabitedType) -> bool:
         return not isinstance(self.right, Void)
 
     def visit_erased_type(self, left: ErasedType) -> bool:
@@ -329,7 +338,7 @@ def unify_generic_callable(type: CallableType, target: CallableType,
     applied = mypy.applytype.apply_generic_arguments(type, inferred_vars, msg, context=target)
     if msg.is_errors() or not isinstance(applied, CallableType):
         return None
-    return cast(CallableType, applied)
+    return applied
 
 
 def restrict_subtype_away(t: Type, s: Type) -> Type:
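
Under strict Optional checking, None is a subtype only of None itself and of builtins.object, while the new UninhabitedType keeps the old permissive role as the bottom type. A hedged sketch of the rule using the types that need no TypeInfo setup, assuming the experiments flag can be toggled directly:

    from mypy import experiments
    from mypy.subtypes import is_subtype
    from mypy.types import NoneTyp, UninhabitedType

    experiments.STRICT_OPTIONAL = True
    assert is_subtype(NoneTyp(), NoneTyp())              # None <: None
    assert is_subtype(UninhabitedType(), NoneTyp())      # bottom <: None
    assert not is_subtype(NoneTyp(), UninhabitedType())  # but not the reverse
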
diff --git a/mypy/traverser.py b/mypy/traverser.py
index f49100a..ddd4ea7 100644
--- a/mypy/traverser.py
+++ b/mypy/traverser.py
@@ -1,7 +1,5 @@
 """Generic node traverser visitor"""
 
-from typing import TypeVar, Generic
-
 from mypy.visitor import NodeVisitor
 from mypy.nodes import (
     Block, MypyFile, FuncItem, CallExpr, ClassDef, Decorator, FuncDef,
@@ -15,10 +13,7 @@ from mypy.nodes import (
 )
 
 
-T = TypeVar('T')
-
-
-class TraverserVisitor(NodeVisitor[T], Generic[T]):
+class TraverserVisitor(NodeVisitor[None]):
     """A parse tree visitor that traverses the parse tree during visiting.
 
     It does not perform any actions outside the traversal. Subclasses
@@ -29,15 +24,15 @@ class TraverserVisitor(NodeVisitor[T], Generic[T]):
 
     # Visit methods
 
-    def visit_mypy_file(self, o: MypyFile) -> T:
+    def visit_mypy_file(self, o: MypyFile) -> None:
         for d in o.defs:
             d.accept(self)
 
-    def visit_block(self, block: Block) -> T:
+    def visit_block(self, block: Block) -> None:
         for s in block.body:
             s.accept(self)
 
-    def visit_func(self, o: FuncItem) -> T:
+    def visit_func(self, o: FuncItem) -> None:
         for arg in o.arguments:
             init = arg.initialization_statement
             if init is not None:
@@ -48,60 +43,60 @@ class TraverserVisitor(NodeVisitor[T], Generic[T]):
 
         o.body.accept(self)
 
-    def visit_func_def(self, o: FuncDef) -> T:
+    def visit_func_def(self, o: FuncDef) -> None:
         self.visit_func(o)
 
-    def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> T:
+    def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None:
         for item in o.items:
             item.accept(self)
 
-    def visit_class_def(self, o: ClassDef) -> T:
+    def visit_class_def(self, o: ClassDef) -> None:
         o.defs.accept(self)
 
-    def visit_decorator(self, o: Decorator) -> T:
+    def visit_decorator(self, o: Decorator) -> None:
         o.func.accept(self)
         o.var.accept(self)
         for decorator in o.decorators:
             decorator.accept(self)
 
-    def visit_expression_stmt(self, o: ExpressionStmt) -> T:
+    def visit_expression_stmt(self, o: ExpressionStmt) -> None:
         o.expr.accept(self)
 
-    def visit_assignment_stmt(self, o: AssignmentStmt) -> T:
+    def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
         o.rvalue.accept(self)
         for l in o.lvalues:
             l.accept(self)
 
-    def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt) -> T:
+    def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt) -> None:
         o.rvalue.accept(self)
         o.lvalue.accept(self)
 
-    def visit_while_stmt(self, o: WhileStmt) -> T:
+    def visit_while_stmt(self, o: WhileStmt) -> None:
         o.expr.accept(self)
         o.body.accept(self)
         if o.else_body:
             o.else_body.accept(self)
 
-    def visit_for_stmt(self, o: ForStmt) -> T:
+    def visit_for_stmt(self, o: ForStmt) -> None:
         o.index.accept(self)
         o.expr.accept(self)
         o.body.accept(self)
         if o.else_body:
             o.else_body.accept(self)
 
-    def visit_return_stmt(self, o: ReturnStmt) -> T:
+    def visit_return_stmt(self, o: ReturnStmt) -> None:
         if o.expr is not None:
             o.expr.accept(self)
 
-    def visit_assert_stmt(self, o: AssertStmt) -> T:
+    def visit_assert_stmt(self, o: AssertStmt) -> None:
         if o.expr is not None:
             o.expr.accept(self)
 
-    def visit_del_stmt(self, o: DelStmt) -> T:
+    def visit_del_stmt(self, o: DelStmt) -> None:
         if o.expr is not None:
             o.expr.accept(self)
 
-    def visit_if_stmt(self, o: IfStmt) -> T:
+    def visit_if_stmt(self, o: IfStmt) -> None:
         for e in o.expr:
             e.accept(self)
         for b in o.body:
@@ -109,13 +104,13 @@ class TraverserVisitor(NodeVisitor[T], Generic[T]):
         if o.else_body:
             o.else_body.accept(self)
 
-    def visit_raise_stmt(self, o: RaiseStmt) -> T:
+    def visit_raise_stmt(self, o: RaiseStmt) -> None:
         if o.expr is not None:
             o.expr.accept(self)
         if o.from_expr is not None:
             o.from_expr.accept(self)
 
-    def visit_try_stmt(self, o: TryStmt) -> T:
+    def visit_try_stmt(self, o: TryStmt) -> None:
         o.body.accept(self)
         for i in range(len(o.types)):
             if o.types[i]:
@@ -126,39 +121,39 @@ class TraverserVisitor(NodeVisitor[T], Generic[T]):
         if o.finally_body is not None:
             o.finally_body.accept(self)
 
-    def visit_with_stmt(self, o: WithStmt) -> T:
+    def visit_with_stmt(self, o: WithStmt) -> None:
         for i in range(len(o.expr)):
             o.expr[i].accept(self)
             if o.target[i] is not None:
                 o.target[i].accept(self)
         o.body.accept(self)
 
-    def visit_member_expr(self, o: MemberExpr) -> T:
+    def visit_member_expr(self, o: MemberExpr) -> None:
         o.expr.accept(self)
 
-    def visit_yield_from_expr(self, o: YieldFromExpr) -> T:
+    def visit_yield_from_expr(self, o: YieldFromExpr) -> None:
         o.expr.accept(self)
 
-    def visit_yield_expr(self, o: YieldExpr) -> T:
+    def visit_yield_expr(self, o: YieldExpr) -> None:
         if o.expr:
             o.expr.accept(self)
 
-    def visit_call_expr(self, o: CallExpr) -> T:
+    def visit_call_expr(self, o: CallExpr) -> None:
         for a in o.args:
             a.accept(self)
         o.callee.accept(self)
         if o.analyzed:
             o.analyzed.accept(self)
 
-    def visit_op_expr(self, o: OpExpr) -> T:
+    def visit_op_expr(self, o: OpExpr) -> None:
         o.left.accept(self)
         o.right.accept(self)
 
-    def visit_comparison_expr(self, o: ComparisonExpr) -> T:
+    def visit_comparison_expr(self, o: ComparisonExpr) -> None:
         for operand in o.operands:
             operand.accept(self)
 
-    def visit_slice_expr(self, o: SliceExpr) -> T:
+    def visit_slice_expr(self, o: SliceExpr) -> None:
         if o.begin_index is not None:
             o.begin_index.accept(self)
         if o.end_index is not None:
@@ -166,39 +161,39 @@ class TraverserVisitor(NodeVisitor[T], Generic[T]):
         if o.stride is not None:
             o.stride.accept(self)
 
-    def visit_cast_expr(self, o: CastExpr) -> T:
+    def visit_cast_expr(self, o: CastExpr) -> None:
         o.expr.accept(self)
 
-    def visit_reveal_type_expr(self, o: RevealTypeExpr) -> T:
+    def visit_reveal_type_expr(self, o: RevealTypeExpr) -> None:
         o.expr.accept(self)
 
-    def visit_unary_expr(self, o: UnaryExpr) -> T:
+    def visit_unary_expr(self, o: UnaryExpr) -> None:
         o.expr.accept(self)
 
-    def visit_list_expr(self, o: ListExpr) -> T:
+    def visit_list_expr(self, o: ListExpr) -> None:
         for item in o.items:
             item.accept(self)
 
-    def visit_tuple_expr(self, o: TupleExpr) -> T:
+    def visit_tuple_expr(self, o: TupleExpr) -> None:
         for item in o.items:
             item.accept(self)
 
-    def visit_dict_expr(self, o: DictExpr) -> T:
+    def visit_dict_expr(self, o: DictExpr) -> None:
         for k, v in o.items:
             k.accept(self)
             v.accept(self)
 
-    def visit_set_expr(self, o: SetExpr) -> T:
+    def visit_set_expr(self, o: SetExpr) -> None:
         for item in o.items:
             item.accept(self)
 
-    def visit_index_expr(self, o: IndexExpr) -> T:
+    def visit_index_expr(self, o: IndexExpr) -> None:
         o.base.accept(self)
         o.index.accept(self)
         if o.analyzed:
             o.analyzed.accept(self)
 
-    def visit_generator_expr(self, o: GeneratorExpr) -> T:
+    def visit_generator_expr(self, o: GeneratorExpr) -> None:
         for index, sequence, conditions in zip(o.indices, o.sequences,
                                                o.condlists):
             sequence.accept(self)
@@ -207,16 +202,16 @@ class TraverserVisitor(NodeVisitor[T], Generic[T]):
                 cond.accept(self)
         o.left_expr.accept(self)
 
-    def visit_list_comprehension(self, o: ListComprehension) -> T:
+    def visit_list_comprehension(self, o: ListComprehension) -> None:
         o.generator.accept(self)
 
-    def visit_conditional_expr(self, o: ConditionalExpr) -> T:
+    def visit_conditional_expr(self, o: ConditionalExpr) -> None:
         o.cond.accept(self)
         o.if_expr.accept(self)
         o.else_expr.accept(self)
 
-    def visit_type_application(self, o: TypeApplication) -> T:
+    def visit_type_application(self, o: TypeApplication) -> None:
         o.expr.accept(self)
 
-    def visit_func_expr(self, o: FuncExpr) -> T:
+    def visit_func_expr(self, o: FuncExpr) -> None:
         self.visit_func(o)
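
TraverserVisitor is no longer generic: it is now a plain NodeVisitor[None], so subclasses drop the type parameter and their visit methods return None. A hedged sketch of a subclass under the new signature (NameCollector is illustrative, not part of mypy):

    from typing import List

    from mypy.nodes import NameExpr
    from mypy.traverser import TraverserVisitor

    class NameCollector(TraverserVisitor):
        """Collect the name of every NameExpr reached during traversal."""
        def __init__(self) -> None:
            super().__init__()
            self.names = []  # type: List[str]

        def visit_name_expr(self, o: NameExpr) -> None:  # returns None, not T
            self.names.append(o.name)
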
diff --git a/mypy/treetransform.py b/mypy/treetransform.py
index ed74064..829b86d 100644
--- a/mypy/treetransform.py
+++ b/mypy/treetransform.py
@@ -160,9 +160,6 @@ class TransformVisitor(NodeVisitor[Node]):
                        node.metaclass)
         new.fullname = node.fullname
         new.info = node.info
-        new.base_types = []
-        for base in node.base_types:
-            new.base_types.append(cast(Instance, self.type(base)))
         new.decorators = [decorator.accept(self)
                           for decorator in node.decorators]
         new.is_builtinclass = node.is_builtinclass
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index 6a7ef29..0493d3a 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -5,7 +5,7 @@ from typing import Callable, cast, List, Tuple
 from mypy.types import (
     Type, UnboundType, TypeVarType, TupleType, UnionType, Instance, AnyType, CallableType,
     Void, NoneTyp, DeletedType, TypeList, TypeVarDef, TypeVisitor, StarType, PartialType,
-    EllipsisType, TypeType
+    EllipsisType, UninhabitedType, TypeType
 )
 from mypy.nodes import (
     BOUND_TVAR, TYPE_ALIAS, UNBOUND_IMPORTED,
@@ -16,6 +16,7 @@ from mypy.sametypes import is_same_type
 from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError
 from mypy.subtypes import satisfies_upper_bound
 from mypy import nodes
+from mypy import experiments
 
 
 type_constructors = {'typing.Tuple', 'typing.Union', 'typing.Callable', 'typing.Type'}
@@ -73,6 +74,11 @@ class TypeAnalyser(TypeVisitor[Type]):
         self.fail = fail_func
 
     def visit_unbound_type(self, t: UnboundType) -> Type:
+        if t.optional:
+            t.optional = False
+            # We don't need to worry about double-wrapping Optionals or
+            # wrapping Anys: Union simplification will take care of that.
+            return UnionType.make_simplified_union([self.visit_unbound_type(t), NoneTyp()])
         sym = self.lookup(t.name, t)
         if sym is not None:
             if sym.node is None:
@@ -85,21 +91,24 @@ class TypeAnalyser(TypeVisitor[Type]):
                 if len(t.args) > 0:
                     self.fail('Type variable "{}" used with arguments'.format(
                         t.name), t)
-                tvar_expr = cast(TypeVarExpr, sym.node)
-                return TypeVarType(t.name, sym.tvar_id, tvar_expr.values,
-                                   tvar_expr.upper_bound,
-                                   tvar_expr.variance,
-                                   t.line)
+                assert sym.tvar_def is not None
+                return TypeVarType(sym.tvar_def, t.line)
             elif fullname == 'builtins.None':
-                return Void()
+                if experiments.STRICT_OPTIONAL:
+                    if t.is_ret_type:
+                        return Void()
+                    else:
+                        return NoneTyp()
+                else:
+                    return Void()
             elif fullname == 'typing.Any':
                 return AnyType()
             elif fullname == 'typing.Tuple':
                 if len(t.args) == 2 and isinstance(t.args[1], EllipsisType):
                     # Tuple[T, ...] (uniform, variable-length tuple)
                     node = self.lookup_fqn_func('builtins.tuple')
-                    info = cast(TypeInfo, node.node)
-                    return Instance(info, [t.args[0].accept(self)], t.line)
+                    tuple_info = cast(TypeInfo, node.node)
+                    return Instance(tuple_info, [t.args[0].accept(self)], t.line)
                 return self.tuple_type(self.anal_array(t.args))
             elif fullname == 'typing.Union':
                 items = self.anal_array(t.args)
@@ -110,8 +119,11 @@ class TypeAnalyser(TypeVisitor[Type]):
                     self.fail('Optional[...] must have exactly one type argument', t)
                     return AnyType()
                 items = self.anal_array(t.args)
-                # Currently Optional[t] is just an alias for t.
-                return items[0]
+                if experiments.STRICT_OPTIONAL:
+                    return UnionType.make_simplified_union([items[0], NoneTyp()])
+                else:
+                    # Without strict Optional checking Optional[t] is just an alias for t.
+                    return items[0]
             elif fullname == 'typing.Callable':
                 return self.analyze_callable_type(t)
             elif fullname == 'typing.Type':
@@ -137,7 +149,7 @@ class TypeAnalyser(TypeVisitor[Type]):
                     return AnyType()
                 self.fail('Invalid type "{}"'.format(name), t)
                 return t
-            info = cast(TypeInfo, sym.node)
+            info = sym.node  # type: TypeInfo
             if len(t.args) > 0 and info.fullname() == 'builtins.tuple':
                 return TupleType(self.anal_array(t.args),
                                  Instance(info, [AnyType()], t.line),
@@ -172,6 +184,9 @@ class TypeAnalyser(TypeVisitor[Type]):
     def visit_none_type(self, t: NoneTyp) -> Type:
         return t
 
+    def visit_uninhabited_type(self, t: UninhabitedType) -> Type:
+        return t
+
     def visit_deleted_type(self, t: DeletedType) -> Type:
         return t
 
@@ -262,7 +277,7 @@ class TypeAnalyser(TypeVisitor[Type]):
     def anal_var_defs(self, var_defs: List[TypeVarDef]) -> List[TypeVarDef]:
         a = []  # type: List[TypeVarDef]
         for vd in var_defs:
-            a.append(TypeVarDef(vd.name, vd.id, self.anal_array(vd.values),
+            a.append(TypeVarDef(vd.name, vd.id.raw_id, self.anal_array(vd.values),
                                 vd.upper_bound.accept(self),
                                 vd.variance,
                                 vd.line))
@@ -384,6 +399,9 @@ class TypeAnalyserPass3(TypeVisitor[None]):
     def visit_none_type(self, t: NoneTyp) -> None:
         pass
 
+    def visit_uninhabited_type(self, t: UninhabitedType) -> None:
+        pass
+
     def visit_deleted_type(self, t: DeletedType) -> None:
         pass
 
diff --git a/mypy/typefixture.py b/mypy/typefixture.py
index ec76cf8..59ffeea 100644
--- a/mypy/typefixture.py
+++ b/mypy/typefixture.py
@@ -6,8 +6,8 @@ It contains class TypeInfos and Type objects.
 from typing import List
 
 from mypy.types import (
-    TypeVarType, AnyType, Void, ErrorType, NoneTyp, Instance, CallableType, TypeVarDef,
-    TypeType,
+    Type, TypeVarType, AnyType, Void, ErrorType, NoneTyp,
+    Instance, CallableType, TypeVarDef, TypeType,
 )
 from mypy.nodes import (
     TypeInfo, ClassDef, Block, ARG_POS, ARG_OPT, ARG_STAR, SymbolTable,
@@ -25,14 +25,19 @@ class TypeFixture:
         self.oi = self.make_type_info('builtins.object')               # class object
         self.o = Instance(self.oi, [])                        # object
 
-        # Type variables
-        self.t = TypeVarType('T', 1, [], self.o, variance)     # T`1 (type variable)
-        self.tf = TypeVarType('T', -1, [], self.o, variance)   # T`-1 (type variable)
-        self.tf2 = TypeVarType('T', -2, [], self.o, variance)  # T`-2 (type variable)
-        self.s = TypeVarType('S', 2, [], self.o, variance)     # S`2 (type variable)
-        self.s1 = TypeVarType('S', 1, [], self.o, variance)    # S`1 (type variable)
-        self.sf = TypeVarType('S', -2, [], self.o, variance)   # S`-2 (type variable)
-        self.sf1 = TypeVarType('S', -1, [], self.o, variance)  # S`-1 (type variable)
+        # Type variables (these are effectively global)
+
+        def make_type_var(name: str, id: int, values: List[Type], upper_bound: Type,
+                          variance: int) -> TypeVarType:
+            return TypeVarType(TypeVarDef(name, id, values, upper_bound, variance))
+
+        self.t = make_type_var('T', 1, [], self.o, variance)     # T`1 (type variable)
+        self.tf = make_type_var('T', -1, [], self.o, variance)   # T`-1 (type variable)
+        self.tf2 = make_type_var('T', -2, [], self.o, variance)  # T`-2 (type variable)
+        self.s = make_type_var('S', 2, [], self.o, variance)     # S`2 (type variable)
+        self.s1 = make_type_var('S', 1, [], self.o, variance)    # S`1 (type variable)
+        self.sf = make_type_var('S', -2, [], self.o, variance)   # S`-2 (type variable)
+        self.sf1 = make_type_var('S', -1, [], self.o, variance)  # S`-1 (type variable)
 
         # Simple types
         self.anyt = AnyType()
diff --git a/mypy/types.py b/mypy/types.py
index 92b8659..1f6d957 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -1,11 +1,15 @@
 """Classes for representing mypy types."""
 
 from abc import abstractmethod
-from typing import Any, TypeVar, Dict, List, Tuple, cast, Generic, Set, Sequence, Optional
+from typing import (
+    Any, TypeVar, Dict, List, Tuple, cast, Generic, Set, Sequence, Optional, Union
+)
 
 import mypy.nodes
 from mypy.nodes import INVARIANT, SymbolNode
 
+from mypy import experiments
+
 
 T = TypeVar('T')
 
@@ -43,25 +47,84 @@ class Type(mypy.nodes.Context):
         raise NotImplementedError('unexpected .class {}'.format(classname))
 
 
+class TypeVarId:
+    # A type variable is uniquely identified by its raw id and meta level.
+
+    # For plain variables (type parameters of generic classes and
+    # functions) raw ids are allocated by semantic analysis, using
+    # positive ids 1, 2, ... for generic class parameters and negative
+    # ids -1, ... for generic function type arguments. This convention
+    # is only used to keep type variable ids distinct when allocating
+    # them; the type checker makes no distinction between class and
+    # function type variables.
+
+    # Metavariables are allocated unique ids starting from 1.
+    raw_id = 0  # type: int
+
+    # Level of the variable in type inference. Currently either 0 for
+    # declared types, or 1 for type inference metavariables.
+    meta_level = 0  # type: int
+
+    # Class variable used for allocating fresh ids for metavariables.
+    next_raw_id = 1  # type: int
+
+    def __init__(self, raw_id: int, meta_level: int = 0) -> None:
+        self.raw_id = raw_id
+        self.meta_level = meta_level
+
+    @staticmethod
+    def new(meta_level: int) -> 'TypeVarId':
+        raw_id = TypeVarId.next_raw_id
+        TypeVarId.next_raw_id += 1
+        return TypeVarId(raw_id, meta_level)
+
+    def __repr__(self) -> str:
+        return self.raw_id.__repr__()
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, TypeVarId):
+            return (self.raw_id == other.raw_id and
+                    self.meta_level == other.meta_level)
+        else:
+            return False
+
+    def __ne__(self, other: object) -> bool:
+        return not (self == other)
+
+    def __hash__(self) -> int:
+        return hash((self.raw_id, self.meta_level))
+
+    def is_meta_var(self) -> bool:
+        return self.meta_level > 0
+
+
 class TypeVarDef(mypy.nodes.Context):
     """Definition of a single type variable."""
 
     name = ''
-    id = 0
-    values = None  # type: Optional[List[Type]]
+    id = None  # type: TypeVarId
+    values = None  # type: List[Type]  # Value restriction, empty list if no restriction
     upper_bound = None  # type: Type
     variance = INVARIANT  # type: int
     line = 0
 
-    def __init__(self, name: str, id: int, values: Optional[List[Type]],
+    def __init__(self, name: str, id: Union[TypeVarId, int], values: Optional[List[Type]],
                  upper_bound: Type, variance: int = INVARIANT, line: int = -1) -> None:
         self.name = name
+        if isinstance(id, int):
+            id = TypeVarId(id)
         self.id = id
         self.values = values
         self.upper_bound = upper_bound
         self.variance = variance
         self.line = line
 
+    @staticmethod
+    def new_unification_variable(old: 'TypeVarDef') -> 'TypeVarDef':
+        new_id = TypeVarId.new(meta_level=1)
+        return TypeVarDef(old.name, new_id, old.values,
+                          old.upper_bound, old.variance, old.line)
+
     def get_line(self) -> int:
         return self.line
 
@@ -74,9 +137,10 @@ class TypeVarDef(mypy.nodes.Context):
             return self.name
 
     def serialize(self) -> JsonDict:
+        assert not self.id.is_meta_var()
         return {'.class': 'TypeVarDef',
                 'name': self.name,
-                'id': self.id,
+                'id': self.id.raw_id,
                 'values': None if self.values is None else [v.serialize() for v in self.values],
                 'upper_bound': self.upper_bound.serialize(),
                 'variance': self.variance,
@@ -99,12 +163,23 @@ class UnboundType(Type):
 
     name = ''
     args = None  # type: List[Type]
+    # should this type be wrapped in an Optional?
+    optional = False
+    # is this type a return type?
+    is_ret_type = False
 
-    def __init__(self, name: str, args: List[Type] = None, line: int = -1) -> None:
+    def __init__(self,
+                 name: str,
+                 args: List[Type] = None,
+                 line: int = -1,
+                 optional: bool = False,
+                 is_ret_type: bool = False) -> None:
         if not args:
             args = []
         self.name = name
         self.args = args
+        self.optional = optional
+        self.is_ret_type = is_ret_type
         super().__init__(line)
 
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
@@ -161,7 +236,7 @@ class TypeList(Type):
 class AnyType(Type):
     """The type 'Any'."""
 
-    def __init__(self, implicit=False, line: int = -1) -> None:
+    def __init__(self, implicit: bool = False, line: int = -1) -> None:
         super().__init__(line)
         self.implicit = implicit
 
@@ -205,17 +280,51 @@ class Void(Type):
         return Void()
 
 
+class UninhabitedType(Type):
+    """This type has no members.
+
+    This type is almost the bottom type, except it is not a subtype of Void.
+    With strict Optional checking, it is the only common subtype between all
+    other types, which allows `meet` to be well defined.  Without strict
+    Optional checking, NoneTyp fills this role.
+
+    In general, for any type T that isn't Void:
+        join(UninhabitedType, T) = T
+        meet(UninhabitedType, T) = UninhabitedType
+        is_subtype(UninhabitedType, T) = True
+    """
+
+    def __init__(self, line: int = -1) -> None:
+        super().__init__(line)
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_uninhabited_type(self)
+
+    def serialize(self) -> JsonDict:
+        return {'.class': 'UninhabitedType'}
+
+    @classmethod
+    def deserialize(cls, data: JsonDict) -> 'UninhabitedType':
+        assert data['.class'] == 'UninhabitedType'
+        return UninhabitedType()
+
+
 class NoneTyp(Type):
     """The type of 'None'.
 
-    This is only used internally during type inference.  Programs
-    cannot declare a variable of this type, and the type checker
-    refuses to infer this type for a variable. However, subexpressions
-    often have this type. Note that this is not used as the result
-    type when calling a function with a void type, even though
-    semantically such a function returns a None value; the void type
-    is used instead so that we can report an error if the caller tries
-    to do anything with the return value.
+    Without strict Optional checking:
+        This is only used internally during type inference.  Programs
+        cannot declare a variable of this type, and the type checker
+        refuses to infer this type for a variable. However, subexpressions
+        often have this type. Note that this is not used as the result
+        type when calling a function with a void type, even though
+        semantically such a function returns a None value; the void type
+        is used instead so that we can report an error if the caller tries
+        to do anything with the return value.
+
+    With strict Optional checking:
+        This type can be written by users as 'None', except as the return value
+        of a function, where 'None' means Void.
     """
 
     def __init__(self, line: int = -1) -> None:
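
The UninhabitedType introduced above round-trips through mypy's JsonDict serialization via the serialize()/deserialize() pair defined in the hunk. A hedged round-trip sketch:

    from mypy.types import UninhabitedType

    t = UninhabitedType()
    data = t.serialize()
    assert data == {'.class': 'UninhabitedType'}
    assert isinstance(UninhabitedType.deserialize(data), UninhabitedType)
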
@@ -321,19 +430,18 @@ class TypeVarType(Type):
     """
 
     name = ''  # Name of the type variable (for messages and debugging)
-    id = 0     # 1, 2, ... for type-related, -1, ... for function-related
+    id = None  # type: TypeVarId
     values = None  # type: List[Type]  # Value restriction, empty list if no restriction
     upper_bound = None  # type: Type   # Upper bound for values
     # See comments in TypeVarDef for more about variance.
     variance = INVARIANT  # type: int
 
-    def __init__(self, name: str, id: int, values: List[Type], upper_bound: Type,
-                 variance: int = INVARIANT, line: int = -1) -> None:
-        self.name = name
-        self.id = id
-        self.values = values
-        self.upper_bound = upper_bound
-        self.variance = variance
+    def __init__(self, binder: TypeVarDef, line: int = -1) -> None:
+        self.name = binder.name
+        self.id = binder.id
+        self.values = binder.values
+        self.upper_bound = binder.upper_bound
+        self.variance = binder.variance
         super().__init__(line)
 
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
@@ -346,9 +454,10 @@ class TypeVarType(Type):
             return self.upper_bound
 
     def serialize(self) -> JsonDict:
+        assert not self.id.is_meta_var()
         return {'.class': 'TypeVarType',
                 'name': self.name,
-                'id': self.id,
+                'id': self.id.raw_id,
                 'values': [v.serialize() for v in self.values],
                 'upper_bound': self.upper_bound.serialize(),
                 'variance': self.variance,
@@ -357,11 +466,12 @@ class TypeVarType(Type):
     @classmethod
     def deserialize(cls, data: JsonDict) -> 'TypeVarType':
         assert data['.class'] == 'TypeVarType'
-        return TypeVarType(data['name'],
+        tvdef = TypeVarDef(data['name'],
                            data['id'],
                            [Type.deserialize(v) for v in data['values']],
                            Type.deserialize(data['upper_bound']),
                            data['variance'])
+        return TypeVarType(tvdef)
 
 
 class FunctionLike(Type):
@@ -428,9 +538,9 @@ class CallableType(FunctionLike):
                  variables: List[TypeVarDef] = None,
                  line: int = -1,
                  is_ellipsis_args: bool = False,
-                 implicit=False,
-                 is_classmethod_class=False,
-                 special_sig=None,
+                 implicit: bool = False,
+                 is_classmethod_class: bool = False,
+                 special_sig: Optional[str] = None,
                  ) -> None:
         if variables is None:
             variables = []
@@ -514,8 +624,8 @@ class CallableType(FunctionLike):
     def is_generic(self) -> bool:
         return bool(self.variables)
 
-    def type_var_ids(self) -> List[int]:
-        a = []  # type: List[int]
+    def type_var_ids(self) -> List[TypeVarId]:
+        a = []  # type: List[TypeVarId]
         for tv in self.variables:
             a.append(tv.id)
         return a
@@ -683,7 +793,10 @@ class UnionType(Type):
         elif len(items) == 1:
             return items[0]
         else:
-            return Void()
+            if experiments.STRICT_OPTIONAL:
+                return UninhabitedType()
+            else:
+                return Void()
 
     @staticmethod
     def make_simplified_union(items: List[Type], line: int = -1) -> Type:
@@ -754,10 +867,15 @@ class PartialType(Type):
     # None for the 'None' partial type; otherwise a generic class
     type = None  # type: Optional[mypy.nodes.TypeInfo]
     var = None  # type: mypy.nodes.Var
+    inner_types = None  # type: List[Type]
 
-    def __init__(self, type: Optional['mypy.nodes.TypeInfo'], var: 'mypy.nodes.Var') -> None:
+    def __init__(self,
+                 type: Optional['mypy.nodes.TypeInfo'],
+                 var: 'mypy.nodes.Var',
+                 inner_types: List[Type]) -> None:
         self.type = type
         self.var = var
+        self.inner_types = inner_types
 
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_partial_type(self)
@@ -825,7 +943,8 @@ class TypeType(Type):
     def serialize(self) -> JsonDict:
         return {'.class': 'TypeType', 'item': self.item.serialize()}
 
-    def deserialize(self, data: JsonDict) -> 'TypeType':
+    @classmethod
+    def deserialize(cls, data: JsonDict) -> 'TypeType':
         assert data['.class'] == 'TypeType'
         return TypeType(Type.deserialize(data['item']))
 
@@ -869,6 +988,10 @@ class TypeVisitor(Generic[T]):
     def visit_none_type(self, t: NoneTyp) -> T:
         pass
 
+    @abstractmethod
+    def visit_uninhabited_type(self, t: UninhabitedType) -> T:
+        pass
+
     def visit_erased_type(self, t: ErasedType) -> T:
         raise self._notimplemented_helper('erased_type')
 
@@ -939,6 +1062,9 @@ class TypeTranslator(TypeVisitor[Type]):
     def visit_none_type(self, t: NoneTyp) -> Type:
         return t
 
+    def visit_uninhabited_type(self, t: UninhabitedType) -> Type:
+        return t
+
     def visit_erased_type(self, t: ErasedType) -> Type:
         return t
 
@@ -1006,38 +1132,41 @@ class TypeStrVisitor(TypeVisitor[str]):
      - Represent the NoneTyp type as None.
     """
 
-    def visit_unbound_type(self, t):
+    def visit_unbound_type(self, t: UnboundType) -> str:
         s = t.name + '?'
         if t.args != []:
             s += '[{}]'.format(self.list_str(t.args))
         return s
 
-    def visit_type_list(self, t):
+    def visit_type_list(self, t: TypeList) -> str:
         return '<TypeList {}>'.format(self.list_str(t.items))
 
-    def visit_error_type(self, t):
+    def visit_error_type(self, t: ErrorType) -> str:
         return '<ERROR>'
 
-    def visit_any(self, t):
+    def visit_any(self, t: AnyType) -> str:
         return 'Any'
 
-    def visit_void(self, t):
+    def visit_void(self, t: Void) -> str:
         return 'void'
 
-    def visit_none_type(self, t):
-        # Include quotes to make this distinct from the None value.
-        return "'None'"
+    def visit_none_type(self, t: NoneTyp) -> str:
+        # Fully qualify to make this distinct from the None value.
+        return "builtins.None"
 
-    def visit_erased_type(self, t):
+    def visit_uninhabited_type(self, t: UninhabitedType) -> str:
+        return "<uninhabited>"
+
+    def visit_erased_type(self, t: ErasedType) -> str:
         return "<Erased>"
 
-    def visit_deleted_type(self, t):
+    def visit_deleted_type(self, t: DeletedType) -> str:
         if t.source is None:
             return "<Deleted>"
         else:
             return "<Deleted '{}'>".format(t.source)
 
-    def visit_instance(self, t):
+    def visit_instance(self, t: Instance) -> str:
         s = t.type.fullname() if t.type is not None else '<?>'
         if t.erased:
             s += '*'
@@ -1045,7 +1174,7 @@ class TypeStrVisitor(TypeVisitor[str]):
             s += '[{}]'.format(self.list_str(t.args))
         return s
 
-    def visit_type_var(self, t):
+    def visit_type_var(self, t: TypeVarType) -> str:
         if t.name is None:
             # Anonymous type variable type (only numeric id).
             return '`{}'.format(t.id)
@@ -1053,7 +1182,7 @@ class TypeStrVisitor(TypeVisitor[str]):
             # Named type variable type.
             return '{}`{}'.format(t.name, t.id)
 
-    def visit_callable_type(self, t):
+    def visit_callable_type(self, t: CallableType) -> str:
         s = ''
         bare_asterisk = False
         for i in range(len(t.arg_types)):
@@ -1082,13 +1211,13 @@ class TypeStrVisitor(TypeVisitor[str]):
 
         return 'def {}'.format(s)
 
-    def visit_overloaded(self, t):
+    def visit_overloaded(self, t: Overloaded) -> str:
         a = []
         for i in t.items():
             a.append(i.accept(self))
         return 'Overload({})'.format(', '.join(a))
 
-    def visit_tuple_type(self, t):
+    def visit_tuple_type(self, t: TupleType) -> str:
         s = self.list_str(t.items)
         if t.fallback and t.fallback.type:
             fallback_name = t.fallback.type.fullname()
@@ -1096,11 +1225,11 @@ class TypeStrVisitor(TypeVisitor[str]):
                 return 'Tuple[{}, fallback={}]'.format(s, t.fallback.accept(self))
         return 'Tuple[{}]'.format(s)
 
-    def visit_star_type(self, t):
+    def visit_star_type(self, t: StarType) -> str:
         s = t.type.accept(self)
         return '*{}'.format(s)
 
-    def visit_union_type(self, t):
+    def visit_union_type(self, t: UnionType) -> str:
         s = self.list_str(t.items)
         return 'Union[{}]'.format(s)
 
@@ -1111,13 +1240,13 @@ class TypeStrVisitor(TypeVisitor[str]):
             return '<partial {}[{}]>'.format(t.type.name(),
                                              ', '.join(['?'] * len(t.type.type_vars)))
 
-    def visit_ellipsis_type(self, t):
+    def visit_ellipsis_type(self, t: EllipsisType) -> str:
         return '...'
 
-    def visit_type_type(self, t):
+    def visit_type_type(self, t: TypeType) -> str:
         return 'Type[{}]'.format(t.item.accept(self))
 
-    def list_str(self, a):
+    def list_str(self, a: List[Type]) -> str:
         """Convert items of an array to strings (pretty-print types)
         and join the results with commas.
         """
@@ -1172,6 +1301,9 @@ class TypeQuery(TypeVisitor[bool]):
     def visit_void(self, t: Void) -> bool:
         return self.default
 
+    def visit_uninhabited_type(self, t: UninhabitedType) -> bool:
+        return self.default
+
     def visit_none_type(self, t: NoneTyp) -> bool:
         return self.default
 
diff --git a/mypy/version.py b/mypy/version.py
index a987347..908c0bb 100644
--- a/mypy/version.py
+++ b/mypy/version.py
@@ -1 +1 @@
-__version__ = '0.4.2'
+__version__ = '0.4.3'
diff --git a/setup.cfg b/setup.cfg
index 2aeeb50..ea994ef 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
 [flake8]
 max-line-length = 99
-exclude = mypy/codec/*,mypy/test/data/lib-stub/*,mypy/test/data/fixtures/*
+exclude = mypy/codec/*
 # Thing to ignore:
 #   E251: spaces around default arg value (against our style)
 #   E128: continuation line under-indented (too noisy)
diff --git a/typeshed/stdlib/2.7/ConfigParser.pyi b/typeshed/stdlib/2.7/ConfigParser.pyi
index f9c7ea4..7b027ce 100644
--- a/typeshed/stdlib/2.7/ConfigParser.pyi
+++ b/typeshed/stdlib/2.7/ConfigParser.pyi
@@ -1,16 +1,16 @@
 from typing import Any, Tuple
 
-__all__ = None # type: list[str]
-DEFAULTSECT = None # type: str
-MAX_INTERPOLATION_DEPTH = None # type: int
+__all__ = ... # type: list[str]
+DEFAULTSECT = ... # type: str
+MAX_INTERPOLATION_DEPTH = ... # type: int
 
 class Error(Exception):
-    message = None # type: Any
+    message = ... # type: Any
     def __init__(self, msg: str = ...) -> None: ...
     def _get_message(self) -> None: ...
     def _set_message(self, value: str) -> None: ...
     def __repr__(self) -> str: ...
-    __str__ = __repr__
+    def __str__(self) -> str: ...
 
 class NoSectionError(Error):
     section = ... # type: str
diff --git a/typeshed/stdlib/3/socketserver.pyi b/typeshed/stdlib/2.7/SocketServer.pyi
similarity index 90%
copy from typeshed/stdlib/3/socketserver.pyi
copy to typeshed/stdlib/2.7/SocketServer.pyi
index 56c5973..4cd6e2d 100644
--- a/typeshed/stdlib/3/socketserver.pyi
+++ b/typeshed/stdlib/2.7/SocketServer.pyi
@@ -1,6 +1,6 @@
-# Stubs for socketserver (Python 3.4)
+# Stubs for socketserver
 
-from typing import Optional, Tuple
+from typing import BinaryIO, Optional, Tuple
 from socket import SocketType
 import sys
 import types
@@ -73,5 +73,10 @@ class BaseRequestHandler:
     def handle(self) -> None: ...
     def finish(self) -> None: ...
 
-class StreamRequestHandler(BaseRequestHandler): ...
-class DatagramRequestHandler(BaseRequestHandler): ...
+class StreamRequestHandler(BaseRequestHandler):
+    rfile = ...  # type: BinaryIO
+    wfile = ...  # type: BinaryIO
+
+class DatagramRequestHandler(BaseRequestHandler):
+    rfile = ...  # type: BinaryIO
+    wfile = ...  # type: BinaryIO
diff --git a/typeshed/stdlib/2.7/StringIO.pyi b/typeshed/stdlib/2.7/StringIO.pyi
index 22f7a02..3c90129 100644
--- a/typeshed/stdlib/2.7/StringIO.pyi
+++ b/typeshed/stdlib/2.7/StringIO.pyi
@@ -5,6 +5,7 @@ from typing import Any, IO, AnyStr, Iterator, Iterable, Generic, List
 class StringIO(IO[AnyStr], Generic[AnyStr]):
     closed = ... # type: bool
     softspace = ... # type: int
+    len = ... # type: int
     def __init__(self, buf: AnyStr = ...) -> None: ...
     def __iter__(self) -> Iterator[AnyStr]: ...
     def next(self) -> AnyStr: ...
diff --git a/typeshed/stdlib/2.7/__builtin__.pyi b/typeshed/stdlib/2.7/__builtin__.pyi
index c23c913..0ce76c9 100644
--- a/typeshed/stdlib/2.7/__builtin__.pyi
+++ b/typeshed/stdlib/2.7/__builtin__.pyi
@@ -365,7 +365,7 @@ class str(basestring, Sequence[str]):
     def __ge__(self, x: unicode) -> bool: ...
     def __mod__(self, x: Any) -> str: ...
 
-class bytearray(Sequence[int]):
+class bytearray(MutableSequence[int]):
     @overload
     def __init__(self) -> None: ...
     @overload
@@ -383,6 +383,7 @@ class bytearray(Sequence[int]):
     def expandtabs(self, tabsize: int = 8) -> bytearray: ...
     def find(self, sub: str, start: int = 0, end: int = ...) -> int: ...
     def index(self, sub: str, start: int = 0, end: int = ...) -> int: ...
+    def insert(self, index: int, object: int) -> None: ...
     def isalnum(self) -> bool: ...
     def isalpha(self) -> bool: ...
     def isdigit(self) -> bool: ...
@@ -431,10 +432,7 @@ class bytearray(Sequence[int]):
     @overload
     def __setitem__(self, s: slice, x: Union[Sequence[int], str]) -> None: ...
     def __setslice__(self, start: int, stop: int, x: Union[Sequence[int], str]) -> None: ...
-    @overload
-    def __delitem__(self, i: int) -> None: ...
-    @overload
-    def __delitem__(self, s: slice) -> None: ...
+    def __delitem__(self, i: Union[int, slice]) -> None: ...
     def __delslice__(self, start: int, stop: int) -> None: ...
     def __add__(self, s: str) -> bytearray: ...
     def __mul__(self, n: int) -> bytearray: ...
@@ -492,7 +490,7 @@ class list(MutableSequence[_T], Generic[_T]):
     def insert(self, index: int, object: _T) -> None: ...
     def remove(self, object: _T) -> None: ...
     def reverse(self) -> None: ...
-    def sort(self, *, key: Callable[[_T], Any] = ..., reverse: bool = ...) -> None: ...
+    def sort(self, cmp: Callable[[_T, _T], Any] = ..., key: Callable[[_T], Any] = ..., reverse: bool = ...) -> None: ...
 
     def __len__(self) -> int: ...
     def __iter__(self) -> Iterator[_T]: ...
@@ -539,9 +537,9 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def popitem(self) -> Tuple[_KT, _VT]: ...
     def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
     @overload
-    def update(self, m: Mapping[_KT, _VT]) -> None: ...
+    def update(self, m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
     @overload
-    def update(self, m: Iterable[Tuple[_KT, _VT]]) -> None: ...
+    def update(self, m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
     def keys(self) -> List[_KT]: ...
     def values(self) -> List[_VT]: ...
     def items(self) -> List[Tuple[_KT, _VT]]: ...
@@ -567,11 +565,11 @@ class set(MutableSet[_T], Generic[_T]):
     def add(self, element: _T) -> None: ...
     def clear(self) -> None: ...
     def copy(self) -> set[_T]: ...
-    def difference(self, s: Iterable[Any]) -> set[_T]: ...
-    def difference_update(self, s: Iterable[Any]) -> None: ...
+    def difference(self, *s: Iterable[Any]) -> set[_T]: ...
+    def difference_update(self, *s: Iterable[Any]) -> None: ...
     def discard(self, element: _T) -> None: ...
-    def intersection(self, s: Iterable[Any]) -> set[_T]: ...
-    def intersection_update(self, s: Iterable[Any]) -> None: ...
+    def intersection(self, *s: Iterable[Any]) -> set[_T]: ...
+    def intersection_update(self, *s: Iterable[Any]) -> None: ...
     def isdisjoint(self, s: AbstractSet[Any]) -> bool: ...
     def issubset(self, s: AbstractSet[Any]) -> bool: ...
     def issuperset(self, s: AbstractSet[Any]) -> bool: ...
@@ -579,8 +577,8 @@ class set(MutableSet[_T], Generic[_T]):
     def remove(self, element: _T) -> None: ...
     def symmetric_difference(self, s: Iterable[_T]) -> set[_T]: ...
     def symmetric_difference_update(self, s: Iterable[_T]) -> None: ...
-    def union(self, s: Iterable[_T]) -> set[_T]: ...
-    def update(self, s: Iterable[_T]) -> None: ...
+    def union(self, *s: Iterable[_T]) -> set[_T]: ...
+    def update(self, *s: Iterable[_T]) -> None: ...
     def __len__(self) -> int: ...
     def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_T]: ...
@@ -605,13 +603,13 @@ class frozenset(AbstractSet[_T], Generic[_T]):
     @overload
     def __init__(self, iterable: Iterable[_T]) -> None: ...
     def copy(self) -> frozenset[_T]: ...
-    def difference(self, s: AbstractSet[Any]) -> frozenset[_T]: ...
-    def intersection(self, s: AbstractSet[Any]) -> frozenset[_T]: ...
+    def difference(self, *s: AbstractSet[Any]) -> frozenset[_T]: ...
+    def intersection(self, *s: AbstractSet[Any]) -> frozenset[_T]: ...
     def isdisjoint(self, s: AbstractSet[_T]) -> bool: ...
     def issubset(self, s: AbstractSet[Any]) -> bool: ...
     def issuperset(self, s: AbstractSet[Any]) -> bool: ...
     def symmetric_difference(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
-    def union(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
+    def union(self, *s: AbstractSet[_T]) -> frozenset[_T]: ...
     def __len__(self) -> int: ...
     def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_T]: ...
@@ -868,6 +866,9 @@ class BytesWarning(Warning): ...
 class ResourceWarning(Warning): ...
 
 def eval(s: str, globals: Dict[str, Any] = ..., locals: Dict[str, Any] = ...) -> Any: ...
+def exec(object: str,
+         globals: Dict[str, Any] = None,
+         locals: Dict[str, Any] = None) -> Any: ...  # TODO code object as source
 
 def cmp(x: Any, y: Any) -> int: ...
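A minimal Python 2 sketch of code the widened __builtin__ signatures above are meant to accept (illustration only, not part of the upstream diff; names are arbitrary):

    nums = [3, 1, 2]
    nums.sort(cmp=lambda a, b: cmp(a, b))   # Python 2 cmp= parameter, now present in the stub
    d = {'a': 1}
    d.update({'b': 2}, c=3)                 # dict.update with the new **kwargs overload
    s = {1, 2}
    s.update([3, 4], (5, 6))                # set.update/union/etc. now accept *iterables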
 
diff --git a/typeshed/stdlib/2.7/__future__.pyi b/typeshed/stdlib/2.7/__future__.pyi
index 065057b..2414069 100644
--- a/typeshed/stdlib/2.7/__future__.pyi
+++ b/typeshed/stdlib/2.7/__future__.pyi
@@ -4,10 +4,10 @@ class _Feature:
     def getOptionalRelease(self) -> _version_info: ...
     def getMandatoryRelease(self) -> _version_info: ...
 
-absolute_import = None  # type: _Feature
-division = None  # type: _Feature
-generators = None  # type: _Feature
-nested_scopes = None  # type: _Feature
-print_function = None  # type: _Feature
-unicode_literals = None  # type: _Feature
-with_statement = None  # type: _Feature
+absolute_import = ...  # type: _Feature
+division = ...  # type: _Feature
+generators = ...  # type: _Feature
+nested_scopes = ...  # type: _Feature
+print_function = ...  # type: _Feature
+unicode_literals = ...  # type: _Feature
+with_statement = ...  # type: _Feature
diff --git a/typeshed/stdlib/2.7/argparse.pyi b/typeshed/stdlib/2.7/argparse.pyi
deleted file mode 100644
index 890b4da..0000000
--- a/typeshed/stdlib/2.7/argparse.pyi
+++ /dev/null
@@ -1,192 +0,0 @@
-# Stubs for argparse (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any, Callable, Dict, List, IO, Iterable, Sequence, Union
-
-SUPPRESS = ... # type: Any
-OPTIONAL = ... # type: Any
-ZERO_OR_MORE = ... # type: Any
-ONE_OR_MORE = ... # type: Any
-PARSER = ... # type: Any
-REMAINDER = ... # type: Any
-
-class _AttributeHolder: ...
-
-class HelpFormatter:
-    def __init__(self, prog, indent_increment=..., max_help_position=..., width=...) -> None: ...
-    class _Section:
-        formatter = ... # type: Any
-        parent = ... # type: Any
-        heading = ... # type: Any
-        items = ... # type: Any
-        def __init__(self, formatter, parent, heading=...) -> None: ...
-        def format_help(self): ...
-    def start_section(self, heading): ...
-    def end_section(self): ...
-    def add_text(self, text): ...
-    def add_usage(self, usage, actions, groups, prefix=...): ...
-    def add_argument(self, action): ...
-    def add_arguments(self, actions): ...
-    def format_help(self): ...
-
-class RawDescriptionHelpFormatter(HelpFormatter): ...
-class RawTextHelpFormatter(RawDescriptionHelpFormatter): ...
-class ArgumentDefaultsHelpFormatter(HelpFormatter): ...
-
-class ArgumentError(Exception):
-    argument_name = ... # type: Any
-    message = ... # type: Any
-    def __init__(self, argument, message) -> None: ...
-
-class ArgumentTypeError(Exception): ...
-
-class Action(_AttributeHolder):
-    option_strings = ... # type: Any
-    dest = ... # type: Any
-    nargs = ... # type: Any
-    const = ... # type: Any
-    default = ... # type: Any
-    type = ... # type: Any
-    choices = ... # type: Any
-    required = ... # type: Any
-    help = ... # type: Any
-    metavar = ... # type: Any
-    def __init__(self,
-                 option_strings: List[str],
-                 dest = str,
-                 nargs: Union[int, str] = ...,
-                 const: Any = ...,
-                 default: Any = ...,
-                 type: Callable[[str], Any] = ...,
-                 choices: Iterable[Any] = ...,
-                 required: bool = ...,
-                 help: str = ...,
-                 metavar: str = ...) -> None: ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _StoreAction(Action):
-    def __init__(self, option_strings, dest, nargs=..., const=..., default=..., type=...,
-                 choices=..., required=..., help=..., metavar=...): ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _StoreConstAction(Action):
-    def __init__(self, option_strings, dest, const, default=..., required=..., help=...,
-                 metavar=...): ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _StoreTrueAction(_StoreConstAction):
-    def __init__(self, option_strings, dest, default=..., required=..., help=...) -> None: ...
-
-class _StoreFalseAction(_StoreConstAction):
-    def __init__(self, option_strings, dest, default=..., required=..., help=...) -> None: ...
-
-class _AppendAction(Action):
-    def __init__(self, option_strings, dest, nargs=..., const=..., default=..., type=...,
-                 choices=..., required=..., help=..., metavar=...): ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _AppendConstAction(Action):
-    def __init__(self, option_strings, dest, const, default=..., required=..., help=...,
-                 metavar=...): ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _CountAction(Action):
-    def __init__(self, option_strings, dest, default=..., required=..., help=...) -> None: ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _HelpAction(Action):
-    def __init__(self, option_strings, dest=..., default=..., help=...) -> None: ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _VersionAction(Action):
-    version = ... # type: Any
-    def __init__(self, option_strings, version=..., dest=..., default=..., help=...) -> None: ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _SubParsersAction(Action):
-    class _ChoicesPseudoAction(Action):
-        def __init__(self, name, help) -> None: ...
-    def __init__(self, option_strings, prog, parser_class, dest=..., help=..., metavar=...) -> None: ...
-    def add_parser(self, name, **kwargs): ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class FileType:
-    def __init__(self, mode=..., bufsize=...) -> None: ...
-    def __call__(self, string): ...
-
-class Namespace(_AttributeHolder):
-    def __init__(self, **kwargs) -> None: ...
-    __hash__ = ... # type: Any
-    def __eq__(self, other): ...
-    def __ne__(self, other): ...
-    def __contains__(self, key): ...
-    def __getattr__(self, name: str) -> Any: ...
-
-class _ActionsContainer:
-    description = ... # type: Any
-    argument_default = ... # type: Any
-    prefix_chars = ... # type: Any
-    conflict_handler = ... # type: Any
-    def __init__(self, description, prefix_chars, argument_default, conflict_handler) -> None: ...
-    def register(self, registry_name, value, object): ...
-    def set_defaults(self, **kwargs): ...
-    def get_default(self, dest): ...
-    def add_argument(self,
-        *args: str,
-        action: Union[str, Action] = ...,
-        nargs: str = ...,
-        const: Any = ...,
-        default: Any = ...,
-        type: Any = ...,
-        choices: Any = ..., # TODO: Container?
-        required: bool = ...,
-        help: str = ...,
-        metavar: str = ...,
-        dest: str = ...,
-        version: str = ...
-    ) -> None: ...
-    def add_argument_group(self, *args, **kwargs): ...
-    def add_mutually_exclusive_group(self, **kwargs) -> _MutuallyExclusiveGroup: ...
-
-class _ArgumentGroup(_ActionsContainer):
-    title = ... # type: Any
-    def __init__(self, container, title=..., description=..., **kwargs) -> None: ...
-
-class _MutuallyExclusiveGroup(_ArgumentGroup):
-    required = ... # type: Any
-    def __init__(self, container, required=...) -> None: ...
-
-class ArgumentParser(_AttributeHolder, _ActionsContainer):
-    prog = ... # type: Any
-    usage = ... # type: Any
-    epilog = ... # type: Any
-    version = ... # type: Any
-    formatter_class = ... # type: Any
-    fromfile_prefix_chars = ... # type: Any
-    add_help = ... # type: Any
-    def __init__(self,
-                 prog: str = ...,
-                 usage: str = ...,
-                 description: str = ...,
-                 epilog: str = ...,
-                 version: None = ...,
-                 parents: Iterable[ArgumentParser] = ...,
-                 formatter_class: HelpFormatter = ...,
-                 prefix_chars: str = ...,
-                 fromfile_prefix_chars: str = ...,
-                 argument_default: str = ...,
-                 conflict_handler: str = ...,
-                 add_help: bool = ...) -> None: ...
-    def add_subparsers(self, **kwargs): ...
-    def parse_args(self, args: Sequence[str] = ..., namespace=...): ...
-    def parse_known_args(self, args=..., namespace=...): ...
-    def convert_arg_line_to_args(self, arg_line): ...
-    def format_usage(self): ...
-    def format_help(self): ...
-    def format_version(self): ...
-    def print_usage(self, file=...): ...
-    def print_help(self, file: IO[Any] = None) -> None: ...
-    def print_version(self, file=...): ...
-    def exit(self, status=..., message=...): ...
-    def error(self, message): ...
diff --git a/typeshed/stdlib/2.7/builtins.pyi b/typeshed/stdlib/2.7/builtins.pyi
index c23c913..0ce76c9 100644
--- a/typeshed/stdlib/2.7/builtins.pyi
+++ b/typeshed/stdlib/2.7/builtins.pyi
@@ -365,7 +365,7 @@ class str(basestring, Sequence[str]):
     def __ge__(self, x: unicode) -> bool: ...
     def __mod__(self, x: Any) -> str: ...
 
-class bytearray(Sequence[int]):
+class bytearray(MutableSequence[int]):
     @overload
     def __init__(self) -> None: ...
     @overload
@@ -383,6 +383,7 @@ class bytearray(Sequence[int]):
     def expandtabs(self, tabsize: int = 8) -> bytearray: ...
     def find(self, sub: str, start: int = 0, end: int = ...) -> int: ...
     def index(self, sub: str, start: int = 0, end: int = ...) -> int: ...
+    def insert(self, index: int, object: int) -> None: ...
     def isalnum(self) -> bool: ...
     def isalpha(self) -> bool: ...
     def isdigit(self) -> bool: ...
@@ -431,10 +432,7 @@ class bytearray(Sequence[int]):
     @overload
     def __setitem__(self, s: slice, x: Union[Sequence[int], str]) -> None: ...
     def __setslice__(self, start: int, stop: int, x: Union[Sequence[int], str]) -> None: ...
-    @overload
-    def __delitem__(self, i: int) -> None: ...
-    @overload
-    def __delitem__(self, s: slice) -> None: ...
+    def __delitem__(self, i: Union[int, slice]) -> None: ...
     def __delslice__(self, start: int, stop: int) -> None: ...
     def __add__(self, s: str) -> bytearray: ...
     def __mul__(self, n: int) -> bytearray: ...
@@ -492,7 +490,7 @@ class list(MutableSequence[_T], Generic[_T]):
     def insert(self, index: int, object: _T) -> None: ...
     def remove(self, object: _T) -> None: ...
     def reverse(self) -> None: ...
-    def sort(self, *, key: Callable[[_T], Any] = ..., reverse: bool = ...) -> None: ...
+    def sort(self, cmp: Callable[[_T, _T], Any] = ..., key: Callable[[_T], Any] = ..., reverse: bool = ...) -> None: ...
 
     def __len__(self) -> int: ...
     def __iter__(self) -> Iterator[_T]: ...
@@ -539,9 +537,9 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def popitem(self) -> Tuple[_KT, _VT]: ...
     def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
     @overload
-    def update(self, m: Mapping[_KT, _VT]) -> None: ...
+    def update(self, m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
     @overload
-    def update(self, m: Iterable[Tuple[_KT, _VT]]) -> None: ...
+    def update(self, m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
     def keys(self) -> List[_KT]: ...
     def values(self) -> List[_VT]: ...
     def items(self) -> List[Tuple[_KT, _VT]]: ...
@@ -567,11 +565,11 @@ class set(MutableSet[_T], Generic[_T]):
     def add(self, element: _T) -> None: ...
     def clear(self) -> None: ...
     def copy(self) -> set[_T]: ...
-    def difference(self, s: Iterable[Any]) -> set[_T]: ...
-    def difference_update(self, s: Iterable[Any]) -> None: ...
+    def difference(self, *s: Iterable[Any]) -> set[_T]: ...
+    def difference_update(self, *s: Iterable[Any]) -> None: ...
     def discard(self, element: _T) -> None: ...
-    def intersection(self, s: Iterable[Any]) -> set[_T]: ...
-    def intersection_update(self, s: Iterable[Any]) -> None: ...
+    def intersection(self, *s: Iterable[Any]) -> set[_T]: ...
+    def intersection_update(self, *s: Iterable[Any]) -> None: ...
     def isdisjoint(self, s: AbstractSet[Any]) -> bool: ...
     def issubset(self, s: AbstractSet[Any]) -> bool: ...
     def issuperset(self, s: AbstractSet[Any]) -> bool: ...
@@ -579,8 +577,8 @@ class set(MutableSet[_T], Generic[_T]):
     def remove(self, element: _T) -> None: ...
     def symmetric_difference(self, s: Iterable[_T]) -> set[_T]: ...
     def symmetric_difference_update(self, s: Iterable[_T]) -> None: ...
-    def union(self, s: Iterable[_T]) -> set[_T]: ...
-    def update(self, s: Iterable[_T]) -> None: ...
+    def union(self, *s: Iterable[_T]) -> set[_T]: ...
+    def update(self, *s: Iterable[_T]) -> None: ...
     def __len__(self) -> int: ...
     def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_T]: ...
@@ -605,13 +603,13 @@ class frozenset(AbstractSet[_T], Generic[_T]):
     @overload
     def __init__(self, iterable: Iterable[_T]) -> None: ...
     def copy(self) -> frozenset[_T]: ...
-    def difference(self, s: AbstractSet[Any]) -> frozenset[_T]: ...
-    def intersection(self, s: AbstractSet[Any]) -> frozenset[_T]: ...
+    def difference(self, *s: AbstractSet[Any]) -> frozenset[_T]: ...
+    def intersection(self, *s: AbstractSet[Any]) -> frozenset[_T]: ...
     def isdisjoint(self, s: AbstractSet[_T]) -> bool: ...
     def issubset(self, s: AbstractSet[Any]) -> bool: ...
     def issuperset(self, s: AbstractSet[Any]) -> bool: ...
     def symmetric_difference(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
-    def union(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
+    def union(self, *s: AbstractSet[_T]) -> frozenset[_T]: ...
     def __len__(self) -> int: ...
     def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_T]: ...
@@ -868,6 +866,9 @@ class BytesWarning(Warning): ...
 class ResourceWarning(Warning): ...
 
 def eval(s: str, globals: Dict[str, Any] = ..., locals: Dict[str, Any] = ...) -> Any: ...
+def exec(object: str,
+         globals: Dict[str, Any] = None,
+         locals: Dict[str, Any] = None) -> Any: ...  # TODO code object as source
 
 def cmp(x: Any, y: Any) -> int: ...
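Among the builtins.pyi hunks above (which mirror __builtin__.pyi), bytearray now derives from MutableSequence[int]; a small sketch of what that declares (illustration only, not part of the upstream diff):

    ba = bytearray(b'hello')
    ba.insert(0, 72)     # insert() comes with the MutableSequence[int] interface
    del ba[1:3]          # __delitem__ now covers both int and slice in one signature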
 
diff --git a/typeshed/stdlib/2.7/collections.pyi b/typeshed/stdlib/2.7/collections.pyi
index 0d7182e..9d46243 100644
--- a/typeshed/stdlib/2.7/collections.pyi
+++ b/typeshed/stdlib/2.7/collections.pyi
@@ -7,8 +7,8 @@
 # NOTE: These are incomplete!
 
 from typing import (
-    Dict, Generic, TypeVar, Iterable, Tuple, Callable, Mapping, overload, Iterator, Sized,
-    Optional, List, Set, Sequence, Union, Reversible, MutableMapping, MutableSequence
+    Any, Dict, Generic, TypeVar, Iterable, Tuple, Callable, Mapping, overload, Iterator,
+    Sized, Optional, List, Set, Sequence, Union, Reversible, MutableMapping, MutableSequence
 )
 import typing
 
@@ -17,7 +17,7 @@ _KT = TypeVar('_KT')
 _VT = TypeVar('_VT')
 
 # namedtuple is special-cased in the type checker; the initializer is ignored.
-namedtuple = object()
+namedtuple = ...  # type: Any
 
 class deque(Sized, Iterable[_T], Reversible[_T], Generic[_T]):
     def __init__(self, iterable: Iterable[_T] = ...,
@@ -64,9 +64,9 @@ class Counter(Dict[_T, int], Generic[_T]):
     # Dict.update. Not sure if we should use '# type: ignore' instead
     # and omit the type from the union.
     @overload
-    def update(self, m: Mapping[_T, int]) -> None: ...
+    def update(self, m: Mapping[_T, int], **kwargs: _VT) -> None: ...
     @overload
-    def update(self, m: Union[Iterable[_T], Iterable[Tuple[_T, int]]]) -> None: ...
+    def update(self, m: Union[Iterable[_T], Iterable[Tuple[_T, int]]], **kwargs: _VT) -> None: ...
 
 class OrderedDict(Dict[_KT, _VT], Generic[_KT, _VT]):
     def popitem(self, last: bool = ...) -> Tuple[_KT, _VT]: ...
@@ -90,3 +90,17 @@ class defaultdict(Dict[_KT, _VT], Generic[_KT, _VT]):
     def __init__(self, default_factory: Callable[[], _VT],
                  iterable: Iterable[Tuple[_KT, _VT]]) -> None: ...
     def __missing__(self, key: _KT) -> _VT: ...
+
+class ChainMap(Dict[_KT, _VT], Generic[_KT, _VT]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, *maps: Mapping[_KT, _VT]) -> None: ...
+
+    @property
+    def maps(self) -> List[Mapping[_KT, _VT]]: ...
+
+    def new_child(self, m: Mapping[_KT, _VT] = ...) -> ChainMap[_KT, _VT]: ...
+
+    @property
+    def parents(self) -> ChainMap[_KT, _VT]: ...
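A short illustration (not part of the upstream diff) of the Counter.update overloads gaining **kwargs, matching the Python 2.7 runtime behaviour:

    from collections import Counter
    c = Counter('aab')
    c.update({'b': 2}, c=5)   # mapping plus keyword counts
    # c == Counter({'c': 5, 'a': 2, 'b': 3})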
diff --git a/typeshed/stdlib/2.7/csv.pyi b/typeshed/stdlib/2.7/csv.pyi
index ce33010..c91d330 100644
--- a/typeshed/stdlib/2.7/csv.pyi
+++ b/typeshed/stdlib/2.7/csv.pyi
@@ -2,27 +2,22 @@
 #
 # NOTE: Based on a dynamically typed stub automatically generated by stubgen.
 
-from abc import ABCMeta, abstractmethod
 from typing import Any, Dict, Iterable, List, Sequence, Union
 
-# Public interface of _csv.reader
-class Reader(Iterable[List[str]], metaclass=ABCMeta):
+# Public interface of _csv.reader's return type
+class _Reader(Iterable[List[str]]):
     dialect = ...  # type: Dialect
     line_num = ...  # type: int
 
-    @abstractmethod
     def next(self) -> List[str]: ...
 
 _Row = Sequence[Union[str, int]]
 
-# Public interface of _csv.writer
-class Writer(metaclass=ABCMeta):
+# Public interface of _csv.writer's return type
+class _Writer:
     dialect = ...  # type: Dialect
 
-    @abstractmethod
     def writerow(self, row: _Row) -> None: ...
-
-    @abstractmethod
     def writerows(self, rows: Iterable[_Row]) -> None: ...
 
 QUOTE_ALL = ...  # type: int
@@ -34,8 +29,8 @@ class Error(Exception): ...
 
 _Dialect = Union[str, Dialect]
 
-def writer(csvfile: Any, dialect: _Dialect = ..., **fmtparams) -> Writer: ...
-def reader(csvfile: Iterable[str], dialect: _Dialect = ..., **fmtparams) -> Reader: ...
+def writer(csvfile: Any, dialect: _Dialect = ..., **fmtparams) -> _Writer: ...
+def reader(csvfile: Iterable[str], dialect: _Dialect = ..., **fmtparams) -> _Reader: ...
 def register_dialect(name, dialect=..., **fmtparams): ...
 def unregister_dialect(name): ...
 def get_dialect(name: str) -> Dialect: ...
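The renamed _Reader/_Writer classes stay private and are only reached through reader()/writer(); a minimal usage sketch (illustration only, the file name is hypothetical):

    import csv
    with open('data.csv', 'rb') as f:
        for row in csv.reader(f):    # each row is a List[str]
            print row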
diff --git a/typeshed/stdlib/2.7/datetime.pyi b/typeshed/stdlib/2.7/datetime.pyi
index 6d3ce65..30651f2 100644
--- a/typeshed/stdlib/2.7/datetime.pyi
+++ b/typeshed/stdlib/2.7/datetime.pyi
@@ -181,7 +181,7 @@ class datetime(object):
     @classmethod
     def fromordinal(cls, n: int) -> datetime: ...
     @classmethod
-    def now(cls, tz: timezone = ...) -> datetime: ...
+    def now(cls, tz: _tzinfo = ...) -> datetime: ...
     @classmethod
     def utcnow(cls) -> datetime: ...
     @classmethod
diff --git a/typeshed/stdlib/2.7/difflib.pyi b/typeshed/stdlib/2.7/difflib.pyi
index eaf2b5d..5580ce6 100644
--- a/typeshed/stdlib/2.7/difflib.pyi
+++ b/typeshed/stdlib/2.7/difflib.pyi
@@ -5,7 +5,8 @@
 # TODO: Support unicode?
 
 from typing import (
-    TypeVar, Callable, Iterable, List, NamedTuple, Sequence, Tuple, Generic
+    TypeVar, Callable, Iterable, Iterator, List, NamedTuple, Sequence, Tuple,
+    Generic
 )
 
 _T = TypeVar('_T')
@@ -33,20 +34,20 @@ def get_close_matches(word: Sequence[_T], possibilities: List[Sequence[_T]],
 class Differ:
     def __init__(self, linejunk: Callable[[str], bool] = ...,
                  charjunk: Callable[[str], bool] = ...) -> None: ...
-    def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterable[str]: ...
+    def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterator[str]: ...
 
 def IS_LINE_JUNK(str) -> bool: ...
 def IS_CHARACTER_JUNK(str) -> bool: ...
 def unified_diff(a: Sequence[str], b: Sequence[str], fromfile: str = ...,
                  tofile: str = ..., fromfiledate: str = ..., tofiledate: str = ...,
-                 n: int = ..., lineterm: str = ...) -> Iterable[str]: ...
+                 n: int = ..., lineterm: str = ...) -> Iterator[str]: ...
 def context_diff(a: Sequence[str], b: Sequence[str], fromfile: str=...,
                  tofile: str = ..., fromfiledate: str = ..., tofiledate: str = ...,
-                 n: int = ..., lineterm: str = ...) -> Iterable[str]: ...
+                 n: int = ..., lineterm: str = ...) -> Iterator[str]: ...
 def ndiff(a: Sequence[str], b: Sequence[str],
           linejunk: Callable[[str], bool] = ...,
           charjunk: Callable[[str], bool] = ...
-          ) -> Iterable[str]: ...
+          ) -> Iterator[str]: ...
 
 class HtmlDiff(object):
     def __init__(self, tabsize: int = ..., wrapcolumn: int = ...,
@@ -60,4 +61,4 @@ class HtmlDiff(object):
                    fromdesc: str = ..., todesc: str = ..., context: bool = ...,
                    numlines: int = ...) -> str: ...
 
-def restore(delta: Iterable[str], which: int) -> Iterable[int]: ...
+def restore(delta: Iterable[str], which: int) -> Iterator[int]: ...
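These difflib functions are generators at runtime, hence the Iterator return types; a tiny sketch (illustration only, not part of the upstream diff):

    import difflib
    delta = difflib.unified_diff(['a\n'], ['b\n'], lineterm='')
    print next(delta)    # valid because the result is an iterator, not just an iterable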
diff --git a/typeshed/stdlib/2.7/gettext.pyi b/typeshed/stdlib/2.7/gettext.pyi
index 7c30484..7752aa1 100644
--- a/typeshed/stdlib/2.7/gettext.pyi
+++ b/typeshed/stdlib/2.7/gettext.pyi
@@ -22,7 +22,7 @@ class Translations(object):
     def ugettext(self, message: str) -> unicode: ...
     def ngettext(self, singular: str, plural: str, n: int) -> str: ...
     def lngettext(self, singular: str, plural: str, n: int) -> str: ...
-    def ungettext(self, singular: str, plural: str, n: int) -> str: ...
+    def ungettext(self, singular: str, plural: str, n: int) -> unicode: ...
     def info(self) -> Any: ...
     def charset(self) -> Any: ...
     def output_charset(self) -> Any: ...
diff --git a/typeshed/stdlib/2.7/json.pyi b/typeshed/stdlib/2.7/json.pyi
index d5b7106..7547250 100644
--- a/typeshed/stdlib/2.7/json.pyi
+++ b/typeshed/stdlib/2.7/json.pyi
@@ -1,6 +1,6 @@
 from typing import Any, IO, Optional, Tuple, Callable, Dict, List, Union, Text
 
-class JSONDecodeError(object):
+class JSONDecodeError(ValueError):
     def dumps(self, obj: Any) -> str: ...
     def dump(self, obj: Any, fp: IO[str], *args: Any, **kwds: Any) -> None: ...
     def loads(self, s: str) -> Any: ...
@@ -52,3 +52,47 @@ def load(fp: IO[str],
     parse_constant: Optional[Callable[[str], Any]] = ...,
     object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ...,
     **kwds: Any) -> Any: ...
+
+class JSONDecoder(object):
+    def __init__(self,
+        encoding: Union[Text, bytes] = ...,
+        object_hook: Callable[..., Any] = ...,
+        parse_float: Callable[[str], float] = ...,
+        parse_int: Callable[[str], int] = ...,
+        parse_constant: Callable[[str], Any] = ...,
+        strict: bool = ...,
+        object_pairs_hook: Callable[..., Any] = ...) -> None: ...
+
+    def decode(self, s: Union[Text, bytes], _w = ...) -> Any: ...
+
+    def raw_decode(self,
+        s: Union[Text, bytes],
+        idx: int = ...) -> Tuple[Any, Any]: ...
+
+class JSONEncoder(object):
+    item_separator = ... # type: str
+    key_separator = ... # type: str
+    skipkeys = ... # type: bool
+    ensure_ascii = ... # type: bool
+    check_circular = ... # type: bool
+    allow_nan = ... # type: bool
+    sort_keys = ... # type: bool
+    indent = ... # type: int
+
+    def __init__(self,
+        skipkeys: bool = ...,
+        ensure_ascii: bool = ...,
+        check_circular: bool = ...,
+        allow_nan: bool = ...,
+        sort_keys: bool = ...,
+        indent: int = ...,
+        separators: Tuple[Union[Text, bytes], Union[Text, bytes]] = ...,
+        encoding: Union[Text, bytes] = ...,
+        default: Callable[..., Any] = ...) -> None: ...
+
+    def default(self, o: Any) -> Any: ...
+
+    def encode(self, o: Any) -> str: ...
+
+    def iterencode(self, o: Any, _one_shot: bool = ...) -> str: ...
+
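A brief sketch of the newly stubbed JSONEncoder/JSONDecoder classes (illustration only, not part of the upstream diff):

    import json
    enc = json.JSONEncoder(sort_keys=True)
    print enc.encode({'b': 1, 'a': 2})            # '{"a": 2, "b": 1}'
    print json.JSONDecoder().decode('[1, 2, 3]')  # [1, 2, 3]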
diff --git a/typeshed/stdlib/2.7/logging/__init__.pyi b/typeshed/stdlib/2.7/logging/__init__.pyi
deleted file mode 100644
index 903fb89..0000000
--- a/typeshed/stdlib/2.7/logging/__init__.pyi
+++ /dev/null
@@ -1,239 +0,0 @@
-# Stubs for logging (Python 2.7)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any, Dict, Optional, Sequence, Tuple, overload, Union
-
-CRITICAL = ... # type: int
-FATAL = ... # type: int
-ERROR = ... # type: int
-WARNING = ... # type: int
-WARN = ... # type: int
-INFO = ... # type: int
-DEBUG = ... # type: int
-NOTSET = ... # type: int
-
-def getLevelName(level: int) -> str: ...
-def addLevelName(level: int, levelName: str) -> None: ...
-
-class LogRecord:
-    name = ...  # type: str
-    msg = ...  # type: str
-    args = ...  # type: Sequence[Any]
-    levelname = ...  # type: str
-    levelno = ...  # type: int
-    pathname = ...  # type: str
-    filename = ...  # type: str
-    module = ...  # type: str
-    exc_info = ...  # type: Tuple[Any, Any, Any]
-    exc_text = ...  # type: str
-    lineno = ...  # type: int
-    funcName = ...  # type: Optional[str]
-    created = ...  # type: float
-    msecs = ...  # type: float
-    relativeCreated = ...  # type: float
-    thread = ...  # type: Any
-    threadName = ...  # type: Any
-    processName = ...  # type: Any
-    process = ...  # type: Any
-    def __init__(self, name: str, level: int, pathname: str, lineno: int, msg: str,
-                 args: Sequence[Any], exc_info: Tuple[Any, Any, Any], func: str = ...) -> None: ...
-    def getMessage(self) -> str: ...
-
-def makeLogRecord(dict: Dict[str, Any]) -> LogRecord: ...
-
-class PercentStyle:
-    default_format = ...  # type: Any
-    asctime_format = ...  # type: Any
-    asctime_search = ...  # type: Any
-    def __init__(self, fmt) -> None: ...
-    def usesTime(self) -> bool: ...
-    def format(self, record: LogRecord) -> str: ...
-
-class StrFormatStyle(PercentStyle):
-    default_format = ...  # type: Any
-    asctime_format = ...  # type: Any
-    asctime_search = ...  # type: Any
-    def format(self, record: LogRecord) -> str: ...
-
-class StringTemplateStyle(PercentStyle):
-    default_format = ...  # type: Any
-    asctime_format = ...  # type: Any
-    asctime_search = ...  # type: Any
-    def __init__(self, fmt) -> None: ...
-    def usesTime(self) -> bool: ...
-    def format(self, record: LogRecord) -> str: ...
-
-BASIC_FORMAT = ...  # type: Any
-
-class Formatter:
-    converter = ...  # type: Any
-    datefmt = ...  # type: Any
-    def __init__(self, fmt: str = ..., datefmt: str = ...) -> None: ...
-    default_time_format = ...  # type: Any
-    default_msec_format = ...  # type: Any
-    def formatTime(self, record: LogRecord, datefmt: str =...) -> str: ...
-    def formatException(self, ei) -> str: ...
-    def usesTime(self) -> bool: ...
-    def formatMessage(self, record: LogRecord) -> str: ...
-    def formatStack(self, stack_info) -> str: ...
-    def format(self, record: LogRecord) -> str: ...
-
-class BufferingFormatter:
-    linefmt = ...  # type: Any
-    def __init__(self, linefmt: Formatter =...) -> None: ...
-    def formatHeader(self, records: list[LogRecord]) -> str: ...
-    def formatFooter(self, records: list[LogRecord]) -> str: ...
-    def format(self, records: list[LogRecord]) -> str: ...
-
-class Filter:
-    name = ...  # type: Any
-    nlen = ...  # type: Any
-    def __init__(self, name: str = ...) -> None: ...
-    def filter(self, record: LogRecord) -> int: ...
-
-class Filterer:
-    filters = ...  # type: Any
-    def __init__(self) -> None: ...
-    def addFilter(self, filter: Filter) -> None: ...
-    def removeFilter(self, filter: Filter) -> None: ...
-    def filter(self, record: LogRecord) -> int: ...
-
-class Handler(Filterer):
-    level = ...  # type: Any
-    formatter = ...  # type: Any
-    def __init__(self, level: int = ...) -> None: ...
-    def get_name(self): ...
-    def set_name(self, name) -> None: ...
-    name = ...  # type: Any
-    lock = ...  # type: Any
-    def createLock(self) -> None: ...
-    def acquire(self) -> None: ...
-    def release(self) -> None: ...
-    def setLevel(self, level: int) -> None: ...
-    def format(self, record: LogRecord) -> str: ...
-    def emit(self, record: LogRecord) -> None: ...
-    def handle(self, record: LogRecord) -> Any: ... # Return value undocumented
-    def setFormatter(self, fmt: Formatter) -> None: ...
-    def flush(self) -> None: ...
-    def close(self) -> None: ...
-    def handleError(self, record: LogRecord) -> None: ...
-
-class StreamHandler(Handler):
-    terminator = ...  # type: Any
-    stream = ...  # type: Any
-    def __init__(self, stream=...) -> None: ...
-    def flush(self) -> None: ...
-    def emit(self, record: LogRecord) -> None: ...
-
-class FileHandler(StreamHandler):
-    baseFilename = ...  # type: Any
-    mode = ...  # type: Any
-    encoding = ...  # type: Any
-    delay = ...  # type: Any
-    stream = ...  # type: Any
-    def __init__(self, filename: str, mode: str = ..., encoding: str = ..., delay: int = ...) -> None: ...
-    def close(self) -> None: ...
-    def emit(self, record: LogRecord) -> None: ...
-
-class _StderrHandler(StreamHandler):
-    def __init__(self, level: int =...) -> None: ...
-
-lastResort = ...  # type: Any
-
-class PlaceHolder:
-    loggerMap = ...  # type: Any
-    def __init__(self, alogger) -> None: ...
-    def append(self, alogger) -> None: ...
-
-def setLoggerClass(klass) -> None: ...
-def getLoggerClass(): ...
-
-class Manager:
-    root = ...  # type: Any
-    disable = ...  # type: Any
-    emittedNoHandlerWarning = ...  # type: Any
-    loggerDict = ...  # type: Any
-    loggerClass = ...  # type: Any
-    logRecordFactory = ...  # type: Any
-    def __init__(self, rootnode) -> None: ...
-    def getLogger(self, name: unicode): ...
-    def setLoggerClass(self, klass) -> None: ...
-    def setLogRecordFactory(self, factory) -> None: ...
-
-class Logger(Filterer):
-    name = ...  # type: Any
-    level = ...  # type: Any
-    parent = ...  # type: Any
-    propagate = False
-    handlers = ...  # type: Any
-    disabled = ...  # type: Any
-    def __init__(self, name: str, level: int = ...) -> None: ...
-    def setLevel(self, level: int) -> None: ...
-    def debug(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    def info(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    def warning(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    def warn(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    def error(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    def exception(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    def critical(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    fatal = ...  # type: Any
-    def log(self, level: int, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    def findCaller(self) -> Tuple[str, int, str]: ...
-    def makeRecord(self, name, level: int, fn, lno, msg, args,
-                   exc_info, func=..., extra=..., sinfo=...) -> LogRecord: ...
-    def handle(self, record: LogRecord) -> None: ...
-    def addHandler(self, hdlr: Handler) -> None: ...
-    def removeHandler(self, hdlr: Handler) -> None: ...
-    def hasHandlers(self) -> None: ...
-    def callHandlers(self, record: LogRecord) -> None: ...
-    def getEffectiveLevel(self) -> int: ...
-    def isEnabledFor(self, level: int) -> bool: ...
-    def getChild(self, suffix: str) -> Logger: ...
-
-class RootLogger(Logger):
-    def __init__(self, level: int) -> None: ...
-
-class LoggerAdapter:
-    logger = ...  # type: Any
-    extra = ...  # type: Any
-    def __init__(self, logger, extra) -> None: ...
-    def process(self, msg, kwargs) -> Tuple[Any,Any]: ... # Possible mistake: kwargs missing **
-    def debug(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    def info(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    def warning(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    def warn(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    def error(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    def exception(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    def critical(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    def log(self, level: int, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-    def isEnabledFor(self, level: int) -> bool: ...
-    def setLevel(self, level: int) -> None: ...
-    def getEffectiveLevel(self) -> int: ...
-    def hasHandlers(self) -> bool: ...
-
-def basicConfig(**kwargs) -> None: ...
-def getLogger(name: str = ...) -> Logger: ...
-def critical(msg: str, *args, **kwargs) -> None: ...
-
-fatal = ...  # type: Any
-
-def error(msg: Union[str, unicode], *args, **kwargs) -> None: ...
-@overload
-def exception(msg: Union[str, unicode], *args, **kwargs) -> None: ...
-@overload
-def exception(exception: Exception, *args, **kwargs) -> None: ...
-def warning(msg: Union[str, unicode], *args, **kwargs) -> None: ...
-def warn(msg: Union[str, unicode], *args, **kwargs) -> None: ...
-def info(msg: Union[str, unicode], *args, **kwargs) -> None: ...
-def debug(msg: Union[str, unicode], *args, **kwargs) -> None: ...
-def log(level: int, msg: Union[str, unicode], *args, **kwargs) -> None: ...
-def disable(level: int) -> None: ...
-
-class NullHandler(Handler):
-    def handle(self, record: LogRecord) -> None: ...
-    def emit(self, record: LogRecord) -> None: ...
-    lock = ...  # type: Any
-    def createLock(self): ...
-
-def captureWarnings(capture: bool) -> None: ...
diff --git a/typeshed/stdlib/2.7/logging/handlers.pyi b/typeshed/stdlib/2.7/logging/handlers.pyi
deleted file mode 100644
index 658c768..0000000
--- a/typeshed/stdlib/2.7/logging/handlers.pyi
+++ /dev/null
@@ -1,202 +0,0 @@
-# Stubs for logging.handlers (Python 2.7)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any, Union, Tuple
-from . import LogRecord
-import logging
-import socket
-
-threading = ...  # type: Any
-DEFAULT_TCP_LOGGING_PORT = ...  # type: Any
-DEFAULT_UDP_LOGGING_PORT = ...  # type: Any
-DEFAULT_HTTP_LOGGING_PORT = ...  # type: Any
-DEFAULT_SOAP_LOGGING_PORT = ...  # type: Any
-SYSLOG_UDP_PORT = ...  # type: Any
-SYSLOG_TCP_PORT = ...  # type: Any
-
-class BaseRotatingHandler(logging.FileHandler):
-    mode = ...  # type: Any
-    encoding = ...  # type: Any
-    namer = ...  # type: Any
-    rotator = ...  # type: Any
-    def __init__(self, filename: unicode, mode: unicode, encoding: unicode =..., delay: int =...) -> None: ...
-    def emit(self, record: LogRecord) -> None: ...
-    def rotation_filename(self, default_name: unicode): ...
-    def rotate(self, source, dest): ...
-
-class RotatingFileHandler(BaseRotatingHandler):
-    maxBytes = ...  # type: Any
-    backupCount = ...  # type: Any
-    def __init__(self, filename: unicode, mode: unicode = ..., maxBytes: int = ..., backupCount:int = ...,
-                 encoding: str = ..., delay: int = ...) -> None: ...
-    stream = ...  # type: Any
-    def doRollover(self) -> None: ...
-    def shouldRollover(self, record: LogRecord) -> int: ...
-
-class TimedRotatingFileHandler(BaseRotatingHandler):
-    when = ...  # type: Any
-    backupCount = ...  # type: Any
-    utc = ...  # type: Any
-    atTime = ...  # type: Any
-    interval = ...  # type: Any
-    suffix = ...  # type: Any
-    extMatch = ...  # type: Any
-    dayOfWeek = ...  # type: Any
-    rolloverAt = ...  # type: Any
-    def __init__(self, filename: unicode, when: unicode =..., interval: int =..., backupCount: int =...,
-                 encoding: unicode =..., delay: bool =..., utc: bool =..., atTime=...) -> None: ...
-    def computeRollover(self, currentTime: int) -> int: ...
-    def shouldRollover(self, record: LogRecord) -> int: ...
-    def getFilesToDelete(self) -> list[str]: ...
-    stream = ...  # type: Any
-    def doRollover(self) -> None: ...
-
-class WatchedFileHandler(logging.FileHandler):
-    def __init__(self, filename: str, mode: str = ..., encoding: str = ..., delay: int = ...) -> None: ...
-    stream = ...  # type: Any
-    def emit(self, record: LogRecord) -> None: ...
-
-class SocketHandler(logging.Handler):
-    host = ...  # type: Any
-    port = ...  # type: Any
-    address = ...  # type: Any
-    sock = ...  # type: Any
-    closeOnError = ...  # type: Any
-    retryTime = ...  # type: Any
-    retryStart = ...  # type: Any
-    retryMax = ...  # type: Any
-    retryFactor = ...  # type: Any
-    def __init__(self, host, port) -> None: ...
-    def makeSocket(self, timeout: int =...): ...
-    retryPeriod = ...  # type: Any
-    def createSocket(self) -> None: ...
-    def send(self, s: str) -> None: ...
-    def makePickle(self, record: LogRecord) -> str: ...
-    def handleError(self, record: LogRecord) -> None: ...
-    def emit(self, record: LogRecord) -> None: ...
-    def close(self) -> None: ...
-
-class DatagramHandler(SocketHandler):
-    closeOnError = ...  # type: Any
-    def __init__(self, host, port) -> None: ...
-    def makeSocket(self, timeout: int =...) -> None: ...
-    def send(self, s: str) -> None: ...
-
-class SysLogHandler(logging.Handler):
-    LOG_EMERG = ...  # type: Any
-    LOG_ALERT = ...  # type: Any
-    LOG_CRIT = ...  # type: Any
-    LOG_ERR = ...  # type: Any
-    LOG_WARNING = ...  # type: Any
-    LOG_NOTICE = ...  # type: Any
-    LOG_INFO = ...  # type: Any
-    LOG_DEBUG = ...  # type: Any
-    LOG_KERN = ...  # type: Any
-    LOG_USER = ...  # type: Any
-    LOG_MAIL = ...  # type: Any
-    LOG_DAEMON = ...  # type: Any
-    LOG_AUTH = ...  # type: Any
-    LOG_SYSLOG = ...  # type: Any
-    LOG_LPR = ...  # type: Any
-    LOG_NEWS = ...  # type: Any
-    LOG_UUCP = ...  # type: Any
-    LOG_CRON = ...  # type: Any
-    LOG_AUTHPRIV = ...  # type: Any
-    LOG_FTP = ...  # type: Any
-    LOG_LOCAL0 = ...  # type: Any
-    LOG_LOCAL1 = ...  # type: Any
-    LOG_LOCAL2 = ...  # type: Any
-    LOG_LOCAL3 = ...  # type: Any
-    LOG_LOCAL4 = ...  # type: Any
-    LOG_LOCAL5 = ...  # type: Any
-    LOG_LOCAL6 = ...  # type: Any
-    LOG_LOCAL7 = ...  # type: Any
-    priority_names = ...  # type: Any
-    facility_names = ...  # type: Any
-    priority_map = ...  # type: Any
-    address = ...  # type: Any
-    facility = ...  # type: Any
-    socktype = ...  # type: Any
-    unixsocket = ...  # type: Any
-    socket = ...  # type: Any
-    formatter = ...  # type: Any
-    def __init__(self, address: tuple[str,int] =..., facility: int =..., socktype: int =...) -> None: ...
-    def encodePriority(self, facility: int, priority: Union[basestring,int]) -> int: ...
-    def close(self) -> None: ...
-    def mapPriority(self, levelName: str) -> str: ...
-    ident = ...  # type: Any
-    append_nul = ...  # type: Any
-    def emit(self, record: LogRecord) -> None: ...
-
-class SMTPHandler(logging.Handler):
-    username = ...  # type: Any
-    fromaddr = ...  # type: Any
-    toaddrs = ...  # type: Any
-    subject = ...  # type: Any
-    secure = ...  # type: Any
-    timeout = ...  # type: Any
-    def __init__(self, mailhost, fromaddr, toaddrs, subject: unicode, credentials: Tuple[Any,Any]=...,
-                 secure=...) -> None: ...
-    def getSubject(self, record: LogRecord) -> unicode: ...
-    def emit(self, record: LogRecord) -> None: ...
-
-class NTEventLogHandler(logging.Handler):
-    appname = ...  # type: Any
-    dllname = ...  # type: Any
-    logtype = ...  # type: Any
-    deftype = ...  # type: Any
-    typemap = ...  # type: Any
-    def __init__(self, appname, dllname=..., logtype: str =...) -> None: ...
-    def getMessageID(self, record: LogRecord) -> int: ...
-    def getEventCategory(self, record: LogRecord) -> int: ...
-    def getEventType(self, record: LogRecord): ...
-    def emit(self, record: LogRecord) -> None: ...
-    def close(self) -> None: ...
-
-class HTTPHandler(logging.Handler):
-    host = ...  # type: Any
-    url = ...  # type: Any
-    method = ...  # type: Any
-    secure = ...  # type: Any
-    credentials = ...  # type: Any
-    def __init__(self, host, url, method: str =..., secure=..., credentials=...) -> None: ...
-    def mapLogRecord(self, record: LogRecord) -> dict[Any,Any]: ...
-    def emit(self, record: LogRecord) -> None: ...
-
-class BufferingHandler(logging.Handler):
-    capacity = ...  # type: Any
-    buffer = ...  # type: Any
-    def __init__(self, capacity: int) -> None: ...
-    def shouldFlush(self, record: LogRecord) -> bool: ...
-    def emit(self, record: LogRecord) -> None: ...
-    def flush(self) -> None: ...
-    def close(self) -> None: ...
-
-class MemoryHandler(BufferingHandler):
-    flushLevel = ...  # type: Any
-    target = ...  # type: Any
-    def __init__(self, capacity: int, flushLevel: int =..., target=...) -> None: ...
-    def shouldFlush(self, record: LogRecord) -> bool: ...
-    def setTarget(self, target) -> None: ...
-    buffer = ...  # type: Any
-    def flush(self) -> None: ...
-    def close(self) -> None: ...
-
-class QueueHandler(logging.Handler):
-    queue = ...  # type: Any
-    def __init__(self, queue) -> None: ...
-    def enqueue(self, record: LogRecord): ...
-    def prepare(self, record: LogRecord): ...
-    def emit(self, record: LogRecord) -> None: ...
-
-class QueueListener:
-    queue = ...  # type: Any
-    handlers = ...  # type: Any
-    def __init__(self, queue, *handlers) -> None: ...
-    def dequeue(self, block): ...
-    def start(self) -> None: ...
-    def prepare(self, record: LogRecord): ...
-    def handle(self, record: LogRecord): ...
-    def enqueue_sentinel(self): ...
-    def stop(self) -> None: ...
diff --git a/typeshed/stdlib/2.7/os/__init__.pyi b/typeshed/stdlib/2.7/os/__init__.pyi
index 55145c8..3f9aa0d 100644
--- a/typeshed/stdlib/2.7/os/__init__.pyi
+++ b/typeshed/stdlib/2.7/os/__init__.pyi
@@ -1,7 +1,8 @@
 # created from https://docs.python.org/2/library/os.html
 
 from typing import (
-    List, Tuple, Union, Sequence, Mapping, IO, Any, Optional, AnyStr, Iterator, MutableMapping
+    List, Tuple, Union, Sequence, Mapping, IO, Any, Optional, AnyStr, Iterator,
+    MutableMapping, NamedTuple
 )
 import os.path as path
 
@@ -30,8 +31,8 @@ def getppid() -> int: ...
 def getresuid() -> Tuple[int, int, int]: ...
 def getresgid() -> Tuple[int, int, int]: ...
 def getuid() -> int: ...
-def getenv(varname: str, value: str = ...) -> str: ...
-def putenv(varname: str, value: str) -> None: ...
+def getenv(varname: unicode, value: unicode = ...) -> str: ...
+def putenv(varname: unicode, value: unicode) -> None: ...
 def setegid(egid: int) -> None: ...
 def seteuid(euid: int) -> None: ...
 def setgid(gid: int) -> None: ...
@@ -76,7 +77,6 @@ def fpathconf(fd: int, name: str) -> None: ...
 
 # TODO(prvak)
 def fstat(fd: int) -> Any: ...
-def fstatvfs(fd: int) -> Any: ...
 def fsync(fd: int) -> None: ...
 def ftruncate(fd: int, length: int) -> None: ...
 def isatty(fd: int) -> bool: ...
@@ -140,7 +140,15 @@ def rmdir(path: unicode) -> None: ...
 # TODO(MichalPokorny)
 def stat(path: unicode) -> Any: ...
 
-# TODO: stat_float_times, statvfs, tempnam, tmpnam, TMP_MAX
+_StatVFS = NamedTuple('_StatVFS', [('f_bsize', int), ('f_frsize', int), ('f_blocks', int),
+                                   ('f_bfree', int), ('f_bavail', int), ('f_files', int),
+                                   ('f_ffree', int), ('f_favail', int), ('f_flag', int),
+                                   ('f_namemax', int)])
+
+def fstatvfs(fd: int) -> _StatVFS: ...
+def statvfs(path: unicode) -> _StatVFS: ...
+
+# TODO: stat_float_times, tempnam, tmpnam, TMP_MAX
 def walk(top: AnyStr, topdown: bool = ..., onerror: Any = ...,
          followlinks: bool = ...) -> Iterator[Tuple[AnyStr, List[AnyStr],
                                                     List[AnyStr]]]: ...
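A small sketch of the new _StatVFS named tuple (illustration only, not part of the upstream diff; statvfs is POSIX-only at runtime):

    import os
    st = os.statvfs('/')
    free_bytes = st.f_bavail * st.f_frsize   # fields come from the _StatVFS NamedTuple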
diff --git a/typeshed/stdlib/2.7/re.pyi b/typeshed/stdlib/2.7/re.pyi
index 7c47bcd..4c69094 100644
--- a/typeshed/stdlib/2.7/re.pyi
+++ b/typeshed/stdlib/2.7/re.pyi
@@ -6,7 +6,7 @@
 
 from typing import (
     List, Iterator, overload, Callable, Tuple, Sequence, Dict,
-    Generic, AnyStr, Match, Pattern
+    Generic, AnyStr, Match, Pattern, Any, Union
 )
 
 # ----- re variables and constants -----
@@ -34,63 +34,63 @@ def compile(pattern: AnyStr, flags: int = ...) -> Pattern[AnyStr]: ...
 def compile(pattern: Pattern[AnyStr], flags: int = ...) -> Pattern[AnyStr]: ...
 
 @overload
-def search(pattern: AnyStr, string: AnyStr, flags: int = ...) -> Match[AnyStr]: ...
+def search(pattern: Union[str, unicode], string: AnyStr, flags: int = ...) -> Match[AnyStr]: ...
 @overload
-def search(pattern: Pattern[AnyStr], string: AnyStr, flags: int = ...) -> Match[AnyStr]: ...
+def search(pattern: Union[Pattern[str],Pattern[unicode]], string: AnyStr, flags: int = ...) -> Match[AnyStr]: ...
 
 @overload
-def match(pattern: AnyStr, string: AnyStr, flags: int = ...) -> Match[AnyStr]: ...
+def match(pattern: Union[str, unicode], string: AnyStr, flags: int = ...) -> Match[AnyStr]: ...
 @overload
-def match(pattern: Pattern[AnyStr], string: AnyStr, flags: int = ...) -> Match[AnyStr]: ...
+def match(pattern: Union[Pattern[str],Pattern[unicode]], string: AnyStr, flags: int = ...) -> Match[AnyStr]: ...
 
 @overload
-def split(pattern: AnyStr, string: AnyStr,
+def split(pattern: Union[str, unicode], string: AnyStr,
           maxsplit: int = ..., flags: int = ...) -> List[AnyStr]: ...
 @overload
-def split(pattern: Pattern[AnyStr], string: AnyStr,
+def split(pattern: Union[Pattern[str],Pattern[unicode]], string: AnyStr,
           maxsplit: int = ..., flags: int = ...) -> List[AnyStr]: ...
 
 @overload
-def findall(pattern: AnyStr, string: AnyStr, flags: int = ...) -> List[AnyStr]: ...
+def findall(pattern: Union[str, unicode], string: AnyStr, flags: int = ...) -> List[Any]: ...
 @overload
-def findall(pattern: Pattern[AnyStr], string: AnyStr, flags: int = ...) -> List[AnyStr]: ...
+def findall(pattern: Union[Pattern[str],Pattern[unicode]], string: AnyStr, flags: int = ...) -> List[Any]: ...
 
 # Return an iterator yielding match objects over all non-overlapping matches
 # for the RE pattern in string. The string is scanned left-to-right, and
 # matches are returned in the order found. Empty matches are included in the
 # result unless they touch the beginning of another match.
 @overload
-def finditer(pattern: AnyStr, string: AnyStr,
+def finditer(pattern: Union[str, unicode], string: AnyStr,
              flags: int = ...) -> Iterator[Match[AnyStr]]: ...
 @overload
-def finditer(pattern: Pattern[AnyStr], string: AnyStr,
+def finditer(pattern: Union[Pattern[str],Pattern[unicode]], string: AnyStr,
              flags: int = ...) -> Iterator[Match[AnyStr]]: ...
 
 @overload
-def sub(pattern: AnyStr, repl: AnyStr, string: AnyStr, count: int = ...,
+def sub(pattern: Union[str, unicode], repl: AnyStr, string: AnyStr, count: int = ...,
         flags: int = ...) -> AnyStr: ...
 @overload
-def sub(pattern: AnyStr, repl: Callable[[Match[AnyStr]], AnyStr],
+def sub(pattern: Union[str, unicode], repl: Callable[[Match[AnyStr]], AnyStr],
         string: AnyStr, count: int = ..., flags: int = ...) -> AnyStr: ...
 @overload
-def sub(pattern: Pattern[AnyStr], repl: AnyStr, string: AnyStr, count: int = ...,
+def sub(pattern: Union[Pattern[str],Pattern[unicode]], repl: AnyStr, string: AnyStr, count: int = ...,
         flags: int = ...) -> AnyStr: ...
 @overload
-def sub(pattern: Pattern[AnyStr], repl: Callable[[Match[AnyStr]], AnyStr],
+def sub(pattern: Union[Pattern[str],Pattern[unicode]], repl: Callable[[Match[AnyStr]], AnyStr],
         string: AnyStr, count: int = ..., flags: int = ...) -> AnyStr: ...
 
 @overload
-def subn(pattern: AnyStr, repl: AnyStr, string: AnyStr, count: int = ...,
+def subn(pattern: Union[str, unicode], repl: AnyStr, string: AnyStr, count: int = ...,
          flags: int = ...) -> Tuple[AnyStr, int]: ...
 @overload
-def subn(pattern: AnyStr, repl: Callable[[Match[AnyStr]], AnyStr],
+def subn(pattern: Union[str, unicode], repl: Callable[[Match[AnyStr]], AnyStr],
          string: AnyStr, count: int = ...,
          flags: int = ...) -> Tuple[AnyStr, int]: ...
 @overload
-def subn(pattern: Pattern[AnyStr], repl: AnyStr, string: AnyStr, count: int = ...,
+def subn(pattern: Union[Pattern[str],Pattern[unicode]], repl: AnyStr, string: AnyStr, count: int = ...,
          flags: int = ...) -> Tuple[AnyStr, int]: ...
 @overload
-def subn(pattern: Pattern[AnyStr], repl: Callable[[Match[AnyStr]], AnyStr],
+def subn(pattern: Union[Pattern[str],Pattern[unicode]], repl: Callable[[Match[AnyStr]], AnyStr],
          string: AnyStr, count: int = ...,
          flags: int = ...) -> Tuple[AnyStr, int]: ...
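The widened pattern parameters cover the common Python 2 case of a str pattern applied to a unicode string; a minimal sketch (illustration only, not part of the upstream diff):

    import re
    m = re.search(r'\d+', u'abc 123')   # str pattern, unicode subject
    if m:
        print m.group(0)                # u'123'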
 
diff --git a/typeshed/stdlib/2.7/socket.pyi b/typeshed/stdlib/2.7/socket.pyi
index 5b71485..e12e35c 100644
--- a/typeshed/stdlib/2.7/socket.pyi
+++ b/typeshed/stdlib/2.7/socket.pyi
@@ -316,8 +316,8 @@ class socket:
                       flags: int = ...) -> Any: ...
     def recv_into(self, buffer: str, nbytes: int,
                   flags: int = ...) -> Any: ...
-    def send(self, data: str, flags=...) -> int: ...
-    def sendall(self, data: str, flags=...) -> Any:
+    def send(self, data: str, flags: int = ...) -> int: ...
+    def sendall(self, data: str, flags: int = ...) -> None:
         ... # return type: None on success
     def sendto(self, data: str, address: Union[tuple, str], flags: int = ...) -> int: ...
     def setblocking(self, flag: bool) -> None: ...
diff --git a/typeshed/stdlib/2.7/stat.pyi b/typeshed/stdlib/2.7/stat.pyi
index a83d880..b158ff7 100644
--- a/typeshed/stdlib/2.7/stat.pyi
+++ b/typeshed/stdlib/2.7/stat.pyi
@@ -19,13 +19,13 @@ ST_SIZE = 0
 ST_ATIME = 0
 ST_MTIME = 0
 ST_CTIME = 0
-ST_IFSOCK = 0
-ST_IFLNK = 0
-ST_IFREG = 0
-ST_IFBLK = 0
-ST_IFDIR = 0
-ST_IFCHR = 0
-ST_IFIFO = 0
+S_IFSOCK = 0
+S_IFLNK = 0
+S_IFREG = 0
+S_IFBLK = 0
+S_IFDIR = 0
+S_IFCHR = 0
+S_IFIFO = 0
 S_ISUID = 0
 S_ISGID = 0
 S_ISVTX = 0
diff --git a/typeshed/stdlib/2.7/subprocess.pyi b/typeshed/stdlib/2.7/subprocess.pyi
index 7a42eff..df8da0a 100644
--- a/typeshed/stdlib/2.7/subprocess.pyi
+++ b/typeshed/stdlib/2.7/subprocess.pyi
@@ -6,20 +6,51 @@ from typing import Sequence, Any, Mapping, Callable, Tuple, IO, Union, Optional
 
 _FILE = Union[int, IO[Any]]
 
-# TODO force keyword arguments
-# TODO more keyword arguments (from Popen)
-def call(args: Union[str, Sequence[str]], *,
-         stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ...,
-         shell: bool = ..., env: Mapping[str, str] = ...,
-         cwd: str = ...) -> int: ...
-def check_call(args: Union[str, Sequence[str]], *,
-               stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ...,
-               shell: bool = ..., env: Mapping[str, str] = ..., cwd: str = ...,
-               close_fds: Sequence[_FILE] = ..., preexec_fn: Callable[[], Any] = ...) -> int: ...
-def check_output(args: Union[str, Sequence[str]], *,
-                 stdin: _FILE = ..., stderr: _FILE = ...,
-                 shell: bool = ..., universal_newlines: bool = ...,
-                 env: Mapping[str, str] = ..., cwd: str = ...) -> str: ...
+# Same args as Popen.__init__
+def call(args: Union[str, Sequence[str]],
+         bufsize: int = ...,
+         executable: str = ...,
+         stdin: _FILE = ...,
+         stdout: _FILE = ...,
+         stderr: _FILE = ...,
+         preexec_fn: Callable[[], Any] = ...,
+         close_fds: bool = ...,
+         shell: bool = ...,
+         cwd: str = ...,
+         env: Mapping[str, str] = ...,
+         universal_newlines: bool = ...,
+         startupinfo: Any = ...,
+         creationflags: int = ...) -> int: ...
+
+def check_call(args: Union[str, Sequence[str]],
+               bufsize: int = ...,
+               executable: str = ...,
+               stdin: _FILE = ...,
+               stdout: _FILE = ...,
+               stderr: _FILE = ...,
+               preexec_fn: Callable[[], Any] = ...,
+               close_fds: bool = ...,
+               shell: bool = ...,
+               cwd: str = ...,
+               env: Mapping[str, str] = ...,
+               universal_newlines: bool = ...,
+               startupinfo: Any = ...,
+               creationflags: int = ...) -> int: ...
+
+# Same args as Popen.__init__ except for stdout
+def check_output(args: Union[str, Sequence[str]],
+                 bufsize: int = ...,
+                 executable: str = ...,
+                 stdin: _FILE = ...,
+                 stderr: _FILE = ...,
+                 preexec_fn: Callable[[], Any] = ...,
+                 close_fds: bool = ...,
+                 shell: bool = ...,
+                 cwd: str = ...,
+                 env: Mapping[str, str] = ...,
+                 universal_newlines: bool = ...,
+                 startupinfo: Any = ...,
+                 creationflags: int = ...) -> str: ...
 
 PIPE = ... # type: int
 STDOUT = ... # type: int
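With call/check_call/check_output now mirroring Popen.__init__, calls like the following type-check; a minimal sketch (illustration only, not part of the upstream diff; the paths are hypothetical):

    import subprocess
    out = subprocess.check_output(['ls', '-l'], cwd='/tmp', close_fds=True)
    subprocess.check_call(['true'], env={'LC_ALL': 'C'})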
diff --git a/typeshed/stdlib/2.7/sys.pyi b/typeshed/stdlib/2.7/sys.pyi
index a1c0247..3576159 100644
--- a/typeshed/stdlib/2.7/sys.pyi
+++ b/typeshed/stdlib/2.7/sys.pyi
@@ -108,7 +108,11 @@ def __displayhook__(value: int) -> None: ...
 def __excepthook__(type_: type, value: BaseException, traceback: TracebackType) -> None: ...
 def exc_clear() -> None:
     raise DeprecationWarning()
-def exc_info() -> Tuple[type, BaseException, TracebackType]: ...
+# TODO should be a union of tuple, see mypy#1178
+def exc_info() -> Tuple[Optional[type],
+                        Optional[BaseException],
+                        Optional[TracebackType]]: ...
+
 # sys.exit() accepts an optional argument of anything printable
 def exit(arg: Any = ...) -> None:
     raise SystemExit()
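Because every component of exc_info() may be None outside an active handler, callers are expected to narrow; a minimal sketch (illustration only, not part of the upstream diff):

    import sys
    exc_type, exc_value, tb = sys.exc_info()
    if exc_type is not None:
        print exc_type.__name__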
diff --git a/typeshed/stdlib/2.7/tarfile.pyi b/typeshed/stdlib/2.7/tarfile.pyi
index d9a4d50..6672135 100644
--- a/typeshed/stdlib/2.7/tarfile.pyi
+++ b/typeshed/stdlib/2.7/tarfile.pyi
@@ -235,3 +235,5 @@ class TarFileCompat:
     def close(self): ...
 
 def is_tarfile(name): ...
+
+open = TarFile.open
diff --git a/typeshed/stdlib/2.7/token.pyi b/typeshed/stdlib/2.7/token.pyi
index d0c8412..1c14dc4 100644
--- a/typeshed/stdlib/2.7/token.pyi
+++ b/typeshed/stdlib/2.7/token.pyi
@@ -55,7 +55,7 @@ OP = 0
 ERRORTOKEN = 0
 N_TOKENS = 0
 NT_OFFSET = 0
-tok_name = {} # type: Dict[int, str]
+tok_name = ...  # type: Dict[int, str]
 
 def ISTERMINAL(x) -> bool: ...
 def ISNONTERMINAL(x) -> bool: ...
diff --git a/typeshed/stdlib/2.7/traceback.pyi b/typeshed/stdlib/2.7/traceback.pyi
index 4101c20..6f7387a 100644
--- a/typeshed/stdlib/2.7/traceback.pyi
+++ b/typeshed/stdlib/2.7/traceback.pyi
@@ -12,8 +12,9 @@ def print_stack(f: FrameType = ..., limit: int = ..., file: IO[AnyStr] = ...) ->
 def extract_tb(f: TracebackType, limit: int = ...) -> ExtractTbResult: ...
 def extract_stack(f: FrameType = ..., limit: int = ...) -> ExtractTbResult: ...
 def format_list(list: ExtractTbResult) -> List[str]: ...
-def format_exception_only(type: type, value: List[str]) -> str: ...
-def format_exception(type: type, value: List[str], tb: TracebackType, limit: int = ...) -> str: ...
+def format_exception_only(type: type, value: List[str]) -> List[str]: ...
+def format_exception(type: type, value: BaseException, tb: TracebackType, limit: int = ...) -> List[str]: ...
 def format_tb(f: TracebackType, limit: int = ...) -> List[str]: ...
 def format_stack(f: FrameType = ..., limit: int = ...) -> List[str]: ...
 def tb_lineno(tb: TracebackType) -> AnyStr: ...
+def _print(f: IO[str], str: str = ..., terminator: str = ...) -> None: ...
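format_exception_only and format_exception return lists of strings, matching the runtime; a minimal sketch (illustration only, not part of the upstream diff):

    import sys, traceback
    try:
        1 / 0
    except ZeroDivisionError:
        lines = traceback.format_exception(*sys.exc_info())
        print ''.join(lines)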
diff --git a/typeshed/stdlib/2.7/typing.pyi b/typeshed/stdlib/2.7/typing.pyi
index 9d74469..bee6314 100644
--- a/typeshed/stdlib/2.7/typing.pyi
+++ b/typeshed/stdlib/2.7/typing.pyi
@@ -181,12 +181,14 @@ class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]):
     def popitem(self) -> Tuple[_KT, _VT]: ...
     def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
     @overload
-    def update(self, m: Mapping[_KT, _VT]) -> None: ...
+    def update(self, m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
     @overload
-    def update(self, m: Iterable[Tuple[_KT, _VT]]) -> None: ...
+    def update(self, m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
 
 Text = unicode
 
+TYPE_CHECKING = True
+
 class IO(Iterable[AnyStr], Generic[AnyStr]):
     # TODO detach
     # TODO use abstract properties
@@ -262,12 +264,12 @@ class Match(Generic[AnyStr]):
     pos = 0
     endpos = 0
     lastindex = 0
-    lastgroup = None  # type: AnyStr
-    string = None  # type: AnyStr
+    lastgroup = ...  # type: AnyStr
+    string = ...  # type: AnyStr
 
     # The regular expression object whose match() or search() method produced
     # this match instance.
-    re = None  # type: 'Pattern[AnyStr]'
+    re = ...  # type: 'Pattern[AnyStr]'
 
     def expand(self, template: AnyStr) -> AnyStr: ...
 
@@ -292,7 +294,7 @@ class Pattern(Generic[AnyStr]):
     flags = 0
     groupindex = 0
     groups = 0
-    pattern = None  # type: AnyStr
+    pattern = ...  # type: AnyStr
 
     def search(self, string: AnyStr, pos: int = ...,
                endpos: int = ...) -> Match[AnyStr]: ...
@@ -300,7 +302,7 @@ class Pattern(Generic[AnyStr]):
               endpos: int = ...) -> Match[AnyStr]: ...
     def split(self, string: AnyStr, maxsplit: int = ...) -> list[AnyStr]: ...
     def findall(self, string: AnyStr, pos: int = ...,
-                endpos: int = ...) -> list[AnyStr]: ...
+                endpos: int = ...) -> list[Any]: ...
     def finditer(self, string: AnyStr, pos: int = ...,
                  endpos: int = ...) -> Iterator[Match[AnyStr]]: ...
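
The **kwargs parameter added to MutableMapping.update above mirrors dict.update, which also accepts keyword arguments whose values must match the mapping's value type. A small illustration of what the new overloads are meant to accept (assuming a plain Dict[str, int]):

    from typing import Dict

    scores = {}  # type: Dict[str, int]
    scores.update({'alice': 3}, bob=5)        # ok: both values are int
    scores.update([('carol', 7)], dave=9)     # ok: iterable of pairs plus kwargs
    # scores.update(eve='oops')               # rejected: str is not int
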
 
diff --git a/typeshed/stdlib/2.7/unittest.pyi b/typeshed/stdlib/2.7/unittest.pyi
index a2ad502..a914cec 100644
--- a/typeshed/stdlib/2.7/unittest.pyi
+++ b/typeshed/stdlib/2.7/unittest.pyi
@@ -73,9 +73,6 @@ class TestCase(Testable):
                         msg: object = ...) -> None: ...
     def assertNotEqual(self, first: Any, second: Any,
                        msg: object = ...) -> None: ...
-    def assertSequenceEqual(self, first: Sequence[Any], second: Sequence[Any],
-                            msg: object = ...,
-                            seq_type: type = ...) -> None: ...
     def failIfEqual(self, first: Any, second: Any,
                     msg: object = ...) -> None: ...
     def assertAlmostEqual(self, first: float, second: float, places: int = ...,
@@ -92,7 +89,20 @@ class TestCase(Testable):
     def assertGreater(self, first: Any, second: Any,
                       msg: object = ...) -> None: ...
     def assertGreaterEqual(self, first: Any, second: Any,
-                      msg: object = ...) -> None: ...
+                           msg: object = ...) -> None: ...
+    def assertMultiLineEqual(self, first: str, second: str,
+                             msg: object = ...) -> None: ...
+    def assertSequenceEqual(self, first: Sequence[Any], second: Sequence[Any],
+                            msg: object = ...,
+                            seq_type: type = ...) -> None: ...
+    def assertListEqual(self, first: List[Any], second: List[Any],
+                        msg: object = ...) -> None: ...
+    def assertTupleEqual(self, first: Tuple[Any, ...], second: Tuple[Any, ...],
+                         msg: object = ...) -> None: ...
+    def assertSetEqual(self, first: Set[Any], second: Set[Any],
+                       msg: object = ...) -> None: ...
+    def assertDictEqual(self, first: Dict[Any, Any], second: Dict[Any, Any],
+                        msg: object = ...) -> None: ...
     def assertLess(self, first: Any, second: Any,
                    msg: object = ...) -> None: ...
     def assertLessEqual(self, first: Any, second: Any,
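
The assertion methods added to TestCase above (assertMultiLineEqual, assertListEqual, assertTupleEqual, assertSetEqual, assertDictEqual, plus the relocated assertSequenceEqual) are the type-specific equality checks from the 2.7 unittest module. A minimal sketch of a test that exercises them:

    import unittest

    class ContainerAssertions(unittest.TestCase):
        def test_equality_helpers(self):
            self.assertListEqual([1, 2, 3], [1, 2, 3])
            self.assertTupleEqual((1, 2), (1, 2))
            self.assertSetEqual({'a', 'b'}, {'b', 'a'})
            self.assertDictEqual({'k': 1}, {'k': 1})
            self.assertMultiLineEqual('line1\nline2', 'line1\nline2')

    if __name__ == '__main__':
        unittest.main()
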
diff --git a/typeshed/stdlib/2.7/urllib2.pyi b/typeshed/stdlib/2.7/urllib2.pyi
index 471b493..fba73a0 100644
--- a/typeshed/stdlib/2.7/urllib2.pyi
+++ b/typeshed/stdlib/2.7/urllib2.pyi
@@ -1,4 +1,5 @@
 
+from typing import AnyStr, Dict, List
 from urllib import addinfourl
 
 class URLError(IOError): ...
@@ -149,3 +150,6 @@ class CacheFTPHandler(FTPHandler):
     def setMaxConns(self, m): ...
     def check_cache(self): ...
     def clear_cache(self): ...
+
+def parse_http_list(s: AnyStr) -> List[AnyStr]: ...
+def parse_keqv_list(l: List[AnyStr]) -> Dict[AnyStr, AnyStr]: ...
diff --git a/typeshed/stdlib/2.7/urlparse.pyi b/typeshed/stdlib/2.7/urlparse.pyi
index aacc80b..823fd5d 100644
--- a/typeshed/stdlib/2.7/urlparse.pyi
+++ b/typeshed/stdlib/2.7/urlparse.pyi
@@ -2,12 +2,12 @@
 
 from typing import Dict, List, NamedTuple, Tuple, Sequence, Union, overload
 
-uses_relative = []  # type: List[str]
-uses_netloc = []  # type: List[str]
-uses_params = []  # type: List[str]
-non_hierarchical = []  # type: List[str]
-uses_query = []  # type: List[str]
-uses_fragment = []  # type: List[str]
+uses_relative = ...  # type: List[str]
+uses_netloc = ...  # type: List[str]
+uses_params = ...  # type: List[str]
+non_hierarchical = ...  # type: List[str]
+uses_query = ...  # type: List[str]
+uses_fragment = ...  # type: List[str]
 scheme_chars = ...  # type: str
 MAX_CACHE_SIZE = 0
 
diff --git a/typeshed/stdlib/2.7/xml/etree/ElementTree.pyi b/typeshed/stdlib/2.7/xml/etree/ElementTree.pyi
index b212430..d7c68ac 100644
--- a/typeshed/stdlib/2.7/xml/etree/ElementTree.pyi
+++ b/typeshed/stdlib/2.7/xml/etree/ElementTree.pyi
@@ -21,12 +21,12 @@ class ParseError(SyntaxError): ...
 
 def iselement(element: 'Element') -> bool: ...
 
-class Element:
+class Element(Sequence['Element']):
     tag = ... # type: _str_or_bytes
     attrib = ... # type: Dict[_str_or_bytes, _str_or_bytes]
     text = ... # type: Optional[_str_or_bytes]
     tail = ... # type: Optional[_str_or_bytes]
-    def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> None: ...
+    def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> None: ...
     def append(self, element: 'Element') -> None: ...
     def clear(self) -> None: ...
     def copy(self) -> 'Element': ...
@@ -52,7 +52,7 @@ class Element:
     def __len__(self) -> int: ...
     def __setitem__(self, index: int, element: 'Element') -> None: ...
 
-def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> Element: ...
+def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> Element: ...
 def Comment(text: _str_or_bytes=...) -> Element: ...
 def ProcessingInstruction(target: str, text: str=...) -> Element: ...
 
diff --git a/typeshed/stdlib/2and3/argparse.pyi b/typeshed/stdlib/2and3/argparse.pyi
new file mode 100644
index 0000000..da1f6fd
--- /dev/null
+++ b/typeshed/stdlib/2and3/argparse.pyi
@@ -0,0 +1,154 @@
+# Stubs for argparse (Python 3.4)
+
+from typing import (
+    Any, Callable, Iterable, List, IO, Optional, Sequence, Tuple, Type, Union,
+    TypeVar, overload
+)
+import sys
+
+_T = TypeVar('_T')
+
+
+ONE_OR_MORE = ...  # type: str
+OPTIONAL = ...  # type: str
+PARSER = ...  # type: str
+REMAINDER = ...  # type: str
+SUPPRESS = ...  # type: str
+ZERO_OR_MORE = ...  # type: str
+
+class ArgumentError(Exception): ...
+
+class ArgumentParser:
+    if sys.version_info >= (3, 5):
+        def __init__(self,
+                     prog: Optional[str] = ...,
+                     usage: Optional[str] = ...,
+                     description: Optional[str] = ...,
+                     epilog: Optional[str] = ...,
+                     parents: List[ArgumentParser] = ...,
+                     formatter_class: Type[HelpFormatter] = ...,
+                     prefix_chars: str = ...,
+                     fromfile_prefix_chars: Optional[str] = ...,
+                     argument_default: Optional[str] = ...,
+                     conflict_handler: str = ...,
+                     add_help: bool = ...,
+                     allow_abbrev: bool = ...) -> None: ...
+    else:
+        def __init__(self,  # type: ignore
+                     prog: Optional[str] = ...,
+                     usage: Optional[str] = ...,
+                     description: Optional[str] = ...,
+                     epilog: Optional[str] = ...,
+                     parents: List[ArgumentParser] = ...,
+                     formatter_class: Type[HelpFormatter] = ...,
+                     prefix_chars: str = ...,
+                     fromfile_prefix_chars: Optional[str] = ...,
+                     argument_default: Optional[str] = ...,
+                     conflict_handler: str = ...,
+                     add_help: bool = ...) -> None: ...
+    def add_argument(self,
+                     *name_or_flags: Union[str, List[str]],
+                     action: Union[str, Action] = ...,
+                     nargs: Union[int, str] = ...,
+                     const: Any = ...,
+                     default: Any = ...,
+                     type: Callable[[str], _T] = ...,
+                     choices: Iterable[_T] = ...,
+                     required: bool = ...,
+                     help: str = ...,
+                     metavar: Union[str, Tuple[str, ...]] = ...,
+                     dest: str = ...,
+                     version: str = ...) -> None: ...  # weirdly documented
+    def parse_args(self, args: Optional[Sequence[str]] = ...,
+                   namespace: Optional[Namespace] = ...) -> Namespace: ...
+    def add_subparsers(self, title: str = ...,
+                       description: Optional[str] = ...,
+                       prog: str = ...,
+                       parser_class: Type[ArgumentParser] = ...,
+                       action: Type[Action] = ...,
+                       option_string: str = ...,
+                       dest: Optional[str] = ...,
+                       help: Optional[str] = ...,
+                       metavar: Optional[str] = ...) -> _SubParsersAction: ...
+    def add_argument_group(self, title: Optional[str] = ...,
+                           description: Optional[str] = ...) -> _ArgumentGroup: ...
+    def add_mutually_exclusive_group(self, required: bool = ...) -> _MutuallyExclusiveGroup: ...
+    def set_defaults(self, **kwargs: Any) -> None: ...
+    def get_default(self, dest: str) -> Any: ...
+    def print_usage(self, file: Optional[IO[str]] = ...) -> None: ...
+    def print_help(self, file: Optional[IO[str]] = ...) -> None: ...
+    def format_usage(self) -> str: ...
+    def format_help(self) -> str: ...
+    def parse_known_args(self, args: Optional[Sequence[str]] = ...,
+                         namespace: Optional[Namespace] = ...) \
+                         -> Tuple[Namespace, List[str]]: ...
+    def convert_arg_line_to_args(self, arg_line: str) -> List[str]: ...
+    def exit(self, status: int = ..., message: Optional[str] = ...) -> None: ...
+    def error(self, message: str) -> None: ...
+
+class HelpFormatter:
+    # not documented
+    def __init__(self, prog: str, indent_increment: int = ...,
+                 max_help_position: int = ...,
+                 width: Optional[int] = ...) -> None: ...
+class RawDescriptionHelpFormatter(HelpFormatter): ...
+class RawTextHelpFormatter(HelpFormatter): ...
+class ArgumentDefaultsHelpFormatter(HelpFormatter): ...
+if sys.version_info >= (3,):
+    class MetavarTypeHelpFormatter(HelpFormatter): ...
+
+class Action:
+    def __init__(self,
+                 option_strings: List[str],
+                 dest: str = ...,
+                 nargs: Optional[Union[int, str]] = ...,
+                 const: Any = ...,
+                 default: Any = ...,
+                 type: Optional[Callable[[str], _T]] = ...,
+                 choices: Optional[Iterable[_T]] = ...,
+                 required: bool = ...,
+                 help: Optional[str] = ...,
+                 metavar: Union[str, Tuple[str, ...]] = ...) -> None: ...
+    def __call__(self, parser: ArgumentParser, namespace: Namespace,
+                 values: List[Any], option_string: str = ...) -> None: ...
+
+class Namespace:
+    def __getattr__(self, name: str) -> Any: ...
+    def __setattr__(self, name: str, value: Any) -> None: ...
+
+class FileType:
+    if sys.version_info >= (3, 4):
+        def __init__(self, mode: str = ..., bufsize: int = ...,
+                     encoding: Optional[str] = ...,
+                     errors: Optional[str] = ...) -> None: ...
+    elif sys.version_info >= (3,):
+        def __init__(self,  # type: ignore
+                     mode: str = ..., bufsize: int = ...) -> None: ...
+    else:
+        def __init__(self,  # type: ignore
+                     mode: str = ..., bufsize: Optional[int] = ...) -> None: ...
+    def __call__(self, string: str) -> IO[Any]: ...
+
+class _ArgumentGroup:
+    def add_argument(self,
+                     *name_or_flags: Union[str, List[str]],
+                     action: Union[str, Action] = ...,
+                     nargs: Union[int, str] = ...,
+                     const: Any = ...,
+                     default: Any = ...,
+                     type: Callable[[str], _T] = ...,
+                     choices: Iterable[_T] = ...,
+                     required: bool = ...,
+                     help: str = ...,
+                     metavar: Union[str, Tuple[str, ...]] = ...,
+                     dest: str = ...,
+                     version: str = ...) -> None: ...
+
+class _MutuallyExclusiveGroup(_ArgumentGroup): ...
+
+class _SubParsersAction:
+    # TODO: Type keyword args properly.
+    def add_parser(self, name: str, **kwargs: Any) -> ArgumentParser: ...
+
+# not documented
+class ArgumentTypeError(Exception): ...
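
The rewritten argparse stub above types the common parser entry points. A minimal usage sketch that it is intended to cover (program name and flags below are purely illustrative):

    import argparse

    parser = argparse.ArgumentParser(prog='demo',
                                     description='typed argparse example')
    parser.add_argument('name', help='who to greet')
    parser.add_argument('--count', type=int, default=1,
                        help='how many greetings to print')
    args = parser.parse_args(['--count', '2', 'world'])
    for _ in range(args.count):
        print('hello %s' % args.name)
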
diff --git a/typeshed/stdlib/2and3/logging/__init__.pyi b/typeshed/stdlib/2and3/logging/__init__.pyi
new file mode 100644
index 0000000..acc51a7
--- /dev/null
+++ b/typeshed/stdlib/2and3/logging/__init__.pyi
@@ -0,0 +1,355 @@
+# Stubs for logging (Python 3.4)
+
+from typing import (
+    Any, Callable, Dict, Iterable, Mapping, MutableMapping, Optional, IO, Tuple,
+    Text, Union,
+    overload,
+)
+from types import TracebackType
+import sys
+
+_SysExcInfoType = Union[Tuple[type, BaseException, TracebackType],
+                        Tuple[None, None, None]]
+if sys.version_info >= (3, 5):
+    _ExcInfoType = Union[bool, _SysExcInfoType, Exception]
+else:
+    _ExcInfoType = Union[bool, _SysExcInfoType]
+_ArgsType = Union[Tuple[Any, ...], Dict[str, Any]]
+_FilterType = Union['Filter', Callable[[LogRecord], int]]
+
+
+class Logger:
+    propagate = ...  # type: bool
+    def setLevel(self, lvl: Union[int, str]) -> None: ...
+    def isEnabledFor(self, lvl: int) -> bool: ...
+    def getEffectiveLevel(self) -> int: ...
+    def getChild(self, suffix: str) -> 'Logger': ...
+    if sys.version_info > (3,):
+        def debug(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                  stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                  **kwargs: Any) -> None: ...
+        def info(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                 stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                 **kwargs: Any) -> None: ...
+        def warning(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                    stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                    **kwargs: Any) -> None: ...
+        def warn(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                 stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                 **kwargs: Any) -> None: ...
+        def error(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                  stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                  **kwargs: Any) -> None: ...
+        def critical(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                     stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                     **kwargs: Any) -> None: ...
+        def log(self, lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                **kwargs: Any) -> None: ...
+        def exception(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                      stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                      **kwargs: Any) -> None: ...
+    else:
+        def debug(self,  # type: ignore
+                  msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                  extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+        def info(self,  # type: ignore
+                 msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                 extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+        def warning(self,  # type: ignore
+                    msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                    extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+        def warn(self,  # type: ignore
+                 msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                 extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+        def error(self,  # type: ignore
+                  msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                  extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+        def critical(self,  # type: ignore
+                     msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                     extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+        def log(self,  # type: ignore
+                lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+        def exception(self,  # type: ignore
+                      msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                      extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+    def addFilter(self, filt: _FilterType) -> None: ...
+    def removeFilter(self, filt: _FilterType) -> None: ...
+    def filter(self, record: 'LogRecord') -> bool: ...
+    def addHandler(self, hdlr: 'Handler') -> None: ...
+    def removeHandler(self, hdlr: 'Handler') -> None: ...
+    if sys.version_info >= (3,):
+        def findCaller(self, stack_info: bool = ...) \
+                       -> Tuple[str, int, str, Optional[str]]: ...
+    else:
+        def findCaller(self  # type: ignore
+                       ) -> Tuple[str, int, str]: ...
+    def handle(self, record: 'LogRecord') -> None: ...
+    if sys.version_info >= (3,):
+        def makeRecord(self, name: str, lvl: int, fn: str, lno: int, msg: Text,
+                       args: Mapping[str, Any],
+                       exc_info: Optional[_SysExcInfoType],
+                       func: Optional[str] = ...,
+                       extra: Optional[Mapping[str, Any]] = ...,
+                       sinfo: Optional[str] = ...) -> 'LogRecord': ...
+    else:
+        def makeRecord(self,  # type: ignore
+                       name: str, lvl: int, fn: str, lno: int, msg: Text,
+                       args: Mapping[str, Any],
+                       exc_info: Optional[_SysExcInfoType],
+                       func: Optional[str] = ...,
+                       extra: Optional[Mapping[str, Any]] = ...) -> 'LogRecord': ...
+    if sys.version_info >= (3,):
+        def hasHandlers(self) -> bool: ...
+
+
+CRITICAL = ...  # type: int
+ERROR = ...  # type: int
+WARNING = ...  # type: int
+WARN = ...  # type: int
+INFO = ...  # type: int
+DEBUG = ...  # type: int
+NOTSET = ...  # type: int
+
+
+class Handler:
+    def __init__(self, level: int = ...) -> None: ...
+    def createLock(self) -> None: ...
+    def acquire(self) -> None: ...
+    def release(self) -> None: ...
+    def setLevel(self, lvl: Union[int, str]) -> None: ...
+    def setFormatter(self, form: 'Formatter') -> None: ...
+    def addFilter(self, filt: _FilterType) -> None: ...
+    def removeFilter(self, filt: _FilterType) -> None: ...
+    def filter(self, record: 'LogRecord') -> bool: ...
+    def flush(self) -> None: ...
+    def close(self) -> None: ...
+    def handle(self, record: 'LogRecord') -> None: ...
+    def handleError(self, record: 'LogRecord') -> None: ...
+    def format(self, record: 'LogRecord') -> str: ...
+    def emit(self, record: 'LogRecord') -> None: ...
+
+
+class Formatter:
+    if sys.version_info >= (3,):
+        def __init__(self, fmt: Optional[str] = ...,
+                     datefmt: Optional[str] =...,
+                     style: str = ...) -> None: ...
+    else:
+        def __init__(self,  # type: ignore
+                     fmt: Optional[str] = ...,
+                     datefmt: Optional[str] =...) -> None: ...
+    def format(self, record: LogRecord) -> str: ...
+    def formatTime(self, record: LogRecord, datefmt: str = ...) -> str: ...
+    def formatException(self, exc_info: _SysExcInfoType) -> str: ...
+    if sys.version_info >= (3,):
+        def formatStack(self, stack_info: str) -> str: ...
+
+
+class Filter:
+    def __init__(self, name: str = ...) -> None: ...
+    def filter(self, record: LogRecord) -> int: ...
+
+
+class LogRecord:
+    args = ...  # type: _ArgsType
+    asctime = ...  # type: str
+    created = ...  # type: int
+    exc_info = ...  # type: Optional[_SysExcInfoType]
+    filename = ...  # type: str
+    funcName = ...  # type: str
+    levelname = ...  # type: str
+    levelno = ...  # type: int
+    lineno = ...  # type: int
+    module = ...  # type: str
+    msecs = ...  # type: int
+    message = ...  # type: str
+    msg = ...  # type: str
+    name = ...  # type: str
+    pathname = ...  # type: str
+    process = ...  # type: int
+    processName = ...  # type: str
+    relativeCreated = ...  # type: int
+    if sys.version_info >= (3,):
+        stack_info = ...  # type: Optional[str]
+    thread = ...  # type: int
+    threadName = ...  # type: str
+    if sys.version_info >= (3,):
+        def __init__(self, name: str, level: int, pathname: str, lineno: int,
+                     msg: Text, args: _ArgsType,
+                     exc_info: Optional[_SysExcInfoType],
+                     func: Optional[str] = ...,
+                     sinfo: Optional[str] = ...) -> None: ...
+    else:
+        def __init__(self,  # type: ignore
+                     name: str, level: int, pathname: str, lineno: int,
+                     msg: Text, args: _ArgsType,
+                     exc_info: Optional[_SysExcInfoType],
+                     func: Optional[str] = ...) -> None: ...
+    def getMessage(self) -> str: ...
+
+
+class LoggerAdapter:
+    def __init__(self, logger: Logger, extra: Mapping[str, Any]) -> None: ...
+    def process(self, msg: Text, kwargs: MutableMapping[str, Any]) \
+                -> Tuple[str, MutableMapping[str, Any]]: ...
+    if sys.version_info > (3,):
+        def debug(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                  stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                  **kwargs: Any) -> None: ...
+        def info(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                 stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                 **kwargs: Any) -> None: ...
+        def warning(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                    stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                    **kwargs: Any) -> None: ...
+        def error(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                  stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                  **kwargs: Any) -> None: ...
+        def exception(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                      stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                      **kwargs: Any) -> None: ...
+        def critical(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                     stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                     **kwargs: Any) -> None: ...
+        def log(self, lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                **kwargs: Any) -> None: ...
+    else:
+        def debug(self,  # type: ignore
+                  msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                  extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+        def info(self,  # type: ignore
+                 msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                 extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+        def warning(self,  # type: ignore
+                    msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                    extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+        def error(self,  # type: ignore
+                  msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                  extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+        def exception(self,  # type: ignore
+                      msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                      extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+        def critical(self,  # type: ignore
+                     msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                     extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+        def log(self,  # type: ignore
+                lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+    def isEnabledFor(self, lvl: int) -> bool: ...
+    if sys.version_info >= (3,):
+        def getEffectiveLevel(self) -> int: ...
+        def setLevel(self, lvl: Union[int, str]) -> None: ...
+        def hasHandlers(self) -> bool: ...
+
+
+# TODO uncomment when mypy handles conditionals
+#if sys.version_info >= (3,):
+#    def getLogger(name: Optional[str] = ...) -> Logger: ...
+#else:
+#    @overload
+#    def getLogger() -> Logger: ...
+#    @overload
+#    def getLogger(name: str) -> Logger: ...
+def getLogger(name: Optional[str] = ...) -> Logger: ...
+def getLoggerClass() -> type: ...
+if sys.version_info >= (3,):
+    def getLogRecordFactory() -> Callable[..., LogRecord]: ...
+
+if sys.version_info > (3,):
+    def debug(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+              stack_info: bool = ..., extra: Dict[str, Any] = ...,
+              **kwargs: Any) -> None: ...
+    def info(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+             stack_info: bool = ..., extra: Dict[str, Any] = ...,
+             **kwargs: Any) -> None: ...
+    def warning(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                **kwargs: Any) -> None: ...
+    def warn(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+             stack_info: bool = ..., extra: Dict[str, Any] = ...,
+             **kwargs: Any) -> None: ...
+    def error(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+              stack_info: bool = ..., extra: Dict[str, Any] = ...,
+              **kwargs: Any) -> None: ...
+    def critical(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                 stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                 **kwargs: Any) -> None: ...
+    def exception(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                  stack_info: bool = ..., extra: Dict[str, Any] = ...,
+                  **kwargs: Any) -> None: ...
+    def log(lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+            stack_info: bool = ..., extra: Dict[str, Any] = ...,
+            **kwargs: Any) -> None: ...
+else:
+    def debug(# type: ignore
+              msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+              extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+    def info(# type: ignore
+             msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+             extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+    def warning(# type: ignore
+                msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+    def warn(# type: ignore
+             msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+             extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+    def error(# type: ignore
+              msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+              extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+    def critical(# type: ignore
+                 msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                 extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+    def exception(# type: ignore
+                  msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+                  extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+    def log(# type: ignore
+            lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+            extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+
+def disable(lvl: int) -> None: ...
+def addLevelName(lvl: int, levelName: str) -> None: ...
+def getLevelName(lvl: int) -> str: ...
+
+def makeLogRecord(attrdict: Mapping[str, Any]) -> LogRecord: ...
+
+# TODO uncomment when mypy handles conditionals
+#if sys.version_info >= (3,):
+#    def basicConfig(*, filename: str = ..., filemode: str = ...,
+#                    format: str = ..., datefmt: str = ..., style: str = ...,
+#                    level: int = ..., stream: IO[str] = ...,
+#                    handlers: Iterable[Handler]) -> None: ...
+#else:
+#    @overload
+#    def basicConfig() -> None: ...
+#    @overload
+#    def basicConfig(*, filename: str = ..., filemode: str = ...,
+#                    format: str = ..., datefmt: str = ...,
+#                    level: int = ..., stream: IO[str] = ...) -> None: ...
+def basicConfig(*, filename: str = ..., filemode: str = ...,
+                format: str = ..., datefmt: str = ..., style: str = ...,
+                level: int = ..., stream: IO[str] = ...,
+                handlers: Iterable[Handler] = ...) -> None: ...
+def shutdown() -> None: ...
+
+def setLoggerClass(klass: type) -> None: ...
+if sys.version_info >= (3,):
+    def setLogRecordFactory(factory: Callable[..., LogRecord]) -> None: ...
+
+
+if sys.version_info >= (3,):
+    lastResort = ...  # type: Optional['StreamHandler']
+
+
+class StreamHandler(Handler):
+    def __init__(self, stream: Optional[IO[str]] = ...) -> None: ...
+
+
+class FileHandler(Handler):
+    def __init__(self, filename: str, mode: str = ...,
+                 encoding: Optional[str] = ..., delay: bool = ...) -> None: ...
+
+
+class NullHandler(Handler): ...
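
A short usage sketch for the new logging stub, showing the keyword-only exc_info parameter it declares (logger name and format string are illustrative):

    import logging

    logging.basicConfig(level=logging.INFO,
                        format='%(levelname)s %(name)s: %(message)s')
    log = logging.getLogger('demo')

    try:
        1 / 0
    except ZeroDivisionError:
        # exc_info accepts a bool or an exc_info() tuple (and an exception
        # object on 3.5+), per the _ExcInfoType alias defined in the stub.
        log.error('division failed', exc_info=True)
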
diff --git a/typeshed/stdlib/2and3/logging/config.pyi b/typeshed/stdlib/2and3/logging/config.pyi
new file mode 100644
index 0000000..b2cdf6c
--- /dev/null
+++ b/typeshed/stdlib/2and3/logging/config.pyi
@@ -0,0 +1,29 @@
+# Stubs for logging.config (Python 3.4)
+
+from typing import Any, Callable, Dict, Optional, IO, Union
+import sys
+# TODO uncomment when mypy handles conditionals
+#if sys.version_info >= (3,):
+#    from configparser import RawConfigParser
+#else:
+#    from ConfigParser import RawConfigParser
+# TODO add RawConfigParser to configparser stubs
+RawConfigParser = Any
+
+
+def dictConfig(config: Dict[str, Any]) -> None: ...
+if sys.version_info >= (3, 4):
+    def fileConfig(fname: Union[str, IO[str], RawConfigParser],
+                   defaults: Optional[Dict[str, str]] = ...,
+                   disable_existing_loggers: bool = ...) -> None: ...
+    def listen(port: int = ...,
+               verify: Optional[Callable[[bytes], Optional[bytes]]] = ...) \
+               -> None: ...
+else:
+    def fileConfig(  # type: ignore
+                   fname: Union[str, IO[str]],
+                   defaults: Optional[Dict[str, str]] = ...,
+                   disable_existing_loggers: bool = ...) -> None: ...
+    def listen(  # type: ignore
+               port: int = ...) -> None: ...
+def stopListening() -> None: ...
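
A minimal dictConfig() call of the kind the new logging.config stub is meant to type (handler and formatter names are illustrative):

    import logging
    import logging.config

    logging.config.dictConfig({
        'version': 1,
        'formatters': {'plain': {'format': '%(levelname)s %(message)s'}},
        'handlers': {'console': {'class': 'logging.StreamHandler',
                                 'formatter': 'plain'}},
        'root': {'level': 'INFO', 'handlers': ['console']},
    })
    logging.getLogger(__name__).info('configured via dictConfig')
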
diff --git a/typeshed/stdlib/2and3/logging/handlers.pyi b/typeshed/stdlib/2and3/logging/handlers.pyi
new file mode 100644
index 0000000..e722354
--- /dev/null
+++ b/typeshed/stdlib/2and3/logging/handlers.pyi
@@ -0,0 +1,223 @@
+# Stubs for logging.handlers (Python 3.4)
+
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union, overload
+from logging import Handler, FileHandler, LogRecord
+import datetime
+# TODO uncomment when mypy handles conditionals
+#if sys.version_info >= (3,):
+#    from queue import Queue
+#else:
+#    from Queue import Queue
+Queue = Any
+from socket import SocketType
+# TODO update socket stubs to add SocketKind
+SocketKind = int
+import ssl
+import sys
+
+
+class WatchedFileHandler(Handler):
+    @overload
+    def __init__(self, filename: str) -> None: ...
+    @overload
+    def __init__(self, filename: str, mode: str) -> None: ...
+    @overload
+    def __init__(self, filename: str, mode: str,
+                 encoding: Optional[str]) -> None: ...
+    @overload
+    def __init__(self, filename: str, mode: str, encoding: Optional[str],
+                 delay: bool) -> None: ...
+
+
+if sys.version_info >= (3,):
+    class BaseRotatingHandler(FileHandler):
+        namer = ...  # type: Optional[Callable[[str], None]]
+        rotator = ...  # type: Optional[Callable[[str, str], None]]
+        def __init__(self, filename: str, mode: str,
+                     encoding: Optional[str] = ...,
+                     delay: bool = ...) -> None: ...
+        def rotation_filename(self, default_name: str) -> str: ...
+        def rotate(self, source: str, dest: str) -> None: ...
+
+
+# TODO uncomment when mypy handles conditionals
+#if sys.version_info >= (3,):
+#    class RotatingFileHandler(BaseRotatingHandler):
+#        def __init__(self, filename: str, mode: str = ..., maxBytes: int = ...,
+#                     backupCount: int = ..., encoding: Optional[str] = ...,
+#                     delay: bool = ...) -> None: ...
+#        def doRollover(self) -> None: ...
+#else:
+#    class RotatingFileHandler(Handler):
+#        def __init__(self, filename: str, mode: str = ..., maxBytes: int = ...,
+#                     backupCount: int = ..., encoding: Optional[str] = ...,
+#                     delay: bool = ...) -> None: ...
+#        def doRollover(self) -> None: ...
+class RotatingFileHandler(BaseRotatingHandler):
+    def __init__(self, filename: str, mode: str = ..., maxBytes: int = ...,
+                 backupCount: int = ..., encoding: Optional[str] = ...,
+                 delay: bool = ...) -> None: ...
+    def doRollover(self) -> None: ...
+
+
+# TODO uncomment when mypy handles conditionals
+#if sys.version_info >= (3,):
+#    class TimedRotatingFileHandler(BaseRotatingHandler):
+#        if sys.version_info >= (3, 4):
+#            def __init__(self, filename: str, when: str = ...,
+#                         interval: int = ...,
+#                         backupCount: int = ..., encoding: Optional[str] = ...,
+#                         delay: bool = ..., utc: bool = ...,
+#                         atTime: Optional[datetime.datetime] = ...) -> None: ...
+#        else:
+#            def __init__(self,  # type: ignore
+#                         filename: str, when: str = ..., interval: int = ...,
+#                         backupCount: int = ..., encoding: Optional[str] = ...,
+#                         delay: bool = ..., utc: bool = ...) -> None: ...
+#        def doRollover(self) -> None: ...
+#else:
+#    class TimedRotatingFileHandler:
+#        def __init__(self,  # type: ignore
+#                     filename: str, when: str = ..., interval: int = ...,
+#                     backupCount: int = ..., encoding: Optional[str] = ...,
+#                     delay: bool = ..., utc: bool = ...) -> None: ...
+#        def doRollover(self) -> None: ...
+class TimedRotatingFileHandler(BaseRotatingHandler):
+    if sys.version_info >= (3, 4):
+        def __init__(self, filename: str, when: str = ...,
+                     interval: int = ...,
+                     backupCount: int = ..., encoding: Optional[str] = ...,
+                     delay: bool = ..., utc: bool = ...,
+                     atTime: Optional[datetime.datetime] = ...) -> None: ...
+    else:
+        def __init__(self,  # type: ignore
+                     filename: str, when: str = ..., interval: int = ...,
+                     backupCount: int = ..., encoding: Optional[str] = ...,
+                     delay: bool = ..., utc: bool = ...) -> None: ...
+    def doRollover(self) -> None: ...
+
+
+class SocketHandler(Handler):
+    retryStart = ...  # type: float
+    retryFactor = ...  # type: float
+    retryMax = ...  # type: float
+    if sys.version_info >= (3, 4):
+        def __init__(self, host: str, port: Optional[int]) -> None: ...
+    else:
+        def __init__(self, host: str, port: int) -> None: ...  # type: ignore
+    def makeSocket(self) -> SocketType: ...
+    def makePickle(self, record: LogRecord) -> bytes: ...
+    def send(self, packet: bytes) -> None: ...
+    def createSocket(self) -> None: ...
+
+
+class DatagramHandler(SocketHandler): ...
+
+
+class SysLogHandler(Handler):
+    LOG_ALERT = ...  # type: int
+    LOG_CRIT = ...  # type: int
+    LOG_DEBUG = ...  # type: int
+    LOG_EMERG = ...  # type: int
+    LOG_ERR = ...  # type: int
+    LOG_INFO = ...  # type: int
+    LOG_NOTICE = ...  # type: int
+    LOG_WARNING = ...  # type: int
+    LOG_AUTH = ...  # type: int
+    LOG_AUTHPRIV = ...  # type: int
+    LOG_CRON = ...  # type: int
+    LOG_DAEMON = ...  # type: int
+    LOG_FTP = ...  # type: int
+    LOG_KERN = ...  # type: int
+    LOG_LPR = ...  # type: int
+    LOG_MAIL = ...  # type: int
+    LOG_NEWS = ...  # type: int
+    LOG_SYSLOG = ...  # type: int
+    LOG_USER = ...  # type: int
+    LOG_UUCP = ...  # type: int
+    LOG_LOCAL0 = ...  # type: int
+    LOG_LOCAL1 = ...  # type: int
+    LOG_LOCAL2 = ...  # type: int
+    LOG_LOCAL3 = ...  # type: int
+    LOG_LOCAL4 = ...  # type: int
+    LOG_LOCAL5 = ...  # type: int
+    LOG_LOCAL6 = ...  # type: int
+    LOG_LOCAL7 = ...  # type: int
+    def __init__(self, address: Union[Tuple[str, int], str] = ...,
+            facility: int = ..., socktype: SocketKind = ...) -> None: ...
+    def encodePriority(self, facility: Union[int, str],
+                       priority: Union[int, str]) -> int: ...
+    def mapPriority(self, levelName: int) -> str: ...
+
+
+class NTEventLogHandler(Handler):
+    def __init__(self, appname: str, dllname: str = ...,
+                 logtype: str = ...) -> None: ...
+    def getEventCategory(self, record: LogRecord) -> int: ...
+    # TODO correct return value?
+    def getEventType(self, record: LogRecord) -> int: ...
+    def getMessageID(self, record: LogRecord) -> int: ...
+
+
+class SMTPHandler(Handler):
+    # TODO `secure` can also be an empty tuple
+    if sys.version_info >= (3,):
+        def __init__(self, mailhost: Union[str, Tuple[str, int]], fromaddr: str,
+                     toaddrs: List[str], subject: str,
+                     credentials: Optional[Tuple[str, str]] = ...,
+                     secure: Union[Tuple[str], Tuple[str, str], None] =...,
+                     timeout: float = ...) -> None: ...
+    else:
+        def __init__(self,  # type: ignore
+                     mailhost: Union[str, Tuple[str, int]], fromaddr: str,
+                     toaddrs: List[str], subject: str,
+                     credentials: Optional[Tuple[str, str]] = ...,
+                     secure: Union[Tuple[str], Tuple[str, str], None] =...) \
+                     -> None: ...
+    def getSubject(self, record: LogRecord) -> str: ...
+
+
+class BufferingHandler(Handler):
+    def __init__(self, capacity: int) -> None: ...
+    def shouldFlush(self, record: LogRecord) -> bool: ...
+
+class MemoryHandler(BufferingHandler):
+    def __init__(self, capacity: int, flushLevel: int = ...,
+                 target: Optional[Handler] =...) -> None: ...
+    def setTarget(self, target: Handler) -> None: ...
+
+
+class HTTPHandler(Handler):
+    if sys.version_info >= (3, 5):
+        def __init__(self, host: str, url: str, method: str = ...,
+                     secure: bool = ...,
+                     credentials: Optional[Tuple[str, str]] = ...,
+                     context: Optional[ssl.SSLContext] = ...) -> None: ...
+    elif sys.version_info >= (3,):
+        def __init__(self,  # type: ignore
+                     host: str, url: str, method: str = ..., secure: bool = ...,
+                     credentials: Optional[Tuple[str, str]] = ...) -> None: ...
+    else:
+        def __init__(self,  # type: ignore
+                     host: str, url: str, method: str = ...) -> None: ...
+    def mapLogRecord(self, record: LogRecord) -> Dict[str, Any]: ...
+
+
+if sys.version_info > (3,):
+    class QueueHandler(Handler):
+        def __init__(self, queue: Queue) -> None: ...
+        def prepare(self, record: LogRecord) -> Any: ...
+        def enqueue(self, record: LogRecord) -> None: ...
+
+    class QueueListener:
+        if sys.version_info >= (3, 5):
+            def __init__(self, queue: Queue, *handlers: Handler,
+                         respect_handler_level: bool = ...) -> None: ...
+        else:
+            def __init__(self,  # type: ignore
+                         queue: Queue, *handlers: Handler) -> None: ...
+        def dequeue(self, block: bool) -> LogRecord: ...
+        def prepare(self, record: LogRecord) -> Any: ...
+        def start(self) -> None: ...
+        def stop(self) -> None: ...
+        def enqueue_sentinel(self) -> None: ...
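
A short sketch using RotatingFileHandler as typed above (file name and size limits are illustrative):

    import logging
    from logging.handlers import RotatingFileHandler

    handler = RotatingFileHandler('app.log', maxBytes=1024, backupCount=3)
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))

    log = logging.getLogger('rotating-demo')
    log.addHandler(handler)
    log.warning('this message goes to app.log, rotating at 1 KiB')
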
diff --git a/typeshed/stdlib/2and3/warnings.pyi b/typeshed/stdlib/2and3/warnings.pyi
index c15d8bc..a3890cf 100644
--- a/typeshed/stdlib/2and3/warnings.pyi
+++ b/typeshed/stdlib/2and3/warnings.pyi
@@ -1,33 +1,39 @@
 # Stubs for warnings
 
-# Based on http://docs.python.org/3.2/library/warnings.html
+from typing import Any, Dict, List, NamedTuple, Optional, TextIO, Tuple, Type, Union
+from types import ModuleType, TracebackType
 
-from typing import Any, List, TextIO, Union
-
-def warn(message: Union[str, Warning], category: type = ...,
+def warn(message: Union[str, Warning], category: Optional[Type[Warning]] = ...,
          stacklevel: int = ...) -> None: ...
-
-def warn_explicit(message: Union[str, Warning], category: type, filename: str,
-                  lineno: int, module: str = ..., registry: Any = ...,
-                  module_globals: Any = ...) -> None: ...
-
-# logging modifies showwarning => make it a variable.
-def _showwarning(message: str, category: type, filename: str, lineno: int,
-                 file: TextIO = ..., line: str = ...) -> None: ...
-showwarning = _showwarning
-
-def formatwarning(message: str, category: type, filename: str, lineno: int,
-                  line: str = ...) -> None: ...
-def filterwarnings(action: str, message: str = ..., category: type = ...,
-                   module: str = ..., lineno: int = ...,
-                   append: bool = ...) -> None: ...
-def simplefilter(action: str, category: type = ..., lineno: int = ...,
+def warn_explicit(message: Union[str, Warning], category: Type[Warning],
+                  filename: str, lineno: int, module: Optional[str] = ...,
+                  registry: Optional[Dict[Union[str, Tuple[str, Type[Warning], int]], int]] = ...,
+                  module_globals: Optional[Dict[str, Any]] = ...) -> None: ...
+def showwarning(message: str, category: Type[Warning], filename: str,
+                lineno: int, file: Optional[TextIO] = ...,
+                line: Optional[str] = ...) -> None: ...
+def formatwarning(message: str, category: Type[Warning], filename: str,
+                  lineno: int, line: Optional[str] = ...) -> str: ...
+def filterwarnings(action: str, message: str = ...,
+                   category: Type[Warning] = ..., module: str = ...,
+                   lineno: int = ..., append: bool = ...) -> None: ...
+def simplefilter(action: str, category: Type[Warning] = ..., lineno: int = ...,
                  append: bool = ...) -> None: ...
 def resetwarnings() -> None: ...
 
+_Record = NamedTuple('_Record',
+    [('message', str),
+     ('category', Type[Warning]),
+     ('filename', str),
+     ('lineno', int),
+     ('file', Optional[TextIO]),
+     ('line', Optional[str])]
+)
+
 class catch_warnings:
-    # TODO record and module must be keyword arguments!
-    # TODO type of module?
-    def __init__(self, record: bool = ..., module: Any = ...) -> None: ...
-    def __enter__(self) -> List[Any]: ...
-    def __exit__(self, type, value, traceback) -> bool: ...
+    def __init__(self, *, record: bool = ...,
+                 module: Optional[ModuleType] = ...) -> None: ...
+    def __enter__(self) -> Optional[List[_Record]]: ...
+    def __exit__(self, exc_type: Optional[Type[BaseException]],
+                 exc_val: Optional[Exception],
+                 exc_tb: Optional[TracebackType]) -> bool: ...
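
The catch_warnings signature above makes record and module keyword-only and gives __enter__ the Optional[List[_Record]] return type; with record=True the context manager yields the list of captured warnings. A small sketch:

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        warnings.warn('this API is deprecated', DeprecationWarning)

    # caught is None only when record=False; here it is a list of entries
    # whose fields correspond to the _Record named tuple in the stub.
    for entry in caught or []:
        print(entry.category.__name__, entry.message)
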
diff --git a/typeshed/stdlib/3.2/xml/etree/ElementTree.pyi b/typeshed/stdlib/3.2/xml/etree/ElementTree.pyi
index a78606d..19675b9 100644
--- a/typeshed/stdlib/3.2/xml/etree/ElementTree.pyi
+++ b/typeshed/stdlib/3.2/xml/etree/ElementTree.pyi
@@ -21,12 +21,12 @@ class ParseError(SyntaxError): ...
 
 def iselement(element: 'Element') -> bool: ...
 
-class Element:
+class Element(Sequence['Element']):
     tag = ... # type: _str_or_bytes
     attrib = ... # type: Dict[_str_or_bytes, _str_or_bytes]
     text = ... # type: Optional[_str_or_bytes]
     tail = ... # type: Optional[_str_or_bytes]
-    def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> None: ...
+    def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> None: ...
     def append(self, subelement: 'Element') -> None: ...
     def clear(self) -> None: ...
     def copy(self) -> 'Element': ...
@@ -52,7 +52,7 @@ class Element:
     def __len__(self) -> int: ...
     def __setitem__(self, index: int, element: 'Element') -> None: ...
 
-def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> Element: ...
+def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> Element: ...
 def Comment(text: _str_or_bytes=...) -> Element: ...
 def ProcessingInstruction(target: str, text: str=...) -> Element: ...
 
diff --git a/typeshed/stdlib/3.3/xml/etree/ElementTree.pyi b/typeshed/stdlib/3.3/xml/etree/ElementTree.pyi
index 488d6a5..cccf493 100644
--- a/typeshed/stdlib/3.3/xml/etree/ElementTree.pyi
+++ b/typeshed/stdlib/3.3/xml/etree/ElementTree.pyi
@@ -15,12 +15,12 @@ _Ss = TypeVar('_Ss', str, bytes)
 _T = TypeVar('_T')
 _str_or_bytes = Union[str, bytes]
 
-class Element:
+class Element(Sequence['Element']):
     tag = ... # type: _str_or_bytes
     attrib = ... # type: Dict[_str_or_bytes, _str_or_bytes]
     text = ... # type: Optional[_str_or_bytes]
     tail = ... # type: Optional[_str_or_bytes]
-    def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> None: ...
+    def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> None: ...
     def append(self, subelement: 'Element') -> None: ...
     def clear(self) -> None: ...
     def copy(self) -> 'Element': ...
@@ -46,7 +46,7 @@ class Element:
     def __len__(self) -> int: ...
     def __setitem__(self, index: int, element: 'Element') -> None: ...
 
-def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> Element: ...
+def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> Element: ...
 def Comment(text: _str_or_bytes=...) -> Element: ...
 def ProcessingInstruction(target: str, text: str=...) -> Element: ...
 
diff --git a/typeshed/stdlib/3.4/asyncio/events.pyi b/typeshed/stdlib/3.4/asyncio/events.pyi
index beaa637..6b4f11a 100644
--- a/typeshed/stdlib/3.4/asyncio/events.pyi
+++ b/typeshed/stdlib/3.4/asyncio/events.pyi
@@ -13,9 +13,9 @@ AF_UNSPEC = 0     # from socket
 AI_PASSIVE = 0
 
 class Handle:
-    __slots__ = [] # type: List[str]
+    __slots__ = ... # type: List[str]
     _cancelled = False
-    _args = [] # type: List[Any]
+    _args = ... # type: List[Any]
     def __init__(self, callback: Callable[[],Any], args: List[Any],
         loop: AbstractEventLoop) -> None: ...
     def __repr__(self) -> str: ...
@@ -102,13 +102,13 @@ class AbstractEventLoop(metaclass=ABCMeta):
     @abstractmethod
     def subprocess_shell(self, protocol_factory: Any, cmd: Union[bytes, str], *, stdin: Any = ...,
                          stdout: Any = ..., stderr: Any = ...,
-                         **kwargs: Dict[str, Any]) -> tuple: ...
+                         **kwargs: Any) -> tuple: ...
                     #?? check Any
                     # return (Transport, Protocol)
     @abstractmethod
     def subprocess_exec(self, protocol_factory: Any, *args: List[Any], stdin: Any = ...,
                         stdout: Any = ..., stderr: Any = ...,
-                        **kwargs: Dict[str, Any]) -> tuple: ...
+                        **kwargs: Any) -> tuple: ...
                     #?? check Any
                     # return (Transport, Protocol)
     @abstractmethod
diff --git a/typeshed/stdlib/3.4/asyncio/futures.pyi b/typeshed/stdlib/3.4/asyncio/futures.pyi
index 318f9c1..a3de8a8 100644
--- a/typeshed/stdlib/3.4/asyncio/futures.pyi
+++ b/typeshed/stdlib/3.4/asyncio/futures.pyi
@@ -6,9 +6,9 @@ __all__ = ... # type: str
 _T = TypeVar('_T')
 
 class _TracebackLogger:
-    __slots__ = [] # type: List[str]
+    __slots__ = ... # type: List[str]
     exc = ...  # type: BaseException
-    tb = [] # type: List[str]
+    tb = ... # type: List[str]
     def __init__(self, exc: Any, loop: AbstractEventLoop) -> None: ...
     def activate(self) -> None: ...
     def clear(self) -> None: ...
diff --git a/typeshed/stdlib/3.4/asyncio/tasks.pyi b/typeshed/stdlib/3.4/asyncio/tasks.pyi
index b4649d3..66abd00 100644
--- a/typeshed/stdlib/3.4/asyncio/tasks.pyi
+++ b/typeshed/stdlib/3.4/asyncio/tasks.pyi
@@ -16,8 +16,8 @@ def wait_for(fut: Union[Future[_T], Generator[Any, None, _T]], timeout: float, *
 
 
 class Task(Future[_T], Generic[_T]):
-    _all_tasks = None  # type: Set[Task]
-    _current_tasks = {}  # type: Dict[AbstractEventLoop, Task]
+    _all_tasks = ...  # type: Set[Task]
+    _current_tasks = ...  # type: Dict[AbstractEventLoop, Task]
     @classmethod
     def current_task(cls, loop: AbstractEventLoop = ...) -> Task: ...
     @classmethod
diff --git a/typeshed/stdlib/3.4/enum.pyi b/typeshed/stdlib/3.4/enum.pyi
index dcb3b9c..4032e81 100644
--- a/typeshed/stdlib/3.4/enum.pyi
+++ b/typeshed/stdlib/3.4/enum.pyi
@@ -10,7 +10,7 @@ class Enum:
     def __reduce_ex__(self, proto: Any) -> Any: ...
 
     name = ...  # type: str
-    value = None  # type: Any
+    value = ...  # type: Any
 
 class IntEnum(int, Enum): ...
 
diff --git a/typeshed/stdlib/3.4/xml/etree/ElementTree.pyi b/typeshed/stdlib/3.4/xml/etree/ElementTree.pyi
index 8b2eecd..74e8084 100644
--- a/typeshed/stdlib/3.4/xml/etree/ElementTree.pyi
+++ b/typeshed/stdlib/3.4/xml/etree/ElementTree.pyi
@@ -15,12 +15,12 @@ _Ss = TypeVar('_Ss', str, bytes)
 _T = TypeVar('_T')
 _str_or_bytes = Union[str, bytes]
 
-class Element:
+class Element(Sequence['Element']):
     tag = ... # type: _str_or_bytes
     attrib = ... # type: Dict[_str_or_bytes, _str_or_bytes]
     text = ... # type: Optional[_str_or_bytes]
     tail = ... # type: Optional[_str_or_bytes]
-    def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> None: ...
+    def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> None: ...
     def append(self, subelement: 'Element') -> None: ...
     def clear(self) -> None: ...
     def copy(self) -> 'Element': ...
@@ -46,7 +46,7 @@ class Element:
     def __len__(self) -> int: ...
     def __setitem__(self, index: int, element: 'Element') -> None: ...
 
-def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> Element: ...
+def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> Element: ...
 def Comment(text: _str_or_bytes=...) -> Element: ...
 def ProcessingInstruction(target: str, text: str=...) -> Element: ...
 
diff --git a/typeshed/stdlib/3.5/xml/etree/ElementTree.pyi b/typeshed/stdlib/3.5/xml/etree/ElementTree.pyi
index 8b2eecd..74e8084 100644
--- a/typeshed/stdlib/3.5/xml/etree/ElementTree.pyi
+++ b/typeshed/stdlib/3.5/xml/etree/ElementTree.pyi
@@ -15,12 +15,12 @@ _Ss = TypeVar('_Ss', str, bytes)
 _T = TypeVar('_T')
 _str_or_bytes = Union[str, bytes]
 
-class Element:
+class Element(Sequence['Element']):
     tag = ... # type: _str_or_bytes
     attrib = ... # type: Dict[_str_or_bytes, _str_or_bytes]
     text = ... # type: Optional[_str_or_bytes]
     tail = ... # type: Optional[_str_or_bytes]
-    def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> None: ...
+    def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> None: ...
     def append(self, subelement: 'Element') -> None: ...
     def clear(self) -> None: ...
     def copy(self) -> 'Element': ...
@@ -46,7 +46,7 @@ class Element:
     def __len__(self) -> int: ...
     def __setitem__(self, index: int, element: 'Element') -> None: ...
 
-def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> Element: ...
+def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> Element: ...
 def Comment(text: _str_or_bytes=...) -> Element: ...
 def ProcessingInstruction(target: str, text: str=...) -> Element: ...
 
diff --git a/typeshed/stdlib/3/_importlib_modulespec.pyi b/typeshed/stdlib/3/_importlib_modulespec.pyi
new file mode 100644
index 0000000..20d8894
--- /dev/null
+++ b/typeshed/stdlib/3/_importlib_modulespec.pyi
@@ -0,0 +1,44 @@
+# ModuleSpec, ModuleType, Loader are part of a dependency cycle.
+# They are officially defined/exported in other places:
+#
+# - ModuleType in types
+# - Loader in importlib.abc
+# - ModuleSpec in importlib.machinery (3.4 and later only)
+
+import abc
+import sys
+from typing import Any, List, Optional
+
+if sys.version_info >= (3, 4):
+    class ModuleSpec:
+        def __init__(self, name: str, loader: Optional['Loader'], *,
+                     origin: Optional[str] = ..., loader_state: Any = ...,
+                     is_package: Optional[bool] = ...) -> None: ...
+        name = ... # type: str
+        loader = ... # type: Optional[Loader]
+        origin = ... # type: Optional[str]
+        submodule_search_locations = ... # type: Optional[List[str]]
+        loader_state = ... # type: Any
+        cached = ... # type: Optional[str]
+        parent = ... # type: Optional[str]
+        has_location = ... # type: bool
+
+class ModuleType:
+    __name__ = ... # type: str
+    __file__ = ...  # type: str
+    __doc__ = ... # type: Optional[str]
+    if sys.version_info >= (3, 4):
+        __loader__ = ... # type: Optional[Loader]
+        __package__ = ... # type: Optional[str]
+        __spec__ = ... # type: Optional[ModuleSpec]
+    def __init__(self, name: str, doc: str) -> None: ...
+
+class Loader(metaclass=abc.ABCMeta):
+    def load_module(self, fullname: str) -> ModuleType: ...
+    if sys.version_info >= (3, 3):
+        def module_repr(self, module: ModuleType) -> str: ...
+    if sys.version_info >= (3, 4):
+        def create_module(self, spec: ModuleSpec) -> Optional[ModuleType]: ...
+        # Not defined on the actual class for backwards-compatibility reasons,
+        # but expected in new code.
+        def exec_module(self, module: ModuleType) -> None: ...
diff --git a/typeshed/stdlib/3/abc.pyi b/typeshed/stdlib/3/abc.pyi
index d24b258..6b37f75 100644
--- a/typeshed/stdlib/3/abc.pyi
+++ b/typeshed/stdlib/3/abc.pyi
@@ -1,4 +1,5 @@
 from typing import Any
+import sys
 # Stubs for abc.
 
 # These definitions have special processing in the type checker.
@@ -6,3 +7,7 @@ class ABCMeta:
     def register(cls: "ABCMeta", subclass: Any) -> None: ...
 abstractmethod = object()
 abstractproperty = object()
+
+if sys.version_info >= (3, 4):
+    class ABC(metaclass=ABCMeta):
+        pass
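The ABC helper added above (3.4+) is just a convenience base class wired to ABCMeta. A small sketch of the two equivalent spellings (the Serializer/JsonSerializer names are illustrative):

    import abc

    class Serializer(abc.ABC):            # same effect as metaclass=abc.ABCMeta
        @abc.abstractmethod
        def dumps(self, obj: object) -> str: ...

    class JsonSerializer(Serializer):
        def dumps(self, obj: object) -> str:
            import json
            return json.dumps(obj)

    print(JsonSerializer().dumps({"ok": True}))
    # Serializer() itself raises TypeError: can't instantiate abstract class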
diff --git a/typeshed/stdlib/3/argparse.pyi b/typeshed/stdlib/3/argparse.pyi
deleted file mode 100644
index 02328cd..0000000
--- a/typeshed/stdlib/3/argparse.pyi
+++ /dev/null
@@ -1,163 +0,0 @@
-# Stubs for argparse (Python 3)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any, Sequence, Union
-
-SUPPRESS = ... # type: Any
-OPTIONAL = ... # type: Any
-ZERO_OR_MORE = ... # type: Any
-ONE_OR_MORE = ... # type: Any
-PARSER = ... # type: Any
-REMAINDER = ... # type: Any
-
-class _AttributeHolder: ...
-
-class HelpFormatter:
-    def __init__(self, prog, indent_increment=..., max_help_position=..., width=...) -> None: ...
-    def start_section(self, heading): ...
-    def end_section(self): ...
-    def add_text(self, text): ...
-    def add_usage(self, usage, actions, groups, prefix=...): ...
-    def add_argument(self, action): ...
-    def add_arguments(self, actions): ...
-    def format_help(self): ...
-
-class RawDescriptionHelpFormatter(HelpFormatter): ...
-class RawTextHelpFormatter(RawDescriptionHelpFormatter): ...
-class ArgumentDefaultsHelpFormatter(HelpFormatter): ...
-class MetavarTypeHelpFormatter(HelpFormatter): ...
-
-class ArgumentError(Exception):
-    argument_name = ... # type: Any
-    message = ... # type: Any
-    def __init__(self, argument, message) -> None: ...
-
-class ArgumentTypeError(Exception): ...
-
-class Action(_AttributeHolder):
-    option_strings = ... # type: Any
-    dest = ... # type: Any
-    nargs = ... # type: Any
-    const = ... # type: Any
-    default = ... # type: Any
-    type = ... # type: Any
-    choices = ... # type: Any
-    required = ... # type: Any
-    help = ... # type: Any
-    metavar = ... # type: Any
-    def __init__(self, option_strings, dest, nargs=..., const=..., default=..., type=...,
-                 choices=..., required=..., help=..., metavar=...): ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _StoreAction(Action):
-    def __init__(self, option_strings, dest, nargs=..., const=..., default=..., type=...,
-                 choices=..., required=..., help=..., metavar=...): ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _StoreConstAction(Action):
-    def __init__(self, option_strings, dest, const, default=..., required=..., help=...,
-                 metavar=...): ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _StoreTrueAction(_StoreConstAction):
-    def __init__(self, option_strings, dest, default=..., required=..., help=...) -> None: ...
-
-class _StoreFalseAction(_StoreConstAction):
-    def __init__(self, option_strings, dest, default=..., required=..., help=...) -> None: ...
-
-class _AppendAction(Action):
-    def __init__(self, option_strings, dest, nargs=..., const=..., default=..., type=...,
-                 choices=..., required=..., help=..., metavar=...): ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _AppendConstAction(Action):
-    def __init__(self, option_strings, dest, const, default=..., required=..., help=...,
-                 metavar=...): ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _CountAction(Action):
-    def __init__(self, option_strings, dest, default=..., required=..., help=...) -> None: ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _HelpAction(Action):
-    def __init__(self, option_strings, dest=..., default=..., help=...) -> None: ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _VersionAction(Action):
-    version = ... # type: Any
-    def __init__(self, option_strings, version=..., dest=..., default=...,
-                 help=...): ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class _SubParsersAction(Action):
-    def __init__(self, option_strings, prog, parser_class, dest=..., help=...,
-                 metavar=...): ...
-    def add_parser(self, name, **kwargs): ...
-    def __call__(self, parser, namespace, values, option_string=...): ...
-
-class FileType:
-    def __init__(self, mode=..., bufsize=..., encoding=..., errors=...) -> None: ...
-    def __call__(self, string): ...
-
-class Namespace(_AttributeHolder):
-    def __init__(self, **kwargs) -> None: ...
-    def __eq__(self, other): ...
-    def __ne__(self, other): ...
-    def __contains__(self, key): ...
-    def __getattr__(self, name: str) -> Any: ...
-
-class _ActionsContainer:
-    description = ... # type: Any
-    argument_default = ... # type: Any
-    prefix_chars = ... # type: Any
-    conflict_handler = ... # type: Any
-    def __init__(self, description, prefix_chars, argument_default, conflict_handler) -> None: ...
-    def register(self, registry_name, value, object): ...
-    def set_defaults(self, **kwargs): ...
-    def get_default(self, dest): ...
-    def add_argument(self,
-        *args: str,
-        action: Union[str, Action] = ...,
-        nargs: str = ...,
-        const: Any = ...,
-        default: Any = ...,
-        type: Any = ...,
-        choices: Any = ..., # TODO: Container?
-        required: bool = ...,
-        help: str = ...,
-        metavar: str = ...,
-        dest: str = ...,
-        version: str = ...
-    ) -> None: ...
-    def add_argument_group(self, *args, **kwargs): ...
-    def add_mutually_exclusive_group(self, **kwargs): ...
-
-class _ArgumentGroup(_ActionsContainer):
-    title = ... # type: Any
-    def __init__(self, container, title=..., description=..., **kwargs) -> None: ...
-
-class _MutuallyExclusiveGroup(_ArgumentGroup):
-    required = ... # type: Any
-    def __init__(self, container, required=...) -> None: ...
-
-class ArgumentParser(_AttributeHolder, _ActionsContainer):
-    prog = ... # type: Any
-    usage = ... # type: Any
-    epilog = ... # type: Any
-    formatter_class = ... # type: Any
-    fromfile_prefix_chars = ... # type: Any
-    add_help = ... # type: Any
-    def __init__(self, prog=..., usage=..., description=..., epilog=..., parents=...,
-                 formatter_class=..., prefix_chars=..., fromfile_prefix_chars=...,
-                 argument_default=..., conflict_handler=..., add_help=...): ...
-    def add_subparsers(self, **kwargs): ...
-    def parse_args(self, args: Sequence[str] = ..., namespace=...) -> Namespace: ...
-    def parse_known_args(self, args=..., namespace=...): ...
-    def convert_arg_line_to_args(self, arg_line): ...
-    def format_usage(self): ...
-    def format_help(self): ...
-    def print_usage(self, file=...): ...
-    def print_help(self, file=...): ...
-    def exit(self, status=..., message=...): ...
-    def error(self, message): ...
diff --git a/typeshed/stdlib/3/builtins.pyi b/typeshed/stdlib/3/builtins.pyi
index e0c4189..291296a 100644
--- a/typeshed/stdlib/3/builtins.pyi
+++ b/typeshed/stdlib/3/builtins.pyi
@@ -537,11 +537,11 @@ class set(MutableSet[_T], Generic[_T]):
     def add(self, element: _T) -> None: ...
     def clear(self) -> None: ...
     def copy(self) -> set[_T]: ...
-    def difference(self, s: Iterable[Any]) -> set[_T]: ...
-    def difference_update(self, s: Iterable[Any]) -> None: ...
+    def difference(self, *s: Iterable[Any]) -> set[_T]: ...
+    def difference_update(self, *s: Iterable[Any]) -> None: ...
     def discard(self, element: _T) -> None: ...
-    def intersection(self, s: Iterable[Any]) -> set[_T]: ...
-    def intersection_update(self, s: Iterable[Any]) -> None: ...
+    def intersection(self, *s: Iterable[Any]) -> set[_T]: ...
+    def intersection_update(self, *s: Iterable[Any]) -> None: ...
     def isdisjoint(self, s: AbstractSet[Any]) -> bool: ...
     def issubset(self, s: AbstractSet[Any]) -> bool: ...
     def issuperset(self, s: AbstractSet[Any]) -> bool: ...
@@ -549,8 +549,8 @@ class set(MutableSet[_T], Generic[_T]):
     def remove(self, element: _T) -> None: ...
     def symmetric_difference(self, s: Iterable[_T]) -> set[_T]: ...
     def symmetric_difference_update(self, s: Iterable[_T]) -> None: ...
-    def union(self, s: Iterable[_T]) -> set[_T]: ...
-    def update(self, s: Iterable[_T]) -> None: ...
+    def union(self, *s: Iterable[_T]) -> set[_T]: ...
+    def update(self, *s: Iterable[_T]) -> None: ...
     def __len__(self) -> int: ...
     def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_T]: ...
@@ -572,13 +572,13 @@ class set(MutableSet[_T], Generic[_T]):
 class frozenset(AbstractSet[_T], Generic[_T]):
     def __init__(self, iterable: Iterable[_T]=None) -> None: ...
     def copy(self) -> frozenset[_T]: ...
-    def difference(self, s: AbstractSet[Any]) -> frozenset[_T]: ...
-    def intersection(self, s: AbstractSet[Any]) -> frozenset[_T]: ...
+    def difference(self, *s: AbstractSet[Any]) -> frozenset[_T]: ...
+    def intersection(self, *s: AbstractSet[Any]) -> frozenset[_T]: ...
     def isdisjoint(self, s: AbstractSet[_T]) -> bool: ...
     def issubset(self, s: AbstractSet[Any]) -> bool: ...
     def issuperset(self, s: AbstractSet[Any]) -> bool: ...
     def symmetric_difference(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
-    def union(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
+    def union(self, *s: AbstractSet[_T]) -> frozenset[_T]: ...
     def __len__(self) -> int: ...
     def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_T]: ...
@@ -751,7 +751,6 @@ Ellipsis = ...  # type: ellipsis
 
 class BaseException:
     args = ...  # type: Any
-    message = ...  # type: str
     def __init__(self, *args: Any) -> None: ...
     def with_traceback(self, tb: Any) -> BaseException: ...
 
diff --git a/typeshed/stdlib/3/collections/__init__.pyi b/typeshed/stdlib/3/collections/__init__.pyi
index fcf3b2f..6e74100 100644
--- a/typeshed/stdlib/3/collections/__init__.pyi
+++ b/typeshed/stdlib/3/collections/__init__.pyi
@@ -35,7 +35,7 @@ class MutableString(UserString, MutableSequence): ...
 # Technically, deque only derives from MutableSequence in 3.5.
 # But in practice it's not worth losing sleep over.
 class deque(MutableSequence[_T], Generic[_T]):
-    maxlen = 0 # type: Optional[int] # TODO readonly
+    maxlen = ... # type: Optional[int] # TODO readonly
     def __init__(self, iterable: Iterable[_T] = ...,
                  maxlen: int = ...) -> None: ...
     def append(self, x: _T) -> None: ...
@@ -133,3 +133,17 @@ class defaultdict(Dict[_KT, _VT], Generic[_KT, _VT]):
 
     def __missing__(self, key: _KT) -> _VT: ...
     # TODO __reversed__
+
+class ChainMap(Dict[_KT, _VT], Generic[_KT, _VT]):
+    @overload
+    def __init__(self) -> None: ...
+    @overload
+    def __init__(self, *maps: Mapping[_KT, _VT]) -> None: ...
+
+    @property
+    def maps(self) -> List[Mapping[_KT, _VT]]: ...
+
+    def new_child(self, m: Mapping[_KT, _VT] = ...) -> ChainMap[_KT, _VT]: ...
+
+    @property
+    def parents(self) -> ChainMap[_KT, _VT]: ...
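The new ChainMap stub mirrors the collections.ChainMap API available since Python 3.3: lookups search the chained mappings left to right, and new_child() pushes a fresh map onto the front. A minimal sketch:

    from collections import ChainMap

    defaults = {"color": "red", "user": "guest"}
    overrides = {"user": "alice"}
    cm = ChainMap(overrides, defaults)
    print(cm["user"], cm["color"])        # alice red
    child = cm.new_child({"debug": True})
    print(child["debug"], child.parents["user"])
    print(cm.maps)                        # the underlying mappings, front first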
diff --git a/typeshed/stdlib/3/concurrent/futures/_base.pyi b/typeshed/stdlib/3/concurrent/futures/_base.pyi
index 19a23e8..84d10a0 100644
--- a/typeshed/stdlib/3/concurrent/futures/_base.pyi
+++ b/typeshed/stdlib/3/concurrent/futures/_base.pyi
@@ -75,7 +75,7 @@ class Future:
 
 class Executor:
     def submit(self, fn, *args, **kwargs): ...
-    def map(self, fn, *iterables, *, timeout=None, chunksize=1): ...
+    def map(self, fn, *iterables, timeout=None, chunksize=1): ...
     def shutdown(self, wait=True): ...
     def __enter__(self): ...
     def __exit__(self, exc_type, exc_val, exc_tb): ...
diff --git a/typeshed/stdlib/3/concurrent/futures/process.pyi b/typeshed/stdlib/3/concurrent/futures/process.pyi
index 605cd80..9bc56fa 100644
--- a/typeshed/stdlib/3/concurrent/futures/process.pyi
+++ b/typeshed/stdlib/3/concurrent/futures/process.pyi
@@ -42,5 +42,5 @@ class BrokenProcessPool(RuntimeError): ...
 class ProcessPoolExecutor(_base.Executor):
     def __init__(self, max_workers=None): ...
     def submit(self, fn, *args, **kwargs): ...
-    def map(self, fn, *iterables, *, timeout=None, chunksize=1): ...
+    def map(self, fn, *iterables, timeout=None, chunksize=1): ...
     def shutdown(self, wait=True): ...
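The two hunks above drop an invalid bare '*' that followed '*iterables': parameters declared after a *args parameter are already keyword-only, and a second '*' in the same parameter list is a SyntaxError. A tiny sketch with a stand-in function (fake_map is illustrative, not the concurrent.futures implementation):

    def fake_map(fn, *iterables, timeout=None, chunksize=1):
        # timeout and chunksize can only be passed by keyword here
        return list(map(fn, *iterables))

    print(fake_map(lambda a, b: a + b, [1, 2], [10, 20], timeout=5))   # [11, 22]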
diff --git a/typeshed/stdlib/3/configparser.pyi b/typeshed/stdlib/3/configparser.pyi
index 584da44..b495d59 100644
--- a/typeshed/stdlib/3/configparser.pyi
+++ b/typeshed/stdlib/3/configparser.pyi
@@ -4,7 +4,7 @@
 # reading configparser.py.
 
 from typing import (MutableMapping, Mapping, Dict, Sequence, List,
-                    Iterable, Iterator, Callable, Any, TextIO)
+                    Iterable, Iterator, Callable, Any, IO)
 # Types used only in type comments
 from typing import Optional, Tuple  # noqa
 
@@ -103,7 +103,7 @@ class ConfigParser(_parser):
     def set(self, section: str, option: str, value: str) -> None: ...
 
     def write(self,
-              fileobject: TextIO,
+              fileobject: IO[str],
               space_around_delimiters: bool = True) -> None: ...
 
     def remove_option(self, section: str, option: str) -> bool: ...
diff --git a/typeshed/stdlib/3/datetime.pyi b/typeshed/stdlib/3/datetime.pyi
index c0ee98b..6e0294f 100644
--- a/typeshed/stdlib/3/datetime.pyi
+++ b/typeshed/stdlib/3/datetime.pyi
@@ -181,7 +181,7 @@ class datetime:
     @classmethod
     def fromordinal(cls, n: int) -> datetime: ...
     @classmethod
-    def now(cls, tz: timezone = ...) -> datetime: ...
+    def now(cls, tz: _tzinfo = ...) -> datetime: ...
     @classmethod
     def utcnow(cls) -> datetime: ...
     @classmethod
diff --git a/typeshed/stdlib/3/difflib.pyi b/typeshed/stdlib/3/difflib.pyi
index 98da327..a05db8d 100644
--- a/typeshed/stdlib/3/difflib.pyi
+++ b/typeshed/stdlib/3/difflib.pyi
@@ -3,7 +3,8 @@
 # Based on https://docs.python.org/3.2/library/difflib.html
 
 from typing import (
-    TypeVar, Callable, Iterable, List, NamedTuple, Sequence, Tuple, Generic
+    TypeVar, Callable, Iterable, Iterator, List, NamedTuple, Sequence, Tuple,
+    Generic
 )
 
 _T = TypeVar('_T')
@@ -31,20 +32,20 @@ def get_close_matches(word: Sequence[_T], possibilities: List[Sequence[_T]],
 class Differ:
     def __init__(self, linejunk: Callable[[str], bool] = ...,
                  charjunk: Callable[[str], bool] = ...) -> None: ...
-    def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterable[str]: ...
+    def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterator[str]: ...
 
 def IS_LINE_JUNK(str) -> bool: ...
 def IS_CHARACTER_JUNK(str) -> bool: ...
 def unified_diff(a: Sequence[str], b: Sequence[str], fromfile: str = ...,
                  tofile: str = ..., fromfiledate: str = ..., tofiledate: str = ...,
-                 n: int = ..., lineterm: str = ...) -> Iterable[str]: ...
+                 n: int = ..., lineterm: str = ...) -> Iterator[str]: ...
 def context_diff(a: Sequence[str], b: Sequence[str], fromfile: str=...,
                  tofile: str = ..., fromfiledate: str = ..., tofiledate: str = ...,
-                 n: int = ..., lineterm: str = ...) -> Iterable[str]: ...
+                 n: int = ..., lineterm: str = ...) -> Iterator[str]: ...
 def ndiff(a: Sequence[str], b: Sequence[str],
           linejunk: Callable[[str], bool] = ...,
           charjunk: Callable[[str], bool] = ...
-          ) -> Iterable[str]: ...
+          ) -> Iterator[str]: ...
 
 class HtmlDiff(object):
     def __init__(self, tabsize: int = ..., wrapcolumn: int = ...,
@@ -58,4 +59,4 @@ class HtmlDiff(object):
                    fromdesc: str = ..., todesc: str = ..., context: bool = ...,
                    numlines: int = ...) -> str: ...
 
-def restore(delta: Iterable[str], which: int) -> Iterable[int]: ...
+def restore(delta: Iterable[str], which: int) -> Iterator[int]: ...
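The Iterable -> Iterator changes above reflect that these difflib helpers are generators, i.e. single-pass iterators. A quick sketch:

    import difflib

    a = ["one\n", "two\n", "three\n"]
    b = ["one\n", "2\n", "three\n"]
    diff = difflib.unified_diff(a, b, fromfile="a.txt", tofile="b.txt")
    print(next(diff))          # legal because it really is an iterator
    print("".join(diff))       # consumes the rest; a second pass yields nothing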
diff --git a/typeshed/stdlib/3/dis.pyi b/typeshed/stdlib/3/dis.pyi
new file mode 100644
index 0000000..8c08946
--- /dev/null
+++ b/typeshed/stdlib/3/dis.pyi
@@ -0,0 +1,72 @@
+from typing import (List, Union, Iterator, Iterable, Tuple, Optional, Dict,
+                   Any, IO, NamedTuple)
+
+from opcode import (hasconst, hasname, hasjrel, hasjabs, haslocal, hascompare,
+                    hasfree, hasnargs, cmp_op, opname, opmap, HAVE_ARGUMENT,
+                    EXTENDED_ARG, stack_effect)
+
+import types
+
+_have_code = Union[types.MethodType, types.FunctionType, types.CodeType, type]
+_have_code_or_string = Union[_have_code, str, bytes]
+
+
+class Instruction(NamedTuple("Instruction", [
+        ('opname', str),
+        ('opcode', int),
+        ('arg', Optional[int]),
+        ('argval', Any),
+        ('argrepr', str),
+        ('offset', int),
+        ('starts_line', Optional[int]),
+        ('is_jump_target', bool)
+    ])):
+    # ad-hoc - seems to be an error in the NamedTuple type hint
+    # TODO: mypy issue #1076 
+    _fields = ...  # type: List[str]
+    _source = ...  # type: str
+    def _replace(self, *, opname: str = ..., opcode: int = ...,
+                arg: Optional[int] = ..., argval: Any = ..., argrepr: str = ...,
+                offset: int = ..., starts_line: Optional[int] = ...,
+                is_jump_target: bool = ...) -> Instruction: ...
+    def _asdict(self) -> Dict[str, Any]: ...
+    @staticmethod
+    def _make(iterable: Iterable[Any]) -> Instruction: ...
+
+
+# if sys.version_info >= (3, 4): 
+class Bytecode:
+    codeobj = ...  # type: types.CodeType
+    first_line = ...  # type: int
+    def __init__(self, x: _have_code_or_string, *, first_line: int=...,
+                 current_offset: int=...) -> None: ...
+    def __iter__(self) -> Iterator[Instruction]: ...
+    def __repr__(self) -> str: ...
+    def info(self) -> str: ...
+    def dis(self) -> str: ...
+
+    @classmethod
+    def from_traceback(cls, tb: types.TracebackType) -> Bytecode: ...
+
+ 
+COMPILER_FLAG_NAMES = ...  # type:  Dict[int, str]
+
+
+def pretty_flags(flags: int) -> str: ...
+def findlabels(code: _have_code) -> List[int]: ...
+def findlinestarts(code: _have_code) -> Iterator[Tuple[int, int]]: ...
+
+# Signature changes are not allowed by mypy
+# 'All conditional function variants must have identical signatures'
+# TODO: mypy issue #698
+
+# if sys.version_info >= (3, 2):
+def code_info(x: _have_code_or_string) -> str: ...
+    
+# `file` parameter requires sys.version_info >= (3, 4):
+def dis(x: _have_code_or_string = ..., *, file = ...) -> None: ...
+def distb(tb: types.TracebackType = ..., *, file: IO[str] = ...) -> None: ...
+def disassemble(co: _have_code, lasti: int = ..., *, file = ...) -> None: ...
+def show_code(co: _have_code, *, file: IO[str]=...) -> None: ...
+
+def get_instructions(x: _have_code, *, first_line: int = ...) -> Iterator[Instruction]: ...
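Instruction above subclasses a NamedTuple(...) call and re-declares _fields/_replace/_asdict only to work around the mypy limitation referenced in the comments (issue #1076); at runtime it behaves like an ordinary namedtuple. A short sketch, assuming Python 3.4+:

    import dis

    def add(x, y):
        return x + y

    for ins in dis.get_instructions(add):     # Iterator[Instruction]
        print(ins.offset, ins.opname, ins.argrepr)

    first = next(dis.get_instructions(add))
    print(first._replace(argrepr="patched").argrepr)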
diff --git a/typeshed/stdlib/3/email/__init__.pyi b/typeshed/stdlib/3/email/__init__.pyi
index 8962ed5..4d50f09 100644
--- a/typeshed/stdlib/3/email/__init__.pyi
+++ b/typeshed/stdlib/3/email/__init__.pyi
@@ -1,6 +1,6 @@
 # Stubs for email (Python 3.4)
 
-from typing import Callable, Optional, BinaryIO, TextIO
+from typing import Callable, Optional, IO
 import sys
 from email.message import Message, Policy
 
@@ -9,9 +9,9 @@ if sys.version_info >= (3, 3):
                             policy: Policy = ...) -> Message: ...
     def message_from_bytes(s: bytes, _class: Callable[[], Message] = ..., *,
                            policy: Policy = ...) -> Message: ...
-    def message_from_file(fp: TextIO, _class: Callable[[], Message] = ..., *,
+    def message_from_file(fp: IO[str], _class: Callable[[], Message] = ..., *,
                            policy: Policy = ...) -> Message: ...
-    def message_from_binary_file(fp: BinaryIO,
+    def message_from_binary_file(fp: IO[bytes],
                                  _class: Callable[[], Message] = ..., *,
                                  policy: Policy = ...) -> Message: ...
 elif sys.version_info >= (3, 2):
@@ -21,10 +21,10 @@ elif sys.version_info >= (3, 2):
     def message_from_bytes(s: bytes,  # type: ignore
                            _class: Callable[[], Message] = ..., *,
                            strict: Optional[bool] = ...) -> Message: ...
-    def message_from_file(fp: TextIO,  # type: ignore
+    def message_from_file(fp: IO[str],  # type: ignore
                           _class: Callable[[], Message] = ..., *,
                           strict: Optional[bool] = ...) -> Message: ...
-    def message_from_binary_file(fp: BinaryIO,  # type: ignore
+    def message_from_binary_file(fp: IO[bytes],  # type: ignore
                                  _class: Callable[[], Message] = ..., *,
                                  strict: Optional[bool] = ...) -> Message: ...
 
diff --git a/typeshed/stdlib/3/email/header.pyi b/typeshed/stdlib/3/email/header.pyi
index 051e575..f446d4a 100644
--- a/typeshed/stdlib/3/email/header.pyi
+++ b/typeshed/stdlib/3/email/header.pyi
@@ -18,7 +18,7 @@ class Header:
     def __eq__(self, other: Any) -> bool: ...
     def __ne__(self, other: Any) -> bool: ...
 
-def decode_header(header: Header) -> List[Tuple[bytes, Optional[str]]]: ...
+def decode_header(header: Union[Header, str]) -> List[Tuple[bytes, Optional[str]]]: ...
 def make_header(decoded_seq: List[Tuple[bytes, Optional[str]]],
                 maxlinelen: Optional[int] =...,
                 header_name: Optional[str] = ...,
diff --git a/typeshed/stdlib/3/email/message.pyi b/typeshed/stdlib/3/email/message.pyi
index d44a77f..4bf3ca3 100644
--- a/typeshed/stdlib/3/email/message.pyi
+++ b/typeshed/stdlib/3/email/message.pyi
@@ -11,7 +11,7 @@ from email.contentmanager import ContentManager
 
 _T = TypeVar('_T')
 
-_PayloadType = Union[List[Message], str]
+_PayloadType = Union[List[Message], str, bytes]
 _CharsetType = Union[Charset, str, None]
 _ParamsType = Union[str, None, Tuple[str, Optional[str], str]]
 _ParamType = Union[str, Tuple[Optional[str], Optional[str], str]]
@@ -25,7 +25,8 @@ class Message:
     def set_unixfrom(self, unixfrom: str) -> None: ...
     def get_unixfrom(self) -> Optional[str]: ...
     def attach(self, payload: 'Message') -> None: ...
-    def get_payload(self, i: int = ..., decode: bool = ...) -> _PayloadType: ...
+    def get_payload(self, i: int = ..., decode: bool = ...) \
+                    -> Optional[_PayloadType]: ...
     def set_payload(self, payload: _PayloadType,
                     charset: _CharsetType = ...) -> None: ...
     def set_charset(self, charset: _CharsetType) -> None: ...
diff --git a/typeshed/stdlib/3/heapq.pyi b/typeshed/stdlib/3/heapq.pyi
index 0894f98..81b6135 100644
--- a/typeshed/stdlib/3/heapq.pyi
+++ b/typeshed/stdlib/3/heapq.pyi
@@ -2,6 +2,7 @@
 
 # Based on http://docs.python.org/3.2/library/heapq.html
 
+import sys
 from typing import TypeVar, List, Iterable, Any, Callable
 
 _T = TypeVar('_T')
@@ -11,7 +12,11 @@ def heappop(heap: List[_T]) -> _T: ...
 def heappushpop(heap: List[_T], item: _T) -> _T: ...
 def heapify(x: List[_T]) -> None: ...
 def heapreplace(heap: List[_T], item: _T) -> _T: ...
-def merge(*iterables: Iterable[_T]) -> Iterable[_T]: ...
+if sys.version_info >= (3, 5):
+    def merge(*iterables: Iterable[_T], key: Callable[[_T], Any] = ...,
+              reverse: bool = ...) -> Iterable[_T]: ...
+else:
+    def merge(*iterables: Iterable[_T]) -> Iterable[_T]: ...  # type: ignore
 def nlargest(n: int, iterable: Iterable[_T],
              key: Callable[[_T], Any] = ...) -> List[_T]: ...
 def nsmallest(n: int, iterable: Iterable[_T],
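The heapq hunk above shows the pattern forced by the "identical signatures" restriction mentioned in dis.pyi (mypy issue #698): one sys.version_info branch per signature, with the narrower variant silenced by # type: ignore. At runtime, key= and reverse= only exist on 3.5+:

    import heapq, sys

    print(list(heapq.merge([1, 4, 7], [2, 5, 8], [3, 6, 9])))
    if sys.version_info >= (3, 5):
        # inputs must already be sorted descending when reverse=True
        print(list(heapq.merge([7, 4, 1], [8, 5, 2], reverse=True)))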
diff --git a/typeshed/stdlib/3/http/client.pyi b/typeshed/stdlib/3/http/client.pyi
index 1c83fd4..c9b92d6 100644
--- a/typeshed/stdlib/3/http/client.pyi
+++ b/typeshed/stdlib/3/http/client.pyi
@@ -1,101 +1,213 @@
 # Stubs for http.client (Python 3.4)
 
-from typing import Any, Dict
+from typing import (
+    Any, Dict, IO, Iterable, List, Iterator, Mapping, Optional, Tuple, TypeVar,
+    Union,
+    overload,
+)
 import email.message
 import io
+import sys
+import ssl
+import types
+
+_DataType = Union[bytes, IO[Any], Iterable[bytes], str]
+_T = TypeVar('_T')
+
+HTTP_PORT = ...  # type: int
+HTTPS_PORT = ...  # type: int
+
+CONTINUE = ...  # type: int
+SWITCHING_PROTOCOLS = ...  # type: int
+PROCESSING = ...  # type: int
+
+OK = ...  # type: int
+CREATED = ...  # type: int
+ACCEPTED = ...  # type: int
+NON_AUTHORITATIVE_INFORMATION = ...  # type: int
+NO_CONTENT = ...  # type: int
+RESET_CONTENT = ...  # type: int
+PARTIAL_CONTENT = ...  # type: int
+MULTI_STATUS = ...  # type: int
+IM_USED = ...  # type: int
+
+MULTIPLE_CHOICES = ...  # type: int
+MOVED_PERMANENTLY = ...  # type: int
+FOUND = ...  # type: int
+SEE_OTHER = ...  # type: int
+NOT_MODIFIED = ...  # type: int
+USE_PROXY = ...  # type: int
+TEMPORARY_REDIRECT = ...  # type: int
+
+BAD_REQUEST = ...  # type: int
+UNAUTHORIZED = ...  # type: int
+PAYMENT_REQUIRED = ...  # type: int
+FORBIDDEN = ...  # type: int
+NOT_FOUND = ...  # type: int
+METHOD_NOT_ALLOWED = ...  # type: int
+NOT_ACCEPTABLE = ...  # type: int
+PROXY_AUTHENTICATION_REQUIRED = ...  # type: int
+REQUEST_TIMEOUT = ...  # type: int
+CONFLICT = ...  # type: int
+GONE = ...  # type: int
+LENGTH_REQUIRED = ...  # type: int
+PRECONDITION_FAILED = ...  # type: int
+REQUEST_ENTITY_TOO_LARGE = ...  # type: int
+REQUEST_URI_TOO_LONG = ...  # type: int
+UNSUPPORTED_MEDIA_TYPE = ...  # type: int
+REQUESTED_RANGE_NOT_SATISFIABLE = ...  # type: int
+EXPECTATION_FAILED = ...  # type: int
+UNPROCESSABLE_ENTITY = ...  # type: int
+LOCKED = ...  # type: int
+FAILED_DEPENDENCY = ...  # type: int
+UPGRADE_REQUIRED = ...  # type: int
+PRECONDITION_REQUIRED = ...  # type: int
+TOO_MANY_REQUESTS = ...  # type: int
+REQUEST_HEADER_FIELDS_TOO_LARGE = ...  # type: int
+
+INTERNAL_SERVER_ERROR = ...  # type: int
+NOT_IMPLEMENTED = ...  # type: int
+BAD_GATEWAY = ...  # type: int
+SERVICE_UNAVAILABLE = ...  # type: int
+GATEWAY_TIMEOUT = ...  # type: int
+HTTP_VERSION_NOT_SUPPORTED = ...  # type: int
+INSUFFICIENT_STORAGE = ...  # type: int
+NOT_EXTENDED = ...  # type: int
+NETWORK_AUTHENTICATION_REQUIRED = ...  # type: int
 
 responses = ...  # type: Dict[int, str]
 
-class HTTPMessage(email.message.Message):
-    def getallmatchingheaders(self, name): ...
-
-class HTTPResponse(io.RawIOBase):
-    fp = ...  # type: Any
-    debuglevel = ...  # type: Any
-    headers = ...  # type: Any
-    version = ...  # type: Any
-    status = ...  # type: Any
-    reason = ...  # type: Any
-    chunked = ...  # type: Any
-    chunk_left = ...  # type: Any
-    length = ...  # type: Any
-    will_close = ...  # type: Any
-    def __init__(self, sock, debuglevel=..., method=..., url=...) -> None: ...
-    code = ...  # type: Any
-    def begin(self): ...
-    def close(self): ...
-    def flush(self): ...
-    def readable(self): ...
-    def isclosed(self): ...
-    def read(self, amt=...): ...
-    def readinto(self, b): ...
-    def fileno(self): ...
-    def getheader(self, name, default=...): ...
-    def getheaders(self): ...
-    def __iter__(self): ...
-    def info(self): ...
-    def geturl(self): ...
-    def getcode(self): ...
+class HTTPMessage(email.message.Message): ...
+
+# TODO uncomment when mypy handles conditionals
+#if sys.version_info >= (3, 5):
+#    class HTTPResponse(io.BufferedIOBase):
+#        msg = ...  # type: HTTPMessage
+#        version = ...  # type: int
+#        debuglevel = ...  # type: int
+#        closed = ...  # type: bool
+#        status = ...  # type: int
+#        reason = ...  # type: str
+#        def read(self, amt: Optional[int] = ...) -> bytes: ...
+#        def readinto(self, b: bytearray) -> int: ...
+#        @overload
+#        def getheader(self, name: str) -> Optional[str]: ...
+#        @overload
+#        def getheader(self, name: str, default: _T) -> Union[str, _T]: ...
+#        def getheaders(self) -> List[Tuple[str, str]]: ...
+#        def fileno(self) -> int: ...
+#        def __iter__(self) -> Iterator[bytes]: ...
+#        def __enter__(self) -> 'HTTPResponse': ...
+#        def __exit__(self, exc_type: Optional[type],
+#                     exc_val: Optional[Exception],
+#                     exc_tb: Optional[types.TracebackType]) -> bool: ...
+#else:
+#    class HTTPResponse:
+#        msg = ...  # type: HTTPMessage
+#        version = ...  # type: int
+#        debuglevel = ...  # type: int
+#        closed = ...  # type: bool
+#        status = ...  # type: int
+#        reason = ...  # type: str
+#        def read(self, amt: Optional[int] = ...) -> bytes: ...
+#        if sys.version_info >= (3, 3):
+#            def readinto(self, b: bytearray) -> int: ...
+#        @overload
+#        def getheader(self, name: str) -> Optional[str]: ...
+#        @overload
+#        def getheader(self, name: str, default: _T) -> Union[str, _T]: ...
+#        def getheaders(self) -> List[Tuple[str, str]]: ...
+#        def fileno(self) -> int: ...
+#        def __iter__(self) -> Iterator[bytes]: ...
+#        def __enter__(self) -> 'HTTPResponse': ...
+#        def __exit__(self, exc_type: Optional[type],
+#                     exc_val: Optional[Exception],
+#                     exc_tb: Optional[types.TracebackType]) -> bool: ...
+class HTTPResponse(io.BufferedIOBase):
+    msg = ...  # type: HTTPMessage
+    version = ...  # type: int
+    debuglevel = ...  # type: int
+    closed = ...  # type: bool
+    status = ...  # type: int
+    reason = ...  # type: str
+    def read(self, amt: Optional[int] = ...) -> bytes: ...
+    def readinto(self, b: bytearray) -> int: ...
+    @overload
+    def getheader(self, name: str) -> Optional[str]: ...
+    @overload
+    def getheader(self, name: str, default: _T) -> Union[str, _T]: ...
+    def getheaders(self) -> List[Tuple[str, str]]: ...
+    def fileno(self) -> int: ...
+    def __iter__(self) -> Iterator[bytes]: ...
+    def __enter__(self) -> 'HTTPResponse': ...
+    def __exit__(self, exc_type: Optional[type],
+                 exc_val: Optional[Exception],
+                 exc_tb: Optional[types.TracebackType]) -> bool: ...
 
 class HTTPConnection:
-    response_class = ...  # type: Any
-    default_port = ...  # type: Any
-    auto_open = ...  # type: Any
-    debuglevel = ...  # type: Any
-    mss = ...  # type: Any
-    timeout = ...  # type: Any
-    source_address = ...  # type: Any
-    sock = ...  # type: Any
-    def __init__(self, host, port=..., timeout=..., source_address=...) -> None: ...
-    def set_tunnel(self, host, port=..., headers=...): ...
-    def set_debuglevel(self, level): ...
-    def connect(self): ...
-    def close(self): ...
-    def send(self, data): ...
-    def putrequest(self, method, url, skip_host=..., skip_accept_encoding=...): ...
-    def putheader(self, header, *values): ...
-    def endheaders(self, message_body=...): ...
-    def request(self, method, url, body=..., headers=...): ...
-    def getresponse(self): ...
+    if sys.version_info >= (3, 4):
+        def __init__(self,  # type: ignore
+                     host: str, port: Optional[int] = ...,
+                     timeout: int = ...,
+                     source_address: Optional[Tuple[str, int]] = ...) \
+                     -> None: ...
+    else:
+        def __init__(self,  # type: ignore
+                     host: str, port: Optional[int] = ...,
+                     strict: bool = ..., timeout: int = ...,
+                     source_address: Optional[Tuple[str, int]] = ...) \
+                             -> None: ...
+    def request(self, method: str, url: str,
+                body: Optional[_DataType] = ...,
+                headers: Mapping[str, str] = ...) -> None: ...
+    def getresponse(self) -> HTTPResponse: ...
+    def set_debuglevel(self, level: int) -> None: ...
+    def set_tunnel(self, host: str, port: Optional[int] = ...,
+                   headers: Optional[Mapping[str, str]] = ...) -> None: ...
+    def connect(self) -> None: ...
+    def close(self) -> None: ...
+    def putrequest(self, request: str, selector: str, skip_host: bool = ...,
+                   skip_accept_encoding: bool = ...) -> None: ...
+    def putheader(self, header: str, *argument: str) -> None: ...
+    def endheaders(self, message_body: Optional[_DataType] = ...) -> None: ...
+    def send(self, data: _DataType) -> None: ...
 
 class HTTPSConnection(HTTPConnection):
-    default_port = ...  # type: Any
-    key_file = ...  # type: Any
-    cert_file = ...  # type: Any
-    def __init__(self, host, port=..., key_file=..., cert_file=..., timeout=...,
-                 source_address=..., *, context=..., check_hostname=...): ...
-    sock = ...  # type: Any
-    def connect(self): ...
+    if sys.version_info >= (3, 4):
+        def __init__(self,  # type: ignore
+                     host: str, port: Optional[int] = ...,
+                     key_file: Optional[str] = ...,
+                     cert_file: Optional[str] = ...,
+                     timeout: int = ...,
+                     source_address: Optional[Tuple[str, int]] = ...,
+                     *, context: Optional[ssl.SSLContext] = ...,
+                     check_hostname: Optional[bool] = ...) -> None: ...
+    else:
+        def __init__(self,  # type: ignore
+                     host: str, port: Optional[int] = ...,
+                     key_file: Optional[str] = ...,
+                     cert_file: Optional[str] = ...,
+                     strict: bool = ..., timeout: int = ...,
+                     source_address: Optional[Tuple[str, int]] = ...,
+                     *, context: Optional[ssl.SSLContext] = ...,
+                     check_hostname: Optional[bool] = ...) -> None: ...
 
 class HTTPException(Exception): ...
+
 class NotConnected(HTTPException): ...
 class InvalidURL(HTTPException): ...
-
-class UnknownProtocol(HTTPException):
-    args = ...  # type: Any
-    version = ...  # type: Any
-    def __init__(self, version) -> None: ...
-
+class UnknownProtocol(HTTPException): ...
 class UnknownTransferEncoding(HTTPException): ...
 class UnimplementedFileMode(HTTPException): ...
-
-class IncompleteRead(HTTPException):
-    args = ...  # type: Any
-    partial = ...  # type: Any
-    expected = ...  # type: Any
-    def __init__(self, partial, expected=...) -> None: ...
+class IncompleteRead(HTTPException): ...
 
 class ImproperConnectionState(HTTPException): ...
 class CannotSendRequest(ImproperConnectionState): ...
 class CannotSendHeader(ImproperConnectionState): ...
 class ResponseNotReady(ImproperConnectionState): ...
 
-class BadStatusLine(HTTPException):
-    args = ...  # type: Any
-    line = ...  # type: Any
-    def __init__(self, line) -> None: ...
-
-class LineTooLong(HTTPException):
-    def __init__(self, line_type) -> None: ...
+class BadStatusLine(HTTPException): ...
+class LineTooLong(HTTPException): ...
 
-error = HTTPException
+if sys.version_info >= (3, 5):
+    class RemoteDisconnected(ConnectionResetError, BadStatusLine): ...
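With the rewritten stub, request() accepts bytes, str, a binary file, or an iterable of bytes as the body, and getresponse() is typed as HTTPResponse. A minimal usage sketch (the host example.com and working network access are assumptions of the example):

    from http.client import HTTPConnection

    conn = HTTPConnection("example.com", 80, timeout=10)
    conn.request("GET", "/", headers={"Accept": "text/html"})
    resp = conn.getresponse()                 # typed as HTTPResponse
    print(resp.status, resp.reason)
    body = resp.read()                        # bytes
    print(resp.getheader("Content-Type", "unknown"))
    conn.close()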
diff --git a/typeshed/stdlib/3/http/cookiejar.pyi b/typeshed/stdlib/3/http/cookiejar.pyi
index 8e56a33..3761e08 100644
--- a/typeshed/stdlib/3/http/cookiejar.pyi
+++ b/typeshed/stdlib/3/http/cookiejar.pyi
@@ -1,121 +1,114 @@
 # Stubs for http.cookiejar (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
 
-from typing import Any
+from typing import Iterable, Iterator, Optional, Sequence, Tuple, TypeVar, Union, overload
+from http.client import HTTPResponse
+import sys
+from urllib.request import Request
 
-class Cookie:
-    version = ...  # type: Any
-    name = ...  # type: Any
-    value = ...  # type: Any
-    port = ...  # type: Any
-    port_specified = ...  # type: Any
-    domain = ...  # type: Any
-    domain_specified = ...  # type: Any
-    domain_initial_dot = ...  # type: Any
-    path = ...  # type: Any
-    path_specified = ...  # type: Any
-    secure = ...  # type: Any
-    expires = ...  # type: Any
-    discard = ...  # type: Any
-    comment = ...  # type: Any
-    comment_url = ...  # type: Any
-    rfc2109 = ...  # type: Any
-    def __init__(self, version, name, value, port, port_specified, domain, domain_specified,
-                 domain_initial_dot, path, path_specified, secure, expires, discard, comment,
-                 comment_url, rest, rfc2109=...): ...
-    def has_nonstandard_attr(self, name): ...
-    def get_nonstandard_attr(self, name, default=...): ...
-    def set_nonstandard_attr(self, name, value): ...
-    def is_expired(self, now=...): ...
+_T = TypeVar('_T')
 
-class CookiePolicy:
-    def set_ok(self, cookie, request): ...
-    def return_ok(self, cookie, request): ...
-    def domain_return_ok(self, domain, request): ...
-    def path_return_ok(self, path, request): ...
+# TODO uncomment when mypy handles conditionals
+#if sys.version_info >= (3, 3):
+#    class LoadError(OSError): ...
+#else:
+#    class LoadError(IOError): ...
+class LoadError(OSError): ...
 
-class DefaultCookiePolicy(CookiePolicy):
-    DomainStrictNoDots = ...  # type: Any
-    DomainStrictNonDomain = ...  # type: Any
-    DomainRFC2965Match = ...  # type: Any
-    DomainLiberal = ...  # type: Any
-    DomainStrict = ...  # type: Any
-    netscape = ...  # type: Any
-    rfc2965 = ...  # type: Any
-    rfc2109_as_netscape = ...  # type: Any
-    hide_cookie2 = ...  # type: Any
-    strict_domain = ...  # type: Any
-    strict_rfc2965_unverifiable = ...  # type: Any
-    strict_ns_unverifiable = ...  # type: Any
-    strict_ns_domain = ...  # type: Any
-    strict_ns_set_initial_dollar = ...  # type: Any
-    strict_ns_set_path = ...  # type: Any
-    def __init__(self, blocked_domains=..., allowed_domains=..., netscape=..., rfc2965=...,
-                 rfc2109_as_netscape=..., hide_cookie2=..., strict_domain=...,
-                 strict_rfc2965_unverifiable=..., strict_ns_unverifiable=...,
-                 strict_ns_domain=..., strict_ns_set_initial_dollar=...,
-                 strict_ns_set_path=...): ...
-    def blocked_domains(self): ...
-    def set_blocked_domains(self, blocked_domains): ...
-    def is_blocked(self, domain): ...
-    def allowed_domains(self): ...
-    def set_allowed_domains(self, allowed_domains): ...
-    def is_not_allowed(self, domain): ...
-    def set_ok(self, cookie, request): ...
-    def set_ok_version(self, cookie, request): ...
-    def set_ok_verifiability(self, cookie, request): ...
-    def set_ok_name(self, cookie, request): ...
-    def set_ok_path(self, cookie, request): ...
-    def set_ok_domain(self, cookie, request): ...
-    def set_ok_port(self, cookie, request): ...
-    def return_ok(self, cookie, request): ...
-    def return_ok_version(self, cookie, request): ...
-    def return_ok_verifiability(self, cookie, request): ...
-    def return_ok_secure(self, cookie, request): ...
-    def return_ok_expires(self, cookie, request): ...
-    def return_ok_port(self, cookie, request): ...
-    def return_ok_domain(self, cookie, request): ...
-    def domain_return_ok(self, domain, request): ...
-    def path_return_ok(self, path, request): ...
 
-class Absent: ...
+class CookieJar(Iterable['Cookie']):
+    def __init__(self, policy: Optional['CookiePolicy'] = ...) -> None: ...
+    def add_cookie_header(self, request: Request) -> None: ...
+    def extract_cookies(self, response: HTTPResponse,
+                        request: Request) -> None: ...
+    def set_policy(self, policy: 'CookiePolicy') -> None: ...
+    def make_cookies(self, response: HTTPResponse,
+                     request: Request) -> Sequence['Cookie']: ...
+    def set_cookie(self, cookie: 'Cookie') -> None: ...
+    def set_cookie_if_ok(self, cookie: 'Cookie',
+                         request: Request) -> None: ...
+    def clear(self, domain: str = ..., path: str = ...,
+              name: str = ...) -> None: ...
+    def clear_session_cookies(self) -> None: ...
+    def __iter__(self) -> Iterator['Cookie']: ...
 
-class CookieJar:
-    non_word_re = ...  # type: Any
-    quote_re = ...  # type: Any
-    strict_domain_re = ...  # type: Any
-    domain_re = ...  # type: Any
-    dots_re = ...  # type: Any
-    magic_re = ...  # type: Any
-    def __init__(self, policy=...) -> None: ...
-    def set_policy(self, policy): ...
-    def add_cookie_header(self, request): ...
-    def make_cookies(self, response, request): ...
-    def set_cookie_if_ok(self, cookie, request): ...
-    def set_cookie(self, cookie): ...
-    def extract_cookies(self, response, request): ...
-    def clear(self, domain=..., path=..., name=...): ...
-    def clear_session_cookies(self): ...
-    def clear_expired_cookies(self): ...
-    def __iter__(self): ...
-    def __len__(self): ...
+class FileCookieJar(CookieJar):
+    filename = ...  # type: str
+    delayload = ...  # type: bool
+    def __init__(self, filename: str = ..., delayload: bool = ...,
+                 policy: Optional['CookiePolicy'] = ...) -> None: ...
+    def save(self, filename: Optional[str] = ..., ignore_discard: bool = ...,
+             ignore_expires: bool = ...) -> None: ...
+    def load(self, filename: Optional[str] = ..., ignore_discard: bool = ...,
+             ignore_expires: bool = ...) -> None: ...
+    def revert(self, filename: Optional[str] = ..., ignore_discard: bool = ...,
+               ignore_expires: bool = ...) -> None: ...
 
-class LoadError(OSError): ...
+class MozillaCookieJar(FileCookieJar): ...
+class LWPCookieJar(FileCookieJar): ...
 
-class FileCookieJar(CookieJar):
-    filename = ...  # type: Any
-    delayload = ...  # type: Any
-    def __init__(self, filename=..., delayload=..., policy=...) -> None: ...
-    def save(self, filename=..., ignore_discard=..., ignore_expires=...): ...
-    def load(self, filename=..., ignore_discard=..., ignore_expires=...): ...
-    def revert(self, filename=..., ignore_discard=..., ignore_expires=...): ...
 
-class LWPCookieJar(FileCookieJar):
-    def as_lwp_str(self, ignore_discard=..., ignore_expires=...): ...
-    def save(self, filename=..., ignore_discard=..., ignore_expires=...): ...
+class CookiePolicy:
+    netscape = ...  # type: bool
+    rfc2965 = ...  # type: bool
+    hide_cookie2 = ...  # type: bool
+    def set_ok(self, cookie: 'Cookie', request: Request) -> bool: ...
+    def return_ok(self, cookie: 'Cookie', request: Request) -> bool: ...
+    def domain_return_ok(self, domain: str, request: Request) -> bool: ...
+    def path_return_ok(self, path: str, request: Request) -> bool: ...
+
+
+class DefaultCookiePolicy(CookiePolicy):
+    rfc2109_as_netscape = ...  # type: bool
+    strict_domain = ...  # type: bool
+    strict_rfc2965_unverifiable = ...  # type: bool
+    strict_ns_unverifiable = ...  # type: bool
+    strict_ns_domain = ...  # type: int
+    strict_ns_set_initial_dollar = ...  # type: bool
+    strict_ns_set_path = ...  # type: bool
+    DomainStrictNoDots = ...  # type: int
+    DomainStrictNonDomain = ...  # type: int
+    DomainRFC2965Match = ...  # type: int
+    DomainLiberal = ...  # type: int
+    DomainStrict = ...  # type: int
+    def __init__(self, blocked_domains: Optional[Sequence[str]] = ...,
+                 allowed_domains: Optional[Sequence[str]] = ...,
+                 netscape: bool = ...,
+                 rfc2965: bool = ...,
+                 rfc2109_as_netscape: Optional[bool] = ...,
+                 hide_cookie2: bool = ..., strict_domain: bool = ...,
+                 strict_rfc2965_unverifiable: bool = ...,
+                 strict_ns_unverifiable: bool = ...,
+                 strict_ns_domain: int = ...,
+                 strict_ns_set_initial_dollar: bool = ...,
+                 strict_ns_set_path: bool = ...) -> None: ...
+    def blocked_domains(self) -> Tuple[str, ...]: ...
+    def set_blocked_domains(self, blocked_domains: Sequence[str]) -> None: ...
+    def is_blocked(self, domain: str) -> bool: ...
+    def allowed_domains(self) -> Optional[Tuple[str, ...]]: ...
+    def set_allowed_domains(self, allowed_domains: Optional[Sequence[str]]) \
+                            -> None: ...
+    def is_not_allowed(self, domain: str) -> bool: ...
+
 
-class MozillaCookieJar(FileCookieJar):
-    magic_re = ...  # type: Any
-    header = ...  # type: Any
-    def save(self, filename=..., ignore_discard=..., ignore_expires=...): ...
+class Cookie:
+    version = ...  # type: Optional[int]
+    name = ...  # type: str
+    value = ...  # type: Optional[str]
+    port = ...  # type: Optional[str]
+    path = ...  # type: str
+    secure = ...  # type: bool
+    expires = ...  # type: Optional[int]
+    discard = ...  # type: bool
+    comment = ...  # type: Optional[str]
+    comment_url = ...  # type: Optional[str]
+    rfc2109 = ...  # type: bool
+    port_specified = ...  # type: bool
+    domain_specified = ...  # type: bool
+    domain_initial_dot = ...  # type: bool
+    def has_nonstandard_attr(self, name: str) -> bool: ...
+    @overload
+    def get_nonstandard_attr(self, name: str) -> Optional[str]: ...
+    @overload
+    def get_nonstandard_attr(self, name: str, default: _T = ...) -> Union[str, _T]: ...
+    def set_nonstandard_attr(self, name: str, value: str) -> None: ...
+    def is_expired(self, now: int = ...) -> bool: ...
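CookieJar is usually wired into urllib rather than driven directly; the typed stub above covers both styles. A short sketch (network access to example.com is assumed):

    import http.cookiejar
    import urllib.request

    jar = http.cookiejar.CookieJar(http.cookiejar.DefaultCookiePolicy())
    opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(jar))
    opener.open("http://example.com/")
    for cookie in jar:                        # CookieJar is Iterable[Cookie]
        print(cookie.name, cookie.value, cookie.is_expired())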
diff --git a/typeshed/stdlib/3/http/cookies.pyi b/typeshed/stdlib/3/http/cookies.pyi
index e7e7855..50bc0ab 100644
--- a/typeshed/stdlib/3/http/cookies.pyi
+++ b/typeshed/stdlib/3/http/cookies.pyi
@@ -1,46 +1,31 @@
 # Stubs for http.cookies (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
 
-from typing import Any
+from typing import Dict, Generic, List, Mapping, MutableMapping, Optional, TypeVar, Union
+
+_DataType = Union[str, Mapping[str, Union[str, 'Morsel']]]
+_T = TypeVar('_T')
 
 class CookieError(Exception): ...
 
-class Morsel(dict):
-    def __init__(self): ...
-    @property
-    def key(self): ...
-    @key.setter
-    def key(self, key): ...
-    @property
-    def value(self): ...
-    @value.setter
-    def value(self, value): ...
-    @property
-    def coded_value(self): ...
-    @coded_value.setter
-    def coded_value(self, coded_value): ...
-    def __setitem__(self, K, V): ...
-    def setdefault(self, key, val=None): ...
-    def __eq__(self, morsel): ...
-    __ne__ = ... # type: Any
-    def copy(self): ...
-    def update(self, values): ...
-    def isReservedKey(self, K): ...
-    def set(self, key, val, coded_val, LegalChars=...): ...
-    def output(self, attrs=None, header=''): ...
-    def js_output(self, attrs=None): ...
-    def OutputString(self, attrs=None): ...
+class Morsel(Dict[str, str], Generic[_T]):
+    value = ...  # type: str
+    coded_value = ...  # type: _T
+    key = ...  # type: str
+    def set(self, key: str, val: str, coded_val: _T) -> None: ...
+    def isReservedKey(self, K: str) -> bool: ...
+    def output(self, attrs: Optional[List[str]] = ...,
+               header: str = ...) -> str: ...
+    def js_output(self, attrs: Optional[List[str]] = ...) -> str: ...
+    def OutputString(self, attrs: Optional[List[str]] = ...) -> str: ...
 
-class BaseCookie(dict):
-    def value_decode(self, val): ...
-    def value_encode(self, val): ...
-    def __init__(self, input=None): ...
-    def __setitem__(self, key, value): ...
-    def output(self, attrs=None, header='', sep=''): ...
-    def js_output(self, attrs=None): ...
-    def load(self, rawdata): ...
+class BaseCookie(MutableMapping[str, Morsel], Generic[_T]):
+    def __init__(self, input: Optional[_DataType] = ...) -> None: ...
+    def value_decode(self, val: str) -> _T: ...
+    def value_encode(self, val: _T) -> str: ...
+    def output(self, attrs: Optional[List[str]] = ..., header: str = ...,
+               sep: str = ...) -> str: ...
+    def js_output(self, attrs: Optional[List[str]] = ...) -> str: ...
+    def load(self, rawdata: _DataType) -> None: ...
+    def __setitem__(self, key: str, value: Union[str, Morsel]) -> None: ...
 
-class SimpleCookie(BaseCookie):
-    def value_decode(self, val): ...
-    def value_encode(self, val): ...
+class SimpleCookie(BaseCookie): ...
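The generic Morsel/BaseCookie stubs above are what SimpleCookie builds on: each entry is a Morsel, and its attributes (path, httponly, ...) are set item-style. A quick sketch:

    from http.cookies import SimpleCookie

    jar = SimpleCookie()
    jar["session"] = "abc123"
    jar["session"]["path"] = "/"
    jar["session"]["httponly"] = True
    print(jar.output())                   # Set-Cookie: session=abc123; HttpOnly; Path=/
    parsed = SimpleCookie("lang=en; theme=dark")
    print(parsed["lang"].value, parsed["theme"].value)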
diff --git a/typeshed/stdlib/3/http/server.pyi b/typeshed/stdlib/3/http/server.pyi
new file mode 100644
index 0000000..2f1d0c2
--- /dev/null
+++ b/typeshed/stdlib/3/http/server.pyi
@@ -0,0 +1,63 @@
+# Stubs for http.server (Python 3.4)
+
+from typing import Any, BinaryIO, Dict, List, Mapping, Optional, Tuple, Union
+import socketserver
+import email.message
+
+class HTTPServer(socketserver.TCPServer):
+    server_name = ...  # type: str
+    server_port = ...  # type: int
+    def __init__(self, server_address: Tuple[str, int],
+                 RequestHandlerClass: type) -> None: ...
+
+class BaseHTTPRequestHandler:
+    client_address = ...  # type: Tuple[str, int]
+    server = ...  # type: socketserver.BaseServer
+    close_connection = ...  # type: bool
+    requestline = ...  # type: str
+    command = ...  # type: str
+    path = ...  # type: str
+    request_version = ...  # type: str
+    headers = ...  # type: email.message.Message
+    rfile = ...  # type: BinaryIO
+    wfile = ...  # type: BinaryIO
+    server_version = ...  # type: str
+    sys_version = ...  # type: str
+    error_message_format = ...  # type: str
+    error_content_type = ...  # type: str
+    protocol_version = ...  # type: str
+    MessageClass = ...  # type: type
+    responses = ...  # type: Mapping[int, Tuple[str, str]]
+    def __init__(self, request: bytes, client_address: Tuple[str, int],
+                 server: socketserver.BaseServer) -> None: ...
+    def handle(self) -> None: ...
+    def handle_one_request(self) -> None: ...
+    def handle_expect_100(self) -> bool: ...
+    def send_error(self, code: int, message: Optional[str] = ...,
+                   explain: Optional[str] = ...) -> None: ...
+    def send_response(self, code: int,
+                      message: Optional[str] = ...) -> None: ...
+    def send_header(self, keyword: str, value: str) -> None: ...
+    def send_response_only(self, code: int,
+                           message: Optional[str] = ...) -> None: ...
+    def end_headers(self) -> None: ...
+    def flush_headers(self) -> None: ...
+    def log_request(self, code: Union[int, str] = ...,
+                    size: Union[int, str] = ...) -> None: ...
+    def log_error(self, format: str, *args: Any) -> None: ...
+    def log_message(self, format: str, *args: Any) -> None: ...
+    def version_string(self) -> str: ...
+    def date_time_string(self, timestamp: Optional[int] = ...) -> str: ...
+    def log_date_time_string(self) -> str: ...
+    def address_string(self) -> str: ...
+
+class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
+    extensions_map = ...  # type: Dict[str, str]
+    def __init__(self, request: bytes, client_address: Tuple[str, int],
+                 server: socketserver.BaseServer) -> None: ...
+    def do_GET(self) -> None: ...
+    def do_HEAD(self) -> None: ...
+
+class CGIHTTPRequestHandler(SimpleHTTPRequestHandler):
+    cgi_directories = ...  # type: List[str]
+    def do_POST(self) -> None: ...
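The new http.server stub covers the usual pairing of HTTPServer with a handler class. A minimal sketch that serves the current directory on port 8000 (blocks until interrupted):

    from http.server import HTTPServer, SimpleHTTPRequestHandler

    server = HTTPServer(("", 8000), SimpleHTTPRequestHandler)
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        server.server_close()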
diff --git a/typeshed/stdlib/3/importlib/__init__.pyi b/typeshed/stdlib/3/importlib/__init__.pyi
new file mode 100644
index 0000000..1191ed7
--- /dev/null
+++ b/typeshed/stdlib/3/importlib/__init__.pyi
@@ -0,0 +1,18 @@
+import sys
+import types
+from typing import Any, Mapping, Optional, Sequence
+
+def __import__(name: str, globals: Mapping[str, Any] = None,
+               locals: Mapping[str, Any] = None, fromlist: Sequence[str] = (),
+               level: int = 0) -> types.ModuleType: ...
+
+def import_module(name: str, package: str = None) -> types.ModuleType: ...
+
+if sys.version_info >= (3, 3):
+    # Optionally returns a loader, but importlib.abc doesn't have a stub file.
+    def find_loader(name: str, path: str = None): ...
+
+    def invalidate_caches() -> None: ...
+
+if sys.version_info >= (3, 4):
+    def reload(module: types.ModuleType) -> types.ModuleType: ...
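A quick sketch of the entry points covered by the new importlib stub (invalidate_caches needs 3.3+, reload needs 3.4+):

    import importlib
    import sys

    json_mod = importlib.import_module("json")
    print(json_mod.dumps({"ok": True}))
    if sys.version_info >= (3, 3):
        importlib.invalidate_caches()
    if sys.version_info >= (3, 4):
        json_mod = importlib.reload(json_mod)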
diff --git a/typeshed/stdlib/3/importlib/abc.pyi b/typeshed/stdlib/3/importlib/abc.pyi
new file mode 100644
index 0000000..9600492
--- /dev/null
+++ b/typeshed/stdlib/3/importlib/abc.pyi
@@ -0,0 +1,84 @@
+import abc
+from _importlib_modulespec import ModuleSpec
+import sys
+import types
+from typing import Any, Mapping, Optional, Sequence, Tuple, Union
+
+_Path = Union[bytes, str]
+
+# Loader is exported from this module, but for circular import reasons
+# exists in its own stub file (with ModuleSpec and ModuleType).
+from _importlib_modulespec import Loader as Loader  # Exported
+
+class Finder(metaclass=abc.ABCMeta): ...
+    # Technically this class defines the following method, but its subclasses
+    # in this module violate its signature. Since this class is deprecated, it's
+    # easier to simply ignore that this method exists.
+    #@abc.abstractmethod
+    #def find_module(self, fullname: str,
+    #                path: Sequence[_Path] = None) -> Optional[Loader]: ...
+
+class ResourceLoader(Loader):
+    @abc.abstractmethod
+    def get_data(self, path: _Path) -> bytes: ...
+
+class InspectLoader(Loader):
+    def is_package(self, fullname: str) -> bool: ...
+    def get_code(self, fullname: str) -> Optional[types.CodeType]: ...
+    def load_module(self, fullname: str) -> types.ModuleType: ...
+    @abc.abstractmethod
+    def get_source(self, fullname: str) -> Optional[str]: ...
+    if sys.version_info >= (3, 4):
+        def exec_module(self, module: types.ModuleType) -> None: ...
+    if sys.version_info == (3, 4):
+        def source_to_code(self, data: Union[bytes, str],
+                           path: str = '<string>') -> types.CodeType: ...
+    elif sys.version_info >= (3, 5):
+        @staticmethod
+        def source_to_code(data: Union[bytes, str],
+                           path: str = '<string>') -> types.CodeType: ...
+
+class ExecutionLoader(InspectLoader):
+    @abc.abstractmethod
+    def get_filename(self, fullname: str) -> _Path: ...
+    def get_code(self, fullname: str) -> Optional[types.CodeType]: ...
+
+class SourceLoader(ResourceLoader, ExecutionLoader):
+    def path_mtime(self, path: _Path) -> Union[int, float]: ...
+    def set_data(self, path: _Path, data: bytes) -> None: ...
+    def get_source(self, fullname: str) -> Optional[str]: ...
+    if sys.version_info >= (3, 3):
+        def path_stats(self, path: _Path) -> Mapping[str, Any]: ...
+
+
+if sys.version_info >= (3, 3):
+    class MetaPathFinder(Finder):
+        def find_module(self, fullname: str,
+                        path: Optional[Sequence[_Path]]) -> Optional[Loader]:
+            ...
+        def invalidate_caches(self) -> None: ...
+        if sys.version_info >= (3, 4):
+            # Not defined on the actual class, but expected to exist.
+            def find_spec(self, fullname: str, path: Optional[Sequence[_Path]],
+                          target: types.ModuleType = None
+                         ) -> Optional[ModuleSpec]:
+                ...
+
+    class PathEntryFinder(Finder):
+        def find_module(self, fullname: str) -> Optional[Loader]: ...
+        def find_loader(self, fullname: str
+                       ) -> Tuple[Optional[Loader], Sequence[_Path]]: ...
+        def invalidate_caches(self) -> None: ...
+        if sys.version_info >= (3, 4):
+            # Not defined on the actual class, but expected to exist.
+            def find_spec(self, fullname: str,
+                          target: types.ModuleType = None
+                         ) -> Optional[ModuleSpec]:
+                ...
+
+    class FileLoader(ResourceLoader, ExecutionLoader):
+        name = ... # type: str
+        path = ... # type: _Path
+        def __init__(self, fullname: str, path: _Path) -> None: ...
+        def get_data(self, path: _Path) -> bytes: ...
+        def get_filename(self, fullname: str) -> _Path: ...
diff --git a/typeshed/stdlib/3/importlib/machinery.pyi b/typeshed/stdlib/3/importlib/machinery.pyi
new file mode 100644
index 0000000..d50b4a5
--- /dev/null
+++ b/typeshed/stdlib/3/importlib/machinery.pyi
@@ -0,0 +1,124 @@
+import importlib.abc
+import sys
+import types
+from typing import Any, Callable, List, Optional, Sequence, Tuple, Union
+
+# ModuleSpec is exported from this module, but for circular import
+# reasons exists in its own stub file (with Loader and ModuleType).
+from _importlib_modulespec import ModuleSpec  # Exported
+
+class BuiltinImporter(importlib.abc.MetaPathFinder,
+                      importlib.abc.InspectLoader):
+    # MetaPathFinder
+    @classmethod
+    def find_module(cls, fullname: str,
+                    path: Optional[Sequence[importlib.abc._Path]]
+                   ) -> Optional[importlib.abc.Loader]:
+        ...
+    if sys.version_info >= (3, 4):
+        @classmethod
+        def find_spec(cls, fullname: str,
+                      path: Optional[Sequence[importlib.abc._Path]],
+                      target: types.ModuleType = None) -> Optional[ModuleSpec]:
+            ...
+    # InspectLoader
+    @classmethod
+    def is_package(cls, fullname: str) -> bool: ...
+    @classmethod
+    def load_module(cls, fullname: str) -> types.ModuleType: ...
+    @classmethod
+    def get_code(cls, fullname: str) -> None: ...  # type: ignore
+    @classmethod
+    def get_source(cls, fullname: str) -> None: ...  # type: ignore
+    # Loader
+    @classmethod
+    def load_module(cls, fullname: str) -> types.ModuleType: ...
+    if sys.version_info >= (3, 3):
+        @staticmethod
+        def module_repr(module: types.ModuleType) -> str: ...  # type: ignore
+    if sys.version_info >= (3, 4):
+        @classmethod
+        def create_module(cls, spec: ModuleSpec) -> Optional[types.ModuleType]:
+            ...
+        @classmethod
+        def exec_module(cls, module: types.ModuleType) -> None: ...
+
+class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader):
+    # MetaPathFinder
+    @classmethod
+    def find_module(cls, fullname: str,
+                    path: Optional[Sequence[importlib.abc._Path]]
+                   ) -> Optional[importlib.abc.Loader]:
+        ...
+    if sys.version_info >= (3, 4):
+        @classmethod
+        def find_spec(cls, fullname: str,
+                      path: Optional[Sequence[importlib.abc._Path]],
+                      target: types.ModuleType = None) -> Optional[ModuleSpec]:
+            ...
+    # InspectLoader
+    @classmethod
+    def is_package(cls, fullname: str) -> bool: ...
+    @classmethod
+    def load_module(cls, fullname: str) -> types.ModuleType: ...
+    @classmethod
+    def get_code(cls, fullname: str) -> None: ...  # type: ignore
+    @classmethod
+    def get_source(cls, fullname: str) -> None: ...  # type: ignore
+    # Loader
+    @classmethod
+    def load_module(cls, fullname: str) -> types.ModuleType: ...
+    if sys.version_info >= (3, 3):
+        @staticmethod
+        def module_repr(module: types.ModuleType) -> str: ...  # type: ignore
+    if sys.version_info >= (3, 4):
+        @classmethod
+        def create_module(cls, spec: ModuleSpec) -> Optional[types.ModuleType]:
+            ...
+        @staticmethod
+        def exec_module(module: types.ModuleType) -> None: ...  # type: ignore
+
+class WindowsRegistryFinder(importlib.abc.MetaPathFinder):
+    @classmethod
+    def find_module(cls, fullname: str,
+                    path: Optional[Sequence[importlib.abc._Path]]
+                   ) -> Optional[importlib.abc.Loader]:
+        ...
+    if sys.version_info >= (3, 4):
+        @classmethod
+        def find_spec(cls, fullname: str,
+                      path: Optional[Sequence[importlib.abc._Path]],
+                      target: types.ModuleType = None) -> Optional[ModuleSpec]:
+            ...
+
+class PathFinder(importlib.abc.MetaPathFinder): ...
+
+if sys.version_info >= (3, 3):
+    SOURCE_SUFFIXES = ... # type: List[str]
+    DEBUG_BYTECODE_SUFFIXES = ... # type: List[str]
+    OPTIMIZED_BYTECODE_SUFFIXES = ... # type: List[str]
+    BYTECODE_SUFFIXES = ... # type: List[str]
+    EXTENSION_SUFFIXES = ... # type: List[str]
+
+    def all_suffixes() -> List[str]: ...
+
+    class FileFinder(importlib.abc.PathEntryFinder):
+        path = ... # type: str
+        def __init__(self, path: str,
+                     *loader_details: Tuple[importlib.abc.Loader, List[str]]
+                    ) -> None: ...
+        @classmethod
+    def path_hook(cls, *loader_details: Tuple[importlib.abc.Loader, List[str]]
+                     ) -> Callable[[str], importlib.abc.PathEntryFinder]: ...
+
+    class SourceFileLoader(importlib.abc.FileLoader,
+                           importlib.abc.SourceLoader):
+        ...
+
+    class SourcelessFileLoader(importlib.abc.FileLoader,
+                               importlib.abc.SourceLoader):
+        ...
+
+    class ExtensionFileLoader(importlib.abc.ExecutionLoader):
+        def get_filename(self, fullname: str) -> importlib.abc._Path: ...
+        def get_source(self, fullname: str) -> None: ... # type: ignore
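
For reference, the module-level suffix constants and all_suffixes() stubbed above can be inspected directly (the printed values are typical, not guaranteed):

    import importlib.machinery

    print(importlib.machinery.SOURCE_SUFFIXES)    # typically ['.py']
    print(importlib.machinery.BYTECODE_SUFFIXES)  # typically ['.pyc']
    print(importlib.machinery.all_suffixes())     # source + bytecode + extension suffixes
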
diff --git a/typeshed/stdlib/3/importlib/util.pyi b/typeshed/stdlib/3/importlib/util.pyi
new file mode 100644
index 0000000..6928035
--- /dev/null
+++ b/typeshed/stdlib/3/importlib/util.pyi
@@ -0,0 +1,47 @@
+import importlib.abc
+import importlib.machinery
+import sys
+import types
+from typing import Any, Callable, List, Optional
+
+def module_for_loader(fxn: Callable[..., types.ModuleType]
+                     ) -> Callable[..., types.ModuleType]: ...
+def set_loader(fxn: Callable[..., types.ModuleType]
+              ) -> Callable[..., types.ModuleType]: ...
+def set_package(fxn: Callable[..., types.ModuleType]
+               ) -> Callable[..., types.ModuleType]: ...
+
+if sys.version_info >= (3, 3):
+    def resolve_name(name: str, package: str) -> str: ...
+
+if sys.version_info >= (3, 4):
+    MAGIC_NUMBER = ... # type: bytes
+
+    def cache_from_source(path: str, debug_override: bool = None, *,
+                          optimization: Any = None) -> str: ...
+    def source_from_cache(path: str) -> str: ...
+    def decode_source(source_bytes: bytes) -> str: ...
+    def find_spec(name: str, package: str = None
+                 ) -> importlib.machinery.ModuleSpec: ...
+    def spec_from_loader(name: str, loader: Optional[importlib.abc.Loader], *,
+                         origin: str = None, loader_state: Any = None,
+                         is_package: bool = None
+                        ) -> importlib.machinery.ModuleSpec: ...
+    def spec_from_file_location(name: str, location: str, *,
+                                loader: importlib.abc.Loader = None,
+                                submodule_search_locations: List[str]=None
+                               ) -> importlib.machinery.ModuleSpec: ...
+
+if sys.version_info >= (3, 5):
+    def module_from_spec(spec: importlib.machinery.ModuleSpec
+                        ) -> types.ModuleType: ...
+
+    class LazyLoader(importlib.abc.Loader):
+        def __init__(self, loader: importlib.abc.Loader) -> None: ...
+        @classmethod
+        def factory(cls, loader: importlib.abc.Loader
+                   ) -> Callable[..., 'LazyLoader']: ...
+        def create_module(self, spec: importlib.machinery.ModuleSpec
+                         ) -> Optional[types.ModuleType]:
+            ...
+        def exec_module(self, module: types.ModuleType) -> None: ...
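
For reference, the usual way these helpers fit together (the "config" name and "/tmp/config.py" path are placeholders; module_from_spec assumes Python 3.5+):

    import importlib.util

    spec = importlib.util.spec_from_file_location("config", "/tmp/config.py")
    config = importlib.util.module_from_spec(spec)   # 3.5+
    spec.loader.exec_module(config)                  # runs /tmp/config.py in the new module
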
diff --git a/typeshed/stdlib/3/io.pyi b/typeshed/stdlib/3/io.pyi
index 8a41482..7b915fc 100644
--- a/typeshed/stdlib/3/io.pyi
+++ b/typeshed/stdlib/3/io.pyi
@@ -132,7 +132,7 @@ class BytesIO(BinaryIO):
     # copied from BufferedIOBase
     def detach(self) -> RawIOBase: ...
     def readinto(self, b: bytearray) -> int: ...
-    def write(self, b: Union[bytes, bytearray]) -> Optional[int]: ...
+    def write(self, b: Union[bytes, bytearray]) -> int: ...
     if sys.version_info >= (3, 5):
         def readinto1(self, b: bytearray) -> int: ...
     if sys.version_info >= (3, 4):
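
The annotation change above reflects that BytesIO.write() always reports the number of bytes written:

    import io

    buf = io.BytesIO()
    n = buf.write(b"abc")   # n == 3, never None
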
diff --git a/typeshed/stdlib/3/json.pyi b/typeshed/stdlib/3/json.pyi
index 31a8120..a706f27 100644
--- a/typeshed/stdlib/3/json.pyi
+++ b/typeshed/stdlib/3/json.pyi
@@ -1,6 +1,6 @@
 from typing import Any, IO, Optional, Tuple, Callable, Dict, List, Union
 
-class JSONDecodeError(object):
+class JSONDecodeError(ValueError):
     def dumps(self, obj: Any) -> str: ...
     def dump(self, obj: Any, fp: IO[str], *args: Any, **kwds: Any) -> None: ...
     def loads(self, s: str) -> Any: ...
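
Deriving JSONDecodeError from ValueError matches CPython 3.5+, where existing ValueError handlers keep catching parse errors:

    import json

    try:
        json.loads("{not valid json")
    except ValueError as exc:   # also catches json.JSONDecodeError on 3.5+
        print("invalid JSON:", exc)
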
diff --git a/typeshed/stdlib/3/logging/__init__.pyi b/typeshed/stdlib/3/logging/__init__.pyi
deleted file mode 100644
index 4a58102..0000000
--- a/typeshed/stdlib/3/logging/__init__.pyi
+++ /dev/null
@@ -1,239 +0,0 @@
-# Stubs for logging (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-CRITICAL = ...  # type: Any
-FATAL = ...  # type: Any
-ERROR = ...  # type: Any
-WARNING = ...  # type: Any
-WARN = ...  # type: Any
-INFO = ...  # type: Any
-DEBUG = ...  # type: Any
-NOTSET = ...  # type: Any
-
-def getLevelName(level): ...
-def addLevelName(level, levelName): ...
-
-class LogRecord:
-    name = ...  # type: Any
-    msg = ...  # type: Any
-    args = ...  # type: Any
-    levelname = ...  # type: Any
-    levelno = ...  # type: Any
-    pathname = ...  # type: Any
-    filename = ...  # type: Any
-    module = ...  # type: Any
-    exc_info = ...  # type: Any
-    exc_text = ...  # type: Any
-    stack_info = ...  # type: Any
-    lineno = ...  # type: Any
-    funcName = ...  # type: Any
-    created = ...  # type: Any
-    msecs = ...  # type: Any
-    relativeCreated = ...  # type: Any
-    thread = ...  # type: Any
-    threadName = ...  # type: Any
-    processName = ...  # type: Any
-    process = ...  # type: Any
-    def __init__(self, name, level, pathname, lineno, msg, args, exc_info, func=..., sinfo=...,
-                 **kwargs): ...
-    def getMessage(self): ...
-
-def setLogRecordFactory(factory): ...
-def getLogRecordFactory(): ...
-def makeLogRecord(dict): ...
-
-class PercentStyle:
-    default_format = ...  # type: Any
-    asctime_format = ...  # type: Any
-    asctime_search = ...  # type: Any
-    def __init__(self, fmt) -> None: ...
-    def usesTime(self): ...
-    def format(self, record): ...
-
-class StrFormatStyle(PercentStyle):
-    default_format = ...  # type: Any
-    asctime_format = ...  # type: Any
-    asctime_search = ...  # type: Any
-    def format(self, record): ...
-
-class StringTemplateStyle(PercentStyle):
-    default_format = ...  # type: Any
-    asctime_format = ...  # type: Any
-    asctime_search = ...  # type: Any
-    def __init__(self, fmt) -> None: ...
-    def usesTime(self): ...
-    def format(self, record): ...
-
-BASIC_FORMAT = ...  # type: Any
-
-class Formatter:
-    converter = ...  # type: Any
-    datefmt = ...  # type: Any
-    def __init__(self, fmt=..., datefmt=..., style=...) -> None: ...
-    default_time_format = ...  # type: Any
-    default_msec_format = ...  # type: Any
-    def formatTime(self, record, datefmt=...): ...
-    def formatException(self, ei): ...
-    def usesTime(self): ...
-    def formatMessage(self, record): ...
-    def formatStack(self, stack_info): ...
-    def format(self, record): ...
-
-class BufferingFormatter:
-    linefmt = ...  # type: Any
-    def __init__(self, linefmt=...) -> None: ...
-    def formatHeader(self, records): ...
-    def formatFooter(self, records): ...
-    def format(self, records): ...
-
-class Filter:
-    name = ...  # type: Any
-    nlen = ...  # type: Any
-    def __init__(self, name=...) -> None: ...
-    def filter(self, record): ...
-
-class Filterer:
-    filters = ...  # type: Any
-    def __init__(self) -> None: ...
-    def addFilter(self, filter): ...
-    def removeFilter(self, filter): ...
-    def filter(self, record): ...
-
-class Handler(Filterer):
-    level = ...  # type: Any
-    formatter = ...  # type: Any
-    def __init__(self, level=...) -> None: ...
-    def get_name(self): ...
-    def set_name(self, name): ...
-    name = ...  # type: Any
-    lock = ...  # type: Any
-    def createLock(self): ...
-    def acquire(self): ...
-    def release(self): ...
-    def setLevel(self, level): ...
-    def format(self, record): ...
-    def emit(self, record): ...
-    def handle(self, record): ...
-    def setFormatter(self, fmt): ...
-    def flush(self): ...
-    def close(self): ...
-    def handleError(self, record): ...
-
-class StreamHandler(Handler):
-    terminator = ...  # type: Any
-    stream = ...  # type: Any
-    def __init__(self, stream=...) -> None: ...
-    def flush(self): ...
-    def emit(self, record): ...
-
-class FileHandler(StreamHandler):
-    baseFilename = ...  # type: Any
-    mode = ...  # type: Any
-    encoding = ...  # type: Any
-    delay = ...  # type: Any
-    stream = ...  # type: Any
-    def __init__(self, filename, mode=..., encoding=..., delay=...) -> None: ...
-    def close(self): ...
-    def emit(self, record): ...
-
-class _StderrHandler(StreamHandler):
-    def __init__(self, level=...) -> None: ...
-
-lastResort = ...  # type: Any
-
-class PlaceHolder:
-    loggerMap = ...  # type: Any
-    def __init__(self, alogger) -> None: ...
-    def append(self, alogger): ...
-
-def setLoggerClass(klass): ...
-def getLoggerClass(): ...
-
-class Manager:
-    root = ...  # type: Any
-    disable = ...  # type: Any
-    emittedNoHandlerWarning = ...  # type: Any
-    loggerDict = ...  # type: Any
-    loggerClass = ...  # type: Any
-    logRecordFactory = ...  # type: Any
-    def __init__(self, rootnode) -> None: ...
-    def getLogger(self, name): ...
-    def setLoggerClass(self, klass): ...
-    def setLogRecordFactory(self, factory): ...
-
-class Logger(Filterer):
-    name = ...  # type: Any
-    level = ...  # type: Any
-    parent = ...  # type: Any
-    propagate = ...  # type: Any
-    handlers = ...  # type: Any
-    disabled = ...  # type: Any
-    def __init__(self, name, level=...) -> None: ...
-    def setLevel(self, level): ...
-    def debug(self, msg, *args, **kwargs): ...
-    def info(self, msg, *args, **kwargs): ...
-    def warning(self, msg, *args, **kwargs): ...
-    def warn(self, msg, *args, **kwargs): ...
-    def error(self, msg, *args, **kwargs): ...
-    def exception(self, msg, *args, **kwargs): ...
-    def critical(self, msg, *args, **kwargs): ...
-    fatal = ...  # type: Any
-    def log(self, level, msg, *args, **kwargs): ...
-    def findCaller(self, stack_info=...): ...
-    def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=..., extra=...,
-                   sinfo=...): ...
-    def handle(self, record): ...
-    def addHandler(self, hdlr): ...
-    def removeHandler(self, hdlr): ...
-    def hasHandlers(self): ...
-    def callHandlers(self, record): ...
-    def getEffectiveLevel(self): ...
-    def isEnabledFor(self, level): ...
-    def getChild(self, suffix): ...
-
-class RootLogger(Logger):
-    def __init__(self, level) -> None: ...
-
-class LoggerAdapter:
-    logger = ...  # type: Any
-    extra = ...  # type: Any
-    def __init__(self, logger, extra) -> None: ...
-    def process(self, msg, kwargs): ...
-    def debug(self, msg, *args, **kwargs): ...
-    def info(self, msg, *args, **kwargs): ...
-    def warning(self, msg, *args, **kwargs): ...
-    def warn(self, msg, *args, **kwargs): ...
-    def error(self, msg, *args, **kwargs): ...
-    def exception(self, msg, *args, **kwargs): ...
-    def critical(self, msg, *args, **kwargs): ...
-    def log(self, level, msg, *args, **kwargs): ...
-    def isEnabledFor(self, level): ...
-    def setLevel(self, level): ...
-    def getEffectiveLevel(self): ...
-    def hasHandlers(self): ...
-
-def basicConfig(**kwargs): ...
-def getLogger(name=...): ...
-def critical(msg, *args, **kwargs): ...
-
-fatal = ...  # type: Any
-
-def error(msg, *args, **kwargs): ...
-def exception(msg, *args, **kwargs): ...
-def warning(msg, *args, **kwargs): ...
-def warn(msg, *args, **kwargs): ...
-def info(msg, *args, **kwargs): ...
-def debug(msg, *args, **kwargs): ...
-def log(level, msg, *args, **kwargs): ...
-def disable(level): ...
-
-class NullHandler(Handler):
-    def handle(self, record): ...
-    def emit(self, record): ...
-    lock = ...  # type: Any
-    def createLock(self): ...
-
-def captureWarnings(capture): ...
diff --git a/typeshed/stdlib/3/logging/handlers.pyi b/typeshed/stdlib/3/logging/handlers.pyi
deleted file mode 100644
index 458044e..0000000
--- a/typeshed/stdlib/3/logging/handlers.pyi
+++ /dev/null
@@ -1,200 +0,0 @@
-# Stubs for logging.handlers (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-import logging
-
-threading = ...  # type: Any
-DEFAULT_TCP_LOGGING_PORT = ...  # type: Any
-DEFAULT_UDP_LOGGING_PORT = ...  # type: Any
-DEFAULT_HTTP_LOGGING_PORT = ...  # type: Any
-DEFAULT_SOAP_LOGGING_PORT = ...  # type: Any
-SYSLOG_UDP_PORT = ...  # type: Any
-SYSLOG_TCP_PORT = ...  # type: Any
-
-class BaseRotatingHandler(logging.FileHandler):
-    mode = ...  # type: Any
-    encoding = ...  # type: Any
-    namer = ...  # type: Any
-    rotator = ...  # type: Any
-    def __init__(self, filename, mode, encoding=..., delay=...) -> None: ...
-    def emit(self, record): ...
-    def rotation_filename(self, default_name): ...
-    def rotate(self, source, dest): ...
-
-class RotatingFileHandler(BaseRotatingHandler):
-    maxBytes = ...  # type: Any
-    backupCount = ...  # type: Any
-    def __init__(self, filename, mode=..., maxBytes=..., backupCount=..., encoding=...,
-                 delay=...): ...
-    stream = ...  # type: Any
-    def doRollover(self): ...
-    def shouldRollover(self, record): ...
-
-class TimedRotatingFileHandler(BaseRotatingHandler):
-    when = ...  # type: Any
-    backupCount = ...  # type: Any
-    utc = ...  # type: Any
-    atTime = ...  # type: Any
-    interval = ...  # type: Any
-    suffix = ...  # type: Any
-    extMatch = ...  # type: Any
-    dayOfWeek = ...  # type: Any
-    rolloverAt = ...  # type: Any
-    def __init__(self, filename, when=..., interval=..., backupCount=..., encoding=..., delay=...,
-                 utc=..., atTime=...): ...
-    def computeRollover(self, currentTime): ...
-    def shouldRollover(self, record): ...
-    def getFilesToDelete(self): ...
-    stream = ...  # type: Any
-    def doRollover(self): ...
-
-class WatchedFileHandler(logging.FileHandler):
-    def __init__(self, filename, mode=..., encoding=..., delay=...) -> None: ...
-    stream = ...  # type: Any
-    def emit(self, record): ...
-
-class SocketHandler(logging.Handler):
-    host = ...  # type: Any
-    port = ...  # type: Any
-    address = ...  # type: Any
-    sock = ...  # type: Any
-    closeOnError = ...  # type: Any
-    retryTime = ...  # type: Any
-    retryStart = ...  # type: Any
-    retryMax = ...  # type: Any
-    retryFactor = ...  # type: Any
-    def __init__(self, host, port) -> None: ...
-    def makeSocket(self, timeout=...): ...
-    retryPeriod = ...  # type: Any
-    def createSocket(self): ...
-    def send(self, s): ...
-    def makePickle(self, record): ...
-    def handleError(self, record): ...
-    def emit(self, record): ...
-    def close(self): ...
-
-class DatagramHandler(SocketHandler):
-    closeOnError = ...  # type: Any
-    def __init__(self, host, port) -> None: ...
-    def makeSocket(self, timeout=...): ... # TODO: Actually does not have the timeout argument.
-    def send(self, s): ...
-
-class SysLogHandler(logging.Handler):
-    LOG_EMERG = ...  # type: Any
-    LOG_ALERT = ...  # type: Any
-    LOG_CRIT = ...  # type: Any
-    LOG_ERR = ...  # type: Any
-    LOG_WARNING = ...  # type: Any
-    LOG_NOTICE = ...  # type: Any
-    LOG_INFO = ...  # type: Any
-    LOG_DEBUG = ...  # type: Any
-    LOG_KERN = ...  # type: Any
-    LOG_USER = ...  # type: Any
-    LOG_MAIL = ...  # type: Any
-    LOG_DAEMON = ...  # type: Any
-    LOG_AUTH = ...  # type: Any
-    LOG_SYSLOG = ...  # type: Any
-    LOG_LPR = ...  # type: Any
-    LOG_NEWS = ...  # type: Any
-    LOG_UUCP = ...  # type: Any
-    LOG_CRON = ...  # type: Any
-    LOG_AUTHPRIV = ...  # type: Any
-    LOG_FTP = ...  # type: Any
-    LOG_LOCAL0 = ...  # type: Any
-    LOG_LOCAL1 = ...  # type: Any
-    LOG_LOCAL2 = ...  # type: Any
-    LOG_LOCAL3 = ...  # type: Any
-    LOG_LOCAL4 = ...  # type: Any
-    LOG_LOCAL5 = ...  # type: Any
-    LOG_LOCAL6 = ...  # type: Any
-    LOG_LOCAL7 = ...  # type: Any
-    priority_names = ...  # type: Any
-    facility_names = ...  # type: Any
-    priority_map = ...  # type: Any
-    address = ...  # type: Any
-    facility = ...  # type: Any
-    socktype = ...  # type: Any
-    unixsocket = ...  # type: Any
-    socket = ...  # type: Any
-    formatter = ...  # type: Any
-    def __init__(self, address=..., facility=..., socktype=...) -> None: ...
-    def encodePriority(self, facility, priority): ...
-    def close(self): ...
-    def mapPriority(self, levelName): ...
-    ident = ...  # type: Any
-    append_nul = ...  # type: Any
-    def emit(self, record): ...
-
-class SMTPHandler(logging.Handler):
-    username = ...  # type: Any
-    fromaddr = ...  # type: Any
-    toaddrs = ...  # type: Any
-    subject = ...  # type: Any
-    secure = ...  # type: Any
-    timeout = ...  # type: Any
-    def __init__(self, mailhost, fromaddr, toaddrs, subject, credentials=..., secure=...,
-                 timeout=...): ...
-    def getSubject(self, record): ...
-    def emit(self, record): ...
-
-class NTEventLogHandler(logging.Handler):
-    appname = ...  # type: Any
-    dllname = ...  # type: Any
-    logtype = ...  # type: Any
-    deftype = ...  # type: Any
-    typemap = ...  # type: Any
-    def __init__(self, appname, dllname=..., logtype=...) -> None: ...
-    def getMessageID(self, record): ...
-    def getEventCategory(self, record): ...
-    def getEventType(self, record): ...
-    def emit(self, record): ...
-    def close(self): ...
-
-class HTTPHandler(logging.Handler):
-    host = ...  # type: Any
-    url = ...  # type: Any
-    method = ...  # type: Any
-    secure = ...  # type: Any
-    credentials = ...  # type: Any
-    def __init__(self, host, url, method=..., secure=..., credentials=...) -> None: ...
-    def mapLogRecord(self, record): ...
-    def emit(self, record): ...
-
-class BufferingHandler(logging.Handler):
-    capacity = ...  # type: Any
-    buffer = ...  # type: Any
-    def __init__(self, capacity) -> None: ...
-    def shouldFlush(self, record): ...
-    def emit(self, record): ...
-    def flush(self): ...
-    def close(self): ...
-
-class MemoryHandler(BufferingHandler):
-    flushLevel = ...  # type: Any
-    target = ...  # type: Any
-    def __init__(self, capacity, flushLevel=..., target=...) -> None: ...
-    def shouldFlush(self, record): ...
-    def setTarget(self, target): ...
-    buffer = ...  # type: Any
-    def flush(self): ...
-    def close(self): ...
-
-class QueueHandler(logging.Handler):
-    queue = ...  # type: Any
-    def __init__(self, queue) -> None: ...
-    def enqueue(self, record): ...
-    def prepare(self, record): ...
-    def emit(self, record): ...
-
-class QueueListener:
-    queue = ...  # type: Any
-    handlers = ...  # type: Any
-    def __init__(self, queue, *handlers) -> None: ...
-    def dequeue(self, block): ...
-    def start(self): ...
-    def prepare(self, record): ...
-    def handle(self, record): ...
-    def enqueue_sentinel(self): ...
-    def stop(self): ...
diff --git a/typeshed/stdlib/3/multiprocessing/__init__.pyi b/typeshed/stdlib/3/multiprocessing/__init__.pyi
index f3b5bb3..ff78662 100644
--- a/typeshed/stdlib/3/multiprocessing/__init__.pyi
+++ b/typeshed/stdlib/3/multiprocessing/__init__.pyi
@@ -1,15 +1,41 @@
 # Stubs for multiprocessing
 
-from typing import Any
+from typing import Any, Callable, Iterable, Mapping
 
-class Lock(): ...
-class Process(): ...
+class Lock():
+    def acquire(self, block: bool = ..., timeout: int = ...) -> None: ...
+    def release(self) -> None: ...
+
+class Process():
+    # TODO: set type of group to None
+    def __init__(self,
+                 group: Any = ...,
+                 target: Callable = ...,
+                 name: str = ...,
+                 args: Iterable[Any] = ...,
+                 kwargs: Mapping[Any, Any] = ...,
+                 daemon: bool = ...) -> None: ...
+    def start(self) -> None: ...
+    def run(self) -> None: ...
+    def terminate(self) -> None: ...
+    def is_alive(self) -> bool: ...
+    def join(self, timeout: float = ...) -> None: ...
 
 class Queue():
-    def get(block: bool = ..., timeout: float = ...) -> Any: ...
+    def __init__(self, maxsize: int = ...) -> None: ...
+    def get(self, block: bool = ..., timeout: float = ...) -> Any: ...
+    def put(self, item: Any, block: bool = ..., timeout: float = ...) -> None: ...
+    def qsize(self) -> int: ...
+    def empty(self) -> bool: ...
+    def full(self) -> bool: ...
+    def put_nowait(self, item: Any) -> None: ...
+    def get_nowait(self) -> Any: ...
+    def close(self) -> None: ...
+    def join_thread(self) -> None: ...
+    def cancel_join_thread(self) -> None: ...
 
 class Value():
-    def __init__(typecode_or_type: str, *args: Any, lock: bool = ...) -> None: ...
+    def __init__(self, typecode_or_type: str, *args: Any, lock: bool = ...) -> None: ...
 
 # ----- multiprocessing function stubs -----
 def cpu_count() -> int: ...
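
For reference, a small usage sketch matching the Process/Queue signatures stubbed above (the worker function is a placeholder):

    import multiprocessing

    def worker(q):
        q.put("done")

    if __name__ == "__main__":
        q = multiprocessing.Queue(maxsize=1)
        p = multiprocessing.Process(target=worker, args=(q,))
        p.start()
        print(q.get(block=True, timeout=5))   # -> 'done'
        p.join()
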
diff --git a/typeshed/stdlib/3/opcode.pyi b/typeshed/stdlib/3/opcode.pyi
new file mode 100644
index 0000000..e577a9f
--- /dev/null
+++ b/typeshed/stdlib/3/opcode.pyi
@@ -0,0 +1,18 @@
+from typing import List, Dict, Sequence
+
+cmp_op = ...  # type: Sequence[str]
+hasconst = ...  # type: List[int]
+hasname = ...  # type: List[int]
+hasjrel = ...  # type: List[int]
+hasjabs = ...  # type: List[int]
+haslocal = ...  # type: List[int]
+hascompare = ...  # type: List[int]
+hasfree = ...  # type: List[int]
+opname = ...  # type: List[str]
+
+opmap = ...  # type: Dict[str, int]
+HAVE_ARGUMENT = ...  # type: int
+EXTENDED_ARG = ...  # type: int
+hasnargs = ...  # type: List[int]
+
+def stack_effect(opcode: int, oparg: int = ...) -> int: ...
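
For reference, typical use of the tables and of stack_effect() stubbed above (stack_effect requires Python 3.4+):

    import opcode

    op = opcode.opmap["LOAD_CONST"]
    print(opcode.opname[op])            # 'LOAD_CONST'
    print(op in opcode.hasconst)        # True
    print(opcode.stack_effect(op, 0))   # 1: pushes one value
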
diff --git a/typeshed/stdlib/3/os/__init__.pyi b/typeshed/stdlib/3/os/__init__.pyi
index fdda65f..be9cbbd 100644
--- a/typeshed/stdlib/3/os/__init__.pyi
+++ b/typeshed/stdlib/3/os/__init__.pyi
@@ -5,7 +5,7 @@
 
 from typing import (
     Mapping, MutableMapping, Dict, List, Any, Tuple, Iterator, overload, Union, AnyStr,
-    Optional, Generic, Set
+    Optional, Generic, Set, Callable
 )
 import sys
 from builtins import OSError as error
@@ -15,9 +15,9 @@ import os.path as path
 
 supports_bytes_environ = False  # TODO: True when bytes implemented?
 
-SEEK_SET = 0 # type: int
-SEEK_CUR = 1 # type: int
-SEEK_END = 2 # type: int
+SEEK_SET = 0
+SEEK_CUR = 0
+SEEK_END = 0
 
 O_RDONLY = 0
 O_WRONLY = 0
@@ -56,6 +56,7 @@ pathsep = ...  # type: str
 defpath = ...  # type: str
 linesep = ...  # type: str
 devnull = ...  # type: str
+name = ... # type: str
 
 F_OK = 0
 R_OK = 0
@@ -123,7 +124,8 @@ class stat_result:
     st_ctime = 0.0 # platform dependent (time of most recent metadata change
                    # on  Unix, or the time of creation on Windows)
 
-    def __init__(self, tuple) -> None: ...
+    # not documented
+    def __init__(self, tuple: Tuple[int, ...]) -> None: ...
 
     # On some Unix systems (such as Linux), the following attributes may also
     # be available:
@@ -155,10 +157,9 @@ class statvfs_result:  # Unix only
     f_namemax = 0
 
 # ----- os function stubs -----
-def name() -> str: ...
 def fsencode(filename: str) -> bytes: ...
 def fsdecode(filename: bytes) -> str: ...
-def get_exec_path(env=...) -> List[str] : ...
+def get_exec_path(env: Optional[Mapping[str, str]] = ...) -> List[str] : ...
 # NOTE: get_exec_path(): returns List[bytes] when env not None
 def ctermid() -> str: ...  # Unix only
 def getegid() -> int: ...  # Unix only
@@ -190,20 +191,20 @@ def setresuid(ruid: int, euid: int, suid: int) -> None: ...  # Unix only
 def setreuid(ruid: int, euid: int) -> None: ...  # Unix only
 def getsid(pid: int) -> int: ...  # Unix only
 def setsid() -> int: ...  # Unix only
-def setuid(uid) -> None: ...  # Unix only
+def setuid(uid: int) -> None: ...  # Unix only
 def strerror(code: int) -> str: ...
 def umask(mask: int) -> int: ...
 def uname() -> Tuple[str, str, str, str, str]: ...  # Unix only
 def unsetenv(key: AnyStr) -> None: ...
 # Return IO or TextIO
-def fdopen(fd: int, mode: str = ..., encoding: str = ..., errors: str = ...,
-           newline: str = ..., closefd: bool = ...) -> Any: ...
+def fdopen(fd: int, mode: str = ..., buffering: int = ..., encoding: str = ...,
+           errors: str = ..., newline: str = ..., closefd: bool = ...) -> Any: ...
 def close(fd: int) -> None: ...
 def closerange(fd_low: int, fd_high: int) -> None: ...
 def device_encoding(fd: int) -> Optional[str]: ...
 def dup(fd: int) -> int: ...
 def dup2(fd: int, fd2: int) -> None: ...
-def fchmod(fd: int, intmode) -> None: ...  # Unix only
+def fchmod(fd: int, mode: int) -> None: ...  # Unix only
 def fchown(fd: int, uid: int, gid: int) -> None: ...  # Unix only
 def fdatasync(fd: int) -> None: ...  # Unix only, not Mac
 def fpathconf(fd: int, name: str) -> int: ...  # Unix only
@@ -241,7 +242,7 @@ def listdir(path: str = ...) -> List[str]: ...
 def listdir(path: bytes) -> List[bytes]: ...
 
 def lstat(path: AnyStr) -> stat_result: ...
-def mkfifo(path, mode: int=...) -> None: ...  # Unix only
+def mkfifo(path: str, mode: int = ...) -> None: ...  # Unix only
 def mknod(filename: AnyStr, mode: int = ..., device: int = ...) -> None: ...
 def major(device: int) -> int: ...
 def minor(device: int) -> int: ...
@@ -341,3 +342,13 @@ def waitid(idtype: int, id: int, options: int) -> waitresult: ...
 P_ALL = 0
 WEXITED = 0
 WNOWAIT = 0
+
+if sys.version_info >= (3, 3):
+    def sync() -> None: ...  # Unix only
+
+    def truncate(path: Union[AnyStr, int], length: int) -> None: ...  # Unix only up to version 3.4
+
+    def fwalk(top: AnyStr = ..., topdown: bool = ...,
+              onerror: Callable = ..., *, follow_symlinks: bool = ...,
+              dir_fd: int = ...) -> Iterator[Tuple[AnyStr, List[AnyStr],
+                                             List[AnyStr], int]]: ...  # Unix only
diff --git a/typeshed/stdlib/3/queue.pyi b/typeshed/stdlib/3/queue.pyi
index 15d62f5..90cd5fc 100644
--- a/typeshed/stdlib/3/queue.pyi
+++ b/typeshed/stdlib/3/queue.pyi
@@ -17,4 +17,4 @@ class Queue(Generic[_T]):
     def qsize(self) -> int: ...
     def task_done(self) -> None: pass
 
-class Empty: ...
+class Empty(Exception): ...
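
Making Empty an Exception subclass lets the usual pattern type-check:

    import queue

    q = queue.Queue()
    try:
        q.get_nowait()
    except queue.Empty:   # only legal in `except` because Empty derives from Exception
        print("queue was empty")
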
diff --git a/typeshed/stdlib/3/re.pyi b/typeshed/stdlib/3/re.pyi
index 8369133..31e3cad 100644
--- a/typeshed/stdlib/3/re.pyi
+++ b/typeshed/stdlib/3/re.pyi
@@ -7,7 +7,7 @@
 
 from typing import (
     List, Iterator, overload, Callable, Tuple, Sequence, Dict,
-    Generic, AnyStr, Match, Pattern
+    Generic, AnyStr, Match, Pattern, Any
 )
 
 # ----- re variables and constants -----
@@ -52,9 +52,9 @@ def split(pattern: Pattern[AnyStr], string: AnyStr,
           maxsplit: int = ..., flags: int = ...) -> List[AnyStr]: ...
 
 @overload
-def findall(pattern: AnyStr, string: AnyStr, flags: int = ...) -> List[AnyStr]: ...
+def findall(pattern: AnyStr, string: AnyStr, flags: int = ...) -> List[Any]: ...
 @overload
-def findall(pattern: Pattern[AnyStr], string: AnyStr, flags: int = ...) -> List[AnyStr]: ...
+def findall(pattern: Pattern[AnyStr], string: AnyStr, flags: int = ...) -> List[Any]: ...
 
 # Return an iterator yielding match objects over all non-overlapping matches
 # for the RE pattern in string. The string is scanned left-to-right, and
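
The widening to List[Any] above reflects that findall() returns tuples when the pattern has more than one group:

    import re

    re.findall(r"\d+", "a1 b22")            # ['1', '22']              (list of str)
    re.findall(r"(\d+)-(\d+)", "1-2 3-4")   # [('1', '2'), ('3', '4')] (list of tuples)
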
diff --git a/typeshed/stdlib/3/socketserver.pyi b/typeshed/stdlib/3/socketserver.pyi
index 56c5973..4cd6e2d 100644
--- a/typeshed/stdlib/3/socketserver.pyi
+++ b/typeshed/stdlib/3/socketserver.pyi
@@ -1,6 +1,6 @@
-# Stubs for socketserver (Python 3.4)
+# Stubs for socketserver
 
-from typing import Optional, Tuple
+from typing import BinaryIO, Optional, Tuple
 from socket import SocketType
 import sys
 import types
@@ -73,5 +73,10 @@ class BaseRequestHandler:
     def handle(self) -> None: ...
     def finish(self) -> None: ...
 
-class StreamRequestHandler(BaseRequestHandler): ...
-class DatagramRequestHandler(BaseRequestHandler): ...
+class StreamRequestHandler(BaseRequestHandler):
+    rfile = ...  # type: BinaryIO
+    wfile = ...  # type: BinaryIO
+
+class DatagramRequestHandler(BaseRequestHandler):
+    rfile = ...  # type: BinaryIO
+    wfile = ...  # type: BinaryIO
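
For reference, a handler sketch that uses the rfile/wfile attributes added above (EchoHandler is a hypothetical name):

    import socketserver

    class EchoHandler(socketserver.StreamRequestHandler):
        def handle(self):
            line = self.rfile.readline()   # BinaryIO, per the stub above
            self.wfile.write(line)

    # server = socketserver.TCPServer(("localhost", 9999), EchoHandler)
    # server.serve_forever()
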
diff --git a/typeshed/stdlib/3/subprocess.pyi b/typeshed/stdlib/3/subprocess.pyi
index 83d62cf..43934b5 100644
--- a/typeshed/stdlib/3/subprocess.pyi
+++ b/typeshed/stdlib/3/subprocess.pyi
@@ -4,21 +4,61 @@
 
 from typing import Sequence, Any, Mapping, Callable, Tuple, IO, Optional, Union
 
-# TODO force keyword arguments
-# TODO more keyword arguments
-def call(args: Union[str, Sequence[str]], *, stdin: Any = ..., stdout: Any = ...,
-         stderr: Any = ..., shell: bool = ...,
+# Same args as Popen.__init__
+def call(args: Union[str, Sequence[str]],
+         bufsize: int = ...,
+         executable: str = ...,
+         stdin: Any = ...,
+         stdout: Any = ...,
+         stderr: Any = ...,
+         preexec_fn: Callable[[], Any] = ...,
+         close_fds: bool = ...,
+         shell: bool = ...,
+         cwd: str = ...,
          env: Mapping[str, str] = ...,
-         cwd: str = ...) -> int: ...
-def check_call(args: Union[str, Sequence[str]], *, stdin: Any = ..., stdout: Any = ...,
-               stderr: Any = ..., shell: bool = ...,
+         universal_newlines: bool = ...,
+         startupinfo: Any = ...,
+         creationflags: int = ...,
+         restore_signals: bool = ...,
+         start_new_session: bool = ...,
+         pass_fds: Any = ...) -> int: ...
+
+# Same args as Popen.__init__
+def check_call(args: Union[str, Sequence[str]],
+               bufsize: int = ...,
+               executable: str = ...,
+               stdin: Any = ...,
+               stdout: Any = ...,
+               stderr: Any = ...,
+               preexec_fn: Callable[[], Any] = ...,
+               close_fds: bool = ...,
+               shell: bool = ...,
+               cwd: str = ...,
                env: Mapping[str, str] = ...,
-               cwd: str = ...) -> int: ...
-# Return str/bytes
-def check_output(args: Union[str, Sequence[str]], *, stdin: Any = ..., stderr: Any = ...,
-                 shell: bool = ..., universal_newlines: bool = ...,
+               universal_newlines: bool = ...,
+               startupinfo: Any = ...,
+               creationflags: int = ...,
+               restore_signals: bool = ...,
+               start_new_session: bool = ...,
+               pass_fds: Any = ...) -> int: ...
+
+# Same args as Popen.__init__, except for stdout
+def check_output(args: Union[str, Sequence[str]],
+                 bufsize: int = ...,
+                 executable: str = ...,
+                 stdin: Any = ...,
+                 stderr: Any = ...,
+                 preexec_fn: Callable[[], Any] = ...,
+                 close_fds: bool = ...,
+                 shell: bool = ...,
+                 cwd: str = ...,
                  env: Mapping[str, str] = ...,
-                 cwd: str = ...) -> Any: ...
+                 universal_newlines: bool = ...,
+                 startupinfo: Any = ...,
+                 creationflags: int = ...,
+                 restore_signals: bool = ...,
+                 start_new_session: bool = ...,
+                 pass_fds: Any = ...) -> Any: ...
 
 # TODO types
 PIPE = ... # type: Any
@@ -29,7 +69,7 @@ class CalledProcessError(Exception):
     cmd = ...  # type: str
     output = b'' # May be None
 
-    def __init__(self, returncode: int, cmd: str, output: Optional[str],
+    def __init__(self, returncode: int, cmd: str, output: Optional[str] = ...,
                  stderr: Optional[str] = ...) -> None: ...
 
 class Popen:
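
For reference, calls that exercise the expanded keyword signatures above (assumes a Unix-like system for the example commands):

    import subprocess

    rc = subprocess.call(["ls", "-l"], cwd="/tmp")              # exit status as int
    out = subprocess.check_output(["echo", "hi"],
                                  universal_newlines=True)      # 'hi\n' as str
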
diff --git a/typeshed/stdlib/3/sys.pyi b/typeshed/stdlib/3/sys.pyi
index accaadf..e65d792 100644
--- a/typeshed/stdlib/3/sys.pyi
+++ b/typeshed/stdlib/3/sys.pyi
@@ -117,7 +117,10 @@ def _current_frames() -> Dict[int, Any]: ...
 def displayhook(value: Optional[int]) -> None: ...
 def excepthook(type_: type, value: BaseException,
                traceback: TracebackType) -> None: ...
-def exc_info() -> Tuple[type, BaseException, TracebackType]: ...
+# TODO should be a union of tuple, see mypy#1178
+def exc_info() -> Tuple[Optional[type],
+                        Optional[BaseException],
+                        Optional[TracebackType]]: ...
 # sys.exit() accepts an optional argument of anything printable
 def exit(arg: Any = ...) -> None:
     raise SystemExit()
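
The Optional[...] triple above covers the no-active-exception case:

    import sys

    exc_type, exc_value, tb = sys.exc_info()
    if exc_type is None:                 # outside an except block all three are None
        print("no active exception")
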
diff --git a/typeshed/stdlib/3/token.pyi b/typeshed/stdlib/3/token.pyi
index 76a746f..a2a5b59 100644
--- a/typeshed/stdlib/3/token.pyi
+++ b/typeshed/stdlib/3/token.pyi
@@ -56,7 +56,7 @@ OP = 0
 ERRORTOKEN = 0
 N_TOKENS = 0
 NT_OFFSET = 0
-tok_name = {} # type: Dict[int, str]
+tok_name = ... # type: Dict[int, str]
 
 def ISTERMINAL(x: int) -> bool: pass
 def ISNONTERMINAL(x: int) -> bool: pass
diff --git a/typeshed/stdlib/3/traceback.pyi b/typeshed/stdlib/3/traceback.pyi
index 9d4d2fd..4abc031 100644
--- a/typeshed/stdlib/3/traceback.pyi
+++ b/typeshed/stdlib/3/traceback.pyi
@@ -5,8 +5,8 @@ from types import TracebackType
 import typing
 
 # TODO signatures
-def format_exception_only(etype, value): ...
-def format_exception(type: type, value: List[str], tb: TracebackType, limit: int, chain: bool) -> str: ...
+def format_exception_only(etype, value) -> List[str]: ...
+def format_exception(type: type, value: BaseException, tb: TracebackType, limit: int = ..., chain: bool = ...) -> List[str]: ...
 def format_tb(traceback): ...
 def print_exc(limit=..., file=..., chain=...): ...
 def format_exc(limit: int = ..., chain: bool = ...) -> str: ...
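
For reference, format_exception() returning a list of strings as annotated above:

    import traceback

    try:
        1 / 0
    except ZeroDivisionError as exc:
        lines = traceback.format_exception(type(exc), exc, exc.__traceback__)
        print("".join(lines))   # same text print_exc() would produce
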
diff --git a/typeshed/stdlib/3/types.pyi b/typeshed/stdlib/3/types.pyi
index 5e94ee8..1026cb1 100644
--- a/typeshed/stdlib/3/types.pyi
+++ b/typeshed/stdlib/3/types.pyi
@@ -3,9 +3,15 @@
 
 # TODO parts of this should be conditional on version
 
-from typing import Any, Callable, Dict, Iterator, Optional, Tuple, TypeVar, Union, overload
+import sys
+from typing import (
+    Any, Callable, Dict, Generic, Iterator, Mapping, Optional, Tuple, TypeVar,
+    Union, overload
+)
 
 _T = TypeVar('_T')
+_KT = TypeVar('_KT')
+_VT = TypeVar('_VT')
 
 class _Cell:
     cell_contents = ...  # type: Any
@@ -57,16 +63,12 @@ class CodeType:
             cellvars: Tuple[str, ...] = ...,
     ) -> None: ...
 
-class MappingProxyType:
-    def copy(self) -> dict: ...
-    def get(self, key: str, default: _T = ...) -> Union[Any, _T]: ...
-    def items(self) -> Iterator[Tuple[str, Any]]: ...
-    def keys(self) -> Iterator[str]: ...
-    def values(self) -> Iterator[Any]: ...
-    def __contains__(self, key: str) -> bool: ...
-    def __getitem__(self, key: str) -> Any: ...
-    def __iter__(self) -> Iterator[str]: ...
+class MappingProxyType(Mapping[_KT, _VT], Generic[_KT, _VT]):
+    def __init__(self, mapping: Mapping[_KT, _VT]) -> None: ...
+    def __getitem__(self, k: _KT) -> _VT: ...
+    def __iter__(self) -> Iterator[_KT]: ...
     def __len__(self) -> int: ...
+
 class SimpleNamespace(Any): ...
 
 class GeneratorType:
@@ -104,10 +106,9 @@ class BuiltinFunctionType:
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
 BuiltinMethodType = BuiltinFunctionType
 
-class ModuleType:
-    __name__ = ... # type: str
-    __file__ = ... # type: str
-    def __init__(self, name: str, doc: Any) -> None: ...
+# ModuleType is exported from this module, but for circular import
+# reasons exists in its own stub file (with ModuleSpec and Loader).
+from _importlib_modulespec import ModuleType as ModuleType  # Exported
 
 class TracebackType:
     tb_frame = ... # type: FrameType
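
Making MappingProxyType a generic Mapping matches its read-only-view behaviour:

    import types

    proxy = types.MappingProxyType({"a": 1})   # behaves as Mapping[str, int]
    print(proxy["a"])                          # 1
    print(list(proxy))                         # ['a']
    # item assignment raises TypeError: the proxy is read-only
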
diff --git a/typeshed/stdlib/3/typing.pyi b/typeshed/stdlib/3/typing.pyi
index 33235c7..4492424 100644
--- a/typeshed/stdlib/3/typing.pyi
+++ b/typeshed/stdlib/3/typing.pyi
@@ -254,6 +254,8 @@ class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]):
 
 Text = str
 
+TYPE_CHECKING = True
+
 class IO(Iterable[AnyStr], Generic[AnyStr]):
     # TODO detach
     # TODO use abstract properties
@@ -377,7 +379,7 @@ class Pattern(Generic[AnyStr]):
               endpos: int = ...) -> Match[AnyStr]: ...
     def split(self, string: AnyStr, maxsplit: int = ...) -> list[AnyStr]: ...
     def findall(self, string: AnyStr, pos: int = ...,
-                endpos: int = ...) -> list[AnyStr]: ...
+                endpos: int = ...) -> list[Any]: ...
     def finditer(self, string: AnyStr, pos: int = ...,
                  endpos: int = ...) -> Iterator[Match[AnyStr]]: ...
 
diff --git a/typeshed/stdlib/3/unittest.pyi b/typeshed/stdlib/3/unittest.pyi
index 2669999..41fe554 100644
--- a/typeshed/stdlib/3/unittest.pyi
+++ b/typeshed/stdlib/3/unittest.pyi
@@ -1,167 +1,329 @@
-# Stubs for unittest
-
-# Based on http://docs.python.org/3.0/library/unittest.html
-
-# NOTE: These stubs are based on the 3.0 version API, since later versions
-#       would require featurs not supported currently by mypy.
-
-# Only a subset of functionality is included.
+# Stubs for unittest
 
 from typing import (
-    Any, Callable, Iterable, Tuple, List, TextIO, Sequence,
-    overload, TypeVar, Pattern
+    Any, Callable, Dict, Iterable, Iterator, List, Optional, Pattern,
+    Sequence, Set, TextIO, Tuple, Type, TypeVar, Union,
+    overload,
 )
-from abc import abstractmethod, ABCMeta
+import logging
+import sys
+from types import ModuleType, TracebackType
 
-_T = TypeVar('_T')
-_FT = TypeVar('_FT')
 
-class Testable(metaclass=ABCMeta):
-    @abstractmethod
-    def run(self, result: 'TestResult') -> None: ...
-    @abstractmethod
-    def debug(self) -> None: ...
-    @abstractmethod
-    def countTestCases(self) -> int: ...
+_T = TypeVar('_T')
+_FT = TypeVar('_FT', bound=Callable[..., Any])
 
-# TODO ABC for test runners?
 
-class TestResult:
-    errors = ... # type: List[Tuple[Testable, str]]
-    failures = ... # type: List[Tuple[Testable, str]]
-    testsRun = 0
-    shouldStop = False
+def skip(reason: str) -> Callable[[_FT], _FT]: ...
+def skipIf(condition: object, reason: str) -> Callable[[_FT], _FT]: ...
+def skipUnless(condition: object, reason: str) -> Callable[[_FT], _FT]: ...
+def expectedFailure(func: _FT) -> _FT: ...
 
-    def wasSuccessful(self) -> bool: ...
-    def stop(self) -> None: ...
-    def startTest(self, test: Testable) -> None: ...
-    def stopTest(self, test: Testable) -> None: ...
-    def addError(self, test: Testable,
-                  err: Tuple[type, Any, Any]) -> None: ... # TODO
-    def addFailure(self, test: Testable,
-                    err: Tuple[type, Any, Any]) -> None: ... # TODO
-    def addSuccess(self, test: Testable) -> None: ...
-
-class _AssertRaisesBaseContext:
-    expected = ... # type: Any
-    failureException = ... # type: type
-    obj_name = ...  # type: str
-    expected_regex = ... # type: Pattern[str]
-
-class _AssertRaisesContext(_AssertRaisesBaseContext):
-    exception = ... # type: Any # TODO precise type
-    def __enter__(self) -> _AssertRaisesContext: ...
-    def __exit__(self, exc_type, exc_value, tb) -> bool: ...
+class SkipTest(Exception):
+    def __init__(self, reason: str) -> None: ...
 
-class _AssertWarnsContext(_AssertRaisesBaseContext):
-    warning = ... # type: Any # TODO precise type
-    filename = ...  # type: str
-    lineno = 0
-    def __enter__(self) -> _AssertWarnsContext: ...
-    def __exit__(self, exc_type, exc_value, tb) -> bool: ...
 
-class TestCase(Testable):
+class TestCase:
+    failureException = ...  # type: Type[BaseException]
+    longMessage = ...  # type: bool
+    maxDiff = ...  # type: Optional[int]
     def __init__(self, methodName: str = ...) -> None: ...
-    # TODO failureException
     def setUp(self) -> None: ...
     def tearDown(self) -> None: ...
-    def run(self, result: TestResult = ...) -> None: ...
+    @classmethod
+    def setUpClass(cls) -> None: ...
+    @classmethod
+    def tearDownClass(cls) -> None: ...
+    def run(self, result: Optional[TestResult] = ...) -> Optional[TestResult]: ...
+    def skipTest(self, reason: Any) -> None: ...
+    if sys.version_info >= (3, 4):
+        def subTest(self, msg: Any = ..., **params: Any) -> None: ...
     def debug(self) -> None: ...
-    def assert_(self, expr: Any, msg: object = ...) -> None: ...
-    def failUnless(self, expr: Any, msg: object = ...) -> None: ...
-    def assertTrue(self, expr: Any, msg: object = ...) -> None: ...
-    def assertEqual(self, first: Any, second: Any,
-                    msg: object = ...) -> None: ...
-    def failUnlessEqual(self, first: Any, second: Any,
-                        msg: object = ...) -> None: ...
+    def assertEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ...
     def assertNotEqual(self, first: Any, second: Any,
-                       msg: object = ...) -> None: ...
-    def assertSequenceEqual(self, first: Sequence[Any], second: Sequence[Any],
-                            msg: object = ...,
-                            seq_type: type = ...) -> None: ...
-    def failIfEqual(self, first: Any, second: Any,
-                    msg: object = ...) -> None: ...
-    def assertAlmostEqual(self, first: float, second: float, places: int = ...,
-                          msg: object = ...,
-                          delta: float = ...) -> None: ...
-    def failUnlessAlmostEqual(self, first: float, second: float,
-                              places: int = ...,
-                              msg: object = ...) -> None: ...
-    def assertNotAlmostEqual(self, first: float, second: float,
-                             places: int = ..., msg: object = ...,
-                             delta: float = ...) -> None: ...
-    def failIfAlmostEqual(self, first: float, second: float, places: int = ...,
-                          msg: object = ...) -> None: ...
+                       msg: Any = ...) -> None: ...
+    def assertTrue(self, expr: Any, msg: Any = ...) -> None: ...
+    def assertFalse(self, expr: Any, msg: Any = ...) -> None: ...
+    def assertIs(self, first: Any, second: Any, msg: Any = ...) -> None: ...
+    def assertIsNot(self, first: Any, second: Any,
+                    msg: Any = ...) -> None: ...
+    def assertIsNone(self, expr: Any, msg: Any = ...) -> None: ...
+    def assertIsNotNone(self, expr: Any, msg: Any = ...) -> None: ...
+    def assertIn(self, first: _T, second: Iterable[_T],
+                 msg: Any = ...) -> None: ...
+    def assertNotIn(self, first: _T, second: Iterable[_T],
+                    msg: Any = ...) -> None: ...
+    def assertIsInstance(self, obj: Any,
+                         cls: Union[Type[Any], Tuple[Type[Any], ...]],
+                         msg: Any = ...) -> None: ...
+    def assertNotIsInstance(self, obj: Any,
+                            cls: Union[Type[Any], Tuple[Type[Any], ...]],
+                            msg: Any = ...) -> None: ...
     def assertGreater(self, first: Any, second: Any,
-                      msg: object = ...) -> None: ...
+                      msg: Any = ...) -> None: ...
     def assertGreaterEqual(self, first: Any, second: Any,
-                      msg: object = ...) -> None: ...
-    def assertLess(self, first: Any, second: Any,
-                   msg: object = ...) -> None: ...
+                           msg: Any = ...) -> None: ...
+    def assertLess(self, first: Any, second: Any, msg: Any = ...) -> None: ...
     def assertLessEqual(self, first: Any, second: Any,
-                        msg: object = ...) -> None: ...
-    # TODO: If callableObj is None, the return value is None.
-    def assertRaises(self, excClass: type, callableObj: Any = ...,
-                     *args: Any, **kwargs: Any) -> _AssertRaisesContext: ...
-    def failIf(self, expr: Any, msg: object = ...) -> None: ...
-    def assertFalse(self, expr: Any, msg: object = ...) -> None: ...
-    def assertIs(self, first: object, second: object,
-                 msg: object = ...) -> None: ...
-    def assertIsNot(self, first: object, second: object,
-                    msg: object = ...) -> None: ...
-    def assertIsNone(self, expr: Any, msg: object = ...) -> None: ...
-    def assertIsNotNone(self, expr: Any, msg: object = ...) -> None: ...
-    def assertIn(self, first: _T, second: Iterable[_T],
-                 msg: object = ...) -> None: ...
-    def assertNotIn(self, first: _T, second: Iterable[_T],
-                    msg: object = ...) -> None: ...
-    def assertIsInstance(self, obj: Any, cls: type,
-                         msg: object = ...) -> None: ...
-    def assertNotIsInstance(self, obj: Any, cls: type,
-                            msg: object = ...) -> None: ...
-    def assertWarns(self, expected_warning: type, callable_obj: Any = ...,
-                    *args: Any, **kwargs: Any) -> _AssertWarnsContext: ...
-    def fail(self, msg: object = ...) -> None: ...
+                        msg: Any = ...) -> None: ...
+    @overload
+    def assertRaises(self,  # type: ignore
+                     exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
+                     callable: Callable[..., Any] = ...,
+                     *args: Any, **kwargs: Any) -> None: ...
+    @overload
+    def assertRaises(self,
+                     exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
+                     msg: Any = ...) -> _AssertRaisesContext: ...
+    @overload
+    def assertRaisesRegex(self,  # type: ignore
+                          exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
+                          callable: Callable[..., Any] = ...,
+                          *args: Any, **kwargs: Any) -> None: ...
+    @overload
+    def assertRaisesRegex(self,
+                          exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
+                          msg: Any = ...) -> _AssertRaisesContext: ...
+    @overload
+    def assertWarns(self,  # type: ignore
+                    exception: Union[Type[Warning], Tuple[Type[Warning], ...]],
+                    callable: Callable[..., Any] = ...,
+                    *args: Any, **kwargs: Any) -> None: ...
+    @overload
+    def assertWarns(self,
+                    exception: Union[Type[Warning], Tuple[Type[Warning], ...]],
+                    msg: Any = ...) -> _AssertWarnsContext: ...
+    @overload
+    def assertWarnsRegex(self,  # type: ignore
+                         exception: Union[Type[Warning], Tuple[Type[Warning], ...]],
+                         callable: Callable[..., Any] = ...,
+                         *args: Any, **kwargs: Any) -> None: ...
+    @overload
+    def assertWarnsRegex(self,
+                         exception: Union[Type[Warning], Tuple[Type[Warning], ...]],
+                         msg: Any = ...) -> _AssertWarnsContext: ...
+    if sys.version_info >= (3, 4):
+        def assertLogs(self, logger: Optional[logging.Logger] = ...,
+                       level: Union[int, str, None] = ...) \
+                       -> _AssertLogsContext: ...
+    def assertAlmostEqual(self, first: float, second: float, places: int = ...,
+                          msg: Any = ..., delta: float = ...) -> None: ...
+    def assertNotAlmostEqual(self, first: float, second: float,
+                             places: int = ..., msg: Any = ...,
+                             delta: float = ...) -> None: ...
+    def assertRegex(self, text: str, regex: Union[str, Pattern[str]],
+                    msg: Any = ...) -> None: ...
+    def assertNotRegex(self, text: str, regex: Union[str, Pattern[str]],
+                       msg: Any = ...) -> None: ...
+    def assertCountEqual(self, first: Sequence[Any], second: Sequence[Any],
+                         msg: Any = ...) -> None: ...
+    def addTypeEqualityFunc(self, typeobj: Type[Any],
+                            function: Callable[..., None]) -> None: ...
+    def assertMultiLineEqual(self, first: str, second: str,
+                             msg: Any = ...) -> None: ...
+    def assertSequenceEqual(self, first: Sequence[Any], second: Sequence[Any],
+                            msg: Any = ...,
+                            seq_type: Type[Sequence[Any]] = ...) -> None: ...
+    def assertListEqual(self, first: List[Any], second: List[Any],
+                        msg: Any = ...) -> None: ...
+    def assertTupleEqual(self, first: Tuple[Any, ...], second: Tuple[Any, ...],
+                         msg: Any = ...) -> None: ...
+    def assertSetEqual(self, first: Set[Any], second: Set[Any],
+                       msg: Any = ...) -> None: ...
+    def assertDictEqual(self, first: Dict[Any, Any], second: Dict[Any, Any],
+                        msg: Any = ...) -> None: ...
+    def fail(self, msg: Any = ...) -> None: ...
     def countTestCases(self) -> int: ...
     def defaultTestResult(self) -> TestResult: ...
     def id(self) -> str: ...
-    def shortDescription(self) -> str: ... # May return None
-    def addCleanup(function: Any, *args: Any, **kwargs: Any) -> None: ...
-    def skipTest(self, reason: Any) -> None: ...
+    def shortDescription(self) -> Optional[str]: ...
+    def addCleanup(self, function: Callable[..., Any], *args: Any,
+                   **kwargs: Any) -> None: ...
+    def doCleanups(self) -> None: ...
+    # below is deprecated
+    def failUnlessEqual(self, first: Any, second: Any,
+                        msg: Any = ...) -> None: ...
+    def assertEquals(self, first: Any, second: Any, msg: Any = ...) -> None: ...
+    def failIfEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ...
+    def assertNotEquals(self, first: Any, second: Any,
+                        msg: Any = ...) -> None: ...
+    def failUnless(self, expr: bool, msg: Any = ...) -> None: ...
+    def assert_(self, expr: bool, msg: Any = ...) -> None: ...
+    def failIf(self, expr: bool, msg: Any = ...) -> None: ...
+    @overload
+    def failUnlessRaises(self,  # type: ignore
+                         exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
+                         callable: Callable[..., Any] = ...,
+                         *args: Any, **kwargs: Any) -> None: ...
+    @overload
+    def failUnlessRaises(self,
+                         exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
+                         msg: Any = ...) -> _AssertRaisesContext: ...
+    def failUnlessAlmostEqual(self, first: float, second: float,
+                              places: int = ..., msg: Any = ...) -> None: ...
+    def assertAlmostEquals(self, first: float, second: float, places: int = ...,
+                           msg: Any = ..., delta: float = ...) -> None: ...
+    def failIfAlmostEqual(self, first: float, second: float, places: int = ...,
+                          msg: Any = ...) -> None: ...
+    def assertNotAlmostEquals(self, first: float, second: float,
+                              places: int = ..., msg: Any = ...,
+                              delta: float = ...) -> None: ...
+    def assertRegexpMatches(self, text: str, regex: Union[str, Pattern[str]],
+                            msg: Any = ...) -> None: ...
+    @overload
+    def assertRaisesRegexp(self,  # type: ignore
+                           exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
+                           callable: Callable[..., Any] = ...,
+                           *args: Any, **kwargs: Any) -> None: ...
+    @overload
+    def assertRaisesRegexp(self,
+                           exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
+                           msg: Any = ...) -> _AssertRaisesContext: ...
 
-class CallableTestCase(Testable):
+class FunctionTestCase(TestCase):
     def __init__(self, testFunc: Callable[[], None],
-                 setUp: Callable[[], None] = ...,
-                 tearDown: Callable[[], None] = ...,
-                 description: str = ...) -> None: ...
-    def run(self, result: TestResult) -> None: ...
-    def debug(self) -> None: ...
-    def countTestCases(self) -> int: ...
+                 setUp: Optional[Callable[[], None]] = ...,
+                 tearDown: Optional[Callable[[], None]] = ...,
+                 description: Optional[str] = ...) -> None: ...
+
+class _AssertRaisesContext:
+    exception = ... # type: Exception
+    def __enter__(self) -> _AssertRaisesContext: ...
+    def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception],
+                 exc_tb: Optional[TracebackType]) -> bool: ...
+
+class _AssertWarnsContext:
+    warning = ... # type: Warning
+    filename = ...  # type: str
+    lineno = ...  # type: int
+    def __enter__(self) -> _AssertWarnsContext: ...
+    def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception],
+                 exc_tb: Optional[TracebackType]) -> bool: ...
+
+class _AssertLogsContext:
+    records = ...  # type: List[logging.LogRecord]
+    output = ...  # type: List[str]
+    def __enter__(self) -> _AssertLogsContext: ...
+    def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception],
+                 exc_tb: Optional[TracebackType]) -> bool: ...
+
+
+_TestType = Union[TestCase, TestSuite]
 
-class TestSuite(Testable):
-    def __init__(self, tests: Iterable[Testable] = ...) -> None: ...
-    def addTest(self, test: Testable) -> None: ...
-    def addTests(self, tests: Iterable[Testable]) -> None: ...
-    def run(self, result: TestResult) -> None: ...
+class TestSuite(Iterable[_TestType]):
+    def __init__(self, tests: Iterable[_TestType] = ...) -> None: ...
+    def addTest(self, test: _TestType) -> None: ...
+    def addTests(self, tests: Iterable[_TestType]) -> None: ...
+    def run(self, result: TestResult) -> TestResult: ...
     def debug(self) -> None: ...
     def countTestCases(self) -> int: ...
+    def __iter__(self) -> Iterator[_TestType]: ...
 
-# TODO TestLoader
-# TODO defaultTestLoader
 
-class TextTestRunner:
+class TestLoader:
+    if sys.version_info >= (3, 5):
+        errors = ...  # type: List[Type[BaseException]]
+    testMethodPrefix = ...  # type: str
+    sortTestMethodsUsing = ...  # type: Callable[[str, str], bool]
+    suiteClass = ...  # type: Callable[[List[TestCase]], TestSuite]
+    def loadTestsFromTestCase(self,
+                              testCaseClass: Type[TestCase]) -> TestSuite: ...
+    if sys.version_info >= (3, 5):
+        def loadTestsFromModule(self, module: ModuleType,
+                                *, pattern: Any = ...) -> TestSuite: ...
+    else:
+        def loadTestsFromModule(self,  # type: ignore
+                                module: ModuleType) -> TestSuite: ...
+    def loadTestsFromName(self, name: str,
+                          module: Optional[ModuleType] = ...) -> TestSuite: ...
+    def loadTestsFromNames(self, names: Sequence[str],
+                           module: Optional[ModuleType] = ...) -> TestSuite: ...
+    def getTestCaseNames(self,
+                         testCaseClass: Type[TestCase]) -> Sequence[str]: ...
+    def discover(self, start_dir: str, pattern: str = ...,
+                 top_level_dir: Optional[str] = ...) -> TestSuite: ...
+
+_SysExcInfoType = Tuple[Optional[Type[BaseException]],
+                        Optional[BaseException],
+                        Optional[TracebackType]]
+
+class TestResult:
+    errors = ... # type: List[Tuple[TestCase, str]]
+    failures = ... # type: List[Tuple[TestCase, str]]
+    skipped = ... # type: List[Tuple[TestCase, str]]
+    expectedFailures = ... # type: List[Tuple[TestCase, str]]
+    unexpectedSuccesses = ... # type: List[TestCase]
+    shouldStop = ...  # type: bool
+    testsRun = ...  # type: int
+    buffer = ...  # type: bool
+    failfast = ...  # type: bool
+    tb_locals = ...  # type: bool
+    def wasSuccessful(self) -> bool: ...
+    def stop(self) -> None: ...
+    def startTest(self, test: TestCase) -> None: ...
+    def stopTest(self, test: TestCase) -> None: ...
+    def startTestRun(self) -> None: ...
+    def stopTestRun(self) -> None: ...
+    def addError(self, test: TestCase, err: _SysExcInfoType) -> None: ...
+    def addFailure(self, test: TestCase, err: _SysExcInfoType) -> None: ...
+    def addSuccess(self, test: TestCase) -> None: ...
+    def addSkip(self, test: TestCase, reason: str) -> None: ...
+    def addExpectedFailure(self, test: TestCase,
+                           err: _SysExcInfoType) -> None: ...
+    def addUnexpectedSuccess(self, test: TestCase) -> None: ...
+    if sys.version_info >= (3, 4):
+        def addSubTest(self, test: TestCase, subtest: TestCase,
+                       outcome: Optional[_SysExcInfoType]) -> None: ...
+
+class TextTestResult(TestResult):
     def __init__(self, stream: TextIO = ..., descriptions: bool = ...,
-                 verbosity: int = ..., failfast: bool = ...) -> None: ...
+                 verbosity: int = ...) -> None: ...
+_TextTestResult = TextTestResult
 
-class SkipTest(Exception):
-    ...
+defaultTestLoader = ...  # type: TestLoader
+
+_ResultClassType = Callable[[TextIO, bool, int], TestResult]
+
+# not really documented
+class TestRunner:
+    def run(self, test: Union[TestSuite, TestCase]) -> None: ...
+
+class TextTestRunner(TestRunner):
+    if sys.version_info >= (3, 5):
+        def __init__(self, stream: Optional[TextIO] = ...,
+                     descriptions: bool = ..., verbosity: int = ...,
+                     failfast: bool = ..., buffer: bool = ...,
+                     resultclass: Optional[_ResultClassType] = ...,
+                     warnings: Optional[Type[Warning]] = ...,
+                     *, tb_locals: bool = ...) -> None: ...
+    else:
+        def __init__(self,  # type: ignore
+                     stream: Optional[TextIO] = ...,
+                     descriptions: bool = ..., verbosity: int = ...,
+                     failfast: bool = ..., buffer: bool = ...,
+                     resultclass: Optional[_ResultClassType] = ...,
+                     warnings: Optional[Type[Warning]] = ...) -> None: ...
+    def _makeResult(self) -> TestResult: ...
+
+if sys.version_info >= (3, 4):
+    _DefaultTestType = Union[str, Iterable[str], None]
+else:
+    _DefaultTestType = Optional[str]
+
+# not really documented
+class TestProgram:
+    result = ...  # type: TestResult
+
+def main(module: str = ..., defaultTest: _DefaultTestType = ...,
+         argv: Optional[List[str]] = ...,
+         testRunner: Union[Type[TestRunner], TestRunner, None] = ...,
+         testLoader: TestLoader = ..., exit: bool = ..., verbosity: int = ...,
+         failfast: Optional[bool] = ..., catchbreak: Optional[bool] = ...,
+         buffer: Optional[bool] = ...,
+         warnings: Optional[str] = ...) -> TestProgram: ...
 
-# TODO precise types
-def skipUnless(condition: Any, reason: str) -> Any: ...
-def skipIf(condition: Any, reason: str) -> Any: ...
-def expectedFailure(func: _FT) -> _FT: ...
-def skip(reason: str) -> Any: ...
 
-def main(module: str = ..., defaultTest: str = ...,
-         argv: List[str] = ..., testRunner: Any = ...,
-         testLoader: Any = ...) -> None: ... # TODO types
+def installHandler() -> None: ...
+def registerResult(result: TestResult) -> None: ...
+def removeResult(result: TestResult) -> None: ...
+def removeHandler(function: Optional[_FT]) -> _FT: ...
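
Not part of the patch, just an illustration: with the expanded unittest stubs above, ordinary driver code along the lines of the sketch below should now type-check against precise TestLoader / TestSuite / TextTestRunner signatures (the test class and names are invented for the example).

    import unittest

    class ExampleTest(unittest.TestCase):
        def test_upper(self) -> None:
            self.assertEqual("abc".upper(), "ABC")

    if __name__ == "__main__":
        # loadTestsFromTestCase() yields a TestSuite; TextTestRunner.run()
        # executes it, matching the signatures declared in the stub.
        loader = unittest.TestLoader()
        suite = loader.loadTestsFromTestCase(ExampleTest)
        unittest.TextTestRunner(verbosity=2).run(suite)
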
diff --git a/typeshed/stdlib/3/urllib/parse.pyi b/typeshed/stdlib/3/urllib/parse.pyi
index 1a453dc..3e5433f 100644
--- a/typeshed/stdlib/3/urllib/parse.pyi
+++ b/typeshed/stdlib/3/urllib/parse.pyi
@@ -19,12 +19,12 @@ __all__ = (
     'unquote_to_bytes'
 )
 
-uses_relative = []  # type: List[str]
-uses_netloc = []  # type: List[str]
-uses_params = []  # type: List[str]
-non_hierarchical = []  # type: List[str]
-uses_query = []  # type: List[str]
-uses_fragment = []  # type: List[str]
+uses_relative = ...  # type: List[str]
+uses_netloc = ...  # type: List[str]
+uses_params = ...  # type: List[str]
+non_hierarchical = ...  # type: List[str]
+uses_query = ...  # type: List[str]
+uses_fragment = ...  # type: List[str]
 scheme_chars = ...  # type: str
 MAX_CACHE_SIZE = 0
 
@@ -88,11 +88,17 @@ def parse_qs(qs: str, keep_blank_values : bool = ..., strict_parsing : bool = ..
 
 def parse_qsl(qs: str, keep_blank_values: bool = ..., strict_parsing: bool = ..., encoding: str = ..., errors: str = ...) -> List[Tuple[str,str]]: ...
 
-def quote(string: AnyStr, safe: AnyStr = ..., encoding: str = ..., errors: str = ...) -> str: ...
+@overload
+def quote(string: str, safe: AnyStr = ..., encoding: str = ..., errors: str = ...) -> str: ...
+@overload
+def quote(string: bytes, safe: AnyStr = ...) -> str: ...
 
-def quote_from_bytes(bs: bytes, safe: AnyStr = ...) -> bytes: ...
+def quote_from_bytes(bs: bytes, safe: AnyStr = ...) -> str: ...
 
-def quote_plus(string: AnyStr, safe: AnyStr = ..., encoding: str = ..., errors: str = ...) -> str: ...
+@overload
+def quote_plus(string: str, safe: AnyStr = ..., encoding: str = ..., errors: str = ...) -> str: ...
+@overload
+def quote_plus(string: bytes, safe: AnyStr = ...) -> str: ...
 
 def unquote(string: str, encoding: str = ..., errors: str = ...) -> str: ...
 
diff --git a/typeshed/stdlib/3/xml/etree/ElementTree.pyi b/typeshed/stdlib/3/xml/etree/ElementTree.pyi
index f673f97..e4be890 100644
--- a/typeshed/stdlib/3/xml/etree/ElementTree.pyi
+++ b/typeshed/stdlib/3/xml/etree/ElementTree.pyi
@@ -11,7 +11,7 @@ _Ss = TypeVar('_Ss', str, bytes)
 _T = TypeVar('_T')
 _str_or_bytes = Union[str, bytes]
 
-class _ElementInterface:
+class _ElementInterface(Sequence['_ElementInterface']):
     tag = ... # type: _str_or_bytes
     attrib = ... # type: Dict[_str_or_bytes, _str_or_bytes]
     text = ... # type: Optional[_str_or_bytes]
@@ -39,8 +39,8 @@ class _ElementInterface:
     def items(self) -> ItemsView[AnyStr, AnyStr]: ...
     def getiterator(self, tag: Union[str, AnyStr]=...) -> List['_ElementInterface']: ...
 
-def Element(tag: Union[AnyStr, Callable[..., _ElementInterface]], attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> _ElementInterface: ...
-def SubElement(parent: _ElementInterface, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> _ElementInterface: ...
+def Element(tag: Union[AnyStr, Callable[..., _ElementInterface]], attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> _ElementInterface: ...
+def SubElement(parent: _ElementInterface, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> _ElementInterface: ...
 def Comment(text: _str_or_bytes=...) -> _ElementInterface: ...
 def ProcessingInstruction(target: str, text: str=...) -> _ElementInterface: ...
 
diff --git a/typeshed/runtests.py b/typeshed/tests/mypy_test.py
similarity index 100%
rename from typeshed/runtests.py
rename to typeshed/tests/mypy_test.py
diff --git a/typeshed/tests/pytype_test.py b/typeshed/tests/pytype_test.py
new file mode 100755
index 0000000..078ef55
--- /dev/null
+++ b/typeshed/tests/pytype_test.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+"""Test runner for typeshed.
+
+Depends on pytype being installed.
+
+If pytype is installed:
+    1. For every pyi, run "pytd <foo.pyi>" in a separate process
+"""
+
+import os
+import re
+import sys
+import argparse
+import subprocess
+import collections
+
+parser = argparse.ArgumentParser(description="Pytype tests.")
+parser.add_argument('-n', '--dry-run', action='store_true', help="Don't actually run tests")
+parser.add_argument('--num-parallel', type=int, default=1,
+                    help="Number of test processes to spawn")
+
+
+def main():
+    args = parser.parse_args()
+    code, runs = pytype_test(args)
+
+    if code:
+        print("--- exit status %d ---" % code)
+        sys.exit(code)
+    if not runs:
+        print("--- nothing to do; exit 1 ---")
+        sys.exit(1)
+
+
+def load_blacklist():
+    filename = os.path.join(os.path.dirname(__file__), "pytype_blacklist.txt")
+    regex = r"^\s*([^\s#]+)\s*(?:#.*)?$"
+
+    with open(filename) as f:
+        return re.findall(regex, f.read(), flags=re.M)
+
+
+class PytdRun(object):
+    def __init__(self, args, dry_run=False):
+        self.args = args
+        self.dry_run = dry_run
+        self.results = None
+
+        if dry_run:
+            self.results = (0, "", "")
+        else:
+            self.proc = subprocess.Popen(
+                ["pytd"] + args,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE)
+
+    def communicate(self):
+        if self.results:
+            return self.results
+
+        stdout, stderr = self.proc.communicate()
+        self.results = self.proc.returncode, stdout, stderr
+        return self.results
+
+
+def pytype_test(args):
+    try:
+        PytdRun(["-h"]).communicate()
+    except OSError:
+        print("Cannot run pytd. Did you install pytype?")
+        return 0, 0
+
+    wanted = re.compile(r"stdlib/(2\.7|2and3)/.*\.pyi$")
+    skipped = re.compile("(%s)$" % "|".join(load_blacklist()))
+    files = []
+
+    for root, _, filenames in os.walk("stdlib"):
+        for f in sorted(filenames):
+            f = os.path.join(root, f)
+            if wanted.search(f) and not skipped.search(f):
+                files.append(f)
+
+    running_tests = collections.deque()
+    max_code, runs, errors = 0, 0, 0
+    print("Running pytype tests...")
+    while True:
+        while files and len(running_tests) < args.num_parallel:
+            test_run = PytdRun([files.pop()], dry_run=args.dry_run)
+            running_tests.append(test_run)
+
+        if not running_tests:
+            break
+
+        test_run = running_tests.popleft()
+        code, stdout, stderr = test_run.communicate()
+        max_code = max(max_code, code)
+        runs += 1
+
+        if code:
+            print("pytd error processing \"%s\":" % test_run.args[0])
+            print(stderr)
+            errors += 1
+
+    print("Ran pytype with %d pyis, got %d errors." % (runs, errors))
+    return max_code, runs
+
+
+if __name__ == '__main__':
+    main()
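
As a usage note (not part of the patch): the runner walks the relative stdlib/ directory, so it is meant to be invoked from the typeshed root, e.g. "python tests/pytype_test.py --num-parallel 4", or with "--dry-run" to exercise the driver without actually spawning pytd processes.
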
diff --git a/typeshed/third_party/2.7/boto/exception.pyi b/typeshed/third_party/2.7/boto/exception.pyi
new file mode 100644
index 0000000..2a20228
--- /dev/null
+++ b/typeshed/third_party/2.7/boto/exception.pyi
@@ -0,0 +1,149 @@
+# Stubs for boto.exception (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class BotoClientError(StandardError):
+    reason = ...  # type: Any
+    def __init__(self, reason, *args): ...
+
+class SDBPersistenceError(StandardError): ...
+class StoragePermissionsError(BotoClientError): ...
+class S3PermissionsError(StoragePermissionsError): ...
+class GSPermissionsError(StoragePermissionsError): ...
+
+class BotoServerError(StandardError):
+    status = ...  # type: Any
+    reason = ...  # type: Any
+    body = ...  # type: Any
+    request_id = ...  # type: Any
+    error_code = ...  # type: Any
+    message = ...  # type: Any
+    box_usage = ...  # type: Any
+    def __init__(self, status, reason, body=None, *args): ...
+    def __getattr__(self, name): ...
+    def __setattr__(self, name, value): ...
+    def startElement(self, name, attrs, connection): ...
+    def endElement(self, name, value, connection): ...
+
+class ConsoleOutput:
+    parent = ...  # type: Any
+    instance_id = ...  # type: Any
+    timestamp = ...  # type: Any
+    comment = ...  # type: Any
+    output = ...  # type: Any
+    def __init__(self, parent=None): ...
+    def startElement(self, name, attrs, connection): ...
+    def endElement(self, name, value, connection): ...
+
+class StorageCreateError(BotoServerError):
+    bucket = ...  # type: Any
+    def __init__(self, status, reason, body=None): ...
+    def endElement(self, name, value, connection): ...
+
+class S3CreateError(StorageCreateError): ...
+class GSCreateError(StorageCreateError): ...
+class StorageCopyError(BotoServerError): ...
+class S3CopyError(StorageCopyError): ...
+class GSCopyError(StorageCopyError): ...
+
+class SQSError(BotoServerError):
+    detail = ...  # type: Any
+    type = ...  # type: Any
+    def __init__(self, status, reason, body=None): ...
+    def startElement(self, name, attrs, connection): ...
+    def endElement(self, name, value, connection): ...
+
+class SQSDecodeError(BotoClientError):
+    message = ...  # type: Any
+    def __init__(self, reason, message): ...
+
+class StorageResponseError(BotoServerError):
+    resource = ...  # type: Any
+    def __init__(self, status, reason, body=None): ...
+    def startElement(self, name, attrs, connection): ...
+    def endElement(self, name, value, connection): ...
+
+class S3ResponseError(StorageResponseError): ...
+class GSResponseError(StorageResponseError): ...
+
+class EC2ResponseError(BotoServerError):
+    errors = ...  # type: Any
+    def __init__(self, status, reason, body=None): ...
+    def startElement(self, name, attrs, connection): ...
+    request_id = ...  # type: Any
+    def endElement(self, name, value, connection): ...
+
+class JSONResponseError(BotoServerError):
+    status = ...  # type: Any
+    reason = ...  # type: Any
+    body = ...  # type: Any
+    error_message = ...  # type: Any
+    error_code = ...  # type: Any
+    def __init__(self, status, reason, body=None, *args): ...
+
+class DynamoDBResponseError(JSONResponseError): ...
+class SWFResponseError(JSONResponseError): ...
+class EmrResponseError(BotoServerError): ...
+
+class _EC2Error:
+    connection = ...  # type: Any
+    error_code = ...  # type: Any
+    error_message = ...  # type: Any
+    def __init__(self, connection=None): ...
+    def startElement(self, name, attrs, connection): ...
+    def endElement(self, name, value, connection): ...
+
+class SDBResponseError(BotoServerError): ...
+class AWSConnectionError(BotoClientError): ...
+class StorageDataError(BotoClientError): ...
+class S3DataError(StorageDataError): ...
+class GSDataError(StorageDataError): ...
+
+class InvalidUriError(Exception):
+    message = ...  # type: Any
+    def __init__(self, message): ...
+
+class InvalidAclError(Exception):
+    message = ...  # type: Any
+    def __init__(self, message): ...
+
+class InvalidCorsError(Exception):
+    message = ...  # type: Any
+    def __init__(self, message): ...
+
+class NoAuthHandlerFound(Exception): ...
+
+class InvalidLifecycleConfigError(Exception):
+    message = ...  # type: Any
+    def __init__(self, message): ...
+
+class ResumableTransferDisposition:
+    START_OVER = ...  # type: Any
+    WAIT_BEFORE_RETRY = ...  # type: Any
+    ABORT_CUR_PROCESS = ...  # type: Any
+    ABORT = ...  # type: Any
+
+class ResumableUploadException(Exception):
+    message = ...  # type: Any
+    disposition = ...  # type: Any
+    def __init__(self, message, disposition): ...
+
+class ResumableDownloadException(Exception):
+    message = ...  # type: Any
+    disposition = ...  # type: Any
+    def __init__(self, message, disposition): ...
+
+class TooManyRecordsException(Exception):
+    message = ...  # type: Any
+    def __init__(self, message): ...
+
+class PleaseRetryException(Exception):
+    message = ...  # type: Any
+    response = ...  # type: Any
+    def __init__(self, message, response=None): ...
+
+class InvalidInstanceMetadataError(Exception):
+    MSG = ...  # type: Any
+    def __init__(self, msg): ...
diff --git a/typeshed/third_party/2.7/dateutil/__init__.pyi b/typeshed/third_party/2.7/dateutil/__init__.pyi
new file mode 100644
index 0000000..e69de29
diff --git a/typeshed/third_party/2.7/dateutil/parser.pyi b/typeshed/third_party/2.7/dateutil/parser.pyi
new file mode 100644
index 0000000..4962b8a
--- /dev/null
+++ b/typeshed/third_party/2.7/dateutil/parser.pyi
@@ -0,0 +1,39 @@
+from typing import List, Tuple, Optional, Callable, Union, IO, Any, Dict
+from datetime import datetime, tzinfo
+
+__all__ = ...  # type: List[str]
+
+
+class parserinfo(object):
+    JUMP = ... # type: List[str]
+    WEEKDAYS = ... # type: List[Tuple[str, str]]
+    MONTHS = ... # type: List[Tuple[str, str]]
+    HMS = ... # type: List[Tuple[str, str, str]]
+    AMPM = ... # type: List[Tuple[str, str]]
+    UTCZONE = ... # type: List[str]
+    PERTAIN = ... # type: List[str]
+    TZOFFSET = ... # type: Dict[str, int]
+
+    def __init__(self, dayfirst: bool=..., yearfirst: bool=...) -> None: ...
+    def jump(self, name: unicode) -> bool: ...
+    def weekday(self, name: unicode) -> Union[int, None]: ...
+    def month(self, name: unicode) -> Union[int, None]: ...
+    def hms(self, name: unicode) -> Union[int, None]: ...
+    def ampm(self, name: unicode) -> Union[int, None]: ...
+    def pertain(self, name: unicode) -> bool: ...
+    def utczone(self, name: unicode) -> bool: ...
+    def tzoffset(self, name: unicode) -> Union[int, None]: ...
+    def convertyear(self, year: int) -> int: ...
+    def validate(self, res: datetime) -> bool: ...
+
+class parser(object):
+    def __init__(self, info: parserinfo = None) -> None: ...
+    def parse(self, timestr: Union[str, unicode, IO[unicode]],
+              default: datetime = None,
+              ignoretz: bool = ..., tzinfos: Dict[Union[str, unicode], tzinfo] = None,
+              **kwargs: Any) -> datetime: ...
+
+DEFAULTPARSER = ... # type: parser
+def parse(timestr: Union[str, unicode, IO[unicode]],
+          parserinfo: parserinfo = None,
+          **kwargs: Any) -> datetime: ...
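
Not part of the patch: the stub above describes the usual dateutil entry point, roughly as in this sketch (the date string is arbitrary).

    from dateutil import parser

    dt = parser.parse("2016-08-02 14:35:46")  # -> datetime, per the stub
    print(dt.year, dt.month, dt.day)          # 2016 8 2
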
diff --git a/typeshed/third_party/2.7/enum.pyi b/typeshed/third_party/2.7/enum.pyi
index dcb3b9c..4032e81 100644
--- a/typeshed/third_party/2.7/enum.pyi
+++ b/typeshed/third_party/2.7/enum.pyi
@@ -10,7 +10,7 @@ class Enum:
     def __reduce_ex__(self, proto: Any) -> Any: ...
 
     name = ...  # type: str
-    value = None  # type: Any
+    value = ...  # type: Any
 
 class IntEnum(int, Enum): ...
 
diff --git a/typeshed/third_party/2.7/gflags.pyi b/typeshed/third_party/2.7/gflags.pyi
index 55bdc75..a91cee5 100644
--- a/typeshed/third_party/2.7/gflags.pyi
+++ b/typeshed/third_party/2.7/gflags.pyi
@@ -64,19 +64,19 @@ class FlagValues:
   # TODO validator: gflags_validators.Validator
     def AddValidator(self, validator: Any) -> None: ...
 
-FLAGS = None  # type: FlagValues
+FLAGS = ...  # type: FlagValues
 
 class Flag:
     name = ...  # type: str
-    default = None  # type: Any
+    default = ...  # type: Any
     default_as_str = ...  # type: str
-    value = None  # type: Any
+    value = ...  # type: Any
     help = ...  # type: str
     short_name = ...  # type: str
     boolean = False
     present = False
-    parser = None  # type: ArgumentParser
-    serializer = None  # type: ArgumentSerializer
+    parser = ...  # type: ArgumentParser
+    serializer = ...  # type: ArgumentSerializer
     allow_override = False
 
     def __init__(self, parser: ArgumentParser, serializer: ArgumentSerializer, name: str,
diff --git a/typeshed/third_party/2.7/pymssql.pyi b/typeshed/third_party/2.7/pymssql.pyi
new file mode 100644
index 0000000..c11b9c4
--- /dev/null
+++ b/typeshed/third_party/2.7/pymssql.pyi
@@ -0,0 +1,48 @@
+from datetime import datetime, date, time
+
+from typing import Any, Dict, Tuple, Iterable, List, Optional, Union, Sequence
+
+Scalar = Union[int, float, str, datetime, date, time]
+Result = Union[Tuple[Scalar, ...], Dict[str, Scalar]]
+
+class Connection(object):
+    def __init__(self, user, password, host, database, timeout,
+                 login_timeout, charset, as_dict) -> None: ...
+    def autocommit(self, status: bool) -> None: ...
+    def close(self) -> None: ...
+    def commit(self) -> None: ...
+    def cursor(self) -> 'Cursor': ...
+    def rollback(self) -> None: ...
+
+class Cursor(object):
+    def __init__(self) -> None: ...
+    def __iter__(self): ...
+    def __next__(self) -> Any: ...
+    def callproc(self, procname: str, **kwargs) -> None: ...
+    def close(self) -> None: ...
+    def execute(self, stmt: str,
+                params: Optional[Union[Scalar, Tuple[Scalar, ...],
+                                       Dict[str, Scalar]]]) -> None: ...
+    def executemany(self, stmt: str,
+                    params: Optional[Sequence[Tuple[Scalar, ...]]]) -> None: ...
+    def fetchall(self) -> List[Result]: ...
+    def fetchmany(self, size: Optional[int]) -> List[Result]: ...
+    def fetchone(self) -> Result: ...
+
+def connect(server: Optional[str],
+            user: Optional[str],
+            password: Optional[str],
+            database: Optional[str],
+            timeout: Optional[int],
+            login_timeout: Optional[int],
+            charset: Optional[str],
+            as_dict: Optional[bool],
+            host: Optional[str],
+            appname: Optional[str],
+            port: Optional[str],
+            conn_properties: Optional[Union[str, Sequence[str]]],
+            autocommit: Optional[bool],
+            tds_version: Optional[str]) -> Connection: ...
+def get_max_connections() -> int: ...
+def set_max_connections(n: int) -> None: ...
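
Not part of the patch: the stub above covers a DB-API-style flow roughly like the sketch below; the connection parameters are placeholders.

    import pymssql

    conn = pymssql.connect(server="localhost", user="sa",
                           password="secret", database="testdb")
    cur = conn.cursor()
    cur.execute("SELECT 1 AS answer")  # Cursor.execute(), as declared above
    print(cur.fetchall())              # e.g. [(1,)]
    conn.close()
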
diff --git a/typeshed/third_party/2.7/six/moves/__init__.pyi b/typeshed/third_party/2.7/six/moves/__init__.pyi
index e2dee78..072b4d6 100644
--- a/typeshed/third_party/2.7/six/moves/__init__.pyi
+++ b/typeshed/third_party/2.7/six/moves/__init__.pyi
@@ -23,8 +23,10 @@ from itertools import izip_longest as zip_longest
 import six.moves.cPickle as cPickle
 import HTMLParser as html_parser
 import htmlentitydefs as html_entities
+import httplib as http_client
 
 import six.moves.urllib_parse as urllib_parse
 import six.moves.urllib_error as urllib_error
 import six.moves.urllib as urllib
 import six.moves.urllib_robotparser as urllib_robotparser
+
diff --git a/typeshed/third_party/2.7/sqlalchemy/engine/__init__.pyi b/typeshed/third_party/2.7/sqlalchemy/engine/__init__.pyi
index 18ceed9..49eca6d 100644
--- a/typeshed/third_party/2.7/sqlalchemy/engine/__init__.pyi
+++ b/typeshed/third_party/2.7/sqlalchemy/engine/__init__.pyi
@@ -2,5 +2,10 @@
 #
 # NOTE: This dynamically typed stub was automatically generated by stubgen.
 
+from .base import Connection as Connection
+from .base import Engine as Engine
+from .base import RowProxy as RowProxy
+from .base import Transaction as Transaction
+
 def create_engine(*args, **kwargs): ...
 def engine_from_config(configuration, prefix=..., **kwargs): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/engine/base.pyi b/typeshed/third_party/2.7/sqlalchemy/engine/base.pyi
index 3b89813..71f6e19 100644
--- a/typeshed/third_party/2.7/sqlalchemy/engine/base.pyi
+++ b/typeshed/third_party/2.7/sqlalchemy/engine/base.pyi
@@ -1,3 +1,21 @@
+from typing import Any, List, Tuple
+
 # Dummy until I figure out something better.
 class Connectable:
     pass
+
+class Connection:
+    def begin(self): ...
+    def execute(self, object, *multiparams, **params): ...
+
+class Engine(object): ...
+
+class RowProxy:
+    def items(self) -> List[Tuple[Any, Any]]: ...
+    def keys(self) -> List[Any]: ...
+    def values(self) -> List[Any]: ...
+    def __getitem__(self, key: str): ...
+
+class Transaction:
+    def commit(self): ...
+    def rollback(self): ...
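
Not part of the patch: a minimal sketch of the 1.x-era Connection / Transaction API that these small stubs describe, using an in-memory SQLite engine so it stays self-contained.

    from sqlalchemy import create_engine

    engine = create_engine("sqlite://")
    conn = engine.connect()
    trans = conn.begin()                          # Transaction
    conn.execute("CREATE TABLE t (x INTEGER)")
    conn.execute("INSERT INTO t (x) VALUES (1)")
    trans.commit()
    row = conn.execute("SELECT x FROM t").fetchone()
    print(row["x"])                               # RowProxy supports key access
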
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/operators.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/operators.pyi
index 1b33d00..5ae39cd 100644
--- a/typeshed/third_party/2.7/sqlalchemy/sql/operators.pyi
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/operators.pyi
@@ -27,7 +27,7 @@ class ColumnOperators(Operators):
     def nullsfirst(self): ...
     def nullslast(self): ...
     def collate(self, collation): ...
-    def between(self, cleft, cright, symmetric: bool): ...
+    def between(self, cleft, cright, symmetric: bool = ...): ...
     def distinct(self): ...
 
     def __lt__(self, other): ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/sql/schema.pyi b/typeshed/third_party/2.7/sqlalchemy/sql/schema.pyi
index 403795e..acad50e 100644
--- a/typeshed/third_party/2.7/sqlalchemy/sql/schema.pyi
+++ b/typeshed/third_party/2.7/sqlalchemy/sql/schema.pyi
@@ -20,7 +20,14 @@ class SchemaItem(SchemaEventTarget, visitors.Visitable):
     def __repr__(self): ...
 
 
-class Table(DialectKWArgs, SchemaItem, TableClause): ...
+class Table(DialectKWArgs, SchemaItem, TableClause):
+    def __init__(self, name, metadata, *args, **kwargs): ...
+    def delete(self, *args, **kwargs): ...
+    def insert(self, *args, **kwargs): ...
+    def select(self, *args, **kwargs): ...
+    def update(self, *args, **kwargs): ...
+    c = ... # type: ColumnCollection
+
 
 class Column(SchemaItem, ColumnClause):
     primary_key = ... # type: Any
@@ -71,7 +78,7 @@ class DefaultClause(FetchedValue): ...
 class PassiveDefault(DefaultClause): ...
 class Constraint(DialectKWArgs, SchemaItem): ...
 class ColumnCollectionMixin(object):
-    columns = None # type: Any
+    columns = ...  # type: Any
     def __init__(self, *columns, **kw): ...
     @classmethod
     def _extract_col_expression_collection(cls, expressions): ...
diff --git a/typeshed/third_party/3/enum.pyi b/typeshed/third_party/3/enum.pyi
index dcb3b9c..4032e81 100644
--- a/typeshed/third_party/3/enum.pyi
+++ b/typeshed/third_party/3/enum.pyi
@@ -10,7 +10,7 @@ class Enum:
     def __reduce_ex__(self, proto: Any) -> Any: ...
 
     name = ...  # type: str
-    value = None  # type: Any
+    value = ...  # type: Any
 
 class IntEnum(int, Enum): ...
 
diff --git a/typeshed/third_party/3/six/moves/__init__.pyi b/typeshed/third_party/3/six/moves/__init__.pyi
index b2dc6cd..859a7eb 100644
--- a/typeshed/third_party/3/six/moves/__init__.pyi
+++ b/typeshed/third_party/3/six/moves/__init__.pyi
@@ -26,6 +26,7 @@ from itertools import zip_longest as zip_longest
 import six.moves.cPickle as cPickle
 import html.parser as html_parser
 import html.entities as html_entities
+import http.client as http_client
 
 import six.moves.urllib_parse as urllib_parse
 import six.moves.urllib_error as urllib_error

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/mypy.git


