[med-svn] [mypy] 01/01: New upstream version 0.540
Michael Crusoe
misterc-guest at moszumanska.debian.org
Mon Oct 23 17:43:23 UTC 2017
This is an automated email from the git hooks/post-receive script.
misterc-guest pushed a commit to annotated tag upstream/0.540
in repository mypy.
commit e236de0c06e05219dedce501e800b1ac65068372
Author: Michael R. Crusoe <michael.crusoe at gmail.com>
Date: Mon Oct 23 10:31:18 2017 -0700
New upstream version 0.540
---
PKG-INFO | 2 +-
docs/source/cheat_sheet_py3.rst | 4 +-
docs/source/getting_started.rst | 8 +-
docs/source/kinds_of_types.rst | 2 +-
docs/source/revision_history.rst | 2 +
mypy.egg-info/PKG-INFO | 2 +-
mypy.egg-info/SOURCES.txt | 5 +
mypy/binder.py | 36 +-
mypy/build.py | 44 +-
mypy/checker.py | 179 +++-
mypy/checkexpr.py | 29 +-
mypy/checkmember.py | 81 +-
mypy/constraints.py | 2 +
mypy/fixup.py | 3 -
mypy/indirection.py | 5 +-
mypy/main.py | 8 +-
mypy/maptype.py | 3 +-
mypy/messages.py | 17 +-
mypy/nodes.py | 90 +-
mypy/semanal.py | 971 ++-------------------
mypy/semanal_pass1.py | 296 +++++++
mypy/semanal_pass3.py | 560 ++++++++++++
mypy/server/astdiff.py | 1 -
mypy/server/deps.py | 2 +-
mypy/strconv.py | 17 +-
mypy/stubgen.py | 5 +-
mypy/test/testcheck.py | 2 +-
mypy/test/testcmdline.py | 2 +-
mypy/test/testpythoneval.py | 29 +-
mypy/traverser.py | 2 +-
mypy/treetransform.py | 15 -
mypy/typeanal.py | 35 +-
mypy/types.py | 50 +-
mypy/util.py | 11 +-
mypy/version.py | 2 +-
mypy_self_check.ini | 11 +
test-data/unit/check-classes.test | 86 +-
test-data/unit/check-functions.test | 6 +-
test-data/unit/check-generics.test | 10 +
test-data/unit/check-incremental.test | 106 ++-
test-data/unit/check-inference.test | 33 +-
test-data/unit/check-isinstance.test | 233 ++++-
test-data/unit/check-overloading.test | 47 +
test-data/unit/check-selftype.test | 143 ++-
test-data/unit/check-tuples.test | 2 +-
test-data/unit/check-type-aliases.test | 42 +
test-data/unit/check-typevar-values.test | 9 +-
test-data/unit/check-unions.test | 425 +++++++++
test-data/unit/cmdline.test | 12 +
test-data/unit/fixtures/dict.pyi | 1 +
test-data/unit/fixtures/for.pyi | 3 +-
test-data/unit/fixtures/list.pyi | 2 +
test-data/unit/fixtures/python2.pyi | 1 +
test-data/unit/fixtures/set.pyi | 2 +
test-data/unit/fixtures/tuple.pyi | 1 +
test-data/unit/fixtures/type.pyi | 3 +-
test-data/unit/fixtures/typing-full.pyi | 1 +
test-data/unit/parse-python2.test | 6 +-
test-data/unit/parse.test | 51 +-
test-data/unit/pythoneval.test | 40 +
test-data/unit/semanal-basic.test | 11 +-
test-data/unit/semanal-classes.test | 8 +-
test-data/unit/semanal-symtable.test | 30 +-
test-data/unit/semanal-types.test | 8 +-
typeshed/stdlib/2/__builtin__.pyi | 5 +-
typeshed/stdlib/2/builtins.pyi | 5 +-
typeshed/stdlib/2/collections.pyi | 1 +
typeshed/stdlib/2/datetime.pyi | 4 +-
typeshed/stdlib/2/textwrap.pyi | 86 +-
typeshed/stdlib/2/whichdb.pyi | 5 +
typeshed/stdlib/2and3/crypt.pyi | 18 +
typeshed/stdlib/2and3/socket.pyi | 173 ++--
typeshed/stdlib/2and3/threading.pyi | 12 +-
typeshed/stdlib/2and3/warnings.pyi | 2 +-
typeshed/stdlib/2and3/xml/etree/ElementTree.pyi | 2 +-
typeshed/stdlib/3.3/ipaddress.pyi | 245 ++----
typeshed/stdlib/3.4/asyncio/futures.pyi | 5 +-
typeshed/stdlib/3/array.pyi | 50 +-
typeshed/stdlib/3/builtins.pyi | 5 +-
typeshed/stdlib/3/collections/__init__.pyi | 1 +
typeshed/stdlib/3/datetime.pyi | 9 +-
typeshed/stdlib/3/itertools.pyi | 5 +-
typeshed/stdlib/3/os/__init__.pyi | 322 +++++--
typeshed/stdlib/3/pipes.pyi | 4 +-
typeshed/stdlib/3/posix.pyi | 20 +
typeshed/stdlib/3/textwrap.pyi | 103 +--
typeshed/third_party/2/dateutil/tz/__init__.pyi | 13 +-
typeshed/third_party/2/pycurl.pyi | 238 ++++-
typeshed/third_party/2and3/boto/utils.pyi | 239 +++++
typeshed/third_party/2and3/pymysql/connections.pyi | 1 +
typeshed/third_party/2and3/requests/api.pyi | 2 +-
91 files changed, 3784 insertions(+), 1646 deletions(-)
diff --git a/PKG-INFO b/PKG-INFO
index c457957..c85de6a 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: mypy
-Version: 0.530
+Version: 0.540
Summary: Optional static typing for Python
Home-page: http://www.mypy-lang.org/
Author: Jukka Lehtosalo
diff --git a/docs/source/cheat_sheet_py3.rst b/docs/source/cheat_sheet_py3.rst
index 5ef62b2..09eab13 100644
--- a/docs/source/cheat_sheet_py3.rst
+++ b/docs/source/cheat_sheet_py3.rst
@@ -239,7 +239,7 @@ Other stuff
else:
return sys.stdout
- # forward references are useful if you want to referemce a class before it is designed
+ # forward references are useful if you want to reference a class before it is designed
def f(foo: A) -> int: # this will fail
...
@@ -304,4 +304,4 @@ Mypy brings limited support for PEP 526 annotations.
def __init__(self) -> None:
self.items: List[str] = []
-Please see :ref:`python-36` for more on mypy's compatability with Python 3.6's new features.
+Please see :ref:`python-36` for more on mypy's compatibility with Python 3.6's new features.
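The cheat sheet hunk above concerns forward references. As a minimal illustrative sketch of the string-literal form it alludes to (the cheat sheet's own follow-up example is not included in this hunk):

    def f(foo: 'A') -> int:   # the quoted name is resolved once A is defined
        return foo.x

    class A:
        x = 0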
diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst
index a41c125..310cd1a 100644
--- a/docs/source/getting_started.rst
+++ b/docs/source/getting_started.rst
@@ -6,7 +6,8 @@ Getting started
Installation
************
-Mypy requires Python 3.3 or later. Once you've `installed Python 3 <https://www.python.org/downloads/>`_, you can install mypy with:
+Mypy requires Python 3.3 or later. Once you've `installed Python 3 <https://www.python.org/downloads/>`_,
+you can install mypy with:
.. code-block:: text
@@ -15,10 +16,11 @@ Mypy requires Python 3.3 or later. Once you've `installed Python 3 <https://www
Installing from source
**********************
-To install mypy from source, clone the github repository and then run pip install locally:
+To install mypy from source, clone the github repository and then run
+``pip install`` locally:
.. code-block:: text
- $ git clone https://github.com/python/mypy.git
+ $ git clone --recurse-submodules https://github.com/python/mypy.git
$ cd mypy
$ sudo python3 -m pip install --upgrade .
diff --git a/docs/source/kinds_of_types.rst b/docs/source/kinds_of_types.rst
index 0c3432f..1580a1b 100644
--- a/docs/source/kinds_of_types.rst
+++ b/docs/source/kinds_of_types.rst
@@ -977,7 +977,7 @@ a value, you should use the
Note that unlike many other generics in the typing module, the ``SendType`` of
``Generator`` behaves contravariantly, not covariantly or invariantly.
-If you do not plan on recieving or returning values, then set the ``SendType``
+If you do not plan on receiving or returning values, then set the ``SendType``
or ``ReturnType`` to ``None``, as appropriate. For example, we could have
annotated the first example as the following:
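For context, the sentence above refers to generators that neither receive nor return values; a minimal sketch of that annotation (the documentation's full example lies outside this hunk):

    from typing import Generator

    def squares(n: int) -> Generator[int, None, None]:
        # yields ints, never receives a value via send(), and returns nothing
        for i in range(n):
            yield i * i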
diff --git a/docs/source/revision_history.rst b/docs/source/revision_history.rst
index 23ca45b..fbfd676 100644
--- a/docs/source/revision_history.rst
+++ b/docs/source/revision_history.rst
@@ -4,6 +4,8 @@ Revision history
List of major changes:
- October 2017
+ * Publish ``mypy`` version 0.540 on PyPI.
+
* Publish ``mypy`` version 0.530 on PyPI.
- August-September 2017
diff --git a/mypy.egg-info/PKG-INFO b/mypy.egg-info/PKG-INFO
index c457957..c85de6a 100644
--- a/mypy.egg-info/PKG-INFO
+++ b/mypy.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: mypy
-Version: 0.530
+Version: 0.540
Summary: Optional static typing for Python
Home-page: http://www.mypy-lang.org/
Author: Jukka Lehtosalo
diff --git a/mypy.egg-info/SOURCES.txt b/mypy.egg-info/SOURCES.txt
index 8ec764a..4dc9bb0 100644
--- a/mypy.egg-info/SOURCES.txt
+++ b/mypy.egg-info/SOURCES.txt
@@ -74,6 +74,8 @@ mypy/plugin.py
mypy/report.py
mypy/sametypes.py
mypy/semanal.py
+mypy/semanal_pass1.py
+mypy/semanal_pass3.py
mypy/sharedparse.py
mypy/solve.py
mypy/stats.py
@@ -455,6 +457,7 @@ typeshed/stdlib/2/urllib.pyi
typeshed/stdlib/2/urllib2.pyi
typeshed/stdlib/2/urlparse.pyi
typeshed/stdlib/2/user.pyi
+typeshed/stdlib/2/whichdb.pyi
typeshed/stdlib/2/xmlrpclib.pyi
typeshed/stdlib/2/distutils/__init__.pyi
typeshed/stdlib/2/distutils/emxccompiler.pyi
@@ -508,6 +511,7 @@ typeshed/stdlib/2and3/codeop.pyi
typeshed/stdlib/2and3/colorsys.pyi
typeshed/stdlib/2and3/contextlib.pyi
typeshed/stdlib/2and3/copy.pyi
+typeshed/stdlib/2and3/crypt.pyi
typeshed/stdlib/2and3/csv.pyi
typeshed/stdlib/2and3/difflib.pyi
typeshed/stdlib/2and3/dis.pyi
@@ -1021,6 +1025,7 @@ typeshed/third_party/2and3/boto/connection.pyi
typeshed/third_party/2and3/boto/exception.pyi
typeshed/third_party/2and3/boto/plugin.pyi
typeshed/third_party/2and3/boto/regioninfo.pyi
+typeshed/third_party/2and3/boto/utils.pyi
typeshed/third_party/2and3/boto/ec2/__init__.pyi
typeshed/third_party/2and3/boto/elb/__init__.pyi
typeshed/third_party/2and3/boto/kms/__init__.pyi
diff --git a/mypy/binder.py b/mypy/binder.py
index 956c950..0cba54f 100644
--- a/mypy/binder.py
+++ b/mypy/binder.py
@@ -1,5 +1,10 @@
-from typing import Dict, List, Set, Iterator, Union, Optional, cast
+from typing import Dict, List, Set, Iterator, Union, Optional, Tuple, cast
from contextlib import contextmanager
+from collections import defaultdict
+
+MYPY = False
+if MYPY:
+ from typing import DefaultDict
from mypy.types import Type, AnyType, PartialType, UnionType, TypeOfAny
from mypy.subtypes import is_subtype
@@ -37,6 +42,12 @@ class DeclarationsFrame(Dict[Key, Optional[Type]]):
self.unreachable = False
+if MYPY:
+ # This is the type of stored assignments for union type rvalues.
+ # We use 'if MYPY: ...' since typing-3.5.1 does not have 'DefaultDict'
+ Assigns = DefaultDict[Expression, List[Tuple[Type, Optional[Type]]]]
+
+
class ConditionalTypeBinder:
"""Keep track of conditional types of variables.
@@ -57,6 +68,9 @@ class ConditionalTypeBinder:
reveal_type(lst[0].a) # str
```
"""
+ # Stored assignments for situations with tuple/list lvalue and rvalue of union type.
+ # This maps an expression to a list of bound types for every item in the union type.
+ type_assignments = None # type: Optional[Assigns]
def __init__(self) -> None:
# The stack of frames currently used. These map
@@ -210,10 +224,30 @@ class ConditionalTypeBinder:
return result
+ @contextmanager
+ def accumulate_type_assignments(self) -> 'Iterator[Assigns]':
+ """Push a new map to collect assigned types in multiassign from union.
+
+ If this map is not None, actual binding is deferred until all items in
+ the union are processed (a union of collected items is later bound
+ manually by the caller).
+ """
+ old_assignments = None
+ if self.type_assignments is not None:
+ old_assignments = self.type_assignments
+ self.type_assignments = defaultdict(list)
+ yield self.type_assignments
+ self.type_assignments = old_assignments
+
def assign_type(self, expr: Expression,
type: Type,
declared_type: Optional[Type],
restrict_any: bool = False) -> None:
+ if self.type_assignments is not None:
+ # We are in a multiassign from union, defer the actual binding,
+ # just collect the types.
+ self.type_assignments[expr].append((type, declared_type))
+ return
if not isinstance(expr, BindableTypes):
return None
if not literal(expr):
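The new accumulate_type_assignments() context manager defers bindings and collects them in a defaultdict instead. A small, runnable sketch of that defer-and-accumulate pattern in isolation (generic names, not mypy APIs):

    from collections import defaultdict
    from contextlib import contextmanager
    from typing import DefaultDict, Iterator, List, Optional

    class Recorder:
        pending = None  # type: Optional[DefaultDict[str, List[int]]]

        @contextmanager
        def accumulate(self) -> Iterator[DefaultDict[str, List[int]]]:
            old = self.pending
            self.pending = defaultdict(list)
            yield self.pending
            self.pending = old

        def assign(self, key: str, value: int) -> None:
            if self.pending is not None:
                self.pending[key].append(value)   # defer: only record the value
                return
            print('binding', key, '->', value)    # normal, immediate path

    r = Recorder()
    with r.accumulate() as collected:
        r.assign('x', 1)
        r.assign('x', 2)
    print(dict(collected))   # {'x': [1, 2]}

The checker.py hunk later in this patch drives the real API the same way: it checks each union item inside the context manager, then binds a simplified union of everything that was collected.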
diff --git a/mypy/build.py b/mypy/build.py
index a079aa0..891fc3c 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -21,6 +21,7 @@ import site
import sys
import time
from os.path import dirname, basename
+import errno
from typing import (AbstractSet, Dict, Iterable, Iterator, List, cast, Any,
NamedTuple, Optional, Set, Tuple, Union, Callable)
@@ -30,7 +31,9 @@ if MYPY:
from typing import Deque
from mypy.nodes import (MypyFile, Node, ImportBase, Import, ImportFrom, ImportAll)
-from mypy.semanal import FirstPass, SemanticAnalyzer, ThirdPass
+from mypy.semanal_pass1 import SemanticAnalyzerPass1
+from mypy.semanal import SemanticAnalyzerPass2
+from mypy.semanal_pass3 import SemanticAnalyzerPass3
from mypy.checker import TypeChecker
from mypy.indirection import TypeIndirectionVisitor
from mypy.errors import Errors, CompileError, DecodeError, report_internal_error
@@ -469,7 +472,7 @@ class BuildManager:
all_types: Map {Expression: Type} collected from all modules
options: Build options
missing_modules: Set of modules that could not be imported encountered so far
- stale_modules: Set of modules that needed to be rechecked
+ stale_modules: Set of modules that needed to be rechecked (only used by tests)
version_id: The current mypy version (based on commit id when possible)
plugin: Active mypy plugin(s)
errors: Used for reporting all errors
@@ -496,10 +499,11 @@ class BuildManager:
self.modules = {} # type: Dict[str, MypyFile]
self.missing_modules = set() # type: Set[str]
self.plugin = plugin
- self.semantic_analyzer = SemanticAnalyzer(self.modules, self.missing_modules,
+ self.semantic_analyzer = SemanticAnalyzerPass2(self.modules, self.missing_modules,
lib_path, self.errors, self.plugin)
self.modules = self.semantic_analyzer.modules
- self.semantic_analyzer_pass3 = ThirdPass(self.modules, self.errors, self.semantic_analyzer)
+ self.semantic_analyzer_pass3 = SemanticAnalyzerPass3(self.modules, self.errors,
+ self.semantic_analyzer)
self.all_types = {} # type: Dict[Expression, Type]
self.indirection_detector = TypeIndirectionVisitor()
self.stale_modules = set() # type: Set[str]
@@ -1165,6 +1169,25 @@ def write_cache(id: str, path: str, tree: MypyFile,
return interface_hash
+def delete_cache(id: str, path: str, manager: BuildManager) -> None:
+ """Delete cache files for a module.
+
+ The cache files for a module are deleted when mypy finds errors there.
+ This avoids inconsistent states with cache files from different mypy runs,
+ see #4043 for an example.
+ """
+ path = os.path.abspath(path)
+ meta_json, data_json = get_cache_names(id, path, manager)
+ manager.log('Deleting {} {} {} {}'.format(id, path, meta_json, data_json))
+
+ for filename in [data_json, meta_json]:
+ try:
+ os.remove(filename)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ manager.log("Error deleting cache file {}: {}".format(filename, e.strerror))
+
+
"""Dependency manager.
Design
@@ -1534,11 +1557,12 @@ class State:
"""Marks this module as having been fully re-analyzed by the type-checker."""
self.manager.rechecked_modules.add(self.id)
- def mark_interface_stale(self) -> None:
+ def mark_interface_stale(self, *, on_errors: bool = False) -> None:
"""Marks this module as having a stale public interface, and discards the cache data."""
self.meta = None
self.externally_same = False
- self.manager.stale_modules.add(self.id)
+ if not on_errors:
+ self.manager.stale_modules.add(self.id)
def check_blockers(self) -> None:
"""Raise CompileError if a blocking error is detected."""
@@ -1584,7 +1608,7 @@ class State:
"""
In Python, if a and a.b are both modules, running `import a.b` will
modify not only the current module's namespace, but a's namespace as
- well -- see SemanticAnalyzer.add_submodules_to_parent_modules for more
+ well -- see SemanticAnalyzerPass2.add_submodules_to_parent_modules for more
details.
However, this patching process can occur after `a` has been parsed and
@@ -1668,13 +1692,13 @@ class State:
# definitions in the file to the symbol table. We must do
# this before processing imports, since this may mark some
# import statements as unreachable.
- first = FirstPass(manager.semantic_analyzer)
+ first = SemanticAnalyzerPass1(manager.semantic_analyzer)
with self.wrap_context():
first.visit_file(self.tree, self.xpath, self.id, self.options)
# Initialize module symbol table, which was populated by the
# semantic analyzer.
- # TODO: Why can't FirstPass .analyze() do this?
+ # TODO: Why can't SemanticAnalyzerPass1 .analyze() do this?
self.tree.names = manager.semantic_analyzer.globals
# Compute (direct) dependencies.
@@ -1813,6 +1837,8 @@ class State:
else:
is_errors = self.manager.errors.is_errors()
if is_errors:
+ delete_cache(self.id, self.path, self.manager)
+ self.mark_interface_stale(on_errors=True)
return
dep_prios = [self.priorities.get(dep, PRI_HIGH) for dep in self.dependencies]
new_interface_hash = write_cache(
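delete_cache() above removes both cache files for a module while treating an already-missing file as success. The same remove-unless-ENOENT idiom, as a runnable sketch with generic filenames (delete_cache itself logs instead of raising):

    import errno
    import os

    def remove_if_present(filename: str) -> None:
        # A file that is already gone counts as success; anything else is a real error.
        try:
            os.remove(filename)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

    remove_if_present('module.meta.json')
    remove_if_present('module.data.json')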
diff --git a/mypy/checker.py b/mypy/checker.py
index a34fba9..d58fb24 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -131,6 +131,8 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
# Used for collecting inferred attribute types so that they can be checked
# for consistency.
inferred_attribute_types = None # type: Optional[Dict[Var, Type]]
+ # Don't infer partial None types if we are processing assignment from Union
+ no_partial_types = False # type: bool
# The set of all dependencies (suppressed or not) that this module accesses, either
# directly or indirectly.
@@ -259,16 +261,15 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
def handle_cannot_determine_type(self, name: str, context: Context) -> None:
node = self.scope.top_function()
- if (self.pass_num < LAST_PASS and node is not None
- and isinstance(node, (FuncDef, LambdaExpr))):
+ if self.pass_num < LAST_PASS and isinstance(node, (FuncDef, LambdaExpr)):
# Don't report an error yet. Just defer.
if self.errors.type_name:
type_name = self.errors.type_name[-1]
else:
type_name = None
# Shouldn't we freeze the entire scope?
- active_class = self.scope.active_class()
- self.deferred_nodes.append(DeferredNode(node, type_name, active_class))
+ enclosing_class = self.scope.enclosing_class()
+ self.deferred_nodes.append(DeferredNode(node, type_name, enclosing_class))
# Set a marker so that we won't infer additional types in this
# function. Any inferred types could be bogus, because there's at
# least one type that we don't know.
@@ -1146,7 +1147,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
# variables. If an argument or return type of override
# does not have the correct subtyping relationship
# with the original type even after these variables
- # are erased, then it is definitely an incompatiblity.
+ # are erased, then it is definitely an incompatibility.
override_ids = override.type_var_ids()
@@ -1605,12 +1606,13 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
rvalue: Expression,
context: Context,
infer_lvalue_type: bool = True,
- msg: Optional[str] = None) -> None:
+ rv_type: Optional[Type] = None,
+ undefined_rvalue: bool = False) -> None:
"""Check the assignment of one rvalue to a number of lvalues."""
# Infer the type of an ordinary rvalue expression.
- rvalue_type = self.expr_checker.accept(rvalue) # TODO maybe elsewhere; redundant
- undefined_rvalue = False
+ # TODO: maybe elsewhere; redundant.
+ rvalue_type = rv_type or self.expr_checker.accept(rvalue)
if isinstance(rvalue_type, UnionType):
# If this is an Optional type in non-strict Optional code, unwrap it.
@@ -1628,10 +1630,71 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
elif isinstance(rvalue_type, TupleType):
self.check_multi_assignment_from_tuple(lvalues, rvalue, rvalue_type,
context, undefined_rvalue, infer_lvalue_type)
+ elif isinstance(rvalue_type, UnionType):
+ self.check_multi_assignment_from_union(lvalues, rvalue, rvalue_type, context,
+ infer_lvalue_type)
else:
self.check_multi_assignment_from_iterable(lvalues, rvalue_type,
context, infer_lvalue_type)
+ def check_multi_assignment_from_union(self, lvalues: List[Expression], rvalue: Expression,
+ rvalue_type: UnionType, context: Context,
+ infer_lvalue_type: bool) -> None:
+ """Check assignment to multiple lvalue targets when rvalue type is a Union[...].
+ For example:
+
+ t: Union[Tuple[int, int], Tuple[str, str]]
+ x, y = t
+ reveal_type(x) # Union[int, str]
+
+ The idea in this case is to process the assignment for every item of the union.
+ Important note: the types are collected in two places: 'union_types' contains the
+ inferred types for first assignments, while 'assignments' contains the narrowed
+ types for the binder.
+ """
+ self.no_partial_types = True
+ transposed = tuple([] for _ in
+ self.flatten_lvalues(lvalues)) # type: Tuple[List[Type], ...]
+ # Notify binder that we want to defer bindings and instead collect types.
+ with self.binder.accumulate_type_assignments() as assignments:
+ for item in rvalue_type.items:
+ # Type check the assignment separately for each union item and collect
+ # the inferred lvalue types for each union item.
+ self.check_multi_assignment(lvalues, rvalue, context,
+ infer_lvalue_type=infer_lvalue_type,
+ rv_type=item, undefined_rvalue=True)
+ for t, lv in zip(transposed, self.flatten_lvalues(lvalues)):
+ t.append(self.type_map.pop(lv, AnyType(TypeOfAny.special_form)))
+ union_types = tuple(UnionType.make_simplified_union(col) for col in transposed)
+ for expr, items in assignments.items():
+ # Bind a union of types collected in 'assignments' to every expression.
+ if isinstance(expr, StarExpr):
+ expr = expr.expr
+ types, declared_types = zip(*items)
+ self.binder.assign_type(expr,
+ UnionType.make_simplified_union(types),
+ UnionType.make_simplified_union(declared_types),
+ False)
+ for union, lv in zip(union_types, self.flatten_lvalues(lvalues)):
+ # Properly store the inferred types.
+ _1, _2, inferred = self.check_lvalue(lv)
+ if inferred:
+ self.set_inferred_type(inferred, lv, union)
+ else:
+ self.store_type(lv, union)
+ self.no_partial_types = False
+
+ def flatten_lvalues(self, lvalues: List[Expression]) -> List[Expression]:
+ res = [] # type: List[Expression]
+ for lv in lvalues:
+ if isinstance(lv, (TupleExpr, ListExpr)):
+ res.extend(self.flatten_lvalues(lv.items))
+ if isinstance(lv, StarExpr):
+ # Unwrap StarExpr, since it is unwrapped by other helpers.
+ lv = lv.expr
+ res.append(lv)
+ return res
+
def check_multi_assignment_from_tuple(self, lvalues: List[Lvalue], rvalue: Expression,
rvalue_type: TupleType, context: Context,
undefined_rvalue: bool,
@@ -1654,7 +1717,11 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
relevant_items = reinferred_rvalue_type.relevant_items()
if len(relevant_items) == 1:
reinferred_rvalue_type = relevant_items[0]
-
+ if isinstance(reinferred_rvalue_type, UnionType):
+ self.check_multi_assignment_from_union(lvalues, rvalue,
+ reinferred_rvalue_type, context,
+ infer_lvalue_type)
+ return
assert isinstance(reinferred_rvalue_type, TupleType)
rvalue_type = reinferred_rvalue_type
@@ -1716,7 +1783,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
returns in: ([1,2], [3,4,5], [6,7])
"""
nr_right_of_star = length - star_index - 1
- right_index = nr_right_of_star if -nr_right_of_star != 0 else len(items)
+ right_index = -nr_right_of_star if nr_right_of_star != 0 else len(items)
left = items[:star_index]
star = items[star_index:right_index]
right = items[right_index:]
@@ -1769,7 +1836,10 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
lvalue_type = self.expr_checker.analyze_ref_expr(lvalue, lvalue=True)
self.store_type(lvalue, lvalue_type)
elif isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr):
- types = [self.check_lvalue(sub_expr)[0] for sub_expr in lvalue.items]
+ types = [self.check_lvalue(sub_expr)[0] or
+ # This type will be used as a context for further inference of rvalue,
+ # we put Uninhabited if there is no information available from lvalue.
+ UninhabitedType() for sub_expr in lvalue.items]
lvalue_type = TupleType(types, self.named_type('builtins.tuple'))
else:
lvalue_type = self.expr_checker.accept(lvalue)
@@ -1797,7 +1867,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
"""Infer the type of initialized variables from initializer type."""
if isinstance(init_type, DeletedType):
self.msg.deleted_as_rvalue(init_type, context)
- elif not is_valid_inferred_type(init_type):
+ elif not is_valid_inferred_type(init_type) and not self.no_partial_types:
# We cannot use the type of the initialization expression for full type
# inference (it's not specific enough), but we might be able to give
# partial type which will be made more specific later. A partial type
@@ -1820,7 +1890,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
self.set_inferred_type(name, lvalue, init_type)
def infer_partial_type(self, name: Var, lvalue: Lvalue, init_type: Type) -> bool:
- if isinstance(init_type, (NoneTyp, UninhabitedType)):
+ if isinstance(init_type, NoneTyp):
partial_type = PartialType(None, name, [init_type])
elif isinstance(init_type, Instance):
fullname = init_type.type.fullname()
@@ -1894,7 +1964,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
rvalue: Expression, context: Context) -> Tuple[Type, bool]:
"""Type member assigment.
- This is defers to check_simple_assignment, unless the member expression
+ This defers to check_simple_assignment, unless the member expression
is a descriptor, in which case this checks descriptor semantics as well.
Return the inferred rvalue_type and whether to infer anything about the attribute type
@@ -2694,7 +2764,19 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
iterable = map_instance_to_supertype(
instance,
self.lookup_typeinfo('typing.Iterable'))
- return iterable.args[0]
+ item_type = iterable.args[0]
+ if not isinstance(item_type, AnyType):
+ # This relies on 'map_instance_to_supertype' returning 'Iterable[Any]'
+ # in case there is no explicit base class.
+ return item_type
+ # Try also structural typing.
+ iter_type = find_member('__iter__', instance, instance)
+ if (iter_type and isinstance(iter_type, CallableType) and
+ isinstance(iter_type.ret_type, Instance)):
+ iterator = map_instance_to_supertype(iter_type.ret_type,
+ self.lookup_typeinfo('typing.Iterator'))
+ item_type = iterator.args[0]
+ return item_type
def function_type(self, func: FuncBase) -> FunctionLike:
return function_type(func, self.named_type('builtins.function'))
@@ -2871,6 +2953,39 @@ def remove_optional(typ: Type) -> Type:
return typ
+def builtin_item_type(tp: Type) -> Optional[Type]:
+ """Get the item type of a builtin container.
+
+ If 'tp' is not one of the built-in containers (these include NamedTuple and TypedDict)
+ or if the container is not parameterized (like List or List[Any])
+ return None. This function is used to narrow optional types in situations like this:
+
+ x: Optional[int]
+ if x in (1, 2, 3):
+ x + 42 # OK
+
+ Note: this is only OK for built-in containers, where we know the behavior
+ of __contains__.
+ """
+ if isinstance(tp, Instance):
+ if tp.type.fullname() in ['builtins.list', 'builtins.tuple', 'builtins.dict',
+ 'builtins.set', 'builtins.frozenset']:
+ if not tp.args:
+ # TODO: fix tuple in lib-stub/builtins.pyi (it should be generic).
+ return None
+ if not isinstance(tp.args[0], AnyType):
+ return tp.args[0]
+ elif isinstance(tp, TupleType) and all(not isinstance(it, AnyType) for it in tp.items):
+ return UnionType.make_simplified_union(tp.items) # this type is not externally visible
+ elif isinstance(tp, TypedDictType):
+ # TypedDict always has non-optional string keys.
+ if tp.fallback.type.fullname() == 'typing.Mapping':
+ return tp.fallback.args[0]
+ elif tp.fallback.type.bases[0].type.fullname() == 'typing.Mapping':
+ return tp.fallback.type.bases[0].args[0]
+ return None
+
+
def and_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap:
"""Calculate what information we can learn from the truth of (e1 and e2)
in terms of the information that we can learn from the truth of e1 and
@@ -3017,6 +3132,20 @@ def find_isinstance_check(node: Expression,
optional_expr = node.operands[1]
if is_overlapping_types(optional_type, comp_type):
return {optional_expr: remove_optional(optional_type)}, {}
+ elif node.operators in [['in'], ['not in']]:
+ expr = node.operands[0]
+ left_type = type_map[expr]
+ right_type = builtin_item_type(type_map[node.operands[1]])
+ right_ok = right_type and (not is_optional(right_type) and
+ (not isinstance(right_type, Instance) or
+ right_type.type.fullname() != 'builtins.object'))
+ if (right_type and right_ok and is_optional(left_type) and
+ literal(expr) == LITERAL_TYPE and not is_literal_none(expr) and
+ is_overlapping_types(left_type, right_type)):
+ if node.operators == ['in']:
+ return {expr: remove_optional(left_type)}, {}
+ if node.operators == ['not in']:
+ return {}, {expr: remove_optional(left_type)}
elif isinstance(node, RefExpr):
# Restrict the type of the variable to True-ish/False-ish in the if and else branches
# respectively
@@ -3262,11 +3391,11 @@ def is_valid_inferred_type(typ: Type) -> bool:
invalid. When doing strict Optional checking, only None and types that are
incompletely defined (i.e. contain UninhabitedType) are invalid.
"""
- if is_same_type(typ, NoneTyp()):
- # With strict Optional checking, we *may* eventually infer NoneTyp, but
- # we only do that if we can't infer a specific Optional type. This
- # resolution happens in leave_partial_types when we pop a partial types
- # scope.
+ if isinstance(typ, (NoneTyp, UninhabitedType)):
+ # With strict Optional checking, we *may* eventually infer NoneTyp when
+ # the initializer is None, but we only do that if we can't infer a
+ # specific Optional type. This resolution happens in
+ # leave_partial_types when we pop a partial types scope.
return False
return is_valid_inferred_type_component(typ)
@@ -3320,6 +3449,16 @@ class Scope:
return self.stack[-1]
return None
+ def enclosing_class(self) -> Optional[TypeInfo]:
+ top = self.top_function()
+ assert top, "This method must be called from inside a function"
+ index = self.stack.index(top)
+ assert index, "Scope stack must always start with a module"
+ enclosing = self.stack[index - 1]
+ if isinstance(enclosing, TypeInfo):
+ return enclosing
+ return None
+
def active_self_type(self) -> Optional[Union[Instance, TupleType]]:
info = self.active_class()
if info:
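Two of the user-visible behaviors added to checker.py above can be sketched as follows (a mypy-checkable sketch; reveal_type() is a checker-only construct, and the expected results paraphrase the docstrings above rather than exact mypy output):

    from typing import Optional, Tuple, Union

    def f(t: Union[Tuple[int, int], Tuple[str, str]], v: Optional[int]) -> None:
        x, y = t
        reveal_type(x)     # inferred as Union[int, str] (multi-assignment from union)
        if v in (1, 2, 3):
            v + 42         # OK: 'in' against a literal tuple narrows away None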
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index e35ab90..9098891 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -66,7 +66,7 @@ def extract_refexpr_names(expr: RefExpr) -> Set[str]:
while expr.kind == MODULE_REF or expr.fullname is not None:
if expr.kind == MODULE_REF and expr.fullname is not None:
# If it's None, something's wrong (perhaps due to an
- # import cycle or a supressed error). For now we just
+ # import cycle or a suppressed error). For now we just
# skip it.
output.add(expr.fullname)
@@ -150,7 +150,9 @@ class ExpressionChecker(ExpressionVisitor[Type]):
elif isinstance(node, FuncDef):
# Reference to a global function.
result = function_type(node, self.named_type('builtins.function'))
- elif isinstance(node, OverloadedFuncDef):
+ elif isinstance(node, OverloadedFuncDef) and node.type is not None:
+ # node.type is None when there are multiple definitions of a function
+ # and it's decorated by something that is not typing.overload
result = node.type
elif isinstance(node, TypeInfo):
# Reference to a type object.
@@ -536,7 +538,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
if (isinstance(callable_node, RefExpr)
and callable_node.fullname in ('enum.Enum', 'enum.IntEnum',
'enum.Flag', 'enum.IntFlag')):
- # An Enum() call that failed SemanticAnalyzer.check_enum_call().
+ # An Enum() call that failed SemanticAnalyzerPass2.check_enum_call().
return callee.ret_type, callee
if (callee.is_type_obj() and callee.type_object().is_abstract
@@ -650,7 +652,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
res = type_object_type(item.type, self.named_type)
if isinstance(res, CallableType):
res = res.copy_modified(from_type_type=True)
- return res
+ return expand_type_by_instance(res, item)
if isinstance(item, UnionType):
return UnionType([self.analyze_type_type_callee(item, context)
for item in item.relevant_items()], item.line)
@@ -769,7 +771,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
# Only substitute non-Uninhabited and non-erased types.
new_args = [] # type: List[Optional[Type]]
for arg in args:
- if isinstance(arg, UninhabitedType) or has_erased_component(arg):
+ if has_uninhabited_component(arg) or has_erased_component(arg):
new_args.append(None)
else:
new_args.append(arg)
@@ -2768,6 +2770,19 @@ class HasErasedComponentsQuery(types.TypeQuery[bool]):
return True
+def has_uninhabited_component(t: Optional[Type]) -> bool:
+ return t is not None and t.accept(HasUninhabitedComponentsQuery())
+
+
+class HasUninhabitedComponentsQuery(types.TypeQuery[bool]):
+ """Visitor for querying whether a type has an UninhabitedType component."""
+ def __init__(self) -> None:
+ super().__init__(any)
+
+ def visit_uninhabited_type(self, t: UninhabitedType) -> bool:
+ return True
+
+
def overload_arg_similarity(actual: Type, formal: Type) -> int:
"""Return if caller argument (actual) is compatible with overloaded signature arg (formal).
@@ -2817,10 +2832,10 @@ def overload_arg_similarity(actual: Type, formal: Type) -> int:
# Since Type[T] is covariant, check if actual = Type[A] is
# a subtype of formal = Type[F].
return overload_arg_similarity(actual.item, formal.item)
- elif isinstance(actual, CallableType) and actual.is_type_obj():
+ elif isinstance(actual, FunctionLike) and actual.is_type_obj():
# Check if the actual is a constructor of some sort.
# Note that this is unsound, since we don't check the __init__ signature.
- return overload_arg_similarity(actual.ret_type, formal.item)
+ return overload_arg_similarity(actual.items()[0].ret_type, formal.item)
else:
return 0
if isinstance(actual, TypedDictType):
diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index ea8aff8..db583ed 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -20,6 +20,8 @@ from mypy.typevars import fill_typevars
from mypy.plugin import Plugin, AttributeContext
from mypy import messages
from mypy import subtypes
+from mypy import meet
+
MYPY = False
if MYPY: # import for forward declaration only
import mypy.checker
@@ -177,26 +179,35 @@ def analyze_member_access(name: str,
elif isinstance(typ, TypeType):
# Similar to FunctionLike + is_type_obj() above.
item = None
+ fallback = builtin_type('builtins.type')
+ ignore_messages = msg.copy()
+ ignore_messages.disable_errors()
if isinstance(typ.item, Instance):
item = typ.item
elif isinstance(typ.item, AnyType):
- fallback = builtin_type('builtins.type')
- ignore_messages = msg.copy()
- ignore_messages.disable_errors()
return analyze_member_access(name, fallback, node, is_lvalue, is_super,
is_operator, builtin_type, not_ready_callback,
ignore_messages, original_type=original_type, chk=chk)
elif isinstance(typ.item, TypeVarType):
if isinstance(typ.item.upper_bound, Instance):
item = typ.item.upper_bound
+ elif isinstance(typ.item, FunctionLike) and typ.item.is_type_obj():
+ item = typ.item.fallback
+ elif isinstance(typ.item, TypeType):
+ # Access member on metaclass object via Type[Type[C]]
+ if isinstance(typ.item.item, Instance):
+ item = typ.item.item.type.metaclass_type
if item and not is_operator:
# See comment above for why operators are skipped
result = analyze_class_attribute_access(item, name, node, is_lvalue,
builtin_type, not_ready_callback, msg,
original_type=original_type)
if result:
- return result
- fallback = builtin_type('builtins.type')
+ if not (isinstance(result, AnyType) and item.type.fallback_to_any):
+ return result
+ else:
+ # We don't want errors on metaclass lookup for classes with Any fallback
+ msg = ignore_messages
if item is not None:
fallback = item.type.metaclass_type or fallback
return analyze_member_access(name, fallback, node, is_lvalue, is_super,
@@ -286,6 +297,7 @@ def analyze_var(name: str, var: Var, itype: Instance, info: TypeInfo, node: Cont
This is conceptually part of analyze_member_access and the arguments are similar.
+ itype is the class object in which var is defined
original_type is the type of E in the expression E.var
"""
# Found a member variable.
@@ -310,15 +322,21 @@ def analyze_var(name: str, var: Var, itype: Instance, info: TypeInfo, node: Cont
msg.cant_assign_to_method(node)
if not var.is_staticmethod:
- # Class-level function objects and classmethods become bound
- # methods: the former to the instance, the latter to the
- # class.
+ # Class-level function objects and classmethods become bound methods:
+ # the former to the instance, the latter to the class.
functype = t
- check_method_type(functype, itype, var.is_classmethod, node, msg)
+ # Use meet to narrow original_type to the dispatched type.
+ # For example, assume
+ # * A.f: Callable[[A1], None] where A1 <: A (maybe A1 == A)
+ # * B.f: Callable[[B1], None] where B1 <: B (maybe B1 == B)
+ # * x: Union[A1, B1]
+ # In `x.f`, when checking `x` against A1 we assume x is compatible with A
+ # and similarly for B1 when checking against B
+ dispatched_type = meet.meet_types(original_type, itype)
+ check_self_arg(functype, dispatched_type, var.is_classmethod, node, name, msg)
signature = bind_self(functype, original_type, var.is_classmethod)
if var.is_property:
- # A property cannot have an overloaded type => the cast
- # is fine.
+ # A property cannot have an overloaded type => the cast is fine.
assert isinstance(signature, CallableType)
result = signature.ret_type
else:
@@ -370,33 +388,28 @@ def lookup_member_var_or_accessor(info: TypeInfo, name: str,
return None
-def check_method_type(functype: FunctionLike, itype: Instance, is_classmethod: bool,
- context: Context, msg: MessageBuilder) -> None:
+def check_self_arg(functype: FunctionLike, dispatched_arg_type: Type, is_classmethod: bool,
+ context: Context, name: str, msg: MessageBuilder) -> None:
+ """For x.f where A.f: A1 -> T, check that meet(type(x), A) <: A1 for each overload.
+
+ dispatched_arg_type is meet(B, A) in the following example
+
+ def g(x: B): x.f
+ class A:
+ f: Callable[[A1], None]
+ """
+ # TODO: this is too strict. We can return filtered overloads for matching definitions
for item in functype.items():
if not item.arg_types or item.arg_kinds[0] not in (ARG_POS, ARG_STAR):
# No positional first (self) argument (*args is okay).
- msg.invalid_method_type(item, context)
- elif not is_classmethod:
- # Check that self argument has type 'Any' or valid instance type.
- selfarg = item.arg_types[0]
- # If this is a method of a tuple class, correct for the fact that
- # we passed to typ.fallback in analyze_member_access. See #1432.
- if isinstance(selfarg, TupleType):
- selfarg = selfarg.fallback
- if not subtypes.is_subtype(selfarg, itype):
- msg.invalid_method_type(item, context)
+ msg.no_formal_self(name, item, context)
else:
- # Check that cls argument has type 'Any' or valid class type.
- # (This is sufficient for the current treatment of @classmethod,
- # but probably needs to be revisited when we implement Type[C]
- # or advanced variants of it like Type[<args>, C].)
- clsarg = item.arg_types[0]
- if isinstance(clsarg, CallableType) and clsarg.is_type_obj():
- if not subtypes.is_equivalent(clsarg.ret_type, itype):
- msg.invalid_class_method_type(item, context)
- else:
- if not subtypes.is_equivalent(clsarg, AnyType(TypeOfAny.special_form)):
- msg.invalid_class_method_type(item, context)
+ selfarg = item.arg_types[0]
+ if is_classmethod:
+ dispatched_arg_type = TypeType.make_normalized(dispatched_arg_type)
+ if not subtypes.is_subtype(dispatched_arg_type, erase_to_bound(selfarg)):
+ msg.incompatible_self_argument(name, dispatched_arg_type, item,
+ is_classmethod, context)
def analyze_class_attribute_access(itype: Instance,
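check_self_arg() above verifies that the dispatched type is compatible with the first argument of a callable class attribute. A small hypothetical example (not from the test suite) of code it now rejects, using the new message added in messages.py below:

    from typing import Callable

    class B: ...

    class A:
        # The callable attribute only accepts B as its first (self-like)
        # argument, but it will be dispatched with an A instance.
        f: Callable[[B], None]

    def g(a: A) -> None:
        a.f  # error: Invalid self argument "A" to attribute function "f" ...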
diff --git a/mypy/constraints.py b/mypy/constraints.py
index 4e7c2ec..0a79483 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -506,6 +506,8 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
self.direction)
elif isinstance(self.actual, TypeType):
return infer_constraints(template.item, self.actual.item, self.direction)
+ elif isinstance(self.actual, AnyType):
+ return infer_constraints(template.item, self.actual, self.direction)
else:
return []
diff --git a/mypy/fixup.py b/mypy/fixup.py
index d60ac55..375ca09 100644
--- a/mypy/fixup.py
+++ b/mypy/fixup.py
@@ -54,9 +54,6 @@ class NodeFixer(NodeVisitor[None]):
info.defn.accept(self)
if info.names:
self.visit_symbol_table(info.names)
- if info.subtypes:
- for st in info.subtypes:
- self.visit_type_info(st)
if info.bases:
for base in info.bases:
base.accept(self.type_fixer)
diff --git a/mypy/indirection.py b/mypy/indirection.py
index badbe38..a40b718 100644
--- a/mypy/indirection.py
+++ b/mypy/indirection.py
@@ -103,4 +103,7 @@ class TypeIndirectionVisitor(SyntheticTypeVisitor[Set[str]]):
return self._visit(t.item)
def visit_forwardref_type(self, t: types.ForwardRef) -> Set[str]:
- return self._visit(t.link)
+ if t.resolved:
+ return self._visit(t.resolved)
+ else:
+ return set()
diff --git a/mypy/main.py b/mypy/main.py
index b194dbe..60bf8a2 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -750,7 +750,13 @@ def parse_section(prefix: str, template: Options,
print("%s: %s: %s" % (prefix, key, err), file=sys.stderr)
continue
if key == 'disallow_any':
- results['disallow_untyped_defs'] = v and 'unannotated' in v
+ # "disallow_any = " should disable all disallow_any options, including untyped defs,
+ # given in a more general config.
+ if not v:
+ results['disallow_untyped_defs'] = False
+ # If "unannotated" is explicitly given, turn on disallow_untyped_defs.
+ elif 'unannotated' in v:
+ results['disallow_untyped_defs'] = True
if key == 'silent_imports':
print("%s: silent_imports has been replaced by "
"ignore_missing_imports=True; follow_imports=skip" % prefix, file=sys.stderr)
diff --git a/mypy/maptype.py b/mypy/maptype.py
index ed681c8..f90d0a0 100644
--- a/mypy/maptype.py
+++ b/mypy/maptype.py
@@ -10,7 +10,8 @@ def map_instance_to_supertype(instance: Instance,
"""Produce a supertype of `instance` that is an Instance
of `superclass`, mapping type arguments up the chain of bases.
- `superclass` is required to be a superclass of `instance.type`.
+ If `superclass` is not a nominal superclass of `instance.type`,
+ then all type arguments are mapped to 'Any'.
"""
if instance.type == superclass:
# Fast path: `instance` already belongs to `superclass`.
diff --git a/mypy/messages.py b/mypy/messages.py
index bdde2e1..b49292e 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -310,7 +310,10 @@ class MessageBuilder:
elif isinstance(typ, TypeType):
return 'Type[{}]'.format(self.format_bare(typ.item, verbosity))
elif isinstance(typ, ForwardRef): # may appear in semanal.py
- return self.format_bare(typ.link, verbosity)
+ if typ.resolved:
+ return self.format_bare(typ.resolved, verbosity)
+ else:
+ return self.format_bare(typ.unbound, verbosity)
elif isinstance(typ, FunctionLike):
func = typ
if func.is_type_obj():
@@ -860,11 +863,15 @@ class MessageBuilder:
def cannot_determine_type_in_base(self, name: str, base: str, context: Context) -> None:
self.fail("Cannot determine type of '%s' in base class '%s'" % (name, base), context)
- def invalid_method_type(self, sig: CallableType, context: Context) -> None:
- self.fail('Invalid method type', context)
+ def no_formal_self(self, name: str, item: CallableType, context: Context) -> None:
+ self.fail('Attribute function "%s" with type %s does not accept self argument'
+ % (name, self.format(item)), context)
- def invalid_class_method_type(self, sig: CallableType, context: Context) -> None:
- self.fail('Invalid class method type', context)
+ def incompatible_self_argument(self, name: str, arg: Type, sig: CallableType,
+ is_classmethod: bool, context: Context) -> None:
+ kind = 'class attribute function' if is_classmethod else 'attribute function'
+ self.fail('Invalid self argument %s to %s "%s" with type %s'
+ % (self.format(arg), kind, name, self.format(sig)), context)
def incompatible_conditional_function_def(self, defn: FuncDef) -> None:
self.fail('All conditional function variants must have identical '
diff --git a/mypy/nodes.py b/mypy/nodes.py
index 12bfa0d..761f936 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -255,7 +255,7 @@ class MypyFile(SymbolNode):
class ImportBase(Statement):
"""Base class for all import statements."""
- is_unreachable = False # Set by semanal.FirstPass if inside `if False` etc.
+ is_unreachable = False # Set by semanal.SemanticAnalyzerPass1 if inside `if False` etc.
is_top_level = False # Ditto if outside any class or def
is_mypy_only = False # Ditto if inside `if TYPE_CHECKING` or `if MYPY`
@@ -395,33 +395,14 @@ class Argument(Node):
type_annotation = None # type: Optional[mypy.types.Type]
initializer = None # type: Optional[Expression]
kind = None # type: int # must be an ARG_* constant
- initialization_statement = None # type: Optional[AssignmentStmt]
def __init__(self, variable: 'Var', type_annotation: 'Optional[mypy.types.Type]',
- initializer: Optional[Expression], kind: int,
- initialization_statement: 'Optional[AssignmentStmt]' = None) -> None:
+ initializer: Optional[Expression], kind: int) -> None:
self.variable = variable
-
self.type_annotation = type_annotation
self.initializer = initializer
-
- self.initialization_statement = initialization_statement
- if not self.initialization_statement:
- self.initialization_statement = self._initialization_statement()
-
self.kind = kind
- def _initialization_statement(self) -> 'Optional[AssignmentStmt]':
- """Convert the initializer into an assignment statement.
- """
- if not self.initializer:
- return None
-
- rvalue = self.initializer
- lvalue = NameExpr(self.variable.name())
- assign = AssignmentStmt([lvalue], rvalue)
- return assign
-
def set_line(self, target: Union[Context, int], column: Optional[int] = None) -> None:
super().set_line(target, column)
@@ -430,10 +411,6 @@ class Argument(Node):
self.variable.set_line(self.line, self.column)
- if self.initialization_statement:
- self.initialization_statement.set_line(self.line, self.column)
- self.initialization_statement.lvalues[0].set_line(self.line, self.column)
-
class FuncItem(FuncBase):
arguments = [] # type: List[Argument]
@@ -1808,8 +1785,9 @@ class PromoteExpr(Expression):
class NewTypeExpr(Expression):
"""NewType expression NewType(...)."""
name = None # type: str
+ # The base type (the second argument to NewType)
old_type = None # type: mypy.types.Type
-
+ # The synthesized class representing the new type (inherits old_type)
info = None # type: Optional[TypeInfo]
def __init__(self, name: str, old_type: 'mypy.types.Type', line: int) -> None:
@@ -1885,7 +1863,6 @@ class TypeInfo(SymbolNode):
declared_metaclass = None # type: Optional[mypy.types.Instance]
metaclass_type = None # type: Optional[mypy.types.Instance]
- subtypes = None # type: Set[TypeInfo] # Direct subclasses encountered so far
names = None # type: SymbolTable # Names defined directly in this type
is_abstract = False # Does the class have any abstract attributes?
is_protocol = False # Is this a protocol class?
@@ -1992,7 +1969,6 @@ class TypeInfo(SymbolNode):
self.names = names
self.defn = defn
self.module_name = module_name
- self.subtypes = set()
self.type_vars = []
self.bases = []
# Leave self.mro uninitialized until we compute it for real,
@@ -2257,21 +2233,53 @@ class FakeInfo(TypeInfo):
class SymbolTableNode:
+ """Description of a name binding in a symbol table.
+
+ These are only used as values in module (global), function (local)
+ and class symbol tables (see SymbolTable). The name that is bound is
+ the key in SymbolTable.
+
+ Symbol tables don't contain direct references to AST nodes primarily
+ because there can be multiple symbol table references to a single
+ AST node (due to imports and aliases), and different references can
+ behave differently. This class describes the unique properties of
+ each reference.
+
+ The most fundamental attributes are 'kind' and 'node'. The 'node'
+ attribute defines the AST node that the name refers to.
+
+ For many bindings, including those targeting variables, functions
+ and classes, the kind is one of LDEF, GDEF or MDEF, depending on the
+ scope of the definition. These three kinds can usually be used
+ interchangeably and the difference between local, global and class
+ scopes is mostly descriptive, with no semantic significance.
+ However, some tools that consume mypy ASTs may care about these so
+ they should be correct.
+
+ A few definitions get special kinds, including type variables (TVAR),
+ imported modules and module aliases (MODULE_REF), and type aliases
+ (TYPE_ALIAS).
+
+ Type aliases are very special and have additional attributes that
+ are only used for them ('type_override', 'alias_tvars' at least).
+ """
+ # TODO: This is a mess. Refactor!
+ # TODO: Describe how type aliases work.
+
# Kind of node. Possible values:
- # - LDEF: local definition (of any kind)
+ # - LDEF: local definition
# - GDEF: global (module-level) definition
# - MDEF: class member definition
- # - TVAR: TypeVar(...) definition
+ # - TVAR: TypeVar(...) definition in any scope
# - MODULE_REF: reference to a module
# - TYPE_ALIAS: type alias
- # - UNBOUND_IMPORTED: temporary kind for imported names
+ # - UNBOUND_IMPORTED: temporary kind for imported names (we don't know the final kind yet)
kind = None # type: int
- # AST node of definition (FuncDef/Var/TypeInfo/Decorator/TypeVarExpr,
+ # AST node of definition (among others, this can be FuncDef/Var/TypeInfo/TypeVarExpr/MypyFile,
# or None for a bound type variable).
node = None # type: Optional[SymbolNode]
- # Module id (e.g. "foo.bar") or None
- mod_id = '' # type: Optional[str]
- # If this not None, override the type of the 'node' attribute.
+ # If this is not None, override the type of the 'node' attribute. This is only used for
+ # type aliases.
type_override = None # type: Optional[mypy.types.Type]
# For generic aliases this stores the (qualified) names of type variables.
# (For example see testGenericAliasWithTypeVarsFromDifferentModules.)
@@ -2284,7 +2292,9 @@ class SymbolTableNode:
# For deserialized MODULE_REF nodes, the referenced module name;
# for other nodes, optionally the name of the referenced object.
cross_ref = None # type: Optional[str]
- # Was this node created by normalіze_type_alias?
+ # Used to distinguish between 'typing.List' and 'builtins.list'. This is
+ # True when the former has been normalized to the latter, and it allows us
+ # to reject 'list[str]' and similar.
normalized = False # type: bool
# Was this defined by assignment to self attribute?
implicit = False # type: bool
@@ -2292,7 +2302,6 @@ class SymbolTableNode:
def __init__(self,
kind: int,
node: Optional[SymbolNode],
- mod_id: Optional[str] = None,
typ: 'Optional[mypy.types.Type]' = None,
module_public: bool = True,
normalized: bool = False,
@@ -2302,7 +2311,6 @@ class SymbolTableNode:
self.kind = kind
self.node = node
self.type_override = typ
- self.mod_id = mod_id
self.module_hidden = module_hidden
self.module_public = module_public
self.normalized = normalized
@@ -2332,8 +2340,8 @@ class SymbolTableNode:
def __str__(self) -> str:
s = '{}/{}'.format(node_kinds[self.kind], short_type(self.node))
- if self.mod_id is not None:
- s += ' ({})'.format(self.mod_id)
+ if isinstance(self.node, SymbolNode):
+ s += ' ({})'.format(self.node.fullname())
# Include declared type of variables and functions.
if self.type is not None:
s += ' : {}'.format(self.type)
@@ -2425,7 +2433,7 @@ class SymbolTable(Dict[str, SymbolTableNode]):
for key, value in self.items():
# Skip __builtins__: it's a reference to the builtins
# module that gets added to every module by
- # SemanticAnalyzer.visit_file(), but it shouldn't be
+ # SemanticAnalyzerPass2.visit_file(), but it shouldn't be
# accessed by users of the module.
if key == '__builtins__':
continue
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 53cecba..369a30f 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -1,4 +1,4 @@
-"""The semantic analyzer.
+"""The semantic analyzer passes 1 and 2.
Bind names to definitions and do various other simple consistency
checks. For example, consider this program:
@@ -10,30 +10,19 @@ Here semantic analysis would detect that the assignment 'x = 1'
defines a new variable, the type of which is to be inferred (in a
later pass; type inference or type checking is not part of semantic
analysis). Also, it would bind both references to 'x' to the same
-module-level variable node. The second assignment would also be
-analyzed, and the type of 'y' marked as being inferred.
+module-level variable (Var) node. The second assignment would also
+be analyzed, and the type of 'y' marked as being inferred.
Semantic analysis is the first analysis pass after parsing, and it is
subdivided into three passes:
- * FirstPass looks up externally visible names defined in a module but
- ignores imports and local definitions. It helps enable (some)
- cyclic references between modules, such as module 'a' that imports
- module 'b' and used names defined in b *and* vice versa. The first
- pass can be performed before dependent modules have been processed.
+ * SemanticAnalyzerPass1 is defined in mypy.semanal_pass1.
- * SemanticAnalyzer is the second pass. It does the bulk of the work.
+ * SemanticAnalyzerPass2 is the second pass. It does the bulk of the work.
It assumes that dependent modules have been semantically analyzed,
up to the second pass, unless there is a import cycle.
- * ThirdPass checks that type argument counts are valid; for example,
- it will reject Dict[int]. We don't do this in the second pass,
- since we infer the type argument counts of classes during this
- pass, and it is possible to refer to classes defined later in a
- file, which would not have the type argument count set yet. This
- pass also recomputes the method resolution order of each class, in
- case one of its bases belongs to a module involved in an import
- loop.
+ * SemanticAnalyzerPass3 is the third pass. It's in mypy.semanal_pass3.
Semantic analysis of types is implemented in module mypy.typeanal.
@@ -72,7 +61,7 @@ from mypy.nodes import (
)
from mypy.literals import literal
from mypy.tvar_scope import TypeVarScope
-from mypy.typevars import has_no_typevars, fill_typevars
+from mypy.typevars import fill_typevars
from mypy.visitor import NodeVisitor
from mypy.traverser import TraverserVisitor
from mypy.errors import Errors, report_internal_error
@@ -80,19 +69,18 @@ from mypy.messages import CANNOT_ASSIGN_TO_TYPE, MessageBuilder
from mypy.types import (
FunctionLike, UnboundType, TypeVarDef, TypeType, TupleType, UnionType, StarType, function_type,
TypedDictType, NoneTyp, CallableType, Overloaded, Instance, Type, TypeVarType, AnyType,
- TypeTranslator, TypeOfAny, TypeVisitor, UninhabitedType, ErasedType, DeletedType,
- PartialType, ForwardRef
+ TypeTranslator, TypeOfAny, TypeVisitor, UninhabitedType, ErasedType, DeletedType
)
from mypy.nodes import implicit_module_attrs
from mypy.typeanal import (
- TypeAnalyser, TypeAnalyserPass3, analyze_type_alias, no_subscript_builtin_alias,
+ TypeAnalyser, analyze_type_alias, no_subscript_builtin_alias,
TypeVariableQuery, TypeVarList, remove_dups, has_any_from_unimported_type,
- check_for_explicit_any, collect_any_types,
+ check_for_explicit_any
)
from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError
from mypy.sametypes import is_same_type
from mypy.options import Options
-from mypy import experiments, messages
+from mypy import experiments
from mypy.plugin import Plugin
from mypy import join
@@ -156,7 +144,7 @@ TYPE_PROMOTIONS_PYTHON2.update({
# nested functions. In the first phase we add the function to the symbol table
# but don't process body. In the second phase we process function body. This
# way we can have mutually recursive nested functions.
-FUNCTION_BOTH_PHASES = 0 # Everthing in one go
+FUNCTION_BOTH_PHASES = 0 # Everything in one go
FUNCTION_FIRST_PHASE_POSTPONE_SECOND = 1 # Add to symbol table but postpone body
FUNCTION_SECOND_PHASE = 2 # Only analyze body
@@ -183,7 +171,7 @@ SUGGESTED_TEST_FIXTURES = {
}
-class SemanticAnalyzer(NodeVisitor[None]):
+class SemanticAnalyzerPass2(NodeVisitor[None]):
"""Semantically analyze parsed mypy files.
The analyzer binds names and does various consistency checks for a
@@ -283,8 +271,8 @@ class SemanticAnalyzer(NodeVisitor[None]):
with experiments.strict_optional_set(options.strict_optional):
if 'builtins' in self.modules:
- self.globals['__builtins__'] = SymbolTableNode(
- MODULE_REF, self.modules['builtins'], self.cur_mod_id)
+ self.globals['__builtins__'] = SymbolTableNode(MODULE_REF,
+ self.modules['builtins'])
for name in implicit_module_attrs:
v = self.globals[name].node
@@ -618,10 +606,6 @@ class SemanticAnalyzer(NodeVisitor[None]):
self.enter()
for arg in defn.arguments:
self.add_local(arg.variable, defn)
- for arg in defn.arguments:
- if arg.initialization_statement:
- lvalue = arg.initialization_statement.lvalues[0]
- lvalue.accept(self)
# The first argument of a non-static, non-class method is like 'self'
# (though the name could be different), having the enclosing class's
@@ -1082,7 +1066,7 @@ class SemanticAnalyzer(NodeVisitor[None]):
# Calculate the MRO. It might be incomplete at this point if
# the bases of defn include classes imported from other
- # modules in an import loop. We'll recompute it in ThirdPass.
+ # modules in an import loop. We'll recompute it in SemanticAnalyzerPass3.
if not self.verify_base_classes(defn):
# Give it an MRO consisting of just the class itself and object.
defn.info.mro = [defn.info, self.object_type().type]
@@ -1413,7 +1397,7 @@ class SemanticAnalyzer(NodeVisitor[None]):
if parent_mod and child not in parent_mod.names:
child_mod = self.modules.get(id)
if child_mod:
- sym = SymbolTableNode(MODULE_REF, child_mod, parent,
+ sym = SymbolTableNode(MODULE_REF, child_mod,
module_public=module_public)
parent_mod.names[child] = sym
id = parent
@@ -1422,7 +1406,7 @@ class SemanticAnalyzer(NodeVisitor[None]):
context: Context, module_hidden: bool = False) -> None:
if id in self.modules:
m = self.modules[id]
- self.add_symbol(as_id, SymbolTableNode(MODULE_REF, m, self.cur_mod_id,
+ self.add_symbol(as_id, SymbolTableNode(MODULE_REF, m,
module_public=module_public,
module_hidden=module_hidden), context)
else:
@@ -1442,7 +1426,7 @@ class SemanticAnalyzer(NodeVisitor[None]):
if not node or node.kind == UNBOUND_IMPORTED:
mod = self.modules.get(possible_module_id)
if mod is not None:
- node = SymbolTableNode(MODULE_REF, mod, import_id)
+ node = SymbolTableNode(MODULE_REF, mod)
self.add_submodules_to_parent_modules(possible_module_id, True)
elif possible_module_id in self.missing_modules:
missing = True
@@ -1460,7 +1444,7 @@ class SemanticAnalyzer(NodeVisitor[None]):
else:
name = id
ast_node = Var(name, type=typ)
- symbol = SymbolTableNode(GDEF, ast_node, name)
+ symbol = SymbolTableNode(GDEF, ast_node)
self.add_symbol(name, symbol, imp)
return
if node and node.kind != UNBOUND_IMPORTED and not node.module_hidden:
@@ -1478,7 +1462,6 @@ class SemanticAnalyzer(NodeVisitor[None]):
module_public = not self.is_stub_file or as_id is not None
module_hidden = not module_public and possible_module_id not in self.modules
symbol = SymbolTableNode(node.kind, node.node,
- self.cur_mod_id,
node.type_override,
module_public=module_public,
normalized=node.normalized,
@@ -1534,8 +1517,7 @@ class SemanticAnalyzer(NodeVisitor[None]):
node = self.lookup_qualified(collections_type_aliases[fullname], ctx)
normalized = True
if normalized:
- node = SymbolTableNode(node.kind, node.node,
- node.mod_id, node.type_override,
+ node = SymbolTableNode(node.kind, node.node, node.type_override,
normalized=True, alias_tvars=node.alias_tvars)
return node
@@ -1580,7 +1562,6 @@ class SemanticAnalyzer(NodeVisitor[None]):
name, existing_symbol, node, i):
continue
self.add_symbol(name, SymbolTableNode(node.kind, node.node,
- self.cur_mod_id,
node.type_override,
normalized=node.normalized,
alias_tvars=node.alias_tvars), i)
@@ -1601,7 +1582,7 @@ class SemanticAnalyzer(NodeVisitor[None]):
any_type = AnyType(TypeOfAny.from_error)
var.type = any_type
var.is_suppressed_import = is_import
- self.add_symbol(name, SymbolTableNode(GDEF, var, self.cur_mod_id), context)
+ self.add_symbol(name, SymbolTableNode(GDEF, var), context)
#
# Statements
@@ -1732,11 +1713,10 @@ class SemanticAnalyzer(NodeVisitor[None]):
return Instance(fb_info, [])
def analyze_alias(self, rvalue: Expression,
- allow_unnormalized: bool) -> Tuple[Optional[Type], List[str]]:
+ warn_bound_tvar: bool = False) -> Tuple[Optional[Type], List[str]]:
"""Check if 'rvalue' represents a valid type allowed for aliasing
(e.g. not a type variable). If yes, return the corresponding type and a list of
qualified type variable names for generic aliases.
- If 'allow_unnormalized' is True, allow types like builtins.list[T].
"""
dynamic = bool(self.function_stack and self.function_stack[-1].is_dynamic())
global_scope = not self.type and not self.function_stack
@@ -1751,7 +1731,8 @@ class SemanticAnalyzer(NodeVisitor[None]):
self.is_typeshed_stub_file,
allow_unnormalized=True,
in_dynamic_func=dynamic,
- global_scope=global_scope)
+ global_scope=global_scope,
+ warn_bound_tvar=warn_bound_tvar)
if res:
alias_tvars = [name for (name, _) in
res.accept(TypeVariableQuery(self.lookup_qualified, self.tvar_scope))]
@@ -1765,50 +1746,62 @@ class SemanticAnalyzer(NodeVisitor[None]):
For subscripted (including generic) aliases the resulting types are stored
in rvalue.analyzed.
"""
- # Type aliases are created only at module scope and class scope (for subscripted types),
- # at function scope assignments always create local variables with type object types.
lvalue = s.lvalues[0]
- if not isinstance(lvalue, NameExpr):
+ if len(s.lvalues) > 1 or not isinstance(lvalue, NameExpr):
+ # First rule: Only simple assignments like Alias = ... create aliases.
return
- if (len(s.lvalues) == 1 and not self.is_func_scope() and
- not (self.type and isinstance(s.rvalue, NameExpr) and lvalue.is_def)
- and not s.type):
- rvalue = s.rvalue
- res, alias_tvars = self.analyze_alias(rvalue, allow_unnormalized=True)
- if not res:
- return
- node = self.lookup(lvalue.name, lvalue)
- if not lvalue.is_def:
- # Only a definition can create a type alias, not regular assignment.
- if node and node.kind == TYPE_ALIAS or isinstance(node.node, TypeInfo):
- self.fail('Cannot assign multiple types to name "{}"'
- ' without an explicit "Type[...]" annotation'
- .format(lvalue.name), lvalue)
- return
- check_for_explicit_any(res, self.options, self.is_typeshed_stub_file, self.msg,
- context=s)
- # when this type alias gets "inlined", the Any is not explicit anymore,
- # so we need to replace it with non-explicit Anys
- res = make_any_non_explicit(res)
- if isinstance(res, Instance) and not res.args and isinstance(rvalue, RefExpr):
- # For simple (on-generic) aliases we use aliasing TypeInfo's
- # to allow using them in runtime context where it makes sense.
- node.node = res.type
- if isinstance(rvalue, RefExpr):
- sym = self.lookup_type_node(rvalue)
- if sym:
- node.normalized = sym.normalized
- return
- node.kind = TYPE_ALIAS
- node.type_override = res
- node.alias_tvars = alias_tvars
- if isinstance(rvalue, (IndexExpr, CallExpr)):
- # We only need this for subscripted aliases, since simple aliases
- # are already processed using aliasing TypeInfo's above.
- rvalue.analyzed = TypeAliasExpr(res, node.alias_tvars,
- fallback=self.alias_fallback(res))
- rvalue.analyzed.line = rvalue.line
- rvalue.analyzed.column = rvalue.column
+ if s.type:
+ # Second rule: Explicit type (cls: Type[A] = A) always creates variable, not alias.
+ return
+ non_global_scope = self.type or self.is_func_scope()
+ if isinstance(s.rvalue, NameExpr) and non_global_scope and lvalue.is_def:
+ # Third rule: Non-subscripted right hand side creates a variable
+ # at class and function scopes. For example:
+ #
+ # class Model:
+ # ...
+ # class C:
+ # model = Model # this is automatically a variable with type 'Type[Model]'
+ #
+ # without this rule, this typical use case will require a lot of explicit
+ # annotations (see the second rule).
+ return
+ rvalue = s.rvalue
+ res, alias_tvars = self.analyze_alias(rvalue, warn_bound_tvar=True)
+ if not res:
+ return
+ node = self.lookup(lvalue.name, lvalue)
+ if not lvalue.is_def:
+ # Type aliases can't be re-defined.
+ if node and (node.kind == TYPE_ALIAS or isinstance(node.node, TypeInfo)):
+ self.fail('Cannot assign multiple types to name "{}"'
+ ' without an explicit "Type[...]" annotation'
+ .format(lvalue.name), lvalue)
+ return
+ check_for_explicit_any(res, self.options, self.is_typeshed_stub_file, self.msg,
+ context=s)
+ # when this type alias gets "inlined", the Any is not explicit anymore,
+ # so we need to replace it with non-explicit Anys
+ res = make_any_non_explicit(res)
+ if isinstance(res, Instance) and not res.args and isinstance(rvalue, RefExpr):
+            # For simple (non-generic) aliases we use aliasing TypeInfo's
+ # to allow using them in runtime context where it makes sense.
+ node.node = res.type
+ if isinstance(rvalue, RefExpr):
+ sym = self.lookup_type_node(rvalue)
+ if sym:
+ node.normalized = sym.normalized
+ return
+ node.kind = TYPE_ALIAS
+ node.type_override = res
+ node.alias_tvars = alias_tvars
+ if isinstance(rvalue, (IndexExpr, CallExpr)):
+ # We only need this for subscripted aliases, since simple aliases
+ # are already processed using aliasing TypeInfo's above.
+ rvalue.analyzed = TypeAliasExpr(res, node.alias_tvars,
+ fallback=self.alias_fallback(res))
+ rvalue.analyzed.line = rvalue.line
+ rvalue.analyzed.column = rvalue.column
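A small sketch of the three rules above (hypothetical names, for illustration only):

    from typing import List, Type

    Alias = List[int]         # rule 1: simple module-level assignment -> type alias
    x: Type[int] = int        # rule 2: explicit annotation -> ordinary variable, not an alias

    class Model: ...
    class C:
        model = Model         # rule 3: bare name at class scope -> variable of type Type[Model]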
def analyze_lvalue(self, lval: Lvalue, nested: bool = False,
add_global: bool = False,
@@ -1835,8 +1828,7 @@ class SemanticAnalyzer(NodeVisitor[None]):
lval.is_def = True
lval.kind = GDEF
lval.fullname = v._fullname
- self.globals[lval.name] = SymbolTableNode(GDEF, v,
- self.cur_mod_id)
+ self.globals[lval.name] = SymbolTableNode(GDEF, v)
elif isinstance(lval.node, Var) and lval.is_def:
# Since the is_def flag is set, this must have been analyzed
# already in the first pass and added to the symbol table.
@@ -2280,7 +2272,7 @@ class SemanticAnalyzer(NodeVisitor[None]):
info = self.build_namedtuple_typeinfo(name, items, types, {})
# Store it as a global just in case it would remain anonymous.
# (Or in the nearest class if there is one.)
- stnode = SymbolTableNode(GDEF, info, self.cur_mod_id)
+ stnode = SymbolTableNode(GDEF, info)
if self.type:
self.type.names[name] = stnode
else:
@@ -2539,7 +2531,7 @@ class SemanticAnalyzer(NodeVisitor[None]):
info = self.build_typeddict_typeinfo(name, items, types, required_keys)
# Store it as a global just in case it would remain anonymous.
# (Or in the nearest class if there is one.)
- stnode = SymbolTableNode(GDEF, info, self.cur_mod_id)
+ stnode = SymbolTableNode(GDEF, info)
if self.type:
self.type.names[name] = stnode
else:
@@ -2771,7 +2763,7 @@ class SemanticAnalyzer(NodeVisitor[None]):
info = self.build_enum_call_typeinfo(name, items, fullname)
# Store it as a global just in case it would remain anonymous.
# (Or in the nearest class if there is one.)
- stnode = SymbolTableNode(GDEF, info, self.cur_mod_id)
+ stnode = SymbolTableNode(GDEF, info)
if self.type:
self.type.names[name] = stnode
else:
@@ -3374,7 +3366,7 @@ class SemanticAnalyzer(NodeVisitor[None]):
elif isinstance(expr.base, RefExpr) and expr.base.kind == TYPE_ALIAS:
# Special form -- subscripting a generic type alias.
# Perform the type substitution and create a new alias.
- res, alias_tvars = self.analyze_alias(expr, allow_unnormalized=self.is_stub_file)
+ res, alias_tvars = self.analyze_alias(expr)
expr.analyzed = TypeAliasExpr(res, alias_tvars, fallback=self.alias_fallback(res),
in_runtime=True)
expr.analyzed.line = expr.line
@@ -3803,621 +3795,6 @@ class SemanticAnalyzer(NodeVisitor[None]):
report_internal_error(err, self.errors.file, node.line, self.errors, self.options)
-class FirstPass(NodeVisitor[None]):
- """First phase of semantic analysis.
-
- See docstring of 'analyze()' below for a description of what this does.
- """
-
- def __init__(self, sem: SemanticAnalyzer) -> None:
- self.sem = sem
-
- def visit_file(self, file: MypyFile, fnam: str, mod_id: str, options: Options) -> None:
- """Perform the first analysis pass.
-
- Populate module global table. Resolve the full names of
- definitions not nested within functions and construct type
- info structures, but do not resolve inter-definition
- references such as base classes.
-
- Also add implicit definitions such as __name__.
-
- In this phase we don't resolve imports. For 'from ... import',
- we generate dummy symbol table nodes for the imported names,
- and these will get resolved in later phases of semantic
- analysis.
- """
- sem = self.sem
- self.sem.options = options # Needed because we sometimes call into it
- self.pyversion = options.python_version
- self.platform = options.platform
- sem.cur_mod_id = mod_id
- sem.errors.set_file(fnam, mod_id)
- sem.globals = SymbolTable()
- sem.global_decls = [set()]
- sem.nonlocal_decls = [set()]
- sem.block_depth = [0]
-
- defs = file.defs
-
- with experiments.strict_optional_set(options.strict_optional):
- # Add implicit definitions of module '__name__' etc.
- for name, t in implicit_module_attrs.items():
- # unicode docstrings should be accepted in Python 2
- if name == '__doc__':
- if self.pyversion >= (3, 0):
- typ = UnboundType('__builtins__.str') # type: Type
- else:
- typ = UnionType([UnboundType('__builtins__.str'),
- UnboundType('__builtins__.unicode')])
- else:
- assert t is not None, 'type should be specified for {}'.format(name)
- typ = UnboundType(t)
- v = Var(name, typ)
- v._fullname = self.sem.qualified_name(name)
- self.sem.globals[name] = SymbolTableNode(GDEF, v, self.sem.cur_mod_id)
-
- for d in defs:
- d.accept(self)
-
- # Add implicit definition of literals/keywords to builtins, as we
- # cannot define a variable with them explicitly.
- if mod_id == 'builtins':
- literal_types = [
- ('None', NoneTyp()),
- # reveal_type is a mypy-only function that gives an error with
- # the type of its arg.
- ('reveal_type', AnyType(TypeOfAny.special_form)),
- ] # type: List[Tuple[str, Type]]
-
- # TODO(ddfisher): This guard is only needed because mypy defines
- # fake builtins for its tests which often don't define bool. If
- # mypy is fast enough that we no longer need those, this
- # conditional check should be removed.
- if 'bool' in self.sem.globals:
- bool_type = self.sem.named_type('bool')
- literal_types.extend([
- ('True', bool_type),
- ('False', bool_type),
- ('__debug__', bool_type),
- ])
- else:
- # We are running tests without 'bool' in builtins.
- # TODO: Find a permanent solution to this problem.
- # Maybe add 'bool' to all fixtures?
- literal_types.append(('True', AnyType(TypeOfAny.special_form)))
-
- for name, typ in literal_types:
- v = Var(name, typ)
- v._fullname = self.sem.qualified_name(name)
- self.sem.globals[name] = SymbolTableNode(GDEF, v, self.sem.cur_mod_id)
-
- del self.sem.options
-
- def visit_block(self, b: Block) -> None:
- if b.is_unreachable:
- return
- self.sem.block_depth[-1] += 1
- for node in b.body:
- node.accept(self)
- self.sem.block_depth[-1] -= 1
-
- def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
- if self.sem.is_module_scope():
- for lval in s.lvalues:
- self.analyze_lvalue(lval, explicit_type=s.type is not None)
-
- def visit_func_def(self, func: FuncDef) -> None:
- sem = self.sem
- func.is_conditional = sem.block_depth[-1] > 0
- func._fullname = sem.qualified_name(func.name())
- at_module = sem.is_module_scope()
- if at_module and func.name() in sem.globals:
- # Already defined in this module.
- original_sym = sem.globals[func.name()]
- if original_sym.kind == UNBOUND_IMPORTED:
- # Ah this is an imported name. We can't resolve them now, so we'll postpone
- # this until the main phase of semantic analysis.
- return
- if not sem.set_original_def(original_sym.node, func):
- # Report error.
- sem.check_no_global(func.name(), func)
- else:
- if at_module:
- sem.globals[func.name()] = SymbolTableNode(GDEF, func, sem.cur_mod_id)
- # Also analyze the function body (in case there are conditional imports).
- sem.function_stack.append(func)
- sem.errors.push_function(func.name())
- sem.enter()
- func.body.accept(self)
- sem.leave()
- sem.errors.pop_function()
- sem.function_stack.pop()
-
- def visit_overloaded_func_def(self, func: OverloadedFuncDef) -> None:
- kind = self.kind_by_scope()
- if kind == GDEF:
- self.sem.check_no_global(func.name(), func, True)
- func._fullname = self.sem.qualified_name(func.name())
- if kind == GDEF:
- self.sem.globals[func.name()] = SymbolTableNode(kind, func, self.sem.cur_mod_id)
- if func.impl:
- impl = func.impl
- # Also analyze the function body (in case there are conditional imports).
- sem = self.sem
-
- if isinstance(impl, FuncDef):
- sem.function_stack.append(impl)
- sem.errors.push_function(func.name())
- sem.enter()
- impl.body.accept(self)
- elif isinstance(impl, Decorator):
- sem.function_stack.append(impl.func)
- sem.errors.push_function(func.name())
- sem.enter()
- impl.func.body.accept(self)
- else:
- assert False, "Implementation of an overload needs to be FuncDef or Decorator"
- sem.leave()
- sem.errors.pop_function()
- sem.function_stack.pop()
-
- def visit_class_def(self, cdef: ClassDef) -> None:
- kind = self.kind_by_scope()
- if kind == LDEF:
- return
- elif kind == GDEF:
- self.sem.check_no_global(cdef.name, cdef)
- cdef.fullname = self.sem.qualified_name(cdef.name)
- info = TypeInfo(SymbolTable(), cdef, self.sem.cur_mod_id)
- info.set_line(cdef.line, cdef.column)
- cdef.info = info
- if kind == GDEF:
- self.sem.globals[cdef.name] = SymbolTableNode(kind, info, self.sem.cur_mod_id)
- self.process_nested_classes(cdef)
-
- def process_nested_classes(self, outer_def: ClassDef) -> None:
- self.sem.enter_class(outer_def.info)
- for node in outer_def.defs.body:
- if isinstance(node, ClassDef):
- node.info = TypeInfo(SymbolTable(), node, self.sem.cur_mod_id)
- if outer_def.fullname:
- node.info._fullname = outer_def.fullname + '.' + node.info.name()
- else:
- node.info._fullname = node.info.name()
- node.fullname = node.info._fullname
- symbol = SymbolTableNode(MDEF, node.info)
- outer_def.info.names[node.name] = symbol
- self.process_nested_classes(node)
- elif isinstance(node, (ImportFrom, Import, ImportAll, IfStmt)):
- node.accept(self)
- self.sem.leave_class()
-
- def visit_import_from(self, node: ImportFrom) -> None:
- # We can't bind module names during the first pass, as the target module might be
- # unprocessed. However, we add dummy unbound imported names to the symbol table so
- # that we at least know that the name refers to a module.
- at_module = self.sem.is_module_scope()
- node.is_top_level = at_module
- if not at_module:
- return
- for name, as_name in node.names:
- imported_name = as_name or name
- if imported_name not in self.sem.globals:
- self.sem.add_symbol(imported_name, SymbolTableNode(UNBOUND_IMPORTED, None), node)
-
- def visit_import(self, node: Import) -> None:
- node.is_top_level = self.sem.is_module_scope()
- # This is similar to visit_import_from -- see the comment there.
- if not self.sem.is_module_scope():
- return
- for id, as_id in node.ids:
- imported_id = as_id or id
- if imported_id not in self.sem.globals:
- self.sem.add_symbol(imported_id, SymbolTableNode(UNBOUND_IMPORTED, None), node)
- else:
- # If the previous symbol is a variable, this should take precedence.
- self.sem.globals[imported_id] = SymbolTableNode(UNBOUND_IMPORTED, None)
-
- def visit_import_all(self, node: ImportAll) -> None:
- node.is_top_level = self.sem.is_module_scope()
-
- def visit_while_stmt(self, s: WhileStmt) -> None:
- if self.sem.is_module_scope():
- s.body.accept(self)
- if s.else_body:
- s.else_body.accept(self)
-
- def visit_for_stmt(self, s: ForStmt) -> None:
- if self.sem.is_module_scope():
- self.analyze_lvalue(s.index, explicit_type=s.index_type is not None)
- s.body.accept(self)
- if s.else_body:
- s.else_body.accept(self)
-
- def visit_with_stmt(self, s: WithStmt) -> None:
- if self.sem.is_module_scope():
- for n in s.target:
- if n:
- self.analyze_lvalue(n, explicit_type=s.target_type is not None)
- s.body.accept(self)
-
- def visit_decorator(self, d: Decorator) -> None:
- d.var._fullname = self.sem.qualified_name(d.var.name())
- self.sem.add_symbol(d.var.name(), SymbolTableNode(self.kind_by_scope(), d.var), d)
-
- def visit_if_stmt(self, s: IfStmt) -> None:
- infer_reachability_of_if_statement(s, pyversion=self.pyversion, platform=self.platform)
- for node in s.body:
- node.accept(self)
- if s.else_body:
- s.else_body.accept(self)
-
- def visit_try_stmt(self, s: TryStmt) -> None:
- if self.sem.is_module_scope():
- self.sem.analyze_try_stmt(s, self, add_global=self.sem.is_module_scope())
-
- def analyze_lvalue(self, lvalue: Lvalue, explicit_type: bool = False) -> None:
- self.sem.analyze_lvalue(lvalue, add_global=self.sem.is_module_scope(),
- explicit_type=explicit_type)
-
- def kind_by_scope(self) -> int:
- if self.sem.is_module_scope():
- return GDEF
- elif self.sem.is_class_scope():
- return MDEF
- elif self.sem.is_func_scope():
- return LDEF
- else:
- assert False, "Couldn't determine scope"
-
-
-class ThirdPass(TraverserVisitor):
- """The third and final pass of semantic analysis.
-
- Check type argument counts and values of generic types, and perform some
- straightforward type inference.
- """
-
- def __init__(self, modules: Dict[str, MypyFile], errors: Errors,
- sem: SemanticAnalyzer) -> None:
- self.modules = modules
- self.errors = errors
- self.sem = sem
-
- def visit_file(self, file_node: MypyFile, fnam: str, options: Options,
- patches: List[Callable[[], None]]) -> None:
- self.errors.set_file(fnam, file_node.fullname())
- self.options = options
- self.sem.options = options
- self.patches = patches
- self.is_typeshed_file = self.errors.is_typeshed_file(fnam)
- self.sem.globals = file_node.names
- with experiments.strict_optional_set(options.strict_optional):
- self.accept(file_node)
-
- def refresh_partial(self, node: Union[MypyFile, FuncItem]) -> None:
- """Refresh a stale target in fine-grained incremental mode."""
- if isinstance(node, MypyFile):
- self.refresh_top_level(node)
- else:
- self.accept(node)
-
- def refresh_top_level(self, file_node: MypyFile) -> None:
- """Reanalyze a stale module top-level in fine-grained incremental mode."""
- for d in file_node.defs:
- if not isinstance(d, (FuncItem, ClassDef)):
- self.accept(d)
-
- def accept(self, node: Node) -> None:
- try:
- node.accept(self)
- except Exception as err:
- report_internal_error(err, self.errors.file, node.line, self.errors, self.options)
-
- def visit_block(self, b: Block) -> None:
- if b.is_unreachable:
- return
- super().visit_block(b)
-
- def visit_func_def(self, fdef: FuncDef) -> None:
- self.errors.push_function(fdef.name())
- self.analyze(fdef.type, fdef)
- super().visit_func_def(fdef)
- self.errors.pop_function()
-
- def visit_class_def(self, tdef: ClassDef) -> None:
- # NamedTuple base classes are validated in check_namedtuple_classdef; we don't have to
- # check them again here.
- if not tdef.info.is_named_tuple:
- types = list(tdef.info.bases) # type: List[Type]
- for tvar in tdef.type_vars:
- if tvar.upper_bound:
- types.append(tvar.upper_bound)
- if tvar.values:
- types.extend(tvar.values)
- self.analyze_types(types, tdef.info)
- for type in tdef.info.bases:
- if tdef.info.is_protocol:
- if not isinstance(type, Instance) or not type.type.is_protocol:
- if type.type.fullname() != 'builtins.object':
- self.fail('All bases of a protocol must be protocols', tdef)
- # Recompute MRO now that we have analyzed all modules, to pick
- # up superclasses of bases imported from other modules in an
- # import loop. (Only do so if we succeeded the first time.)
- if tdef.info.mro:
- tdef.info.mro = [] # Force recomputation
- calculate_class_mro(tdef, self.fail_blocker)
- if tdef.info.is_protocol:
- add_protocol_members(tdef.info)
- if tdef.analyzed is not None:
- # Also check synthetic types associated with this ClassDef.
- # Currently these are TypedDict, and NamedTuple.
- if isinstance(tdef.analyzed, TypedDictExpr):
- self.analyze(tdef.analyzed.info.typeddict_type, tdef.analyzed, warn=True)
- elif isinstance(tdef.analyzed, NamedTupleExpr):
- self.analyze(tdef.analyzed.info.tuple_type, tdef.analyzed, warn=True)
- for name in tdef.analyzed.info.names:
- sym = tdef.analyzed.info.names[name]
- if isinstance(sym.node, (FuncDef, Decorator)):
- self.accept(sym.node)
- if isinstance(sym.node, Var):
- self.analyze(sym.node.type, sym.node)
- super().visit_class_def(tdef)
-
- def visit_decorator(self, dec: Decorator) -> None:
- """Try to infer the type of the decorated function.
-
- This lets us resolve references to decorated functions during
- type checking when there are cyclic imports, as otherwise the
- type might not be available when we need it.
-
- This basically uses a simple special-purpose type inference
- engine just for decorators.
- """
- super().visit_decorator(dec)
- if dec.var.is_property:
- # Decorators are expected to have a callable type (it's a little odd).
- if dec.func.type is None:
- dec.var.type = CallableType(
- [AnyType(TypeOfAny.special_form)],
- [ARG_POS],
- [None],
- AnyType(TypeOfAny.special_form),
- self.builtin_type('function'),
- name=dec.var.name())
- elif isinstance(dec.func.type, CallableType):
- dec.var.type = dec.func.type
- return
- decorator_preserves_type = True
- for expr in dec.decorators:
- preserve_type = False
- if isinstance(expr, RefExpr) and isinstance(expr.node, FuncDef):
- if is_identity_signature(expr.node.type):
- preserve_type = True
- if not preserve_type:
- decorator_preserves_type = False
- break
- if decorator_preserves_type:
- # No non-identity decorators left. We can trivially infer the type
- # of the function here.
- dec.var.type = function_type(dec.func, self.builtin_type('function'))
- if dec.decorators:
- return_type = calculate_return_type(dec.decorators[0])
- if return_type and isinstance(return_type, AnyType):
- # The outermost decorator will return Any so we know the type of the
- # decorated function.
- dec.var.type = AnyType(TypeOfAny.from_another_any, source_any=return_type)
- sig = find_fixed_callable_return(dec.decorators[0])
- if sig:
- # The outermost decorator always returns the same kind of function,
- # so we know that this is the type of the decoratored function.
- orig_sig = function_type(dec.func, self.builtin_type('function'))
- sig.name = orig_sig.items()[0].name
- dec.var.type = sig
-
- def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
- """Traverse the assignment statement.
-
- This includes the actual assignment and synthetic types
- resulted from this assignment (if any). Currently this includes
- NewType, TypedDict, NamedTuple, and TypeVar.
- """
- self.analyze(s.type, s)
- if isinstance(s.rvalue, IndexExpr) and isinstance(s.rvalue.analyzed, TypeAliasExpr):
- self.analyze(s.rvalue.analyzed.type, s.rvalue.analyzed, warn=True)
- if isinstance(s.rvalue, CallExpr):
- analyzed = s.rvalue.analyzed
- if isinstance(analyzed, NewTypeExpr):
- self.analyze(analyzed.old_type, analyzed)
- if analyzed.info and analyzed.info.mro:
- analyzed.info.mro = [] # Force recomputation
- calculate_class_mro(analyzed.info.defn, self.fail_blocker)
- if isinstance(analyzed, TypeVarExpr):
- types = []
- if analyzed.upper_bound:
- types.append(analyzed.upper_bound)
- if analyzed.values:
- types.extend(analyzed.values)
- self.analyze_types(types, analyzed)
- if isinstance(analyzed, TypedDictExpr):
- self.analyze(analyzed.info.typeddict_type, analyzed, warn=True)
- if isinstance(analyzed, NamedTupleExpr):
- self.analyze(analyzed.info.tuple_type, analyzed, warn=True)
- for name in analyzed.info.names:
- sym = analyzed.info.names[name]
- if isinstance(sym.node, (FuncDef, Decorator)):
- self.accept(sym.node)
- if isinstance(sym.node, Var):
- self.analyze(sym.node.type, sym.node)
- # We need to pay additional attention to assignments that define a type alias.
- # The resulting type is also stored in the 'type_override' attribute of
- # the corresponding SymbolTableNode.
- if isinstance(s.lvalues[0], RefExpr) and isinstance(s.lvalues[0].node, Var):
- self.analyze(s.lvalues[0].node.type, s.lvalues[0].node)
- if isinstance(s.lvalues[0], NameExpr):
- node = self.sem.lookup(s.lvalues[0].name, s, suppress_errors=True)
- if node:
- self.analyze(node.type_override, node)
- super().visit_assignment_stmt(s)
-
- def visit_for_stmt(self, s: ForStmt) -> None:
- self.analyze(s.index_type, s)
- super().visit_for_stmt(s)
-
- def visit_with_stmt(self, s: WithStmt) -> None:
- self.analyze(s.target_type, s)
- super().visit_with_stmt(s)
-
- def visit_cast_expr(self, e: CastExpr) -> None:
- self.analyze(e.type, e)
- super().visit_cast_expr(e)
-
- def visit_reveal_type_expr(self, e: RevealTypeExpr) -> None:
- super().visit_reveal_type_expr(e)
-
- def visit_type_application(self, e: TypeApplication) -> None:
- for type in e.types:
- self.analyze(type, e)
- super().visit_type_application(e)
-
- # Helpers
-
- def perform_transform(self, node: Union[Node, SymbolTableNode],
- transform: Callable[[Type], Type]) -> None:
- """Apply transform to all types associated with node."""
- if isinstance(node, ForStmt):
- node.index_type = transform(node.index_type)
- self.transform_types_in_lvalue(node.index, transform)
- if isinstance(node, WithStmt):
- node.target_type = transform(node.target_type)
- for n in node.target:
- if isinstance(n, NameExpr) and isinstance(n.node, Var) and n.node.type:
- n.node.type = transform(n.node.type)
- if isinstance(node, (FuncDef, CastExpr, AssignmentStmt, TypeAliasExpr, Var)):
- node.type = transform(node.type)
- if isinstance(node, NewTypeExpr):
- node.old_type = transform(node.old_type)
- if isinstance(node, TypeVarExpr):
- if node.upper_bound:
- node.upper_bound = transform(node.upper_bound)
- if node.values:
- node.values = [transform(v) for v in node.values]
- if isinstance(node, TypedDictExpr):
- node.info.typeddict_type = cast(TypedDictType,
- transform(node.info.typeddict_type))
- if isinstance(node, NamedTupleExpr):
- node.info.tuple_type = cast(TupleType,
- transform(node.info.tuple_type))
- if isinstance(node, TypeApplication):
- node.types = [transform(t) for t in node.types]
- if isinstance(node, SymbolTableNode):
- node.type_override = transform(node.type_override)
- if isinstance(node, TypeInfo):
- for tvar in node.defn.type_vars:
- if tvar.upper_bound:
- tvar.upper_bound = transform(tvar.upper_bound)
- if tvar.values:
- tvar.values = [transform(v) for v in tvar.values]
- new_bases = []
- for base in node.bases:
- new_base = transform(base)
- if isinstance(new_base, Instance):
- new_bases.append(new_base)
- else:
- # Don't fix the NamedTuple bases, they are Instance's intentionally.
- # Patch the 'args' just in case, although generic tuple type are
- # not supported yet.
- alt_base = Instance(base.type, [transform(a) for a in base.args])
- new_bases.append(alt_base)
- node.bases = new_bases
-
- def transform_types_in_lvalue(self, lvalue: Lvalue,
- transform: Callable[[Type], Type]) -> None:
- if isinstance(lvalue, RefExpr):
- if isinstance(lvalue.node, Var):
- var = lvalue.node
- var.type = transform(var.type)
- elif isinstance(lvalue, TupleExpr):
- for item in lvalue.items:
- self.transform_types_in_lvalue(item, transform)
-
- def analyze(self, type: Optional[Type], node: Union[Node, SymbolTableNode],
- warn: bool = False) -> None:
- # Recursive type warnings are only emitted on type definition 'node's, marked by 'warn'
- # Flags appeared during analysis of 'type' are collected in this dict.
- indicator = {} # type: Dict[str, bool]
- if type:
- analyzer = self.make_type_analyzer(indicator)
- type.accept(analyzer)
- self.check_for_omitted_generics(type)
- if indicator.get('forward') or indicator.get('synthetic'):
- def patch() -> None:
- self.perform_transform(node,
- lambda tp: tp.accept(ForwardReferenceResolver(self.fail,
- node, warn)))
- self.patches.append(patch)
-
- def analyze_types(self, types: List[Type], node: Node) -> None:
- # Similar to above but for nodes with multiple types.
- indicator = {} # type: Dict[str, bool]
- for type in types:
- analyzer = self.make_type_analyzer(indicator)
- type.accept(analyzer)
- self.check_for_omitted_generics(type)
- if indicator.get('forward') or indicator.get('synthetic'):
- def patch() -> None:
- self.perform_transform(node,
- lambda tp: tp.accept(ForwardReferenceResolver(self.fail,
- node, warn=False)))
- self.patches.append(patch)
-
- def make_type_analyzer(self, indicator: Dict[str, bool]) -> TypeAnalyserPass3:
- return TypeAnalyserPass3(self.sem.lookup_qualified,
- self.sem.lookup_fully_qualified,
- self.fail,
- self.sem.note,
- self.sem.plugin,
- self.options,
- self.is_typeshed_file,
- indicator)
-
- def check_for_omitted_generics(self, typ: Type) -> None:
- if 'generics' not in self.options.disallow_any or self.is_typeshed_file:
- return
-
- for t in collect_any_types(typ):
- if t.type_of_any == TypeOfAny.from_omitted_generics:
- self.fail(messages.BARE_GENERIC, t)
-
- def fail(self, msg: str, ctx: Context, *, blocker: bool = False) -> None:
- self.errors.report(ctx.get_line(), ctx.get_column(), msg)
-
- def fail_blocker(self, msg: str, ctx: Context) -> None:
- self.fail(msg, ctx, blocker=True)
-
- def builtin_type(self, name: str, args: List[Type] = None) -> Instance:
- names = self.modules['builtins']
- sym = names.names[name]
- node = sym.node
- assert isinstance(node, TypeInfo)
- if args:
- # TODO: assert len(args) == len(node.defn.type_vars)
- return Instance(node, args)
- any_type = AnyType(TypeOfAny.special_form)
- return Instance(node, [any_type] * len(node.defn.type_vars))
-
-
-def add_protocol_members(typ: TypeInfo) -> None:
- members = set() # type: Set[str]
- if typ.mro:
- for base in typ.mro[:-1]: # we skip "object" since everyone implements it
- if base.is_protocol:
- for name in base.names:
- members.add(name)
- typ.protocol_members = sorted(list(members))
-
-
def replace_implicit_first_type(sig: FunctionLike, new: Type) -> FunctionLike:
if isinstance(sig, CallableType):
return sig.copy_modified(arg_types=[new] + sig.arg_types[1:])
@@ -4748,54 +4125,6 @@ class MarkImportsMypyOnlyVisitor(TraverserVisitor):
node.is_mypy_only = True
-def is_identity_signature(sig: Type) -> bool:
- """Is type a callable of form T -> T (where T is a type variable)?"""
- if isinstance(sig, CallableType) and sig.arg_kinds == [ARG_POS]:
- if isinstance(sig.arg_types[0], TypeVarType) and isinstance(sig.ret_type, TypeVarType):
- return sig.arg_types[0].id == sig.ret_type.id
- return False
-
-
-def calculate_return_type(expr: Expression) -> Optional[Type]:
- """Return the return type if we can calculate it.
-
- This only uses information available during semantic analysis so this
- will sometimes return None because of insufficient information (as
- type inference hasn't run yet).
- """
- if isinstance(expr, RefExpr):
- if isinstance(expr.node, FuncDef):
- typ = expr.node.type
- if typ is None:
- # No signature -> default to Any.
- return AnyType(TypeOfAny.unannotated)
- # Explicit Any return?
- if isinstance(typ, CallableType):
- return typ.ret_type
- return None
- elif isinstance(expr.node, Var):
- return expr.node.type
- elif isinstance(expr, CallExpr):
- return calculate_return_type(expr.callee)
- return None
-
-
-def find_fixed_callable_return(expr: Expression) -> Optional[CallableType]:
- if isinstance(expr, RefExpr):
- if isinstance(expr.node, FuncDef):
- typ = expr.node.type
- if typ:
- if isinstance(typ, CallableType) and has_no_typevars(typ.ret_type):
- if isinstance(typ.ret_type, CallableType):
- return typ.ret_type
- elif isinstance(expr, CallExpr):
- t = find_fixed_callable_return(expr.callee)
- if t:
- if isinstance(t.ret_type, CallableType):
- return t.ret_type
- return None
-
-
def make_any_non_explicit(t: Type) -> Type:
"""Replace all Any types within in with Any that has attribute 'explicit' set to False"""
return t.accept(MakeAnyNonExplicit())
@@ -4806,135 +4135,3 @@ class MakeAnyNonExplicit(TypeTranslator):
if t.type_of_any == TypeOfAny.explicit:
return t.copy_modified(TypeOfAny.special_form)
return t
-
-
-class ForwardReferenceResolver(TypeTranslator):
- """Visitor to replace previously detected forward reference to synthetic types.
-
- This is similar to TypeTranslator but tracks visited nodes to avoid
- infinite recursion on potentially circular (self- or mutually-referential) types.
- This visitor:
- * Fixes forward references by unwrapping the linked type.
- * Generates errors for unsupported type recursion and breaks recursion by resolving
- recursive back references to Any types.
- * Replaces instance types generated from unanalyzed NamedTuple and TypedDict class syntax
- found in first pass with analyzed TupleType and TypedDictType.
- """
- def __init__(self, fail: Callable[[str, Context], None],
- start: Union[Node, SymbolTableNode], warn: bool) -> None:
- self.seen = [] # type: List[Type]
- self.fail = fail
- self.start = start
- self.warn = warn
-
- def check_recursion(self, t: Type) -> bool:
- if any(t is s for s in self.seen):
- if self.warn:
- assert isinstance(self.start, Node), "Internal error: invalid error context"
- self.fail('Recursive types not fully supported yet,'
- ' nested types replaced with "Any"', self.start)
- return True
- self.seen.append(t)
- return False
-
- def visit_forwardref_type(self, t: ForwardRef) -> Type:
- """This visitor method tracks situations like this:
-
- x: A # This type is not yet known and therefore wrapped in ForwardRef,
- # its content is updated in ThirdPass, now we need to unwrap this type.
- A = NewType('A', int)
- """
- return t.link.accept(self)
-
- def visit_instance(self, t: Instance, from_fallback: bool = False) -> Type:
- """This visitor method tracks situations like this:
-
- x: A # When analyzing this type we will get an Instance from FirstPass.
- # Now we need to update this to actual analyzed TupleType.
- class A(NamedTuple):
- attr: str
-
- If from_fallback is True, then we always return an Instance type. This is needed
- since TupleType and TypedDictType fallbacks are always instances.
- """
- info = t.type
- # Special case, analyzed bases transformed the type into TupleType.
- if info.tuple_type and not from_fallback:
- items = [it.accept(self) for it in info.tuple_type.items]
- info.tuple_type.items = items
- return TupleType(items, Instance(info, []))
- # Update forward Instances to corresponding analyzed NamedTuples.
- if info.replaced and info.replaced.tuple_type:
- tp = info.replaced.tuple_type
- if self.check_recursion(tp):
- # The key idea is that when we recursively return to a type already traversed,
- # then we break the cycle and put AnyType as a leaf.
- return AnyType(TypeOfAny.from_error)
- return tp.copy_modified(fallback=Instance(info.replaced, [])).accept(self)
- # Same as above but for TypedDicts.
- if info.replaced and info.replaced.typeddict_type:
- td = info.replaced.typeddict_type
- if self.check_recursion(td):
- # We also break the cycles for TypedDicts as explained above for NamedTuples.
- return AnyType(TypeOfAny.from_error)
- return td.copy_modified(fallback=Instance(info.replaced, [])).accept(self)
- if self.check_recursion(t):
- # We also need to break a potential cycle with normal (non-synthetic) instance types.
- return Instance(t.type, [AnyType(TypeOfAny.from_error)] * len(t.type.defn.type_vars))
- return super().visit_instance(t)
-
- def visit_type_var(self, t: TypeVarType) -> Type:
- if self.check_recursion(t):
- return AnyType(TypeOfAny.from_error)
- if t.upper_bound:
- t.upper_bound = t.upper_bound.accept(self)
- if t.values:
- t.values = [v.accept(self) for v in t.values]
- return t
-
- def visit_callable_type(self, t: CallableType) -> Type:
- if self.check_recursion(t):
- return AnyType(TypeOfAny.from_error)
- arg_types = [tp.accept(self) for tp in t.arg_types]
- ret_type = t.ret_type.accept(self)
- variables = t.variables.copy()
- for v in variables:
- if v.upper_bound:
- v.upper_bound = v.upper_bound.accept(self)
- if v.values:
- v.values = [val.accept(self) for val in v.values]
- return t.copy_modified(arg_types=arg_types, ret_type=ret_type, variables=variables)
-
- def visit_overloaded(self, t: Overloaded) -> Type:
- if self.check_recursion(t):
- return AnyType(TypeOfAny.from_error)
- return super().visit_overloaded(t)
-
- def visit_tuple_type(self, t: TupleType) -> Type:
- if self.check_recursion(t):
- return AnyType(TypeOfAny.from_error)
- items = [it.accept(self) for it in t.items]
- fallback = self.visit_instance(t.fallback, from_fallback=True)
- assert isinstance(fallback, Instance)
- return TupleType(items, fallback, t.line, t.column)
-
- def visit_typeddict_type(self, t: TypedDictType) -> Type:
- if self.check_recursion(t):
- return AnyType(TypeOfAny.from_error)
- items = OrderedDict([
- (item_name, item_type.accept(self))
- for (item_name, item_type) in t.items.items()
- ])
- fallback = self.visit_instance(t.fallback, from_fallback=True)
- assert isinstance(fallback, Instance)
- return TypedDictType(items, t.required_keys, fallback, t.line, t.column)
-
- def visit_union_type(self, t: UnionType) -> Type:
- if self.check_recursion(t):
- return AnyType(TypeOfAny.from_error)
- return super().visit_union_type(t)
-
- def visit_type_type(self, t: TypeType) -> Type:
- if self.check_recursion(t):
- return AnyType(TypeOfAny.from_error)
- return super().visit_type_type(t)
diff --git a/mypy/semanal_pass1.py b/mypy/semanal_pass1.py
new file mode 100644
index 0000000..f0ce826
--- /dev/null
+++ b/mypy/semanal_pass1.py
@@ -0,0 +1,296 @@
+"""The semantic analyzer pass 1.
+
+This sets up externally visible names defined in a module but ignores
+imports and local definitions. It helps enable (some) cyclic references
+between modules, such as module 'a' that imports module 'b' and used
+names defined in b *and* vice versa. The first pass can be performed
+before dependent modules have been processed.
+
+Since this pass can't assume that other modules have been processed,
+this pass cannot determine the types of certain definitions that can
+only be recognized in later passes. Examples of these include TypeVar
+and NamedTuple definitions, as these look like regular assignments until
+we are able to bind names, which only happens in pass 2.
+"""
+
+from typing import List, Tuple
+
+from mypy import experiments
+from mypy.nodes import (
+ MypyFile, SymbolTable, SymbolTableNode, Var, Block, AssignmentStmt, FuncDef, Decorator,
+ ClassDef, TypeInfo, ImportFrom, Import, ImportAll, IfStmt, WhileStmt, ForStmt, WithStmt,
+ TryStmt, OverloadedFuncDef, Lvalue, LDEF, GDEF, MDEF, UNBOUND_IMPORTED, implicit_module_attrs
+)
+from mypy.types import Type, UnboundType, UnionType, AnyType, TypeOfAny, NoneTyp
+from mypy.semanal import SemanticAnalyzerPass2, infer_reachability_of_if_statement
+from mypy.options import Options
+from mypy.visitor import NodeVisitor
+
+
+class SemanticAnalyzerPass1(NodeVisitor[None]):
+ """First phase of semantic analysis.
+
+ See docstring of 'analyze()' below for a description of what this does.
+ """
+
+ def __init__(self, sem: SemanticAnalyzerPass2) -> None:
+ self.sem = sem
+
+ def visit_file(self, file: MypyFile, fnam: str, mod_id: str, options: Options) -> None:
+ """Perform the first analysis pass.
+
+ Populate module global table. Resolve the full names of
+ definitions not nested within functions and construct type
+ info structures, but do not resolve inter-definition
+ references such as base classes.
+
+ Also add implicit definitions such as __name__.
+
+ In this phase we don't resolve imports. For 'from ... import',
+ we generate dummy symbol table nodes for the imported names,
+ and these will get resolved in later phases of semantic
+ analysis.
+ """
+ sem = self.sem
+ self.sem.options = options # Needed because we sometimes call into it
+ self.pyversion = options.python_version
+ self.platform = options.platform
+ sem.cur_mod_id = mod_id
+ sem.errors.set_file(fnam, mod_id)
+ sem.globals = SymbolTable()
+ sem.global_decls = [set()]
+ sem.nonlocal_decls = [set()]
+ sem.block_depth = [0]
+
+ defs = file.defs
+
+ with experiments.strict_optional_set(options.strict_optional):
+ # Add implicit definitions of module '__name__' etc.
+ for name, t in implicit_module_attrs.items():
+ # unicode docstrings should be accepted in Python 2
+ if name == '__doc__':
+ if self.pyversion >= (3, 0):
+ typ = UnboundType('__builtins__.str') # type: Type
+ else:
+ typ = UnionType([UnboundType('__builtins__.str'),
+ UnboundType('__builtins__.unicode')])
+ else:
+ assert t is not None, 'type should be specified for {}'.format(name)
+ typ = UnboundType(t)
+ v = Var(name, typ)
+ v._fullname = self.sem.qualified_name(name)
+ self.sem.globals[name] = SymbolTableNode(GDEF, v)
+
+ for d in defs:
+ d.accept(self)
+
+ # Add implicit definition of literals/keywords to builtins, as we
+ # cannot define a variable with them explicitly.
+ if mod_id == 'builtins':
+ literal_types = [
+ ('None', NoneTyp()),
+ # reveal_type is a mypy-only function that gives an error with
+ # the type of its arg.
+ ('reveal_type', AnyType(TypeOfAny.special_form)),
+ ] # type: List[Tuple[str, Type]]
+
+ # TODO(ddfisher): This guard is only needed because mypy defines
+ # fake builtins for its tests which often don't define bool. If
+ # mypy is fast enough that we no longer need those, this
+ # conditional check should be removed.
+ if 'bool' in self.sem.globals:
+ bool_type = self.sem.named_type('bool')
+ literal_types.extend([
+ ('True', bool_type),
+ ('False', bool_type),
+ ('__debug__', bool_type),
+ ])
+ else:
+ # We are running tests without 'bool' in builtins.
+ # TODO: Find a permanent solution to this problem.
+ # Maybe add 'bool' to all fixtures?
+ literal_types.append(('True', AnyType(TypeOfAny.special_form)))
+
+ for name, typ in literal_types:
+ v = Var(name, typ)
+ v._fullname = self.sem.qualified_name(name)
+ self.sem.globals[name] = SymbolTableNode(GDEF, v)
+
+ del self.sem.options
+
+ def visit_block(self, b: Block) -> None:
+ if b.is_unreachable:
+ return
+ self.sem.block_depth[-1] += 1
+ for node in b.body:
+ node.accept(self)
+ self.sem.block_depth[-1] -= 1
+
+ def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
+ if self.sem.is_module_scope():
+ for lval in s.lvalues:
+ self.analyze_lvalue(lval, explicit_type=s.type is not None)
+
+ def visit_func_def(self, func: FuncDef) -> None:
+ sem = self.sem
+ func.is_conditional = sem.block_depth[-1] > 0
+ func._fullname = sem.qualified_name(func.name())
+ at_module = sem.is_module_scope()
+ if at_module and func.name() in sem.globals:
+ # Already defined in this module.
+ original_sym = sem.globals[func.name()]
+ if original_sym.kind == UNBOUND_IMPORTED:
+ # Ah this is an imported name. We can't resolve them now, so we'll postpone
+ # this until the main phase of semantic analysis.
+ return
+ if not sem.set_original_def(original_sym.node, func):
+ # Report error.
+ sem.check_no_global(func.name(), func)
+ else:
+ if at_module:
+ sem.globals[func.name()] = SymbolTableNode(GDEF, func)
+ # Also analyze the function body (in case there are conditional imports).
+ sem.function_stack.append(func)
+ sem.errors.push_function(func.name())
+ sem.enter()
+ func.body.accept(self)
+ sem.leave()
+ sem.errors.pop_function()
+ sem.function_stack.pop()
+
+ def visit_overloaded_func_def(self, func: OverloadedFuncDef) -> None:
+ kind = self.kind_by_scope()
+ if kind == GDEF:
+ self.sem.check_no_global(func.name(), func, True)
+ func._fullname = self.sem.qualified_name(func.name())
+ if kind == GDEF:
+ self.sem.globals[func.name()] = SymbolTableNode(kind, func)
+ if func.impl:
+ impl = func.impl
+ # Also analyze the function body (in case there are conditional imports).
+ sem = self.sem
+
+ if isinstance(impl, FuncDef):
+ sem.function_stack.append(impl)
+ sem.errors.push_function(func.name())
+ sem.enter()
+ impl.body.accept(self)
+ elif isinstance(impl, Decorator):
+ sem.function_stack.append(impl.func)
+ sem.errors.push_function(func.name())
+ sem.enter()
+ impl.func.body.accept(self)
+ else:
+ assert False, "Implementation of an overload needs to be FuncDef or Decorator"
+ sem.leave()
+ sem.errors.pop_function()
+ sem.function_stack.pop()
+
+ def visit_class_def(self, cdef: ClassDef) -> None:
+ kind = self.kind_by_scope()
+ if kind == LDEF:
+ return
+ elif kind == GDEF:
+ self.sem.check_no_global(cdef.name, cdef)
+ cdef.fullname = self.sem.qualified_name(cdef.name)
+ info = TypeInfo(SymbolTable(), cdef, self.sem.cur_mod_id)
+ info.set_line(cdef.line, cdef.column)
+ cdef.info = info
+ if kind == GDEF:
+ self.sem.globals[cdef.name] = SymbolTableNode(kind, info)
+ self.process_nested_classes(cdef)
+
+ def process_nested_classes(self, outer_def: ClassDef) -> None:
+ self.sem.enter_class(outer_def.info)
+ for node in outer_def.defs.body:
+ if isinstance(node, ClassDef):
+ node.info = TypeInfo(SymbolTable(), node, self.sem.cur_mod_id)
+ if outer_def.fullname:
+ node.info._fullname = outer_def.fullname + '.' + node.info.name()
+ else:
+ node.info._fullname = node.info.name()
+ node.fullname = node.info._fullname
+ symbol = SymbolTableNode(MDEF, node.info)
+ outer_def.info.names[node.name] = symbol
+ self.process_nested_classes(node)
+ elif isinstance(node, (ImportFrom, Import, ImportAll, IfStmt)):
+ node.accept(self)
+ self.sem.leave_class()
+
+ def visit_import_from(self, node: ImportFrom) -> None:
+ # We can't bind module names during the first pass, as the target module might be
+ # unprocessed. However, we add dummy unbound imported names to the symbol table so
+ # that we at least know that the name refers to a module.
+ at_module = self.sem.is_module_scope()
+ node.is_top_level = at_module
+ if not at_module:
+ return
+ for name, as_name in node.names:
+ imported_name = as_name or name
+ if imported_name not in self.sem.globals:
+ self.sem.add_symbol(imported_name, SymbolTableNode(UNBOUND_IMPORTED, None), node)
+
+ def visit_import(self, node: Import) -> None:
+ node.is_top_level = self.sem.is_module_scope()
+ # This is similar to visit_import_from -- see the comment there.
+ if not self.sem.is_module_scope():
+ return
+ for id, as_id in node.ids:
+ imported_id = as_id or id
+ if imported_id not in self.sem.globals:
+ self.sem.add_symbol(imported_id, SymbolTableNode(UNBOUND_IMPORTED, None), node)
+ else:
+ # If the previous symbol is a variable, this should take precedence.
+ self.sem.globals[imported_id] = SymbolTableNode(UNBOUND_IMPORTED, None)
+
+ def visit_import_all(self, node: ImportAll) -> None:
+ node.is_top_level = self.sem.is_module_scope()
+
+ def visit_while_stmt(self, s: WhileStmt) -> None:
+ if self.sem.is_module_scope():
+ s.body.accept(self)
+ if s.else_body:
+ s.else_body.accept(self)
+
+ def visit_for_stmt(self, s: ForStmt) -> None:
+ if self.sem.is_module_scope():
+ self.analyze_lvalue(s.index, explicit_type=s.index_type is not None)
+ s.body.accept(self)
+ if s.else_body:
+ s.else_body.accept(self)
+
+ def visit_with_stmt(self, s: WithStmt) -> None:
+ if self.sem.is_module_scope():
+ for n in s.target:
+ if n:
+ self.analyze_lvalue(n, explicit_type=s.target_type is not None)
+ s.body.accept(self)
+
+ def visit_decorator(self, d: Decorator) -> None:
+ d.var._fullname = self.sem.qualified_name(d.var.name())
+ self.sem.add_symbol(d.var.name(), SymbolTableNode(self.kind_by_scope(), d.var), d)
+
+ def visit_if_stmt(self, s: IfStmt) -> None:
+ infer_reachability_of_if_statement(s, pyversion=self.pyversion, platform=self.platform)
+ for node in s.body:
+ node.accept(self)
+ if s.else_body:
+ s.else_body.accept(self)
+
+ def visit_try_stmt(self, s: TryStmt) -> None:
+ if self.sem.is_module_scope():
+ self.sem.analyze_try_stmt(s, self, add_global=self.sem.is_module_scope())
+
+ def analyze_lvalue(self, lvalue: Lvalue, explicit_type: bool = False) -> None:
+ self.sem.analyze_lvalue(lvalue, add_global=self.sem.is_module_scope(),
+ explicit_type=explicit_type)
+
+ def kind_by_scope(self) -> int:
+ if self.sem.is_module_scope():
+ return GDEF
+ elif self.sem.is_class_scope():
+ return MDEF
+ elif self.sem.is_func_scope():
+ return LDEF
+ else:
+ assert False, "Couldn't determine scope"
diff --git a/mypy/semanal_pass3.py b/mypy/semanal_pass3.py
new file mode 100644
index 0000000..9e9ba0d
--- /dev/null
+++ b/mypy/semanal_pass3.py
@@ -0,0 +1,560 @@
+"""The semantic analyzer pass 3.
+
+This pass checks that type argument counts are valid; for example, it
+will reject Dict[int]. We don't do this in the second pass, since we
+infer the type argument counts of classes during this pass, and it is
+possible to refer to classes defined later in a file, which would not
+have the type argument count set yet. This pass also recomputes the
+method resolution order of each class, in case one of its bases
+belongs to a module involved in an import loop.
+"""
+
+from collections import OrderedDict
+from typing import Dict, List, Callable, Optional, Union, Set, cast
+
+from mypy import messages, experiments
+from mypy.nodes import (
+ Node, Expression, MypyFile, FuncDef, FuncItem, Decorator, RefExpr, Context, TypeInfo, ClassDef,
+ Block, TypedDictExpr, NamedTupleExpr, AssignmentStmt, IndexExpr, TypeAliasExpr, NameExpr,
+ CallExpr, NewTypeExpr, ForStmt, WithStmt, CastExpr, TypeVarExpr, TypeApplication, Lvalue,
+ TupleExpr, RevealTypeExpr, SymbolTableNode, Var, ARG_POS
+)
+from mypy.types import (
+ Type, Instance, AnyType, TypeOfAny, CallableType, TupleType, TypeVarType, TypedDictType,
+ UnionType, TypeType, Overloaded, ForwardRef, TypeTranslator, function_type
+)
+from mypy.errors import Errors, report_internal_error
+from mypy.options import Options
+from mypy.traverser import TraverserVisitor
+from mypy.typeanal import TypeAnalyserPass3, collect_any_types
+from mypy.typevars import has_no_typevars
+import mypy.semanal
+
+
+class SemanticAnalyzerPass3(TraverserVisitor):
+ """The third and final pass of semantic analysis.
+
+ Check type argument counts and values of generic types, and perform some
+ straightforward type inference.
+ """
+
+ def __init__(self, modules: Dict[str, MypyFile], errors: Errors,
+ sem: 'mypy.semanal.SemanticAnalyzerPass2') -> None:
+ self.modules = modules
+ self.errors = errors
+ self.sem = sem
+
+ def visit_file(self, file_node: MypyFile, fnam: str, options: Options,
+ patches: List[Callable[[], None]]) -> None:
+ self.errors.set_file(fnam, file_node.fullname())
+ self.options = options
+ self.sem.options = options
+ self.patches = patches
+ self.is_typeshed_file = self.errors.is_typeshed_file(fnam)
+ self.sem.globals = file_node.names
+ with experiments.strict_optional_set(options.strict_optional):
+ self.accept(file_node)
+
+ def refresh_partial(self, node: Union[MypyFile, FuncItem]) -> None:
+ """Refresh a stale target in fine-grained incremental mode."""
+ if isinstance(node, MypyFile):
+ self.refresh_top_level(node)
+ else:
+ self.accept(node)
+
+ def refresh_top_level(self, file_node: MypyFile) -> None:
+ """Reanalyze a stale module top-level in fine-grained incremental mode."""
+ for d in file_node.defs:
+ if not isinstance(d, (FuncItem, ClassDef)):
+ self.accept(d)
+
+ def accept(self, node: Node) -> None:
+ try:
+ node.accept(self)
+ except Exception as err:
+ report_internal_error(err, self.errors.file, node.line, self.errors, self.options)
+
+ def visit_block(self, b: Block) -> None:
+ if b.is_unreachable:
+ return
+ super().visit_block(b)
+
+ def visit_func_def(self, fdef: FuncDef) -> None:
+ self.errors.push_function(fdef.name())
+ self.analyze(fdef.type, fdef)
+ super().visit_func_def(fdef)
+ self.errors.pop_function()
+
+ def visit_class_def(self, tdef: ClassDef) -> None:
+ # NamedTuple base classes are validated in check_namedtuple_classdef; we don't have to
+ # check them again here.
+ if not tdef.info.is_named_tuple:
+ types = list(tdef.info.bases) # type: List[Type]
+ for tvar in tdef.type_vars:
+ if tvar.upper_bound:
+ types.append(tvar.upper_bound)
+ if tvar.values:
+ types.extend(tvar.values)
+ self.analyze_types(types, tdef.info)
+ for type in tdef.info.bases:
+ if tdef.info.is_protocol:
+ if not isinstance(type, Instance) or not type.type.is_protocol:
+ if type.type.fullname() != 'builtins.object':
+ self.fail('All bases of a protocol must be protocols', tdef)
+ # Recompute MRO now that we have analyzed all modules, to pick
+ # up superclasses of bases imported from other modules in an
+ # import loop. (Only do so if we succeeded the first time.)
+ if tdef.info.mro:
+ tdef.info.mro = [] # Force recomputation
+ mypy.semanal.calculate_class_mro(tdef, self.fail_blocker)
+ if tdef.info.is_protocol:
+ add_protocol_members(tdef.info)
+ if tdef.analyzed is not None:
+ # Also check synthetic types associated with this ClassDef.
+ # Currently these are TypedDict, and NamedTuple.
+ if isinstance(tdef.analyzed, TypedDictExpr):
+ self.analyze(tdef.analyzed.info.typeddict_type, tdef.analyzed, warn=True)
+ elif isinstance(tdef.analyzed, NamedTupleExpr):
+ self.analyze(tdef.analyzed.info.tuple_type, tdef.analyzed, warn=True)
+ for name in tdef.analyzed.info.names:
+ sym = tdef.analyzed.info.names[name]
+ if isinstance(sym.node, (FuncDef, Decorator)):
+ self.accept(sym.node)
+ if isinstance(sym.node, Var):
+ self.analyze(sym.node.type, sym.node)
+ super().visit_class_def(tdef)
+
+ def visit_decorator(self, dec: Decorator) -> None:
+ """Try to infer the type of the decorated function.
+
+ This lets us resolve references to decorated functions during
+ type checking when there are cyclic imports, as otherwise the
+ type might not be available when we need it.
+
+ This basically uses a simple special-purpose type inference
+ engine just for decorators.
+ """
+ super().visit_decorator(dec)
+ if dec.var.is_property:
+ # Decorators are expected to have a callable type (it's a little odd).
+ if dec.func.type is None:
+ dec.var.type = CallableType(
+ [AnyType(TypeOfAny.special_form)],
+ [ARG_POS],
+ [None],
+ AnyType(TypeOfAny.special_form),
+ self.builtin_type('function'),
+ name=dec.var.name())
+ elif isinstance(dec.func.type, CallableType):
+ dec.var.type = dec.func.type
+ return
+ decorator_preserves_type = True
+ for expr in dec.decorators:
+ preserve_type = False
+ if isinstance(expr, RefExpr) and isinstance(expr.node, FuncDef):
+ if is_identity_signature(expr.node.type):
+ preserve_type = True
+ if not preserve_type:
+ decorator_preserves_type = False
+ break
+ if decorator_preserves_type:
+ # No non-identity decorators left. We can trivially infer the type
+ # of the function here.
+ dec.var.type = function_type(dec.func, self.builtin_type('function'))
+ if dec.decorators:
+ return_type = calculate_return_type(dec.decorators[0])
+ if return_type and isinstance(return_type, AnyType):
+ # The outermost decorator will return Any so we know the type of the
+ # decorated function.
+ dec.var.type = AnyType(TypeOfAny.from_another_any, source_any=return_type)
+ sig = find_fixed_callable_return(dec.decorators[0])
+ if sig:
+ # The outermost decorator always returns the same kind of function,
+                # so we know that this is the type of the decorated function.
+ orig_sig = function_type(dec.func, self.builtin_type('function'))
+ sig.name = orig_sig.items()[0].name
+ dec.var.type = sig
+
+ def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
+ """Traverse the assignment statement.
+
+        This includes the actual assignment and any synthetic types
+        resulting from this assignment. Currently this includes
+        NewType, TypedDict, NamedTuple, and TypeVar.
+ """
+ self.analyze(s.type, s)
+ if isinstance(s.rvalue, IndexExpr) and isinstance(s.rvalue.analyzed, TypeAliasExpr):
+ self.analyze(s.rvalue.analyzed.type, s.rvalue.analyzed, warn=True)
+ if isinstance(s.rvalue, CallExpr):
+ analyzed = s.rvalue.analyzed
+ if isinstance(analyzed, NewTypeExpr):
+ self.analyze(analyzed.old_type, analyzed)
+ if analyzed.info and analyzed.info.mro:
+ analyzed.info.mro = [] # Force recomputation
+ mypy.semanal.calculate_class_mro(analyzed.info.defn, self.fail_blocker)
+ if isinstance(analyzed, TypeVarExpr):
+ types = []
+ if analyzed.upper_bound:
+ types.append(analyzed.upper_bound)
+ if analyzed.values:
+ types.extend(analyzed.values)
+ self.analyze_types(types, analyzed)
+ if isinstance(analyzed, TypedDictExpr):
+ self.analyze(analyzed.info.typeddict_type, analyzed, warn=True)
+ if isinstance(analyzed, NamedTupleExpr):
+ self.analyze(analyzed.info.tuple_type, analyzed, warn=True)
+ for name in analyzed.info.names:
+ sym = analyzed.info.names[name]
+ if isinstance(sym.node, (FuncDef, Decorator)):
+ self.accept(sym.node)
+ if isinstance(sym.node, Var):
+ self.analyze(sym.node.type, sym.node)
+ # We need to pay additional attention to assignments that define a type alias.
+ # The resulting type is also stored in the 'type_override' attribute of
+ # the corresponding SymbolTableNode.
+ if isinstance(s.lvalues[0], RefExpr) and isinstance(s.lvalues[0].node, Var):
+ self.analyze(s.lvalues[0].node.type, s.lvalues[0].node)
+ if isinstance(s.lvalues[0], NameExpr):
+ node = self.sem.lookup(s.lvalues[0].name, s, suppress_errors=True)
+ if node:
+ self.analyze(node.type_override, node)
+ super().visit_assignment_stmt(s)
+
+ def visit_for_stmt(self, s: ForStmt) -> None:
+ self.analyze(s.index_type, s)
+ super().visit_for_stmt(s)
+
+ def visit_with_stmt(self, s: WithStmt) -> None:
+ self.analyze(s.target_type, s)
+ super().visit_with_stmt(s)
+
+ def visit_cast_expr(self, e: CastExpr) -> None:
+ self.analyze(e.type, e)
+ super().visit_cast_expr(e)
+
+ def visit_reveal_type_expr(self, e: RevealTypeExpr) -> None:
+ super().visit_reveal_type_expr(e)
+
+ def visit_type_application(self, e: TypeApplication) -> None:
+ for type in e.types:
+ self.analyze(type, e)
+ super().visit_type_application(e)
+
+ # Helpers
+
+ def perform_transform(self, node: Union[Node, SymbolTableNode],
+ transform: Callable[[Type], Type]) -> None:
+ """Apply transform to all types associated with node."""
+ if isinstance(node, ForStmt):
+ node.index_type = transform(node.index_type)
+ self.transform_types_in_lvalue(node.index, transform)
+ if isinstance(node, WithStmt):
+ node.target_type = transform(node.target_type)
+ for n in node.target:
+ if isinstance(n, NameExpr) and isinstance(n.node, Var) and n.node.type:
+ n.node.type = transform(n.node.type)
+ if isinstance(node, (FuncDef, CastExpr, AssignmentStmt, TypeAliasExpr, Var)):
+ node.type = transform(node.type)
+ if isinstance(node, NewTypeExpr):
+ node.old_type = transform(node.old_type)
+ if isinstance(node, TypeVarExpr):
+ if node.upper_bound:
+ node.upper_bound = transform(node.upper_bound)
+ if node.values:
+ node.values = [transform(v) for v in node.values]
+ if isinstance(node, TypedDictExpr):
+ node.info.typeddict_type = cast(TypedDictType,
+ transform(node.info.typeddict_type))
+ if isinstance(node, NamedTupleExpr):
+ node.info.tuple_type = cast(TupleType,
+ transform(node.info.tuple_type))
+ if isinstance(node, TypeApplication):
+ node.types = [transform(t) for t in node.types]
+ if isinstance(node, SymbolTableNode):
+ node.type_override = transform(node.type_override)
+ if isinstance(node, TypeInfo):
+ for tvar in node.defn.type_vars:
+ if tvar.upper_bound:
+ tvar.upper_bound = transform(tvar.upper_bound)
+ if tvar.values:
+ tvar.values = [transform(v) for v in tvar.values]
+ new_bases = []
+ for base in node.bases:
+ new_base = transform(base)
+ if isinstance(new_base, Instance):
+ new_bases.append(new_base)
+ else:
+                # Don't fix the NamedTuple bases; they are intentionally Instances.
+                # Patch the 'args' just in case, although generic tuple types are
+                # not supported yet.
+ alt_base = Instance(base.type, [transform(a) for a in base.args])
+ new_bases.append(alt_base)
+ node.bases = new_bases
+
+ def transform_types_in_lvalue(self, lvalue: Lvalue,
+ transform: Callable[[Type], Type]) -> None:
+ if isinstance(lvalue, RefExpr):
+ if isinstance(lvalue.node, Var):
+ var = lvalue.node
+ var.type = transform(var.type)
+ elif isinstance(lvalue, TupleExpr):
+ for item in lvalue.items:
+ self.transform_types_in_lvalue(item, transform)
+
+ def analyze(self, type: Optional[Type], node: Union[Node, SymbolTableNode],
+ warn: bool = False) -> None:
+        # Recursive type warnings are only emitted for type definition nodes, which are
+        # marked by 'warn'. Flags that appear during the analysis of 'type' are collected
+        # in this dict.
+ indicator = {} # type: Dict[str, bool]
+ if type:
+ analyzer = self.make_type_analyzer(indicator)
+ type.accept(analyzer)
+ self.check_for_omitted_generics(type)
+ if indicator.get('forward') or indicator.get('synthetic'):
+ def patch() -> None:
+ self.perform_transform(node,
+ lambda tp: tp.accept(ForwardReferenceResolver(self.fail,
+ node, warn)))
+ self.patches.append(patch)
+
+ def analyze_types(self, types: List[Type], node: Node) -> None:
+ # Similar to above but for nodes with multiple types.
+ indicator = {} # type: Dict[str, bool]
+ for type in types:
+ analyzer = self.make_type_analyzer(indicator)
+ type.accept(analyzer)
+ self.check_for_omitted_generics(type)
+ if indicator.get('forward') or indicator.get('synthetic'):
+ def patch() -> None:
+ self.perform_transform(node,
+ lambda tp: tp.accept(ForwardReferenceResolver(self.fail,
+ node, warn=False)))
+ self.patches.append(patch)
+
+ def make_type_analyzer(self, indicator: Dict[str, bool]) -> TypeAnalyserPass3:
+ return TypeAnalyserPass3(self.sem.lookup_qualified,
+ self.sem.lookup_fully_qualified,
+ self.fail,
+ self.sem.note,
+ self.sem.plugin,
+ self.options,
+ self.is_typeshed_file,
+ indicator)
+
+ def check_for_omitted_generics(self, typ: Type) -> None:
+ if 'generics' not in self.options.disallow_any or self.is_typeshed_file:
+ return
+
+ for t in collect_any_types(typ):
+ if t.type_of_any == TypeOfAny.from_omitted_generics:
+ self.fail(messages.BARE_GENERIC, t)
+
+ def fail(self, msg: str, ctx: Context, *, blocker: bool = False) -> None:
+ self.errors.report(ctx.get_line(), ctx.get_column(), msg)
+
+ def fail_blocker(self, msg: str, ctx: Context) -> None:
+ self.fail(msg, ctx, blocker=True)
+
+ def builtin_type(self, name: str, args: List[Type] = None) -> Instance:
+ names = self.modules['builtins']
+ sym = names.names[name]
+ node = sym.node
+ assert isinstance(node, TypeInfo)
+ if args:
+ # TODO: assert len(args) == len(node.defn.type_vars)
+ return Instance(node, args)
+ any_type = AnyType(TypeOfAny.special_form)
+ return Instance(node, [any_type] * len(node.defn.type_vars))
+
+
+def add_protocol_members(typ: TypeInfo) -> None:
+ members = set() # type: Set[str]
+ if typ.mro:
+ for base in typ.mro[:-1]: # we skip "object" since everyone implements it
+ if base.is_protocol:
+ for name in base.names:
+ members.add(name)
+ typ.protocol_members = sorted(list(members))
+
+
+def is_identity_signature(sig: Type) -> bool:
+ """Is type a callable of form T -> T (where T is a type variable)?"""
+ if isinstance(sig, CallableType) and sig.arg_kinds == [ARG_POS]:
+ if isinstance(sig.arg_types[0], TypeVarType) and isinstance(sig.ret_type, TypeVarType):
+ return sig.arg_types[0].id == sig.ret_type.id
+ return False
+
+
+def calculate_return_type(expr: Expression) -> Optional[Type]:
+ """Return the return type if we can calculate it.
+
+    This only uses information available during semantic analysis, so it
+    will sometimes return None because of insufficient information (as
+    type inference hasn't run yet).
+ """
+ if isinstance(expr, RefExpr):
+ if isinstance(expr.node, FuncDef):
+ typ = expr.node.type
+ if typ is None:
+ # No signature -> default to Any.
+ return AnyType(TypeOfAny.unannotated)
+ # Explicit Any return?
+ if isinstance(typ, CallableType):
+ return typ.ret_type
+ return None
+ elif isinstance(expr.node, Var):
+ return expr.node.type
+ elif isinstance(expr, CallExpr):
+ return calculate_return_type(expr.callee)
+ return None
+
+
+def find_fixed_callable_return(expr: Expression) -> Optional[CallableType]:
+ if isinstance(expr, RefExpr):
+ if isinstance(expr.node, FuncDef):
+ typ = expr.node.type
+ if typ:
+ if isinstance(typ, CallableType) and has_no_typevars(typ.ret_type):
+ if isinstance(typ.ret_type, CallableType):
+ return typ.ret_type
+ elif isinstance(expr, CallExpr):
+ t = find_fixed_callable_return(expr.callee)
+ if t:
+ if isinstance(t.ret_type, CallableType):
+ return t.ret_type
+ return None
+
+
+class ForwardReferenceResolver(TypeTranslator):
+    """Visitor to replace previously detected forward references to synthetic types.
+
+ This is similar to TypeTranslator but tracks visited nodes to avoid
+ infinite recursion on potentially circular (self- or mutually-referential) types.
+ This visitor:
+ * Fixes forward references by unwrapping the linked type.
+ * Generates errors for unsupported type recursion and breaks recursion by resolving
+ recursive back references to Any types.
+ * Replaces instance types generated from unanalyzed NamedTuple and TypedDict class syntax
+ found in first pass with analyzed TupleType and TypedDictType.
+ """
+ def __init__(self, fail: Callable[[str, Context], None],
+ start: Union[Node, SymbolTableNode], warn: bool) -> None:
+ self.seen = [] # type: List[Type]
+ self.fail = fail
+ self.start = start
+ self.warn = warn
+
+ def check_recursion(self, t: Type) -> bool:
+ if any(t is s for s in self.seen):
+ if self.warn:
+ assert isinstance(self.start, Node), "Internal error: invalid error context"
+ self.fail('Recursive types not fully supported yet,'
+ ' nested types replaced with "Any"', self.start)
+ return True
+ self.seen.append(t)
+ return False
+
+ def visit_forwardref_type(self, t: ForwardRef) -> Type:
+ """This visitor method tracks situations like this:
+
+        x: A # This type is not yet known and is therefore wrapped in ForwardRef;
+             # its content is updated in SemanticAnalyzerPass3, and now we need
+             # to unwrap this type.
+ A = NewType('A', int)
+ """
+ assert t.resolved, 'Internal error: Unresolved forward reference: {}'.format(
+ t.unbound.name)
+ return t.resolved.accept(self)
+
+ def visit_instance(self, t: Instance, from_fallback: bool = False) -> Type:
+ """This visitor method tracks situations like this:
+
+ x: A # When analyzing this type we will get an Instance from SemanticAnalyzerPass1.
+             # Now we need to update this to the actual analyzed TupleType.
+ class A(NamedTuple):
+ attr: str
+
+ If from_fallback is True, then we always return an Instance type. This is needed
+ since TupleType and TypedDictType fallbacks are always instances.
+ """
+ info = t.type
+ # Special case, analyzed bases transformed the type into TupleType.
+ if info.tuple_type and not from_fallback:
+ items = [it.accept(self) for it in info.tuple_type.items]
+ info.tuple_type.items = items
+ return TupleType(items, Instance(info, []))
+ # Update forward Instances to corresponding analyzed NamedTuples.
+ if info.replaced and info.replaced.tuple_type:
+ tp = info.replaced.tuple_type
+ if self.check_recursion(tp):
+                # The key idea is that when we recursively return to a type already traversed,
+                # we break the cycle and put AnyType as a leaf.
+ return AnyType(TypeOfAny.from_error)
+ return tp.copy_modified(fallback=Instance(info.replaced, [])).accept(self)
+ # Same as above but for TypedDicts.
+ if info.replaced and info.replaced.typeddict_type:
+ td = info.replaced.typeddict_type
+ if self.check_recursion(td):
+ # We also break the cycles for TypedDicts as explained above for NamedTuples.
+ return AnyType(TypeOfAny.from_error)
+ return td.copy_modified(fallback=Instance(info.replaced, [])).accept(self)
+ if self.check_recursion(t):
+ # We also need to break a potential cycle with normal (non-synthetic) instance types.
+ return Instance(t.type, [AnyType(TypeOfAny.from_error)] * len(t.type.defn.type_vars))
+ return super().visit_instance(t)
+
+ def visit_type_var(self, t: TypeVarType) -> Type:
+ if self.check_recursion(t):
+ return AnyType(TypeOfAny.from_error)
+ if t.upper_bound:
+ t.upper_bound = t.upper_bound.accept(self)
+ if t.values:
+ t.values = [v.accept(self) for v in t.values]
+ return t
+
+ def visit_callable_type(self, t: CallableType) -> Type:
+ if self.check_recursion(t):
+ return AnyType(TypeOfAny.from_error)
+ arg_types = [tp.accept(self) for tp in t.arg_types]
+ ret_type = t.ret_type.accept(self)
+ variables = t.variables.copy()
+ for v in variables:
+ if v.upper_bound:
+ v.upper_bound = v.upper_bound.accept(self)
+ if v.values:
+ v.values = [val.accept(self) for val in v.values]
+ return t.copy_modified(arg_types=arg_types, ret_type=ret_type, variables=variables)
+
+ def visit_overloaded(self, t: Overloaded) -> Type:
+ if self.check_recursion(t):
+ return AnyType(TypeOfAny.from_error)
+ return super().visit_overloaded(t)
+
+ def visit_tuple_type(self, t: TupleType) -> Type:
+ if self.check_recursion(t):
+ return AnyType(TypeOfAny.from_error)
+ items = [it.accept(self) for it in t.items]
+ fallback = self.visit_instance(t.fallback, from_fallback=True)
+ assert isinstance(fallback, Instance)
+ return TupleType(items, fallback, t.line, t.column)
+
+ def visit_typeddict_type(self, t: TypedDictType) -> Type:
+ if self.check_recursion(t):
+ return AnyType(TypeOfAny.from_error)
+ items = OrderedDict([
+ (item_name, item_type.accept(self))
+ for (item_name, item_type) in t.items.items()
+ ])
+ fallback = self.visit_instance(t.fallback, from_fallback=True)
+ assert isinstance(fallback, Instance)
+ return TypedDictType(items, t.required_keys, fallback, t.line, t.column)
+
+ def visit_union_type(self, t: UnionType) -> Type:
+ if self.check_recursion(t):
+ return AnyType(TypeOfAny.from_error)
+ return super().visit_union_type(t)
+
+ def visit_type_type(self, t: TypeType) -> Type:
+ if self.check_recursion(t):
+ return AnyType(TypeOfAny.from_error)
+ return super().visit_type_type(t)
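To make this concrete, here is a minimal, hedged illustration (not part of the patch; the names are made up) of the forward-reference pattern that SemanticAnalyzerPass3 and ForwardReferenceResolver handle, mirroring the docstring examples above: a synthetic type ('A', a NamedTuple) is referenced before its definition, so earlier passes wrap the reference in a ForwardRef and the third pass unwraps it once 'A' has been analyzed.

    from typing import NamedTuple

    def f() -> None:
        x: A  # 'A' is not analyzed yet at this point, so the annotation is
              # wrapped in a ForwardRef and unwrapped by the third pass.
        x = A(attr='forward')
        print(x.attr)

    class A(NamedTuple):
        attr: str
        # A hypothetical recursive member such as 'parent: A' would be reported
        # as "Recursive types not fully supported yet" and replaced with Any.

    f()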
diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py
index 422f46a..6aa9fd7 100644
--- a/mypy/server/astdiff.py
+++ b/mypy/server/astdiff.py
@@ -56,7 +56,6 @@ def is_similar_node_shallow(n: SymbolTableNode, m: SymbolTableNode) -> bool:
# tvar_def
# type_override
if (n.kind != m.kind
- or n.mod_id != m.mod_id
or n.module_public != m.module_public):
return False
if type(n.node) != type(m.node): # noqa
diff --git a/mypy/server/deps.py b/mypy/server/deps.py
index 0402a51..05ef0fb 100644
--- a/mypy/server/deps.py
+++ b/mypy/server/deps.py
@@ -213,7 +213,7 @@ class TypeDependenciesVisitor(TypeVisitor[List[str]]):
return []
def visit_forwardref_type(self, typ: ForwardRef) -> List[str]:
- return get_type_dependencies(typ.link)
+ assert False, 'Internal error: Leaked forward reference object {}'.format(typ)
def visit_type_var(self, typ: TypeVarType) -> List[str]:
# TODO: replace with actual implementation
diff --git a/mypy/strconv.py b/mypy/strconv.py
index 099515f..5b536ab 100644
--- a/mypy/strconv.py
+++ b/mypy/strconv.py
@@ -58,27 +58,24 @@ class StrConv(NodeVisitor[str]):
array with information specific to methods, global functions or
anonymous functions.
"""
- args = [] # type: List[mypy.nodes.Var]
- init = [] # type: List[Optional[mypy.nodes.AssignmentStmt]]
+ args = [] # type: List[Union[mypy.nodes.Var, Tuple[str, List[mypy.nodes.Node]]]]
extra = [] # type: List[Tuple[str, List[mypy.nodes.Var]]]
- for i, arg in enumerate(o.arguments):
+ for arg in o.arguments:
kind = arg.kind # type: int
if kind in (mypy.nodes.ARG_POS, mypy.nodes.ARG_NAMED):
- args.append(o.arguments[i].variable)
+ args.append(arg.variable)
elif kind in (mypy.nodes.ARG_OPT, mypy.nodes.ARG_NAMED_OPT):
- args.append(o.arguments[i].variable)
- init.append(o.arguments[i].initialization_statement)
+ assert arg.initializer is not None
+ args.append(('default', [arg.variable, arg.initializer]))
elif kind == mypy.nodes.ARG_STAR:
- extra.append(('VarArg', [o.arguments[i].variable]))
+ extra.append(('VarArg', [arg.variable]))
elif kind == mypy.nodes.ARG_STAR2:
- extra.append(('DictVarArg', [o.arguments[i].variable]))
+ extra.append(('DictVarArg', [arg.variable]))
a = [] # type: List[Any]
if args:
a.append(('Args', args))
if o.type:
a.append(o.type)
- if init:
- a.append(('Init', init))
if o.is_generator:
a.append('Generator')
a.extend(extra)
diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index ce93bf7..ebd58b9 100644
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -455,13 +455,12 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
annotation = ": {}".format(self.print_annotation(annotated_type))
else:
annotation = ""
- init_stmt = arg_.initialization_statement
- if init_stmt:
+ if arg_.initializer:
initializer = '...'
if kind in (ARG_NAMED, ARG_NAMED_OPT) and '*' not in args:
args.append('*')
if not annotation:
- typename = self.get_str_type_of_node(init_stmt.rvalue, True)
+ typename = self.get_str_type_of_node(arg_.initializer, True)
annotation = ': {} = ...'.format(typename)
else:
annotation += '={}'.format(initializer)
diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py
index 10a46e0..7a4a6f2 100644
--- a/mypy/test/testcheck.py
+++ b/mypy/test/testcheck.py
@@ -171,7 +171,7 @@ class TypeCheckSuite(DataSuite):
if incremental_step:
options.incremental = True
else:
- options.cache_dir = os.devnull # Dont waste time writing cache
+ options.cache_dir = os.devnull # Don't waste time writing cache
sources = []
for module_name, program_path, program_text in module_data:
diff --git a/mypy/test/testcmdline.py b/mypy/test/testcmdline.py
index 5bf638a..26f1780 100644
--- a/mypy/test/testcmdline.py
+++ b/mypy/test/testcmdline.py
@@ -41,7 +41,7 @@ class PythonEvaluationSuite(DataSuite):
native_sep=True)
return c
- def run_case(self, testcase: DataDrivenTestCase):
+ def run_case(self, testcase: DataDrivenTestCase) -> None:
test_python_evaluation(testcase)
diff --git a/mypy/test/testpythoneval.py b/mypy/test/testpythoneval.py
index 635b99c..be32252 100644
--- a/mypy/test/testpythoneval.py
+++ b/mypy/test/testpythoneval.py
@@ -10,8 +10,6 @@ Note: These test cases are *not* included in the main test suite, as including
this suite would slow down the main suite too much.
"""
-from contextlib import contextmanager
-import errno
import os
import os.path
import re
@@ -25,6 +23,7 @@ from mypy.test.config import test_data_prefix, test_temp_dir
from mypy.test.data import DataDrivenTestCase, parse_test_cases, DataSuite
from mypy.test.helpers import assert_string_arrays_equal
from mypy.util import try_find_python2_interpreter
+from mypy import api
# Files which contain test case descriptions.
python_eval_files = ['pythoneval.test',
@@ -50,7 +49,7 @@ class PythonEvaluationSuite(DataSuite):
test_python_evaluation, test_temp_dir, True)
return c
- def run_case(self, testcase: DataDrivenTestCase):
+ def run_case(self, testcase: DataDrivenTestCase) -> None:
test_python_evaluation(testcase)
@@ -61,11 +60,7 @@ def test_python_evaluation(testcase: DataDrivenTestCase) -> None:
version.
"""
assert testcase.old_cwd is not None, "test was not properly set up"
- mypy_cmdline = [
- python3_path,
- os.path.join(testcase.old_cwd, 'scripts', 'mypy'),
- '--show-traceback',
- ]
+ mypy_cmdline = ['--show-traceback']
py2 = testcase.name.lower().endswith('python2')
if py2:
mypy_cmdline.append('--py2')
@@ -80,21 +75,27 @@ def test_python_evaluation(testcase: DataDrivenTestCase) -> None:
# Write the program to a file.
program = '_' + testcase.name + '.py'
- mypy_cmdline.append(program)
program_path = os.path.join(test_temp_dir, program)
+ mypy_cmdline.append(program_path)
with open(program_path, 'w') as file:
for s in testcase.input:
file.write('{}\n'.format(s))
+ output = []
# Type check the program.
- # This uses the same PYTHONPATH as the current process.
- returncode, out = run(mypy_cmdline)
+ out, err, returncode = api.run(mypy_cmdline)
+ # split lines, remove newlines, and remove directory of test case
+ for line in (out + err).splitlines():
+ if line.startswith(test_temp_dir + os.sep):
+ output.append(line[len(test_temp_dir + os.sep):].rstrip("\r\n"))
+ else:
+ output.append(line.rstrip("\r\n"))
if returncode == 0:
# Execute the program.
returncode, interp_out = run([interpreter, program])
- out += interp_out
+ output.extend(interp_out)
# Remove temp file.
os.remove(program_path)
- assert_string_arrays_equal(adapt_output(testcase), out,
+ assert_string_arrays_equal(adapt_output(testcase), output,
'Invalid output ({}, line {})'.format(
testcase.file, testcase.line))
@@ -115,7 +116,7 @@ def adapt_output(testcase: DataDrivenTestCase) -> List[str]:
def run(
- cmdline: List[str], *, env: Optional[Dict[str, str]] = None, timeout: int = 30
+ cmdline: List[str], *, env: Optional[Dict[str, str]] = None, timeout: int = 300
) -> Tuple[int, List[str]]:
"""A poor man's subprocess.run() for 3.3 and 3.4 compatibility."""
process = subprocess.Popen(
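For context, the test harness above now drives mypy through its in-process API instead of spawning a subprocess. A minimal sketch of that call (the file name is illustrative); api.run returns the would-be stdout, stderr, and exit status as a triple:

    from mypy import api

    # Run mypy on a file and capture its report instead of letting it print and exit.
    stdout, stderr, exit_status = api.run(['--show-traceback', 'example.py'])
    print(stdout, end='')
    print(stderr, end='')
    print('mypy exited with code', exit_status)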
diff --git a/mypy/traverser.py b/mypy/traverser.py
index 495bafd..53d5a9f 100644
--- a/mypy/traverser.py
+++ b/mypy/traverser.py
@@ -35,7 +35,7 @@ class TraverserVisitor(NodeVisitor[None]):
def visit_func(self, o: FuncItem) -> None:
for arg in o.arguments:
- init = arg.initialization_statement
+ init = arg.initializer
if init is not None:
init.accept(self)
diff --git a/mypy/treetransform.py b/mypy/treetransform.py
index 1343992..17d33e3 100644
--- a/mypy/treetransform.py
+++ b/mypy/treetransform.py
@@ -76,26 +76,11 @@ class TransformVisitor(NodeVisitor[Node]):
return ImportAll(node.id, node.relative)
def copy_argument(self, argument: Argument) -> Argument:
- init_stmt = None # type: Optional[AssignmentStmt]
-
- if argument.initialization_statement:
- init_lvalue = cast(
- NameExpr,
- self.expr(argument.initialization_statement.lvalues[0]),
- )
- init_lvalue.set_line(argument.line)
- init_stmt = AssignmentStmt(
- [init_lvalue],
- self.expr(argument.initialization_statement.rvalue),
- self.optional_type(argument.initialization_statement.type),
- )
-
arg = Argument(
self.visit_var(argument.variable),
argument.type_annotation,
argument.initializer,
argument.kind,
- init_stmt,
)
# Refresh lines of the inner things
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index 3119c19..6de9b1e 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -63,7 +63,8 @@ def analyze_type_alias(node: Expression,
is_typeshed_stub: bool,
allow_unnormalized: bool = False,
in_dynamic_func: bool = False,
- global_scope: bool = True) -> Optional[Type]:
+ global_scope: bool = True,
+ warn_bound_tvar: bool = False) -> Optional[Type]:
"""Return type if node is valid as a type alias rvalue.
Return None otherwise. 'node' must have been semantically analyzed.
@@ -117,7 +118,7 @@ def analyze_type_alias(node: Expression,
return None
analyzer = TypeAnalyser(lookup_func, lookup_fqn_func, tvar_scope, fail_func, note_func,
plugin, options, is_typeshed_stub, aliasing=True,
- allow_unnormalized=allow_unnormalized)
+ allow_unnormalized=allow_unnormalized, warn_bound_tvar=warn_bound_tvar)
analyzer.in_dynamic_func = in_dynamic_func
analyzer.global_scope = global_scope
return type.accept(analyzer)
@@ -154,7 +155,8 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
aliasing: bool = False,
allow_tuple_literal: bool = False,
allow_unnormalized: bool = False,
- third_pass: bool = False) -> None:
+ third_pass: bool = False,
+ warn_bound_tvar: bool = False) -> None:
self.lookup = lookup_func
self.lookup_fqn_func = lookup_fqn_func
self.fail_func = fail_func
@@ -168,6 +170,7 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
self.plugin = plugin
self.options = options
self.is_typeshed_stub = is_typeshed_stub
+ self.warn_bound_tvar = warn_bound_tvar
self.third_pass = third_pass
def visit_unbound_type(self, t: UnboundType) -> Type:
@@ -194,7 +197,11 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
tvar_def = self.tvar_scope.get_binding(sym)
else:
tvar_def = None
- if sym.kind == TVAR and tvar_def is not None:
+ if self.warn_bound_tvar and sym.kind == TVAR and tvar_def is not None:
+ self.fail('Can\'t use bound type variable "{}"'
+ ' to define generic alias'.format(t.name), t)
+ return AnyType(TypeOfAny.from_error)
+ elif sym.kind == TVAR and tvar_def is not None:
if len(t.args) > 0:
self.fail('Type variable "{}" used with arguments'.format(
t.name), t)
@@ -704,8 +711,8 @@ class TypeAnalyserPass3(TypeVisitor[None]):
arg_values = [arg]
self.check_type_var_values(info, arg_values, tvar.name, tvar.values, i + 1, t)
# TODO: These hacks will be not necessary when this will be moved to later stage.
- arg = self.update_type(arg)
- bound = self.update_type(tvar.upper_bound)
+ arg = self.resolve_type(arg)
+ bound = self.resolve_type(tvar.upper_bound)
if not is_subtype(arg, bound):
self.fail('Type argument "{}" of "{}" must be '
'a subtype of "{}"'.format(
@@ -719,9 +726,10 @@ class TypeAnalyserPass3(TypeVisitor[None]):
def check_type_var_values(self, type: TypeInfo, actuals: List[Type], arg_name: str,
valids: List[Type], arg_number: int, context: Context) -> None:
for actual in actuals:
- actual = self.update_type(actual)
+ actual = self.resolve_type(actual)
if (not isinstance(actual, AnyType) and
- not any(is_same_type(actual, self.update_type(value)) for value in valids)):
+ not any(is_same_type(actual, self.resolve_type(value))
+ for value in valids)):
if len(actuals) > 1 or not isinstance(actual, Instance):
self.fail('Invalid type argument value for "{}"'.format(
type.name()), context)
@@ -731,11 +739,13 @@ class TypeAnalyserPass3(TypeVisitor[None]):
self.fail(messages.INCOMPATIBLE_TYPEVAR_VALUE.format(
arg_name, class_name, actual_type_name), context)
- def update_type(self, tp: Type) -> Type:
+ def resolve_type(self, tp: Type) -> Type:
# This helper is only needed while is_subtype and is_same_type are
# called in third pass. This can be removed when TODO in visit_instance is fixed.
if isinstance(tp, ForwardRef):
- tp = tp.link
+ if tp.resolved is None:
+ return tp.unbound
+ tp = tp.resolved
if isinstance(tp, Instance) and tp.type.replaced:
replaced = tp.type.replaced
if replaced.tuple_type:
@@ -799,8 +809,9 @@ class TypeAnalyserPass3(TypeVisitor[None]):
def visit_forwardref_type(self, t: ForwardRef) -> None:
self.indicator['forward'] = True
- if isinstance(t.link, UnboundType):
- t.link = self.anal_type(t.link)
+ if t.resolved is None:
+ resolved = self.anal_type(t.unbound)
+ t.resolve(resolved)
def anal_type(self, tp: UnboundType) -> Type:
tpan = TypeAnalyser(self.lookup_func,
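A hedged sketch of the kind of code the new warn_bound_tvar flag is aimed at: reusing a type variable that is already bound by an enclosing generic class to define a generic alias (the class and alias names are illustrative):

    from typing import Generic, List, TypeVar

    T = TypeVar('T')

    class Stack(Generic[T]):
        # T is already bound by Stack here, so with warn_bound_tvar the alias
        # definition is rejected:
        # error: Can't use bound type variable "T" to define generic alias
        Alias = List[T]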
diff --git a/mypy/types.py b/mypy/types.py
index dcc8459..e5cc8ed 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -717,8 +717,7 @@ class CallableType(FunctionLike):
)
def is_type_obj(self) -> bool:
- t = self.fallback.type
- return t is not None and t.is_metaclass()
+ return self.fallback.type.is_metaclass()
def is_concrete_type_obj(self) -> bool:
return self.is_type_obj() and self.is_classmethod_class
@@ -1341,10 +1340,7 @@ class TypeType(Type):
type UnionType must be handled through make_normalized static method.
"""
super().__init__(line, column)
- if isinstance(item, CallableType) and item.is_type_obj():
- self.item = item.fallback
- else:
- self.item = item
+ self.item = item
@staticmethod
def make_normalized(item: Type, *, line: int = -1, column: int = -1) -> Type:
@@ -1390,21 +1386,33 @@ class ForwardRef(Type):
So that ForwardRefs are temporary and will be completely replaced with the linked types
or Any (to avoid cyclic references) before the type checking stage.
"""
- link = None # type: Type # The wrapped type
+ _unbound = None # type: UnboundType # The original wrapped type
+ _resolved = None # type: Optional[Type] # The resolved forward reference (initially None)
+
+ def __init__(self, unbound: UnboundType) -> None:
+ self._unbound = unbound
+ self._resolved = None
+
+ @property
+ def unbound(self) -> UnboundType:
+ # This is read-only to make it clear that resolution happens through resolve().
+ return self._unbound
+
+ @property
+ def resolved(self) -> Optional[Type]:
+ # Similar to above.
+ return self._resolved
- def __init__(self, link: Type) -> None:
- self.link = link
+ def resolve(self, resolved: Type) -> None:
+ """Resolve an unbound forward reference to point to a type."""
+ assert self._resolved is None
+ self._resolved = resolved
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_forwardref_type(self)
- def serialize(self):
- if isinstance(self.link, UnboundType):
- name = self.link.name
- if isinstance(self.link, Instance):
- name = self.link.type.name()
- else:
- name = self.link.__class__.__name__
+ def serialize(self) -> str:
+ name = self.unbound.name
# We should never get here since all forward references should be resolved
# and removed during semantic analysis.
assert False, "Internal error: Unresolved forward reference to {}".format(name)
@@ -1749,7 +1757,10 @@ class TypeStrVisitor(SyntheticTypeVisitor[str]):
return 'Type[{}]'.format(t.item.accept(self))
def visit_forwardref_type(self, t: ForwardRef) -> str:
- return '~{}'.format(t.link.accept(self))
+ if t.resolved:
+ return '~{}'.format(t.resolved.accept(self))
+ else:
+ return '~{}'.format(t.unbound.accept(self))
def list_str(self, a: List[Type]) -> str:
"""Convert items of an array to strings (pretty-print types)
@@ -1831,7 +1842,10 @@ class TypeQuery(SyntheticTypeVisitor[T]):
return t.item.accept(self)
def visit_forwardref_type(self, t: ForwardRef) -> T:
- return t.link.accept(self)
+ if t.resolved:
+ return t.resolved.accept(self)
+ else:
+ return t.unbound.accept(self)
def visit_ellipsis_type(self, t: EllipsisType) -> T:
return self.strategy([])
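A minimal sketch (assuming mypy 0.540 is importable; the type used for resolution is arbitrary) of the new ForwardRef protocol introduced above: a reference starts unresolved and is filled in exactly once via resolve(), which is what TypeAnalyserPass3.visit_forwardref_type now does.

    from mypy.types import AnyType, ForwardRef, TypeOfAny, UnboundType

    ref = ForwardRef(UnboundType('A'))
    assert ref.resolved is None                   # not resolved yet
    ref.resolve(AnyType(TypeOfAny.special_form))  # normally the analyzed target type
    print(ref.unbound.name, '->', ref.resolved)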
diff --git a/mypy/util.py b/mypy/util.py
index 1e8e318..21038b0 100644
--- a/mypy/util.py
+++ b/mypy/util.py
@@ -10,7 +10,7 @@ T = TypeVar('T')
ENCODING_RE = re.compile(br'([ \t\v]*#.*(\r\n?|\n))??[ \t\v]*#.*coding[:=][ \t]*([-\w.]+)')
-default_python2_interpreter = ['python2', 'python', '/usr/bin/python']
+default_python2_interpreter = ['python2', 'python', '/usr/bin/python', 'C:\\Python27\\python.exe']
def split_module_names(mod_name: str) -> List[str]:
@@ -69,10 +69,11 @@ def try_find_python2_interpreter() -> Optional[str]:
return _python2_interpreter
for interpreter in default_python2_interpreter:
try:
- process = subprocess.Popen([interpreter, '-V'], stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- stdout, stderr = process.communicate()
- if b'Python 2.7' in stdout:
+ retcode = subprocess.Popen([
+ interpreter, '-c',
+ 'import sys, typing; assert sys.version_info[:2] == (2, 7)'
+ ]).wait()
+ if not retcode:
_python2_interpreter = interpreter
return interpreter
except OSError:
diff --git a/mypy/version.py b/mypy/version.py
index 85ad90a..f973326 100644
--- a/mypy/version.py
+++ b/mypy/version.py
@@ -1,7 +1,7 @@
import os
from mypy import git
-__version__ = '0.530'
+__version__ = '0.540'
base_version = __version__
mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
diff --git a/mypy_self_check.ini b/mypy_self_check.ini
index 6b97ed6..6b871f0 100644
--- a/mypy_self_check.ini
+++ b/mypy_self_check.ini
@@ -7,7 +7,18 @@ no_implicit_optional = True
disallow_any = generics, unimported
warn_redundant_casts = True
warn_unused_ignores = True
+warn_unused_configs = True
# historical exception
[mypy-mypy.semanal]
strict_optional = False
+
+[mypy-mypy.semanal_pass1]
+strict_optional = False
+
+[mypy-mypy.semanal_pass3]
+strict_optional = False
+
+# needs py2 compatibility
+[mypy-mypy.test.testextensions]
+disallow_untyped_defs = False
diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test
index 7243de9..f5ea5d2 100644
--- a/test-data/unit/check-classes.test
+++ b/test-data/unit/check-classes.test
@@ -2105,7 +2105,7 @@ class B:
a = A
bad = lambda: 42
-B().bad() # E: Invalid method type
+B().bad() # E: Attribute function "bad" with type "Callable[[], int]" does not accept self argument
reveal_type(B.a) # E: Revealed type is 'def () -> __main__.A'
reveal_type(B().a) # E: Revealed type is 'def () -> __main__.A'
reveal_type(B().a()) # E: Revealed type is '__main__.A'
@@ -2119,17 +2119,17 @@ reveal_type(C().aa) # E: Revealed type is '__main__.A'
[out]
[case testClassValuedAttributesGeneric]
-from typing import Generic, TypeVar
+from typing import Generic, TypeVar, Type
T = TypeVar('T')
class A(Generic[T]):
def __init__(self, x: T) -> None:
self.x = x
class B(Generic[T]):
- a = A[T]
+ a: Type[A[T]] = A
-reveal_type(B[int]().a) # E: Revealed type is 'def (x: builtins.int*) -> __main__.A[builtins.int*]'
-B[int]().a('hi') # E: Argument 1 has incompatible type "str"; expected "int"
+reveal_type(B[int]().a) # E: Revealed type is 'Type[__main__.A[builtins.int*]]'
+B[int]().a('hi') # E: Argument 1 to "A" has incompatible type "str"; expected "int"
class C(Generic[T]):
a = A
@@ -3995,3 +3995,79 @@ class E(metaclass=t.M): pass
class F(six.with_metaclass(t.M)): pass
@six.add_metaclass(t.M)
class G: pass
+
+[case testCorrectEnclosingClassPushedInDeferred]
+class C:
+ def __getattr__(self, attr: str) -> int:
+ x: F
+ return x.f
+
+class F:
+ def __init__(self, f: int) -> None:
+ self.f = f
+[out]
+
+[case testCorrectEnclosingClassPushedInDeferred2]
+from typing import TypeVar
+T = TypeVar('T', bound=C)
+class C:
+ def m(self: T) -> T:
+ class Inner:
+ x: F
+ f = x.f
+ return self
+
+class F:
+ def __init__(self, f: int) -> None:
+ self.f = f
+[out]
+
+[case testCorrectEnclosingClassPushedInDeferred3]
+class A:
+ def f(self) -> None:
+ def g(x: int) -> int:
+ return y
+
+y = int()
+[out]
+
+[case testMetaclassMemberAccessViaType]
+from typing import Type
+class M(type):
+ def m(cls, x: int) -> int:
+ pass
+
+class C(metaclass=M):
+ pass
+x = C
+y: Type[C] = C
+
+reveal_type(type(C).m) # E: Revealed type is 'def (cls: __main__.M, x: builtins.int) -> builtins.int'
+reveal_type(type(x).m) # E: Revealed type is 'def (cls: __main__.M, x: builtins.int) -> builtins.int'
+reveal_type(type(y).m) # E: Revealed type is 'def (cls: __main__.M, x: builtins.int) -> builtins.int'
+[out]
+
+[case testMetaclassMemberAccessViaType2]
+from typing import Any, Type
+class M(type):
+ def m(cls, x: int) -> int:
+ pass
+B: Any
+class C(B, metaclass=M):
+ pass
+
+x: Type[C]
+reveal_type(x.m) # E: Revealed type is 'def (x: builtins.int) -> builtins.int'
+reveal_type(x.whatever) # E: Revealed type is 'Any'
+[out]
+
+[case testMetaclassMemberAccessViaType3]
+from typing import Any, Type, TypeVar
+T = TypeVar('T')
+class C(Any):
+ def bar(self: T) -> Type[T]: pass
+ def foo(self) -> None:
+ reveal_type(self.bar()) # E: Revealed type is 'Type[__main__.C*]'
+ reveal_type(self.bar().__name__) # E: Revealed type is 'builtins.str'
+[builtins fixtures/type.pyi]
+[out]
diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test
index e396062..5e99613 100644
--- a/test-data/unit/check-functions.test
+++ b/test-data/unit/check-functions.test
@@ -507,8 +507,8 @@ class A:
f = x # type: Callable[[], None]
g = x # type: Callable[[B], None]
a = None # type: A
-a.f() # E: Invalid method type
-a.g() # E: Invalid method type
+a.f() # E: Attribute function "f" with type "Callable[[], None]" does not accept self argument
+a.g() # E: Invalid self argument "A" to attribute function "g" with type "Callable[[B], None]"
[case testMethodWithDynamicallyTypedMethodAsDataAttribute]
from typing import Any, Callable
@@ -568,7 +568,7 @@ class A(Generic[t]):
ab = None # type: A[B]
ac = None # type: A[C]
ab.f()
-ac.f() # E: Invalid method type
+ac.f() # E: Invalid self argument "A[C]" to attribute function "f" with type "Callable[[A[B]], None]"
[case testPartiallyTypedSelfInMethodDataAttribute]
from typing import Any, TypeVar, Generic, Callable
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
index 6e8fcd4..b4a5050 100644
--- a/test-data/unit/check-generics.test
+++ b/test-data/unit/check-generics.test
@@ -1747,3 +1747,13 @@ class D:
def __or__(self, x: G[X]) -> G[X]: pass
def __ior__(self, x: G[S2]) -> G[S2]: pass \
# E: Signatures of "__ior__" and "__or__" are incompatible
+
+[case testConstraintInferenceForAnyAgainstTypeT]
+from typing import Type, Any, TypeVar
+
+T = TypeVar('T')
+
+def f(c: Type[T]) -> T: ...
+
+x: Any
+reveal_type(f(x)) # E: Revealed type is 'Any'
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index 1d3ee9a..5c952c8 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -27,7 +27,8 @@
-- Any files that we expect to be rechecked should be annotated in the [rechecked]
-- annotation, and any files expect to be stale (aka have a modified interface)
-- should be annotated in the [stale] annotation. Note that a file that ends up
--- producing an error does not create a new cache file and so is not considered stale.
+-- producing an error has its caches deleted and is marked stale automatically.
+-- Such files don't need to be included in the [stale ...] list.
--
-- The test suite will automatically assume that __main__ is stale and rechecked in
-- all cases so we can avoid constantly having to annotate it. The list of
@@ -200,7 +201,7 @@ def foo() -> int:
return "foo"
return inner2()
-[rechecked mod2]
+[rechecked mod1, mod2]
[stale]
[out2]
tmp/mod2.py:4: error: Incompatible return value type (got "str", expected "int")
@@ -2800,6 +2801,87 @@ b.x.y
tmp/c.py:2: error: Revealed type is '<stale cache: consider running mypy without --quick>'
tmp/c.py:5: error: "<stale cache: consider running mypy without --quick>" has no attribute "y"
+[case testCacheDeletedAfterErrorsFound]
+import a
+[file a.py]
+from b import x
+[file b.py]
+from c import x
+[file c.py]
+x = 1
+[file c.py.2]
+1 + 1
+[file a.py.3]
+from b import x
+1 + 1
+[out]
+[out2]
+tmp/b.py:1: error: Module 'c' has no attribute 'x'
+tmp/a.py:1: error: Module 'b' has no attribute 'x'
+[out3]
+tmp/b.py:1: error: Module 'c' has no attribute 'x'
+tmp/a.py:1: error: Module 'b' has no attribute 'x'
+
+[case testCacheDeletedAfterErrorsFound2]
+import a
+[file a.py]
+from b import x
+[file b.py]
+from c import C
+x: C
+[file c.py]
+class C: pass
+[file c.py.2]
+def C(): pass
+[file a.py.3]
+from b import x
+1 + 1
+[out]
+[out2]
+tmp/b.py:2: error: Invalid type "c.C"
+[out3]
+tmp/b.py:2: error: Invalid type "c.C"
+
+[case testCacheDeletedAfterErrorsFound3]
+import a
+[file a.py]
+import b
+b.f()
+[file b.py]
+def f() -> None: pass
+[file b.py.2]
+def f(x) -> None: pass
+[out]
+[out2]
+tmp/a.py:2: error: Too few arguments for "f"
+[out3]
+tmp/a.py:2: error: Too few arguments for "f"
+
+[case testCacheDeletedAfterErrorsFound4]
+import a
+[file a.py]
+from b import x
+[file b.py]
+from c import x
+[file c.py]
+from d import x
+[file d.py]
+x = 1
+[file d.py.2]
+1 + 1
+[file a.py.3]
+from b import x
+1 + 1
+[out]
+[out2]
+tmp/c.py:1: error: Module 'd' has no attribute 'x'
+tmp/b.py:1: error: Module 'c' has no attribute 'x'
+tmp/a.py:1: error: Module 'b' has no attribute 'x'
+[out3]
+tmp/c.py:1: error: Module 'd' has no attribute 'x'
+tmp/b.py:1: error: Module 'c' has no attribute 'x'
+tmp/a.py:1: error: Module 'b' has no attribute 'x'
+
[case testNoCrashOnDoubleImportAliasQuick]
# cmd: mypy -m e
# cmd2: mypy -m c
@@ -3164,3 +3246,23 @@ import foo
external_list = [0]
[builtins fixtures/dict.pyi]
+
+[case testIncrementalCrashOnTypeWithFunction]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import x
+
+[file b.py]
+from typing import TypeVar, Type
+T = TypeVar('T')
+
+def tp(arg: T) -> Type[T]:
+ pass
+def func(x: int) -> int:
+ pass
+
+x = tp(func)
+[out]
+[out2]
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index 3549a27..8eb9f18 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -413,7 +413,7 @@ def id(x: T) -> T:
return x
[out]
-[case testUnderspecifiedInferenceResult-skip]
+[case testUnderspecifiedInferenceResult]
from typing import TypeVar
T = TypeVar('T')
class A: pass
@@ -421,6 +421,7 @@ a = None # type: A
def ff() -> None:
x = f() # E: Need type annotation for variable
+ reveal_type(x)
g(None) # Ok
f() # Ok because not used to infer local variable type
@@ -874,9 +875,10 @@ for x in [A()]:
b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B")
a = x
-for y in []:
- a = y
- reveal_type(y) # E: Revealed type is 'builtins.None'
+for y in []: # E: Need type annotation for variable
+ a = y # E: Cannot determine type of 'y'
+ reveal_type(y) # E: Revealed type is 'Any' \
+ # E: Cannot determine type of 'y'
class A: pass
class B: pass
@@ -920,9 +922,11 @@ for x, y in [[A()]]:
a = x
a = y
-for e, f in [[]]:
- reveal_type(e) # E: Revealed type is 'builtins.None'
- reveal_type(f) # E: Revealed type is 'builtins.None'
+for e, f in [[]]: # E: Need type annotation for variable
+ reveal_type(e) # E: Revealed type is 'Any' \
+ # E: Cannot determine type of 'e'
+ reveal_type(f) # E: Revealed type is 'Any' \
+ # E: Cannot determine type of 'f'
class A: pass
class B: pass
@@ -1927,3 +1931,18 @@ x = None
(x, x) = f('')
reveal_type(x) # E: Revealed type is 'builtins.str'
[out]
+
+[case testInferenceNestedTuplesFromGenericIterable]
+from typing import Tuple, TypeVar
+
+T = TypeVar('T')
+
+def make_tuple(elem: T) -> Tuple[T]:
+ return (elem,)
+
+def main() -> None:
+ ((a, b),) = make_tuple((1, 2))
+ reveal_type(a) # E: Revealed type is 'builtins.int'
+ reveal_type(b) # E: Revealed type is 'builtins.int'
+[builtins fixtures/tuple.pyi]
+[out]
diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test
index 8279e1a..fade8bf 100644
--- a/test-data/unit/check-isinstance.test
+++ b/test-data/unit/check-isinstance.test
@@ -1447,16 +1447,16 @@ def f(x: Union[Type[int], Type[str], Type[List]]) -> None:
x()[1] # E: Value of type "Union[int, str]" is not indexable
else:
reveal_type(x) # E: Revealed type is 'Type[builtins.list[Any]]'
- reveal_type(x()) # E: Revealed type is 'builtins.list[<nothing>]'
+ reveal_type(x()) # E: Revealed type is 'builtins.list[Any]'
x()[1]
reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]'
- reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
+ reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]'
if issubclass(x, (str, (list,))):
reveal_type(x) # E: Revealed type is 'Union[Type[builtins.str], Type[builtins.list[Any]]]'
- reveal_type(x()) # E: Revealed type is 'Union[builtins.str, builtins.list[<nothing>]]'
+ reveal_type(x()) # E: Revealed type is 'Union[builtins.str, builtins.list[Any]]'
x()[1]
reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]'
- reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
+ reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]'
[builtins fixtures/isinstancelist.pyi]
[case testIssubclasDestructuringUnions2]
@@ -1469,16 +1469,16 @@ def f(x: Type[Union[int, str, List]]) -> None:
x()[1] # E: Value of type "Union[int, str]" is not indexable
else:
reveal_type(x) # E: Revealed type is 'Type[builtins.list[Any]]'
- reveal_type(x()) # E: Revealed type is 'builtins.list[<nothing>]'
+ reveal_type(x()) # E: Revealed type is 'builtins.list[Any]'
x()[1]
reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]'
- reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
+ reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]'
if issubclass(x, (str, (list,))):
reveal_type(x) # E: Revealed type is 'Union[Type[builtins.str], Type[builtins.list[Any]]]'
- reveal_type(x()) # E: Revealed type is 'Union[builtins.str, builtins.list[<nothing>]]'
+ reveal_type(x()) # E: Revealed type is 'Union[builtins.str, builtins.list[Any]]'
x()[1]
reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]'
- reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
+ reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]'
[builtins fixtures/isinstancelist.pyi]
[case testIssubclasDestructuringUnions3]
@@ -1486,23 +1486,23 @@ from typing import Union, List, Tuple, Dict, Type
def f(x: Type[Union[int, str, List]]) -> None:
reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]'
- reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
+ reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]'
if issubclass(x, (str, (int,))):
reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str]]'
reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str]'
x()[1] # E: Value of type "Union[int, str]" is not indexable
else:
reveal_type(x) # E: Revealed type is 'Type[builtins.list[Any]]'
- reveal_type(x()) # E: Revealed type is 'builtins.list[<nothing>]'
+ reveal_type(x()) # E: Revealed type is 'builtins.list[Any]'
x()[1]
reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]'
- reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
+ reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]'
if issubclass(x, (str, (list,))):
reveal_type(x) # E: Revealed type is 'Union[Type[builtins.str], Type[builtins.list[Any]]]'
- reveal_type(x()) # E: Revealed type is 'Union[builtins.str, builtins.list[<nothing>]]'
+ reveal_type(x()) # E: Revealed type is 'Union[builtins.str, builtins.list[Any]]'
x()[1]
reveal_type(x) # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]'
- reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
+ reveal_type(x()) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]'
[builtins fixtures/isinstancelist.pyi]
[case testIssubclass]
@@ -1757,7 +1757,6 @@ if isinstance(x, str, 1): # E: Too many arguments for "isinstance"
reveal_type(x) # E: Revealed type is 'builtins.int'
[builtins fixtures/isinstancelist.pyi]
-
[case testIsinstanceNarrowAny]
from typing import Any
@@ -1770,3 +1769,209 @@ def narrow_any_to_str_then_reassign_to_int() -> None:
reveal_type(v) # E: Revealed type is 'Any'
[builtins fixtures/isinstance.pyi]
+
+[case testNarrowTypeAfterInList]
+# flags: --strict-optional
+from typing import List, Optional
+
+x: List[int]
+y: Optional[int]
+
+if y in x:
+ reveal_type(y) # E: Revealed type is 'builtins.int'
+else:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+if y not in x:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+else:
+ reveal_type(y) # E: Revealed type is 'builtins.int'
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNarrowTypeAfterInListOfOptional]
+# flags: --strict-optional
+from typing import List, Optional
+
+x: List[Optional[int]]
+y: Optional[int]
+
+if y not in x:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+else:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNarrowTypeAfterInListNonOverlapping]
+# flags: --strict-optional
+from typing import List, Optional
+
+x: List[str]
+y: Optional[int]
+
+if y in x:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+else:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNarrowTypeAfterInListNested]
+# flags: --strict-optional
+from typing import List, Optional, Any
+
+x: Optional[int]
+lst: Optional[List[int]]
+nested_any: List[List[Any]]
+
+if lst in nested_any:
+ reveal_type(lst) # E: Revealed type is 'builtins.list[builtins.int]'
+if x in nested_any:
+ reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNarrowTypeAfterInTuple]
+# flags: --strict-optional
+from typing import Optional
+class A: pass
+class B(A): pass
+class C(A): pass
+
+y: Optional[B]
+if y in (B(), C()):
+ reveal_type(y) # E: Revealed type is '__main__.B'
+else:
+ reveal_type(y) # E: Revealed type is 'Union[__main__.B, builtins.None]'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testNarrowTypeAfterInNamedTuple]
+# flags: --strict-optional
+from typing import NamedTuple, Optional
+class NT(NamedTuple):
+ x: int
+ y: int
+nt: NT
+
+y: Optional[int]
+if y not in nt:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+else:
+ reveal_type(y) # E: Revealed type is 'builtins.int'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testNarrowTypeAfterInDict]
+# flags: --strict-optional
+from typing import Dict, Optional
+x: Dict[str, int]
+y: Optional[str]
+
+if y in x:
+ reveal_type(y) # E: Revealed type is 'builtins.str'
+else:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.str, builtins.None]'
+if y not in x:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.str, builtins.None]'
+else:
+ reveal_type(y) # E: Revealed type is 'builtins.str'
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testNarrowTypeAfterInList_python2]
+# flags: --strict-optional
+from typing import List, Optional
+
+x = [] # type: List[int]
+y = None # type: Optional[int]
+
+# TODO: Fix running tests on Python 2: "Iterator[int]" has no attribute "next"
+if y in x: # type: ignore
+ reveal_type(y) # E: Revealed type is 'builtins.int'
+else:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+if y not in x: # type: ignore
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+else:
+ reveal_type(y) # E: Revealed type is 'builtins.int'
+
+[builtins_py2 fixtures/python2.pyi]
+[out]
+
+[case testNarrowTypeAfterInNoAnyOrObject]
+# flags: --strict-optional
+from typing import Any, List, Optional
+x: List[Any]
+z: List[object]
+
+y: Optional[int]
+if y in x:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+else:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+
+if y not in z:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+else:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+[typing fixtures/typing-full.pyi]
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNarrowTypeAfterInUserDefined]
+# flags: --strict-optional
+from typing import Container, Optional
+
+class C(Container[int]):
+ def __contains__(self, item: object) -> bool:
+ return item is 'surprise'
+
+y: Optional[int]
+# We never trust user-defined types
+if y in C():
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+else:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+if y not in C():
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+else:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+[typing fixtures/typing-full.pyi]
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNarrowTypeAfterInSet]
+# flags: --strict-optional
+from typing import Optional, Set
+s: Set[str]
+
+y: Optional[str]
+if y in {'a', 'b', 'c'}:
+ reveal_type(y) # E: Revealed type is 'builtins.str'
+else:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.str, builtins.None]'
+if y not in s:
+ reveal_type(y) # E: Revealed type is 'Union[builtins.str, builtins.None]'
+else:
+ reveal_type(y) # E: Revealed type is 'builtins.str'
+[builtins fixtures/set.pyi]
+[out]
+
+[case testNarrowTypeAfterInTypedDict]
+# flags: --strict-optional
+from typing import Optional
+from mypy_extensions import TypedDict
+class TD(TypedDict):
+ a: int
+ b: str
+td: TD
+
+def f() -> None:
+ x: Optional[str]
+ if x not in td:
+ return
+ reveal_type(x) # E: Revealed type is 'builtins.str'
+[typing fixtures/typing-full.pyi]
+[builtins fixtures/dict.pyi]
+[out]
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
index 74b39fd..def8818 100644
--- a/test-data/unit/check-overloading.test
+++ b/test-data/unit/check-overloading.test
@@ -1,4 +1,26 @@
-- Test cases for function overloading
+[case testOverloadNotImportedNoCrash]
+@overload
+def f(a): pass
+@overload
+def f(a): pass
+def f(a): pass
+f(0)
+
+@overload # E: Name 'overload' is not defined
+def g(a:int): pass
+def g(a): pass # E: Name 'g' already defined
+g(0)
+
+@something # E: Name 'something' is not defined
+def r(a:int): pass
+def r(a): pass # E: Name 'r' already defined
+r(0)
+[out]
+main:1: error: Name 'overload' is not defined
+main:3: error: Name 'f' already defined
+main:3: error: Name 'overload' is not defined
+main:5: error: Name 'f' already defined
[case testTypeCheckOverloadWithImplementation]
from typing import overload, Any
@@ -1272,3 +1294,28 @@ a: Any
# The return type is not ambiguous so Any arguments cause no ambiguity.
reveal_type(f(a, 1, 1)) # E: Revealed type is 'builtins.str'
reveal_type(f(1, *a)) # E: Revealed type is 'builtins.str'
+
+[case testOverloadOnOverloadWithType]
+from typing import Any, Type, TypeVar, overload
+from mod import MyInt
+T = TypeVar('T')
+
+@overload
+def make(cls: Type[T]) -> T: pass
+@overload
+def make() -> Any: pass
+
+def make(*args):
+ pass
+
+c = make(MyInt)
+reveal_type(c) # E: Revealed type is 'mod.MyInt*'
+
+[file mod.pyi]
+from typing import overload
+class MyInt:
+ @overload
+ def __init__(self, x: str) -> None: pass
+ @overload
+ def __init__(self, x: str, y: int) -> None: pass
+[out]
diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test
index d0c4a56..4d79b92 100644
--- a/test-data/unit/check-selftype.test
+++ b/test-data/unit/check-selftype.test
@@ -351,21 +351,130 @@ class E:
def __init_subclass__(cls) -> None:
reveal_type(cls) # E: Revealed type is 'def () -> __main__.E'
-[case testSelfTypeProperty]
-from typing import TypeVar
+[case testSelfTypePropertyUnion]
+from typing import Union
+class A:
+ @property
+ def f(self: A) -> int: pass
-T = TypeVar('T', bound='A')
+class B:
+ @property
+ def f(self: B) -> int: pass
+x: Union[A, B]
+reveal_type(x.f) # E: Revealed type is 'builtins.int'
-class A:
+[builtins fixtures/property.pyi]
+
+[case testSelfTypeProperSupertypeAttribute]
+from typing import Callable, TypeVar
+class K: pass
+T = TypeVar('T', bound=K)
+class A(K):
@property
- def member(self: T) -> T:
- pass
+ def g(self: K) -> int: return 0
+ @property
+ def gt(self: T) -> T: return self
+ f: Callable[[object], int]
+ ft: Callable[[T], T]
+
+class B(A):
+ pass
+
+reveal_type(A().g) # E: Revealed type is 'builtins.int'
+reveal_type(A().gt) # E: Revealed type is '__main__.A*'
+reveal_type(A().f()) # E: Revealed type is 'builtins.int'
+reveal_type(A().ft()) # E: Revealed type is '__main__.A*'
+reveal_type(B().g) # E: Revealed type is 'builtins.int'
+reveal_type(B().gt) # E: Revealed type is '__main__.B*'
+reveal_type(B().f()) # E: Revealed type is 'builtins.int'
+reveal_type(B().ft()) # E: Revealed type is '__main__.B*'
+
+[builtins fixtures/property.pyi]
+
+[case testSelfTypeProperSupertypeAttributeTuple]
+from typing import Callable, TypeVar, Tuple
+T = TypeVar('T')
+class A(Tuple[int, int]):
+ @property
+ def g(self: object) -> int: return 0
+ @property
+ def gt(self: T) -> T: return self
+ f: Callable[[object], int]
+ ft: Callable[[T], T]
class B(A):
pass
-reveal_type(A().member) # E: Revealed type is '__main__.A*'
-reveal_type(B().member) # E: Revealed type is '__main__.B*'
+reveal_type(A().g) # E: Revealed type is 'builtins.int'
+reveal_type(A().gt) # E: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.A]'
+reveal_type(A().f()) # E: Revealed type is 'builtins.int'
+reveal_type(A().ft()) # E: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.A]'
+reveal_type(B().g) # E: Revealed type is 'builtins.int'
+reveal_type(B().gt) # E: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.B]'
+reveal_type(B().f()) # E: Revealed type is 'builtins.int'
+reveal_type(B().ft()) # E: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.B]'
+
+[builtins fixtures/property.pyi]
+
+[case testSelfTypeProperSupertypeAttributeMeta]
+from typing import Callable, TypeVar, Type
+T = TypeVar('T')
+class A(type):
+ @property
+ def g(cls: object) -> int: return 0
+ @property
+ def gt(cls: T) -> T: return cls
+ f: Callable[[object], int]
+ ft: Callable[[T], T]
+
+class B(A):
+ pass
+
+class X(metaclass=B):
+ def __init__(self, x: int) -> None: pass
+class Y(X): pass
+X1: Type[X]
+reveal_type(X.g) # E: Revealed type is 'builtins.int'
+reveal_type(X.gt) # E: Revealed type is 'def (x: builtins.int) -> __main__.X'
+reveal_type(X.f()) # E: Revealed type is 'builtins.int'
+reveal_type(X.ft()) # E: Revealed type is 'def (x: builtins.int) -> __main__.X'
+reveal_type(Y.g) # E: Revealed type is 'builtins.int'
+reveal_type(Y.gt) # E: Revealed type is 'def (x: builtins.int) -> __main__.Y'
+reveal_type(Y.f()) # E: Revealed type is 'builtins.int'
+reveal_type(Y.ft()) # E: Revealed type is 'def (x: builtins.int) -> __main__.Y'
+reveal_type(X1.g) # E: Revealed type is 'builtins.int'
+reveal_type(X1.gt) # E: Revealed type is 'Type[__main__.X]'
+reveal_type(X1.f()) # E: Revealed type is 'builtins.int'
+reveal_type(X1.ft()) # E: Revealed type is 'Type[__main__.X]'
+
+[builtins fixtures/property.pyi]
+
+[case testSelfTypeProperSupertypeAttributeGeneric]
+from typing import Callable, TypeVar, Generic
+Q = TypeVar('Q', covariant=True)
+class K(Generic[Q]):
+ q: Q
+T = TypeVar('T')
+class A(K[Q]):
+ @property
+ def g(self: K[object]) -> int: return 0
+ @property
+ def gt(self: K[T]) -> T: return self.q
+ f: Callable[[object], int]
+ ft: Callable[[T], T]
+
+class B(A[Q]):
+ pass
+a: A[int]
+b: B[str]
+reveal_type(a.g) # E: Revealed type is 'builtins.int'
+--reveal_type(a.gt) # E: Revealed type is 'builtins.int'
+reveal_type(a.f()) # E: Revealed type is 'builtins.int'
+reveal_type(a.ft()) # E: Revealed type is '__main__.A*[builtins.int]'
+reveal_type(b.g) # E: Revealed type is 'builtins.int'
+--reveal_type(b.gt) # E: Revealed type is '__main__.B*[builtins.str]'
+reveal_type(b.f()) # E: Revealed type is 'builtins.int'
+reveal_type(b.ft()) # E: Revealed type is '__main__.B*[builtins.str]'
[builtins fixtures/property.pyi]
@@ -376,3 +485,21 @@ class A:
# def g(self: None) -> None: ... see in check-python2.test
[out]
main:3: error: Self argument missing for a non-static method (or an invalid type for self)
+
+[case testUnionPropertyField]
+from typing import Union
+
+class A:
+ x: int
+
+class B:
+ @property
+ def x(self) -> int: return 1
+
+class C:
+ @property
+ def x(self) -> int: return 1
+
+ab: Union[A, B, C]
+reveal_type(ab.x) # E: Revealed type is 'builtins.int'
+[builtins fixtures/property.pyi]
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
index 387eabc..ab3cd6e 100644
--- a/test-data/unit/check-tuples.test
+++ b/test-data/unit/check-tuples.test
@@ -633,7 +633,7 @@ for x in t:
[case testForLoopOverEmptyTuple]
import typing
t = ()
-for x in t: pass
+for x in t: pass # E: Need type annotation for variable
[builtins fixtures/for.pyi]
[case testForLoopOverNoneValuedTuple]
diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test
index 4003266..8c05b74 100644
--- a/test-data/unit/check-type-aliases.test
+++ b/test-data/unit/check-type-aliases.test
@@ -96,6 +96,48 @@ GenAlias = Sequence[T]
def fun(x: Alias) -> GenAlias[int]: pass
[out]
+[case testCorrectQualifiedAliasesAlsoInFunctions]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+
+class X(Generic[T]):
+ A = X[S]
+ def f(self) -> X[T]:
+ pass
+
+ a: X[T]
+ b: A = a
+ c: A[T] = a
+ d: A[int] = a # E: Incompatible types in assignment (expression has type "X[T]", variable has type "X[int]")
+
+ def g(self) -> None:
+ a: X[T]
+ b: X.A = a
+ c: X.A[T] = a
+ d: X.A[int] = a # E: Incompatible types in assignment (expression has type "X[T]", variable has type "X[int]")
+
+def g(arg: X[int]) -> None:
+ p: X[int] = arg.f()
+ q: X.A = arg.f()
+ r: X.A[str] = arg.f() # E: Incompatible types in assignment (expression has type "X[int]", variable has type "X[str]")
+[out]
+
+[case testProhibitBoundTypeVariableReuseForAliases]
+from typing import TypeVar, Generic, List
+T = TypeVar('T')
+class C(Generic[T]):
+ A = List[T] # E: Can't use bound type variable "T" to define generic alias
+
+x: C.A
+reveal_type(x) # E: Revealed type is 'builtins.list[Any]'
+
+def f(x: T) -> T:
+ A = List[T] # E: Can't use bound type variable "T" to define generic alias
+ return x
+[builtins fixtures/list.pyi]
+[out]
+
[case testTypeAliasInBuiltins]
def f(x: bytes): pass
bytes
diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test
index 1dc5d14..eab5e3f 100644
--- a/test-data/unit/check-typevar-values.test
+++ b/test-data/unit/check-typevar-values.test
@@ -556,16 +556,15 @@ def outer(x: T) -> T:
[case testClassMemberTypeVarInFunctionBody]
from typing import TypeVar, List
+S = TypeVar('S')
class C:
T = TypeVar('T', bound=int)
def f(self, x: T) -> T:
- L = List[C.T] # this creates a variable, not an alias
- reveal_type(L) # E: Revealed type is 'Overload(def () -> builtins.list[T`-1], def (x: typing.Iterable[T`-1]) -> builtins.list[T`-1])'
- y: C.T = x
- L().append(x)
+ L = List[S]
+ y: L[C.T] = [x]
C.T # E: Type variable "C.T" cannot be used as an expression
A = C.T # E: Type variable "C.T" cannot be used as an expression
- return L()[0]
+ return y[0]
[builtins fixtures/list.pyi]
diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test
index a43c42a..465b180 100644
--- a/test-data/unit/check-unions.test
+++ b/test-data/unit/check-unions.test
@@ -497,6 +497,431 @@ if bool():
reveal_type(x) # E: Revealed type is 'Union[builtins.int, Any]'
[builtins fixtures/bool.pyi]
+[case testUnionMultiassignSingle]
+from typing import Union, Tuple, Any
+
+a: Union[Tuple[int], Tuple[float]]
+(a1,) = a
+reveal_type(a1) # E: Revealed type is 'builtins.float'
+
+b: Union[Tuple[int], Tuple[str]]
+(b1,) = b
+reveal_type(b1) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+
+[case testUnionMultiassignDouble]
+from typing import Union, Tuple
+
+c: Union[Tuple[int, int], Tuple[int, float]]
+(c1, c2) = c
+reveal_type(c1) # E: Revealed type is 'builtins.int'
+reveal_type(c2) # E: Revealed type is 'builtins.float'
+
+[case testUnionMultiassignGeneric]
+from typing import Union, Tuple, TypeVar
+T = TypeVar('T')
+S = TypeVar('S')
+
+def pack_two(x: T, y: S) -> Union[Tuple[T, T], Tuple[S, S]]:
+ pass
+
+(x, y) = pack_two(1, 'a')
+reveal_type(x) # E: Revealed type is 'Union[builtins.int*, builtins.str*]'
+reveal_type(y) # E: Revealed type is 'Union[builtins.int*, builtins.str*]'
+
+[case testUnionMultiassignAny]
+from typing import Union, Tuple, Any
+
+d: Union[Any, Tuple[float, float]]
+(d1, d2) = d
+reveal_type(d1) # E: Revealed type is 'Union[Any, builtins.float]'
+reveal_type(d2) # E: Revealed type is 'Union[Any, builtins.float]'
+
+e: Union[Any, Tuple[float, float], int]
+(e1, e2) = e # E: 'builtins.int' object is not iterable
+
+[case testUnionMultiassignNotJoin]
+from typing import Union, List
+
+class A: pass
+class B(A): pass
+class C(A): pass
+a: Union[List[B], List[C]]
+x, y = a
+reveal_type(x) # E: Revealed type is 'Union[__main__.B*, __main__.C*]'
+[builtins fixtures/list.pyi]
+
+[case testUnionMultiassignRebind]
+from typing import Union, List
+
+class A: pass
+class B(A): pass
+class C(A): pass
+obj: object
+a: Union[List[B], List[C]]
+obj, new = a
+reveal_type(obj) # E: Revealed type is 'Union[__main__.B*, __main__.C*]'
+reveal_type(new) # E: Revealed type is 'Union[__main__.B*, __main__.C*]'
+
+obj = 1
+reveal_type(obj) # E: Revealed type is 'builtins.int'
+[builtins fixtures/list.pyi]
+
+[case testUnionMultiassignAlreadyDeclared]
+from typing import Union, Tuple
+
+a: Union[Tuple[int, int], Tuple[int, float]]
+a1: object
+a2: int
+(a1, a2) = a # E: Incompatible types in assignment (expression has type "float", variable has type "int")
+
+b: Union[Tuple[float, int], Tuple[int, int]]
+b1: object
+b2: int
+(b1, b2) = b
+reveal_type(b1) # E: Revealed type is 'builtins.float'
+reveal_type(b2) # E: Revealed type is 'builtins.int'
+
+c: Union[Tuple[int, int], Tuple[int, int]]
+c1: object
+c2: int
+(c1, c2) = c
+reveal_type(c1) # E: Revealed type is 'builtins.int'
+reveal_type(c2) # E: Revealed type is 'builtins.int'
+
+d: Union[Tuple[int, int], Tuple[int, float]]
+d1: object
+(d1, d2) = d
+reveal_type(d1) # E: Revealed type is 'builtins.int'
+reveal_type(d2) # E: Revealed type is 'builtins.float'
+
+[case testUnionMultiassignIndexed]
+from typing import Union, Tuple, List
+
+class B:
+ x: object
+
+x: List[int]
+b: B
+
+a: Union[Tuple[int, int], Tuple[int, object]]
+(x[0], b.x) = a
+reveal_type(x[0]) # E: Revealed type is 'builtins.int*'
+reveal_type(b.x) # E: Revealed type is 'builtins.object'
+[builtins fixtures/list.pyi]
+
+[case testUnionMultiassignIndexedWithError]
+from typing import Union, Tuple, List
+
+class A: pass
+class B:
+ x: int
+
+x: List[A]
+b: B
+
+a: Union[Tuple[int, int], Tuple[int, object]]
+(x[0], b.x) = a # E: Incompatible types in assignment (expression has type "int", target has type "A") \
+ # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+reveal_type(x[0]) # E: Revealed type is '__main__.A*'
+reveal_type(b.x) # E: Revealed type is 'builtins.int'
+[builtins fixtures/list.pyi]
+
+[case testUnionMultiassignPacked]
+from typing import Union, Tuple, List
+
+a: Union[Tuple[int, int, int], Tuple[int, int, str]]
+a1: int
+a2: object
+(a1, *xs, a2) = a
+
+reveal_type(a1) # E: Revealed type is 'builtins.int'
+reveal_type(xs) # E: Revealed type is 'builtins.list[builtins.int*]'
+reveal_type(a2) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+[builtins fixtures/list.pyi]
+
+[case testUnpackingUnionOfListsInFunction]
+from typing import Union, List
+
+def f(x: bool) -> Union[List[int], List[str]]:
+ if x:
+ return [1, 1]
+ else:
+ return ['a', 'a']
+
+def g(x: bool) -> None:
+ a, b = f(x)
+ reveal_type(a) # E: Revealed type is 'Union[builtins.int*, builtins.str*]'
+ reveal_type(b) # E: Revealed type is 'Union[builtins.int*, builtins.str*]'
+[builtins fixtures/list.pyi]
+
+[case testUnionOfVariableLengthTupleUnpacking]
+from typing import Tuple, Union
+VarTuple = Union[Tuple[int, int], Tuple[int, int, int]]
+
+def make_tuple() -> VarTuple:
+ pass
+x = make_tuple()
+
+a, b = x # E: Too many values to unpack (2 expected, 3 provided)
+a, b, c = x # E: Need more than 2 values to unpack (3 expected)
+c, *d = x
+reveal_type(c) # E: Revealed type is 'builtins.int'
+reveal_type(d) # E: Revealed type is 'builtins.list[builtins.int*]'
+[builtins fixtures/tuple.pyi]
+
+[case testUnionOfNonIterableUnpacking]
+from typing import Union
+bad: Union[int, str]
+
+x, y = bad # E: 'builtins.int' object is not iterable \
+ # E: 'builtins.str' object is not iterable
+reveal_type(x) # E: Revealed type is 'Any'
+reveal_type(y) # E: Revealed type is 'Any'
+[out]
+
+[case testUnionAlwaysTooMany]
+from typing import Union, Tuple
+bad: Union[Tuple[int, int, int], Tuple[str, str, str]]
+
+x, y = bad # E: Too many values to unpack (2 expected, 3 provided)
+reveal_type(x) # E: Revealed type is 'Any'
+reveal_type(y) # E: Revealed type is 'Any'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testUnionAlwaysTooFew]
+from typing import Union, Tuple
+bad: Union[Tuple[int, int, int], Tuple[str, str, str]]
+
+x, y, z, w = bad # E: Need more than 3 values to unpack (4 expected)
+reveal_type(x) # E: Revealed type is 'Any'
+reveal_type(y) # E: Revealed type is 'Any'
+reveal_type(z) # E: Revealed type is 'Any'
+reveal_type(w) # E: Revealed type is 'Any'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testUnionUnpackingChainedTuple]
+from typing import Union, Tuple
+good: Union[Tuple[int, int], Tuple[str, str]]
+
+x, y = t = good
+reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+reveal_type(t) # E: Revealed type is 'Union[Tuple[builtins.int, builtins.int], Tuple[builtins.str, builtins.str]]'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testUnionUnpackingChainedTuple2]
+from typing import Union, Tuple
+good: Union[Tuple[int, int], Tuple[str, str]]
+
+t = x, y = good
+reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+reveal_type(t) # E: Revealed type is 'Union[Tuple[builtins.int, builtins.int], Tuple[builtins.str, builtins.str]]'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testUnionUnpackingChainedTuple3]
+from typing import Union, Tuple
+good: Union[Tuple[int, int], Tuple[str, str]]
+
+x, y = a, b = good
+reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+reveal_type(a) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+reveal_type(b) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testUnionUnpackingChainedList]
+from typing import Union, List
+good: Union[List[int], List[str]]
+
+lst = x, y = good
+reveal_type(x) # E: Revealed type is 'Union[builtins.int*, builtins.str*]'
+reveal_type(y) # E: Revealed type is 'Union[builtins.int*, builtins.str*]'
+reveal_type(lst) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str]]'
+[builtins fixtures/list.pyi]
+[out]
+
+[case testUnionUnpackingChainedList2]
+from typing import Union, List
+good: Union[List[int], List[str]]
+
+x, *y, z = lst = good
+reveal_type(x) # E: Revealed type is 'Union[builtins.int*, builtins.str*]'
+reveal_type(y) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str]]'
+reveal_type(z) # E: Revealed type is 'Union[builtins.int*, builtins.str*]'
+reveal_type(lst) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str]]'
+[builtins fixtures/list.pyi]
+[out]
+
+[case testUnionUnpackingInForTuple]
+from typing import Union, Tuple, NamedTuple
+class NTInt(NamedTuple):
+ x: int
+ y: int
+class NTStr(NamedTuple):
+ x: str
+ y: str
+
+nt: Union[NTInt, NTStr]
+for nx in nt:
+ reveal_type(nx) # E: Revealed type is 'Union[builtins.int*, builtins.str*]'
+
+t: Union[Tuple[int, int], Tuple[str, str]]
+for x in t:
+ # TODO(Ivan): This will be OK when tuple fallback patches are added (like above)
+ reveal_type(x) # E: Revealed type is 'Any'
+[builtins fixtures/for.pyi]
+[out]
+
+[case testUnionUnpackingInForList]
+from typing import Union, List, Tuple
+
+t: Union[List[Tuple[int, int]], List[Tuple[str, str]]]
+for x, y in t:
+ reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+ reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+
+t2: List[Union[Tuple[int, int], Tuple[str, str]]]
+for x2, y2 in t2:
+ reveal_type(x2) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+ reveal_type(y2) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+[builtins fixtures/for.pyi]
+[out]
+
+[case testUnionUnpackingDoubleBinder]
+from typing import Union, Tuple
+
+x: object
+y: object
+class A: pass
+class B: pass
+
+t1: Union[Tuple[A, A], Tuple[B, B]]
+t2: Union[Tuple[int, int], Tuple[str, str]]
+
+x, y = t1
+reveal_type(x) # E: Revealed type is 'Union[__main__.A, __main__.B]'
+reveal_type(y) # E: Revealed type is 'Union[__main__.A, __main__.B]'
+
+x, y = t2
+reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+
+x, y = object(), object()
+reveal_type(x) # E: Revealed type is 'builtins.object'
+reveal_type(y) # E: Revealed type is 'builtins.object'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testUnionUnpackingFromNestedTuples]
+from typing import Union, Tuple
+
+t: Union[Tuple[int, Tuple[int, int]], Tuple[str, Tuple[str, str]]]
+x, (y, z) = t
+reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+reveal_type(z) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testNestedUnionUnpackingFromNestedTuples]
+from typing import Union, Tuple
+
+class A: pass
+class B: pass
+
+t: Union[Tuple[int, Union[Tuple[int, int], Tuple[A, A]]], Tuple[str, Union[Tuple[str, str], Tuple[B, B]]]]
+x, (y, z) = t
+reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+reveal_type(y) # E: Revealed type is 'Union[builtins.int, __main__.A, builtins.str, __main__.B]'
+reveal_type(z) # E: Revealed type is 'Union[builtins.int, __main__.A, builtins.str, __main__.B]'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testNestedUnionUnpackingFromNestedTuplesBinder]
+from typing import Union, Tuple
+
+class A: pass
+class B: pass
+
+x: object
+y: object
+z: object
+
+t: Union[Tuple[int, Union[Tuple[int, int], Tuple[A, A]]], Tuple[str, Union[Tuple[str, str], Tuple[B, B]]]]
+x, (y, z) = t
+reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+reveal_type(y) # E: Revealed type is 'Union[builtins.int, __main__.A, builtins.str, __main__.B]'
+reveal_type(z) # E: Revealed type is 'Union[builtins.int, __main__.A, builtins.str, __main__.B]'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testUnpackUnionNoCrashOnPartialNone]
+# flags: --strict-optional
+from typing import Dict, Tuple, List, Any
+
+a: Any
+d: Dict[str, Tuple[List[Tuple[str, str]], str]]
+x, _ = d.get(a, (None, None))
+
+for y in x: pass # E: Iterable expected \
+ # E: Item "None" of "Optional[List[Tuple[str, str]]]" has no attribute "__iter__"
+if x:
+ for s, t in x:
+ reveal_type(s) # E: Revealed type is 'builtins.str'
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testUnpackUnionNoCrashOnPartialNone2]
+# flags: --strict-optional
+from typing import Dict, Tuple, List, Any
+
+a: Any
+x = None
+d: Dict[str, Tuple[List[Tuple[str, str]], str]]
+x, _ = d.get(a, (None, None))
+
+for y in x: pass # E: Iterable expected \
+ # E: Item "None" of "Optional[List[Tuple[str, str]]]" has no attribute "__iter__"
+if x:
+ for s, t in x:
+ reveal_type(s) # E: Revealed type is 'builtins.str'
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testUnpackUnionNoCrashOnPartialNoneBinder]
+# flags: --strict-optional
+from typing import Dict, Tuple, List, Any
+
+x: object
+a: Any
+d: Dict[str, Tuple[List[Tuple[str, str]], str]]
+x, _ = d.get(a, (None, None))
+# FIXME: fix narrow_declared_type for narrowed Optional types.
+reveal_type(x) # E: Revealed type is 'builtins.list[Tuple[builtins.str, builtins.str]]'
+
+for y in x: pass
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testUnpackUnionNoCrashOnPartialNoneList]
+# flags: --strict-optional
+from typing import Dict, Tuple, List, Any
+
+a: Any
+d: Dict[str, Tuple[List[Tuple[str, str]], str]]
+x, _ = d.get(a, ([], []))
+reveal_type(x) # E: Revealed type is 'Union[builtins.list[Tuple[builtins.str, builtins.str]], builtins.list[<nothing>]]'
+
+for y in x: pass
+[builtins fixtures/dict.pyi]
+[out]
+
[case testLongUnionFormatting]
from typing import Any, Generic, TypeVar, Union
diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test
index 1bd5b9c..8464fe0 100644
--- a/test-data/unit/cmdline.test
+++ b/test-data/unit/cmdline.test
@@ -1078,3 +1078,15 @@ ignore_errors = True
ignore_errors = False
[out]
a/b/c/d/e/__init__.py:1: error: "int" not callable
+
+[case testDisallowUntypedDefsAndGenerics]
+# cmd: mypy a.py
+[file mypy.ini]
+[[mypy]
+disallow_untyped_defs = True
+disallow_any = generics
+[file a.py]
+def get_tasks(self):
+ return 'whatever'
+[out]
+a.py:1: error: Function is missing a type annotation
diff --git a/test-data/unit/fixtures/dict.pyi b/test-data/unit/fixtures/dict.pyi
index cf8b61f..d8fc59f 100644
--- a/test-data/unit/fixtures/dict.pyi
+++ b/test-data/unit/fixtures/dict.pyi
@@ -19,6 +19,7 @@ class dict(Generic[KT, VT]):
def __getitem__(self, key: KT) -> VT: pass
def __setitem__(self, k: KT, v: VT) -> None: pass
def __iter__(self) -> Iterator[KT]: pass
+ def __contains__(self, item: object) -> bool: pass
def update(self, a: Mapping[KT, VT]) -> None: pass
@overload
def get(self, k: KT) -> Optional[VT]: pass
diff --git a/test-data/unit/fixtures/for.pyi b/test-data/unit/fixtures/for.pyi
index 8b8ce1c..31f6de7 100644
--- a/test-data/unit/fixtures/for.pyi
+++ b/test-data/unit/fixtures/for.pyi
@@ -9,7 +9,8 @@ class object:
def __init__(self) -> None: pass
class type: pass
-class tuple(Generic[t]): pass
+class tuple(Generic[t]):
+ def __iter__(self) -> Iterator[t]: pass
class function: pass
class bool: pass
class int: pass # for convenience
diff --git a/test-data/unit/fixtures/list.pyi b/test-data/unit/fixtures/list.pyi
index 7b6d1db..b97be43 100644
--- a/test-data/unit/fixtures/list.pyi
+++ b/test-data/unit/fixtures/list.pyi
@@ -16,9 +16,11 @@ class list(Generic[T]):
@overload
def __init__(self, x: Iterable[T]) -> None: pass
def __iter__(self) -> Iterator[T]: pass
+ def __contains__(self, item: object) -> bool: pass
def __add__(self, x: list[T]) -> list[T]: pass
def __mul__(self, x: int) -> list[T]: pass
def __getitem__(self, x: int) -> T: pass
+ def __setitem__(self, x: int, v: T) -> None: pass
def append(self, x: T) -> None: pass
def extend(self, x: Iterable[T]) -> None: pass
diff --git a/test-data/unit/fixtures/python2.pyi b/test-data/unit/fixtures/python2.pyi
index 61e48be..283ba18 100644
--- a/test-data/unit/fixtures/python2.pyi
+++ b/test-data/unit/fixtures/python2.pyi
@@ -11,6 +11,7 @@ class function: pass
class int: pass
class str: pass
class unicode: pass
+class bool: pass
T = TypeVar('T')
class list(Iterable[T], Generic[T]): pass
diff --git a/test-data/unit/fixtures/set.pyi b/test-data/unit/fixtures/set.pyi
index 79d53e8..9de7bda 100644
--- a/test-data/unit/fixtures/set.pyi
+++ b/test-data/unit/fixtures/set.pyi
@@ -13,9 +13,11 @@ class function: pass
class int: pass
class str: pass
+class bool: pass
class set(Iterable[T], Generic[T]):
def __iter__(self) -> Iterator[T]: pass
+ def __contains__(self, item: object) -> bool: pass
def add(self, x: T) -> None: pass
def discard(self, x: T) -> None: pass
def update(self, x: Set[T]) -> None: pass
diff --git a/test-data/unit/fixtures/tuple.pyi b/test-data/unit/fixtures/tuple.pyi
index 4e53d12..e231900 100644
--- a/test-data/unit/fixtures/tuple.pyi
+++ b/test-data/unit/fixtures/tuple.pyi
@@ -12,6 +12,7 @@ class type:
def __call__(self, *a) -> object: pass
class tuple(Sequence[Tco], Generic[Tco]):
def __iter__(self) -> Iterator[Tco]: pass
+ def __contains__(self, item: object) -> bool: pass
def __getitem__(self, x: int) -> Tco: pass
def count(self, obj: Any) -> int: pass
class function: pass
diff --git a/test-data/unit/fixtures/type.pyi b/test-data/unit/fixtures/type.pyi
index 4a2dcac..35cf0ad 100644
--- a/test-data/unit/fixtures/type.pyi
+++ b/test-data/unit/fixtures/type.pyi
@@ -11,6 +11,7 @@ class object:
class list(Generic[T]): pass
class type:
+ __name__: str
def mro(self) -> List['type']: pass
class tuple(Generic[T]): pass
@@ -18,4 +19,4 @@ class function: pass
class bool: pass
class int: pass
class str: pass
-class unicode: pass
\ No newline at end of file
+class unicode: pass
diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi
index 62fac70..fb6b1d3 100644
--- a/test-data/unit/fixtures/typing-full.pyi
+++ b/test-data/unit/fixtures/typing-full.pyi
@@ -126,6 +126,7 @@ class Mapping(Iterable[T], Protocol[T, T_co]):
def get(self, k: T, default: Union[T_co, V]) -> Union[T_co, V]: pass
def values(self) -> Iterable[T_co]: pass # Approximate return type
def __len__(self) -> int: ...
+ def __contains__(self, arg: object) -> int: pass
@runtime
class MutableMapping(Mapping[T, U], Protocol):
diff --git a/test-data/unit/parse-python2.test b/test-data/unit/parse-python2.test
index b654f6a..562a218 100644
--- a/test-data/unit/parse-python2.test
+++ b/test-data/unit/parse-python2.test
@@ -271,10 +271,8 @@ MypyFile:1(
FuncDef:1(
f
Args(
- Var(__tuple_arg_1))
- Init(
- AssignmentStmt:1(
- NameExpr(__tuple_arg_1)
+ default(
+ Var(__tuple_arg_1)
TupleExpr:1(
IntExpr(1)
IntExpr(2))))
diff --git a/test-data/unit/parse.test b/test-data/unit/parse.test
index 417d37c..92c4ea4 100644
--- a/test-data/unit/parse.test
+++ b/test-data/unit/parse.test
@@ -1088,10 +1088,8 @@ MypyFile:1(
FuncDef:1(
f
Args(
- Var(x))
- Init(
- AssignmentStmt:1(
- NameExpr(x)
+ default(
+ Var(x)
IntExpr(1)))
Block:1(
PassStmt:2()))
@@ -1099,17 +1097,14 @@ MypyFile:1(
g
Args(
Var(x)
- Var(y)
- Var(z))
- Init(
- AssignmentStmt:3(
- NameExpr(y)
+ default(
+ Var(y)
OpExpr:3(
+
IntExpr(1)
IntExpr(2)))
- AssignmentStmt:3(
- NameExpr(z)
+ default(
+ Var(z)
TupleExpr:3(
IntExpr(1)
IntExpr(2))))
@@ -1451,10 +1446,8 @@ MypyFile:1(
ExpressionStmt:1(
LambdaExpr:1(
Args(
- Var(x))
- Init(
- AssignmentStmt:1(
- NameExpr(x)
+ default(
+ Var(x)
IntExpr(2)))
Block:1(
ReturnStmt:1(
@@ -2307,10 +2300,8 @@ MypyFile:1(
MaxPos(1)
Args(
Var(x)
- Var(y))
- Init(
- AssignmentStmt:1(
- NameExpr(y)
+ default(
+ Var(y)
IntExpr(1)))
Block:1(
PassStmt:1())))
@@ -2324,12 +2315,10 @@ MypyFile:1(
MaxPos(1)
Args(
Var(x)
- Var(y))
- def (x: A?, *, y: B? =) -> None?
- Init(
- AssignmentStmt:1(
- NameExpr(y)
+ default(
+ Var(y)
IntExpr(1)))
+ def (x: A?, *, y: B? =) -> None?
Block:1(
PassStmt:1())))
@@ -2341,12 +2330,10 @@ MypyFile:1(
f
MaxPos(0)
Args(
- Var(y))
- def (*, y: B? =) -> None?
- Init(
- AssignmentStmt:1(
- NameExpr(y)
+ default(
+ Var(y)
IntExpr(1)))
+ def (*, y: B? =) -> None?
Block:1(
PassStmt:1())))
@@ -3029,10 +3016,8 @@ MypyFile:1(
MaxPos(1)
Args(
Var(x)
- Var(y))
- Init(
- AssignmentStmt:1(
- NameExpr(y)
+ default(
+ Var(y)
NameExpr(None)))
VarArg(
Var(args))
diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test
index 80bbaee..8f9ffd8 100644
--- a/test-data/unit/pythoneval.test
+++ b/test-data/unit/pythoneval.test
@@ -1402,3 +1402,43 @@ o: object = p
it2: Iterable[int] = p
[out]
_testCanConvertTypedDictToAnySuperclassOfMapping.py:11: error: Incompatible types in assignment (expression has type "Point", variable has type "Iterable[int]")
+
+[case testAsyncioGatherPreciseType]
+import asyncio
+from typing import Tuple
+
+async def get_location(arg: str) -> Tuple[str, str]:
+ return arg, arg
+
+async def main() -> None:
+ ((a_x, a_y),) = await asyncio.gather(get_location('start'))
+ reveal_type(a_x)
+ reveal_type(a_y)
+[out]
+_testAsyncioGatherPreciseType.py:9: error: Revealed type is 'builtins.str'
+_testAsyncioGatherPreciseType.py:10: error: Revealed type is 'builtins.str'
+
+[case testNoCrashOnGenericUnionUnpacking]
+from typing import Union, Dict
+
+TEST = {'key': ('a', 'b')}
+def test() -> None:
+ a, b = TEST.get('foo', ('x', 'y'))
+ reveal_type(a)
+ reveal_type(b)
+def test2() -> None:
+ a, b = TEST.get('foo', (1, 2))
+ reveal_type(a)
+ reveal_type(b)
+
+x: Union[Dict[int, int], Dict[str, str]] = dict(a='b')
+for a, b in x.items():
+ reveal_type(a)
+ reveal_type(b)
+[out]
+_testNoCrashOnGenericUnionUnpacking.py:6: error: Revealed type is 'builtins.str'
+_testNoCrashOnGenericUnionUnpacking.py:7: error: Revealed type is 'builtins.str'
+_testNoCrashOnGenericUnionUnpacking.py:10: error: Revealed type is 'Union[builtins.str, builtins.int]'
+_testNoCrashOnGenericUnionUnpacking.py:11: error: Revealed type is 'Union[builtins.str, builtins.int]'
+_testNoCrashOnGenericUnionUnpacking.py:15: error: Revealed type is 'Union[builtins.int*, builtins.str*]'
+_testNoCrashOnGenericUnionUnpacking.py:16: error: Revealed type is 'Union[builtins.int*, builtins.str*]'
diff --git a/test-data/unit/semanal-basic.test b/test-data/unit/semanal-basic.test
index 3c11da8..08b27e8 100644
--- a/test-data/unit/semanal-basic.test
+++ b/test-data/unit/semanal-basic.test
@@ -184,14 +184,11 @@ MypyFile:1(
FuncDef:1(
f
Args(
- Var(x)
- Var(y))
- Init(
- AssignmentStmt:1(
- NameExpr(x [l])
+ default(
+ Var(x)
NameExpr(f [__main__.f]))
- AssignmentStmt:1(
- NameExpr(y [l])
+ default(
+ Var(y)
NameExpr(object [builtins.object])))
Block:1(
ExpressionStmt:2(
diff --git a/test-data/unit/semanal-classes.test b/test-data/unit/semanal-classes.test
index d68aa17..50a0bc3 100644
--- a/test-data/unit/semanal-classes.test
+++ b/test-data/unit/semanal-classes.test
@@ -547,12 +547,10 @@ MypyFile:1(
f
Args(
Var(self)
- Var(x))
- def (self: __main__.A, x: builtins.int =)
- Init(
- AssignmentStmt:4(
- NameExpr(x [l])
+ default(
+ Var(x)
NameExpr(X [__main__.A.X])))
+ def (self: __main__.A, x: builtins.int =)
Block:4(
PassStmt:4()))))
diff --git a/test-data/unit/semanal-symtable.test b/test-data/unit/semanal-symtable.test
index 4821635..9f7a6d9 100644
--- a/test-data/unit/semanal-symtable.test
+++ b/test-data/unit/semanal-symtable.test
@@ -9,21 +9,21 @@ x = 1
[out]
__main__:
SymbolTable(
- x : Gdef/Var (__main__))
+ x : Gdef/Var (__main__.x))
[case testFuncDef]
def f(): pass
[out]
__main__:
SymbolTable(
- f : Gdef/FuncDef (__main__))
+ f : Gdef/FuncDef (__main__.f))
[case testEmptyClassDef]
class c: pass
[out]
__main__:
SymbolTable(
- c : Gdef/TypeInfo (__main__))
+ c : Gdef/TypeInfo (__main__.c))
[case testImport]
import m
@@ -32,10 +32,10 @@ x = 1
[out]
__main__:
SymbolTable(
- m : ModuleRef/MypyFile (__main__))
+ m : ModuleRef/MypyFile (m))
m:
SymbolTable(
- x : Gdef/Var (m))
+ x : Gdef/Var (m.x))
[case testImportFromModule]
from m import x
@@ -45,8 +45,22 @@ y = 1
[out]
__main__:
SymbolTable(
- x : Gdef/TypeInfo (__main__))
+ x : Gdef/TypeInfo (m.x))
m:
SymbolTable(
- x : Gdef/TypeInfo (m)
- y : Gdef/Var (m))
+ x : Gdef/TypeInfo (m.x)
+ y : Gdef/Var (m.y))
+
+[case testImportAs]
+from m import x as xx
+[file m.py]
+class x: pass
+y = 1
+[out]
+__main__:
+ SymbolTable(
+ xx : Gdef/TypeInfo (m.x))
+m:
+ SymbolTable(
+ x : Gdef/TypeInfo (m.x)
+ y : Gdef/Var (m.y))
diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test
index ca00516..b55e34d 100644
--- a/test-data/unit/semanal-types.test
+++ b/test-data/unit/semanal-types.test
@@ -1023,12 +1023,10 @@ MypyFile:1(
g
MaxPos(0)
Args(
- Var(y))
- def (*x: builtins.int, *, y: builtins.str =) -> Any
- Init(
- AssignmentStmt:1(
- NameExpr(y [l])
+ default(
+ Var(y)
StrExpr()))
+ def (*x: builtins.int, *, y: builtins.str =) -> Any
VarArg(
Var(x))
Block:1(
diff --git a/typeshed/stdlib/2/__builtin__.pyi b/typeshed/stdlib/2/__builtin__.pyi
index b781292..0b8dfb1 100644
--- a/typeshed/stdlib/2/__builtin__.pyi
+++ b/typeshed/stdlib/2/__builtin__.pyi
@@ -789,7 +789,10 @@ def sorted(iterable: Iterable[_T], *,
cmp: Callable[[_T, _T], int] = ...,
key: Callable[[_T], Any] = ...,
reverse: bool = ...) -> List[_T]: ...
-def sum(iterable: Iterable[_T], start: _T = ...) -> _T: ...
+@overload
+def sum(iterable: Iterable[_T]) -> Union[_T, int]: ...
+@overload
+def sum(iterable: Iterable[_T], start: _S) -> Union[_T, _S]: ...
def unichr(i: int) -> unicode: ...
def vars(object: Any = ...) -> Dict[str, Any]: ...
@overload
diff --git a/typeshed/stdlib/2/builtins.pyi b/typeshed/stdlib/2/builtins.pyi
index b781292..0b8dfb1 100644
--- a/typeshed/stdlib/2/builtins.pyi
+++ b/typeshed/stdlib/2/builtins.pyi
@@ -789,7 +789,10 @@ def sorted(iterable: Iterable[_T], *,
cmp: Callable[[_T, _T], int] = ...,
key: Callable[[_T], Any] = ...,
reverse: bool = ...) -> List[_T]: ...
-def sum(iterable: Iterable[_T], start: _T = ...) -> _T: ...
+@overload
+def sum(iterable: Iterable[_T]) -> Union[_T, int]: ...
+@overload
+def sum(iterable: Iterable[_T], start: _S) -> Union[_T, _S]: ...
def unichr(i: int) -> unicode: ...
def vars(object: Any = ...) -> Dict[str, Any]: ...
@overload
diff --git a/typeshed/stdlib/2/collections.pyi b/typeshed/stdlib/2/collections.pyi
index e0d983e..bd74205 100644
--- a/typeshed/stdlib/2/collections.pyi
+++ b/typeshed/stdlib/2/collections.pyi
@@ -96,6 +96,7 @@ class Counter(Dict[_T, int], Generic[_T]):
class OrderedDict(Dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]):
def popitem(self, last: bool = ...) -> Tuple[_KT, _VT]: ...
def __reversed__(self) -> Iterator[_KT]: ...
+ def __copy__(self) -> OrderedDict[_KT, _VT]: ...
class defaultdict(Dict[_KT, _VT], Generic[_KT, _VT]):
default_factory = ... # type: Callable[[], _VT]
diff --git a/typeshed/stdlib/2/datetime.pyi b/typeshed/stdlib/2/datetime.pyi
index 2a804b2..725aee3 100644
--- a/typeshed/stdlib/2/datetime.pyi
+++ b/typeshed/stdlib/2/datetime.pyi
@@ -141,10 +141,10 @@ class datetime(object):
resolution = ... # type: timedelta
def __init__(self, year: int, month: int, day: int, hour: int = ...,
- minute: int = ..., second: int = ..., microseconds: int = ...,
+ minute: int = ..., second: int = ..., microsecond: int = ...,
tzinfo: tzinfo = ...) -> None: ...
def __new__(cls, year: int, month: int, day: int, hour: int = ...,
- minute: int = ..., second: int = ..., microseconds: int = ...,
+ minute: int = ..., second: int = ..., microsecond: int = ...,
tzinfo: tzinfo = ...) -> datetime: ...
@property
diff --git a/typeshed/stdlib/2/textwrap.pyi b/typeshed/stdlib/2/textwrap.pyi
index df8afa6..32a314c 100644
--- a/typeshed/stdlib/2/textwrap.pyi
+++ b/typeshed/stdlib/2/textwrap.pyi
@@ -1,29 +1,65 @@
-from typing import Any, AnyStr
+from typing import AnyStr, List, Dict, Pattern
-class _unicode: ...
+class TextWrapper(object):
+ width: int = ...
+ initial_indent: str = ...
+ subsequent_indent: str = ...
+ expand_tabs: bool = ...
+ replace_whitespace: bool = ...
+ fix_sentence_endings: bool = ...
+ drop_whitespace: bool = ...
+ break_long_words: bool = ...
+ break_on_hyphens: bool = ...
-class TextWrapper:
- whitespace_trans = ... # type: Any
- unicode_whitespace_trans = ... # type: Any
- uspace = ... # type: Any
- wordsep_re = ... # type: Any
- wordsep_simple_re = ... # type: Any
- sentence_end_re = ... # type: Any
- width = ... # type: Any
- initial_indent = ... # type: Any
- subsequent_indent = ... # type: Any
- expand_tabs = ... # type: Any
- replace_whitespace = ... # type: Any
- fix_sentence_endings = ... # type: Any
- break_long_words = ... # type: Any
- drop_whitespace = ... # type: Any
- break_on_hyphens = ... # type: Any
- wordsep_re_uni = ... # type: Any
- wordsep_simple_re_uni = ... # type: Any
- def __init__(self, width=..., initial_indent=..., subsequent_indent=..., expand_tabs=..., replace_whitespace=..., fix_sentence_endings=..., break_long_words=..., drop_whitespace=..., break_on_hyphens=...) -> None: ...
- def wrap(self, text): ...
- def fill(self, text): ...
+ # Attributes not present in documentation
+ sentence_end_re: Pattern[str] = ...
+ wordsep_re: Pattern[str] = ...
+ wordsep_simple_re: Pattern[str] = ...
+ whitespace_trans: str = ...
+ unicode_whitespace_trans: Dict[int, int] = ...
+ uspace: int = ...
+ x: int = ...
+
+ def __init__(
+ self,
+ width: int = ...,
+ initial_indent: str = ...,
+ subsequent_indent: str = ...,
+ expand_tabs: bool = ...,
+ replace_whitespace: bool = ...,
+ fix_sentence_endings: bool = ...,
+ break_long_words: bool = ...,
+ drop_whitespace: bool = ...,
+ break_on_hyphens: bool = ...) -> None:
+ ...
+
+ def wrap(self, text: AnyStr) -> List[AnyStr]: ...
+ def fill(self, text: AnyStr) -> AnyStr: ...
+
+def wrap(
+ text: AnyStr,
+ width: int = ...,
+ initial_indent: AnyStr = ...,
+ subsequent_indent: AnyStr = ...,
+ expand_tabs: bool = ...,
+ replace_whitespace: bool = ...,
+ fix_sentence_endings: bool = ...,
+ break_long_words: bool = ...,
+ drop_whitespace: bool = ...,
+ break_on_hyphens: bool = ...) -> AnyStr:
+ ...
+
+def fill(
+ text: AnyStr,
+ width: int =...,
+ initial_indent: AnyStr = ...,
+ subsequent_indent: AnyStr = ...,
+ expand_tabs: bool = ...,
+ replace_whitespace: bool = ...,
+ fix_sentence_endings: bool = ...,
+ break_long_words: bool = ...,
+ drop_whitespace: bool = ...,
+ break_on_hyphens: bool = ...) -> AnyStr:
+ ...
-def wrap(text, width=..., **kwargs): ...
-def fill(text, width=..., **kwargs): ...
def dedent(text: AnyStr) -> AnyStr: ...
diff --git a/typeshed/stdlib/2/whichdb.pyi b/typeshed/stdlib/2/whichdb.pyi
new file mode 100644
index 0000000..b1a69f4
--- /dev/null
+++ b/typeshed/stdlib/2/whichdb.pyi
@@ -0,0 +1,5 @@
+# Source: https://hg.python.org/cpython/file/2.7/Lib/whichdb.py
+
+from typing import Optional, Text
+
+def whichdb(filename: Text) -> Optional[str]: ...
diff --git a/typeshed/stdlib/2and3/crypt.pyi b/typeshed/stdlib/2and3/crypt.pyi
new file mode 100644
index 0000000..db0e24a
--- /dev/null
+++ b/typeshed/stdlib/2and3/crypt.pyi
@@ -0,0 +1,18 @@
+import sys
+from typing import List, NamedTuple, Optional, Union
+
+
+if sys.version_info >= (3, 3):
+ class _Method: ...
+
+ METHOD_CRYPT: _Method
+ METHOD_MD5: _Method
+ METHOD_SHA256: _Method
+ METHOD_SHA512: _Method
+
+ methods: List[_Method]
+
+ def mksalt(method: Optional[_Method] = ...) -> str: ...
+ def crypt(word: str, salt: Optional[Union[str, _Method]] = ...) -> str: ...
+else:
+ def crypt(word: str, salt: str) -> str: ...
diff --git a/typeshed/stdlib/2and3/socket.pyi b/typeshed/stdlib/2and3/socket.pyi
index 34ed452..4fc8f0d 100644
--- a/typeshed/stdlib/2and3/socket.pyi
+++ b/typeshed/stdlib/2and3/socket.pyi
@@ -6,21 +6,21 @@
# see: http://nullege.com/codes/search/socket
# adapted for Python 2.7 by Michal Pokorny
import sys
-from typing import Any, Tuple, List, Optional, Union, overload, TypeVar
+from typing import Any, Iterable, Tuple, List, Optional, Union, overload, TypeVar
# ----- variables and constants -----
-AF_UNIX: int
-AF_INET: int
-AF_INET6: int
-SOCK_STREAM: int
-SOCK_DGRAM: int
-SOCK_RAW: int
-SOCK_RDM: int
-SOCK_SEQPACKET: int
-SOCK_CLOEXEC: int
-SOCK_NONBLOCK: int
+AF_UNIX: AddressFamily
+AF_INET: AddressFamily
+AF_INET6: AddressFamily
+SOCK_STREAM: SocketKind
+SOCK_DGRAM: SocketKind
+SOCK_RAW: SocketKind
+SOCK_RDM: SocketKind
+SOCK_SEQPACKET: SocketKind
+SOCK_CLOEXEC: SocketKind
+SOCK_NONBLOCK: SocketKind
SOMAXCONN: int
has_ipv6: bool
_GLOBAL_DEFAULT_TIMEOUT: Any
@@ -28,46 +28,46 @@ SocketType: Any
SocketIO: Any
# These are flags that may exist on Python 3.6. Many don't exist on all platforms.
-AF_AAL5: int
-AF_APPLETALK: int
-AF_ASH: int
-AF_ATMPVC: int
-AF_ATMSVC: int
-AF_AX25: int
-AF_BLUETOOTH: int
-AF_BRIDGE: int
-AF_CAN: int
-AF_DECnet: int
-AF_ECONET: int
-AF_IPX: int
-AF_IRDA: int
-AF_KEY: int
-AF_LLC: int
-AF_NETBEUI: int
-AF_NETLINK: int
-AF_NETROM: int
-AF_PACKET: int
-AF_PPPOX: int
-AF_RDS: int
-AF_ROSE: int
-AF_ROUTE: int
-AF_SECURITY: int
-AF_SNA: int
-AF_SYSTEM: int
-AF_TIPC: int
-AF_UNSPEC: int
-AF_WANPIPE: int
-AF_X25: int
-AI_ADDRCONFIG: int
-AI_ALL: int
-AI_CANONNAME: int
-AI_DEFAULT: int
-AI_MASK: int
-AI_NUMERICHOST: int
-AI_NUMERICSERV: int
-AI_PASSIVE: int
-AI_V4MAPPED: int
-AI_V4MAPPED_CFG: int
+AF_AAL5: AddressFamily
+AF_APPLETALK: AddressFamily
+AF_ASH: AddressFamily
+AF_ATMPVC: AddressFamily
+AF_ATMSVC: AddressFamily
+AF_AX25: AddressFamily
+AF_BLUETOOTH: AddressFamily
+AF_BRIDGE: AddressFamily
+AF_CAN: AddressFamily
+AF_DECnet: AddressFamily
+AF_ECONET: AddressFamily
+AF_IPX: AddressFamily
+AF_IRDA: AddressFamily
+AF_KEY: AddressFamily
+AF_LLC: AddressFamily
+AF_NETBEUI: AddressFamily
+AF_NETLINK: AddressFamily
+AF_NETROM: AddressFamily
+AF_PACKET: AddressFamily
+AF_PPPOX: AddressFamily
+AF_RDS: AddressFamily
+AF_ROSE: AddressFamily
+AF_ROUTE: AddressFamily
+AF_SECURITY: AddressFamily
+AF_SNA: AddressFamily
+AF_SYSTEM: AddressFamily
+AF_TIPC: AddressFamily
+AF_UNSPEC: AddressFamily
+AF_WANPIPE: AddressFamily
+AF_X25: AddressFamily
+AI_ADDRCONFIG: AddressInfo
+AI_ALL: AddressInfo
+AI_CANONNAME: AddressInfo
+AI_DEFAULT: AddressInfo
+AI_MASK: AddressInfo
+AI_NUMERICHOST: AddressInfo
+AI_NUMERICSERV: AddressInfo
+AI_PASSIVE: AddressInfo
+AI_V4MAPPED: AddressInfo
+AI_V4MAPPED_CFG: AddressInfo
BDADDR_ANY: str
BDADDR_LOCAL: str
BTPROTO_HCI: int
@@ -198,26 +198,26 @@ IP_TRANSPARENT: int
IP_TTL: int
IPX_TYPE: int
LOCAL_PEERCRED: int
-MSG_BCAST: int
-MSG_BTAG: int
-MSG_CMSG_CLOEXEC: int
-MSG_CONFIRM: int
-MSG_CTRUNC: int
-MSG_DONTROUTE: int
-MSG_DONTWAIT: int
-MSG_EOF: int
-MSG_EOR: int
-MSG_ERRQUEUE: int
-MSG_ETAG: int
-MSG_FASTOPEN: int
-MSG_MCAST: int
-MSG_MORE: int
-MSG_NOSIGNAL: int
-MSG_NOTIFICATION: int
-MSG_OOB: int
-MSG_PEEK: int
-MSG_TRUNC: int
-MSG_WAITALL: int
+MSG_BCAST: MsgFlag
+MSG_BTAG: MsgFlag
+MSG_CMSG_CLOEXEC: MsgFlag
+MSG_CONFIRM: MsgFlag
+MSG_CTRUNC: MsgFlag
+MSG_DONTROUTE: MsgFlag
+MSG_DONTWAIT: MsgFlag
+MSG_EOF: MsgFlag
+MSG_EOR: MsgFlag
+MSG_ERRQUEUE: MsgFlag
+MSG_ETAG: MsgFlag
+MSG_FASTOPEN: MsgFlag
+MSG_MCAST: MsgFlag
+MSG_MORE: MsgFlag
+MSG_NOSIGNAL: MsgFlag
+MSG_NOTIFICATION: MsgFlag
+MSG_OOB: MsgFlag
+MSG_PEEK: MsgFlag
+MSG_TRUNC: MsgFlag
+MSG_WAITALL: MsgFlag
NETLINK_ARPD: int
NETLINK_CRYPTO: int
NETLINK_DNRTMSG: int
@@ -368,7 +368,7 @@ if sys.version_info >= (3, 4):
CAN_BCM_RX_STATUS: int
CAN_BCM_RX_TIMEOUT: int
CAN_BCM_RX_CHANGED: int
- AF_LINK: int
+ AF_LINK: AddressFamily
if sys.version_info >= (3, 5):
CAN_RAW_FD_FRAMES: int
@@ -380,7 +380,7 @@ if sys.version_info >= (3, 6):
SO_PASSSEC: int
TCP_USER_TIMEOUT: int
TCP_CONGESTION: int
- AF_ALG: int
+ AF_ALG: AddressFamily
SOL_ALG: int
ALG_SET_KEY: int
ALG_SET_IV: int
@@ -449,6 +449,9 @@ if sys.version_info >= (3, 4):
SOCK_SEQPACKET = ...
SOCK_CLOEXEC = ...
SOCK_NONBLOCK = ...
+else:
+ AddressFamily = int
+ SocketKind = int
if sys.version_info >= (3, 6):
from enum import IntFlag
@@ -471,6 +474,9 @@ if sys.version_info >= (3, 6):
MSG_PEEK = ...
MSG_TRUNC = ...
MSG_WAITALL = ...
+else:
+ AddressInfo = int
+ MsgFlag = int
# ----- exceptions -----
@@ -492,6 +498,7 @@ class timeout(error):
# TODO AF_PACKET and AF_BLUETOOTH address objects
+_CMSG = Tuple[int, int, bytes]
_SelfT = TypeVar('_SelfT', bound=socket)
# ----- classes -----
@@ -543,9 +550,9 @@ class socket:
# return type is an address
def recvfrom(self, bufsize: int, flags: int = ...) -> Any: ...
- def recvfrom_into(self, buffer: bytes, nbytes: int,
+ def recvfrom_into(self, buffer: bytearray, nbytes: int,
flags: int = ...) -> Any: ...
- def recv_into(self, buffer: bytes, nbytes: int,
+ def recv_into(self, buffer: bytearray, nbytes: int,
flags: int = ...) -> Any: ...
def send(self, data: bytes, flags: int = ...) -> int: ...
def sendall(self, data: bytes, flags: int =...) -> None:
@@ -559,6 +566,14 @@ class socket:
def setsockopt(self, level: int, optname: int, value: Union[int, bytes]) -> None: ...
def shutdown(self, how: int) -> None: ...
+ if sys.version_info >= (3, 3):
+ def recvmsg(self, __bufsize: int, __ancbufsize: int = ...,
+ __flags: int = ...) -> Tuple[bytes, List[_CMSG], int, Any]: ...
+ def recvmsg_into(self, __buffers: Iterable[bytearray], __ancbufsize: int = ...,
+ __flags: int = ...) -> Tuple[int, List[_CMSG], int, Any]: ...
+ def sendmsg(self, __buffers: Iterable[bytes], __ancdata: Iterable[_CMSG] = ...,
+ __flags: int = ..., __address: Any = ...) -> int: ...
+
# ----- functions -----
def create_connection(address: Tuple[str, int],
@@ -597,3 +612,11 @@ def inet_pton(address_family: int, ip_string: str) -> bytes: ...
def inet_ntop(address_family: int, packed_ip: bytes) -> str: ...
def getdefaulttimeout() -> Optional[float]: ...
def setdefaulttimeout(timeout: Optional[float]) -> None: ...
+
+if sys.version_info >= (3, 3):
+ def CMSG_LEN(length: int) -> int: ...
+ def CMSG_SPACE(length: int) -> int: ...
+ def sethostname(name: str) -> None: ...
+ def if_nameindex() -> List[Tuple[int, str]]: ...
+ def if_nametoindex(name: str) -> int: ...
+ def if_indextoname(index: int) -> str: ...
diff --git a/typeshed/stdlib/2and3/threading.pyi b/typeshed/stdlib/2and3/threading.pyi
index eb89a27..efd941f 100644
--- a/typeshed/stdlib/2and3/threading.pyi
+++ b/typeshed/stdlib/2and3/threading.pyi
@@ -35,7 +35,7 @@ def setprofile(func: _PF) -> None: ...
def stack_size(size: int = ...) -> int: ...
if sys.version_info >= (3,):
- TIMEOUT_MAX = ... # type: int
+ TIMEOUT_MAX = ... # type: float
class ThreadError(Exception): ...
@@ -86,7 +86,7 @@ class Lock:
exc_val: Optional[Exception],
exc_tb: Optional[TracebackType]) -> bool: ...
if sys.version_info >= (3,):
- def acquire(self, blocking: bool = ..., timeout: int = ...) -> bool: ...
+ def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
else:
def acquire(self, blocking: bool = ...) -> bool: ...
def release(self) -> None: ...
@@ -100,7 +100,7 @@ class _RLock:
exc_val: Optional[Exception],
exc_tb: Optional[TracebackType]) -> bool: ...
if sys.version_info >= (3,):
- def acquire(self, blocking: bool = ..., timeout: int = ...) -> bool: ...
+ def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
else:
def acquire(self, blocking: bool = ...) -> bool: ...
def release(self) -> None: ...
@@ -116,7 +116,7 @@ class Condition:
exc_val: Optional[Exception],
exc_tb: Optional[TracebackType]) -> bool: ...
if sys.version_info >= (3,):
- def acquire(self, blocking: bool = ..., timeout: int = ...) -> bool: ...
+ def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
else:
def acquire(self, blocking: bool = ...) -> bool: ...
def release(self) -> None: ...
@@ -136,7 +136,7 @@ class Semaphore:
exc_val: Optional[Exception],
exc_tb: Optional[TracebackType]) -> bool: ...
if sys.version_info >= (3,):
- def acquire(self, blocking: bool = ..., timeout: int = ...) -> bool: ...
+ def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
else:
def acquire(self, blocking: bool = ...) -> bool: ...
def release(self) -> None: ...
@@ -148,7 +148,7 @@ class BoundedSemaphore:
exc_val: Optional[Exception],
exc_tb: Optional[TracebackType]) -> bool: ...
if sys.version_info >= (3,):
- def acquire(self, blocking: bool = ..., timeout: int = ...) -> bool: ...
+ def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ...
else:
def acquire(self, blocking: bool = ...) -> bool: ...
def release(self) -> None: ...
diff --git a/typeshed/stdlib/2and3/warnings.pyi b/typeshed/stdlib/2and3/warnings.pyi
index 6e8507b..67d2b81 100644
--- a/typeshed/stdlib/2and3/warnings.pyi
+++ b/typeshed/stdlib/2and3/warnings.pyi
@@ -13,7 +13,7 @@ def showwarning(message: str, category: Type[Warning], filename: str,
lineno: int, file: Optional[TextIO] = ...,
line: Optional[str] = ...) -> None: ...
def formatwarning(message: str, category: Type[Warning], filename: str,
- lineno: int, line: Optional[str] = ...) -> None: ...
+ lineno: int, line: Optional[str] = ...) -> str: ...
def filterwarnings(action: str, message: str = ...,
category: Type[Warning] = ..., module: str = ...,
lineno: int = ..., append: bool = ...) -> None: ...
diff --git a/typeshed/stdlib/2and3/xml/etree/ElementTree.pyi b/typeshed/stdlib/2and3/xml/etree/ElementTree.pyi
index 404726e..4831ab0 100644
--- a/typeshed/stdlib/2and3/xml/etree/ElementTree.pyi
+++ b/typeshed/stdlib/2and3/xml/etree/ElementTree.pyi
@@ -113,7 +113,7 @@ else:
def tostringlist(element: Element, encoding: str=..., method: str=...) -> List[_tostring_result_type]: ...
def dump(elem: Element) -> None: ...
def parse(source: _file_or_filename, parser: 'XMLParser'=...) -> ElementTree: ...
-def iterparse(source: _file_or_filename, events: Sequence[str]=..., parser: 'XMLParser'=...) -> Iterator[Tuple[str, Element]]: ...
+def iterparse(source: _file_or_filename, events: Sequence[str]=..., parser: 'XMLParser'=...) -> Iterator[Tuple[str, Any]]: ...
if sys.version_info >= (3, 4):
class XMLPullParser:
diff --git a/typeshed/stdlib/3.3/ipaddress.pyi b/typeshed/stdlib/3.3/ipaddress.pyi
index 7e61fec..d40b81e 100644
--- a/typeshed/stdlib/3.3/ipaddress.pyi
+++ b/typeshed/stdlib/3.3/ipaddress.pyi
@@ -1,201 +1,150 @@
-from typing import Any, Iterable, Iterator, Optional, SupportsInt, Tuple, TypeVar, Union
+import sys
+from typing import (Any, Container, Generic, Iterable, Iterator, Optional,
+ overload, SupportsInt, Tuple, TypeVar, Union)
-IPV4LENGTH = ... # type: int
-IPV6LENGTH = ... # type: int
+# Undocumented length constants
+IPV4LENGTH: int
+IPV6LENGTH: int
-class AddressValueError(ValueError): ...
-class NetmaskValueError(ValueError): ...
+_A = TypeVar("_A", IPv4Address, IPv6Address)
+_N = TypeVar("_N", IPv4Network, IPv6Network)
+_T = TypeVar("_T")
-class _TotalOrderingMixin:
- def __eq__(self, other): ...
- def __ne__(self, other): ...
- def __lt__(self, other): ...
- def __le__(self, other): ...
- def __gt__(self, other): ...
- def __ge__(self, other): ...
+def ip_address(address: object) -> Union[IPv4Address, IPv6Address]: ...
+def ip_network(address: object, strict: bool = ...) -> Union[IPv4Network, IPv6Network]: ...
+def ip_interface(address: object) -> Union[IPv4Interface, IPv6Interface]: ...
-class _IPAddressBase(_TotalOrderingMixin):
+class _IPAddressBase:
+ def __eq__(self, other: Any) -> bool: ...
+ def __ge__(self: _T, other: _T) -> bool: ...
+ def __gt__(self: _T, other: _T) -> bool: ...
+ def __le__(self: _T, other: _T) -> bool: ...
+ def __lt__(self: _T, other: _T) -> bool: ...
+ def __ne__(self, other: Any) -> bool: ...
@property
- def exploded(self): ...
+ def compressed(self) -> str: ...
@property
- def compressed(self): ...
+ def exploded(self) -> str: ...
+ if sys.version_info >= (3, 5):
+ @property
+ def reverse_pointer(self) -> str: ...
@property
- def version(self): ...
+ def version(self) -> int: ...
class _BaseAddress(_IPAddressBase, SupportsInt):
- def __init__(self, address) -> None: ...
+ def __init__(self, address: object) -> None: ...
+ def __add__(self: _T, other: int) -> _T: ...
+ def __hash__(self) -> int: ...
def __int__(self) -> int: ...
- def __eq__(self, other): ...
- def __lt__(self, other): ...
- def __add__(self, other): ...
- def __sub__(self, other): ...
- def __hash__(self): ...
-
-class _BaseNetwork(_IPAddressBase):
- def __init__(self, address) -> None: ...
- def hosts(self): ...
- def __iter__(self): ...
- def __getitem__(self, n): ...
- def __lt__(self, other): ...
- def __eq__(self, other): ...
- def __hash__(self): ...
- def __contains__(self, other): ...
- def overlaps(self, other): ...
- @property
- def broadcast_address(self): ...
- @property
- def hostmask(self): ...
- @property
- def with_prefixlen(self): ...
- @property
- def with_netmask(self): ...
- @property
- def with_hostmask(self): ...
- @property
- def num_addresses(self): ...
+ def __sub__(self: _T, other: int) -> _T: ...
+ if sys.version_info >= (3, 4):
+ @property
+ def is_global(self) -> bool: ...
@property
- def prefixlen(self): ...
- def address_exclude(self, other): ...
- def compare_networks(self, other): ...
- def subnets(self, prefixlen_diff=..., new_prefix=...): ...
- def supernet(self, prefixlen_diff=..., new_prefix=...): ...
- @property
- def is_multicast(self): ...
- @property
- def is_reserved(self): ...
- @property
- def is_link_local(self): ...
- @property
- def is_private(self): ...
- @property
- def is_global(self): ...
- @property
- def is_unspecified(self): ...
- @property
- def is_loopback(self): ...
-
-class _BaseV4:
- def __init__(self, address) -> None: ...
- @property
- def max_prefixlen(self): ...
- @property
- def version(self): ...
-
-class IPv4Address(_BaseV4, _BaseAddress):
- def __init__(self, address: object) -> None: ...
+ def is_link_local(self) -> bool: ...
@property
- def packed(self) -> bytes: ...
+ def is_loopback(self) -> bool: ...
@property
- def is_reserved(self) -> bool: ...
+ def is_multicast(self) -> bool: ...
@property
def is_private(self) -> bool: ...
@property
- def is_multicast(self) -> bool: ...
+ def is_reserved(self) -> bool: ...
@property
def is_unspecified(self) -> bool: ...
@property
- def is_loopback(self) -> bool: ...
- @property
- def is_link_local(self) -> bool: ...
-
-class IPv4Interface(IPv4Address):
- network = ... # type: IPv4Network
- netmask = ... # type: IPv4Address
- hostmask = ... # type: IPv4Address
- def __init__(self, address: object) -> None: ...
- def __eq__(self, other: Any) -> bool: ...
- def __lt__(self, other: Any) -> bool: ...
- def __hash__(self) -> int: ...
- @property
- def ip(self) -> IPv4Address: ...
+ def max_prefixlen(self) -> int: ...
@property
- def with_prefixlen(self) -> str: ...
- @property
- def with_netmask(self) -> str: ...
- @property
- def with_hostmask(self) -> str: ...
+ def packed(self) -> bytes: ...
-class IPv4Network(_BaseV4, _BaseNetwork):
- network_address = ... # type: IPv4Address
- netmask = ... # type: IPv4Address
- hosts = ... # type: Iterator[IPv4Address]
+class _BaseNetwork(_IPAddressBase, Container, Iterable[_A], Generic[_A]):
+ network_address: _A
+ netmask: _A
def __init__(self, address: object, strict: bool = ...) -> None: ...
+ def __contains__(self, other: Any) -> bool: ...
+ def __getitem__(self, n: int) -> _A: ...
+ def __iter__(self) -> Iterator[_A]: ...
+ def address_exclude(self: _T, other: _T) -> Iterator[_T]: ...
@property
- def is_global(self) -> bool: ...
-
-class _BaseV6:
- def __init__(self, address) -> None: ...
+ def broadcast_address(self) -> _A: ...
+ def compare_networks(self: _T, other: _T) -> int: ...
+ def hosts(self) -> Iterator[_A]: ...
@property
- def max_prefixlen(self): ...
+ def is_global(self) -> bool: ...
@property
- def version(self): ...
-
-class IPv6Address(_BaseV6, _BaseAddress):
- def __init__(self, address: object) -> None: ...
+ def is_link_local(self) -> bool: ...
@property
- def packed(self) -> bytes: ...
+ def is_loopback(self) -> bool: ...
@property
def is_multicast(self) -> bool: ...
@property
+ def is_private(self) -> bool: ...
+ @property
def is_reserved(self) -> bool: ...
@property
- def is_link_local(self) -> bool: ...
+ def is_unspecified(self) -> bool: ...
@property
- def is_site_local(self) -> bool: ...
+ def max_prefixlen(self) -> int: ...
@property
- def is_private(self) -> bool: ...
+ def num_addresses(self) -> int: ...
+ def overlaps(self: _T, other: _T) -> bool: ...
@property
- def is_global(self) -> bool: ...
+ def prefixlen(self) -> int: ...
+ def subnets(self: _T, prefixlen_diff: int = ..., new_prefix: Optional[int] = ...) -> Iterator[_T]: ...
+ def supernet(self: _T, prefixlen_diff: int = ..., new_prefix: Optional[int] = ...) -> Iterator[_T]: ...
@property
- def is_unspecified(self) -> bool: ...
+ def with_hostmask(self) -> str: ...
@property
- def is_loopback(self) -> bool: ...
+ def with_netmask(self) -> str: ...
@property
- def ipv4_mapped(self) -> Optional[IPv4Address]: ...
+ def with_prefixlen(self) -> str: ...
+
+class _BaseInterface(_BaseAddress, Generic[_A, _N]):
+ hostmask: _A
+ netmask: _A
+ network: _N
@property
- def teredo(self) -> Optional[Tuple[IPv4Address, IPv4Address]]: ...
+ def ip(self) -> _A: ...
@property
- def sixtofour(self) -> Optional[IPv4Address]: ...
-
-class IPv6Interface(IPv6Address):
- network = ... # type: IPv6Network
- netmask = ... # type: IPv6Address
- hostmask = ... # type: IPv6Address
- def __init__(self, address: object) -> None: ...
- def __eq__(self, other: Any) -> bool: ...
- def __lt__(self, other: Any) -> bool: ...
- def __hash__(self) -> int: ...
+ def with_hostmask(self) -> str: ...
@property
- def ip(self) -> IPv6Address: ...
+ def with_netmask(self) -> str: ...
@property
def with_prefixlen(self) -> str: ...
+
+class IPv4Address(_BaseAddress): ...
+class IPv4Network(_BaseNetwork[IPv4Address]): ...
+class IPv4Interface(IPv4Address, _BaseInterface[IPv4Address, IPv4Network]): ...
+
+class IPv6Address(_BaseAddress):
@property
- def with_netmask(self) -> str: ...
+ def ipv4_mapped(self) -> Optional[IPv4Address]: ...
@property
- def with_hostmask(self) -> str: ...
+ def is_site_local(self) -> bool: ...
@property
- def is_unspecified(self) -> bool: ...
+ def sixtofour(self) -> Optional[IPv4Address]: ...
@property
- def is_loopback(self) -> bool: ...
+ def teredo(self) -> Optional[Tuple[IPv4Address, IPv4Address]]: ...
-class IPv6Network(_BaseV6, _BaseNetwork):
- network_address = ... # type: IPv6Address
- netmask = ... # type: IPv6Address
- def __init__(self, address: object, strict: bool = ...) -> None: ...
- def hosts(self) -> Iterator[IPv6Address]: ...
+class IPv6Network(_BaseNetwork[IPv6Address]):
@property
def is_site_local(self) -> bool: ...
-_ip_address = Union[IPv4Address, IPv6Address]
-_ip_network = Union[IPv4Network, IPv6Network]
-_ip_interface = Union[IPv4Interface, IPv6Interface]
-_AnyIPAddress = TypeVar("_AnyIPAddress", IPv4Address, IPv6Address)
+class IPv6Interface(IPv6Address, _BaseInterface[IPv6Address, IPv6Network]): ...
-def ip_address(address: object) -> _ip_address: ...
-def ip_network(address: object, strict: bool = ...) -> _ip_network: ...
-def ip_interface(address: object) -> _ip_interface: ...
def v4_int_to_packed(address: int) -> bytes: ...
def v6_int_to_packed(address: int) -> bytes: ...
-def summarize_address_range(first: _AnyIPAddress, _AnyIPAddress) -> Iterator[_AnyIPAddress]: ...
-def collapse_addresses(addresses: Iterable[_AnyIPAddress]) -> Iterator[_AnyIPAddress]: ...
-def get_mixed_type_key(obj): ...
+@overload
+def summarize_address_range(first: IPv4Address, last: IPv4Address) -> Iterator[IPv4Network]: ...
+@overload
+def summarize_address_range(first: IPv6Address, last: IPv6Address) -> Iterator[IPv6Network]: ...
+def collapse_addresses(addresses: Iterable[_N]) -> Iterator[_N]: ...
+@overload
+def get_mixed_type_key(obj: _A) -> Tuple[int, _A]: ...
+@overload
+def get_mixed_type_key(obj: IPv4Network) -> Tuple[int, IPv4Address, IPv4Address]: ...
+@overload
+def get_mixed_type_key(obj: IPv6Network) -> Tuple[int, IPv6Address, IPv6Address]: ...
+
+class AddressValueError(ValueError): ...
+class NetmaskValueError(ValueError): ...
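A minimal usage sketch, assuming the reworked generic ipaddress stubs above; the addresses and variable names are illustrative only, not part of the commit:

    from ipaddress import IPv4Network, IPv6Network

    net4 = IPv4Network("192.0.2.0/29")
    for host in net4.hosts():                      # host: IPv4Address under the _BaseNetwork[_A] stub
        print(host.is_private)

    net6 = IPv6Network("2001:db8::/126")
    halves = list(net6.subnets(prefixlen_diff=1))  # List[IPv6Network]
    print(halves[0].num_addresses)
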
diff --git a/typeshed/stdlib/3.4/asyncio/futures.pyi b/typeshed/stdlib/3.4/asyncio/futures.pyi
index 00086f4..28dbefb 100644
--- a/typeshed/stdlib/3.4/asyncio/futures.pyi
+++ b/typeshed/stdlib/3.4/asyncio/futures.pyi
@@ -11,6 +11,7 @@ from concurrent.futures import (
__all__: List[str]
_T = TypeVar('_T')
+_S = TypeVar('_S', bound=Future)
class InvalidStateError(Error): ...
@@ -40,8 +41,8 @@ class Future(Iterable[_T], Awaitable[_T], Generic[_T]):
def done(self) -> bool: ...
def result(self) -> _T: ...
def exception(self) -> BaseException: ...
- def add_done_callback(self, fn: Callable[[Future[_T]], Any]) -> None: ...
- def remove_done_callback(self, fn: Callable[[Future[_T]], Any]) -> int: ...
+ def add_done_callback(self: _S, fn: Callable[[_S], Any]) -> None: ...
+ def remove_done_callback(self: _S, fn: Callable[[_S], Any]) -> int: ...
def set_result(self, result: _T) -> None: ...
def set_exception(self, exception: Union[type, BaseException]) -> None: ...
def _copy_state(self, other: Any) -> None: ...
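A short sketch, assuming the self-typed add_done_callback stub above, of the callback pattern it is meant to check; the names are illustrative, not from the commit:

    import asyncio

    def report(fut: "asyncio.Future[int]") -> None:
        # With add_done_callback(self: _S, ...), fut is checked as Future[int] here.
        print(fut.result() + 1)

    loop = asyncio.get_event_loop()
    fut = loop.create_future()  # type: asyncio.Future[int]
    fut.add_done_callback(report)
    fut.set_result(41)
    loop.run_until_complete(asyncio.sleep(0))  # give the callback a chance to run
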
diff --git a/typeshed/stdlib/3/array.pyi b/typeshed/stdlib/3/array.pyi
index 77ed052..c740556 100644
--- a/typeshed/stdlib/3/array.pyi
+++ b/typeshed/stdlib/3/array.pyi
@@ -2,48 +2,58 @@
# Based on http://docs.python.org/3.2/library/array.html
-from typing import Any, Iterable, Tuple, List, Iterator, BinaryIO, overload
+from typing import (Any, BinaryIO, Generic, Iterable, Iterator, List, MutableSequence,
+ overload, Text, Tuple, TypeVar, Union)
+
+_T = TypeVar('_T', int, float, Text)
typecodes = ... # type: str
-class array:
+class array(MutableSequence[_T], Generic[_T]):
typecode = ... # type: str
itemsize = ... # type: int
def __init__(self, typecode: str,
- initializer: Iterable[Any] = ...) -> None: ...
- def append(self, x: Any) -> None: ...
+ __initializer: Union[bytes, Iterable[_T]] = ...) -> None: ...
+ def append(self, x: _T) -> None: ...
def buffer_info(self) -> Tuple[int, int]: ...
def byteswap(self) -> None: ...
def count(self, x: Any) -> int: ...
- def extend(self, iterable: Iterable[Any]) -> None: ...
+ def extend(self, iterable: Iterable[_T]) -> None: ...
def frombytes(self, s: bytes) -> None: ...
def fromfile(self, f: BinaryIO, n: int) -> None: ...
- def fromlist(self, list: List[Any]) -> None: ...
+ def fromlist(self, list: List[_T]) -> None: ...
def fromstring(self, s: bytes) -> None: ...
def fromunicode(self, s: str) -> None: ...
- def index(self, x: Any) -> int: ...
- def insert(self, i: int, x: Any) -> None: ...
- def pop(self, i: int = ...) -> Any: ...
+ def index(self, x: _T) -> int: ... # type: ignore # Overrides Sequence
+ def insert(self, i: int, x: _T) -> None: ...
+ def pop(self, i: int = ...) -> _T: ...
def remove(self, x: Any) -> None: ...
def reverse(self) -> None: ...
def tobytes(self) -> bytes: ...
def tofile(self, f: BinaryIO) -> None: ...
- def tolist(self) -> List[Any]: ...
+ def tolist(self) -> List[_T]: ...
def tostring(self) -> bytes: ...
def tounicode(self) -> str: ...
def __len__(self) -> int: ...
- def __iter__(self) -> Iterator[Any]: ...
- def __str__(self) -> str: ...
- def __hash__(self) -> int: ...
@overload
- def __getitem__(self, i: int) -> Any: ...
+ def __getitem__(self, i: int) -> _T: ...
+ @overload
+ def __getitem__(self, s: slice) -> array[_T]: ...
+
+ @overload # type: ignore # Overrides MutableSequence
+ def __setitem__(self, i: int, o: _T) -> None: ...
@overload
- def __getitem__(self, s: slice) -> 'array': ...
+ def __setitem__(self, s: slice, o: array[_T]) -> None: ...
- def __setitem__(self, i: int, o: Any) -> None: ...
- def __delitem__(self, i: int) -> None: ...
- def __add__(self, x: 'array') -> 'array': ...
- def __mul__(self, n: int) -> 'array': ...
- def __contains__(self, o: object) -> bool: ...
+ def __delitem__(self, i: Union[int, slice]) -> None: ...
+ def __add__(self, x: array[_T]) -> array[_T]: ...
+ def __ge__(self, other: array[_T]) -> bool: ...
+ def __gt__(self, other: array[_T]) -> bool: ...
+ def __iadd__(self, x: array[_T]) -> array[_T]: ... # type: ignore # Overrides MutableSequence
+ def __imul__(self, n: int) -> array[_T]: ...
+ def __le__(self, other: array[_T]) -> bool: ...
+ def __lt__(self, other: array[_T]) -> bool: ...
+ def __mul__(self, n: int) -> array[_T]: ...
+ def __rmul__(self, n: int) -> array[_T]: ...
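A brief sketch, assuming the now-generic array stub above; names and values are illustrative:

    from array import array

    ints = array('i', [1, 2, 3])   # inferred as array[int] from the initializer
    ints.append(4)                 # ok; ints.append("x") would now be rejected
    combined = ints + ints         # still array[int]
    print(combined.tolist())       # List[int] at type-check time
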
diff --git a/typeshed/stdlib/3/builtins.pyi b/typeshed/stdlib/3/builtins.pyi
index 8d0dfb8..7780fcc 100644
--- a/typeshed/stdlib/3/builtins.pyi
+++ b/typeshed/stdlib/3/builtins.pyi
@@ -861,7 +861,10 @@ def setattr(object: Any, name: str, value: Any) -> None: ...
def sorted(iterable: Iterable[_T], *,
key: Optional[Callable[[_T], Any]] = None,
reverse: bool = False) -> List[_T]: ...
-def sum(iterable: Iterable[_T], start: _T = ...) -> _T: ...
+@overload
+def sum(iterable: Iterable[_T]) -> Union[_T, int]: ...
+@overload
+def sum(iterable: Iterable[_T], start: _S) -> Union[_T, _S]: ...
def vars(object: Any = ...) -> Dict[str, Any]: ...
@overload
def zip(iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ...
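Roughly how the new sum() overloads above behave, assuming the stub as shown; values are illustrative:

    floats = [1.5, 2.5]
    total = sum(floats)         # Union[float, int]: an empty iterable yields the int 0
    shifted = sum(floats, 0.0)  # float: the start value's type is folded in
    print(total, shifted)
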
diff --git a/typeshed/stdlib/3/collections/__init__.pyi b/typeshed/stdlib/3/collections/__init__.pyi
index 5da5c47..d80cb2f 100644
--- a/typeshed/stdlib/3/collections/__init__.pyi
+++ b/typeshed/stdlib/3/collections/__init__.pyi
@@ -278,6 +278,7 @@ class OrderedDict(Dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]):
def popitem(self, last: bool = ...) -> Tuple[_KT, _VT]: ...
def move_to_end(self, key: _KT, last: bool = ...) -> None: ...
def __reversed__(self) -> Iterator[_KT]: ...
+ def __copy__(self) -> OrderedDict[_KT, _VT]: ...
class defaultdict(Dict[_KT, _VT], Generic[_KT, _VT]):
default_factory = ... # type: Callable[[], _VT]
diff --git a/typeshed/stdlib/3/datetime.pyi b/typeshed/stdlib/3/datetime.pyi
index 8b1b2e4..6ff3ada 100644
--- a/typeshed/stdlib/3/datetime.pyi
+++ b/typeshed/stdlib/3/datetime.pyi
@@ -1,11 +1,10 @@
import sys
+from time import struct_time
from typing import Optional, SupportsAbs, Tuple, overload
MINYEAR = 0
MAXYEAR = 0
-_TimeTuple = Tuple[int, int, int, int, int, int, int, int, int]
-
class tzinfo:
def tzname(self, dt: Optional[datetime]) -> str: ...
def utcoffset(self, dt: Optional[datetime]) -> Optional[timedelta]: ...
@@ -150,7 +149,7 @@ class datetime:
max = ... # type: datetime
resolution = ... # type: timedelta
- def __init__(self, year: int, month: int = ..., day: int = ..., hour: int = ...,
+ def __init__(self, year: int, month: int, day: int, hour: int = ...,
minute: int = ..., second: int = ..., microsecond: int = ...,
tzinfo: Optional[tzinfo] = ...) -> None: ...
@@ -188,9 +187,9 @@ class datetime:
def strftime(self, fmt: str) -> str: ...
def __format__(self, fmt: str) -> str: ...
def toordinal(self) -> int: ...
- def timetuple(self) -> _TimeTuple: ...
+ def timetuple(self) -> struct_time: ...
def timestamp(self) -> float: ...
- def utctimetuple(self) -> _TimeTuple: ...
+ def utctimetuple(self) -> struct_time: ...
def date(self) -> _date: ...
def time(self) -> _time: ...
def timetz(self) -> _time: ...
diff --git a/typeshed/stdlib/3/itertools.pyi b/typeshed/stdlib/3/itertools.pyi
index 1f79cd2..6421163 100644
--- a/typeshed/stdlib/3/itertools.pyi
+++ b/typeshed/stdlib/3/itertools.pyi
@@ -7,9 +7,10 @@ from typing import (Iterator, TypeVar, Iterable, overload, Any, Callable, Tuple,
_T = TypeVar('_T')
_S = TypeVar('_S')
+_N = TypeVar('_N', int, float)
-def count(start: int = ...,
- step: int = ...) -> Iterator[int]: ... # more general types?
+def count(start: _N = ...,
+ step: _N = ...) -> Iterator[_N]: ... # more general types?
def cycle(iterable: Iterable[_T]) -> Iterator[_T]: ...
@overload
diff --git a/typeshed/stdlib/3/os/__init__.pyi b/typeshed/stdlib/3/os/__init__.pyi
index c9e8e7a..059e3dd 100644
--- a/typeshed/stdlib/3/os/__init__.pyi
+++ b/typeshed/stdlib/3/os/__init__.pyi
@@ -5,8 +5,8 @@ from builtins import OSError as error
from io import TextIOWrapper as _TextIOWrapper
import sys
from typing import (
- Mapping, MutableMapping, Dict, List, Any, Tuple, Iterator, overload, Union, AnyStr,
- Optional, Generic, Set, Callable, Text, Sequence, IO, NamedTuple, TypeVar
+ Mapping, MutableMapping, Dict, List, Any, Tuple, IO, Iterable, Iterator, overload, Union, AnyStr,
+ Optional, Generic, Set, Callable, Text, Sequence, NamedTuple, TypeVar
)
from . import path as path
from mypy_extensions import NoReturn
@@ -24,9 +24,66 @@ if sys.version_info >= (3, 3):
supports_effective_ids: Set[Callable[..., Any]]
supports_follow_symlinks: Set[Callable[..., Any]]
+ PRIO_PROCESS: int # Unix only
+ PRIO_PGRP: int # Unix only
+ PRIO_USER: int # Unix only
+
+ F_LOCK: int # Unix only
+ F_TLOCK: int # Unix only
+ F_ULOCK: int # Unix only
+ F_TEST: int # Unix only
+
+ POSIX_FADV_NORMAL: int # Unix only
+ POSIX_FADV_SEQUENTIAL: int # Unix only
+ POSIX_FADV_RANDOM: int # Unix only
+ POSIX_FADV_NOREUSE: int # Unix only
+ POSIX_FADV_WILLNEED: int # Unix only
+ POSIX_FADV_DONTNEED: int # Unix only
+
+ SF_NODISKIO: int # Unix only
+ SF_MNOWAIT: int # Unix only
+ SF_SYNC: int # Unix only
+
+ XATTR_SIZE_MAX: int # Linux only
+ XATTR_CREATE: int # Linux only
+ XATTR_REPLACE: int # Linux only
+
+ P_PID: int # Unix only
+ P_PGID: int # Unix only
+ P_ALL: int # Unix only
+
+ WEXITED: int # Unix only
+ WSTOPPED: int # Unix only
+ WNOWAIT: int # Unix only
+
+ CLD_EXITED: int # Unix only
+ CLD_DUMPED: int # Unix only
+ CLD_TRAPPED: int # Unix only
+ CLD_CONTINUED: int # Unix only
+
+ SCHED_OTHER: int # some flavors of Unix
+ SCHED_BATCH: int # some flavors of Unix
+ SCHED_IDLE: int # some flavors of Unix
+ SCHED_SPORADIC: int # some flavors of Unix
+ SCHED_FIFO: int # some flavors of Unix
+ SCHED_RR: int # some flavors of Unix
+ SCHED_RESET_ON_FORK: int # some flavors of Unix
+
+ RTLD_LAZY: int
+ RTLD_NOW: int
+ RTLD_GLOBAL: int
+ RTLD_LOCAL: int
+ RTLD_NODELETE: int
+ RTLD_NOLOAD: int
+ RTLD_DEEPBIND: int
+
+
SEEK_SET: int
SEEK_CUR: int
SEEK_END: int
+if sys.version_info >= (3, 3):
+ SEEK_DATA: int # some flavors of Unix
+ SEEK_HOLE: int # some flavors of Unix
O_RDONLY: int
O_WRONLY: int
@@ -41,6 +98,8 @@ O_SYNC: int # Unix only
O_NDELAY: int # Unix only
O_NONBLOCK: int # Unix only
O_NOCTTY: int # Unix only
+if sys.version_info >= (3, 3):
+ O_CLOEXEC: int # Unix only
O_SHLOCK: int # Unix only
O_EXLOCK: int # Unix only
O_BINARY: int # Windows only
@@ -55,6 +114,9 @@ O_DIRECT: int # Gnu extension if in C library
O_DIRECTORY: int # Gnu extension if in C library
O_NOFOLLOW: int # Gnu extension if in C library
O_NOATIME: int # Gnu extension if in C library
+if sys.version_info >= (3, 4):
+ O_PATH: int # Gnu extension if in C library
+ O_TMPFILE: int # Gnu extension if in C library
O_LARGEFILE: int # Gnu extension if in C library
curdir: str
@@ -121,6 +183,10 @@ if sys.version_info >= (3, 6):
from builtins import _PathLike as PathLike # See comment in builtins
_PathType = path._PathType
+if sys.version_info >= (3, 3):
+ _FdOrPathType = Union[int, _PathType]
+else:
+ _FdOrPathType = _PathType
if sys.version_info >= (3, 6):
class DirEntry(PathLike[AnyStr]):
@@ -230,6 +296,8 @@ def ctermid() -> str: ... # Unix only
def getegid() -> int: ... # Unix only
def geteuid() -> int: ... # Unix only
def getgid() -> int: ... # Unix only
+if sys.version_info >= (3, 3):
+ def getgrouplist(user: str, gid: int) -> List[int]: ... # Unix only
def getgroups() -> List[int]: ... # Unix only, behaves differently on Mac
def initgroups(username: str, gid: int) -> None: ... # Unix only
def getlogin() -> str: ...
@@ -237,6 +305,9 @@ def getpgid(pid: int) -> int: ... # Unix only
def getpgrp() -> int: ... # Unix only
def getpid() -> int: ...
def getppid() -> int: ...
+if sys.version_info >= (3, 3):
+ def getpriority(which: int, who: int) -> int: ... # Unix only
+ def setpriority(which: int, who: int, priority: int) -> None: ... # Unix only
def getresuid() -> Tuple[int, int, int]: ... # Unix only
def getresgid() -> Tuple[int, int, int]: ... # Unix only
def getuid() -> int: ... # Unix only
@@ -285,63 +356,136 @@ def fstat(fd: int) -> stat_result: ...
def fstatvfs(fd: int) -> statvfs_result: ... # Unix only
def fsync(fd: int) -> None: ...
def ftruncate(fd: int, length: int) -> None: ... # Unix only
+if sys.version_info >= (3, 5):
+ def get_blocking(fd: int) -> bool: ... # Unix only
+ def set_blocking(fd: int, blocking: bool) -> None: ... # Unix only
def isatty(fd: int) -> bool: ... # Unix only
+if sys.version_info >= (3, 3):
+ def lockf(__fd: int, __cmd: int, __length: int) -> None: ... # Unix only
def lseek(fd: int, pos: int, how: int) -> int: ...
-def open(file: _PathType, flags: int, mode: int = ...) -> int: ...
+if sys.version_info >= (3, 3):
+ def open(file: _PathType, flags: int, mode: int = ..., *, dir_fd: Optional[int] = ...) -> int: ...
+else:
+ def open(file: _PathType, flags: int, mode: int = ...) -> int: ...
def openpty() -> Tuple[int, int]: ... # some flavors of Unix
def pipe() -> Tuple[int, int]: ...
+if sys.version_info >= (3, 3):
+ def pipe2(flags: int) -> Tuple[int, int]: ... # some flavors of Unix
+ def posix_fallocate(fd: int, offset: int, length: int) -> None: ... # Unix only
+ def posix_fadvise(fd: int, offset: int, length: int, advice: int) -> None: ... # Unix only
+ def pread(fd: int, buffersize: int, offset: int) -> bytes: ... # Unix only
+ def pwrite(fd: int, string: bytes, offset: int) -> int: ... # Unix only
def read(fd: int, n: int) -> bytes: ...
+if sys.version_info >= (3, 3):
+ @overload
+ def sendfile(__out_fd: int, __in_fd: int, offset: Optional[int], count: int) -> int: ... # Unix only
+ @overload
+ def sendfile(__out_fd: int, __in_fd: int, offset: int, count: int,
+ headers: Sequence[bytes] = ..., trailers: Sequence[bytes] = ..., flags: int = ...) -> int: ... # FreeBSD and Mac OS X only
+ def readv(fd: int, buffers: Sequence[bytearray]) -> int: ... # Unix only
+ def writev(fd: int, buffers: Sequence[bytes]) -> int: ... # Unix only
+
+ terminal_size = NamedTuple('terminal_size', [('columns', int), ('lines', int)])
+ def get_terminal_size(fd: int = ...) -> terminal_size: ...
+
+if sys.version_info >= (3, 4):
+ def get_inheritable(fd: int) -> bool: ...
+ def set_inheritable(fd: int, inheritable: bool) -> None: ...
+
def tcgetpgrp(fd: int) -> int: ... # Unix only
def tcsetpgrp(fd: int, pg: int) -> None: ... # Unix only
def ttyname(fd: int) -> str: ... # Unix only
def write(fd: int, string: bytes) -> int: ...
-def access(path: _PathType, mode: int) -> bool: ...
-def chdir(path: _PathType) -> None: ...
+if sys.version_info >= (3, 3):
+ def access(path: _FdOrPathType, mode: int, *, dir_fd: Optional[int] = ...,
+ effective_ids: bool = ..., follow_symlinks: bool = ...) -> bool: ...
+else:
+ def access(path: _PathType, mode: int) -> bool: ...
+def chdir(path: _FdOrPathType) -> None: ...
def fchdir(fd: int) -> None: ...
def getcwd() -> str: ...
def getcwdb() -> bytes: ...
-def chflags(path: _PathType, flags: int) -> None: ... # Unix only
+if sys.version_info >= (3, 3):
+ def chflags(path: _PathType, flags: int, follow_symlinks: bool = ...) -> None: ... # some flavors of Unix
+ def chmod(path: _FdOrPathType, mode: int, *, dir_fd: Optional[int] = ..., follow_symlinks: bool = ...) -> None: ...
+ def chown(path: _FdOrPathType, uid: int, gid: int, *, dir_fd: Optional[int] = ..., follow_symlinks: bool = ...) -> None: ... # Unix only
+else:
+ def chflags(path: _PathType, flags: int) -> None: ... # Some flavors of Unix
+ def chmod(path: _PathType, mode: int) -> None: ...
+ def chown(path: _PathType, uid: int, gid: int) -> None: ... # Unix only
def chroot(path: _PathType) -> None: ... # Unix only
-def chmod(path: _PathType, mode: int) -> None: ...
-def chown(path: _PathType, uid: int, gid: int) -> None: ... # Unix only
def lchflags(path: _PathType, flags: int) -> None: ... # Unix only
def lchmod(path: _PathType, mode: int) -> None: ... # Unix only
def lchown(path: _PathType, uid: int, gid: int) -> None: ... # Unix only
-def link(src: _PathType, link_name: _PathType) -> None: ...
+if sys.version_info >= (3, 3):
+ def link(src: _PathType, link_name: _PathType, *, src_dir_fd: Optional[int] = ...,
+ dst_dir_fd: Optional[int] = ..., follow_symlinks: bool = ...) -> None: ...
+else:
+ def link(src: _PathType, link_name: _PathType) -> None: ...
-@overload
-def listdir(path: str = ...) -> List[str]: ...
-@overload
-def listdir(path: bytes) -> List[bytes]: ...
+if sys.version_info >= (3, 3):
+ @overload
+ def listdir(path: Optional[str] = ...) -> List[str]: ...
+ @overload
+ def listdir(path: bytes) -> List[bytes]: ...
+ @overload
+ def listdir(path: int) -> List[str]: ...
+else:
+ @overload
+ def listdir(path: Optional[str] = ...) -> List[str]: ...
+ @overload
+ def listdir(path: bytes) -> List[bytes]: ...
-def lstat(path: _PathType) -> stat_result: ...
-def mkfifo(path: _PathType, mode: int = ...) -> None: ... # Unix only
-def mknod(filename: _PathType, mode: int = ..., device: int = ...) -> None: ...
+if sys.version_info >= (3, 3):
+ def lstat(path: _PathType, *, dir_fd: Optional[int] = ...) -> stat_result: ...
+ def mkdir(path: _PathType, mode: int = ..., *, dir_fd: Optional[int] = ...) -> None: ...
+ def mkfifo(path: _PathType, mode: int = ..., *, dir_fd: Optional[int] = ...) -> None: ... # Unix only
+else:
+ def lstat(path: _PathType) -> stat_result: ...
+ def mkdir(path: _PathType, mode: int = ...) -> None: ...
+ def mkfifo(path: _PathType, mode: int = ...) -> None: ... # Unix only
+if sys.version_info >= (3, 4):
+ def makedirs(name: _PathType, mode: int = ..., exist_ok: bool = ...) -> None: ...
+else:
+ def makedirs(path: _PathType, mode: int = ..., exist_ok: bool = ...) -> None: ...
+if sys.version_info >= (3, 4):
+ def mknod(path: _PathType, mode: int = ..., device: int = ...,
+ *, dir_fd: Optional[int] = ...) -> None: ...
+elif sys.version_info >= (3, 3):
+ def mknod(filename: _PathType, mode: int = ..., device: int = ...,
+ *, dir_fd: Optional[int] = ...) -> None: ...
+else:
+ def mknod(filename: _PathType, mode: int = ..., device: int = ...) -> None: ...
def major(device: int) -> int: ...
def minor(device: int) -> int: ...
def makedev(major: int, minor: int) -> int: ...
-def mkdir(path: _PathType, mode: int = ...) -> None: ...
-if sys.version_info >= (3, 4):
- def makedirs(name: _PathType, mode: int = ...,
- exist_ok: bool = ...) -> None: ...
-else:
- def makedirs(path: _PathType, mode: int = ...,
- exist_ok: bool = ...) -> None: ...
-def pathconf(path: _PathType, name: Union[str, int]) -> int: ... # Unix only
+def pathconf(path: _FdOrPathType, name: Union[str, int]) -> int: ... # Unix only
if sys.version_info >= (3, 6):
- def readlink(path: Union[AnyStr, PathLike[AnyStr]]) -> AnyStr: ...
+ def readlink(path: Union[AnyStr, PathLike[AnyStr]], *, dir_fd: Optional[int] = ...) -> AnyStr: ...
+elif sys.version_info >= (3, 3):
+ def readlink(path: AnyStr, *, dir_fd: Optional[int] = ...) -> AnyStr: ...
else:
def readlink(path: AnyStr) -> AnyStr: ...
-def remove(path: _PathType) -> None: ...
+if sys.version_info >= (3, 3):
+ def remove(path: _PathType, *, dir_fd: Optional[int] = ...) -> None: ...
+else:
+ def remove(path: _PathType) -> None: ...
if sys.version_info >= (3, 4):
def removedirs(name: _PathType) -> None: ...
else:
def removedirs(path: _PathType) -> None: ...
-def rename(src: _PathType, dst: _PathType) -> None: ...
+if sys.version_info >= (3, 3):
+ def rename(src: _PathType, dst: _PathType, *,
+ src_dir_fd: Optional[int] = ..., dst_dir_fd: Optional[int] = ...) -> None: ...
+else:
+ def rename(src: _PathType, dst: _PathType) -> None: ...
def renames(old: _PathType, new: _PathType) -> None: ...
if sys.version_info >= (3, 3):
- def replace(src: _PathType, dst: _PathType) -> None: ...
-def rmdir(path: _PathType) -> None: ...
+ def replace(src: _PathType, dst: _PathType, *,
+ src_dir_fd: Optional[int] = ..., dst_dir_fd: Optional[int] = ...) -> None: ...
+ def rmdir(path: _PathType, *, dir_fd: Optional[int] = ...) -> None: ...
+else:
+ def rmdir(path: _PathType) -> None: ...
if sys.version_info >= (3, 6):
@overload
def scandir() -> Iterator[DirEntry[str]]: ...
@@ -352,18 +496,30 @@ elif sys.version_info >= (3, 5):
def scandir() -> Iterator[DirEntry[str]]: ...
@overload
def scandir(path: AnyStr) -> Iterator[DirEntry[AnyStr]]: ...
-def stat(path: _PathType) -> stat_result: ...
-def stat_float_times(newvalue: Union[bool, None] = ...) -> bool: ...
-def statvfs(path: _PathType) -> statvfs_result: ... # Unix only
-def symlink(source: _PathType, link_name: _PathType,
- target_is_directory: bool = ...) -> None:
- ... # final argument in Windows only
-def unlink(path: _PathType) -> None: ...
-if sys.version_info >= (3, 0):
- def utime(path: _PathType, times: Optional[Union[Tuple[int, int], Tuple[float, float]]] = ...,
- ns: Optional[Tuple[int, int]] = ..., dir_fd: Optional[int] = ...,
+if sys.version_info >= (3, 3):
+ def stat(path: _FdOrPathType, *, dir_fd: Optional[int] = ...,
+ follow_symlinks: bool = ...) -> stat_result: ...
+else:
+ def stat(path: _PathType) -> stat_result: ...
+@overload
+def stat_float_times() -> bool: ...
+@overload
+def stat_float_times(__newvalue: bool) -> None: ...
+def statvfs(path: _FdOrPathType) -> statvfs_result: ... # Unix only
+if sys.version_info >= (3, 3):
+ def symlink(source: _PathType, link_name: _PathType,
+ target_is_directory: bool = ..., *, dir_fd: Optional[int] = ...) -> None: ...
+ def sync() -> None: ... # Unix only
+ def truncate(path: _FdOrPathType, length: int) -> None: ... # Unix only up to version 3.4
+ def unlink(path: _PathType, *, dir_fd: Optional[int] = ...) -> None: ...
+ def utime(path: _FdOrPathType, times: Optional[Union[Tuple[int, int], Tuple[float, float]]] = ..., *,
+ ns: Tuple[int, int] = ..., dir_fd: Optional[int] = ...,
follow_symlinks: bool = ...) -> None: ...
else:
+ def symlink(source: _PathType, link_name: _PathType,
+ target_is_directory: bool = ...) -> None:
+ ... # final argument in Windows only
+ def unlink(path: _PathType) -> None: ...
def utime(path: _PathType, times: Optional[Tuple[float, float]]) -> None: ...
if sys.version_info >= (3, 6):
@@ -375,6 +531,15 @@ else:
def walk(top: AnyStr, topdown: bool = ..., onerror: Optional[Callable[[OSError], Any]] = ...,
followlinks: bool = ...) -> Iterator[Tuple[AnyStr, List[AnyStr],
List[AnyStr]]]: ...
+if sys.version_info >= (3, 3):
+ def fwalk(top: _PathType = ..., topdown: bool = ...,
+ onerror: Optional[Callable] = ..., *, follow_symlinks: bool = ...,
+ dir_fd: Optional[int] = ...) -> Iterator[Tuple[str, List[str], List[str], int]]: ... # Unix only
+ def getxattr(path: _FdOrPathType, attribute: _PathType, *, follow_symlinks: bool = ...) -> bytes: ... # Linux only
+ def listxattr(path: _FdOrPathType, *, follow_symlinks: bool = ...) -> List[str]: ... # Linux only
+ def removexattr(path: _FdOrPathType, attribute: _PathType, *, follow_symlinks: bool = ...) -> None: ... # Linux only
+ def setxattr(path: _FdOrPathType, attribute: _PathType, value: bytes, flags: int = ..., *,
+ follow_symlinks: bool = ...) -> None: ... # Linux only
def abort() -> NoReturn: ...
# These are defined as execl(file, *args) but the first *arg is mandatory.
@@ -389,7 +554,7 @@ def execlpe(file: _PathType, __arg0: Union[bytes, Text], *args: Any) -> NoReturn
# The implementation enforces tuple or list so we can't use Sequence.
_ExecVArgs = Union[Tuple[Union[bytes, Text], ...], List[bytes], List[Text], List[Union[bytes, Text]]]
def execv(path: _PathType, args: _ExecVArgs) -> None: ...
-def execve(path: _PathType, args: _ExecVArgs, env: Mapping[str, str]) -> None: ...
+def execve(path: _FdOrPathType, args: _ExecVArgs, env: Mapping[str, str]) -> None: ...
def execvp(file: _PathType, args: _ExecVArgs) -> None: ...
def execvpe(file: _PathType, args: _ExecVArgs, env: Mapping[str, str]) -> None: ...
@@ -402,16 +567,16 @@ def nice(increment: int) -> int: ... # Unix only
def plock(op: int) -> None: ... # Unix only ???op is int?
if sys.version_info >= (3, 0):
- class popen(_TextIOWrapper):
- # TODO 'b' modes or bytes command not accepted?
- def __init__(self, command: str, mode: str = ...,
- bufsize: int = ...) -> None: ...
- def close(self) -> Any: ... # may return int
+ class _wrap_close(_TextIOWrapper):
+ def close(self) -> Optional[int]: ... # type: ignore
+ def popen(command: str, mode: str = ..., buffering: int = ...) -> _wrap_close: ...
else:
- def popen(command: str, *args, **kwargs) -> Optional[IO[Any]]: ...
- def popen2(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any]]: ...
- def popen3(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any], IO[Any]]: ...
- def popen4(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any]]: ...
+ class _wrap_close(IO[Text]):
+ def close(self) -> Optional[int]: ... # type: ignore
+ def popen(__cmd: Text, __mode: Text = ..., __bufsize: int = ...) -> _wrap_close: ...
+ def popen2(__cmd: Text, __mode: Text = ..., __bufsize: int = ...) -> Tuple[IO[Text], IO[Text]]: ...
+ def popen3(__cmd: Text, __mode: Text = ..., __bufsize: int = ...) -> Tuple[IO[Text], IO[Text], IO[Text]]: ...
+ def popen4(__cmd: Text, __mode: Text = ..., __bufsize: int = ...) -> Tuple[IO[Text], IO[Text]]: ...
def spawnl(mode: int, path: _PathType, arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> int: ...
def spawnle(mode: int, path: _PathType, arg0: Union[bytes, Text],
@@ -429,8 +594,15 @@ def spawnvpe(mode: int, file: _PathType, args: List[Union[bytes, Text]],
... # Unix only
def startfile(path: _PathType, operation: Optional[str] = ...) -> None: ... # Windows only
def system(command: _PathType) -> int: ...
-def times() -> Tuple[float, float, float, float, float]: ...
+if sys.version_info >= (3, 3):
+ from posix import times_result
+ def times() -> times_result: ...
+else:
+ def times() -> Tuple[float, float, float, float, float]: ...
def wait() -> Tuple[int, int]: ... # Unix only
+if sys.version_info >= (3, 3):
+ from posix import waitid_result
+ def waitid(idtype: int, ident: int, options: int) -> waitid_result: ... # Unix only
def waitpid(pid: int, options: int) -> Tuple[int, int]: ...
def wait3(options: int) -> Tuple[int, int, Any]: ... # Unix only
def wait4(pid: int, options: int) -> Tuple[int, int, Any]: ... # Unix only
@@ -442,39 +614,27 @@ def WIFEXITED(status: int) -> bool: ... # Unix only
def WEXITSTATUS(status: int) -> int: ... # Unix only
def WSTOPSIG(status: int) -> int: ... # Unix only
def WTERMSIG(status: int) -> int: ... # Unix only
-def confstr(name: Union[str, int]) -> Optional[str]: ... # Unix only
-def getloadavg() -> Tuple[float, float, float]: ... # Unix only
-def sysconf(name: Union[str, int]) -> int: ... # Unix only
-def urandom(n: int) -> bytes: ...
-
-if sys.version_info >= (3, 0):
- def sched_getaffinity(id: int) -> Set[int]: ...
-if sys.version_info >= (3, 3):
- class waitresult:
- si_pid: int
- def waitid(idtype: int, id: int, options: int) -> waitresult: ...
-
-if sys.version_info < (3, 0):
- def tmpfile() -> IO[Any]: ...
- def tmpnam() -> str: ...
- def tempnam(dir: str = ..., prefix: str = ...) -> str: ...
-
-P_ALL: int
-WEXITED: int
-WNOWAIT: int
if sys.version_info >= (3, 3):
- def sync() -> None: ... # Unix only
-
- def truncate(path: Union[_PathType, int], length: int) -> None: ... # Unix only up to version 3.4
-
- def fwalk(top: AnyStr = ..., topdown: bool = ...,
- onerror: Callable = ..., *, follow_symlinks: bool = ...,
- dir_fd: int = ...) -> Iterator[Tuple[AnyStr, List[AnyStr],
- List[AnyStr], int]]: ... # Unix only
-
- terminal_size = NamedTuple('terminal_size', [('columns', int), ('lines', int)])
- def get_terminal_size(fd: int = ...) -> terminal_size: ...
+ from posix import sched_param
+ def sched_get_priority_min(policy: int) -> int: ... # some flavors of Unix
+ def sched_get_priority_max(policy: int) -> int: ... # some flavors of Unix
+ def sched_setscheduler(pid: int, policy: int, param: sched_param) -> None: ... # some flavors of Unix
+ def sched_getscheduler(pid: int) -> int: ... # some flavors of Unix
+ def sched_setparam(pid: int, param: sched_param) -> None: ... # some flavors of Unix
+ def sched_getparam(pid: int) -> sched_param: ... # some flavors of Unix
+ def sched_rr_get_interval(pid: int) -> float: ... # some flavors of Unix
+ def sched_yield() -> None: ... # some flavors of Unix
+ def sched_setaffinity(pid: int, mask: Iterable[int]) -> None: ... # some flavors of Unix
+ def sched_getaffinity(pid: int) -> Set[int]: ... # some flavors of Unix
+def confstr(name: Union[str, int]) -> Optional[str]: ... # Unix only
if sys.version_info >= (3, 4):
def cpu_count() -> Optional[int]: ...
+def getloadavg() -> Tuple[float, float, float]: ... # Unix only
+def sysconf(name: Union[str, int]) -> int: ... # Unix only
+if sys.version_info >= (3, 6):
+ def getrandom(size: int, flags: int = ...) -> bytes: ...
+ def urandom(size: int) -> bytes: ...
+else:
+ def urandom(n: int) -> bytes: ...
diff --git a/typeshed/stdlib/3/pipes.pyi b/typeshed/stdlib/3/pipes.pyi
index 62163d6..00fc994 100644
--- a/typeshed/stdlib/3/pipes.pyi
+++ b/typeshed/stdlib/3/pipes.pyi
@@ -11,8 +11,8 @@ class Template:
def debug(self, flag: bool) -> None: ...
def append(self, cmd: str, kind: str) -> None: ...
def prepend(self, cmd: str, kind: str) -> None: ...
- def open(self, file: str, rw: str) -> os.popen: ...
- def copy(self, file: str, rw: str) -> os.popen: ...
+ def open(self, file: str, rw: str) -> os._wrap_close: ...
+ def copy(self, file: str, rw: str) -> os._wrap_close: ...
# Not documented, but widely used.
# Documented as shlex.quote since 3.3.
diff --git a/typeshed/stdlib/3/posix.pyi b/typeshed/stdlib/3/posix.pyi
index 09e2e4f..01e5826 100644
--- a/typeshed/stdlib/3/posix.pyi
+++ b/typeshed/stdlib/3/posix.pyi
@@ -10,3 +10,23 @@ from typing import NamedTuple
if sys.version_info >= (3, 3):
uname_result = NamedTuple('uname_result', [('sysname', str), ('nodename', str),
('release', str), ('version', str), ('machine', str)])
+
+ times_result = NamedTuple('times_result', [
+ ('user', float),
+ ('system', float),
+ ('children_user', float),
+ ('children_system', float),
+ ('elapsed', float),
+ ])
+
+ waitid_result = NamedTuple('waitid_result', [
+ ('si_pid', int),
+ ('si_uid', int),
+ ('si_signo', int),
+ ('si_status', int),
+ ('si_code', int),
+ ])
+
+ sched_param = NamedTuple('sched_priority', [
+ ('sched_priority', int),
+ ])
diff --git a/typeshed/stdlib/3/textwrap.pyi b/typeshed/stdlib/3/textwrap.pyi
index 319297b..f61f975 100644
--- a/typeshed/stdlib/3/textwrap.pyi
+++ b/typeshed/stdlib/3/textwrap.pyi
@@ -1,64 +1,55 @@
-# Better textwrap stubs hand-written by o11c.
-# https://docs.python.org/3/library/textwrap.html
-from typing import (
- Callable,
- List,
-)
+from typing import Callable, List, Optional, Dict, Pattern
class TextWrapper:
- def __init__(
- self,
- width: int = ...,
- *,
- initial_indent: str = ...,
- subsequent_indent: str = ...,
- expand_tabs: bool = ...,
- tabsize: int = ...,
- replace_whitespace: bool = ...,
- fix_sentence_endings: bool = ...,
- break_long_words: bool = ...,
- break_on_hyphens: bool = ...,
- drop_whitespace: bool = ...,
- max_lines: int = ...,
- placeholder: str = ...
- ) -> None:
- self.width = width
- self.initial_indent = initial_indent
- self.subsequent_indent = subsequent_indent
- self.expand_tabs = expand_tabs
- self.tabsize = tabsize
- self.replace_whitespace = replace_whitespace
- self.fix_sentence_endings = fix_sentence_endings
- self.break_long_words = break_long_words
- self.break_on_hyphens = break_on_hyphens
- self.drop_whitespace = drop_whitespace
- self.max_lines = max_lines
- self.placeholder = placeholder
+ width: int = ...
+ initial_indent: str = ...
+ subsequent_indent: str = ...
+ expand_tabs: bool = ...
+ replace_whitespace: bool = ...
+ fix_sentence_endings: bool = ...
+ drop_whitespace: bool = ...
+ break_long_words: bool = ...
+ break_on_hyphens: bool = ...
+ tabsize: int = ...
+ max_lines: Optional[int] = ...
+ placeholder: str = ...
- # Private methods *are* part of the documented API for subclasses.
- def _munge_whitespace(self, text: str) -> str:
- ...
-
- def _split(self, text: str) -> List[str]:
- ...
-
- def _fix_sentence_endings(self, chunks: List[str]) -> None:
- ...
-
- def _handle_long_word(self, reversed_chunks: List[str], cur_line: List[str], cur_len: int, width: int) -> None:
- ...
-
- def _wrap_chunks(self, chunks: List[str]) -> List[str]:
- ...
+ # Attributes not present in documentation
+ sentence_end_re: Pattern[str] = ...
+ wordsep_re: Pattern[str] = ...
+ wordsep_simple_re: Pattern[str] = ...
+ whitespace_trans: str = ...
+ unicode_whitespace_trans: Dict[int, int] = ...
+ uspace: int = ...
+ x: int = ...
- def _split_chunks(self, text: str) -> List[str]:
+ def __init__(
+ self,
+ width: int = ...,
+ initial_indent: str = ...,
+ subsequent_indent: str = ...,
+ expand_tabs: bool = ...,
+ replace_whitespace: bool = ...,
+ fix_sentence_endings: bool = ...,
+ break_long_words: bool = ...,
+ drop_whitespace: bool = ...,
+ break_on_hyphens: bool = ...,
+ tabsize: int = ...,
+ *,
+ max_lines: Optional[int] = ...,
+ placeholder: str = ...) -> None:
...
- def wrap(self, text: str) -> List[str]:
- ...
+ # Private methods *are* part of the documented API for subclasses.
+ def _munge_whitespace(self, text: str) -> str: ...
+ def _split(self, text: str) -> List[str]: ...
+ def _fix_sentence_endings(self, chunks: List[str]) -> None: ...
+ def _handle_long_word(self, reversed_chunks: List[str], cur_line: List[str], cur_len: int, width: int) -> None: ...
+ def _wrap_chunks(self, chunks: List[str]) -> List[str]: ...
+ def _split_chunks(self, text: str) -> List[str]: ...
- def fill(self, text: str) -> str:
- ...
+ def wrap(self, text: str) -> List[str]: ...
+ def fill(self, text: str) -> str: ...
def wrap(
@@ -94,7 +85,7 @@ def fill(
drop_whitespace: bool = ...,
max_lines: int = ...,
placeholder: str = ...
-):
+) -> str:
...
def shorten(
@@ -112,7 +103,7 @@ def shorten(
drop_whitespace: bool = ...,
# Omit `max_lines: int = None`, it is forced to 1 here.
placeholder: str = ...
-):
+) -> str:
...
def dedent(text: str) -> str:
diff --git a/typeshed/third_party/2/dateutil/tz/__init__.pyi b/typeshed/third_party/2/dateutil/tz/__init__.pyi
index 4c8a6d2..f618e63 100644
--- a/typeshed/third_party/2/dateutil/tz/__init__.pyi
+++ b/typeshed/third_party/2/dateutil/tz/__init__.pyi
@@ -1 +1,12 @@
-from .tz import tzutc, tzoffset, tzlocal, tzfile, tzrange, tzstr, tzical, gettz, datetime_exists, datetime_ambiguous
+from .tz import (
+ tzutc as tzutz,
+ tzoffset as tzoffset,
+ tzlocal as tzlocal,
+ tzfile as tzfile,
+ tzrange as tzrange,
+ tzstr as tzstr,
+ tzical as tzical,
+ gettz as gettz,
+ datetime_exists as datetime_exists,
+ datetime_ambiguous as datetime_ambiguous,
+)
diff --git a/typeshed/third_party/2/pycurl.pyi b/typeshed/third_party/2/pycurl.pyi
index 85f2b13..4272f28 100644
--- a/typeshed/third_party/2/pycurl.pyi
+++ b/typeshed/third_party/2/pycurl.pyi
@@ -1,12 +1,13 @@
# TODO(MichalPokorny): more precise types
-from typing import Any, Tuple, Optional
+from typing import Any, Tuple
-GLOBAL_SSL = ... # type: int
-GLOBAL_WIN32 = ... # type: int
+GLOBAL_ACK_EINTR = ... # type: int
GLOBAL_ALL = ... # type: int
-GLOBAL_NOTHING = ... # type: int
GLOBAL_DEFAULT = ... # type: int
+GLOBAL_NOTHING = ... # type: int
+GLOBAL_SSL = ... # type: int
+GLOBAL_WIN32 = ... # type: int
def global_init(option: int) -> None: ...
def global_cleanup() -> None: ...
@@ -39,20 +40,26 @@ class CurlMulti(object):
def perform(self) -> Tuple[Any, int]: ...
def fdset(self) -> tuple: ...
def select(self, timeout: float = ...) -> int: ...
- def info_read(self, max_objects: int) -> tuple: ...
+ def info_read(self, max_objects: int = ...) -> tuple: ...
class CurlShare(object):
def close(self) -> None: ...
def setopt(self, option: int, value: Any) -> Any: ...
+ACCEPTTIMEOUT_MS = ... # type: int
+ACCEPT_ENCODING = ... # type: int
ADDRESS_SCOPE = ... # type: int
APPCONNECT_TIME = ... # type: int
+APPEND = ... # type: int
AUTOREFERER = ... # type: int
BUFFERSIZE = ... # type: int
CAINFO = ... # type: int
CAPATH = ... # type: int
+CLOSESOCKETFUNCTION = ... # type: int
+COMPILE_DATE = ... # type: str
COMPILE_LIBCURL_VERSION_NUM = ... # type: int
COMPILE_PY_VERSION_HEX = ... # type: int
+CONDITION_UNMET = ... # type: int
CONNECTTIMEOUT = ... # type: int
CONNECTTIMEOUT_MS = ... # type: int
CONNECT_ONLY = ... # type: int
@@ -64,6 +71,7 @@ COOKIE = ... # type: int
COOKIEFILE = ... # type: int
COOKIEJAR = ... # type: int
COOKIELIST = ... # type: int
+COOKIESESSION = ... # type: int
COPYPOSTFIELDS = ... # type: int
CRLF = ... # type: int
CRLFILE = ... # type: int
@@ -72,19 +80,26 @@ CSELECT_IN = ... # type: int
CSELECT_OUT = ... # type: int
CURL_HTTP_VERSION_1_0 = ... # type: int
CURL_HTTP_VERSION_1_1 = ... # type: int
+CURL_HTTP_VERSION_2 = ... # type: int
+CURL_HTTP_VERSION_2_0 = ... # type: int
CURL_HTTP_VERSION_LAST = ... # type: int
CURL_HTTP_VERSION_NONE = ... # type: int
CUSTOMREQUEST = ... # type: int
DEBUGFUNCTION = ... # type: int
+DIRLISTONLY = ... # type: int
DNS_CACHE_TIMEOUT = ... # type: int
+DNS_SERVERS = ... # type: int
DNS_USE_GLOBAL_CACHE = ... # type: int
EFFECTIVE_URL = ... # type: int
EGDSOCKET = ... # type: int
ENCODING = ... # type: int
+EXPECT_100_TIMEOUT_MS = ... # type: int
FAILONERROR = ... # type: int
FILE = ... # type: int
FOLLOWLOCATION = ... # type: int
FORBID_REUSE = ... # type: int
+FORM_BUFFER = ... # type: int
+FORM_BUFFERPTR = ... # type: int
FORM_CONTENTS = ... # type: int
FORM_CONTENTTYPE = ... # type: int
FORM_FILE = ... # type: int
@@ -116,9 +131,17 @@ FTP_SSL = ... # type: int
FTP_SSL_CCC = ... # type: int
FTP_USE_EPRT = ... # type: int
FTP_USE_EPSV = ... # type: int
+FTP_USE_PRET = ... # type: int
+GSSAPI_DELEGATION = ... # type: int
+GSSAPI_DELEGATION_FLAG = ... # type: int
+GSSAPI_DELEGATION_NONE = ... # type: int
+GSSAPI_DELEGATION_POLICY_FLAG = ... # type: int
HEADER = ... # type: int
HEADERFUNCTION = ... # type: int
+HEADEROPT = ... # type: int
+HEADER_SEPARATE = ... # type: int
HEADER_SIZE = ... # type: int
+HEADER_UNIFIED = ... # type: int
HTTP200ALIASES = ... # type: int
HTTPAUTH = ... # type: int
HTTPAUTH_ANY = ... # type: int
@@ -126,9 +149,13 @@ HTTPAUTH_ANYSAFE = ... # type: int
HTTPAUTH_AVAIL = ... # type: int
HTTPAUTH_BASIC = ... # type: int
HTTPAUTH_DIGEST = ... # type: int
+HTTPAUTH_DIGEST_IE = ... # type: int
HTTPAUTH_GSSNEGOTIATE = ... # type: int
+HTTPAUTH_NEGOTIATE = ... # type: int
HTTPAUTH_NONE = ... # type: int
HTTPAUTH_NTLM = ... # type: int
+HTTPAUTH_NTLM_WB = ... # type: int
+HTTPAUTH_ONLY = ... # type: int
HTTPGET = ... # type: int
HTTPHEADER = ... # type: int
HTTPPOST = ... # type: int
@@ -149,8 +176,13 @@ INFOTYPE_HEADER_OUT = ... # type: int
INFOTYPE_SSL_DATA_IN = ... # type: int
INFOTYPE_SSL_DATA_OUT = ... # type: int
INFOTYPE_TEXT = ... # type: int
+INFO_CERTINFO = ... # type: int
INFO_COOKIELIST = ... # type: int
INFO_FILETIME = ... # type: int
+INFO_RTSP_CLIENT_CSEQ = ... # type: int
+INFO_RTSP_CSEQ_RECV = ... # type: int
+INFO_RTSP_SERVER_CSEQ = ... # type: int
+INFO_RTSP_SESSION_ID = ... # type: int
INTERFACE = ... # type: int
IOCMD_NOP = ... # type: int
IOCMD_RESTARTREAD = ... # type: int
@@ -164,20 +196,51 @@ IPRESOLVE_V4 = ... # type: int
IPRESOLVE_V6 = ... # type: int
IPRESOLVE_WHATEVER = ... # type: int
ISSUERCERT = ... # type: int
+KEYPASSWD = ... # type: int
+KHMATCH_MISMATCH = ... # type: int
+KHMATCH_MISSING = ... # type: int
+KHMATCH_OK = ... # type: int
+KHSTAT_DEFER = ... # type: int
+KHSTAT_FINE = ... # type: int
+KHSTAT_FINE_ADD_TO_FILE = ... # type: int
+KHSTAT_REJECT = ... # type: int
+KHTYPE_DSS = ... # type: int
+KHTYPE_RSA = ... # type: int
+KHTYPE_RSA1 = ... # type: int
+KHTYPE_UNKNOWN = ... # type: int
KRB4LEVEL = ... # type: int
+KRBLEVEL = ... # type: int
LASTSOCKET = ... # type: int
LOCALPORT = ... # type: int
LOCALPORTRANGE = ... # type: int
+LOCAL_IP = ... # type: int
+LOCAL_PORT = ... # type: int
LOCK_DATA_COOKIE = ... # type: int
LOCK_DATA_DNS = ... # type: int
+LOCK_DATA_SSL_SESSION = ... # type: int
+LOGIN_OPTIONS = ... # type: int
LOW_SPEED_LIMIT = ... # type: int
LOW_SPEED_TIME = ... # type: int
+MAIL_AUTH = ... # type: int
+MAIL_FROM = ... # type: int
+MAIL_RCPT = ... # type: int
MAXCONNECTS = ... # type: int
MAXFILESIZE = ... # type: int
MAXFILESIZE_LARGE = ... # type: int
MAXREDIRS = ... # type: int
MAX_RECV_SPEED_LARGE = ... # type: int
MAX_SEND_SPEED_LARGE = ... # type: int
+M_CHUNK_LENGTH_PENALTY_SIZE = ... # type: int
+M_CONTENT_LENGTH_PENALTY_SIZE = ... # type: int
+M_MAXCONNECTS = ... # type: int
+M_MAX_HOST_CONNECTIONS = ... # type: int
+M_MAX_PIPELINE_LENGTH = ... # type: int
+M_MAX_TOTAL_CONNECTIONS = ... # type: int
+M_PIPELINING = ... # type: int
+M_PIPELINING_SERVER_BL = ... # type: int
+M_PIPELINING_SITE_BL = ... # type: int
+M_SOCKETFUNCTION = ... # type: int
+M_TIMERFUNCTION = ... # type: int
NAMELOOKUP_TIME = ... # type: int
NETRC = ... # type: int
NETRC_FILE = ... # type: int
@@ -188,11 +251,24 @@ NEW_DIRECTORY_PERMS = ... # type: int
NEW_FILE_PERMS = ... # type: int
NOBODY = ... # type: int
NOPROGRESS = ... # type: int
+NOPROXY = ... # type: int
NOSIGNAL = ... # type: int
NUM_CONNECTS = ... # type: int
OPENSOCKETFUNCTION = ... # type: int
+OPT_CERTINFO = ... # type: int
OPT_FILETIME = ... # type: int
OS_ERRNO = ... # type: int
+PASSWORD = ... # type: int
+PATH_AS_IS = ... # type: int
+PAUSE_ALL = ... # type: int
+PAUSE_CONT = ... # type: int
+PAUSE_RECV = ... # type: int
+PAUSE_SEND = ... # type: int
+PINNEDPUBLICKEY = ... # type: int
+PIPEWAIT = ... # type: int
+PIPE_HTTP1 = ... # type: int
+PIPE_MULTIPLEX = ... # type: int
+PIPE_NOTHING = ... # type: int
POLL_IN = ... # type: int
POLL_INOUT = ... # type: int
POLL_NONE = ... # type: int
@@ -205,19 +281,58 @@ POSTFIELDS = ... # type: int
POSTFIELDSIZE = ... # type: int
POSTFIELDSIZE_LARGE = ... # type: int
POSTQUOTE = ... # type: int
+POSTREDIR = ... # type: int
PREQUOTE = ... # type: int
PRETRANSFER_TIME = ... # type: int
PRIMARY_IP = ... # type: int
+PRIMARY_PORT = ... # type: int
PROGRESSFUNCTION = ... # type: int
+PROTOCOLS = ... # type: int
+PROTO_ALL = ... # type: int
+PROTO_DICT = ... # type: int
+PROTO_FILE = ... # type: int
+PROTO_FTP = ... # type: int
+PROTO_FTPS = ... # type: int
+PROTO_GOPHER = ... # type: int
+PROTO_HTTP = ... # type: int
+PROTO_HTTPS = ... # type: int
+PROTO_IMAP = ... # type: int
+PROTO_IMAPS = ... # type: int
+PROTO_LDAP = ... # type: int
+PROTO_LDAPS = ... # type: int
+PROTO_POP3 = ... # type: int
+PROTO_POP3S = ... # type: int
+PROTO_RTMP = ... # type: int
+PROTO_RTMPE = ... # type: int
+PROTO_RTMPS = ... # type: int
+PROTO_RTMPT = ... # type: int
+PROTO_RTMPTE = ... # type: int
+PROTO_RTMPTS = ... # type: int
+PROTO_RTSP = ... # type: int
+PROTO_SCP = ... # type: int
+PROTO_SFTP = ... # type: int
+PROTO_SMB = ... # type: int
+PROTO_SMBS = ... # type: int
+PROTO_SMTP = ... # type: int
+PROTO_SMTPS = ... # type: int
+PROTO_TELNET = ... # type: int
+PROTO_TFTP = ... # type: int
PROXY = ... # type: int
PROXYAUTH = ... # type: int
PROXYAUTH_AVAIL = ... # type: int
+PROXYHEADER = ... # type: int
+PROXYPASSWORD = ... # type: int
PROXYPORT = ... # type: int
PROXYTYPE = ... # type: int
PROXYTYPE_HTTP = ... # type: int
+PROXYTYPE_HTTP_1_0 = ... # type: int
PROXYTYPE_SOCKS4 = ... # type: int
+PROXYTYPE_SOCKS4A = ... # type: int
PROXYTYPE_SOCKS5 = ... # type: int
+PROXYTYPE_SOCKS5_HOSTNAME = ... # type: int
+PROXYUSERNAME = ... # type: int
PROXYUSERPWD = ... # type: int
+PROXY_SERVICE_NAME = ... # type: int
PROXY_TRANSFER_MODE = ... # type: int
PUT = ... # type: int
QUOTE = ... # type: int
@@ -226,20 +341,41 @@ RANGE = ... # type: int
READDATA = ... # type: int
READFUNCTION = ... # type: int
READFUNC_ABORT = ... # type: int
+READFUNC_PAUSE = ... # type: int
REDIRECT_COUNT = ... # type: int
REDIRECT_TIME = ... # type: int
REDIRECT_URL = ... # type: int
+REDIR_POST_301 = ... # type: int
+REDIR_POST_302 = ... # type: int
+REDIR_POST_303 = ... # type: int
+REDIR_POST_ALL = ... # type: int
+REDIR_PROTOCOLS = ... # type: int
REFERER = ... # type: int
REQUEST_SIZE = ... # type: int
+RESOLVE = ... # type: int
RESPONSE_CODE = ... # type: int
RESUME_FROM = ... # type: int
RESUME_FROM_LARGE = ... # type: int
+SASL_IR = ... # type: int
+SEEKFUNCTION = ... # type: int
+SEEKFUNC_CANTSEEK = ... # type: int
+SEEKFUNC_FAIL = ... # type: int
+SEEKFUNC_OK = ... # type: int
+SERVICE_NAME = ... # type: int
SHARE = ... # type: int
SH_SHARE = ... # type: int
SH_UNSHARE = ... # type: int
SIZE_DOWNLOAD = ... # type: int
SIZE_UPLOAD = ... # type: int
SOCKET_TIMEOUT = ... # type: int
+SOCKOPTFUNCTION = ... # type: int
+SOCKOPT_ALREADY_CONNECTED = ... # type: int
+SOCKOPT_ERROR = ... # type: int
+SOCKOPT_OK = ... # type: int
+SOCKS5_GSSAPI_NEC = ... # type: int
+SOCKS5_GSSAPI_SERVICE = ... # type: int
+SOCKTYPE_ACCEPT = ... # type: int
+SOCKTYPE_IPCXN = ... # type: int
SPEED_DOWNLOAD = ... # type: int
SPEED_UPLOAD = ... # type: int
SSH_AUTH_ANY = ... # type: int
@@ -251,6 +387,8 @@ SSH_AUTH_PASSWORD = ... # type: int
SSH_AUTH_PUBLICKEY = ... # type: int
SSH_AUTH_TYPES = ... # type: int
SSH_HOST_PUBLIC_KEY_MD5 = ... # type: int
+SSH_KEYFUNCTION = ... # type: int
+SSH_KNOWNHOSTS = ... # type: int
SSH_PRIVATE_KEYFILE = ... # type: int
SSH_PUBLIC_KEYFILE = ... # type: int
SSLCERT = ... # type: int
@@ -261,20 +399,34 @@ SSLENGINE_DEFAULT = ... # type: int
SSLKEY = ... # type: int
SSLKEYPASSWD = ... # type: int
SSLKEYTYPE = ... # type: int
+SSLOPT_ALLOW_BEAST = ... # type: int
SSLVERSION = ... # type: int
SSLVERSION_DEFAULT = ... # type: int
SSLVERSION_SSLv2 = ... # type: int
SSLVERSION_SSLv3 = ... # type: int
SSLVERSION_TLSv1 = ... # type: int
+SSLVERSION_TLSv1_0 = ... # type: int
+SSLVERSION_TLSv1_1 = ... # type: int
+SSLVERSION_TLSv1_2 = ... # type: int
SSL_CIPHER_LIST = ... # type: int
+SSL_ENABLE_ALPN = ... # type: int
+SSL_ENABLE_NPN = ... # type: int
SSL_ENGINES = ... # type: int
+SSL_FALSESTART = ... # type: int
+SSL_OPTIONS = ... # type: int
SSL_SESSIONID_CACHE = ... # type: int
SSL_VERIFYHOST = ... # type: int
SSL_VERIFYPEER = ... # type: int
SSL_VERIFYRESULT = ... # type: int
+SSL_VERIFYSTATUS = ... # type: int
STARTTRANSFER_TIME = ... # type: int
STDERR = ... # type: int
+TCP_KEEPALIVE = ... # type: int
+TCP_KEEPIDLE = ... # type: int
+TCP_KEEPINTVL = ... # type: int
TCP_NODELAY = ... # type: int
+TELNETOPTIONS = ... # type: int
+TFTP_BLKSIZE = ... # type: int
TIMECONDITION = ... # type: int
TIMECONDITION_IFMODSINCE = ... # type: int
TIMECONDITION_IFUNMODSINCE = ... # type: int
@@ -283,23 +435,63 @@ TIMECONDITION_NONE = ... # type: int
TIMEOUT = ... # type: int
TIMEOUT_MS = ... # type: int
TIMEVALUE = ... # type: int
+TLSAUTH_PASSWORD = ... # type: int
+TLSAUTH_TYPE = ... # type: int
+TLSAUTH_USERNAME = ... # type: int
TOTAL_TIME = ... # type: int
TRANSFERTEXT = ... # type: int
+TRANSFER_ENCODING = ... # type: int
+UNIX_SOCKET_PATH = ... # type: int
UNRESTRICTED_AUTH = ... # type: int
UPLOAD = ... # type: int
URL = ... # type: int
USERAGENT = ... # type: int
+USERNAME = ... # type: int
USERPWD = ... # type: int
+USESSL_ALL = ... # type: int
+USESSL_CONTROL = ... # type: int
+USESSL_NONE = ... # type: int
+USESSL_TRY = ... # type: int
+USE_SSL = ... # type: int
VERBOSE = ... # type: int
+VERSION_ASYNCHDNS = ... # type: int
+VERSION_CONV = ... # type: int
+VERSION_CURLDEBUG = ... # type: int
+VERSION_DEBUG = ... # type: int
+VERSION_GSSAPI = ... # type: int
+VERSION_GSSNEGOTIATE = ... # type: int
+VERSION_HTTP2 = ... # type: int
+VERSION_IDN = ... # type: int
+VERSION_IPV6 = ... # type: int
+VERSION_KERBEROS4 = ... # type: int
+VERSION_KERBEROS5 = ... # type: int
+VERSION_LARGEFILE = ... # type: int
+VERSION_LIBZ = ... # type: int
+VERSION_NTLM = ... # type: int
+VERSION_NTLM_WB = ... # type: int
+VERSION_SPNEGO = ... # type: int
+VERSION_SSL = ... # type: int
+VERSION_SSPI = ... # type: int
+VERSION_TLSAUTH_SRP = ... # type: int
+VERSION_UNIX_SOCKETS = ... # type: int
+WILDCARDMATCH = ... # type: int
WRITEDATA = ... # type: int
WRITEFUNCTION = ... # type: int
+WRITEFUNC_PAUSE = ... # type: int
WRITEHEADER = ... # type: int
+XFERINFOFUNCTION = ... # type: int
+XOAUTH2_BEARER = ... # type: int
E_ABORTED_BY_CALLBACK = ... # type: int
+E_AGAIN = ... # type: int
+E_ALREADY_COMPLETE = ... # type: int
+E_BAD_CALLING_ORDER = ... # type: int
E_BAD_CONTENT_ENCODING = ... # type: int
E_BAD_DOWNLOAD_RESUME = ... # type: int
E_BAD_FUNCTION_ARGUMENT = ... # type: int
+E_BAD_PASSWORD_ENTERED = ... # type: int
E_CALL_MULTI_PERFORM = ... # type: int
+E_CHUNK_FAILED = ... # type: int
E_CONV_FAILED = ... # type: int
E_CONV_REQD = ... # type: int
E_COULDNT_CONNECT = ... # type: int
@@ -308,18 +500,26 @@ E_COULDNT_RESOLVE_PROXY = ... # type: int
E_FAILED_INIT = ... # type: int
E_FILESIZE_EXCEEDED = ... # type: int
E_FILE_COULDNT_READ_FILE = ... # type: int
+E_FTP_ACCEPT_FAILED = ... # type: int
+E_FTP_ACCEPT_TIMEOUT = ... # type: int
E_FTP_ACCESS_DENIED = ... # type: int
+E_FTP_BAD_DOWNLOAD_RESUME = ... # type: int
+E_FTP_BAD_FILE_LIST = ... # type: int
E_FTP_CANT_GET_HOST = ... # type: int
E_FTP_CANT_RECONNECT = ... # type: int
E_FTP_COULDNT_GET_SIZE = ... # type: int
E_FTP_COULDNT_RETR_FILE = ... # type: int
E_FTP_COULDNT_SET_ASCII = ... # type: int
E_FTP_COULDNT_SET_BINARY = ... # type: int
+E_FTP_COULDNT_SET_TYPE = ... # type: int
E_FTP_COULDNT_STOR_FILE = ... # type: int
E_FTP_COULDNT_USE_REST = ... # type: int
+E_FTP_PARTIAL_FILE = ... # type: int
E_FTP_PORT_FAILED = ... # type: int
+E_FTP_PRET_FAILED = ... # type: int
E_FTP_QUOTE_ERROR = ... # type: int
E_FTP_SSL_FAILED = ... # type: int
+E_FTP_USER_PASSWORD_INCORRECT = ... # type: int
E_FTP_WEIRD_227_FORMAT = ... # type: int
E_FTP_WEIRD_PASS_REPLY = ... # type: int
E_FTP_WEIRD_PASV_REPLY = ... # type: int
@@ -328,6 +528,9 @@ E_FTP_WEIRD_USER_REPLY = ... # type: int
E_FTP_WRITE_ERROR = ... # type: int
E_FUNCTION_NOT_FOUND = ... # type: int
E_GOT_NOTHING = ... # type: int
+E_HTTP2 = ... # type: int
+E_HTTP_NOT_FOUND = ... # type: int
+E_HTTP_PORT_FAILED = ... # type: int
E_HTTP_POST_ERROR = ... # type: int
E_HTTP_RANGE_ERROR = ... # type: int
E_HTTP_RETURNED_ERROR = ... # type: int
@@ -337,18 +540,35 @@ E_LDAP_INVALID_URL = ... # type: int
E_LDAP_SEARCH_FAILED = ... # type: int
E_LIBRARY_NOT_FOUND = ... # type: int
E_LOGIN_DENIED = ... # type: int
+E_MALFORMAT_USER = ... # type: int
+E_MULTI_ADDED_ALREADY = ... # type: int
E_MULTI_BAD_EASY_HANDLE = ... # type: int
E_MULTI_BAD_HANDLE = ... # type: int
+E_MULTI_BAD_SOCKET = ... # type: int
+E_MULTI_CALL_MULTI_PERFORM = ... # type: int
+E_MULTI_CALL_MULTI_SOCKET = ... # type: int
E_MULTI_INTERNAL_ERROR = ... # type: int
E_MULTI_OK = ... # type: int
E_MULTI_OUT_OF_MEMORY = ... # type: int
+E_MULTI_UNKNOWN_OPTION = ... # type: int
+E_NOT_BUILT_IN = ... # type: int
+E_NO_CONNECTION_AVAILABLE = ... # type: int
E_OK = ... # type: int
+E_OPERATION_TIMEDOUT = ... # type: int
E_OPERATION_TIMEOUTED = ... # type: int
E_OUT_OF_MEMORY = ... # type: int
E_PARTIAL_FILE = ... # type: int
+E_PEER_FAILED_VERIFICATION = ... # type: int
+E_QUOTE_ERROR = ... # type: int
+E_RANGE_ERROR = ... # type: int
E_READ_ERROR = ... # type: int
E_RECV_ERROR = ... # type: int
+E_REMOTE_ACCESS_DENIED = ... # type: int
+E_REMOTE_DISK_FULL = ... # type: int
+E_REMOTE_FILE_EXISTS = ... # type: int
E_REMOTE_FILE_NOT_FOUND = ... # type: int
+E_RTSP_CSEQ_ERROR = ... # type: int
+E_RTSP_SESSION_ERROR = ... # type: int
E_SEND_ERROR = ... # type: int
E_SEND_FAIL_REWIND = ... # type: int
E_SHARE_IN_USE = ... # type: int
@@ -358,10 +578,14 @@ E_SSL_CACERT_BADFILE = ... # type: int
E_SSL_CERTPROBLEM = ... # type: int
E_SSL_CIPHER = ... # type: int
E_SSL_CONNECT_ERROR = ... # type: int
+E_SSL_CRL_BADFILE = ... # type: int
E_SSL_ENGINE_INITFAILED = ... # type: int
E_SSL_ENGINE_NOTFOUND = ... # type: int
E_SSL_ENGINE_SETFAILED = ... # type: int
+E_SSL_INVALIDCERTSTATUS = ... # type: int
+E_SSL_ISSUER_ERROR = ... # type: int
E_SSL_PEER_CERTIFICATE = ... # type: int
+E_SSL_PINNEDPUBKEYNOTMATCH = ... # type: int
E_SSL_SHUTDOWN_FAILED = ... # type: int
E_TELNET_OPTION_SYNTAX = ... # type: int
E_TFTP_DISKFULL = ... # type: int
@@ -372,7 +596,11 @@ E_TFTP_NOTFOUND = ... # type: int
E_TFTP_PERM = ... # type: int
E_TFTP_UNKNOWNID = ... # type: int
E_TOO_MANY_REDIRECTS = ... # type: int
+E_UNKNOWN_OPTION = ... # type: int
E_UNKNOWN_TELNET_OPTION = ... # type: int
E_UNSUPPORTED_PROTOCOL = ... # type: int
+E_UPLOAD_FAILED = ... # type: int
E_URL_MALFORMAT = ... # type: int
+E_URL_MALFORMAT_USER = ... # type: int
+E_USE_SSL_FAILED = ... # type: int
E_WRITE_ERROR = ... # type: int
diff --git a/typeshed/third_party/2and3/boto/utils.pyi b/typeshed/third_party/2and3/boto/utils.pyi
new file mode 100644
index 0000000..b3c5425
--- /dev/null
+++ b/typeshed/third_party/2and3/boto/utils.pyi
@@ -0,0 +1,239 @@
+import datetime
+import logging.handlers
+import subprocess
+import sys
+import time
+
+import boto.connection
+from typing import (
+ Any,
+ Callable,
+ ContextManager,
+ Dict,
+ IO,
+ Iterable,
+ List,
+ Mapping,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+)
+
+_KT = TypeVar('_KT')
+_VT = TypeVar('_VT')
+
+if sys.version_info[0] >= 3:
+ # TODO move _StringIO definition into boto.compat once stubs exist and rename to StringIO
+ import io
+ _StringIO = io.StringIO
+
+ from hashlib import _Hash
+ _HashType = _Hash
+
+ from email.message import Message as _Message
+else:
+ # TODO move _StringIO definition into boto.compat once stubs exist and rename to StringIO
+ import StringIO
+ _StringIO = StringIO.StringIO
+
+ from hashlib import _hash
+ _HashType = _hash
+
+ # TODO use email.message.Message once stubs exist
+ _Message = Any
+
+_Provider = Any # TODO replace this with boto.provider.Provider once stubs exist
+_LockType = Any # TODO replace this with _thread.LockType once stubs exist
+
+
+JSONDecodeError = ... # type: Type[ValueError]
+qsa_of_interest = ... # type: List[str]
+
+
+def unquote_v(nv: str) -> Union[str, Tuple[str, str]]: ...
+def canonical_string(
+ method: str,
+ path: str,
+ headers: Mapping[str, Optional[str]],
+ expires: Optional[int] = ...,
+ provider: Optional[_Provider] = ...,
+) -> str: ...
+def merge_meta(
+ headers: Mapping[str, str],
+ metadata: Mapping[str, str],
+ provider: Optional[_Provider] = ...,
+) -> Mapping[str, str]: ...
+def get_aws_metadata(
+ headers: Mapping[str, str],
+ provider: Optional[_Provider] = ...,
+) -> Mapping[str, str]: ...
+def retry_url(
+ url: str,
+ retry_on_404: bool = ...,
+ num_retries: int = ...,
+ timeout: Optional[int] = ...,
+) -> str: ...
+
+class LazyLoadMetadata(Dict[_KT, _VT]):
+ def __init__(
+ self,
+ url: str,
+ num_retries: int,
+ timeout: Optional[int] = ...,
+ ) -> None: ...
+
+def get_instance_metadata(
+ version: str = ...,
+ url: str = ...,
+ data: str = ...,
+ timeout: Optional[int] = ...,
+ num_retries: int = ...,
+) -> Optional[LazyLoadMetadata]: ...
+def get_instance_identity(
+ version: str = ...,
+ url: str = ...,
+ timeout: Optional[int] = ...,
+ num_retries: int = ...,
+) -> Optional[Mapping[str, Any]]: ...
+def get_instance_userdata(
+ version: str = ...,
+ sep: Optional[str] = ...,
+ url: str = ...,
+ timeout: Optional[int] = ...,
+ num_retries: int = ...,
+) -> Mapping[str, str]: ...
+
+ISO8601 = ... # type: str
+ISO8601_MS = ... # type: str
+RFC1123 = ... # type: str
+LOCALE_LOCK = ... # type: _LockType
+
+def setlocale(name: Union[str, Tuple[str, str]]) -> ContextManager[str]: ...
+def get_ts(ts: Optional[time.struct_time] = ...) -> str: ...
+def parse_ts(ts: str) -> datetime.datetime: ...
+def find_class(module_name: str, class_name: Optional[str] = ...) -> Optional[Type[Any]]: ...
+def update_dme(username: str, password: str, dme_id: str, ip_address: str) -> str: ...
+def fetch_file(
+ uri: str,
+ file: Optional[IO[str]] = ...,
+ username: Optional[str] = ...,
+ password: Optional[str] = ...,
+) -> Optional[IO[str]]: ...
+
+class ShellCommand:
+ exit_code = ... # type: int
+ command = ... # type: subprocess._CMD
+ log_fp = ... # type: _StringIO
+ wait = ... # type: bool
+ fail_fast = ... # type: bool
+
+ def __init__(
+ self,
+ command: subprocess._CMD,
+ wait: bool = ...,
+ fail_fast: bool = ...,
+ cwd: Optional[subprocess._TXT] = ...,
+ ) -> None: ...
+
+ process = ... # type: subprocess.Popen
+
+ def run(self, cwd: Optional[subprocess._CMD] = ...) -> Optional[int]: ...
+ def setReadOnly(self, value) -> None: ...
+ def getStatus(self) -> Optional[int]: ...
+
+ status = ... # type: Optional[int]
+
+ def getOutput(self) -> str: ...
+
+ output = ... # type: str
+
+class AuthSMTPHandler(logging.handlers.SMTPHandler):
+ username = ... # type: str
+ password = ... # type: str
+ def __init__(
+ self,
+ mailhost: str,
+ username: str,
+ password: str,
+ fromaddr: str,
+ toaddrs: Sequence[str],
+ subject: str,
+ ) -> None: ...
+
+class LRUCache(Dict[_KT, _VT]):
+ class _Item:
+ previous = ... # type: Optional[LRUCache._Item]
+ next = ... # type: Optional[LRUCache._Item]
+ key = ...
+ value = ...
+ def __init__(self, key, value) -> None: ...
+
+ _dict = ... # type: Dict[_KT, LRUCache._Item]
+ capacity = ... # type: int
+ head = ... # type: Optional[LRUCache._Item]
+ tail = ... # type: Optional[LRUCache._Item]
+
+ def __init__(self, capacity: int) -> None: ...
+
+
+# This exists to work around Password.str's name shadowing the str type
+_str = str
+
+class Password:
+ hashfunc = ... # type: Callable[[bytes], _HashType]
+ str = ... # type: Optional[_str]
+
+ def __init__(
+ self,
+ str: Optional[_str] = ...,
+ hashfunc: Optional[Callable[[bytes], _HashType]] = ...,
+ ) -> None: ...
+ def set(self, value: Union[bytes, _str]) -> None: ...
+ def __eq__(self, other: Any) -> bool: ...
+ def __len__(self) -> int: ...
+
+def notify(
+ subject: str,
+ body: Optional[str] = ...,
+ html_body: Optional[Union[Sequence[str], str]] = ...,
+ to_string: Optional[str] = ...,
+ attachments: Optional[Iterable[_Message]] = ...,
+ append_instance_id: bool = ...,
+) -> None: ...
+def get_utf8_value(value: str) -> bytes: ...
+def mklist(value: Any) -> List: ...
+def pythonize_name(name: str) -> str: ...
+def write_mime_multipart(
+ content: List[Tuple[str, str]],
+ compress: bool = ...,
+ deftype: str = ...,
+ delimiter: str = ...,
+) -> str: ...
+def guess_mime_type(content: str, deftype: str) -> str: ...
+def compute_md5(
+ fp: IO[Any],
+ buf_size: int = ...,
+ size: Optional[int] = ...,
+) -> Tuple[str, str, int]: ...
+def compute_hash(
+ fp: IO[Any],
+ buf_size: int = ...,
+ size: Optional[int] = ...,
+ hash_algorithm: Any = ...,
+) -> Tuple[str, str, int]: ...
+def find_matching_headers(name: str, headers: Mapping[str, Optional[str]]) -> List[str]: ...
+def merge_headers_by_name(name: str, headers: Mapping[str, Optional[str]]) -> str: ...
+
+class RequestHook:
+ def handle_request_data(
+ self,
+ request: boto.connection.HTTPRequest,
+ response: boto.connection.HTTPResponse,
+ error: bool = ...,
+ ) -> Any: ...
+
+def host_is_ipv6(hostname: str) -> bool: ...
+def parse_host(hostname: str) -> str: ...
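
The stubs above only declare signatures; the sketch below is not part of the patch and assumes a working boto installation (file names and the metadata call's behavior off EC2 are illustrative), but it shows the kind of caller code these annotations would type-check:

    import boto.utils

    # compute_md5 returns (hex_digest, base64_digest, size), matching the
    # Tuple[str, str, int] return type declared in the stub.
    with open("payload.bin", "rb") as fp:
        hex_md5, b64_md5, size = boto.utils.compute_md5(fp)

    # pythonize_name converts CamelCase names such as "InstanceType" to
    # snake_case ("instance_type").
    print(boto.utils.pythonize_name("InstanceType"))

    # get_instance_metadata only succeeds on an EC2 instance; elsewhere it
    # gives up after its retries and returns None, hence the
    # Optional[LazyLoadMetadata] return type and the None check here.
    metadata = boto.utils.get_instance_metadata(timeout=1, num_retries=0)
    if metadata is not None:
        print(metadata.get("instance-id"))
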
diff --git a/typeshed/third_party/2and3/pymysql/connections.pyi b/typeshed/third_party/2and3/pymysql/connections.pyi
index 62109ba..dd8b5e0 100644
--- a/typeshed/third_party/2and3/pymysql/connections.pyi
+++ b/typeshed/third_party/2and3/pymysql/connections.pyi
@@ -92,6 +92,7 @@ class Connection:
def close(self): ...
def autocommit(self, value): ...
def commit(self): ...
+ def begin(self) -> None: ...
def rollback(self): ...
def escape(self, obj): ...
def literal(self, obj): ...
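
For context on the begin() addition (this example is not part of the patch; the connection parameters and table are placeholders), the method starts an explicit transaction that commit() or rollback() then finishes:

    import pymysql

    conn = pymysql.connect(host="localhost", user="app", password="...", db="appdb")
    try:
        conn.begin()  # explicit transaction start; now annotated as returning None
        with conn.cursor() as cur:
            cur.execute("UPDATE accounts SET balance = balance - 10 WHERE id = %s", (1,))
            cur.execute("UPDATE accounts SET balance = balance + 10 WHERE id = %s", (2,))
        conn.commit()  # both updates become visible together
    except Exception:
        conn.rollback()  # undo the partial transaction
        raise
    finally:
        conn.close()
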
diff --git a/typeshed/third_party/2and3/requests/api.pyi b/typeshed/third_party/2and3/requests/api.pyi
index 865b481..e453475 100644
--- a/typeshed/third_party/2and3/requests/api.pyi
+++ b/typeshed/third_party/2and3/requests/api.pyi
@@ -5,7 +5,7 @@ from typing import Optional, Union, Any, Iterable, Mapping, MutableMapping, Tupl
from .models import Response
_ParamsMappingValueType = Union[Text, bytes, int, float, Iterable[Union[Text, bytes, int, float]]]
-_Data = Union[None, bytes, MutableMapping[Text, Text], IO]
+_Data = Union[None, bytes, MutableMapping[Text, Text], Iterable[Tuple[Text, Text]], IO]
def request(method: str, url: str, **kwargs) -> Response: ...
def get(url: Union[Text, bytes],
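
The widened _Data alias reflects that requests also accepts an iterable of (key, value) pairs for the data= parameter, e.g. to send repeated form fields. A small illustration (the URL is only an example, not part of the patch):

    import requests

    # A list of (key, value) tuples now matches Iterable[Tuple[Text, Text]]
    # in _Data; this is how repeated form fields are sent.
    pairs = [("tag", "python"), ("tag", "typing")]
    resp = requests.post("https://httpbin.org/post", data=pairs)
    print(resp.status_code)
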
--