[med-svn] [mypy] 01/01: Imported Upstream version 0.4
Michael Crusoe
misterc-guest at moszumanska.debian.org
Fri May 6 12:13:02 UTC 2016
This is an automated email from the git hooks/post-receive script.
misterc-guest pushed a commit to annotated tag upstream/0.4
in repository mypy.
commit 5c0efa7e1a2465c29969affc3dcf02ba001551c4
Author: Michael R. Crusoe <crusoe at ucdavis.edu>
Date: Fri May 6 04:06:29 2016 -0700
Imported Upstream version 0.4
---
PKG-INFO | 2 +-
lib-typing/3.2/typing.py | 634 +++----
mypy/applytype.py | 23 +-
mypy/build.py | 1769 ++++++++++++--------
mypy/checker.py | 192 ++-
mypy/checkexpr.py | 164 +-
mypy/checkmember.py | 91 +-
mypy/checkstrformat.py | 6 +-
mypy/constraints.py | 150 +-
mypy/erasetype.py | 2 -
mypy/errors.py | 22 +-
mypy/expandtype.py | 30 +-
mypy/exprtotype.py | 6 +-
mypy/fastparse.py | 771 +++++++++
mypy/fixup.py | 273 +++
mypy/infer.py | 7 +-
mypy/join.py | 51 +-
mypy/lex.py | 9 +-
mypy/main.py | 475 +++---
mypy/meet.py | 47 +-
mypy/messages.py | 46 +-
mypy/moduleinfo.py | 2 +
mypy/nodes.py | 433 ++++-
mypy/parse.py | 123 +-
mypy/parsetype.py | 23 +-
mypy/replacetvars.py | 5 +-
mypy/report.py | 60 +-
mypy/sametypes.py | 14 +-
mypy/semanal.py | 324 +++-
mypy/strconv.py | 13 +-
mypy/stubgen.py | 2 +-
mypy/subtypes.py | 32 +-
mypy/treetransform.py | 5 +-
mypy/typeanal.py | 73 +-
mypy/types.py | 317 +++-
mypy/util.py | 2 +-
mypy/version.py | 2 +-
mypy/waiter.py | 33 +-
setup.py | 6 +-
typeshed/runtests.py | 83 +-
typeshed/stdlib/2.7/ConfigParser.pyi | 103 ++
typeshed/stdlib/2.7/Cookie.pyi | 44 +
typeshed/stdlib/2.7/Queue.pyi | 16 +-
typeshed/stdlib/2.7/__builtin__.pyi | 43 +-
typeshed/stdlib/2.7/__future__.pyi | 6 +-
typeshed/stdlib/2.7/_ast.pyi | 664 +++-----
typeshed/stdlib/2.7/_functools.pyi | 9 +-
typeshed/stdlib/2.7/_weakref.pyi | 11 +-
typeshed/stdlib/2.7/_weakrefset.pyi | 2 +-
typeshed/stdlib/2.7/abc.pyi | 12 +-
typeshed/stdlib/2.7/argparse.pyi | 8 +-
typeshed/stdlib/2.7/ast.pyi | 67 +-
typeshed/stdlib/2.7/bisect.pyi | 6 +
typeshed/stdlib/2.7/builtins.pyi | 43 +-
typeshed/stdlib/2.7/cPickle.pyi | 9 +
typeshed/stdlib/2.7/calendar.pyi | 75 +
typeshed/stdlib/2.7/codecs.pyi | 4 +
typeshed/stdlib/2.7/collections.pyi | 7 +-
typeshed/stdlib/2.7/contextlib.pyi | 14 +-
typeshed/stdlib/2.7/copy.pyi | 4 +-
typeshed/stdlib/2.7/email/_parseaddr.pyi | 44 +
typeshed/stdlib/2.7/email/utils.pyi | 22 +
typeshed/stdlib/2.7/fcntl.pyi | 12 +-
typeshed/stdlib/2.7/functools.pyi | 11 +-
typeshed/stdlib/2.7/gc.pyi | 22 +-
typeshed/stdlib/2.7/genericpath.pyi | 14 +
typeshed/stdlib/2.7/hashlib.pyi | 2 +-
typeshed/stdlib/2.7/heapq.pyi | 15 +
typeshed/stdlib/2.7/hmac.pyi | 4 +-
typeshed/stdlib/2.7/httplib.pyi | 4 +-
typeshed/stdlib/2.7/inspect.pyi | 71 +-
typeshed/stdlib/2.7/itertools.pyi | 12 +-
typeshed/stdlib/2.7/linecache.pyi | 7 +
typeshed/stdlib/2.7/locale.pyi | 33 +
typeshed/stdlib/2.7/logging/__init__.pyi | 48 +-
typeshed/stdlib/2.7/mimetypes.pyi | 26 +
typeshed/stdlib/2.7/multiprocessing/__init__.pyi | 32 +
typeshed/stdlib/2.7/multiprocessing/process.pyi | 39 +
typeshed/stdlib/2.7/multiprocessing/util.pyi | 33 +
typeshed/stdlib/2.7/optparse.pyi | 256 +++
typeshed/stdlib/2.7/os/__init__.pyi | 12 +-
typeshed/stdlib/2.7/pdb.pyi | 30 +
typeshed/stdlib/2.7/pickle.pyi | 40 +-
typeshed/stdlib/2.7/pipes.pyi | 10 +-
typeshed/stdlib/2.7/posixpath.pyi | 50 +
typeshed/stdlib/2.7/pprint.pyi | 4 +-
typeshed/stdlib/2.7/quopri.pyi | 8 +
typeshed/stdlib/2.7/runpy.pyi | 21 +
typeshed/stdlib/2.7/select.pyi | 2 +-
typeshed/stdlib/2.7/shelve.pyi | 33 +
typeshed/stdlib/2.7/socket.pyi | 46 +-
typeshed/stdlib/2.7/sqlite3/dbapi2.pyi | 94 +-
typeshed/stdlib/2.7/ssl.pyi | 191 ++-
typeshed/stdlib/2.7/subprocess.pyi | 6 +-
typeshed/stdlib/2.7/time.pyi | 15 +-
typeshed/stdlib/2.7/traceback.pyi | 16 +-
typeshed/stdlib/2.7/typing.pyi | 18 +-
typeshed/stdlib/2.7/urllib.pyi | 2 +-
typeshed/stdlib/2.7/urllib2.pyi | 18 +-
typeshed/stdlib/2.7/weakref.pyi | 74 +
.../stdlib/{3/bz2.pyi => 2.7/wsgiref/__init__.pyi} | 0
typeshed/stdlib/2.7/wsgiref/validate.pyi | 47 +
typeshed/stdlib/2.7/xml/etree/ElementInclude.pyi | 19 +
typeshed/stdlib/2.7/xml/etree/ElementPath.pyi | 35 +
typeshed/stdlib/2.7/xml/etree/ElementTree.pyi | 116 ++
.../{3/bz2.pyi => 2.7/xml/etree/__init__.pyi} | 0
typeshed/stdlib/2.7/xml/etree/cElementTree.pyi | 5 +
typeshed/stdlib/2.7/zlib.pyi | 36 +-
typeshed/stdlib/2and3/bz2.pyi | 6 +
typeshed/stdlib/2and3/math.pyi | 9 +
typeshed/stdlib/2and3/operator.pyi | 22 +-
typeshed/stdlib/{3 => 2and3}/warnings.pyi | 0
.../stdlib/{3/bz2.pyi => 3.2/xml/__init__.pyi} | 0
typeshed/stdlib/3.2/xml/etree/ElementInclude.pyi | 19 +
typeshed/stdlib/3.2/xml/etree/ElementPath.pyi | 35 +
typeshed/stdlib/3.2/xml/etree/ElementTree.pyi | 120 ++
.../{3/bz2.pyi => 3.2/xml/etree/__init__.pyi} | 0
typeshed/stdlib/3.2/xml/etree/cElementTree.pyi | 5 +
.../stdlib/{3/bz2.pyi => 3.3/xml/__init__.pyi} | 0
typeshed/stdlib/3.3/xml/etree/ElementInclude.pyi | 19 +
typeshed/stdlib/3.3/xml/etree/ElementPath.pyi | 35 +
typeshed/stdlib/3.3/xml/etree/ElementTree.pyi | 122 ++
.../{3/bz2.pyi => 3.3/xml/etree/__init__.pyi} | 0
typeshed/stdlib/3.3/xml/etree/cElementTree.pyi | 5 +
typeshed/stdlib/3.4/asyncio/__init__.pyi | 39 +-
typeshed/stdlib/3.4/asyncio/coroutines.pyi | 9 +
typeshed/stdlib/3.4/asyncio/events.pyi | 15 +-
typeshed/stdlib/3.4/asyncio/futures.pyi | 9 +-
typeshed/stdlib/3.4/asyncio/protocols.pyi | 24 +
typeshed/stdlib/3.4/asyncio/queues.pyi | 5 +-
typeshed/stdlib/3.4/asyncio/streams.pyi | 101 ++
typeshed/stdlib/3.4/asyncio/subprocess.pyi | 60 +
typeshed/stdlib/3.4/asyncio/tasks.pyi | 14 +-
typeshed/stdlib/3.4/asyncio/transports.pyi | 37 +
.../stdlib/{3/bz2.pyi => 3.4/xml/__init__.pyi} | 0
typeshed/stdlib/3.4/xml/etree/ElementInclude.pyi | 19 +
typeshed/stdlib/3.4/xml/etree/ElementPath.pyi | 35 +
typeshed/stdlib/3.4/xml/etree/ElementTree.pyi | 127 ++
.../{3/bz2.pyi => 3.4/xml/etree/__init__.pyi} | 0
typeshed/stdlib/3.4/xml/etree/cElementTree.pyi | 5 +
.../stdlib/{3/bz2.pyi => 3.5/xml/__init__.pyi} | 0
typeshed/stdlib/3.5/xml/etree/ElementInclude.pyi | 19 +
typeshed/stdlib/3.5/xml/etree/ElementPath.pyi | 35 +
typeshed/stdlib/3.5/xml/etree/ElementTree.pyi | 127 ++
.../{3/bz2.pyi => 3.5/xml/etree/__init__.pyi} | 0
typeshed/stdlib/3.5/xml/etree/cElementTree.pyi | 5 +
typeshed/stdlib/3/__future__.pyi | 6 +-
typeshed/stdlib/3/_ast.pyi | 358 ++++
typeshed/stdlib/3/_compression.pyi | 20 +
typeshed/stdlib/3/_curses.pyi | 295 ++++
typeshed/stdlib/3/_operator.pyi | 71 +
typeshed/stdlib/3/abc.pyi | 4 +-
typeshed/stdlib/3/argparse.pyi | 7 +-
typeshed/stdlib/3/ast.pyi | 42 +
typeshed/stdlib/3/builtins.pyi | 39 +-
typeshed/stdlib/3/calendar.pyi | 84 +-
.../{collections.pyi => collections/__init__.pyi} | 7 +-
.../stdlib/3/{bz2.pyi => concurrent/__init__.pyi} | 0
typeshed/stdlib/3/concurrent/futures/__init__.pyi | 7 +
typeshed/stdlib/3/concurrent/futures/_base.pyi | 81 +
typeshed/stdlib/3/concurrent/futures/process.pyi | 46 +
typeshed/stdlib/3/concurrent/futures/thread.pyi | 19 +
typeshed/stdlib/3/configparser.pyi | 166 ++
typeshed/stdlib/3/contextlib.pyi | 14 +-
typeshed/stdlib/3/copy.pyi | 4 +-
typeshed/stdlib/3/curses/__init__.pyi | 12 +
typeshed/stdlib/3/fcntl.pyi | 99 +-
typeshed/stdlib/3/fileinput.pyi | 48 +
typeshed/stdlib/3/functools.pyi | 11 +-
typeshed/stdlib/3/gc.pyi | 24 +-
typeshed/stdlib/3/getpass.pyi | 10 +-
typeshed/stdlib/3/gzip.pyi | 51 +
typeshed/stdlib/3/hashlib.pyi | 21 +-
typeshed/stdlib/3/hmac.pyi | 27 +
typeshed/stdlib/3/http/cookies.pyi | 46 +
typeshed/stdlib/3/inspect.pyi | 273 ++-
typeshed/stdlib/3/io.pyi | 6 +-
typeshed/stdlib/3/itertools.pyi | 12 +-
typeshed/stdlib/3/json.pyi | 6 +-
typeshed/stdlib/3/mimetypes.pyi | 26 +
typeshed/stdlib/3/multiprocessing/__init__.pyi | 3 +
typeshed/stdlib/3/multiprocessing/pool.pyi | 30 +
typeshed/stdlib/3/os/__init__.pyi | 2 +-
typeshed/stdlib/3/pdb.pyi | 30 +
typeshed/stdlib/3/pickle.pyi | 61 +-
typeshed/stdlib/3/pprint.pyi | 6 +-
typeshed/stdlib/3/runpy.pyi | 21 +
typeshed/stdlib/3/shelve.pyi | 31 +
typeshed/stdlib/3/socket.pyi | 44 +-
typeshed/stdlib/3/sqlite3/__init__.pyi | 5 +
typeshed/stdlib/{2.7 => 3}/sqlite3/dbapi2.pyi | 94 +-
typeshed/stdlib/3/subprocess.pyi | 4 +-
typeshed/stdlib/3/tokenize.pyi | 99 ++
typeshed/stdlib/3/traceback.pyi | 4 +-
typeshed/stdlib/3/typing.pyi | 29 +-
typeshed/stdlib/3/weakref.pyi | 57 +-
.../stdlib/3/{bz2.pyi => wsgiref/__init__.pyi} | 0
typeshed/stdlib/3/wsgiref/validate.pyi | 47 +
typeshed/stdlib/3/xml/etree/ElementInclude.pyi | 17 +-
typeshed/stdlib/3/xml/etree/ElementPath.pyi | 45 +-
typeshed/stdlib/3/xml/etree/ElementTree.pyi | 199 +--
typeshed/stdlib/3/zlib.pyi | 38 +-
.../2.7/selenium/webdriver/remote/webdriver.pyi | 111 ++
.../2.7/selenium/webdriver/remote/webelement.pyi | 65 +
.../2.7/sqlalchemy/dialects/mysql/__init__.pyi | 4 +-
.../2.7/sqlalchemy/dialects/mysql/base.pyi | 133 +-
.../third_party/2.7/sqlalchemy/engine/base.pyi | 3 +
.../2.7/sqlalchemy/engine/strategies.pyi | 4 +-
typeshed/third_party/2.7/sqlalchemy/engine/url.pyi | 2 +-
.../third_party/2.7/sqlalchemy/orm/__init__.pyi | 88 +-
typeshed/third_party/2.7/tornado/locks.pyi | 50 +
typeshed/third_party/2.7/tornado/testing.pyi | 64 +
.../3/dateutil/__init__.pyi} | 0
typeshed/third_party/3/dateutil/parser.pyi | 52 +
typeshed/third_party/3/pkg_resources/__init__.pyi | 508 ++++++
.../3/pkg_resources/_vendor/__init__.py} | 0
.../3/pkg_resources/_vendor/packaging/__init__.pyi | 4 +
.../pkg_resources/_vendor/packaging/specifiers.pyi | 58 +
.../3/pkg_resources/_vendor/packaging/version.pyi | 49 +
typeshed/third_party/3/typed_ast/__init__.pyi | 2 +
typeshed/third_party/3/typed_ast/ast27.pyi | 361 ++++
typeshed/third_party/3/typed_ast/ast35.pyi | 393 +++++
typeshed/third_party/3/typed_ast/conversions.pyi | 4 +
223 files changed, 12000 insertions(+), 2969 deletions(-)
diff --git a/PKG-INFO b/PKG-INFO
index d22a472..c173c7d 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: mypy-lang
-Version: 0.3.1
+Version: 0.4
Summary: Optional static typing for Python
Home-page: http://www.mypy-lang.org/
Author: Jukka Lehtosalo
diff --git a/lib-typing/3.2/typing.py b/lib-typing/3.2/typing.py
index 040c531..d275011 100644
--- a/lib-typing/3.2/typing.py
+++ b/lib-typing/3.2/typing.py
@@ -1,14 +1,7 @@
-# TODO:
-# - Generic[T, T] is invalid
-# - Look for TODO below
-
-# TODO nits:
-# Get rid of asserts that are the caller's fault.
-# Docstrings (e.g. ABCs).
-
import abc
from abc import abstractmethod, abstractproperty
import collections
+import contextlib
import functools
import re as stdlib_re # Avoid confusion with the re we export.
import sys
@@ -32,6 +25,9 @@ __all__ = [
# ABCs (from collections.abc).
'AbstractSet', # collections.abc.Set.
+ 'Awaitable',
+ 'AsyncIterator',
+ 'AsyncIterable',
'ByteString',
'Container',
'Hashable',
@@ -51,14 +47,13 @@ __all__ = [
# Structural checks, a.k.a. protocols.
'Reversible',
'SupportsAbs',
- 'SupportsBytes',
- 'SupportsComplex',
'SupportsFloat',
'SupportsInt',
'SupportsRound',
# Concrete collection types.
'Dict',
+ 'DefaultDict',
'List',
'Set',
'NamedTuple', # Not really a type.
@@ -71,12 +66,13 @@ __all__ = [
'no_type_check',
'no_type_check_decorator',
'overload',
-
- # Submodules.
- 'io',
- 're',
+ 'Text',
]
+# The pseudo-submodules 're' and 'io' are part of the public
+# namespace, but excluded from __all__ because they might stomp on
+# legitimate imports of those modules.
+
def _qualname(x):
if sys.version_info[:2] >= (3, 3):
@@ -120,8 +116,8 @@ class TypingMeta(type):
"""
return self
- def _has_type_var(self):
- return False
+ def _get_type_vars(self, tvars):
+ pass
def __repr__(self):
return '%s.%s' % (self.__module__, _qualname(self))
@@ -130,6 +126,8 @@ class TypingMeta(type):
class Final:
"""Mix-in class to prevent instantiation."""
+ __slots__ = ()
+
def __new__(self, *args, **kwds):
raise TypeError("Cannot instantiate %r" % self.__class__)
@@ -178,6 +176,9 @@ class _ForwardRef(TypingMeta):
self.__forward_evaluated__ = True
return self.__forward_value__
+ def __instancecheck__(self, obj):
+ raise TypeError("Forward references cannot be used with isinstance().")
+
def __subclasscheck__(self, cls):
if not self.__forward_evaluated__:
globalns = self.__forward_frame__.f_globals
@@ -188,16 +189,6 @@ class _ForwardRef(TypingMeta):
return False # Too early.
return issubclass(cls, self.__forward_value__)
- def __instancecheck__(self, obj):
- if not self.__forward_evaluated__:
- globalns = self.__forward_frame__.f_globals
- localns = self.__forward_frame__.f_locals
- try:
- self._eval_type(globalns, localns)
- except NameError:
- return False # Too early.
- return isinstance(obj, self.__forward_value__)
-
def __repr__(self):
return '_ForwardRef(%r)' % (self.__forward_arg__,)
@@ -213,6 +204,8 @@ class _TypeAlias:
False.
"""
+ __slots__ = ('name', 'type_var', 'impl_type', 'type_checker')
+
def __new__(cls, *args, **kwds):
"""Constructor.
@@ -220,8 +213,8 @@ class _TypeAlias:
someone tries to subclass a type alias (not a good idea).
"""
if (len(args) == 3 and
- isinstance(args[0], str) and
- isinstance(args[1], tuple)):
+ isinstance(args[0], str) and
+ isinstance(args[1], tuple)):
# Close enough.
raise TypeError("A type alias cannot be subclassed")
return object.__new__(cls)
@@ -261,8 +254,7 @@ class _TypeAlias:
self.impl_type, self.type_checker)
def __instancecheck__(self, obj):
- return (isinstance(obj, self.impl_type) and
- isinstance(self.type_checker(obj), self.type_var))
+ raise TypeError("Type aliases cannot be used with isinstance().")
def __subclasscheck__(self, cls):
if cls is Any:
@@ -278,8 +270,16 @@ class _TypeAlias:
return issubclass(cls, self.impl_type)
-def _has_type_var(t):
- return t is not None and isinstance(t, TypingMeta) and t._has_type_var()
+def _get_type_vars(types, tvars):
+ for t in types:
+ if isinstance(t, TypingMeta):
+ t._get_type_vars(tvars)
+
+
+def _type_vars(types):
+ tvars = []
+ _get_type_vars(types, tvars)
+ return tuple(tvars)
def _eval_type(t, globalns, localns):
@@ -334,8 +334,8 @@ class AnyMeta(TypingMeta):
self = super().__new__(cls, name, bases, namespace, _root=_root)
return self
- def __instancecheck__(self, instance):
- return True
+ def __instancecheck__(self, obj):
+ raise TypeError("Any cannot be used with isinstance().")
def __subclasscheck__(self, cls):
if not isinstance(cls, type):
@@ -351,6 +351,8 @@ class Any(Final, metaclass=AnyMeta, _root=True):
- As a special case, Any and object are subclasses of each other.
"""
+ __slots__ = ()
+
class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True):
"""Type variable.
@@ -381,7 +383,7 @@ class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True):
At runtime, isinstance(x, T) will raise TypeError. However,
issubclass(C, T) is true for any class C, and issubclass(str, A)
and issubclass(bytes, A) are true, and issubclass(int, A) is
- false.
+ false. (TODO: Why is this needed? This may change. See #136.)
Type variables may be marked covariant or contravariant by passing
covariant=True or contravariant=True. See PEP 484 for more
@@ -415,8 +417,9 @@ class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True):
self.__bound__ = None
return self
- def _has_type_var(self):
- return True
+ def _get_type_vars(self, tvars):
+ if self not in tvars:
+ tvars.append(self)
def __repr__(self):
if self.__covariant__:
@@ -449,12 +452,10 @@ KT = TypeVar('KT') # Key type.
VT = TypeVar('VT') # Value type.
T_co = TypeVar('T_co', covariant=True) # Any type covariant containers.
V_co = TypeVar('V_co', covariant=True) # Any type covariant containers.
-KT_co = TypeVar('KT_co', covariant=True) # Key type covariant containers.
VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers.
T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant.
# A useful type variable with constraints. This represents string types.
-# TODO: What about bytearray, memoryview?
AnyStr = TypeVar('AnyStr', bytes, str)
@@ -496,6 +497,9 @@ class UnionMeta(TypingMeta):
return Any
if isinstance(t1, TypeVar):
continue
+ if isinstance(t1, _TypeAlias):
+ # _TypeAlias is not a real class.
+ continue
if any(issubclass(t1, t2)
for t2 in all_params - {t1} if not isinstance(t2, TypeVar)):
all_params.remove(t1)
@@ -517,12 +521,9 @@ class UnionMeta(TypingMeta):
return self.__class__(self.__name__, self.__bases__, {},
p, _root=True)
- def _has_type_var(self):
+ def _get_type_vars(self, tvars):
if self.__union_params__:
- for t in self.__union_params__:
- if _has_type_var(t):
- return True
- return False
+ _get_type_vars(self.__union_params__, tvars)
def __repr__(self):
r = super().__repr__()
@@ -550,9 +551,8 @@ class UnionMeta(TypingMeta):
def __hash__(self):
return hash(self.__union_set_params__)
- def __instancecheck__(self, instance):
- return (self.__union_set_params__ is not None and
- any(isinstance(instance, t) for t in self.__union_params__))
+ def __instancecheck__(self, obj):
+ raise TypeError("Unions cannot be used with isinstance().")
def __subclasscheck__(self, cls):
if cls is Any:
@@ -647,6 +647,8 @@ class Optional(Final, metaclass=OptionalMeta, _root=True):
Optional[X] is equivalent to Union[X, type(None)].
"""
+ __slots__ = ()
+
class TupleMeta(TypingMeta):
"""Metaclass for Tuple."""
@@ -658,12 +660,9 @@ class TupleMeta(TypingMeta):
self.__tuple_use_ellipsis__ = use_ellipsis
return self
- def _has_type_var(self):
+ def _get_type_vars(self, tvars):
if self.__tuple_params__:
- for t in self.__tuple_params__:
- if _has_type_var(t):
- return True
- return False
+ _get_type_vars(self.__tuple_params__, tvars)
def _eval_type(self, globalns, localns):
tp = self.__tuple_params__
@@ -706,23 +705,14 @@ class TupleMeta(TypingMeta):
def __eq__(self, other):
if not isinstance(other, TupleMeta):
return NotImplemented
- return self.__tuple_params__ == other.__tuple_params__
+ return (self.__tuple_params__ == other.__tuple_params__ and
+ self.__tuple_use_ellipsis__ == other.__tuple_use_ellipsis__)
def __hash__(self):
return hash(self.__tuple_params__)
- def __instancecheck__(self, t):
- if not isinstance(t, tuple):
- return False
- if self.__tuple_params__ is None:
- return True
- if self.__tuple_use_ellipsis__:
- p = self.__tuple_params__[0]
- return all(isinstance(x, p) for x in t)
- else:
- return (len(t) == len(self.__tuple_params__) and
- all(isinstance(x, p)
- for x, p in zip(t, self.__tuple_params__)))
+ def __instancecheck__(self, obj):
+ raise TypeError("Tuples cannot be used with isinstance().")
def __subclasscheck__(self, cls):
if cls is Any:
@@ -756,6 +746,8 @@ class Tuple(Final, metaclass=TupleMeta, _root=True):
To specify a variable-length tuple of homogeneous type, use Sequence[T].
"""
+ __slots__ = ()
+
class CallableMeta(TypingMeta):
"""Metaclass for Callable."""
@@ -779,17 +771,17 @@ class CallableMeta(TypingMeta):
self.__result__ = result
return self
- def _has_type_var(self):
+ def _get_type_vars(self, tvars):
if self.__args__:
- for t in self.__args__:
- if _has_type_var(t):
- return True
- return _has_type_var(self.__result__)
+ _get_type_vars(self.__args__, tvars)
def _eval_type(self, globalns, localns):
if self.__args__ is None and self.__result__ is None:
return self
- args = [_eval_type(t, globalns, localns) for t in self.__args__]
+ if self.__args__ is Ellipsis:
+ args = self.__args__
+ else:
+ args = [_eval_type(t, globalns, localns) for t in self.__args__]
result = _eval_type(self.__result__, globalns, localns)
if args == self.__args__ and result == self.__result__:
return self
@@ -828,57 +820,14 @@ class CallableMeta(TypingMeta):
def __hash__(self):
return hash(self.__args__) ^ hash(self.__result__)
- def __instancecheck__(self, instance):
- if not callable(instance):
- return False
+ def __instancecheck__(self, obj):
+ # For unparametrized Callable we allow this, because
+ # typing.Callable should be equivalent to
+ # collections.abc.Callable.
if self.__args__ is None and self.__result__ is None:
- return True
- assert self.__args__ is not None
- assert self.__result__ is not None
- my_args, my_result = self.__args__, self.__result__
- import inspect # TODO: Avoid this import.
- # Would it be better to use Signature objects?
- try:
- (args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults,
- annotations) = inspect.getfullargspec(instance)
- except TypeError:
- return False # We can't find the signature. Give up.
- msg = ("When testing isinstance(<callable>, Callable[...], "
- "<calleble>'s annotations must be types.")
- if my_args is not Ellipsis:
- if kwonlyargs and (not kwonlydefaults or
- len(kwonlydefaults) < len(kwonlyargs)):
- return False
- if isinstance(instance, types.MethodType):
- # For methods, getfullargspec() includes self/cls,
- # but it's not part of the call signature, so drop it.
- del args[0]
- min_call_args = len(args)
- if defaults:
- min_call_args -= len(defaults)
- if varargs:
- max_call_args = 999999999
- if len(args) < len(my_args):
- args += [varargs] * (len(my_args) - len(args))
- else:
- max_call_args = len(args)
- if not min_call_args <= len(my_args) <= max_call_args:
- return False
- for my_arg_type, name in zip(my_args, args):
- if name in annotations:
- annot_type = _type_check(annotations[name], msg)
- else:
- annot_type = Any
- if not issubclass(my_arg_type, annot_type):
- return False
- # TODO: If mutable type, check invariance?
- if 'return' in annotations:
- annot_return_type = _type_check(annotations['return'], msg)
- # Note contravariance here!
- if not issubclass(annot_return_type, my_result):
- return False
- # Can't find anything wrong...
- return True
+ return isinstance(obj, collections_abc.Callable)
+ else:
+ raise TypeError("Callable[] cannot be used with isinstance().")
def __subclasscheck__(self, cls):
if cls is Any:
@@ -902,6 +851,8 @@ class Callable(Final, metaclass=CallableMeta, _root=True):
such function types are rarely used as callback types.
"""
+ __slots__ = ()
+
def _gorg(a):
"""Return the farthest origin of a generic class."""
@@ -926,76 +877,106 @@ def _geqv(a, b):
return _gorg(a) is _gorg(b)
-class GenericMeta(TypingMeta, abc.ABCMeta):
- """Metaclass for generic types."""
+def _next_in_mro(cls):
+ """Helper for Generic.__new__.
+
+ Returns the class after the last occurrence of Generic or
+ Generic[...] in cls.__mro__.
+ """
+ next_in_mro = object
+ # Look for the last occurrence of Generic or Generic[...].
+ for i, c in enumerate(cls.__mro__[:-1]):
+ if isinstance(c, GenericMeta) and _gorg(c) is Generic:
+ next_in_mro = cls.__mro__[i+1]
+ return next_in_mro
- # TODO: Constrain more how Generic is used; only a few
- # standard patterns should be allowed.
- # TODO: Use a more precise rule than matching __name__ to decide
- # whether two classes are the same. Also, save the formal
- # parameters. (These things are related! A solution lies in
- # using origin.)
+class GenericMeta(TypingMeta, abc.ABCMeta):
+ """Metaclass for generic types."""
__extra__ = None
def __new__(cls, name, bases, namespace,
- parameters=None, origin=None, extra=None):
- if parameters is None:
- # Extract parameters from direct base classes. Only
- # direct bases are considered and only those that are
- # themselves generic, and parameterized with type
- # variables. Don't use bases like Any, Union, Tuple,
- # Callable or type variables.
- params = None
+ tvars=None, args=None, origin=None, extra=None):
+ self = super().__new__(cls, name, bases, namespace, _root=True)
+
+ if tvars is not None:
+ # Called from __getitem__() below.
+ assert origin is not None
+ assert all(isinstance(t, TypeVar) for t in tvars), tvars
+ else:
+ # Called from class statement.
+ assert tvars is None, tvars
+ assert args is None, args
+ assert origin is None, origin
+
+ # Get the full set of tvars from the bases.
+ tvars = _type_vars(bases)
+ # Look for Generic[T1, ..., Tn].
+ # If found, tvars must be a subset of it.
+ # If not found, tvars is it.
+ # Also check for and reject plain Generic,
+ # and reject multiple Generic[...].
+ gvars = None
for base in bases:
- if isinstance(base, TypingMeta):
- if not isinstance(base, GenericMeta):
+ if base is Generic:
+ raise TypeError("Cannot inherit from plain Generic")
+ if (isinstance(base, GenericMeta) and
+ base.__origin__ is Generic):
+ if gvars is not None:
raise TypeError(
- "You cannot inherit from magic class %s" %
- repr(base))
- if base.__parameters__ is None:
- continue # The base is unparameterized.
- for bp in base.__parameters__:
- if _has_type_var(bp) and not isinstance(bp, TypeVar):
- raise TypeError(
- "Cannot inherit from a generic class "
- "parameterized with "
- "non-type-variable %s" % bp)
- if params is None:
- params = []
- if bp not in params:
- params.append(bp)
- if params is not None:
- parameters = tuple(params)
- self = super().__new__(cls, name, bases, namespace, _root=True)
- self.__parameters__ = parameters
+ "Cannot inherit from Generic[...] multiple types.")
+ gvars = base.__parameters__
+ if gvars is None:
+ gvars = tvars
+ else:
+ tvarset = set(tvars)
+ gvarset = set(gvars)
+ if not tvarset <= gvarset:
+ raise TypeError(
+ "Some type variables (%s) "
+ "are not listed in Generic[%s]" %
+ (", ".join(str(t) for t in tvars if t not in gvarset),
+ ", ".join(str(g) for g in gvars)))
+ tvars = gvars
+
+ self.__parameters__ = tvars
+ self.__args__ = args
+ self.__origin__ = origin
if extra is not None:
self.__extra__ = extra
# Else __extra__ is inherited, eventually from the
# (meta-)class default above.
- self.__origin__ = origin
+ # Speed hack (https://github.com/python/typing/issues/196).
+ self.__next_in_mro__ = _next_in_mro(self)
return self
- def _has_type_var(self):
- if self.__parameters__:
- for t in self.__parameters__:
- if _has_type_var(t):
- return True
- return False
+ def _get_type_vars(self, tvars):
+ if self.__origin__ and self.__parameters__:
+ _get_type_vars(self.__parameters__, tvars)
def __repr__(self):
- r = super().__repr__()
- if self.__parameters__ is not None:
+ if self.__origin__ is not None:
+ r = repr(self.__origin__)
+ else:
+ r = super().__repr__()
+ if self.__args__:
r += '[%s]' % (
+ ', '.join(_type_repr(p) for p in self.__args__))
+ if self.__parameters__:
+ r += '<%s>' % (
', '.join(_type_repr(p) for p in self.__parameters__))
return r
def __eq__(self, other):
if not isinstance(other, GenericMeta):
return NotImplemented
- return (_geqv(self, other) and
- self.__parameters__ == other.__parameters__)
+ if self.__origin__ is not None:
+ return (self.__origin__ is other.__origin__ and
+ self.__args__ == other.__args__ and
+ self.__parameters__ == other.__parameters__)
+ else:
+ return self is other
def __hash__(self):
return hash((self.__name__, self.__parameters__))
@@ -1004,37 +985,56 @@ class GenericMeta(TypingMeta, abc.ABCMeta):
if not isinstance(params, tuple):
params = (params,)
if not params:
- raise TypeError("Cannot have empty parameter list")
+ raise TypeError(
+ "Parameter list to %s[...] cannot be empty" % _qualname(self))
msg = "Parameters to generic types must be types."
params = tuple(_type_check(p, msg) for p in params)
- if self.__parameters__ is None:
- for p in params:
- if not isinstance(p, TypeVar):
- raise TypeError("Initial parameters must be "
- "type variables; got %s" % p)
+ if self is Generic:
+ # Generic can only be subscripted with unique type variables.
+ if not all(isinstance(p, TypeVar) for p in params):
+ raise TypeError(
+ "Parameters to Generic[...] must all be type variables")
+ if len(set(params)) != len(params):
+ raise TypeError(
+ "Parameters to Generic[...] must all be unique")
+ tvars = params
+ args = None
+ elif self is _Protocol:
+ # _Protocol is internal, don't check anything.
+ tvars = params
+ args = None
+ elif self.__origin__ in (Generic, _Protocol):
+ # Can't subscript Generic[...] or _Protocol[...].
+ raise TypeError("Cannot subscript already-subscripted %s" %
+ repr(self))
else:
- if len(params) != len(self.__parameters__):
- raise TypeError("Cannot change parameter count from %d to %d" %
- (len(self.__parameters__), len(params)))
- for new, old in zip(params, self.__parameters__):
- if isinstance(old, TypeVar):
- if not old.__constraints__:
- # Substituting for an unconstrained TypeVar is OK.
- continue
- if issubclass(new, Union[old.__constraints__]):
- # Specializing a constrained type variable is OK.
- continue
- if not issubclass(new, old):
- raise TypeError(
- "Cannot substitute %s for %s in %s" %
- (_type_repr(new), _type_repr(old), self))
-
- return self.__class__(self.__name__, self.__bases__,
+ # Subscripting a regular Generic subclass.
+ if not self.__parameters__:
+ raise TypeError("%s is not a generic class" % repr(self))
+ alen = len(params)
+ elen = len(self.__parameters__)
+ if alen != elen:
+ raise TypeError(
+ "Too %s parameters for %s; actual %s, expected %s" %
+ ("many" if alen > elen else "few", repr(self), alen, elen))
+ tvars = _type_vars(params)
+ args = params
+ return self.__class__(self.__name__,
+ (self,) + self.__bases__,
dict(self.__dict__),
- parameters=params,
+ tvars=tvars,
+ args=args,
origin=self,
extra=self.__extra__)
+ def __instancecheck__(self, instance):
+ # Since we extend ABC.__subclasscheck__ and
+ # ABC.__instancecheck__ inlines the cache checking done by the
+ # latter, we must extend __instancecheck__ too. For simplicity
+ # we just skip the cache check -- instance checks for generic
+ # classes are supposed to be rare anyways.
+ return self.__subclasscheck__(instance.__class__)
+
def __subclasscheck__(self, cls):
if cls is Any:
return True
@@ -1043,10 +1043,10 @@ class GenericMeta(TypingMeta, abc.ABCMeta):
# C[X] is a subclass of C[Y] iff X is a subclass of Y.
origin = self.__origin__
if origin is not None and origin is cls.__origin__:
- assert len(self.__parameters__) == len(origin.__parameters__)
- assert len(cls.__parameters__) == len(origin.__parameters__)
- for p_self, p_cls, p_origin in zip(self.__parameters__,
- cls.__parameters__,
+ assert len(self.__args__) == len(origin.__parameters__)
+ assert len(cls.__args__) == len(origin.__parameters__)
+ for p_self, p_cls, p_origin in zip(self.__args__,
+ cls.__args__,
origin.__parameters__):
if isinstance(p_origin, TypeVar):
if p_origin.__covariant__:
@@ -1075,12 +1075,9 @@ class GenericMeta(TypingMeta, abc.ABCMeta):
return False
return issubclass(cls, self.__extra__)
- def __instancecheck__(self, obj):
- if super().__instancecheck__(obj):
- return True
- if self.__extra__ is None:
- return False
- return isinstance(obj, self.__extra__)
+
+# Prevent checks for Generic to crash when defining Generic.
+Generic = None
class Generic(metaclass=GenericMeta):
@@ -1097,27 +1094,23 @@ class Generic(metaclass=GenericMeta):
This class can then be used as follows::
- def lookup_name(mapping: Mapping, key: KT, default: VT) -> VT:
+ def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT:
try:
return mapping[key]
except KeyError:
return default
-
- For clarity the type variables may be redefined, e.g.::
-
- X = TypeVar('X')
- Y = TypeVar('Y')
- def lookup_name(mapping: Mapping[X, Y], key: X, default: Y) -> Y:
- # Same body as above.
"""
+ __slots__ = ()
+
def __new__(cls, *args, **kwds):
- next_in_mro = object
- # Look for the last occurrence of Generic or Generic[...].
- for i, c in enumerate(cls.__mro__[:-1]):
- if isinstance(c, GenericMeta) and _gorg(c) is Generic:
- next_in_mro = cls.__mro__[i+1]
- return next_in_mro.__new__(_gorg(cls))
+ if cls.__origin__ is None:
+ return cls.__next_in_mro__.__new__(cls)
+ else:
+ origin = _gorg(cls)
+ obj = cls.__next_in_mro__.__new__(origin)
+ obj.__init__(*args, **kwds)
+ return obj
def cast(typ, val):
@@ -1135,9 +1128,7 @@ def _get_defaults(func):
"""Internal helper to extract the default arguments, by name."""
code = func.__code__
pos_count = code.co_argcount
- kw_count = code.co_kwonlyargcount
arg_names = code.co_varnames
- kwarg_names = arg_names[pos_count:pos_count + kw_count]
arg_names = arg_names[:pos_count]
defaults = func.__defaults__ or ()
kwdefaults = func.__kwdefaults__
@@ -1190,7 +1181,6 @@ def get_type_hints(obj, globalns=None, localns=None):
return hints
-# TODO: Also support this as a class decorator.
def no_type_check(arg):
"""Decorator to indicate that annotations are not type hints.
@@ -1225,8 +1215,42 @@ def no_type_check_decorator(decorator):
return wrapped_decorator
+def _overload_dummy(*args, **kwds):
+ """Helper for @overload to raise when called."""
+ raise NotImplementedError(
+ "You should not call an overloaded function. "
+ "A series of @overload-decorated functions "
+ "outside a stub module should always be followed "
+ "by an implementation that is not @overload-ed.")
+
+
def overload(func):
- raise RuntimeError("Overloading is only supported in library stubs")
+ """Decorator for overloaded functions/methods.
+
+ In a stub file, place two or more stub definitions for the same
+ function in a row, each decorated with @overload. For example:
+
+ @overload
+ def utf8(value: None) -> None: ...
+ @overload
+ def utf8(value: bytes) -> bytes: ...
+ @overload
+ def utf8(value: str) -> bytes: ...
+
+ In a non-stub file (i.e. a regular .py file), do the same but
+ follow it with an implementation. The implementation should *not*
+ be decorated with @overload. For example:
+
+ @overload
+ def utf8(value: None) -> None: ...
+ @overload
+ def utf8(value: bytes) -> bytes: ...
+ @overload
+ def utf8(value: str) -> bytes: ...
+ def utf8(value):
+ # implementation goes here
+ """
+ return _overload_dummy
class _ProtocolMeta(GenericMeta):
@@ -1236,6 +1260,9 @@ class _ProtocolMeta(GenericMeta):
from Generic.
"""
+ def __instancecheck__(self, obj):
+ raise TypeError("Protocols cannot be used with isinstance().")
+
def __subclasscheck__(self, cls):
if not self._is_protocol:
# No structural checks since this isn't a protocol.
@@ -1271,13 +1298,16 @@ class _ProtocolMeta(GenericMeta):
break
else:
if (not attr.startswith('_abc_') and
- attr != '__abstractmethods__' and
- attr != '_is_protocol' and
- attr != '__dict__' and
- attr != '_get_protocol_attrs' and
- attr != '__parameters__' and
- attr != '__origin__' and
- attr != '__module__'):
+ attr != '__abstractmethods__' and
+ attr != '_is_protocol' and
+ attr != '__dict__' and
+ attr != '__args__' and
+ attr != '__slots__' and
+ attr != '_get_protocol_attrs' and
+ attr != '__next_in_mro__' and
+ attr != '__parameters__' and
+ attr != '__origin__' and
+ attr != '__module__'):
attrs.add(attr)
return attrs
@@ -1291,6 +1321,8 @@ class _Protocol(metaclass=_ProtocolMeta):
such as Hashable).
"""
+ __slots__ = ()
+
_is_protocol = True
@@ -1300,15 +1332,37 @@ class _Protocol(metaclass=_ProtocolMeta):
Hashable = collections_abc.Hashable # Not generic.
+if hasattr(collections_abc, 'Awaitable'):
+ class Awaitable(Generic[T_co], extra=collections_abc.Awaitable):
+ __slots__ = ()
+else:
+ Awaitable = None
+
+
+if hasattr(collections_abc, 'AsyncIterable'):
+
+ class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable):
+ __slots__ = ()
+
+ class AsyncIterator(AsyncIterable[T_co],
+ extra=collections_abc.AsyncIterator):
+ __slots__ = ()
+
+else:
+ AsyncIterable = None
+ AsyncIterator = None
+
+
class Iterable(Generic[T_co], extra=collections_abc.Iterable):
- pass
+ __slots__ = ()
class Iterator(Iterable[T_co], extra=collections_abc.Iterator):
- pass
+ __slots__ = ()
class SupportsInt(_Protocol):
+ __slots__ = ()
@abstractmethod
def __int__(self) -> int:
@@ -1316,6 +1370,7 @@ class SupportsInt(_Protocol):
class SupportsFloat(_Protocol):
+ __slots__ = ()
@abstractmethod
def __float__(self) -> float:
@@ -1323,6 +1378,7 @@ class SupportsFloat(_Protocol):
class SupportsComplex(_Protocol):
+ __slots__ = ()
@abstractmethod
def __complex__(self) -> complex:
@@ -1330,38 +1386,46 @@ class SupportsComplex(_Protocol):
class SupportsBytes(_Protocol):
+ __slots__ = ()
@abstractmethod
def __bytes__(self) -> bytes:
pass
-class SupportsAbs(_Protocol[T]):
+class SupportsAbs(_Protocol[T_co]):
+ __slots__ = ()
@abstractmethod
- def __abs__(self) -> T:
+ def __abs__(self) -> T_co:
pass
-class SupportsRound(_Protocol[T]):
+class SupportsRound(_Protocol[T_co]):
+ __slots__ = ()
@abstractmethod
- def __round__(self, ndigits: int = 0) -> T:
+ def __round__(self, ndigits: int = 0) -> T_co:
pass
-class Reversible(_Protocol[T]):
+if hasattr(collections_abc, 'Reversible'):
+ class Reversible(Iterable[T_co], extra=collections_abc.Reversible):
+ __slots__ = ()
+else:
+ class Reversible(_Protocol[T_co]):
+ __slots__ = ()
- @abstractmethod
- def __reversed__(self) -> 'Iterator[T]':
- pass
+ @abstractmethod
+ def __reversed__(self) -> 'Iterator[T_co]':
+ pass
Sized = collections_abc.Sized # Not generic.
class Container(Generic[T_co], extra=collections_abc.Container):
- pass
+ __slots__ = ()
# Callable was defined earlier.
@@ -1376,6 +1440,7 @@ class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet):
pass
+# NOTE: Only the value type is covariant.
class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co],
extra=collections_abc.Mapping):
pass
@@ -1384,10 +1449,14 @@ class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co],
class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping):
pass
-
-class Sequence(Sized, Iterable[T_co], Container[T_co],
+if hasattr(collections_abc, 'Reversible'):
+ class Sequence(Sized, Reversible[T_co], Container[T_co],
extra=collections_abc.Sequence):
- pass
+ pass
+else:
+ class Sequence(Sized, Iterable[T_co], Container[T_co],
+ extra=collections_abc.Sequence):
+ pass
class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence):
@@ -1401,19 +1470,7 @@ class ByteString(Sequence[int], extra=collections_abc.ByteString):
ByteString.register(type(memoryview(b'')))
-class _ListMeta(GenericMeta):
-
- def __instancecheck__(self, obj):
- if not super().__instancecheck__(obj):
- return False
- itemtype = self.__parameters__[0]
- for x in obj:
- if not isinstance(x, itemtype):
- return False
- return True
-
-
-class List(list, MutableSequence[T], metaclass=_ListMeta):
+class List(list, MutableSequence[T]):
def __new__(cls, *args, **kwds):
if _geqv(cls, List):
@@ -1422,19 +1479,7 @@ class List(list, MutableSequence[T], metaclass=_ListMeta):
return list.__new__(cls, *args, **kwds)
-class _SetMeta(GenericMeta):
-
- def __instancecheck__(self, obj):
- if not super().__instancecheck__(obj):
- return False
- itemtype = self.__parameters__[0]
- for x in obj:
- if not isinstance(x, itemtype):
- return False
- return True
-
-
-class Set(set, MutableSet[T], metaclass=_SetMeta):
+class Set(set, MutableSet[T]):
def __new__(cls, *args, **kwds):
if _geqv(cls, Set):
@@ -1443,7 +1488,7 @@ class Set(set, MutableSet[T], metaclass=_SetMeta):
return set.__new__(cls, *args, **kwds)
-class _FrozenSetMeta(_SetMeta):
+class _FrozenSetMeta(GenericMeta):
"""This metaclass ensures set is not a subclass of FrozenSet.
Without this metaclass, set would be considered a subclass of
@@ -1456,13 +1501,9 @@ class _FrozenSetMeta(_SetMeta):
return False
return super().__subclasscheck__(cls)
- def __instancecheck__(self, obj):
- if issubclass(obj.__class__, Set):
- return False
- return super().__instancecheck__(obj)
-
class FrozenSet(frozenset, AbstractSet[T_co], metaclass=_FrozenSetMeta):
+ __slots__ = ()
def __new__(cls, *args, **kwds):
if _geqv(cls, FrozenSet):
@@ -1475,13 +1516,14 @@ class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView):
pass
-class KeysView(MappingView[KT_co], AbstractSet[KT_co],
+class KeysView(MappingView[KT], AbstractSet[KT],
extra=collections_abc.KeysView):
pass
-# TODO: Enable Set[Tuple[KT_co, VT_co]] instead of Generic[KT_co, VT_co].
-class ItemsView(MappingView, Generic[KT_co, VT_co],
+class ItemsView(MappingView[Tuple[KT, VT_co]],
+ AbstractSet[Tuple[KT, VT_co]],
+ Generic[KT, VT_co],
extra=collections_abc.ItemsView):
pass
@@ -1490,20 +1532,13 @@ class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView):
pass
-class _DictMeta(GenericMeta):
-
- def __instancecheck__(self, obj):
- if not super().__instancecheck__(obj):
- return False
- keytype, valuetype = self.__parameters__
- for key, value in obj.items():
- if not (isinstance(key, keytype) and
- isinstance(value, valuetype)):
- return False
- return True
+if hasattr(contextlib, 'AbstractContextManager'):
+ class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager):
+ __slots__ = ()
+ __all__.append('ContextManager')
-class Dict(dict, MutableMapping[KT, VT], metaclass=_DictMeta):
+class Dict(dict, MutableMapping[KT, VT]):
def __new__(cls, *args, **kwds):
if _geqv(cls, Dict):
@@ -1511,6 +1546,13 @@ class Dict(dict, MutableMapping[KT, VT], metaclass=_DictMeta):
"use dict() instead")
return dict.__new__(cls, *args, **kwds)
+class DefaultDict(collections.defaultdict, MutableMapping[KT, VT]):
+
+ def __new__(cls, *args, **kwds):
+ if _geqv(cls, DefaultDict):
+ raise TypeError("Type DefaultDict cannot be instantiated; "
+ "use collections.defaultdict() instead")
+ return collections.defaultdict.__new__(cls, *args, **kwds)
# Determine what base class to use for Generator.
if hasattr(collections_abc, 'Generator'):
@@ -1523,6 +1565,7 @@ else:
class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co],
extra=_G_base):
+ __slots__ = ()
def __new__(cls, *args, **kwds):
if _geqv(cls, Generator):
@@ -1550,9 +1593,18 @@ def NamedTuple(typename, fields):
fields = [(n, t) for n, t in fields]
cls = collections.namedtuple(typename, [n for n, t in fields])
cls._field_types = dict(fields)
+ # Set the module to the caller's module (otherwise it'd be 'typing').
+ try:
+ cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__')
+ except (AttributeError, ValueError):
+ pass
return cls
+# Python-version-specific alias (Python 2: unicode; Python 3: str)
+Text = str
+
+
class IO(Generic[AnyStr]):
"""Generic base class for TextIO and BinaryIO.
@@ -1566,6 +1618,8 @@ class IO(Generic[AnyStr]):
way to track the other distinctions in the type system.
"""
+ __slots__ = ()
+
@abstractproperty
def mode(self) -> str:
pass
@@ -1650,6 +1704,8 @@ class IO(Generic[AnyStr]):
class BinaryIO(IO[bytes]):
"""Typed version of the return of open() in binary mode."""
+ __slots__ = ()
+
@abstractmethod
def write(self, s: Union[bytes, bytearray]) -> int:
pass
@@ -1662,6 +1718,8 @@ class BinaryIO(IO[bytes]):
class TextIO(IO[str]):
"""Typed version of the return of open() in text mode."""
+ __slots__ = ()
+
@abstractproperty
def buffer(self) -> BinaryIO:
pass
diff --git a/mypy/applytype.py b/mypy/applytype.py
index 439c6c5..29f2287 100644
--- a/mypy/applytype.py
+++ b/mypy/applytype.py
@@ -1,8 +1,9 @@
from typing import List, Dict
import mypy.subtypes
+from mypy.sametypes import is_same_type
from mypy.expandtype import expand_type
-from mypy.types import Type, CallableType, AnyType
+from mypy.types import Type, TypeVarType, CallableType, AnyType, Void
from mypy.messages import MessageBuilder
from mypy.nodes import Context
@@ -12,8 +13,7 @@ def apply_generic_arguments(callable: CallableType, types: List[Type],
"""Apply generic type arguments to a callable type.
For example, applying [int] to 'def [T] (T) -> T' results in
- 'def [-1:int] (int) -> int'. Here '[-1:int]' is an implicit bound type
- variable.
+ 'def (int) -> int'.
Note that each type can be None; in this case, it will not be applied.
"""
@@ -23,13 +23,19 @@ def apply_generic_arguments(callable: CallableType, types: List[Type],
return AnyType()
# Check that inferred type variable values are compatible with allowed
- # values. Also, promote subtype values to allowed values.
+ # values and bounds. Also, promote subtype values to allowed values.
types = types[:]
for i, type in enumerate(types):
values = callable.variables[i].values
if values and type:
if isinstance(type, AnyType):
continue
+ if isinstance(type, TypeVarType) and type.values:
+ # Allow substituting T1 for T if every allowed value of T1
+ # is also a legal value of T.
+ if all(any(is_same_type(v, v1) for v in values)
+ for v1 in type.values):
+ continue
for value in values:
if mypy.subtypes.is_subtype(type, value):
types[i] = value
@@ -37,6 +43,10 @@ def apply_generic_arguments(callable: CallableType, types: List[Type],
else:
msg.incompatible_typevar_value(callable, i + 1, type, context)
+ upper_bound = callable.variables[i].upper_bound
+ if type and not mypy.subtypes.satisfies_upper_bound(type, upper_bound):
+ msg.incompatible_typevar_value(callable, i + 1, type, context)
+
# Create a map from type variable id to target type.
id_to_type = {} # type: Dict[int, Type]
for i, tv in enumerate(tvars):
@@ -46,10 +56,6 @@ def apply_generic_arguments(callable: CallableType, types: List[Type],
# Apply arguments to argument types.
arg_types = [expand_type(at, id_to_type) for at in callable.arg_types]
- bound_vars = [(tv.id, id_to_type[tv.id])
- for tv in tvars
- if tv.id in id_to_type]
-
# The callable may retain some type vars if only some were applied.
remaining_tvars = [tv for tv in tvars if tv.id not in id_to_type]
@@ -57,5 +63,4 @@ def apply_generic_arguments(callable: CallableType, types: List[Type],
arg_types=arg_types,
ret_type=expand_type(callable.ret_type, id_to_type),
variables=remaining_tvars,
- bound_vars=callable.bound_vars + bound_vars,
)
diff --git a/mypy/build.py b/mypy/build.py
index 4d01c5d..d37dc78 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -8,29 +8,35 @@ file. The individual passes are implemented in separate modules.
The function build() is the main interface to this module.
"""
+# TODO: More consistent terminology, e.g. path/fnam, module/id, state/file
+import binascii
+import collections
+import contextlib
+import json
import os
import os.path
-import shlex
-import subprocess
import sys
-import re
+import time
from os.path import dirname, basename
-from typing import Dict, List, Tuple, Iterable, cast, Set, Union, Optional
+from typing import (AbstractSet, Dict, Iterable, Iterator, List,
+ NamedTuple, Optional, Set, Tuple, Union)
from mypy.types import Type
-from mypy.nodes import MypyFile, Node, Import, ImportFrom, ImportAll
-from mypy.nodes import SymbolTableNode, MODULE_REF
-from mypy.semanal import SemanticAnalyzer, FirstPass, ThirdPass
+from mypy.nodes import (MypyFile, Node, Import, ImportFrom, ImportAll,
+ SymbolTableNode, MODULE_REF)
+from mypy.semanal import FirstPass, SemanticAnalyzer, ThirdPass
from mypy.checker import TypeChecker
-from mypy.errors import Errors, CompileError
-from mypy import parse
-from mypy import stats
+from mypy.errors import Errors, CompileError, report_internal_error
+from mypy import fixup
from mypy.report import Reports
from mypy import defaults
from mypy import moduleinfo
from mypy import util
+from mypy.fixup import fixup_module_pass_one, fixup_module_pass_two
+from mypy.parse import parse
+from mypy.stats import dump_type_stats
# We need to know the location of this file to load data, but
@@ -51,44 +57,34 @@ TEST_BUILTINS = 'test-builtins' # Use stub builtins to speed up tests
DUMP_TYPE_STATS = 'dump-type-stats'
DUMP_INFER_STATS = 'dump-infer-stats'
SILENT_IMPORTS = 'silent-imports' # Silence imports of .py files
-
-# State ids. These describe the states a source file / module can be in a
-# build.
-
-# We aren't processing this source file yet (no associated state object).
-UNSEEN_STATE = 0
-# The source file has a state object, but we haven't done anything with it yet.
-UNPROCESSED_STATE = 1
-# We've parsed the source file.
-PARSED_STATE = 2
-# We've done the first two passes of semantic analysis.
-PARTIAL_SEMANTIC_ANALYSIS_STATE = 3
-# We've semantically analyzed the source file.
-SEMANTICALLY_ANALYSED_STATE = 4
-# We've type checked the source file (and all its dependencies).
-TYPE_CHECKED_STATE = 5
+ALMOST_SILENT = 'almost-silent' # If SILENT_IMPORTS: report silenced imports as errors
+INCREMENTAL = 'incremental' # Incremental mode: use the cache
+FAST_PARSER = 'fast-parser' # Use experimental fast parser
+# Disallow calling untyped functions from typed ones
+DISALLOW_UNTYPED_CALLS = 'disallow-untyped-calls'
+# Disallow defining untyped (or incompletely typed) functions
+DISALLOW_UNTYPED_DEFS = 'disallow-untyped-defs'
+# Type check unannotated functions
+CHECK_UNTYPED_DEFS = 'check-untyped-defs'
PYTHON_EXTENSIONS = ['.pyi', '.py']
-final_state = TYPE_CHECKED_STATE
-
-
-def earlier_state(s: int, t: int) -> bool:
- return s < t
-
class BuildResult:
"""The result of a successful build.
Attributes:
- files: Dictionary from module name to related AST node.
- types: Dictionary from parse tree node to its inferred type.
+ manager: The build manager.
+ files: Dictionary from module name to related AST node.
+ types: Dictionary from parse tree node to its inferred type.
+ errors: List of error messages.
"""
- def __init__(self, files: Dict[str, MypyFile],
- types: Dict[Node, Type]) -> None:
- self.files = files
- self.types = types
+ def __init__(self, manager: 'BuildManager') -> None:
+ self.manager = manager
+ self.files = manager.modules
+ self.types = manager.type_checker.type_map
+ self.errors = manager.errors.messages()
class BuildSource:
@@ -98,29 +94,46 @@ class BuildSource:
self.module = module or '__main__'
self.text = text
- def load(self, lib_path, pyversion: Tuple[int, int]) -> str:
- """Load the module if needed. This also has the side effect
- of calculating the effective path for modules."""
- if self.text is not None:
- return self.text
-
- self.path = self.path or lookup_program(self.module, lib_path)
- return read_program(self.path, pyversion)
-
@property
def effective_path(self) -> str:
"""Return the effective path (ie, <string> if its from in memory)"""
return self.path or '<string>'
+class BuildSourceSet:
+ """Efficiently test a file's membership in the set of build sources."""
+
+ def __init__(self, sources: List[BuildSource]) -> None:
+ self.source_text_present = False
+ self.source_modules = set() # type: Set[str]
+ self.source_paths = set() # type: Set[str]
+
+ for source in sources:
+ if source.text is not None:
+ self.source_text_present = True
+ elif source.path:
+ self.source_paths.add(source.path)
+ else:
+ self.source_modules.add(source.module)
+
+ def is_source(self, file: MypyFile) -> bool:
+ if file.path and file.path in self.source_paths:
+ return True
+ elif file._fullname in self.source_modules:
+ return True
+ elif file.path is None and self.source_text_present:
+ return True
+ else:
+ return False
+
+
def build(sources: List[BuildSource],
target: int,
alt_lib_path: str = None,
bin_dir: str = None,
pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
custom_typing_module: str = None,
- implicit_any: bool = False,
- report_dirs: Dict[str, str] = {},
+ report_dirs: Dict[str, str] = None,
flags: List[str] = None,
python_path: bool = False) -> BuildResult:
"""Analyze a program.
@@ -128,20 +141,21 @@ def build(sources: List[BuildSource],
A single call to build performs parsing, semantic analysis and optionally
type checking for the program *and* all imported modules, recursively.
- Return BuildResult if successful; otherwise raise CompileError.
+ Return BuildResult if successful or only non-blocking errors were found;
+ otherwise raise CompileError.
Args:
target: select passes to perform (a build target constant, e.g. C)
sources: list of sources to build
- alt_lib_dir: an additional directory for looking up library modules
+ alt_lib_path: an additional directory for looking up library modules
(takes precedence over other directories)
bin_dir: directory containing the mypy script, used for finding data
directories; if omitted, use '.' as the data directory
pyversion: Python version (major, minor)
custom_typing_module: if not None, use this module id as an alias for typing
- implicit_any: if True, add implicit Any signatures to all functions
flags: list of build options (e.g. COMPILE_ONLY)
"""
+ report_dirs = report_dirs or {}
flags = flags or []
data_dir = default_data_dir(bin_dir)
@@ -159,8 +173,9 @@ def build(sources: List[BuildSource],
for source in sources:
if source.path:
# Include directory of the program file in the module search path.
- lib_path.insert(
- 0, remove_cwd_prefix_from_path(dirname(source.path)))
+ dir = remove_cwd_prefix_from_path(dirname(source.path))
+ if dir not in lib_path:
+ lib_path.insert(0, dir)
# Do this even if running as a file, for sanity (mainly because with
# multiple builds, there could be a mix of files/modules, so its easier
@@ -176,50 +191,59 @@ def build(sources: List[BuildSource],
if alt_lib_path:
lib_path.insert(0, alt_lib_path)
- # TODO Reports is global to a build manager but only supports a single "main file"
- # Fix this.
- reports = Reports(sources[0].effective_path, data_dir, report_dirs)
+ reports = Reports(data_dir, report_dirs)
+
+ source_set = BuildSourceSet(sources)
- # Construct a build manager object that performs all the stages of the
- # build in the correct order.
+ # Construct a build manager object to hold state during the build.
#
# Ignore current directory prefix in error messages.
manager = BuildManager(data_dir, lib_path, target,
pyversion=pyversion, flags=flags,
ignore_prefix=os.getcwd(),
custom_typing_module=custom_typing_module,
- implicit_any=implicit_any,
+ source_set=source_set,
reports=reports)
- # Construct information that describes the initial files. __main__ is the
- # implicit module id and the import context is empty initially ([]).
- initial_states = [] # type: List[UnprocessedFile]
- for source in sources:
- content = source.load(lib_path, pyversion)
- info = StateInfo(source.effective_path, source.module, [], manager)
- initial_state = UnprocessedFile(info, content)
- initial_states += [initial_state]
-
- # Perform the build by sending the files as new file (UnprocessedFile is the
- # initial state of all files) to the manager. The manager will process the
- # file and all dependant modules recursively.
- result = manager.process(initial_states)
- reports.finish()
- return result
+ try:
+ dispatch(sources, manager)
+ return BuildResult(manager)
+ finally:
+ manager.log("Build finished with %d modules, %d types, and %d errors" %
+ (len(manager.modules),
+ len(manager.type_checker.type_map),
+ manager.errors.num_messages()))
+ # Finish the HTML or XML reports even if CompileError was raised.
+ reports.finish()
def default_data_dir(bin_dir: str) -> str:
- # TODO fix this logic
+ """Returns directory containing typeshed directory
+
+ Args:
+ bin_dir: directory containing the mypy script
+ """
if not bin_dir:
mypy_package = os.path.dirname(__file__)
parent = os.path.dirname(mypy_package)
- if os.path.basename(parent) == 'site-packages':
- # Installed in site-packages, but invoked with python3 -m mypy;
- # __file__ is .../blah/lib/python3.N/site-packages/mypy/__init__.py;
+ if (os.path.basename(parent) == 'site-packages' or
+ os.path.basename(parent) == 'dist-packages'):
+ # Installed in site-packages or dist-packages, but invoked with python3 -m mypy;
+ # __file__ is .../blah/lib/python3.N/site-packages/mypy/build.py
+ # or .../blah/lib/python3.N/dist-packages/mypy/build.py (Debian)
+ # or .../blah/lib/site-packages/mypy/build.py (Windows)
# blah may be a virtualenv or /usr/local. We want .../blah/lib/mypy.
- lib = os.path.dirname(os.path.dirname(parent))
- if os.path.basename(lib) == 'lib':
- return os.path.join(lib, 'mypy')
+ lib = parent
+ for i in range(2):
+ lib = os.path.dirname(lib)
+ if os.path.basename(lib) == 'lib':
+ return os.path.join(lib, 'mypy')
+ subdir = os.path.join(parent, 'lib', 'mypy')
+ if os.path.isdir(subdir):
+ # If installed via buildout, the __file__ is
+ # somewhere/mypy/__init__.py and what we want is
+ # somewhere/lib/mypy.
+ return subdir
# Default to directory containing this file's parent.
return parent
base = os.path.basename(bin_dir)
@@ -230,13 +254,16 @@ def default_data_dir(bin_dir: str) -> str:
return os.path.join(dir, 'Lib', 'mypy')
elif base == 'scripts':
# Assume that we have a repo check out or unpacked source tarball.
- return os.path.dirname(bin_dir)
+ return dir
elif base == 'bin':
# Installed to somewhere (can be under /usr/local or anywhere).
return os.path.join(dir, 'lib', 'mypy')
elif base == 'python3':
# Assume we installed python3 with brew on os x
return os.path.join(os.path.dirname(dir), 'lib', 'mypy')
+ elif dir.endswith('python-exec'):
+ # Gentoo uses a python wrapper in /usr/lib to which mypy is a symlink.
+ return os.path.join(os.path.dirname(dir), 'mypy')
else:
# Don't know where to find the data files!
raise RuntimeError("Broken installation: can't determine base dir")
@@ -284,39 +311,33 @@ def default_lib_path(data_dir: str, pyversion: Tuple[int, int],
return path
-def lookup_program(module: str, lib_path: List[str]) -> str:
- # Modules are .py or .pyi
- path = find_module(module, lib_path)
- if path:
- return path
- else:
- raise CompileError([
- "mypy: can't find module '{}'".format(module)])
-
-
-def read_program(path: str, pyversion: Tuple[int, int]) -> str:
- try:
- text = read_with_python_encoding(path, pyversion)
- except IOError as ioerr:
- raise CompileError([
- "mypy: can't read file '{}': {}".format(path, ioerr.strerror)])
- except UnicodeDecodeError as decodeerr:
- raise CompileError([
- "mypy: can't decode file '{}': {}".format(path, str(decodeerr))])
- return text
+CacheMeta = NamedTuple('CacheMeta',
+ [('id', str),
+ ('path', str),
+ ('mtime', float),
+ ('size', int),
+ ('dependencies', List[str]), # names of imported modules
+ ('data_mtime', float), # mtime of data_json
+ ('data_json', str), # path of <id>.data.json
+ ('suppressed', List[str]), # dependencies that weren't imported
+ ])
+# NOTE: dependencies + suppressed == all unreachable imports;
+# suppressed contains those reachable imports that were prevented by
+# --silent-imports or simply not found.
class BuildManager:
- """This is the central class for building a mypy program.
+ """This class holds shared state for building a mypy program.
- It coordinates parsing, import processing, semantic analysis and
- type checking. It manages state objects that actually perform the
- build steps.
+ It is used to coordinate parsing, import processing, semantic
+ analysis and type checking. The actual build steps are carried
+ out by dispatch().
Attributes:
data_dir: Mypy data directory (contains stubs)
target: Build target; selects which passes to perform
lib_path: Library path for looking up modules
+ modules: Mapping of module ID to MypyFile (shared by the passes)
semantic_analyzer:
Semantic analyzer, pass 2
semantic_analyzer_pass3:
@@ -325,16 +346,6 @@ class BuildManager:
errors: Used for reporting all errors
pyversion: Python version (major, minor)
flags: Build options
- states: States of all individual files that are being
- processed. Each file in a build is always represented
- by a single state object (after it has been encountered
- for the first time). This is the only place where
- states are stored.
- module_files: Map from module name to source file path. There is a
- 1:1 mapping between modules and source files.
- module_deps: Cache for module dependencies (direct or indirect).
- Item (m, n) indicates whether m depends on n (directly
- or indirectly).
missing_modules: Set of modules that could not be imported encountered so far
"""
@@ -345,8 +356,9 @@ class BuildManager:
flags: List[str],
ignore_prefix: str,
custom_typing_module: str,
- implicit_any: bool,
+ source_set: BuildSourceSet,
reports: Reports) -> None:
+ self.start_time = time.time()
self.data_dir = data_dir
self.errors = Errors()
self.errors.set_ignore_prefix(ignore_prefix)
@@ -355,156 +367,22 @@ class BuildManager:
self.pyversion = pyversion
self.flags = flags
self.custom_typing_module = custom_typing_module
- self.implicit_any = implicit_any
+ self.source_set = source_set
self.reports = reports
+ check_untyped_defs = CHECK_UNTYPED_DEFS in self.flags
self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors,
- pyversion=pyversion)
- modules = self.semantic_analyzer.modules
- self.semantic_analyzer_pass3 = ThirdPass(modules, self.errors)
- self.type_checker = TypeChecker(self.errors, modules, self.pyversion)
- self.states = [] # type: List[State]
- self.module_files = {} # type: Dict[str, str]
- self.module_deps = {} # type: Dict[Tuple[str, str], bool]
+ pyversion=pyversion,
+ check_untyped_defs=check_untyped_defs)
+ self.modules = self.semantic_analyzer.modules
+ self.semantic_analyzer_pass3 = ThirdPass(self.modules, self.errors)
+ self.type_checker = TypeChecker(self.errors,
+ self.modules,
+ self.pyversion,
+ DISALLOW_UNTYPED_CALLS in self.flags,
+ DISALLOW_UNTYPED_DEFS in self.flags,
+ check_untyped_defs)
self.missing_modules = set() # type: Set[str]
- def process(self, initial_states: List['UnprocessedFile']) -> BuildResult:
- """Perform a build.
-
- The argument is a state that represents the main program
- file. This method should only be called once per a build
- manager object. The return values are identical to the return
- values of the build function.
- """
- self.states += initial_states
- for initial_state in initial_states:
- self.module_files[initial_state.id] = initial_state.path
- for initial_state in initial_states:
- initial_state.load_dependencies()
-
- # Process states in a loop until all files (states) have been
- # semantically analyzed or type checked (depending on target).
- #
- # We type check all files before the rest of the passes so that we can
- # report errors and fail as quickly as possible.
- while True:
- # Find the next state that has all its dependencies met.
- next = self.next_available_state()
- if not next:
- self.trace('done')
- break
-
- # Potentially output some debug information.
- self.trace('next {} ({})'.format(next.path, next.state()))
-
- # Set the import context for reporting error messages correctly.
- self.errors.set_import_context(next.import_context)
- # Process the state. The process method is reponsible for adding a
- # new state object representing the new state of the file.
- next.process()
-
- # Raise exception if the build failed. The build can fail for
- # various reasons, such as parse error, semantic analysis error,
- # etc.
- if self.errors.is_blockers():
- self.errors.raise_error()
-
- # If there were no errors, all files should have been fully processed.
- for s in self.states:
- assert s.state() == final_state, (
- '{} still unprocessed in state {}'.format(s.path, s.state()))
-
- if self.errors.is_errors():
- self.errors.raise_error()
-
- # Collect a list of all files.
- trees = [] # type: List[MypyFile]
- for state in self.states:
- trees.append(cast(ParsedFile, state).tree)
-
- # Perform any additional passes after type checking for all the files.
- self.final_passes(trees, self.type_checker.type_map)
-
- return BuildResult(self.semantic_analyzer.modules,
- self.type_checker.type_map)
-
- def next_available_state(self) -> 'State':
- """Find a ready state (one that has all its dependencies met)."""
- i = len(self.states) - 1
- while i >= 0:
- if self.states[i].is_ready():
- num_incomplete = self.states[i].num_incomplete_deps()
- if num_incomplete == 0:
- # This is perfect; no need to look for the best match.
- return self.states[i]
- i -= 1
- return None
-
- def has_module(self, name: str) -> bool:
- """Have we seen a module yet?"""
- return name in self.module_files
-
- def file_state(self, path: str) -> int:
- """Return the state of a source file.
-
- In particular, return UNSEEN_STATE if the file has no associated
- state.
-
- This function does not consider any dependencies.
- """
- for s in self.states:
- if s.path == path:
- return s.state()
- return UNSEEN_STATE
-
- def module_state(self, name: str) -> int:
- """Return the state of a module.
-
- In particular, return UNSEEN_STATE if the file has no associated
- state.
-
- This considers also module dependencies.
- """
- if not self.has_module(name):
- return UNSEEN_STATE
- state = final_state
- fs = self.file_state(self.module_files[name])
- if earlier_state(fs, state):
- state = fs
- return state
-
- def is_dep(self, m1: str, m2: str, done: Set[str] = None) -> bool:
- """Does m1 import m2 directly or indirectly?"""
- # Have we computed this previously?
- dep = self.module_deps.get((m1, m2))
- if dep is not None:
- return dep
-
- if not done:
- done = set([m1])
-
- # m1 depends on m2 iff one of the deps of m1 depends on m2.
- st = self.lookup_state(m1)
- for m in st.dependencies:
- if m in done:
- continue
- done.add(m)
- # Cache this dependency.
- self.module_deps[m1, m] = True
- # Search recursively.
- if m == m2 or self.is_dep(m, m2, done):
- # Yes! Mark it in the cache.
- self.module_deps[m1, m2] = True
- return True
- # No dependency. Mark it in the cache.
- self.module_deps[m1, m2] = False
- return False
-
- def lookup_state(self, module: str) -> 'State':
- for state in self.states:
- if state.id == module:
- return state
- raise RuntimeError('%s not found' % module)
-
def all_imported_modules_in_file(self,
file: MypyFile) -> List[Tuple[str, int]]:
"""Find all reachable import statements in a file.
@@ -534,12 +412,22 @@ class BuildManager:
res.append((id, imp.line))
elif isinstance(imp, ImportFrom):
cur_id = correct_rel_imp(imp)
- res.append((cur_id, imp.line))
+ pos = len(res)
+ all_are_submodules = True
# Also add any imported names that are submodules.
for name, __ in imp.names:
sub_id = cur_id + '.' + name
if self.is_module(sub_id):
res.append((sub_id, imp.line))
+ else:
+ all_are_submodules = False
+ # If all imported names are submodules, don't add
+ # cur_id as a dependency. Otherwise (i.e., if at
+ # least one imported name isn't a submodule)
+ # cur_id is also a dependency, and we should
+ # insert it *before* any submodules.
+ if not all_are_submodules:
+ res.insert(pos, ((cur_id, imp.line)))
elif isinstance(imp, ImportAll):
res.append((correct_rel_imp(imp), imp.line))
return res
@@ -548,21 +436,52 @@ class BuildManager:
"""Is there a file in the file system corresponding to module id?"""
return find_module(id, self.lib_path) is not None
- def final_passes(self, files: List[MypyFile],
- types: Dict[Node, Type]) -> None:
- """Perform the code generation passes for type checked files."""
- if self.target in [SEMANTIC_ANALYSIS, TYPE_CHECK]:
- pass # Nothing to do.
+ def parse_file(self, id: str, path: str, source: str) -> MypyFile:
+ """Parse the source of a file with the given name.
+
+ Raise CompileError if there is a parse error.
+ """
+ num_errs = self.errors.num_messages()
+ tree = parse(source, path, self.errors,
+ pyversion=self.pyversion,
+ custom_typing_module=self.custom_typing_module,
+ fast_parser=FAST_PARSER in self.flags)
+ tree._fullname = id
+ if self.errors.num_messages() != num_errs:
+ self.log("Bailing due to parse errors")
+ self.errors.raise_error()
+ return tree
+
+ def module_not_found(self, path: str, line: int, id: str) -> None:
+ self.errors.set_file(path)
+ stub_msg = "(Stub files are from https://github.com/python/typeshed)"
+ if ((self.pyversion[0] == 2 and moduleinfo.is_py2_std_lib_module(id)) or
+ (self.pyversion[0] >= 3 and moduleinfo.is_py3_std_lib_module(id))):
+ self.errors.report(
+ line, "No library stub file for standard library module '{}'".format(id))
+ self.errors.report(line, stub_msg, severity='note', only_once=True)
+ elif moduleinfo.is_third_party_module(id):
+ self.errors.report(line, "No library stub file for module '{}'".format(id))
+ self.errors.report(line, stub_msg, severity='note', only_once=True)
else:
- raise RuntimeError('Unsupported target %d' % self.target)
+ self.errors.report(line, "Cannot find module named '{}'".format(id))
+ self.errors.report(line, '(Perhaps setting MYPYPATH '
+ 'or using the "--silent-imports" flag would help)',
+ severity='note', only_once=True)
- def log(self, message: str) -> None:
+ def report_file(self, file: MypyFile) -> None:
+ if self.source_set.is_source(file):
+ self.reports.file(file, type_map=self.type_checker.type_map)
+
+ def log(self, *message: str) -> None:
if VERBOSE in self.flags:
- print('LOG:', message, file=sys.stderr)
+ print('%.3f:LOG: ' % (time.time() - self.start_time), *message, file=sys.stderr)
+ sys.stderr.flush()
- def trace(self, message: str) -> None:
+ def trace(self, *message: str) -> None:
if self.flags.count(VERBOSE) >= 2:
- print('TRACE:', message, file=sys.stderr)
+ print('%.3f:TRACE:' % (time.time() - self.start_time), *message, file=sys.stderr)
+ sys.stderr.flush()
def remove_cwd_prefix_from_path(p: str) -> str:
@@ -577,7 +496,9 @@ def remove_cwd_prefix_from_path(p: str) -> str:
if basename(cur) != '':
cur += os.sep
# Compute root path.
- while p and os.path.isfile(os.path.join(p, '__init__.py')):
+ while (p and
+ (os.path.isfile(os.path.join(p, '__init__.py')) or
+ os.path.isfile(os.path.join(p, '__init__.pyi')))):
dir, base = os.path.split(p)
if not base:
break
@@ -591,373 +512,6 @@ def remove_cwd_prefix_from_path(p: str) -> str:
return p
-class StateInfo:
- """Description of a source file that is being built."""
-
- def __init__(self, path: str, id: str,
- import_context: List[Tuple[str, int]],
- manager: BuildManager) -> None:
- """Initialize state information.
-
- Arguments:
- path: Path to the file
- id: Module id, such as 'os.path' or '__main__' (for the main
- program file)
- import_context:
- The import trail that caused this module to be
- imported (path, line) tuples
- manager: The manager that manages this build
- """
- self.path = path
- self.id = id
- self.import_context = import_context
- self.manager = manager
-
-
-class State:
- """Abstract base class for build states.
-
- There is always at most one state per source file.
- """
-
- # The StateInfo attributes are duplicated here for convenience.
- path = ''
- id = ''
- import_context = None # type: List[Tuple[str, int]]
- manager = None # type: BuildManager
- # Modules that this file directly depends on (in no particular order).
- dependencies = None # type: List[str]
-
- def __init__(self, info: StateInfo) -> None:
- self.path = info.path
- self.id = info.id
- self.import_context = info.import_context
- self.manager = info.manager
- self.dependencies = []
-
- def info(self) -> StateInfo:
- return StateInfo(self.path, self.id, self.import_context, self.manager)
-
- def process(self) -> None:
- raise RuntimeError('Not implemented')
-
- def is_ready(self) -> bool:
- """Return True if all dependencies are at least in the same state
- as this object (but not in the initial state).
- """
- for module in self.dependencies:
- state = self.manager.module_state(module)
- if earlier_state(state,
- self.state()) or state == UNPROCESSED_STATE:
- return False
- return True
-
- def num_incomplete_deps(self) -> int:
- """Return the number of dependencies that are ready but incomplete."""
- return 0 # Does not matter in this state
-
- def state(self) -> int:
- raise RuntimeError('Not implemented')
-
- def switch_state(self, state_object: 'State') -> None:
- """Called by state objects to replace the state of the file.
-
- Also notify the manager.
- """
- for i in range(len(self.manager.states)):
- if self.manager.states[i].path == state_object.path:
- self.manager.states[i] = state_object
- return
- raise RuntimeError('State for {} not found'.format(state_object.path))
-
- def errors(self) -> Errors:
- return self.manager.errors
-
- def semantic_analyzer(self) -> SemanticAnalyzer:
- return self.manager.semantic_analyzer
-
- def semantic_analyzer_pass3(self) -> ThirdPass:
- return self.manager.semantic_analyzer_pass3
-
- def type_checker(self) -> TypeChecker:
- return self.manager.type_checker
-
- def fail(self, path: str, line: int, msg: str, blocker: bool = True) -> None:
- """Report an error in the build (e.g. if could not find a module)."""
- self.errors().set_file(path)
- self.errors().report(line, msg, blocker=blocker)
-
- def module_not_found(self, path: str, line: int, id: str) -> None:
- self.errors().set_file(path)
- stub_msg = "(Stub files are from https://github.com/python/typeshed)"
- if ((self.manager.pyversion[0] == 2 and moduleinfo.is_py2_std_lib_module(id)) or
- (self.manager.pyversion[0] >= 3 and moduleinfo.is_py3_std_lib_module(id))):
- self.errors().report(
- line, "No library stub file for standard library module '{}'".format(id))
- self.errors().report(line, stub_msg, severity='note', only_once=True)
- elif moduleinfo.is_third_party_module(id):
- self.errors().report(line, "No library stub file for module '{}'".format(id))
- self.errors().report(line, stub_msg, severity='note', only_once=True)
- else:
- self.errors().report(line, "Cannot find module named '{}'".format(id))
- self.errors().report(line, "(Perhaps setting MYPYPATH would help)", severity='note',
- only_once=True)
-
-
-class UnprocessedFile(State):
- def __init__(self, info: StateInfo, program_text: str) -> None:
- super().__init__(info)
- self.program_text = program_text
- self.silent = SILENT_IMPORTS in self.manager.flags
-
- def load_dependencies(self):
- # Add surrounding package(s) as dependencies.
- for p in super_packages(self.id):
- if p in self.manager.missing_modules:
- continue
- if not self.import_module(p):
- # Could not find a module. Typically the reason is a
- # misspelled module name, missing stub, module not in
- # search path or the module has not been installed.
- if self.silent:
- self.manager.missing_modules.add(p)
- else:
- self.module_not_found(self.path, 1, p)
- else:
- self.dependencies.append(p)
-
- def process(self) -> None:
- """Parse the file, store global names and advance to the next state."""
- if self.id in self.manager.semantic_analyzer.modules:
- self.fail(self.path, 1, "Duplicate module named '{}'".format(self.id))
- return
-
- tree = self.parse(self.program_text, self.path)
-
- # Store the parsed module in the shared module symbol table.
- self.manager.semantic_analyzer.modules[self.id] = tree
-
- if '.' in self.id:
- # Include module in the symbol table of the enclosing package.
- c = self.id.split('.')
- p = '.'.join(c[:-1])
- sem_anal = self.manager.semantic_analyzer
- if p in sem_anal.modules:
- sem_anal.modules[p].names[c[-1]] = SymbolTableNode(
- MODULE_REF, tree, p)
-
- if self.id != 'builtins':
- # The builtins module is imported implicitly in every program (it
- # contains definitions of int, print etc.).
- self.manager.trace('import builtins')
- if not self.import_module('builtins'):
- self.fail(self.path, 1, 'Could not find builtins')
-
- # Do the first pass of semantic analysis: add top-level definitions in
- # the file to the symbol table. We must do this before processing imports,
- # since this may mark some import statements as unreachable.
- first = FirstPass(self.semantic_analyzer())
- first.analyze(tree, self.path, self.id)
-
- # Add all directly imported modules to be processed (however they are
- # not processed yet, just waiting to be processed).
- for id, line in self.manager.all_imported_modules_in_file(tree):
- self.errors().push_import_context(self.path, line)
- try:
- res = self.import_module(id)
- finally:
- self.errors().pop_import_context()
- if not res:
- if id == '':
- # Must be from a relative import.
- self.fail(self.path, line,
- "No parent module -- cannot perform relative import".format(id),
- blocker=True)
- else:
- if (line not in tree.ignored_lines and
- 'import' not in tree.weak_opts and
- not self.silent):
- self.module_not_found(self.path, line, id)
- self.manager.missing_modules.add(id)
-
- # Initialize module symbol table, which was populated by the semantic
- # analyzer.
- tree.names = self.semantic_analyzer().globals
-
- # Replace this state object with a parsed state in BuildManager.
- self.switch_state(ParsedFile(self.info(), tree))
-
- def import_module(self, id: str) -> bool:
- """Schedule a module to be processed.
-
- Add an unprocessed state object corresponding to the module to the
- manager, or do nothing if the module already has a state object.
- """
- if self.manager.has_module(id):
- # Do nothing: already being compiled.
- return True
-
- if id == 'builtins' and self.manager.pyversion[0] == 2:
- # The __builtin__ module is called internally by mypy 'builtins' in Python 2 mode
- # (similar to Python 3), but the stub file is __builtin__.pyi. The reason is that
- # a lot of code hard codes 'builtins.x' and this it's easier to work it around like
- # this. It also means that the implementation can mostly ignore the difference and
- # just assume 'builtins' everywhere, which simplifies code.
- file_id = '__builtin__'
- else:
- file_id = id
- path, text = read_module_source_from_file(file_id, self.manager.lib_path,
- self.manager.pyversion, self.silent)
- if text is not None:
- info = StateInfo(path, id, self.errors().import_context(),
- self.manager)
- new_file = UnprocessedFile(info, text)
- self.manager.states.append(new_file)
- self.manager.module_files[id] = path
- new_file.load_dependencies()
- return True
- else:
- return False
-
- def parse(self, source_text: Union[str, bytes], fnam: str) -> MypyFile:
- """Parse the source of a file with the given name.
-
- Raise CompileError if there is a parse error.
- """
- num_errs = self.errors().num_messages()
- tree = parse.parse(source_text, fnam, self.errors(),
- pyversion=self.manager.pyversion,
- custom_typing_module=self.manager.custom_typing_module,
- implicit_any=self.manager.implicit_any)
- tree._fullname = self.id
- if self.errors().num_messages() != num_errs:
- self.errors().raise_error()
- return tree
-
- def state(self) -> int:
- return UNPROCESSED_STATE
-
-
-class ParsedFile(State):
- tree = None # type: MypyFile
-
- def __init__(self, info: StateInfo, tree: MypyFile) -> None:
- super().__init__(info)
- self.tree = tree
-
- # Build a list all directly imported moules (dependencies).
- imp = [] # type: List[str]
- for id, line in self.manager.all_imported_modules_in_file(tree):
- # Omit missing modules, as otherwise we could not type check
- # programs with missing modules.
- if id not in self.manager.missing_modules and id != self.id:
- imp.append(id)
- if self.id != 'builtins':
- imp.append('builtins')
-
- if imp != []:
- self.manager.trace('{} dependencies: {}'.format(info.path, imp))
-
- # Record the dependencies. Note that the dependencies list also
- # contains any superpackages and we must preserve them (e.g. os for
- # os.path).
- self.dependencies.extend(imp)
-
- def process(self) -> None:
- """Semantically analyze file and advance to the next state."""
- self.semantic_analyzer().visit_file(self.tree, self.tree.path)
- self.switch_state(PartiallySemanticallyAnalyzedFile(self.info(),
- self.tree))
-
- def num_incomplete_deps(self) -> int:
- """Return the number of dependencies that are incomplete.
-
- Here complete means that their state is *later* than this module.
- Cyclic dependencies are omitted to break cycles forcibly (and somewhat
- arbitrarily).
- """
- incomplete = 0
- for module in self.dependencies:
- state = self.manager.module_state(module)
- if (not earlier_state(self.state(), state) and
- not self.manager.is_dep(module, self.id)):
- incomplete += 1
- return incomplete
-
- def state(self) -> int:
- return PARSED_STATE
-
-
-class PartiallySemanticallyAnalyzedFile(ParsedFile):
- def process(self) -> None:
- """Perform final pass of semantic analysis and advance state."""
- self.semantic_analyzer_pass3().visit_file(self.tree, self.tree.path)
- if DUMP_TYPE_STATS in self.manager.flags:
- stats.dump_type_stats(self.tree, self.tree.path)
- self.switch_state(SemanticallyAnalyzedFile(self.info(), self.tree))
-
- def state(self) -> int:
- return PARTIAL_SEMANTIC_ANALYSIS_STATE
-
-
-class SemanticallyAnalyzedFile(ParsedFile):
- def process(self) -> None:
- """Type check file and advance to the next state."""
- if self.manager.target >= TYPE_CHECK:
- self.type_checker().visit_file(self.tree, self.tree.path)
- if DUMP_INFER_STATS in self.manager.flags:
- stats.dump_type_stats(self.tree, self.tree.path, inferred=True,
- typemap=self.manager.type_checker.type_map)
- self.manager.reports.file(self.tree, type_map=self.manager.type_checker.type_map)
-
- # FIX remove from active state list to speed up processing
-
- self.switch_state(TypeCheckedFile(self.info(), self.tree))
-
- def state(self) -> int:
- return SEMANTICALLY_ANALYSED_STATE
-
-
-class TypeCheckedFile(SemanticallyAnalyzedFile):
- def process(self) -> None:
- """Finished, so cannot process."""
- raise RuntimeError('Cannot process TypeCheckedFile')
-
- def is_ready(self) -> bool:
- """Finished, so cannot ever become ready."""
- return False
-
- def state(self) -> int:
- return TYPE_CHECKED_STATE
-
-
-def read_module_source_from_file(id: str,
- lib_path: Iterable[str],
- pyversion: Tuple[int, int],
- silent: bool) -> Tuple[Optional[str], Optional[str]]:
- """Find and read the source file of a module.
-
- Return a pair (path, file contents). Return (None, None) if the module
- could not be found or read.
-
- Args:
- id: module name, a string of form 'foo' or 'foo.bar'
- lib_path: library search path
- silent: if set, don't import .py files (only .pyi files)
- """
- path = find_module(id, lib_path)
- if path is not None:
- if silent and not path.endswith('.pyi'):
- return None, None
- try:
- text = read_with_python_encoding(path, pyversion)
- except IOError:
- return None, None
- return path, text
- else:
- return None, None
-
-
# Cache find_module: (id, lib_path) -> result.
find_module_cache = {} # type: Dict[Tuple[str, Tuple[str, ...]], str]
@@ -1061,23 +615,6 @@ def verify_module(id: str, path: str) -> bool:
return True
-def super_packages(id: str) -> List[str]:
- """Return the surrounding packages of a module, e.g. ['os'] for os.path."""
- c = id.split('.')
- res = [] # type: List[str]
- for i in range(1, len(c)):
- res.append('.'.join(c[:i]))
- return res
-
-
-def make_parent_dirs(path: str) -> None:
- parent = os.path.dirname(path)
- try:
- os.makedirs(parent)
- except OSError:
- pass
-
-
def read_with_python_encoding(path: str, pyversion: Tuple[int, int]) -> str:
"""Read the Python file with while obeying PEP-263 encoding detection"""
source_bytearray = bytearray()
@@ -1101,3 +638,915 @@ def read_with_python_encoding(path: str, pyversion: Tuple[int, int]) -> str:
source_bytearray.extend(f.read())
return source_bytearray.decode(encoding)
+
+
+MYPY_CACHE = '.mypy_cache'
+
+
+def get_cache_names(id: str, path: str, pyversion: Tuple[int, int]) -> Tuple[str, str]:
+ """Return the file names for the cache files.
+
+ Args:
+ id: module ID
+ path: module path (used to recognize packages)
+ pyversion: Python version (major, minor)
+
+ Returns:
+ A tuple with the file names to be used for the meta JSON and the
+ data JSON, respectively.
+ """
+ prefix = os.path.join(MYPY_CACHE, '%d.%d' % pyversion, *id.split('.'))
+ is_package = os.path.basename(path).startswith('__init__.py')
+ if is_package:
+ prefix = os.path.join(prefix, '__init__')
+ return (prefix + '.meta.json', prefix + '.data.json')
+
+
+def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[CacheMeta]:
+ """Find cache data for a module.
+
+ Args:
+ id: module ID
+ path: module path
+ manager: the build manager (for pyversion, log/trace, and build options)
+
+ Returns:
+ A CacheMeta instance if the cache data was found and appears
+ valid; otherwise None.
+ """
+ # TODO: May need to take more build options into account; in
+ # particular SILENT_IMPORTS may affect the cache dramatically.
+ meta_json, data_json = get_cache_names(id, path, manager.pyversion)
+ manager.trace('Looking for {} {}'.format(id, data_json))
+ if not os.path.exists(meta_json):
+ return None
+ with open(meta_json, 'r') as f:
+ meta_str = f.read()
+ manager.trace('Meta {} {}'.format(id, meta_str.rstrip()))
+ meta = json.loads(meta_str) # TODO: Errors
+ if not isinstance(meta, dict):
+ return None
+ path = os.path.abspath(path)
+ m = CacheMeta(
+ meta.get('id'),
+ meta.get('path'),
+ meta.get('mtime'),
+ meta.get('size'),
+ meta.get('dependencies', []),
+ meta.get('data_mtime'),
+ data_json,
+ meta.get('suppressed', []),
+ )
+ if (m.id != id or m.path != path or
+ m.mtime is None or m.size is None or
+ m.dependencies is None or m.data_mtime is None):
+ return None
+ # TODO: Share stat() outcome with find_module()
+ st = os.stat(path) # TODO: Errors
+ if st.st_mtime != m.mtime or st.st_size != m.size:
+ manager.log('Metadata abandoned because of modified file {}'.format(path))
+ return None
+ # It's a match on (id, path, mtime, size).
+ # Check data_json; assume if its mtime matches it's good.
+ # TODO: stat() errors
+ if os.path.getmtime(data_json) != m.data_mtime:
+ return None
+ manager.log('Found {} {}'.format(id, meta_json))
+ return m
+
+
+def random_string():
+ return binascii.hexlify(os.urandom(8)).decode('ascii')
+
+
+def write_cache(id: str, path: str, tree: MypyFile,
+ dependencies: List[str], suppressed: List[str],
+ manager: BuildManager) -> None:
+ """Write cache files for a module.
+
+ Args:
+ id: module ID
+ path: module path
+ tree: the fully checked module data
+ dependencies: module IDs on which this module depends
+ suppressed: module IDs which were suppressed as dependencies
+ manager: the build manager (for pyversion, log/trace)
+ """
+ path = os.path.abspath(path)
+ manager.trace('Dumping {} {}'.format(id, path))
+ st = os.stat(path) # TODO: Errors
+ mtime = st.st_mtime
+ size = st.st_size
+ meta_json, data_json = get_cache_names(id, path, manager.pyversion)
+ manager.log('Writing {} {} {}'.format(id, meta_json, data_json))
+ data = tree.serialize()
+ parent = os.path.dirname(data_json)
+ if not os.path.isdir(parent):
+ os.makedirs(parent)
+ assert os.path.dirname(meta_json) == parent
+ nonce = '.' + random_string()
+ data_json_tmp = data_json + nonce
+ meta_json_tmp = meta_json + nonce
+ with open(data_json_tmp, 'w') as f:
+ json.dump(data, f, indent=2, sort_keys=True)
+ f.write('\n')
+ data_mtime = os.path.getmtime(data_json_tmp)
+ meta = {'id': id,
+ 'path': path,
+ 'mtime': mtime,
+ 'size': size,
+ 'data_mtime': data_mtime,
+ 'dependencies': dependencies,
+ 'suppressed': suppressed,
+ }
+ with open(meta_json_tmp, 'w') as f:
+ json.dump(meta, f, sort_keys=True)
+ f.write('\n')
+ # TODO: On Windows, os.rename() may not be atomic, and we could
+ # use os.replace(). However that's new in Python 3.3.
+ os.rename(data_json_tmp, data_json)
+ os.rename(meta_json_tmp, meta_json)
+
+
+"""Dependency manager.
+
+Design
+======
+
+Ideally
+-------
+
+A. Collapse cycles (each SCC -- strongly connected component --
+ becomes one "supernode").
+
+B. Topologically sort nodes based on dependencies.
+
+C. Process from leaves towards roots.
+
+Wrinkles
+--------
+
+a. Need to parse source modules to determine dependencies.
+
+b. Processing order for modules within an SCC.
+
+c. Must order mtimes of files to decide whether to re-process; depends
+ on clock never resetting.
+
+d. from P import M; checks filesystem whether module P.M exists in
+ filesystem.
+
+e. Race conditions, where somebody modifies a file while we're
+ processing. I propose not to modify the algorithm to handle this,
+ but to detect when this could lead to inconsistencies. (For
+ example, when we decide on the dependencies based on cache
+ metadata, and then we decide to re-parse a file because of a stale
+ dependency, if the re-parsing leads to a different list of
+ dependencies we should warn the user or start over.)
+
+Steps
+-----
+
+1. For each explicitly given module find the source file location.
+
+2. For each such module load and check the cache metadata, and decide
+ whether it's valid.
+
+3. Now recursively (or iteratively) find dependencies and add those to
+ the graph:
+
+ - for cached nodes use the list of dependencies from the cache
+ metadata (this will be valid even if we later end up re-parsing
+ the same source);
+
+ - for uncached nodes parse the file and process all imports found,
+ taking care of (a) above.
+
+Step 3 should also address (d) above.
+
+Once step 3 terminates we have the entire dependency graph, and for
+each module we've either loaded the cache metadata or parsed the
+source code. (However, we may still need to parse those modules for
+which we have cache metadata but that depend, directly or indirectly,
+on at least one module for which the cache metadata is stale.)
+
+Now we can execute steps A-C from the first section. Finding SCCs for
+step A shouldn't be hard; there's a recipe here:
+http://code.activestate.com/recipes/578507/. There's also a plethora
+of topsort recipes, e.g. http://code.activestate.com/recipes/577413/.
+
+For single nodes, processing is simple. If the node was cached, we
+deserialize the cache data and fix up cross-references. Otherwise, we
+do semantic analysis followed by type checking. We also handle (c)
+above; if a module has valid cache data *but* any of its
+dependencies was processed from source, then the module should be
+processed from source.
+
+A relatively simple optimization (outside SCCs) we might do in the
+future is as follows: if a node's cache data is valid, but one or more
+of its dependencies are out of date so we have to re-parse the node
+from source, once we have fully type-checked the node, we can decide
+whether its symbol table actually changed compared to the cache data
+(by reading the cache data and comparing it to the data we would be
+writing). If there is no change we can declare the node up to date,
+and any node that depends (and for which we have cached data, and
+whose other dependencies are up to date) on it won't need to be
+re-parsed from source.
+
+Import cycles
+-------------
+
+Finally we have to decide how to handle (b), import cycles. Here
+we'll need a modified version of the original state machine
+(build.py), but we only need to do this per SCC, and we won't have to
+deal with changes to the list of nodes while we're processing it.
+
+If all nodes in the SCC have valid cache metadata and all dependencies
+outside the SCC are still valid, we can proceed as follows:
+
+ 1. Load cache data for all nodes in the SCC.
+
+ 2. Fix up cross-references for all nodes in the SCC.
+
+Otherwise, the simplest (but potentially slow) way to proceed is to
+invalidate all cache data in the SCC and re-parse all nodes in the SCC
+from source. We can do this as follows:
+
+ 1. Parse source for all nodes in the SCC.
+
+ 2. Semantic analysis for all nodes in the SCC.
+
+ 3. Type check all nodes in the SCC.
+
+(If there are more passes the process is the same -- each pass should
+be done for all nodes before starting the next pass for any nodes in
+the SCC.)
+
+We could process the nodes in the SCC in any order. For sentimental
+reasons, I've decided to process them in the reverse order in which we
+encountered them when originally constructing the graph. That's how
+the old build.py deals with cycles, and at least this reproduces the
+previous implementation more accurately.
+
+Can we do better than re-parsing all nodes in the SCC when any of its
+dependencies are out of date? It's doubtful. The optimization
+mentioned at the end of the previous section would require re-parsing
+and type-checking a node and then comparing its symbol table to the
+cached data; but because the node is part of a cycle we can't
+technically type-check it until the semantic analysis of all other
+nodes in the cycle has completed. (This is an important issue because
+Dropbox has a very large cycle in production code. But I'd like to
+deal with it later.)
+
+Additional wrinkles
+-------------------
+
+During implementation more wrinkles were found.
+
+- When a submodule of a package (e.g. x.y) is encountered, the parent
+ package (e.g. x) must also be loaded, but it is not strictly a
+ dependency. See State.add_ancestors() below.
+"""
+
+
+class ModuleNotFound(Exception):
+ """Control flow exception to signal that a module was not found."""
+
+
+class State:
+ """The state for a module.
+
+ The source is only used for the -c command line option; in that
+ case path is None. Otherwise source is None and path isn't.
+ """
+
+ manager = None # type: BuildManager
+ order_counter = 0 # Class variable
+ order = None # type: int # Order in which modules were encountered
+ id = None # type: str # Fully qualified module name
+ path = None # type: Optional[str] # Path to module source
+ xpath = None # type: str # Path or '<string>'
+ source = None # type: Optional[str] # Module source code
+ meta = None # type: Optional[CacheMeta]
+ data = None # type: Optional[str]
+ tree = None # type: Optional[MypyFile]
+ dependencies = None # type: List[str]
+ suppressed = None # type: List[str] # Suppressed/missing dependencies
+
+ # Map each dependency to the line number where it is first imported
+ dep_line_map = None # type: Dict[str, int]
+
+ # Parent package, its parent, etc.
+ ancestors = None # type: Optional[List[str]]
+
+ # List of (path, line number) tuples giving context for import
+ import_context = None # type: List[Tuple[str, int]]
+
+ # The State from which this module was imported, if any
+ caller_state = None # type: Optional[State]
+
+ # If caller_state is set, the line number in the caller where the import occurred
+ caller_line = 0
+
+ def __init__(self,
+ id: Optional[str],
+ path: Optional[str],
+ source: Optional[str],
+ manager: BuildManager,
+ caller_state: 'State' = None,
+ caller_line: int = 0,
+ ancestor_for: 'State' = None,
+ ) -> None:
+ assert id or path or source is not None, "Neither id, path nor source given"
+ self.manager = manager
+ State.order_counter += 1
+ self.order = State.order_counter
+ self.caller_state = caller_state
+ self.caller_line = caller_line
+ if caller_state:
+ self.import_context = caller_state.import_context[:]
+ self.import_context.append((caller_state.xpath, caller_line))
+ else:
+ self.import_context = []
+ self.id = id or '__main__'
+ if not path and source is None:
+ file_id = id
+ if id == 'builtins' and manager.pyversion[0] == 2:
+ # The __builtin__ module is called internally by mypy
+ # 'builtins' in Python 2 mode (similar to Python 3),
+ # but the stub file is __builtin__.pyi. The reason is
+ # that a lot of code hard-codes 'builtins.x' and it's
+ # easier to work it around like this. It also means
+ # that the implementation can mostly ignore the
+ # difference and just assume 'builtins' everywhere,
+ # which simplifies code.
+ file_id = '__builtin__'
+ path = find_module(file_id, manager.lib_path)
+ if path:
+ # In silent mode, don't import .py files, except from stubs.
+ if (SILENT_IMPORTS in manager.flags and
+ path.endswith('.py') and (caller_state or ancestor_for)):
+ # (Never silence builtins, even if it's a .py file;
+ # this can happen in tests!)
+ if (id != 'builtins' and
+ not ((caller_state and
+ caller_state.tree and
+ caller_state.tree.is_stub))):
+ if ALMOST_SILENT in manager.flags:
+ if ancestor_for:
+ self.skipping_ancestor(id, path, ancestor_for)
+ else:
+ self.skipping_module(id, path)
+ path = None
+ manager.missing_modules.add(id)
+ raise ModuleNotFound
+ else:
+ # Could not find a module. Typically the reason is a
+ # misspelled module name, missing stub, module not in
+ # search path or the module has not been installed.
+ if caller_state:
+ suppress_message = ((SILENT_IMPORTS in manager.flags and
+ ALMOST_SILENT not in manager.flags) or
+ (caller_state.tree is not None and
+ (caller_line in caller_state.tree.ignored_lines or
+ 'import' in caller_state.tree.weak_opts)))
+ if not suppress_message:
+ save_import_context = manager.errors.import_context()
+ manager.errors.set_import_context(caller_state.import_context)
+ manager.module_not_found(caller_state.xpath, caller_line, id)
+ manager.errors.set_import_context(save_import_context)
+ manager.missing_modules.add(id)
+ raise ModuleNotFound
+ else:
+ # If we can't find a root source it's always fatal.
+ # TODO: This might hide non-fatal errors from
+ # root sources processed earlier.
+ raise CompileError(["mypy: can't find module '%s'" % id])
+ self.path = path
+ self.xpath = path or '<string>'
+ self.source = source
+ if path and source is None and INCREMENTAL in manager.flags:
+ self.meta = find_cache_meta(self.id, self.path, manager)
+ # TODO: Get mtime if not cached.
+ self.add_ancestors()
+ if self.meta:
+ # Make copies, since we may modify these and want to
+ # compare them to the originals later.
+ self.dependencies = list(self.meta.dependencies)
+ self.suppressed = list(self.meta.suppressed)
+ self.dep_line_map = {}
+ else:
+ # Parse the file (and then some) to get the dependencies.
+ self.parse_file()
+ self.suppressed = []
+
+ def skipping_ancestor(self, id: str, path: str, ancestor_for: 'State') -> None:
+ # TODO: Read the path (the __init__.py file) and return
+ # immediately if it's empty or only contains comments.
+ # But beware, some package may be the ancestor of many modules,
+ # so we'd need to cache the decision.
+ manager = self.manager
+ manager.errors.set_import_context([])
+ manager.errors.set_file(ancestor_for.xpath)
+ manager.errors.report(-1, "Ancestor package '%s' silently ignored" % (id,),
+ severity='note', only_once=True)
+ manager.errors.report(-1, "(Using --silent-imports, submodule passed on command line)",
+ severity='note', only_once=True)
+ manager.errors.report(-1, "(This note brought to you by --almost-silent)",
+ severity='note', only_once=True)
+
+ def skipping_module(self, id: str, path: str) -> None:
+ assert self.caller_state, (id, path)
+ manager = self.manager
+ save_import_context = manager.errors.import_context()
+ manager.errors.set_import_context(self.caller_state.import_context)
+ manager.errors.set_file(self.caller_state.xpath)
+ line = self.caller_line
+ manager.errors.report(line, "Import of '%s' silently ignored" % (id,),
+ severity='note')
+ manager.errors.report(line, "(Using --silent-imports, module not passed on command line)",
+ severity='note', only_once=True)
+ manager.errors.report(line, "(This note courtesy of --almost-silent)",
+ severity='note', only_once=True)
+ manager.errors.set_import_context(save_import_context)
+
+ def add_ancestors(self) -> None:
+ # All parent packages are new ancestors.
+ ancestors = []
+ parent = self.id
+ while '.' in parent:
+ parent, _ = parent.rsplit('.', 1)
+ ancestors.append(parent)
+ self.ancestors = ancestors
+
+ def is_fresh(self) -> bool:
+ """Return whether the cache data for this file is fresh."""
+ # NOTE: self.dependencies may differ from
+ # self.meta.dependencies when a dependency is dropped due to
+ # suppression by --silent-imports. However when a suppressed
+ # dependency is added back we find out later in the process.
+ return self.meta is not None and self.dependencies == self.meta.dependencies
+
    def mark_stale(self) -> None:
        """Throw away the cache data for this file, marking it as stale."""
        # Clearing self.meta makes is_fresh() return False for this file.
        self.meta = None
+
    def check_blockers(self) -> None:
        """Raise CompileError if a blocking error is detected."""
        # Blocking errors make further processing of this module
        # pointless; raise_error() aborts via CompileError.
        if self.manager.errors.is_blockers():
            self.manager.log("Bailing due to blocking errors")
            self.manager.errors.raise_error()
+
    @contextlib.contextmanager
    def wrap_context(self) -> Iterator[None]:
        """Temporarily install this module's import context around a block.

        CompileError propagates unchanged; any other exception is routed
        to report_internal_error().  After the block, the previous import
        context is restored and check_blockers() may raise CompileError.
        NOTE: restoration is skipped when CompileError propagates, since
        there is no finally clause here.
        """
        save_import_context = self.manager.errors.import_context()
        self.manager.errors.set_import_context(self.import_context)
        try:
            yield
        except CompileError:
            raise
        except Exception as err:
            report_internal_error(err, self.path, 0)
        self.manager.errors.set_import_context(save_import_context)
        self.check_blockers()
+
+ # Methods for processing cached modules.
+
+ def load_tree(self) -> None:
+ with open(self.meta.data_json) as f:
+ data = json.load(f)
+ # TODO: Assert data file wasn't changed.
+ self.tree = MypyFile.deserialize(data)
+ self.manager.modules[self.id] = self.tree
+
    def fix_cross_refs(self) -> None:
        """Run the first fixup pass over the deserialized tree (cross-references)."""
        fixup_module_pass_one(self.tree, self.manager.modules)
+
    def calculate_mros(self) -> None:
        """Run the second fixup pass over the deserialized tree (method resolution orders)."""
        fixup_module_pass_two(self.tree, self.manager.modules)
+
+ # Methods for processing modules from source code.
+
    def parse_file(self) -> None:
        """Parse the file, run semantic analysis pass one, and compute dependencies.

        Sets self.tree, registers it in manager.modules, and fills in
        self.dependencies, self.suppressed and self.dep_line_map.
        Raises CompileError if the file can't be read or decoded, or if
        blocking errors were found.  A no-op if the tree already exists.
        """
        if self.tree is not None:
            # The file was already parsed (in __init__()).
            return

        manager = self.manager
        modules = manager.modules
        manager.log("Parsing %s (%s)" % (self.xpath, self.id))

        with self.wrap_context():
            source = self.source
            self.source = None  # We won't need it again.
            if self.path and source is None:
                try:
                    source = read_with_python_encoding(self.path, manager.pyversion)
                except IOError as ioerr:
                    raise CompileError([
                        "mypy: can't read file '{}': {}".format(self.path, ioerr.strerror)])
                except UnicodeDecodeError as decodeerr:
                    raise CompileError([
                        "mypy: can't decode file '{}': {}".format(self.path, str(decodeerr))])
            self.tree = manager.parse_file(self.id, self.xpath, source)

        modules[self.id] = self.tree

        # Do the first pass of semantic analysis: add top-level
        # definitions in the file to the symbol table. We must do
        # this before processing imports, since this may mark some
        # import statements as unreachable.
        first = FirstPass(manager.semantic_analyzer)
        first.analyze(self.tree, self.xpath, self.id)

        # Initialize module symbol table, which was populated by the
        # semantic analyzer.
        # TODO: Why can't FirstPass .analyze() do this?
        self.tree.names = manager.semantic_analyzer.globals

        # Compute (direct) dependencies.
        # Add all direct imports (this is why we needed the first pass).
        # Also keep track of each dependency's source line.
        dependencies = []
        suppressed = []
        dep_line_map = {}  # type: Dict[str, int]  # id -> line
        for id, line in manager.all_imported_modules_in_file(self.tree):
            # A module never depends on itself.
            if id == self.id:
                continue
            # Omit missing modules, as otherwise we could not type-check
            # programs with missing modules.
            if id in manager.missing_modules:
                if id not in dep_line_map:
                    suppressed.append(id)
                    dep_line_map[id] = line
                continue
            if id == '':
                # Must be from a relative import.
                manager.errors.set_file(self.xpath)
                manager.errors.report(line, "No parent module -- cannot perform relative import",
                                      blocker=True)
                continue
            # Record only the first import line for each dependency.
            if id not in dep_line_map:
                dependencies.append(id)
                dep_line_map[id] = line
        # Every module implicitly depends on builtins.
        if self.id != 'builtins' and 'builtins' not in dep_line_map:
            dependencies.append('builtins')

        # If self.dependencies is already set, it was read from the
        # cache, but for some reason we're re-parsing the file.
        # NOTE: What to do about race conditions (like editing the
        # file while mypy runs)? A previous version of this code
        # explicitly checked for this, but ran afoul of other reasons
        # for differences (e.g. --silent-imports).
        self.dependencies = dependencies
        self.suppressed = suppressed
        self.dep_line_map = dep_line_map
        self.check_blockers()
+
+ def patch_parent(self) -> None:
+ # Include module in the symbol table of the enclosing package.
+ if '.' not in self.id:
+ return
+ manager = self.manager
+ modules = manager.modules
+ parent, child = self.id.rsplit('.', 1)
+ if parent in modules:
+ manager.trace("Added %s.%s" % (parent, child))
+ modules[parent].names[child] = SymbolTableNode(MODULE_REF, self.tree, parent)
+ else:
+ manager.log("Hm... couldn't add %s.%s" % (parent, child))
+
    def semantic_analysis(self) -> None:
        """Run the main semantic analyzer pass over this module's tree."""
        with self.wrap_context():
            self.manager.semantic_analyzer.visit_file(self.tree, self.xpath)
+
    def semantic_analysis_pass_three(self) -> None:
        """Run semantic analysis pass three; optionally dump type stats."""
        with self.wrap_context():
            self.manager.semantic_analyzer_pass3.visit_file(self.tree, self.xpath)
            if DUMP_TYPE_STATS in self.manager.flags:
                dump_type_stats(self.tree, self.xpath)
+
    def type_check(self) -> None:
        """Type check this module's tree, unless the build target stops earlier."""
        manager = self.manager
        if manager.target < TYPE_CHECK:
            return
        with self.wrap_context():
            manager.type_checker.visit_file(self.tree, self.xpath)
            # Optionally dump inferred-type statistics for debugging.
            if DUMP_INFER_STATS in manager.flags:
                dump_type_stats(self.tree, self.xpath, inferred=True,
                                typemap=manager.type_checker.type_map)
            manager.report_file(self.tree)
+
    def write_cache(self) -> None:
        """Write this module's cache data, if incremental mode allows it.

        Skipped when there is no source path, when INCREMENTAL is off,
        or when any errors were reported (a partial cache would be bad).
        """
        if self.path and INCREMENTAL in self.manager.flags and not self.manager.errors.is_errors():
            # This calls the module-level write_cache() function, whose
            # name this method shadows.
            write_cache(self.id, self.path, self.tree,
                        list(self.dependencies), list(self.suppressed),
                        self.manager)
+
+
+Graph = Dict[str, State]
+
+
def dispatch(sources: List[BuildSource], manager: BuildManager) -> None:
    """Top-level driver: build the dependency graph, then process it."""
    manager.log("Using new dependency manager")
    dep_graph = load_graph(sources, manager)
    manager.log("Loaded graph with %d nodes" % len(dep_graph))
    process_graph(dep_graph, manager)
+
+
def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
    """Given some source files, load the full dependency graph.

    Returns a map from module id to State.  Root sources that can't be
    found are skipped; dependencies that can't be found are moved from
    the importer's dependencies to its suppressed list.  Duplicate root
    module ids abort the build via errors.raise_error().
    """
    graph = {}  # type: Graph
    # The deque is used to implement breadth-first traversal.
    # TODO: Consider whether to go depth-first instead. This may
    # affect the order in which we process files within import cycles.
    new = collections.deque()  # type: collections.deque[State]
    # Seed the graph with the initial root sources.
    for bs in sources:
        try:
            st = State(id=bs.module, path=bs.path, source=bs.text, manager=manager)
        except ModuleNotFound:
            continue
        if st.id in graph:
            manager.errors.set_file(st.xpath)
            manager.errors.report(1, "Duplicate module named '%s'" % st.id)
            manager.errors.raise_error()
        graph[st.id] = st
        new.append(st)
    # Collect dependencies. We go breadth-first.
    while new:
        st = new.popleft()
        for dep in st.ancestors + st.dependencies:
            if dep not in graph:
                try:
                    if dep in st.ancestors:
                        # TODO: Why not 'if dep not in st.dependencies' ?
                        # Ancestors don't have import context.
                        newst = State(id=dep, path=None, source=None, manager=manager,
                                      ancestor_for=st)
                    else:
                        newst = State(id=dep, path=None, source=None, manager=manager,
                                      caller_state=st, caller_line=st.dep_line_map.get(dep, 1))
                except ModuleNotFound:
                    if dep in st.dependencies:
                        # Demote the unfindable dependency to "suppressed".
                        st.dependencies.remove(dep)
                        st.suppressed.append(dep)
                else:
                    assert newst.id not in graph, newst.id
                    graph[newst.id] = newst
                    new.append(newst)
    return graph
+
+
def process_graph(graph: Graph, manager: BuildManager) -> None:
    """Process everything in dependency order.

    For each strongly connected component (SCC), decide whether its
    cached data is fresh; fresh SCCs are loaded from cache, stale ones
    are processed from source.
    """
    sccs = sorted_components(graph)
    manager.log("Found %d SCCs; largest has %d nodes" %
                (len(sccs), max(len(scc) for scc in sccs)))
    # We're processing SCCs from leaves (those without further
    # dependencies) to roots (those from which everything else can be
    # reached).
    for ascc in sccs:
        # Sort the SCC's nodes in *reverse* order of encounter.
        # This is a heuristic for handling import cycles.
        # Note that ascc is a set, and scc is a list.
        scc = sorted(ascc, key=lambda id: -graph[id].order)
        # If builtins is in the list, move it last. (This is a bit of
        # a hack, but it's necessary because the builtins module is
        # part of a small cycle involving at least {builtins, abc,
        # typing}. Of these, builtins must be processed last or else
        # some builtin objects will be incompletely processed.)
        if 'builtins' in ascc:
            scc.remove('builtins')
            scc.append('builtins')
        # Because the SCCs are presented in topological sort order, we
        # don't need to look at dependencies recursively for staleness
        # -- the immediate dependencies are sufficient.
        stale_scc = {id for id in scc if not graph[id].is_fresh()}
        fresh = not stale_scc
        deps = set()
        for id in scc:
            deps.update(graph[id].dependencies)
        deps -= ascc
        stale_deps = {id for id in deps if not graph[id].is_fresh()}
        fresh = fresh and not stale_deps
        undeps = set()
        if fresh:
            # Check if any dependencies that were suppressed according
            # to the cache have been added back in this run.
            # NOTE: Newly suppressed dependencies are handled by is_fresh().
            for id in scc:
                undeps.update(graph[id].suppressed)
            undeps &= graph.keys()
            if undeps:
                fresh = False
        if fresh:
            # All cache files are fresh. Check that no dependency's
            # cache file is newer than any scc node's cache file.
            oldest_in_scc = min(graph[id].meta.data_mtime for id in scc)
            newest_in_deps = 0 if not deps else max(graph[dep].meta.data_mtime for dep in deps)
            if manager.flags.count(VERBOSE) >= 2:  # Dump all mtimes for extreme debugging.
                all_ids = sorted(ascc | deps, key=lambda id: graph[id].meta.data_mtime)
                for id in all_ids:
                    if id in scc:
                        if graph[id].meta.data_mtime < newest_in_deps:
                            key = "*id:"
                        else:
                            key = "id:"
                    else:
                        if graph[id].meta.data_mtime > oldest_in_scc:
                            key = "+dep:"
                        else:
                            key = "dep:"
                    manager.trace(" %5s %.0f %s" % (key, graph[id].meta.data_mtime, id))
            # If equal, give the benefit of the doubt, due to 1-sec time granularity
            # (on some platforms).
            if oldest_in_scc < newest_in_deps:
                fresh = False
                fresh_msg = "out of date by %.0f seconds" % (newest_in_deps - oldest_in_scc)
            else:
                fresh_msg = "fresh"
        elif undeps:
            fresh_msg = "stale due to changed suppression (%s)" % " ".join(sorted(undeps))
        elif stale_scc:
            fresh_msg = "inherently stale (%s)" % " ".join(sorted(stale_scc))
            if stale_deps:
                fresh_msg += " with stale deps (%s)" % " ".join(sorted(stale_deps))
        else:
            fresh_msg = "stale due to deps (%s)" % " ".join(sorted(stale_deps))
        if len(scc) == 1:
            # Fix typo in log message: "sigleton" -> "singleton".
            manager.log("Processing SCC singleton (%s) as %s" % (" ".join(scc), fresh_msg))
        else:
            manager.log("Processing SCC of size %d (%s) as %s" %
                        (len(scc), " ".join(scc), fresh_msg))
        if fresh:
            process_fresh_scc(graph, scc)
        else:
            process_stale_scc(graph, scc)
+
+
def process_fresh_scc(graph: Graph, scc: List[str]) -> None:
    """Process the modules in one SCC from their cached data."""
    # Each phase must complete for the whole SCC before the next phase
    # starts, because modules in a cycle refer to one another.
    for phase in ('load_tree', 'patch_parent', 'fix_cross_refs', 'calculate_mros'):
        for module_id in scc:
            getattr(graph[module_id], phase)()
+
+
def process_stale_scc(graph: Graph, scc: List[str]) -> None:
    """Process the modules in one SCC from source code."""
    # Invalidate cached data first so nothing stale is used below.
    for module_id in scc:
        graph[module_id].mark_stale()
    for module_id in scc:
        # We may already have parsed the module, or not.
        # If the former, parse_file() is a no-op.
        graph[module_id].parse_file()
    for module_id in scc:
        graph[module_id].patch_parent()
    for module_id in scc:
        graph[module_id].semantic_analysis()
    for module_id in scc:
        graph[module_id].semantic_analysis_pass_three()
    for module_id in scc:
        graph[module_id].type_check()
        graph[module_id].write_cache()
+
+
def sorted_components(graph: Graph) -> List[AbstractSet[str]]:
    """Return the graph's SCCs, topologically sorted by dependencies.

    The sort order is from leaves (nodes without dependencies) to
    roots (nodes on which no other nodes depend).

    This works for a subset of the full dependency graph too;
    dependencies that aren't present in graph.keys() are ignored.
    """
    # Compute the strongly connected components, restricting edges to
    # dependencies actually present in the graph.
    vertices = set(graph)
    edges = {}  # type: Dict[str, List[str]]
    for module_id, state in graph.items():
        edges[module_id] = [dep for dep in state.dependencies if dep in graph]
    sccs = list(strongly_connected_components(vertices, edges))
    # Topologically sort the SCCs: map each module to its component,
    # then compute each component's set of prerequisite components.
    component_of = {}  # type: Dict[str, AbstractSet[str]]
    for scc in sccs:
        frozen = frozenset(scc)
        for module_id in scc:
            component_of[module_id] = frozen
    scc_deps = {}  # type: Dict[AbstractSet[str], Set[AbstractSet[str]]]
    for scc in sccs:
        prereqs = set()  # type: Set[AbstractSet[str]]
        for module_id in scc:
            prereqs.update(component_of[dep]
                           for dep in graph[module_id].dependencies
                           if dep in graph)
        scc_deps[frozenset(scc)] = prereqs
    result = []  # type: List[AbstractSet[str]]
    for ready in topsort(scc_deps):
        # Sort the sets in ready by reversed smallest State.order. Examples:
        #
        # - If ready is [{x}, {y}], x.order == 1, y.order == 2, we get
        #   [{y}, {x}].
        #
        # - If ready is [{a, b}, {c, d}], a.order == 1, b.order == 3,
        #   c.order == 2, d.order == 4, the sort keys become [1, 2]
        #   and the result is [{c, d}, {a, b}].
        result.extend(sorted(ready,
                             key=lambda scc: -min(graph[id].order for id in scc)))
    return result
+
+
def strongly_connected_components(vertices: Set[str],
                                  edges: Dict[str, List[str]]) -> Iterator[Set[str]]:
    """Compute Strongly Connected Components of a directed graph.

    Args:
      vertices: the labels for the vertices
      edges: for each vertex, gives the target vertices of its outgoing edges

    Returns:
      An iterator yielding strongly connected components, each
      represented as a set of vertices.  Each input vertex will occur
      exactly once; vertices not part of a SCC are returned as
      singleton sets.

    From http://code.activestate.com/recipes/578507/.
    """
    assigned = set()  # type: Set[str]
    visit_stack = []  # type: List[str]
    position = {}  # type: Dict[str, int]
    boundary_stack = []  # type: List[int]

    def visit(node: str) -> Iterator[Set[str]]:
        position[node] = len(visit_stack)
        visit_stack.append(node)
        boundary_stack.append(position[node])

        for succ in edges[node]:
            if succ not in position:
                # For Python >= 3.3 this could be "yield from visit(succ)".
                for component in visit(succ):
                    yield component
            elif succ not in assigned:
                # Found a back edge into the current path; merge boundaries.
                while position[succ] < boundary_stack[-1]:
                    boundary_stack.pop()

        if boundary_stack[-1] == position[node]:
            # node is the root of a component: pop it off the stack.
            boundary_stack.pop()
            component = set(visit_stack[position[node]:])
            del visit_stack[position[node]:]
            assigned.update(component)
            yield component

    for root in vertices:
        if root not in position:
            # For Python >= 3.3 this could be "yield from visit(root)".
            for component in visit(root):
                yield component
+
+
def topsort(data: Dict[AbstractSet[str],
                       Set[AbstractSet[str]]]) -> Iterable[Set[AbstractSet[str]]]:
    """Topological sort.

    Args:
      data: A map from SCCs (represented as frozen sets of strings) to
            sets of SCCs, its dependencies.  NOTE: This data structure
            is modified in place -- for normalization purposes,
            self-dependencies are removed and entries representing
            orphans are added.

    Returns:
      An iterator yielding sets of SCCs that have an equivalent
      ordering.  NOTE: The algorithm doesn't care about the internal
      structure of SCCs.

    Example:
      Suppose the input has the following structure:

        {A: {B, C}, B: {D}, C: {D}}

      This is normalized to:

        {A: {B, C}, B: {D}, C: {D}, D: {}}

      The algorithm will yield the following values:

        {D}
        {B, C}
        {A}

    From http://code.activestate.com/recipes/577413/.
    """
    # TODO: Use a faster algorithm?
    for k, v in data.items():
        v.discard(k)  # Ignore self dependencies.
    # BUGFIX: use set().union(...) instead of set.union(*data.values());
    # the latter raises TypeError when data is empty (no positional
    # sets to unpack).  set().union() with no arguments is just set().
    for item in set().union(*data.values()) - set(data.keys()):
        data[item] = set()
    while True:
        ready = {item for item, dep in data.items() if not dep}
        if not ready:
            break
        yield ready
        data = {item: (dep - ready)
                for item, dep in data.items()
                if item not in ready}
    assert not data, "A cyclic dependency exists amongst %r" % data
diff --git a/mypy/checker.py b/mypy/checker.py
index 6fdc775..2fcb9d4 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -1,14 +1,15 @@
"""Mypy type checker."""
import itertools
+import contextlib
from typing import (
- Any, Dict, Set, List, cast, Tuple, Callable, TypeVar, Union, Optional, NamedTuple
+ Any, Dict, Set, List, cast, Tuple, TypeVar, Union, Optional, NamedTuple
)
from mypy.errors import Errors, report_internal_error
from mypy.nodes import (
- SymbolTable, Node, MypyFile, LDEF, Var,
+ SymbolTable, Node, MypyFile, Var,
OverloadedFuncDef, FuncDef, FuncItem, FuncBase, TypeInfo,
ClassDef, GDEF, Block, AssignmentStmt, NameExpr, MemberExpr, IndexExpr,
TupleExpr, ListExpr, ExpressionStmt, ReturnStmt, IfStmt,
@@ -17,7 +18,7 @@ from mypy.nodes import (
BytesExpr, UnicodeExpr, FloatExpr, OpExpr, UnaryExpr, CastExpr, SuperExpr,
TypeApplication, DictExpr, SliceExpr, FuncExpr, TempNode, SymbolTableNode,
Context, ListComprehension, ConditionalExpr, GeneratorExpr,
- Decorator, SetExpr, PassStmt, TypeVarExpr, PrintStmt,
+ Decorator, SetExpr, TypeVarExpr, PrintStmt,
LITERAL_TYPE, BreakStmt, ContinueStmt, ComparisonExpr, StarExpr,
YieldFromExpr, NamedTupleExpr, SetComprehension,
DictionaryComprehension, ComplexExpr, EllipsisExpr, TypeAliasExpr,
@@ -28,12 +29,13 @@ from mypy.nodes import function_type, method_type, method_type_with_fallback
from mypy import nodes
from mypy.types import (
Type, AnyType, CallableType, Void, FunctionLike, Overloaded, TupleType,
- Instance, NoneTyp, UnboundType, ErrorType, TypeTranslator, strip_type,
+ Instance, NoneTyp, ErrorType, strip_type,
UnionType, TypeVarType, PartialType, DeletedType
)
from mypy.sametypes import is_same_type
from mypy.messages import MessageBuilder
import mypy.checkexpr
+from mypy.checkmember import map_type_from_supertype
from mypy import defaults
from mypy import messages
from mypy.subtypes import (
@@ -294,6 +296,12 @@ class ConditionalTypeBinder:
def pop_loop_frame(self):
self.loop_frames.pop()
+ def __enter__(self) -> None:
+ self.push_frame()
+
+ def __exit__(self, *args: Any) -> None:
+ self.pop_frame()
+
def meet_frames(*frames: Frame) -> Frame:
answer = Frame()
@@ -327,8 +335,6 @@ class TypeChecker(NodeVisitor[Type]):
is_stub = False
# Error message reporter
errors = None # type: Errors
- # SymbolNode table for the whole program
- symtable = None # type: SymbolTable
# Utility for generating messages
msg = None # type: MessageBuilder
# Types of type checked nodes
@@ -354,7 +360,6 @@ class TypeChecker(NodeVisitor[Type]):
# Stack of collections of variables with partial types
partial_types = None # type: List[Dict[Var, Context]]
globals = None # type: SymbolTable
- locals = None # type: SymbolTable
modules = None # type: Dict[str, MypyFile]
# Nodes that couldn't be checked because some types weren't available. We'll run
# another pass and try these again.
@@ -364,15 +369,21 @@ class TypeChecker(NodeVisitor[Type]):
# Have we deferred the current function? If yes, don't infer additional
# types during this pass within the function.
current_node_deferred = False
+ # This makes it an error to call an untyped function from a typed one
+ disallow_untyped_calls = False
+ # This makes it an error to define an untyped or partially-typed function
+ disallow_untyped_defs = False
+ # Should we check untyped function defs?
+ check_untyped_defs = False
def __init__(self, errors: Errors, modules: Dict[str, MypyFile],
- pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION) -> None:
+ pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
+ disallow_untyped_calls=False, disallow_untyped_defs=False,
+ check_untyped_defs=False) -> None:
"""Construct a type checker.
- Use errors to report type check errors. Assume symtable has been
- populated by the semantic analyzer.
+ Use errors to report type check errors.
"""
- self.expr_checker
self.errors = errors
self.modules = modules
self.pyversion = pyversion
@@ -390,6 +401,9 @@ class TypeChecker(NodeVisitor[Type]):
self.deferred_nodes = []
self.pass_num = 0
self.current_node_deferred = False
+ self.disallow_untyped_calls = disallow_untyped_calls
+ self.disallow_untyped_defs = disallow_untyped_defs
+ self.check_untyped_defs = check_untyped_defs
def visit_file(self, file_node: MypyFile, path: str) -> None:
"""Type check a mypy file with the given path."""
@@ -398,7 +412,6 @@ class TypeChecker(NodeVisitor[Type]):
self.errors.set_file(path)
self.errors.set_ignored_lines(file_node.ignored_lines)
self.globals = file_node.names
- self.locals = None
self.weak_opts = file_node.weak_opts
self.enter_partial_types()
@@ -463,9 +476,7 @@ class TypeChecker(NodeVisitor[Type]):
changed, _ = self.binder.pop_frame(True, True)
self.breaking_out = False
if not repeat_till_fixed or not changed:
- break
-
- return answer
+ return answer
#
# Definitions
@@ -494,7 +505,7 @@ class TypeChecker(NodeVisitor[Type]):
sig1 = self.function_type(item.func)
sig2 = self.function_type(item2.func)
if is_unsafe_overlapping_signatures(sig1, sig2):
- self.msg.overloaded_signatures_overlap(i + 1, j + 2,
+ self.msg.overloaded_signatures_overlap(i + 1, i + j + 2,
item.func)
def is_generator_return_type(self, typ: Type) -> bool:
@@ -587,10 +598,10 @@ class TypeChecker(NodeVisitor[Type]):
self.fail(messages.INCOMPATIBLE_REDEFINITION, defn)
else:
# TODO: Update conditional type binder.
- self.check_subtype(orig_type, new_type, defn,
+ self.check_subtype(new_type, orig_type, defn,
messages.INCOMPATIBLE_REDEFINITION,
- 'original type',
- 'redefinition with type')
+ 'redefinition with type',
+ 'original type')
def check_func_item(self, defn: FuncItem,
type_override: CallableType = None,
@@ -647,8 +658,6 @@ class TypeChecker(NodeVisitor[Type]):
else:
fdef = None
- self.enter()
-
if fdef:
# Check if __init__ has an invalid, non-None return type.
if (fdef.info and fdef.name() == '__init__' and
@@ -657,6 +666,19 @@ class TypeChecker(NodeVisitor[Type]):
self.fail(messages.INIT_MUST_HAVE_NONE_RETURN_TYPE,
item.type)
+ if self.disallow_untyped_defs:
+ # Check for functions with unspecified/not fully specified types.
+ def is_implicit_any(t: Type) -> bool:
+ return isinstance(t, AnyType) and t.implicit
+
+ if fdef.type is None:
+ self.fail(messages.FUNCTION_TYPE_EXPECTED, fdef)
+ elif isinstance(fdef.type, CallableType):
+ if is_implicit_any(fdef.type.ret_type):
+ self.fail(messages.RETURN_TYPE_EXPECTED, fdef)
+ if any(is_implicit_any(t) for t in fdef.type.arg_types):
+ self.fail(messages.ARGUMENT_TYPE_EXPECTED, fdef)
+
if name in nodes.reverse_op_method_set:
self.check_reverse_op_method(item, typ, name)
elif name == '__getattr__':
@@ -718,7 +740,6 @@ class TypeChecker(NodeVisitor[Type]):
self.return_types.pop()
- self.leave()
self.binder = old_binder
def check_reverse_op_method(self, defn: FuncItem, typ: CallableType,
@@ -937,15 +958,14 @@ class TypeChecker(NodeVisitor[Type]):
original_type = base_attr.type
if original_type is None and isinstance(base_attr.node,
FuncDef):
- original_type = self.function_type(cast(FuncDef,
- base_attr.node))
+ original_type = self.function_type(base_attr.node)
if isinstance(original_type, FunctionLike):
original = map_type_from_supertype(
method_type(original_type),
defn.info, base)
# Check that the types are compatible.
# TODO overloaded signatures
- self.check_override(cast(FunctionLike, typ),
+ self.check_override(typ,
cast(FunctionLike, original),
defn.name(),
name,
@@ -1059,16 +1079,16 @@ class TypeChecker(NodeVisitor[Type]):
second = base2[name]
first_type = first.type
if first_type is None and isinstance(first.node, FuncDef):
- first_type = self.function_type(cast(FuncDef, first.node))
+ first_type = self.function_type(first.node)
second_type = second.type
if second_type is None and isinstance(second.node, FuncDef):
- second_type = self.function_type(cast(FuncDef, second.node))
+ second_type = self.function_type(second.node)
# TODO: What if some classes are generic?
if (isinstance(first_type, FunctionLike) and
isinstance(second_type, FunctionLike)):
# Method override
- first_sig = method_type(cast(FunctionLike, first_type))
- second_sig = method_type(cast(FunctionLike, second_type))
+ first_sig = method_type(first_type)
+ second_sig = method_type(second_type)
ok = is_subtype(first_sig, second_sig)
elif first_type and second_type:
ok = is_equivalent(first_type, second_type)
@@ -1123,6 +1143,8 @@ class TypeChecker(NodeVisitor[Type]):
if len(s.lvalues) > 1:
# Chained assignment (e.g. x = y = ...).
# Make sure that rvalue type will not be reinferred.
+ if s.rvalue not in self.type_map:
+ self.accept(s.rvalue)
rvalue = self.temp_node(self.type_map[s.rvalue], s)
for lv in s.lvalues[:-1]:
self.check_assignment(lv, rvalue, s.type is None)
@@ -1224,9 +1246,6 @@ class TypeChecker(NodeVisitor[Type]):
msg: str = None) -> None:
"""Check the assignment of one rvalue to a number of lvalues."""
- if not msg:
- msg = messages.INCOMPATIBLE_TYPES_IN_ASSIGNMENT
-
# Infer the type of an ordinary rvalue expression.
rvalue_type = self.accept(rvalue) # TODO maybe elsewhere; redundant
undefined_rvalue = False
@@ -1237,7 +1256,7 @@ class TypeChecker(NodeVisitor[Type]):
lv = lv.expr
self.check_assignment(lv, self.temp_node(AnyType(), context), infer_lvalue_type)
elif isinstance(rvalue_type, TupleType):
- self.check_multi_assignment_from_tuple(lvalues, rvalue, cast(TupleType, rvalue_type),
+ self.check_multi_assignment_from_tuple(lvalues, rvalue, rvalue_type,
context, undefined_rvalue, infer_lvalue_type)
else:
self.check_multi_assignment_from_iterable(lvalues, rvalue_type,
@@ -1510,7 +1529,7 @@ class TypeChecker(NodeVisitor[Type]):
def try_infer_partial_type_from_indexed_assignment(
self, lvalue: IndexExpr, rvalue: Node) -> None:
# TODO: Should we share some of this with try_infer_partial_type?
- if isinstance(lvalue.base, RefExpr):
+ if isinstance(lvalue.base, RefExpr) and isinstance(lvalue.base.node, Var):
var = cast(Var, lvalue.base.node)
if var is not None and isinstance(var.type, PartialType):
type_type = var.type.type
@@ -1682,8 +1701,7 @@ class TypeChecker(NodeVisitor[Type]):
method, lvalue_type, s.rvalue, s)
if isinstance(s.lvalue, IndexExpr):
- lv = cast(IndexExpr, s.lvalue)
- self.check_indexed_assignment(lv, s.rvalue, s.rvalue)
+ self.check_indexed_assignment(s.lvalue, s.rvalue, s.rvalue)
else:
if not is_subtype(rvalue_type, lvalue_type):
self.msg.incompatible_operator_assignment(s.op, s)
@@ -1691,6 +1709,16 @@ class TypeChecker(NodeVisitor[Type]):
def visit_assert_stmt(self, s: AssertStmt) -> Type:
self.accept(s.expr)
+ # If this is asserting some isinstance check, bind that type in the following code
+ true_map, _ = find_isinstance_check(
+ s.expr, self.type_map,
+ self.typing_mode_weak()
+ )
+
+ if true_map:
+ for var, type in true_map.items():
+ self.binder.push(var, type)
+
def visit_raise_stmt(self, s: RaiseStmt) -> Type:
"""Type check a raise statement."""
self.breaking_out = True
@@ -1714,7 +1742,7 @@ class TypeChecker(NodeVisitor[Type]):
# allow `raise type, value, traceback`
# https://docs.python.org/2/reference/simple_stmts.html#the-raise-statement
# TODO: Also check tuple item types.
- if len(cast(TupleType, typ).items) in (2, 3):
+ if len(typ.items) in (2, 3):
return
if isinstance(typ, Instance) and typ.type.fallback_to_any:
# OK!
@@ -1730,8 +1758,6 @@ class TypeChecker(NodeVisitor[Type]):
self.binder.try_frames.add(len(self.binder.frames) - 2)
self.accept(s.body)
self.binder.try_frames.remove(len(self.binder.frames) - 2)
- if s.else_body:
- self.accept(s.else_body)
self.breaking_out = False
changed, frame_on_completion = self.binder.pop_frame()
completed_frames.append(frame_on_completion)
@@ -1766,6 +1792,14 @@ class TypeChecker(NodeVisitor[Type]):
changed, frame_on_completion = self.binder.pop_frame()
completed_frames.append(frame_on_completion)
+ # Do the else block similar to the way we do except blocks.
+ if s.else_body:
+ self.binder.push_frame()
+ self.accept(s.else_body)
+ self.breaking_out = False
+ changed, frame_on_completion = self.binder.pop_frame()
+ completed_frames.append(frame_on_completion)
+
self.binder.update_from_options(completed_frames)
if s.finally_body:
@@ -1855,7 +1889,7 @@ class TypeChecker(NodeVisitor[Type]):
def visit_del_stmt(self, s: DelStmt) -> Type:
if isinstance(s.expr, IndexExpr):
- e = cast(IndexExpr, s.expr) # Cast
+ e = s.expr
m = MemberExpr(e.base, '__delitem__')
m.line = s.line
c = CallExpr(m, [e.index], [nodes.ARG_POS], [None])
@@ -1902,7 +1936,7 @@ class TypeChecker(NodeVisitor[Type]):
self.check_incompatible_property_override(e)
def check_incompatible_property_override(self, e: Decorator) -> None:
- if not e.var.is_settable_property:
+ if not e.var.is_settable_property and e.func.info is not None:
name = e.func.name()
for base in e.func.info.mro[1:]:
base_attr = base.names.get(name)
@@ -2182,7 +2216,7 @@ class TypeChecker(NodeVisitor[Type]):
self.type_map[node] = typ
def typing_mode_none(self) -> bool:
- if self.is_dynamic_function():
+ if self.is_dynamic_function() and not self.check_untyped_defs:
return not self.weak_opts
elif self.function_stack:
return False
@@ -2190,7 +2224,7 @@ class TypeChecker(NodeVisitor[Type]):
return False
def typing_mode_weak(self) -> bool:
- if self.is_dynamic_function():
+ if self.is_dynamic_function() and not self.check_untyped_defs:
return bool(self.weak_opts)
elif self.function_stack:
return False
@@ -2198,7 +2232,7 @@ class TypeChecker(NodeVisitor[Type]):
return 'global' in self.weak_opts
def typing_mode_full(self) -> bool:
- if self.is_dynamic_function():
+ if self.is_dynamic_function() and not self.check_untyped_defs:
return False
elif self.function_stack:
return True
@@ -2212,9 +2246,7 @@ class TypeChecker(NodeVisitor[Type]):
"""Look up a definition from the symbol table with the given name.
TODO remove kind argument
"""
- if self.locals is not None and name in self.locals:
- return self.locals[name]
- elif name in self.globals:
+ if name in self.globals:
return self.globals[name]
else:
b = self.globals.get('__builtins__', None)
@@ -2234,12 +2266,6 @@ class TypeChecker(NodeVisitor[Type]):
n = cast(MypyFile, n.names.get(parts[i], None).node)
return n.names[parts[-1]]
- def enter(self) -> None:
- self.locals = SymbolTable()
-
- def leave(self) -> None:
- self.locals = None
-
def enter_partial_types(self) -> None:
"""Push a new scope for collecting partial types."""
self.partial_types.append({})
@@ -2298,36 +2324,6 @@ class TypeChecker(NodeVisitor[Type]):
return method_type_with_fallback(func, self.named_type('builtins.function'))
-def map_type_from_supertype(typ: Type, sub_info: TypeInfo,
- super_info: TypeInfo) -> Type:
- """Map type variables in a type defined in a supertype context to be valid
- in the subtype context. Assume that the result is unique; if more than
- one type is possible, return one of the alternatives.
-
- For example, assume
-
- . class D(Generic[S]) ...
- . class C(D[E[T]], Generic[T]) ...
-
- Now S in the context of D would be mapped to E[T] in the context of C.
- """
- # Create the type of self in subtype, of form t[a1, ...].
- inst_type = self_type(sub_info)
- if isinstance(inst_type, TupleType):
- inst_type = inst_type.fallback
- # Map the type of self to supertype. This gets us a description of the
- # supertype type variables in terms of subtype variables, i.e. t[t1, ...]
- # so that any type variables in tN are to be interpreted in subtype
- # context.
- inst_type = map_instance_to_supertype(inst_type, super_info)
- # Finally expand the type variables in type with those in the previously
- # constructed type. Note that both type and inst_type may have type
- # variables, but in type they are interpreterd in supertype context while
- # in inst_type they are interpreted in subtype context. This works even if
- # the names of type variables in supertype and subtype overlap.
- return expand_type_by_instance(typ, inst_type)
-
-
def find_isinstance_check(node: Node,
type_map: Dict[Node, Type],
weak: bool=False) \
@@ -2352,7 +2348,6 @@ def find_isinstance_check(node: Node,
elsetype = vartype
if vartype:
if is_proper_subtype(vartype, type):
- elsetype = None
return {expr: type}, None
elif not is_overlapping_types(vartype, type):
return None, {expr: elsetype}
@@ -2395,12 +2390,29 @@ def find_isinstance_check(node: Node,
def get_isinstance_type(node: Node, type_map: Dict[Node, Type]) -> Type:
type = type_map[node]
- if isinstance(type, FunctionLike):
- if type.is_type_obj():
- # Type variables may be present -- erase them, which is the best
- # we can do (outside disallowing them here).
- return erase_typevars(type.items()[0].ret_type)
- return None
+
+ if isinstance(type, TupleType):
+ all_types = type.items
+ else:
+ all_types = [type]
+
+ types = [] # type: List[Type]
+
+ for type in all_types:
+ if isinstance(type, FunctionLike):
+ if type.is_type_obj():
+ # Type variables may be present -- erase them, which is the best
+ # we can do (outside disallowing them here).
+ type = erase_typevars(type.items()[0].ret_type)
+
+ types.append(type)
+
+ if len(types) == 0:
+ return None
+ elif len(types) == 1:
+ return types[0]
+ else:
+ return UnionType(types)
def expand_node(defn: Node, map: Dict[int, Type]) -> Node:
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index b32e995..658c9be 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -1,11 +1,11 @@
"""Expression type checker. This file is conceptually part of TypeChecker."""
-from typing import cast, List, Tuple, Dict, Callable, Union, Optional
+from typing import cast, Dict, List, Tuple, Callable, Union, Optional
from mypy.types import (
Type, AnyType, CallableType, Overloaded, NoneTyp, Void, TypeVarDef,
- TupleType, Instance, TypeVarType, TypeTranslator, ErasedType, FunctionLike, UnionType,
- PartialType, DeletedType
+ TupleType, Instance, TypeVarType, ErasedType, UnionType,
+ PartialType, DeletedType, UnboundType
)
from mypy.nodes import (
NameExpr, RefExpr, Var, FuncDef, OverloadedFuncDef, TypeInfo, CallExpr,
@@ -14,22 +14,20 @@ from mypy.nodes import (
TupleExpr, DictExpr, FuncExpr, SuperExpr, SliceExpr, Context,
ListComprehension, GeneratorExpr, SetExpr, MypyFile, Decorator,
ConditionalExpr, ComparisonExpr, TempNode, SetComprehension,
- DictionaryComprehension, ComplexExpr, EllipsisExpr, LITERAL_TYPE,
- TypeAliasExpr, YieldExpr, BackquoteExpr, ARG_POS
+ DictionaryComprehension, ComplexExpr, EllipsisExpr,
+ TypeAliasExpr, BackquoteExpr, ARG_POS, ARG_NAMED, ARG_STAR2
)
-from mypy.errors import Errors
from mypy.nodes import function_type
from mypy import nodes
import mypy.checker
from mypy import types
from mypy.sametypes import is_same_type
-from mypy.replacetvars import replace_func_type_vars, replace_type_vars
+from mypy.replacetvars import replace_func_type_vars
from mypy.messages import MessageBuilder
from mypy import messages
from mypy.infer import infer_type_arguments, infer_function_type_arguments
from mypy import join
-from mypy.expandtype import expand_type
-from mypy.subtypes import is_subtype, is_more_precise
+from mypy.subtypes import is_subtype, is_equivalent
from mypy import applytype
from mypy import erasetype
from mypy.checkmember import analyze_member_access, type_object_type
@@ -40,7 +38,7 @@ from mypy.checkstrformat import StringFormatterChecker
# Type of callback user for checking individual function arguments. See
# check_args() below for details.
-ArgChecker = Callable[[Type, Type, Type, int, int, CallableType, Context, MessageBuilder],
+ArgChecker = Callable[[Type, Type, int, Type, int, int, CallableType, Context, MessageBuilder],
None]
@@ -134,11 +132,12 @@ class ExpressionChecker:
# It's really a special form that only looks like a call.
return self.accept(e.analyzed, self.chk.type_context[-1])
self.try_infer_partial_type(e)
- self.accept(e.callee)
- # Access callee type directly, since accept may return the Any type
- # even if the type is known (in a dynamically typed function). This
- # way we get a more precise callee in dynamically typed functions.
- callee_type = self.chk.type_map[e.callee]
+ callee_type = self.accept(e.callee)
+ if (self.chk.disallow_untyped_calls and
+ self.chk.typing_mode_full() and
+ isinstance(callee_type, CallableType)
+ and callee_type.implicit):
+ return self.msg.untyped_function_call(callee_type, e)
return self.check_call_expr_with_callee_type(callee_type, e)
# Types and methods that can be used to infer partial types.
@@ -215,7 +214,7 @@ class ExpressionChecker:
"""
arg_messages = arg_messages or self.msg
if isinstance(callee, CallableType):
- if callee.is_type_obj() and callee.type_object().is_abstract:
+ if callee.is_concrete_type_obj() and callee.type_object().is_abstract:
type = callee.type_object()
self.msg.cannot_instantiate_abstract_class(
callee.type_object().name(), type.abstract_attributes,
@@ -275,6 +274,9 @@ class ExpressionChecker:
self.msg)
return self.check_call(call_function, args, arg_kinds, context, arg_names,
callable_node, arg_messages)
+ elif isinstance(callee, TypeVarType):
+ return self.check_call(callee.upper_bound, args, arg_kinds, context, arg_names,
+ callable_node, arg_messages)
else:
return self.msg.not_callable(callee, context), AnyType()
@@ -316,7 +318,7 @@ class ExpressionChecker:
"""Infer argument expression types using a callable type as context.
For example, if callee argument 2 has type List[int], infer the
- argument exprsession with List[int] type context.
+ argument expression with List[int] type context.
Returns the inferred types of *actual arguments*.
"""
@@ -324,7 +326,7 @@ class ExpressionChecker:
for i, actuals in enumerate(formal_to_actual):
for ai in actuals:
- if arg_kinds[ai] != nodes.ARG_STAR:
+ if arg_kinds[ai] not in (nodes.ARG_STAR, nodes.ARG_STAR2):
res[ai] = self.accept(args[ai], callee.arg_types[i])
# Fill in the rest of the argument types.
@@ -385,8 +387,7 @@ class ExpressionChecker:
Infer based on the types of arguments.
- Return a derived callable type that has the arguments applied (and
- stored as implicit type arguments).
+ Return a derived callable type that has the arguments applied.
"""
if not self.chk.typing_mode_none():
# Disable type errors during type inference. There may be errors
@@ -403,7 +404,7 @@ class ExpressionChecker:
arg_pass_nums = self.get_arg_infer_passes(
callee_type.arg_types, formal_to_actual, len(args))
- pass1_args = [] # type: List[Type]
+ pass1_args = [] # type: List[Optional[Type]]
for i, arg in enumerate(arg_types):
if arg_pass_nums[i] > 1:
pass1_args.append(None)
@@ -420,6 +421,20 @@ class ExpressionChecker:
inferred_args) = self.infer_function_type_arguments_pass2(
callee_type, args, arg_kinds, formal_to_actual,
inferred_args, context)
+
+ if callee_type.special_sig == 'dict' and len(inferred_args) == 2 and (
+ ARG_NAMED in arg_kinds or ARG_STAR2 in arg_kinds):
+ # HACK: Infer str key type for dict(...) with keyword args. The type system
+ # can't represent this so we special case it, as this is a pretty common
+ # thing. This doesn't quite work with all possible subclasses of dict
+ # if they shuffle type variables around, as we assume that there is a 1-1
+ # correspondence with dict type variables. This is a marginal issue and
+ # a little tricky to fix so it's left unfixed for now.
+ if isinstance(inferred_args[0], NoneTyp):
+ inferred_args[0] = self.named_type('builtins.str')
+ elif not is_subtype(self.named_type('builtins.str'), inferred_args[0]):
+ self.msg.fail(messages.KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE,
+ context)
else:
# In dynamically typed functions use implicit 'Any' types for
# type variables.
@@ -601,7 +616,7 @@ class ExpressionChecker:
# and **args this is the item type, not the collection type).
actual_type = get_actual_type(arg_type, arg_kinds[actual],
tuple_counter)
- check_arg(actual_type, arg_type,
+ check_arg(actual_type, arg_type, arg_kinds[actual],
callee.arg_types[i],
actual + 1, i + 1, callee, context, messages)
@@ -615,11 +630,12 @@ class ExpressionChecker:
actual_type = get_actual_type(arg_type,
arg_kinds[actual],
tuple_counter)
- check_arg(actual_type, arg_type,
+ check_arg(actual_type, arg_type, arg_kinds[actual],
callee.arg_types[i],
actual + 1, i + 1, callee, context, messages)
def check_arg(self, caller_type: Type, original_caller_type: Type,
+ caller_kind: int,
callee_type: Type, n: int, m: int, callee: CallableType,
context: Context, messages: MessageBuilder) -> None:
"""Check the type of a single argument in a call."""
@@ -629,7 +645,7 @@ class ExpressionChecker:
messages.deleted_as_rvalue(caller_type, context)
elif not is_subtype(caller_type, callee_type):
messages.incompatible_argument(n, m, callee, original_caller_type,
- context)
+ caller_kind, context)
def overload_call_target(self, arg_types: List[Type], arg_kinds: List[int],
arg_names: List[str],
@@ -641,9 +657,8 @@ class ExpressionChecker:
could not be determined).
"""
messages = messages or self.msg
- # TODO also consider argument names and kinds
- # TODO for overlapping signatures we should try to get a more precise
- # result than 'Any'
+ # TODO: For overlapping signatures we should try to get a more precise
+ # result than 'Any'.
match = [] # type: List[CallableType]
best_match = 0
for typ in overload.items():
@@ -655,14 +670,23 @@ class ExpressionChecker:
not mypy.checker.is_more_precise_signature(
match[-1], typ)):
# Ambiguous return type. Either the function overload is
- # overlapping (which results in an error elsewhere) or the
- # caller has provided some Any argument types; in
- # either case can only infer the type to be Any, as it is
- # not an error to use Any types in calls.
+ # overlapping (which we don't handle very well here) or the
+ # caller has provided some Any argument types; in either
+ # case we'll fall back to Any. It's okay to use Any types
+ # in calls.
#
- # Overlapping overload items are fine if the items are
+ # Overlapping overload items are generally fine if the
+ # overlapping is only possible when there is multiple
+ # inheritance, as this is rare. See docstring of
+ # mypy.meet.is_overlapping_types for more about this.
+ #
+ # Note that there is no ambiguity if the items are
# covariant in both argument types and return types with
- # respect to type precision.
+ # respect to type precision. We'll pick the best/closest
+ # match.
+ #
+ # TODO: Consider returning a union type instead if the
+ # overlapping is NOT due to Any types?
return AnyType()
else:
match.append(typ)
@@ -686,9 +710,6 @@ class ExpressionChecker:
arg_names: List[str], callee: CallableType) -> int:
"""Determine whether arguments could match the signature at runtime.
- If is_var_arg is True, the caller uses varargs. This is used for
- overload resolution.
-
Return similarity level (0 = no match, 1 = can match, 2 = non-promotion match). See
overload_arg_similarity for a discussion of similarity levels.
"""
@@ -705,7 +726,7 @@ class ExpressionChecker:
similarity = 2
- def check_arg(caller_type: Type, original_caller_type: Type,
+ def check_arg(caller_type: Type, original_caller_type: Type, caller_kind: int,
callee_type: Type, n: int, m: int, callee: CallableType,
context: Context, messages: MessageBuilder) -> None:
nonlocal similarity
@@ -739,7 +760,7 @@ class ExpressionChecker:
lambda i: arg_types[i])
ok = True
- def check_arg(caller_type: Type, original_caller_type: Type,
+ def check_arg(caller_type: Type, original_caller_type: Type, caller_kind: int,
callee_type: Type, n: int, m: int, callee: CallableType,
context: Context, messages: MessageBuilder) -> None:
nonlocal ok
@@ -836,7 +857,7 @@ class ExpressionChecker:
if e.op == '*' and isinstance(e.left, ListExpr):
# Expressions of form [...] * e get special type inference.
return self.check_list_multiply(e)
- if e.op == '%' and isinstance(e.left, StrExpr):
+ if e.op == '%' and isinstance(e.left, (StrExpr, BytesExpr)):
return self.strfrm_checker.check_str_interpolation(cast(StrExpr, e.left), e.right)
left_type = self.accept(e.left)
@@ -1081,7 +1102,6 @@ class ExpressionChecker:
return self.accept(e.analyzed)
left_type = self.accept(e.base)
if isinstance(left_type, TupleType) and self.chk.typing_mode_full():
- left_type = cast(TupleType, left_type)
# Special case for tuples. They support indexing only by integer
# literals. (Except in weak type checking mode.)
index = e.index
@@ -1189,7 +1209,7 @@ class ExpressionChecker:
ctx = None # type: TupleType
# Try to determine type context for type inference.
if isinstance(self.chk.type_context[-1], TupleType):
- t = cast(TupleType, self.chk.type_context[-1])
+ t = self.chk.type_context[-1]
if len(t.items) == len(e.items):
ctx = t
# Infer item types.
@@ -1247,6 +1267,8 @@ class ExpressionChecker:
else:
# Type context available.
self.chk.check_func_item(e, type_override=inferred_type)
+ if e.expr() not in self.chk.type_map:
+ self.accept(e.expr())
ret_type = self.chk.type_map[e.expr()]
return replace_callable_return_type(inferred_type, ret_type)
@@ -1297,6 +1319,8 @@ class ExpressionChecker:
# There's an undefined base class, and we're
# at the end of the chain. That's not an error.
return AnyType()
+ if not self.chk.typing_mode_full():
+ return AnyType()
return analyze_member_access(e.name, self_type(e.info), e,
is_lvalue, True,
self.named_type, self.not_ready_callback,
@@ -1381,6 +1405,7 @@ class ExpressionChecker:
def visit_conditional_expr(self, e: ConditionalExpr) -> Type:
cond_type = self.accept(e.cond)
self.check_not_void(cond_type, e)
+ ctx = self.chk.type_context[-1]
# Gain type information from isinstance if it is there
# but only for the current expression
@@ -1389,26 +1414,36 @@ class ExpressionChecker:
self.chk.type_map,
self.chk.typing_mode_weak())
- self.chk.binder.push_frame()
-
- if if_map:
- for var, type in if_map.items():
- self.chk.binder.push(var, type)
+ if_type = self.analyze_cond_branch(if_map, e.if_expr, context=ctx)
- if_type = self.accept(e.if_expr)
+ if not mypy.checker.is_valid_inferred_type(if_type):
+ # Analyze the right branch disregarding the left branch.
+ else_type = self.analyze_cond_branch(else_map, e.else_expr, context=ctx)
- self.chk.binder.pop_frame()
- self.chk.binder.push_frame()
+ # If it would make a difference, re-analyze the left
+ # branch using the right branch's type as context.
+ if ctx is None or not is_equivalent(else_type, ctx):
+ # TODO: If it's possible that the previous analysis of
+ # the left branch produced errors that are avoided
+ # using this context, suppress those errors.
+ if_type = self.analyze_cond_branch(if_map, e.if_expr, context=else_type)
- if else_map:
- for var, type in else_map.items():
- self.chk.binder.push(var, type)
+ else:
+ # Analyze the right branch in the context of the left
+ # branch's type.
+ else_type = self.analyze_cond_branch(else_map, e.else_expr, context=if_type)
- else_type = self.accept(e.else_expr, context=if_type)
+ res = join.join_types(if_type, else_type)
- self.chk.binder.pop_frame()
+ return res
- return join.join_types(if_type, else_type)
+ def analyze_cond_branch(self, map: Optional[Dict[Node, Type]],
+ node: Node, context: Optional[Type]) -> Type:
+ with self.chk.binder:
+ if map:
+ for var, type in map.items():
+ self.chk.binder.push(var, type)
+ return self.accept(node, context=context)
def visit_backquote_expr(self, e: BackquoteExpr) -> Type:
self.accept(e.expr)
@@ -1509,7 +1544,7 @@ def map_actuals_to_formals(caller_kinds: List[int],
elif callee_kinds[j] == nodes.ARG_STAR:
map[j].append(i)
elif kind == nodes.ARG_STAR:
- # We need to to know the actual type to map varargs.
+ # We need to know the actual type to map varargs.
argt = caller_arg_type(i)
if isinstance(argt, TupleType):
# A tuple actual maps to a fixed number of formals.
@@ -1548,7 +1583,7 @@ def map_actuals_to_formals(caller_kinds: List[int],
def is_empty_tuple(t: Type) -> bool:
- return isinstance(t, TupleType) and not cast(TupleType, t).items
+ return isinstance(t, TupleType) and not t.items
def is_duplicate_mapping(mapping: List[int], actual_kinds: List[int]) -> bool:
@@ -1614,9 +1649,17 @@ def overload_arg_similarity(actual: Type, formal: Type) -> int:
The distinction is important in cases where multiple overload items match. We want
give priority to higher similarity matches.
"""
+ # Replace type variables with their upper bounds. Overloading
+ # resolution is based on runtime behavior which erases type
+    # parameters, so no need to handle type variables occurring within
+ # a type.
+ if isinstance(actual, TypeVarType):
+ actual = actual.erase_to_union_or_bound()
+ if isinstance(formal, TypeVarType):
+ formal = formal.erase_to_union_or_bound()
if (isinstance(actual, NoneTyp) or isinstance(actual, AnyType) or
- isinstance(formal, AnyType) or isinstance(formal, TypeVarType) or
- isinstance(formal, CallableType)):
+ isinstance(formal, AnyType) or isinstance(formal, CallableType) or
+ (isinstance(actual, Instance) and actual.type.fallback_to_any)):
# These could match anything at runtime.
return 2
if isinstance(actual, UnionType):
@@ -1643,5 +1686,8 @@ def overload_arg_similarity(actual: Type, formal: Type) -> int:
return 0
else:
return 0
+ if isinstance(actual, UnboundType) or isinstance(formal, UnboundType):
+ # Either actual or formal is the result of an error; shut up.
+ return 2
# Fall back to a conservative equality check for the remaining kinds of type.
return 2 if is_same_type(erasetype.erase_type(actual), erasetype.erase_type(formal)) else 0
diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index 0364351..8eeea02 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -1,6 +1,6 @@
"""Type checking of attribute access"""
-from typing import cast, Callable, List
+from typing import cast, Callable, List, Optional
from mypy.types import (
Type, Instance, AnyType, TupleType, CallableType, FunctionLike, TypeVarDef,
@@ -187,7 +187,7 @@ def analyze_var(name: str, var: Var, itype: Instance, info: TypeInfo, node: Cont
# methods: the former to the instance, the latter to the
# class.
functype = cast(FunctionLike, t)
- check_method_type(functype, itype, node, msg)
+ check_method_type(functype, itype, var.is_classmethod, node, msg)
signature = method_type(functype)
if var.is_property:
# A property cannot have an overloaded type => the cast
@@ -228,17 +228,29 @@ def lookup_member_var_or_accessor(info: TypeInfo, name: str,
return None
-def check_method_type(functype: FunctionLike, itype: Instance,
+def check_method_type(functype: FunctionLike, itype: Instance, is_classmethod: bool,
context: Context, msg: MessageBuilder) -> None:
for item in functype.items():
- if not item.arg_types or item.arg_kinds[0] != ARG_POS:
- # No positional first (self) argument.
+ if not item.arg_types or item.arg_kinds[0] not in (ARG_POS, ARG_STAR):
+ # No positional first (self) argument (*args is okay).
msg.invalid_method_type(item, context)
- else:
+ elif not is_classmethod:
# Check that self argument has type 'Any' or valid instance type.
selfarg = item.arg_types[0]
if not subtypes.is_equivalent(selfarg, itype):
msg.invalid_method_type(item, context)
+ else:
+ # Check that cls argument has type 'Any' or valid class type.
+ # (This is sufficient for the current treatment of @classmethod,
+ # but probably needs to be revisited when we implement Type[C]
+ # or advanced variants of it like Type[<args>, C].)
+ clsarg = item.arg_types[0]
+ if isinstance(clsarg, CallableType) and clsarg.is_type_obj():
+ if not subtypes.is_equivalent(clsarg.ret_type, itype):
+ msg.invalid_class_method_type(item, context)
+ else:
+ if not subtypes.is_equivalent(clsarg, AnyType()):
+ msg.invalid_class_method_type(item, context)
def analyze_class_attribute_access(itype: Instance,
@@ -276,7 +288,7 @@ def analyze_class_attribute_access(itype: Instance,
return AnyType()
if isinstance(node.node, TypeInfo):
- return type_object_type(cast(TypeInfo, node.node), builtin_type)
+ return type_object_type(node.node, builtin_type)
if is_decorated:
# TODO: Return type of decorated function. This is quick hack to work around #998.
@@ -338,7 +350,7 @@ def type_object_type(info: TypeInfo, builtin_type: Callable[[str], Instance]) ->
arg_names=["_args", "_kwds"],
ret_type=AnyType(),
fallback=builtin_type('builtins.function'))
- return class_callable(sig, info, fallback)
+ return class_callable(sig, info, fallback, None)
# Construct callable type based on signature of __init__. Adjust
# return type and insert type arguments.
return type_object_type_from_function(init_method, info, fallback)
@@ -347,17 +359,37 @@ def type_object_type(info: TypeInfo, builtin_type: Callable[[str], Instance]) ->
def type_object_type_from_function(init_or_new: FuncBase, info: TypeInfo,
fallback: Instance) -> FunctionLike:
signature = method_type_with_fallback(init_or_new, fallback)
+
+ # The __init__ method might come from a generic superclass
+ # (init_or_new.info) with type variables that do not map
+ # identically to the type variables of the class being constructed
+ # (info). For example
+ #
+ # class A(Generic[T]): def __init__(self, x: T) -> None: pass
+ # class B(A[List[T]], Generic[T]): pass
+ #
+ # We need to first map B's __init__ to the type (List[T]) -> None.
+ signature = cast(FunctionLike,
+ map_type_from_supertype(signature, info, init_or_new.info))
+
+ if init_or_new.info.fullname() == 'builtins.dict':
+ # Special signature!
+ special_sig = 'dict'
+ else:
+ special_sig = None
+
if isinstance(signature, CallableType):
- return class_callable(signature, info, fallback)
+ return class_callable(signature, info, fallback, special_sig)
else:
# Overloaded __init__/__new__.
items = [] # type: List[CallableType]
for item in cast(Overloaded, signature).items():
- items.append(class_callable(item, info, fallback))
+ items.append(class_callable(item, info, fallback, special_sig))
return Overloaded(items)
-def class_callable(init_type: CallableType, info: TypeInfo, type_type: Instance) -> CallableType:
+def class_callable(init_type: CallableType, info: TypeInfo, type_type: Instance,
+ special_sig: Optional[str]) -> CallableType:
"""Create a type object type based on the signature of __init__."""
variables = [] # type: List[TypeVarDef]
for i, tvar in enumerate(info.defn.type_vars):
@@ -368,9 +400,12 @@ def class_callable(init_type: CallableType, info: TypeInfo, type_type: Instance)
variables.extend(initvars)
callable_type = init_type.copy_modified(
- ret_type=self_type(info), fallback=type_type, name=None, variables=variables)
+ ret_type=self_type(info), fallback=type_type, name=None, variables=variables,
+ special_sig=special_sig)
c = callable_type.with_name('"{}"'.format(info.name()))
- return convert_class_tvars_to_func_tvars(c, len(initvars))
+ cc = convert_class_tvars_to_func_tvars(c, len(initvars))
+ cc.is_classmethod_class = True
+ return cc
def convert_class_tvars_to_func_tvars(callable: CallableType,
@@ -402,3 +437,33 @@ class TvarTranslator(TypeTranslator):
else:
items.append(v)
return items
+
+
+def map_type_from_supertype(typ: Type, sub_info: TypeInfo,
+ super_info: TypeInfo) -> Type:
+ """Map type variables in a type defined in a supertype context to be valid
+ in the subtype context. Assume that the result is unique; if more than
+ one type is possible, return one of the alternatives.
+
+ For example, assume
+
+ . class D(Generic[S]) ...
+ . class C(D[E[T]], Generic[T]) ...
+
+ Now S in the context of D would be mapped to E[T] in the context of C.
+ """
+ # Create the type of self in subtype, of form t[a1, ...].
+ inst_type = self_type(sub_info)
+ if isinstance(inst_type, TupleType):
+ inst_type = inst_type.fallback
+ # Map the type of self to supertype. This gets us a description of the
+ # supertype type variables in terms of subtype variables, i.e. t[t1, ...]
+ # so that any type variables in tN are to be interpreted in subtype
+ # context.
+ inst_type = map_instance_to_supertype(inst_type, super_info)
+ # Finally expand the type variables in type with those in the previously
+ # constructed type. Note that both type and inst_type may have type
+    # variables, but in type they are interpreted in supertype context while
+ # in inst_type they are interpreted in subtype context. This works even if
+ # the names of type variables in supertype and subtype overlap.
+ return expand_type_by_instance(typ, inst_type)
diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py
index 39e3fe4..0b203e5 100644
--- a/mypy/checkstrformat.py
+++ b/mypy/checkstrformat.py
@@ -8,7 +8,7 @@ from mypy.types import (
Type, AnyType, TupleType, Instance, UnionType
)
from mypy.nodes import (
- Node, StrExpr, TupleExpr, DictExpr, Context
+ Node, StrExpr, BytesExpr, TupleExpr, DictExpr, Context
)
if False:
# break import cycle only needed for mypy
@@ -136,7 +136,7 @@ class StringFormatterChecker:
def check_mapping_str_interpolation(self, specifiers: List[ConversionSpecifier],
replacements: Node) -> None:
dict_with_only_str_literal_keys = (isinstance(replacements, DictExpr) and
- all(isinstance(k, StrExpr)
+ all(isinstance(k, (StrExpr, BytesExpr))
for k, v in cast(DictExpr, replacements).items))
if dict_with_only_str_literal_keys:
mapping = {} # type: Dict[str, Type]
@@ -255,7 +255,7 @@ class StringFormatterChecker:
def check_node(node: Node) -> None:
"""int, or str with length 1"""
type = self.accept(node, expected_type)
- if isinstance(node, StrExpr) and len(cast(StrExpr, node).value) != 1:
+ if isinstance(node, (StrExpr, BytesExpr)) and len(cast(StrExpr, node).value) != 1:
self.msg.requires_int_or_char(context)
check_type(type)
diff --git a/mypy/constraints.py b/mypy/constraints.py
index cfb9d06..4ea64f4 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -1,6 +1,6 @@
"""Type inference constraints."""
-from typing import List, cast
+from typing import List, Optional, cast
from mypy.types import (
CallableType, Type, TypeVisitor, UnboundType, AnyType, Void, NoneTyp, TypeVarType,
@@ -10,6 +10,7 @@ from mypy.types import (
from mypy.maptype import map_instance_to_supertype
from mypy import nodes
import mypy.subtypes
+from mypy.erasetype import erase_typevars
SUBTYPE_OF = 0 # type: int
@@ -39,18 +40,20 @@ class Constraint:
def infer_constraints_for_callable(
- callee: CallableType, arg_types: List[Type], arg_kinds: List[int],
+ callee: CallableType, arg_types: List[Optional[Type]], arg_kinds: List[int],
formal_to_actual: List[List[int]]) -> List[Constraint]:
"""Infer type variable constraints for a callable and actual arguments.
Return a list of constraints.
"""
-
constraints = [] # type: List[Constraint]
tuple_counter = [0]
for i, actuals in enumerate(formal_to_actual):
for actual in actuals:
+ if arg_types[actual] is None:
+ continue
+
actual_type = get_actual_type(arg_types[actual], arg_kinds[actual],
tuple_counter)
c = infer_constraints(callee.arg_types[i], actual_type,
@@ -79,16 +82,14 @@ def get_actual_type(arg_type: Type, kind: int,
return AnyType()
elif isinstance(arg_type, TupleType):
# Get the next tuple item of a tuple *arg.
- tuplet = cast(TupleType, arg_type)
tuple_counter[0] += 1
- return tuplet.items[tuple_counter[0] - 1]
+ return arg_type.items[tuple_counter[0] - 1]
else:
return AnyType()
elif kind == nodes.ARG_STAR2:
- if isinstance(arg_type, Instance) and (
- (cast(Instance, arg_type)).type.fullname() == 'builtins.dict'):
+ if isinstance(arg_type, Instance) and (arg_type.type.fullname() == 'builtins.dict'):
# Dict **arg. TODO more general (Mapping)
- return (cast(Instance, arg_type)).args[1]
+ return arg_type.args[1]
else:
return AnyType()
else:
@@ -104,8 +105,8 @@ def infer_constraints(template: Type, actual: Type,
recursively against a type which does not contain (the same) type
variable references. The result is a list of type constrains of
form 'T is a supertype/subtype of x', where T is a type variable
- present in the the template and x is a type without reference to
- type variables present in the template.
+ present in the template and x is a type without reference to type
+ variables present in the template.
Assume T and S are type variables. Now the following results can be
calculated (read as '(template, actual) --> result'):
@@ -119,9 +120,82 @@ def infer_constraints(template: Type, actual: Type,
The constraints are represented as Constraint objects.
"""
+ # If the template is simply a type variable, emit a Constraint directly.
+ # We need to handle this case before handling Unions for two reasons:
+ # 1. "T <: Union[U1, U2]" is not equivalent to "T <: U1 or T <: U2",
+ # because T can itself be a union (notably, Union[U1, U2] itself).
+ # 2. "T :> Union[U1, U2]" is logically equivalent to "T :> U1 and
+ # T :> U2", but they are not equivalent to the constraint solver,
+ # which never introduces new Union types (it uses join() instead).
+ if isinstance(template, TypeVarType):
+ return [Constraint(template.id, direction, actual)]
+
+ # Now handle the case of either template or actual being a Union.
+ # For a Union to be a subtype of another type, every item of the Union
+ # must be a subtype of that type, so concatenate the constraints.
+ if direction == SUBTYPE_OF and isinstance(template, UnionType):
+ res = []
+ for t_item in template.items:
+ res.extend(infer_constraints(t_item, actual, direction))
+ return res
+ if direction == SUPERTYPE_OF and isinstance(actual, UnionType):
+ res = []
+ for a_item in actual.items:
+ res.extend(infer_constraints(template, a_item, direction))
+ return res
+
+ # Now the potential subtype is known not to be a Union or a type
+ # variable that we are solving for. In that case, for a Union to
+ # be a supertype of the potential subtype, some item of the Union
+ # must be a supertype of it.
+ if direction == SUBTYPE_OF and isinstance(actual, UnionType):
+ return any_constraints(
+ [infer_constraints_if_possible(template, a_item, direction)
+ for a_item in actual.items])
+ if direction == SUPERTYPE_OF and isinstance(template, UnionType):
+ return any_constraints(
+ [infer_constraints_if_possible(t_item, actual, direction)
+ for t_item in template.items])
+
+ # Remaining cases are handled by ConstraintBuilderVisitor.
return template.accept(ConstraintBuilderVisitor(actual, direction))
+def infer_constraints_if_possible(template: Type, actual: Type,
+ direction: int) -> Optional[List[Constraint]]:
+ """Like infer_constraints, but return None if the input relation is
+ known to be unsatisfiable, for example if template=List[T] and actual=int.
+ (In this case infer_constraints would return [], just like it would for
+ an automatically satisfied relation like template=List[T] and actual=object.)
+ """
+ if (direction == SUBTYPE_OF and
+ not mypy.subtypes.is_subtype(erase_typevars(template), actual)):
+ return None
+ if (direction == SUPERTYPE_OF and
+ not mypy.subtypes.is_subtype(actual, erase_typevars(template))):
+ return None
+ return infer_constraints(template, actual, direction)
+
+
+def any_constraints(options: List[Optional[List[Constraint]]]) -> List[Constraint]:
+ """Deduce what we can from a collection of constraint lists given that
+ at least one of the lists must be satisfied. A None element in the
+ list of options represents an unsatisfiable constraint and is ignored.
+ """
+ valid_options = [option for option in options if option is not None]
+ if len(valid_options) == 1:
+ return valid_options[0]
+ # Otherwise, there are either no valid options or multiple valid options.
+ # Give up and deduce nothing.
+ return []
+
+ # TODO: In the latter case, it could happen that every valid
+ # option requires the same constraint on the same variable. Then
+    # we could include that constraint in the result. Or more
+ # generally, if a given (variable, direction) pair appears in
+ # every option, combine the bounds with meet/join.
+
+
class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
"""Visitor class for inferring type constraints."""
@@ -163,10 +237,8 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
# Non-trivial leaf type
def visit_type_var(self, template: TypeVarType) -> List[Constraint]:
- if self.actual:
- return [Constraint(template.id, self.direction, self.actual)]
- else:
- return []
+ assert False, ("Unexpected TypeVarType in ConstraintBuilderVisitor"
+ " (should have been handled in infer_constraints)")
# Non-leaf types
@@ -174,18 +246,18 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
actual = self.actual
res = [] # type: List[Constraint]
if isinstance(actual, Instance):
- instance = cast(Instance, actual)
+ instance = actual
if (self.direction == SUBTYPE_OF and
template.type.has_base(instance.type.fullname())):
mapped = map_instance_to_supertype(template, instance.type)
for i in range(len(instance.args)):
# The constraints for generic type parameters are
- # invariant. Include the default constraint and its
- # negation to achieve the effect.
- cb = infer_constraints(mapped.args[i], instance.args[i],
- self.direction)
- res.extend(cb)
- res.extend(negate_constraints(cb))
+ # invariant. Include constraints from both directions
+ # to achieve the effect.
+ res.extend(infer_constraints(
+ mapped.args[i], instance.args[i], self.direction))
+ res.extend(infer_constraints(
+ mapped.args[i], instance.args[i], neg_op(self.direction)))
return res
elif (self.direction == SUPERTYPE_OF and
instance.type.has_base(template.type.fullname())):
@@ -193,10 +265,10 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
for j in range(len(template.args)):
# The constraints for generic type parameters are
# invariant.
- cb = infer_constraints(template.args[j], mapped.args[j],
- self.direction)
- res.extend(cb)
- res.extend(negate_constraints(cb))
+ res.extend(infer_constraints(
+ template.args[j], mapped.args[j], self.direction))
+ res.extend(infer_constraints(
+ template.args[j], mapped.args[j], neg_op(self.direction)))
return res
if isinstance(actual, AnyType):
# IDEA: Include both ways, i.e. add negation as well?
@@ -206,7 +278,6 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
is_named_instance(template, 'typing.Sequence') or
is_named_instance(template, 'typing.Reversible'))
and self.direction == SUPERTYPE_OF):
- actual = cast(TupleType, actual)
for item in actual.items:
cb = infer_constraints(template.args[0], item, SUPERTYPE_OF)
res.extend(cb)
@@ -216,7 +287,7 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
def visit_callable_type(self, template: CallableType) -> List[Constraint]:
if isinstance(self.actual, CallableType):
- cactual = cast(CallableType, self.actual)
+ cactual = self.actual
# FIX verify argument counts
# FIX what if one of the functions is generic
res = [] # type: List[Constraint]
@@ -226,8 +297,8 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
if not template.is_ellipsis_args:
# The lengths should match, but don't crash (it will error elsewhere).
for t, a in zip(template.arg_types, cactual.arg_types):
- # Negate constraints due function argument type contravariance.
- res.extend(negate_constraints(infer_constraints(t, a, self.direction)))
+ # Negate direction due to function argument type contravariance.
+ res.extend(infer_constraints(t, a, neg_op(self.direction)))
res.extend(infer_constraints(template.ret_type, cactual.ret_type,
self.direction))
return res
@@ -238,8 +309,7 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
self.direction))
return res
elif isinstance(self.actual, Overloaded):
- return self.infer_against_overloaded(cast(Overloaded, self.actual),
- template)
+ return self.infer_against_overloaded(self.actual, template)
else:
return []
@@ -256,12 +326,11 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
def visit_tuple_type(self, template: TupleType) -> List[Constraint]:
actual = self.actual
- if (isinstance(actual, TupleType) and
- len((cast(TupleType, actual)).items) == len(template.items)):
+ if isinstance(actual, TupleType) and len(actual.items) == len(template.items):
res = [] # type: List[Constraint]
for i in range(len(template.items)):
res.extend(infer_constraints(template.items[i],
- cast(TupleType, actual).items[i],
+ actual.items[i],
self.direction))
return res
elif isinstance(actual, AnyType):
@@ -270,10 +339,8 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
return []
def visit_union_type(self, template: UnionType) -> List[Constraint]:
- res = [] # type: List[Constraint]
- for item in template.items:
- res.extend(infer_constraints(item, self.actual, self.direction))
- return res
+ assert False, ("Unexpected UnionType in ConstraintBuilderVisitor"
+ " (should have been handled in infer_constraints)")
def infer_against_any(self, types: List[Type]) -> List[Constraint]:
res = [] # type: List[Constraint]
@@ -288,13 +355,6 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
return res
-def negate_constraints(constraints: List[Constraint]) -> List[Constraint]:
- res = [] # type: List[Constraint]
- for c in constraints:
- res.append(Constraint(c.type_var, neg_op(c.op), c.target))
- return res
-
-
def neg_op(op: int) -> int:
"""Map SubtypeOf to SupertypeOf and vice versa."""
diff --git a/mypy/erasetype.py b/mypy/erasetype.py
index e67d20b..15ccd4f 100644
--- a/mypy/erasetype.py
+++ b/mypy/erasetype.py
@@ -1,5 +1,3 @@
-import typing
-
from mypy.types import (
Type, TypeVisitor, UnboundType, ErrorType, AnyType, Void, NoneTyp,
Instance, TypeVarType, CallableType, TupleType, UnionType, Overloaded, ErasedType,
diff --git a/mypy/errors.py b/mypy/errors.py
index 69a0b27..c2fc663 100644
--- a/mypy/errors.py
+++ b/mypy/errors.py
@@ -3,7 +3,7 @@ import os.path
import sys
import traceback
-from typing import Tuple, List, TypeVar, Sequence, Any, Callable, Set
+from typing import Tuple, List, TypeVar, Set
T = TypeVar('T')
@@ -196,7 +196,7 @@ class Errors:
Render the messages suitable for displaying.
"""
- raise CompileError(self.messages())
+ raise CompileError(self.messages(), use_stdout=True)
def messages(self) -> List[str]:
"""Return a string list that represents the error messages.
@@ -338,10 +338,12 @@ class CompileError(Exception):
"""
messages = None # type: List[str]
+ use_stdout = False
- def __init__(self, messages: List[str]) -> None:
+ def __init__(self, messages: List[str], use_stdout: bool = False) -> None:
super().__init__('\n'.join(messages))
self.messages = messages
+ self.use_stdout = use_stdout
def remove_path_prefix(path: str, prefix: str) -> str:
@@ -374,8 +376,14 @@ def report_internal_error(err: Exception, file: str, line: int) -> None:
for s in traceback.format_list(tb + tb2):
print(s.rstrip('\n'))
print('{}: {}'.format(type(err).__name__, err))
- print('\n*** INTERNAL ERROR ***')
- print('\n{}:{}: error: Internal error --'.format(file, line),
- 'please report a bug at https://github.com/JukkaL/mypy/issues')
- print('\nNOTE: you can use "mypy --pdb ..." to drop into the debugger when this happens.')
+ print('\n*** INTERNAL ERROR ***', file=sys.stderr)
+ if line:
+ prefix = '{}:{}'.format(file, line)
+ else:
+ prefix = file
+ print('\n{}: error: Internal error --'.format(prefix),
+ 'please report a bug at https://github.com/python/mypy/issues',
+ file=sys.stderr)
+ print('\nNOTE: you can use "mypy --pdb ..." to drop into the debugger when this happens.',
+ file=sys.stderr)
sys.exit(1)
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index 3b8ac51..60730b5 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -24,15 +24,7 @@ def expand_type_by_instance(typ: Type, instance: Instance) -> Type:
variables = {} # type: Dict[int, Type]
for i in range(len(instance.args)):
variables[i + 1] = instance.args[i]
- typ = expand_type(typ, variables)
- if isinstance(typ, CallableType):
- bounds = [] # type: List[Tuple[int, Type]]
- for j in range(len(instance.args)):
- bounds.append((j + 1, instance.args[j]))
- typ = update_callable_implicit_bounds(cast(CallableType, typ), bounds)
- else:
- pass
- return typ
+ return expand_type(typ, variables)
class ExpandTypeVisitor(TypeVisitor[Type]):
@@ -83,8 +75,7 @@ class ExpandTypeVisitor(TypeVisitor[Type]):
def visit_callable_type(self, t: CallableType) -> Type:
return t.copy_modified(arg_types=self.expand_types(t.arg_types),
- ret_type=t.ret_type.accept(self),
- bound_vars=self.expand_bound_vars(t.bound_vars))
+ ret_type=t.ret_type.accept(self))
def visit_overloaded(self, t: Overloaded) -> Type:
items = [] # type: List[CallableType]
@@ -96,7 +87,9 @@ class ExpandTypeVisitor(TypeVisitor[Type]):
return TupleType(self.expand_types(t.items), t.fallback, t.line)
def visit_union_type(self, t: UnionType) -> Type:
- return UnionType(self.expand_types(t.items), t.line)
+ # After substituting for type variables in t.items,
+ # some of the resulting types might be subtypes of others.
+ return UnionType.make_simplified_union(self.expand_types(t.items), t.line)
def visit_partial_type(self, t: PartialType) -> Type:
return t
@@ -106,16 +99,3 @@ class ExpandTypeVisitor(TypeVisitor[Type]):
for t in types:
a.append(t.accept(self))
return a
-
- def expand_bound_vars(
- self, types: List[Tuple[int, Type]]) -> List[Tuple[int, Type]]:
- a = [] # type: List[Tuple[int, Type]]
- for id, t in types:
- a.append((id, t.accept(self)))
- return a
-
-
-def update_callable_implicit_bounds(
- t: CallableType, arg_types: List[Tuple[int, Type]]) -> CallableType:
- # FIX what if there are existing bounds?
- return t.copy_modified(bound_vars=arg_types)
diff --git a/mypy/exprtotype.py b/mypy/exprtotype.py
index 4879cec..dc95f3d 100644
--- a/mypy/exprtotype.py
+++ b/mypy/exprtotype.py
@@ -1,9 +1,7 @@
"""Translate an expression (Node) to a Type value."""
-from typing import cast
-
from mypy.nodes import (
- Node, NameExpr, MemberExpr, IndexExpr, TupleExpr, ListExpr, StrExpr, EllipsisExpr
+ Node, NameExpr, MemberExpr, IndexExpr, TupleExpr, ListExpr, StrExpr, BytesExpr, EllipsisExpr
)
from mypy.parsetype import parse_str_as_type, TypeParseError
from mypy.types import Type, UnboundType, TypeList, EllipsisType
@@ -44,7 +42,7 @@ def expr_to_unanalyzed_type(expr: Node) -> Type:
elif isinstance(expr, ListExpr):
return TypeList([expr_to_unanalyzed_type(t) for t in expr.items],
line=expr.line)
- elif isinstance(expr, StrExpr):
+ elif isinstance(expr, (StrExpr, BytesExpr)):
# Parse string literal type.
try:
result = parse_str_as_type(expr.value, expr.line)
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
new file mode 100644
index 0000000..0728070
--- /dev/null
+++ b/mypy/fastparse.py
@@ -0,0 +1,771 @@
+from functools import wraps
+import sys
+
+from typing import Tuple, Union, TypeVar, Callable, Sequence, Optional, Any, cast, List
+from mypy.nodes import (
+ MypyFile, Node, ImportBase, Import, ImportAll, ImportFrom, FuncDef, OverloadedFuncDef,
+ ClassDef, Decorator, Block, Var, OperatorAssignmentStmt,
+ ExpressionStmt, AssignmentStmt, ReturnStmt, RaiseStmt, AssertStmt,
+ DelStmt, BreakStmt, ContinueStmt, PassStmt, GlobalDecl,
+ WhileStmt, ForStmt, IfStmt, TryStmt, WithStmt,
+ TupleExpr, GeneratorExpr, ListComprehension, ListExpr, ConditionalExpr,
+ DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr,
+ FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr, SliceExpr, OpExpr,
+ UnaryExpr, FuncExpr, ComparisonExpr,
+ StarExpr, YieldFromExpr, NonlocalDecl, DictionaryComprehension,
+ SetComprehension, ComplexExpr, EllipsisExpr, YieldExpr, Argument,
+ ARG_POS, ARG_OPT, ARG_STAR, ARG_NAMED, ARG_STAR2
+)
+from mypy.types import Type, CallableType, AnyType, UnboundType, TupleType, TypeList, EllipsisType
+from mypy import defaults
+from mypy.errors import Errors
+
+try:
+ from typed_ast import ast27
+ from typed_ast import ast35
+ from typed_ast import conversions
+except ImportError:
+ print('You must install the typed_ast module before you can run mypy with `--fast-parser`.\n'
+ 'The typed_ast module can be found at https://github.com/ddfisher/typed_ast',
+ file=sys.stderr)
+ sys.exit(1)
+
+T = TypeVar('T')
+U = TypeVar('U')
+
+
+def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
+ pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
+ custom_typing_module: str = None) -> MypyFile:
+ """Parse a source file, without doing any semantic analysis.
+
+ Return the parse tree. If errors is not provided, raise ParseError
+ on failure. Otherwise, use the errors object to report parse errors.
+
+ The pyversion (major, minor) argument determines the Python syntax variant.
+ """
+ is_stub_file = bool(fnam) and fnam.endswith('.pyi')
+ try:
+ if pyversion[0] >= 3 or is_stub_file:
+ ast = ast35.parse(source, fnam, 'exec')
+ else:
+ ast2 = ast27.parse(source, fnam, 'exec')
+ ast = conversions.py2to3(ast2)
+ except SyntaxError as e:
+ if errors:
+ errors.set_file('<input>' if fnam is None else fnam)
+ errors.report(e.lineno, e.msg) # type: ignore
+ else:
+ raise
+ else:
+ tree = ASTConverter(pyversion=pyversion,
+ custom_typing_module=custom_typing_module,
+ ).visit(ast)
+ tree.path = fnam
+ tree.is_stub = is_stub_file
+ return tree
+
+ return MypyFile([],
+ [],
+ False,
+ set(),
+ weak_opts=set())
+
+
+def parse_type_comment(type_comment: str, line: int) -> Type:
+ typ = ast35.parse(type_comment, '<type_comment>', 'eval')
+ assert isinstance(typ, ast35.Expression)
+ return TypeConverter(line=line).visit(typ.body)
+
+
+def with_line(f: Callable[[Any, T], U]) -> Callable[[Any, T], U]:
+ @wraps(f)
+ def wrapper(self, ast):
+ node = f(self, ast)
+ node.set_line(ast.lineno)
+ return node
+ return wrapper
+
+
+def find(f: Callable[[T], bool], seq: Sequence[T]) -> T:
+ for item in seq:
+ if f(item):
+ return item
+ return None
+
+
+class ASTConverter(ast35.NodeTransformer):
+ def __init__(self, pyversion: Tuple[int, int], custom_typing_module: str = None) -> None:
+ self.class_nesting = 0
+ self.imports = [] # type: List[ImportBase]
+
+ self.pyversion = pyversion
+ self.custom_typing_module = custom_typing_module
+
+ def generic_visit(self, node: ast35.AST) -> None:
+ raise RuntimeError('AST node not implemented: ' + str(type(node)))
+
+ def visit_NoneType(self, n: Any) -> Optional[Node]:
+ return None
+
+ def visit_list(self, l: Sequence[ast35.AST]) -> List[Node]:
+ return [self.visit(e) for e in l]
+
+ op_map = {
+ ast35.Add: '+',
+ ast35.Sub: '-',
+ ast35.Mult: '*',
+ ast35.MatMult: '@',
+ ast35.Div: '/',
+ ast35.Mod: '%',
+ ast35.Pow: '**',
+ ast35.LShift: '<<',
+ ast35.RShift: '>>',
+ ast35.BitOr: '|',
+ ast35.BitXor: '^',
+ ast35.BitAnd: '&',
+ ast35.FloorDiv: '//'
+ }
+
+ def from_operator(self, op: ast35.operator) -> str:
+ op_name = ASTConverter.op_map.get(type(op))
+ if op_name is None:
+ raise RuntimeError('Unknown operator ' + str(type(op)))
+ elif op_name == '@':
+ raise RuntimeError('mypy does not support the MatMult operator')
+ else:
+ return op_name
+
+ comp_op_map = {
+ ast35.Gt: '>',
+ ast35.Lt: '<',
+ ast35.Eq: '==',
+ ast35.GtE: '>=',
+ ast35.LtE: '<=',
+ ast35.NotEq: '!=',
+ ast35.Is: 'is',
+ ast35.IsNot: 'is not',
+ ast35.In: 'in',
+ ast35.NotIn: 'not in'
+ }
+
+ def from_comp_operator(self, op: ast35.cmpop) -> str:
+ op_name = ASTConverter.comp_op_map.get(type(op))
+ if op_name is None:
+ raise RuntimeError('Unknown comparison operator ' + str(type(op)))
+ else:
+ return op_name
+
+ def as_block(self, stmts: List[ast35.stmt], lineno: int) -> Block:
+ b = None
+ if stmts:
+ b = Block(self.visit_list(stmts))
+ b.set_line(lineno)
+ return b
+
+ def fix_function_overloads(self, stmts: List[Node]) -> List[Node]:
+ ret = [] # type: List[Node]
+ current_overload = []
+ current_overload_name = None
+ # mypy doesn't actually check that the decorator is literally @overload
+ for stmt in stmts:
+ if isinstance(stmt, Decorator) and stmt.name() == current_overload_name:
+ current_overload.append(stmt)
+ else:
+ if len(current_overload) == 1:
+ ret.append(current_overload[0])
+ elif len(current_overload) > 1:
+ ret.append(OverloadedFuncDef(current_overload))
+
+ if isinstance(stmt, Decorator):
+ current_overload = [stmt]
+ current_overload_name = stmt.name()
+ else:
+ current_overload = []
+ current_overload_name = None
+ ret.append(stmt)
+
+ if len(current_overload) == 1:
+ ret.append(current_overload[0])
+ elif len(current_overload) > 1:
+ ret.append(OverloadedFuncDef(current_overload))
+ return ret
+
+ def in_class(self) -> bool:
+ return self.class_nesting > 0
+
+ def translate_module_id(self, id: str) -> str:
+ """Return the actual, internal module id for a source text id.
+
+ For example, translate '__builtin__' in Python 2 to 'builtins'.
+ """
+ if id == self.custom_typing_module:
+ return 'typing'
+ elif id == '__builtin__' and self.pyversion[0] == 2:
+ # HACK: __builtin__ in Python 2 is aliases to builtins. However, the implementation
+ # is named __builtin__.py (there is another layer of translation elsewhere).
+ return 'builtins'
+ return id
+
+ def visit_Module(self, mod: ast35.Module) -> Node:
+ body = self.fix_function_overloads(self.visit_list(mod.body))
+
+ return MypyFile(body,
+ self.imports,
+ False,
+ {ti.lineno for ti in mod.type_ignores},
+ weak_opts=set())
+
+ # --- stmt ---
+ # FunctionDef(identifier name, arguments args,
+ # stmt* body, expr* decorator_list, expr? returns, string? type_comment)
+ # arguments = (arg* args, arg? vararg, arg* kwonlyargs, expr* kw_defaults,
+ # arg? kwarg, expr* defaults)
+ @with_line
+ def visit_FunctionDef(self, n: ast35.FunctionDef) -> Node:
+ args = self.transform_args(n.args, n.lineno)
+
+ arg_kinds = [arg.kind for arg in args]
+ arg_names = [arg.variable.name() for arg in args]
+ arg_types = None # type: List[Type]
+ if n.type_comment is not None:
+ func_type_ast = ast35.parse(n.type_comment, '<func_type>', 'func_type')
+ assert isinstance(func_type_ast, ast35.FunctionType)
+ # for ellipsis arg
+ if (len(func_type_ast.argtypes) == 1 and
+ isinstance(func_type_ast.argtypes[0], ast35.Ellipsis)):
+ arg_types = [AnyType() for a in args]
+ else:
+ arg_types = [a if a is not None else AnyType() for
+ a in TypeConverter(line=n.lineno).visit_list(func_type_ast.argtypes)]
+ return_type = TypeConverter(line=n.lineno).visit(func_type_ast.returns)
+
+ # add implicit self type
+ if self.in_class() and len(arg_types) < len(args):
+ arg_types.insert(0, AnyType())
+ else:
+ arg_types = [a.type_annotation for a in args]
+ return_type = TypeConverter(line=n.lineno).visit(n.returns)
+
+ func_type = None
+ if any(arg_types) or return_type:
+ func_type = CallableType([a if a is not None else AnyType() for a in arg_types],
+ arg_kinds,
+ arg_names,
+ return_type if return_type is not None else AnyType(),
+ None)
+
+ func_def = FuncDef(n.name,
+ args,
+ self.as_block(n.body, n.lineno),
+ func_type)
+ if func_type is not None:
+ func_type.definition = func_def
+
+ if n.decorator_list:
+ var = Var(func_def.name())
+ var.is_ready = False
+ var.set_line(n.decorator_list[0].lineno)
+
+ func_def.is_decorated = True
+ func_def.set_line(n.lineno + len(n.decorator_list))
+ func_def.body.set_line(func_def.get_line())
+ return Decorator(func_def, self.visit_list(n.decorator_list), var)
+ else:
+ return func_def
+
+ def transform_args(self, args: ast35.arguments, line: int) -> List[Argument]:
+ def make_argument(arg, default, kind):
+ arg_type = TypeConverter(line=line).visit(arg.annotation)
+ return Argument(Var(arg.arg), arg_type, self.visit(default), kind)
+
+ new_args = []
+ num_no_defaults = len(args.args) - len(args.defaults)
+ # positional arguments without defaults
+ for a in args.args[:num_no_defaults]:
+ new_args.append(make_argument(a, None, ARG_POS))
+
+ # positional arguments with defaults
+ for a, d in zip(args.args[num_no_defaults:], args.defaults):
+ new_args.append(make_argument(a, d, ARG_OPT))
+
+ # *arg
+ if args.vararg is not None:
+ new_args.append(make_argument(args.vararg, None, ARG_STAR))
+
+ num_no_kw_defaults = len(args.kwonlyargs) - len(args.kw_defaults)
+ # keyword-only arguments without defaults
+ for a in args.kwonlyargs[:num_no_kw_defaults]:
+ new_args.append(make_argument(a, None, ARG_NAMED))
+
+ # keyword-only arguments with defaults
+ for a, d in zip(args.kwonlyargs[num_no_kw_defaults:], args.kw_defaults):
+ new_args.append(make_argument(a, d, ARG_NAMED))
+
+ # **kwarg
+ if args.kwarg is not None:
+ new_args.append(make_argument(args.kwarg, None, ARG_STAR2))
+
+ return new_args
+
+ # TODO: AsyncFunctionDef(identifier name, arguments args,
+ # stmt* body, expr* decorator_list, expr? returns, string? type_comment)
+
+ def stringify_name(self, n: ast35.AST) -> str:
+ if isinstance(n, ast35.Name):
+ return n.id
+ elif isinstance(n, ast35.Attribute):
+ return "{}.{}".format(self.stringify_name(n.value), n.attr)
+ else:
+ assert False, "can't stringify " + str(type(n))
+
+ # ClassDef(identifier name,
+ # expr* bases,
+ # keyword* keywords,
+ # stmt* body,
+ # expr* decorator_list)
+ @with_line
+ def visit_ClassDef(self, n: ast35.ClassDef) -> Node:
+ self.class_nesting += 1
+ metaclass_arg = find(lambda x: x.arg == 'metaclass', n.keywords)
+ metaclass = None
+ if metaclass_arg:
+ metaclass = self.stringify_name(metaclass_arg.value)
+
+ cdef = ClassDef(n.name,
+ Block(self.fix_function_overloads(self.visit_list(n.body))),
+ None,
+ self.visit_list(n.bases),
+ metaclass=metaclass)
+ cdef.decorators = self.visit_list(n.decorator_list)
+ self.class_nesting -= 1
+ return cdef
+
+ # Return(expr? value)
+ @with_line
+ def visit_Return(self, n: ast35.Return) -> Node:
+ return ReturnStmt(self.visit(n.value))
+
+ # Delete(expr* targets)
+ @with_line
+ def visit_Delete(self, n: ast35.Delete) -> Node:
+ if len(n.targets) > 1:
+ tup = TupleExpr(self.visit_list(n.targets))
+ tup.set_line(n.lineno)
+ return DelStmt(tup)
+ else:
+ return DelStmt(self.visit(n.targets[0]))
+
+ # Assign(expr* targets, expr value, string? type_comment)
+ @with_line
+ def visit_Assign(self, n: ast35.Assign) -> Node:
+ typ = None
+ if n.type_comment:
+ typ = parse_type_comment(n.type_comment, n.lineno)
+
+ return AssignmentStmt(self.visit_list(n.targets),
+ self.visit(n.value),
+ type=typ)
+
+ # AugAssign(expr target, operator op, expr value)
+ @with_line
+ def visit_AugAssign(self, n: ast35.AugAssign) -> Node:
+ return OperatorAssignmentStmt(self.from_operator(n.op),
+ self.visit(n.target),
+ self.visit(n.value))
+
+ # For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)
+ @with_line
+ def visit_For(self, n: ast35.For) -> Node:
+ return ForStmt(self.visit(n.target),
+ self.visit(n.iter),
+ self.as_block(n.body, n.lineno),
+ self.as_block(n.orelse, n.lineno))
+
+ # TODO: AsyncFor(expr target, expr iter, stmt* body, stmt* orelse)
+ # While(expr test, stmt* body, stmt* orelse)
+ @with_line
+ def visit_While(self, n: ast35.While) -> Node:
+ return WhileStmt(self.visit(n.test),
+ self.as_block(n.body, n.lineno),
+ self.as_block(n.orelse, n.lineno))
+
+ # If(expr test, stmt* body, stmt* orelse)
+ @with_line
+ def visit_If(self, n: ast35.If) -> Node:
+ return IfStmt([self.visit(n.test)],
+ [self.as_block(n.body, n.lineno)],
+ self.as_block(n.orelse, n.lineno))
+
+ # With(withitem* items, stmt* body, string? type_comment)
+ @with_line
+ def visit_With(self, n: ast35.With) -> Node:
+ return WithStmt([self.visit(i.context_expr) for i in n.items],
+ [self.visit(i.optional_vars) for i in n.items],
+ self.as_block(n.body, n.lineno))
+
+ # TODO: AsyncWith(withitem* items, stmt* body)
+
+ # Raise(expr? exc, expr? cause)
+ @with_line
+ def visit_Raise(self, n: ast35.Raise) -> Node:
+ return RaiseStmt(self.visit(n.exc), self.visit(n.cause))
+
+ # Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)
+ @with_line
+ def visit_Try(self, n: ast35.Try) -> Node:
+ vs = [NameExpr(h.name) if h.name is not None else None for h in n.handlers]
+ types = [self.visit(h.type) for h in n.handlers]
+ handlers = [self.as_block(h.body, h.lineno) for h in n.handlers]
+
+ return TryStmt(self.as_block(n.body, n.lineno),
+ vs,
+ types,
+ handlers,
+ self.as_block(n.orelse, n.lineno),
+ self.as_block(n.finalbody, n.lineno))
+
+ # Assert(expr test, expr? msg)
+ @with_line
+ def visit_Assert(self, n: ast35.Assert) -> Node:
+ return AssertStmt(self.visit(n.test))
+
+ # Import(alias* names)
+ @with_line
+ def visit_Import(self, n: ast35.Import) -> Node:
+ i = Import([(self.translate_module_id(a.name), a.asname) for a in n.names])
+ self.imports.append(i)
+ return i
+
+ # ImportFrom(identifier? module, alias* names, int? level)
+ @with_line
+ def visit_ImportFrom(self, n: ast35.ImportFrom) -> Node:
+ i = None # type: ImportBase
+ if len(n.names) == 1 and n.names[0].name == '*':
+ i = ImportAll(n.module, n.level)
+ else:
+ i = ImportFrom(self.translate_module_id(n.module) if n.module is not None else '',
+ n.level,
+ [(a.name, a.asname) for a in n.names])
+ self.imports.append(i)
+ return i
+
+ # Global(identifier* names)
+ @with_line
+ def visit_Global(self, n: ast35.Global) -> Node:
+ return GlobalDecl(n.names)
+
+ # Nonlocal(identifier* names)
+ @with_line
+ def visit_Nonlocal(self, n: ast35.Nonlocal) -> Node:
+ return NonlocalDecl(n.names)
+
+ # Expr(expr value)
+ @with_line
+ def visit_Expr(self, n: ast35.Expr) -> Node:
+ value = self.visit(n.value)
+ return ExpressionStmt(value)
+
+ # Pass
+ @with_line
+ def visit_Pass(self, n: ast35.Pass) -> Node:
+ return PassStmt()
+
+ # Break
+ @with_line
+ def visit_Break(self, n: ast35.Break) -> Node:
+ return BreakStmt()
+
+ # Continue
+ @with_line
+ def visit_Continue(self, n: ast35.Continue) -> Node:
+ return ContinueStmt()
+
+ # --- expr ---
+ # BoolOp(boolop op, expr* values)
+ @with_line
+ def visit_BoolOp(self, n: ast35.BoolOp) -> Node:
+ # mypy translates (1 and 2 and 3) as (1 and (2 and 3))
+ assert len(n.values) >= 2
+ op = None
+ if isinstance(n.op, ast35.And):
+ op = 'and'
+ elif isinstance(n.op, ast35.Or):
+ op = 'or'
+ else:
+ raise RuntimeError('unknown BoolOp ' + str(type(n)))
+
+ # potentially inefficient!
+ def group(vals):
+ if len(vals) == 2:
+ return OpExpr(op, vals[0], vals[1])
+ else:
+ return OpExpr(op, vals[0], group(vals[1:]))
+
+ return group(self.visit_list(n.values))
+
+ # BinOp(expr left, operator op, expr right)
+ @with_line
+ def visit_BinOp(self, n: ast35.BinOp) -> Node:
+ op = self.from_operator(n.op)
+
+ if op is None:
+ raise RuntimeError('cannot translate BinOp ' + str(type(n.op)))
+
+ return OpExpr(op, self.visit(n.left), self.visit(n.right))
+
+ # UnaryOp(unaryop op, expr operand)
+ @with_line
+ def visit_UnaryOp(self, n: ast35.UnaryOp) -> Node:
+ op = None
+ if isinstance(n.op, ast35.Invert):
+ op = '~'
+ elif isinstance(n.op, ast35.Not):
+ op = 'not'
+ elif isinstance(n.op, ast35.UAdd):
+ op = '+'
+ elif isinstance(n.op, ast35.USub):
+ op = '-'
+
+ if op is None:
+ raise RuntimeError('cannot translate UnaryOp ' + str(type(n.op)))
+
+ return UnaryExpr(op, self.visit(n.operand))
+
+ # Lambda(arguments args, expr body)
+ @with_line
+ def visit_Lambda(self, n: ast35.Lambda) -> Node:
+ body = ast35.Return(n.body)
+ body.lineno = n.lineno
+
+ return FuncExpr(self.transform_args(n.args, n.lineno),
+ self.as_block([body], n.lineno))
+
+ # IfExp(expr test, expr body, expr orelse)
+ @with_line
+ def visit_IfExp(self, n: ast35.IfExp) -> Node:
+ return ConditionalExpr(self.visit(n.test),
+ self.visit(n.body),
+ self.visit(n.orelse))
+
+ # Dict(expr* keys, expr* values)
+ @with_line
+ def visit_Dict(self, n: ast35.Dict) -> Node:
+ return DictExpr(list(zip(self.visit_list(n.keys), self.visit_list(n.values))))
+
+ # Set(expr* elts)
+ @with_line
+ def visit_Set(self, n: ast35.Set) -> Node:
+ return SetExpr(self.visit_list(n.elts))
+
+ # ListComp(expr elt, comprehension* generators)
+ @with_line
+ def visit_ListComp(self, n: ast35.ListComp) -> Node:
+ return ListComprehension(self.visit_GeneratorExp(cast(ast35.GeneratorExp, n)))
+
+ # SetComp(expr elt, comprehension* generators)
+ @with_line
+ def visit_SetComp(self, n: ast35.SetComp) -> Node:
+ return SetComprehension(self.visit_GeneratorExp(cast(ast35.GeneratorExp, n)))
+
+ # DictComp(expr key, expr value, comprehension* generators)
+ @with_line
+ def visit_DictComp(self, n: ast35.DictComp) -> Node:
+ targets = [self.visit(c.target) for c in n.generators]
+ iters = [self.visit(c.iter) for c in n.generators]
+ ifs_list = [self.visit_list(c.ifs) for c in n.generators]
+ return DictionaryComprehension(self.visit(n.key),
+ self.visit(n.value),
+ targets,
+ iters,
+ ifs_list)
+
+ # GeneratorExp(expr elt, comprehension* generators)
+ @with_line
+ def visit_GeneratorExp(self, n: ast35.GeneratorExp) -> GeneratorExpr:
+ targets = [self.visit(c.target) for c in n.generators]
+ iters = [self.visit(c.iter) for c in n.generators]
+ ifs_list = [self.visit_list(c.ifs) for c in n.generators]
+ return GeneratorExpr(self.visit(n.elt),
+ targets,
+ iters,
+ ifs_list)
+
+ # TODO: Await(expr value)
+
+ # Yield(expr? value)
+ @with_line
+ def visit_Yield(self, n: ast35.Yield) -> Node:
+ return YieldExpr(self.visit(n.value))
+
+ # YieldFrom(expr value)
+ @with_line
+ def visit_YieldFrom(self, n: ast35.YieldFrom) -> Node:
+ return YieldFromExpr(self.visit(n.value))
+
+ # Compare(expr left, cmpop* ops, expr* comparators)
+ @with_line
+ def visit_Compare(self, n: ast35.Compare) -> Node:
+ operators = [self.from_comp_operator(o) for o in n.ops]
+ operands = self.visit_list([n.left] + n.comparators)
+ return ComparisonExpr(operators, operands)
+
+ # Call(expr func, expr* args, keyword* keywords)
+ # keyword = (identifier? arg, expr value)
+ @with_line
+ def visit_Call(self, n: ast35.Call) -> Node:
+ def is_star2arg(k):
+ return k.arg is None
+
+ arg_types = self.visit_list(
+ [a.value if isinstance(a, ast35.Starred) else a for a in n.args] +
+ [k.value for k in n.keywords])
+ arg_kinds = ([ARG_STAR if isinstance(a, ast35.Starred) else ARG_POS for a in n.args] +
+ [ARG_STAR2 if is_star2arg(k) else ARG_NAMED for k in n.keywords])
+ return CallExpr(self.visit(n.func),
+ arg_types,
+ arg_kinds,
+ cast("List[str]", [None for _ in n.args]) + [k.arg for k in n.keywords])
+
+ # Num(object n) -- a number as a PyObject.
+ @with_line
+ def visit_Num(self, n: ast35.Num) -> Node:
+ if isinstance(n.n, int):
+ return IntExpr(n.n)
+ elif isinstance(n.n, float):
+ return FloatExpr(n.n)
+ elif isinstance(n.n, complex):
+ return ComplexExpr(n.n)
+
+ raise RuntimeError('num not implemented for ' + str(type(n.n)))
+
+ # Str(string s) -- need to specify raw, unicode, etc?
+ @with_line
+ def visit_Str(self, n: ast35.Str) -> Node:
+ return StrExpr(n.s)
+
+ # Bytes(bytes s)
+ @with_line
+ def visit_Bytes(self, n: ast35.Bytes) -> Node:
+ # TODO: this is kind of hacky
+ return BytesExpr(str(n.s)[2:-1])
+
+ # NameConstant(singleton value)
+ def visit_NameConstant(self, n: ast35.NameConstant) -> Node:
+ return NameExpr(str(n.value))
+
+ # Ellipsis
+ @with_line
+ def visit_Ellipsis(self, n: ast35.Ellipsis) -> Node:
+ return EllipsisExpr()
+
+ # Attribute(expr value, identifier attr, expr_context ctx)
+ @with_line
+ def visit_Attribute(self, n: ast35.Attribute) -> Node:
+ if (isinstance(n.value, ast35.Call) and
+ isinstance(n.value.func, ast35.Name) and
+ n.value.func.id == 'super'):
+ return SuperExpr(n.attr)
+
+ return MemberExpr(self.visit(n.value), n.attr)
+
+ # Subscript(expr value, slice slice, expr_context ctx)
+ @with_line
+ def visit_Subscript(self, n: ast35.Subscript) -> Node:
+ return IndexExpr(self.visit(n.value), self.visit(n.slice))
+
+ # Starred(expr value, expr_context ctx)
+ @with_line
+ def visit_Starred(self, n: ast35.Starred) -> Node:
+ return StarExpr(self.visit(n.value))
+
+ # Name(identifier id, expr_context ctx)
+ @with_line
+ def visit_Name(self, n: ast35.Name) -> Node:
+ return NameExpr(n.id)
+
+ # List(expr* elts, expr_context ctx)
+ @with_line
+ def visit_List(self, n: ast35.List) -> Node:
+ return ListExpr([self.visit(e) for e in n.elts])
+
+ # Tuple(expr* elts, expr_context ctx)
+ @with_line
+ def visit_Tuple(self, n: ast35.Tuple) -> Node:
+ return TupleExpr([self.visit(e) for e in n.elts])
+
+ # --- slice ---
+
+ # Slice(expr? lower, expr? upper, expr? step)
+ def visit_Slice(self, n: ast35.Slice) -> Node:
+ return SliceExpr(self.visit(n.lower),
+ self.visit(n.upper),
+ self.visit(n.step))
+
+ # ExtSlice(slice* dims)
+ def visit_ExtSlice(self, n: ast35.ExtSlice) -> Node:
+ return TupleExpr(self.visit_list(n.dims))
+
+ # Index(expr value)
+ def visit_Index(self, n: ast35.Index) -> Node:
+ return self.visit(n.value)
+
+
+class TypeConverter(ast35.NodeTransformer):
+ def __init__(self, line: int = -1) -> None:
+ self.line = line
+
+ def generic_visit(self, node: ast35.AST) -> None:
+ raise RuntimeError('Type node not implemented: ' + str(type(node)))
+
+ def visit_NoneType(self, n: Any) -> Type:
+ return None
+
+ def visit_list(self, l: Sequence[ast35.AST]) -> List[Type]:
+ return [self.visit(e) for e in l]
+
+ def visit_Name(self, n: ast35.Name) -> Type:
+ return UnboundType(n.id, line=self.line)
+
+ def visit_NameConstant(self, n: ast35.NameConstant) -> Type:
+ return UnboundType(str(n.value))
+
+ # Str(string s)
+ def visit_Str(self, n: ast35.Str) -> Type:
+ return parse_type_comment(n.s.strip(), line=self.line)
+
+ # Subscript(expr value, slice slice, expr_context ctx)
+ def visit_Subscript(self, n: ast35.Subscript) -> Type:
+ assert isinstance(n.slice, ast35.Index)
+
+ value = self.visit(n.value)
+
+ assert isinstance(value, UnboundType)
+ assert not value.args
+
+ if isinstance(n.slice.value, ast35.Tuple):
+ params = self.visit_list(n.slice.value.elts)
+ else:
+ params = [self.visit(n.slice.value)]
+
+ return UnboundType(value.name, params, line=self.line)
+
+ def visit_Tuple(self, n: ast35.Tuple) -> Type:
+ return TupleType(self.visit_list(n.elts), None, implicit=True, line=self.line)
+
+ # Attribute(expr value, identifier attr, expr_context ctx)
+ def visit_Attribute(self, n: ast35.Attribute) -> Type:
+ before_dot = self.visit(n.value)
+
+ assert isinstance(before_dot, UnboundType)
+ assert not before_dot.args
+
+ return UnboundType("{}.{}".format(before_dot.name, n.attr), line=self.line)
+
+ # Ellipsis
+ def visit_Ellipsis(self, n: ast35.Ellipsis) -> Type:
+ return EllipsisType(line=self.line)
+
+ # List(expr* elts, expr_context ctx)
+ def visit_List(self, n: ast35.List) -> Type:
+ return TypeList(self.visit_list(n.elts), line=self.line)
diff --git a/mypy/fixup.py b/mypy/fixup.py
new file mode 100644
index 0000000..134e611
--- /dev/null
+++ b/mypy/fixup.py
@@ -0,0 +1,273 @@
+"""Fix up various things after deserialization."""
+
+from typing import Any, Dict, Optional, cast
+
+from mypy.nodes import (MypyFile, SymbolNode, SymbolTable, SymbolTableNode,
+ TypeInfo, FuncDef, OverloadedFuncDef, Decorator, Var,
+ TypeVarExpr, ClassDef,
+ LDEF, MDEF, GDEF, MODULE_REF)
+from mypy.types import (CallableType, EllipsisType, Instance, Overloaded, TupleType,
+ TypeList, TypeVarType, UnboundType, UnionType, TypeVisitor)
+from mypy.visitor import NodeVisitor
+
+
+def fixup_module_pass_one(tree: MypyFile, modules: Dict[str, MypyFile]) -> None:
+ node_fixer = NodeFixer(modules)
+ node_fixer.visit_symbol_table(tree.names)
+
+
+def fixup_module_pass_two(tree: MypyFile, modules: Dict[str, MypyFile]) -> None:
+ compute_all_mros(tree.names, modules)
+
+
+def compute_all_mros(symtab: SymbolTable, modules: Dict[str, MypyFile]) -> None:
+ for key, value in symtab.items():
+ if value.kind in (LDEF, MDEF, GDEF) and isinstance(value.node, TypeInfo):
+ info = value.node
+ info.calculate_mro()
+ assert info.mro, "No MRO calculated for %s" % (info.fullname(),)
+ compute_all_mros(info.names, modules)
+
+
+# TODO: Fix up .info when deserializing, i.e. much earlier.
+class NodeFixer(NodeVisitor[None]):
+ current_info = None # type: Optional[TypeInfo]
+
+ def __init__(self, modules: Dict[str, MypyFile], type_fixer: 'TypeFixer' = None) -> None:
+ self.modules = modules
+ if type_fixer is None:
+ type_fixer = TypeFixer(self.modules)
+ self.type_fixer = type_fixer
+
+ # NOTE: This method isn't (yet) part of the NodeVisitor API.
+ def visit_type_info(self, info: TypeInfo) -> None:
+ save_info = self.current_info
+ try:
+ self.current_info = info
+ if info.defn:
+ info.defn.accept(self)
+ if info.names:
+ self.visit_symbol_table(info.names)
+ if info.subtypes:
+ for st in info.subtypes:
+ self.visit_type_info(st)
+ if info.bases:
+ for base in info.bases:
+ base.accept(self.type_fixer)
+ if info._promote:
+ info._promote.accept(self.type_fixer)
+ if info.tuple_type:
+ info.tuple_type.accept(self.type_fixer)
+ finally:
+ self.current_info = save_info
+
+ # NOTE: This method *definitely* isn't part of the NodeVisitor API.
+ def visit_symbol_table(self, symtab: SymbolTable) -> None:
+ # Copy the items because we may mutate symtab.
+ for key, value in list(symtab.items()):
+ cross_ref = value.cross_ref
+ if cross_ref is not None: # Fix up cross-reference.
+ del value.cross_ref
+ if cross_ref in self.modules:
+ value.node = self.modules[cross_ref]
+ else:
+ stnode = lookup_qualified_stnode(self.modules, cross_ref)
+ assert stnode is not None, "Could not find cross-ref %s" % (cross_ref,)
+ value.node = stnode.node
+ value.type_override = stnode.type_override
+ else:
+ if isinstance(value.node, TypeInfo):
+ # TypeInfo has no accept(). TODO: Add it?
+ self.visit_type_info(value.node)
+ elif value.node is not None:
+ value.node.accept(self)
+ if value.type_override is not None:
+ value.type_override.accept(self.type_fixer)
+
+ def visit_func_def(self, func: FuncDef) -> None:
+ if self.current_info is not None:
+ func.info = self.current_info
+ if func.type is not None:
+ func.type.accept(self.type_fixer)
+ for arg in func.arguments:
+ if arg.type_annotation is not None:
+ arg.type_annotation.accept(self.type_fixer)
+
+ def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None:
+ if self.current_info is not None:
+ o.info = self.current_info
+ if o.type:
+ o.type.accept(self.type_fixer)
+ for item in o.items:
+ item.accept(self)
+
+ def visit_decorator(self, d: Decorator) -> None:
+ if self.current_info is not None:
+ d.var.info = self.current_info
+ if d.func:
+ d.func.accept(self)
+ if d.var:
+ d.var.accept(self)
+ for node in d.decorators:
+ node.accept(self)
+
+ def visit_class_def(self, c: ClassDef) -> None:
+ for v in c.type_vars:
+ for value in v.values:
+ value.accept(self.type_fixer)
+ v.upper_bound.accept(self.type_fixer)
+
+ def visit_type_var_expr(self, tv: TypeVarExpr) -> None:
+ for value in tv.values:
+ value.accept(self.type_fixer)
+
+ def visit_var(self, v: Var) -> None:
+ if self.current_info is not None:
+ v.info = self.current_info
+ if v.type is not None:
+ v.type.accept(self.type_fixer)
+
+
+class TypeFixer(TypeVisitor[None]):
+ def __init__(self, modules: Dict[str, MypyFile]) -> None:
+ self.modules = modules
+
+ def visit_instance(self, inst: Instance) -> None:
+ # TODO: Combine Instances that are exactly the same?
+ type_ref = inst.type_ref
+ if type_ref is None:
+ return # We've already been here.
+ del inst.type_ref
+ node = lookup_qualified(self.modules, type_ref)
+ if isinstance(node, TypeInfo):
+ inst.type = node
+ # TODO: Is this needed or redundant?
+ # Also fix up the bases, just in case.
+ for base in inst.type.bases:
+ if base.type is None:
+ base.accept(self)
+ for a in inst.args:
+ a.accept(self)
+
+ def visit_any(self, o: Any) -> None:
+ pass # Nothing to descend into.
+
+ def visit_callable_type(self, ct: CallableType) -> None:
+ if ct.fallback:
+ ct.fallback.accept(self)
+ for argt in ct.arg_types:
+ # argt may be None, e.g. for __self in NamedTuple constructors.
+ if argt is not None:
+ argt.accept(self)
+ if ct.ret_type is not None:
+ ct.ret_type.accept(self)
+ for v in ct.variables:
+ if v.values:
+ for val in v.values:
+ val.accept(self)
+ v.upper_bound.accept(self)
+
+ def visit_ellipsis_type(self, e: EllipsisType) -> None:
+ pass # Nothing to descend into.
+
+ def visit_overloaded(self, t: Overloaded) -> None:
+ for ct in t.items():
+ ct.accept(self)
+
+ def visit_deleted_type(self, o: Any) -> None:
+ pass # Nothing to descend into.
+
+ def visit_none_type(self, o: Any) -> None:
+ pass # Nothing to descend into.
+
+ def visit_partial_type(self, o: Any) -> None:
+ raise RuntimeError("Shouldn't get here", o)
+
+ def visit_tuple_type(self, tt: TupleType) -> None:
+ if tt.items:
+ for it in tt.items:
+ it.accept(self)
+ if tt.fallback is not None:
+ tt.fallback.accept(self)
+
+ def visit_type_list(self, tl: TypeList) -> None:
+ for t in tl.items:
+ t.accept(self)
+
+ def visit_type_var(self, tvt: TypeVarType) -> None:
+ if tvt.values:
+ for vt in tvt.values:
+ vt.accept(self)
+ if tvt.upper_bound is not None:
+ tvt.upper_bound.accept(self)
+
+ def visit_unbound_type(self, o: UnboundType) -> None:
+ for a in o.args:
+ a.accept(self)
+
+ def visit_union_type(self, ut: UnionType) -> None:
+ if ut.items:
+ for it in ut.items:
+ it.accept(self)
+
+ def visit_void(self, o: Any) -> None:
+ pass # Nothing to descend into.
+
+
+def lookup_qualified(modules: Dict[str, MypyFile], name: str) -> SymbolNode:
+ stnode = lookup_qualified_stnode(modules, name)
+ if stnode is None:
+ return None
+ else:
+ return stnode.node
+
+
+def lookup_qualified_stnode(modules: Dict[str, MypyFile], name: str) -> SymbolTableNode:
+ head = name
+ rest = []
+ while True:
+ assert '.' in head, "Cannot find %s" % (name,)
+ head, tail = head.rsplit('.', 1)
+ mod = modules.get(head)
+ if mod is not None:
+ rest.append(tail)
+ break
+ names = mod.names
+ while True:
+ assert rest, "Cannot find %s" % (name,)
+ key = rest.pop()
+ assert key in names, "Cannot find %s for %s" % (key, name)
+ stnode = names[key]
+ if not rest:
+ return stnode
+ node = stnode.node
+ assert isinstance(node, TypeInfo)
+ names = cast(TypeInfo, node).names
+
+
+def store_qualified(modules: Dict[str, MypyFile], name: str, info: SymbolNode) -> None:
+ head = name
+ rest = []
+ while True:
+ head, tail = head.rsplit('.', 1)
+ mod = modules.get(head)
+ if mod is not None:
+ rest.append(tail)
+ break
+ names = mod.names
+ while True:
+ assert rest, "Cannot find %s" % (name,)
+ key = rest.pop()
+ if key not in names:
+ assert not rest, "Cannot find %s for %s" % (key, name)
+ # Store it.
+ # TODO: kind might be something else?
+ names[key] = SymbolTableNode(GDEF, info)
+ return
+ stnode = names[key]
+ node = stnode.node
+ if not rest:
+ stnode.node = info
+ return
+ assert isinstance(node, TypeInfo)
+ names = cast(TypeInfo, node).names
diff --git a/mypy/infer.py b/mypy/infer.py
index 92d1be6..3ba66ef 100644
--- a/mypy/infer.py
+++ b/mypy/infer.py
@@ -1,6 +1,6 @@
"""Utilities for type argument inference."""
-from typing import List
+from typing import List, Optional
from mypy.constraints import infer_constraints, infer_constraints_for_callable
from mypy.types import Type, CallableType
@@ -9,7 +9,7 @@ from mypy.constraints import SUBTYPE_OF
def infer_function_type_arguments(callee_type: CallableType,
- arg_types: List[Type],
+ arg_types: List[Optional[Type]],
arg_kinds: List[int],
formal_to_actual: List[List[int]],
strict: bool = True) -> List[Type]:
@@ -21,7 +21,8 @@ def infer_function_type_arguments(callee_type: CallableType,
Arguments:
callee_type: the target generic function
- arg_types: argument types at the call site
+ arg_types: argument types at the call site (each optional; if None,
+ we are not considering this argument in the current pass)
arg_kinds: nodes.ARG_* values for arg_types
formal_to_actual: mapping from formal to actual variable indices
"""
diff --git a/mypy/join.py b/mypy/join.py
index 7489fec..2ae0360 100644
--- a/mypy/join.py
+++ b/mypy/join.py
@@ -87,7 +87,7 @@ class TypeJoinVisitor(TypeVisitor[Type]):
if is_subtype(self.s, t):
return t
else:
- return UnionType(t.items + [self.s])
+ return UnionType.make_simplified_union([self.s, t])
def visit_error_type(self, t: ErrorType) -> Type:
return t
@@ -120,14 +120,14 @@ class TypeJoinVisitor(TypeVisitor[Type]):
return self.s
def visit_type_var(self, t: TypeVarType) -> Type:
- if isinstance(self.s, TypeVarType) and (cast(TypeVarType, self.s)).id == t.id:
+ if isinstance(self.s, TypeVarType) and self.s.id == t.id:
return self.s
else:
return self.default(self.s)
def visit_instance(self, t: Instance) -> Type:
if isinstance(self.s, Instance):
- return join_instances(t, cast(Instance, self.s))
+ return join_instances(t, self.s)
elif isinstance(self.s, FunctionLike):
return join_types(t, self.s.fallback)
else:
@@ -135,9 +135,8 @@ class TypeJoinVisitor(TypeVisitor[Type]):
def visit_callable_type(self, t: CallableType) -> Type:
# TODO: Consider subtyping instead of just similarity.
- if isinstance(self.s, CallableType) and is_similar_callables(
- t, cast(CallableType, self.s)):
- return combine_similar_callables(t, cast(CallableType, self.s))
+ if isinstance(self.s, CallableType) and is_similar_callables(t, self.s):
+ return combine_similar_callables(t, self.s)
elif isinstance(self.s, Overloaded):
# Switch the order of arguments to that we'll get to visit_overloaded.
return join_types(t, self.s)
@@ -191,12 +190,10 @@ class TypeJoinVisitor(TypeVisitor[Type]):
return join_types(t.fallback, s)
def visit_tuple_type(self, t: TupleType) -> Type:
- if (isinstance(self.s, TupleType) and
- cast(TupleType, self.s).length() == t.length()):
+ if isinstance(self.s, TupleType) and self.s.length() == t.length():
items = [] # type: List[Type]
for i in range(t.length()):
- items.append(self.join(t.items[i],
- (cast(TupleType, self.s)).items[i]))
+ items.append(self.join(t.items[i], self.s.items[i]))
# TODO: What if the fallback types are different?
return TupleType(items, t.fallback)
else:
@@ -235,7 +232,6 @@ def join_instances(t: Instance, s: Instance) -> Type:
Return ErrorType if the result is ambiguous.
"""
-
if t.type == s.type:
# Simplest case: join two types with the same base type (but
# potentially different arguments).
@@ -264,16 +260,29 @@ def join_instances_via_supertype(t: Instance, s: Instance) -> Type:
return join_types(t.type._promote, s)
elif s.type._promote and is_subtype(s.type._promote, t):
return join_types(t, s.type._promote)
- res = s
- mapped = map_instance_to_supertype(t, t.type.bases[0].type)
- join = join_instances(mapped, res)
- # If the join failed, fail. This is a defensive measure (this might
- # never happen).
- if isinstance(join, ErrorType):
- return join
- # Now the result must be an Instance, so the cast below cannot fail.
- res = cast(Instance, join)
- return res
+ # Compute the "best" supertype of t when joined with s.
+ # The definition of "best" may evolve; for now it is the one with
+ # the longest MRO. Ties are broken by using the earlier base.
+ best = None # type: Type
+ for base in t.type.bases:
+ mapped = map_instance_to_supertype(t, base.type)
+ res = join_instances(mapped, s)
+ if best is None or is_better(res, best):
+ best = res
+ assert best is not None
+ return best
+
+
+def is_better(t: Type, s: Type) -> bool:
+ # Given two possible results from join_instances_via_supertype(),
+ # indicate whether t is the better one.
+ if isinstance(t, Instance):
+ if not isinstance(s, Instance):
+ return True
+ # Use len(mro) as a proxy for the better choice.
+ if len(t.type.mro) > len(s.type.mro):
+ return True
+ return False
def is_similar_callables(t: CallableType, s: CallableType) -> bool:
diff --git a/mypy/lex.py b/mypy/lex.py
index d948785..27c18b5 100644
--- a/mypy/lex.py
+++ b/mypy/lex.py
@@ -688,15 +688,16 @@ class Lexer:
def lex_indent(self) -> None:
"""Analyze whitespace chars at the beginning of a line (indents)."""
s = self.match(self.indent_exp)
- if s != '' and s[-1] in self.comment_or_newline:
+ while True:
+ s = self.match(self.indent_exp)
+ if s == '' or s[-1] not in self.comment_or_newline:
+ break
# Empty line (whitespace only or comment only).
self.add_pre_whitespace(s[:-1])
if s[-1] == '#':
self.lex_comment()
else:
self.lex_break()
- self.lex_indent()
- return
indent = self.calc_indent(s)
if indent == self.indents[-1]:
# No change in indent: just whitespace.
@@ -880,7 +881,7 @@ if __name__ == '__main__':
# Lexically analyze a file and dump the tokens to stdout.
import sys
if len(sys.argv) != 2:
- print('Usage: lex.py FILE')
+ print('Usage: lex.py FILE', file=sys.stderr)
sys.exit(2)
fnam = sys.argv[1]
s = open(fnam, 'rb').read()
diff --git a/mypy/main.py b/mypy/main.py
index 6942da9..7579f62 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -1,18 +1,16 @@
"""Mypy type checker command line tool."""
+import argparse
import os
-import shutil
-import subprocess
+import re
import sys
-import tempfile
-import typing
-from typing import Optional, Dict, List, Tuple
+from typing import Optional, Dict, List, Set, Tuple
from mypy import build
from mypy import defaults
from mypy import git
-from mypy.build import BuildSource, PYTHON_EXTENSIONS
+from mypy.build import BuildSource, BuildResult, PYTHON_EXTENSIONS
from mypy.errors import CompileError, set_drop_into_pdb
from mypy.version import __version__
@@ -21,13 +19,14 @@ PY_EXTENSIONS = tuple(PYTHON_EXTENSIONS)
class Options:
+ """Options collected from flags."""
+
def __init__(self) -> None:
# Set default options.
self.target = build.TYPE_CHECK
self.build_flags = [] # type: List[str]
self.pyversion = defaults.PYTHON3_VERSION
self.custom_typing_module = None # type: str
- self.implicit_any = False
self.report_dirs = {} # type: Dict[str, str]
self.python_path = False
self.dirty_stubs = False
@@ -44,19 +43,25 @@ def main(script_path: str) -> None:
bin_dir = find_bin_directory(script_path)
else:
bin_dir = None
- sources, options = process_options(sys.argv[1:])
+ sources, options = process_options()
if options.pdb:
set_drop_into_pdb(True)
if not options.dirty_stubs:
git.verify_git_integrity_or_abort(build.default_data_dir(bin_dir))
+ f = sys.stdout
try:
if options.target == build.TYPE_CHECK:
- type_check_only(sources, bin_dir, options)
+ res = type_check_only(sources, bin_dir, options)
+ a = res.errors
else:
raise RuntimeError('unsupported target %d' % options.target)
except CompileError as e:
- for m in e.messages:
- sys.stdout.write(m + '\n')
+ a = e.messages
+ if not e.use_stdout:
+ f = sys.stderr
+ if a:
+ for m in a:
+ f.write(m + '\n')
sys.exit(1)
@@ -85,162 +90,260 @@ def readlinkabs(link: str) -> str:
def type_check_only(sources: List[BuildSource],
- bin_dir: str, options: Options) -> None:
+ bin_dir: str, options: Options) -> BuildResult:
# Type-check the program and dependencies and translate to Python.
- build.build(sources=sources,
- target=build.TYPE_CHECK,
- bin_dir=bin_dir,
- pyversion=options.pyversion,
- custom_typing_module=options.custom_typing_module,
- implicit_any=options.implicit_any,
- report_dirs=options.report_dirs,
- flags=options.build_flags,
- python_path=options.python_path)
-
-
-def process_options(args: List[str]) -> Tuple[List[BuildSource], Options]:
+ return build.build(sources=sources,
+ target=build.TYPE_CHECK,
+ bin_dir=bin_dir,
+ pyversion=options.pyversion,
+ custom_typing_module=options.custom_typing_module,
+ report_dirs=options.report_dirs,
+ flags=options.build_flags,
+ python_path=options.python_path)
+
+
+FOOTER = """environment variables:
+MYPYPATH additional module search path"""
+
+
+def process_options() -> Tuple[List[BuildSource], Options]:
"""Process command line arguments.
Return (mypy program path (or None),
module to run as script (or None),
parsed flags)
"""
- # TODO: Rewrite using argparse.
- options = Options()
- help = False
- ver = False
- while args and args[0].startswith('-'):
- if args[0] in ('--verbose', '-v'):
- options.build_flags.append(build.VERBOSE)
- args = args[1:]
- elif args[0] == '--py2':
- # Use Python 2 mode.
- options.pyversion = defaults.PYTHON2_VERSION
- args = args[1:]
- elif args[0] == '--python-version':
- version_components = args[1].split(".")[0:2]
- if len(version_components) != 2:
- fail("Invalid python version {} (expected format: 'x.y')".format(
- repr(args[1])))
- if not all(item.isdigit() for item in version_components):
- fail("Found non-digit in python version: {}".format(
- args[1]))
- options.pyversion = (int(version_components[0]), int(version_components[1]))
- args = args[2:]
- elif args[0] == '-f' or args[0] == '--dirty-stubs':
- options.dirty_stubs = True
- args = args[1:]
- elif args[0] == '-m' and args[1:]:
- if args[2:]:
- fail("No extra argument should appear after '-m mod'")
- options.build_flags.append(build.MODULE)
- return [BuildSource(None, args[1], None)], options
- elif args[0] == '--package' and args[1:]:
- if args[2:]:
- fail("No extra argument should appear after '--package dir'")
- options.build_flags.append(build.MODULE)
- lib_path = [os.getcwd()] + build.mypy_path()
- targets = build.find_modules_recursive(args[1], lib_path)
- if not targets:
- fail("Can't find package '{}'".format(args[1]))
- return targets, options
- elif args[0] == '-c' and args[1:]:
- if args[2:]:
- fail("No extra argument should appear after '-c string'")
- options.build_flags.append(build.PROGRAM_TEXT)
- return [BuildSource(None, None, args[1])], options
- elif args[0] in ('-h', '--help'):
- help = True
- args = args[1:]
- elif args[0] == '--stats':
- options.build_flags.append(build.DUMP_TYPE_STATS)
- args = args[1:]
- elif args[0] == '--inferstats':
- options.build_flags.append(build.DUMP_INFER_STATS)
- args = args[1:]
- elif args[0] == '--custom-typing' and args[1:]:
- options.custom_typing_module = args[1]
- args = args[2:]
- elif is_report(args[0]) and args[1:]:
- report_type = args[0][2:-7]
- report_dir = args[1]
- options.report_dirs[report_type] = report_dir
- args = args[2:]
- elif args[0] == '--use-python-path':
- options.python_path = True
- args = args[1:]
- elif args[0] in ('--silent-imports', '--silent', '-s'):
- options.build_flags.append(build.SILENT_IMPORTS)
- args = args[1:]
- elif args[0] == '--pdb':
- options.pdb = True
- args = args[1:]
- elif args[0] == '--implicit-any':
- options.implicit_any = True
- args = args[1:]
- elif args[0] in ('--version', '-V'):
- ver = True
- args = args[1:]
- else:
- usage('Unknown option: {}'.format(args[0]))
-
- if help:
- usage()
-
- if ver:
- version()
-
- if not args:
- usage('Missing target file or module')
- if options.python_path and options.pyversion[0] == 2:
- usage('Python version 2 (or --py2) specified, '
- 'but --use-python-path will search in sys.path of Python 3')
+ # Make the help output a little less jarring.
+ help_factory = (lambda prog:
+ argparse.RawDescriptionHelpFormatter(prog=prog, max_help_position=28))
+ parser = argparse.ArgumentParser(prog='mypy', epilog=FOOTER,
+ formatter_class=help_factory)
- targets = []
- for arg in args:
- if arg.endswith(PY_EXTENSIONS):
- targets.append(BuildSource(arg, crawl_up(arg)[1], None))
- elif os.path.isdir(arg):
- targets.extend(expand_dir(arg))
+ def parse_version(v):
+ m = re.match(r'\A(\d)\.(\d+)\Z', v)
+ if m:
+ return int(m.group(1)), int(m.group(2))
else:
- targets.append(BuildSource(arg, None, None))
- return targets, options
-
+ raise argparse.ArgumentTypeError(
+ "Invalid python version '{}' (expected format: 'x.y')".format(v))
+
+ parser.add_argument('-v', '--verbose', action='count', help="more verbose messages")
+ parser.add_argument('-V', '--version', action='version', # type: ignore # see typeshed#124
+ version='%(prog)s ' + __version__)
+ parser.add_argument('--python-version', type=parse_version, metavar='x.y',
+ help='use Python x.y')
+ parser.add_argument('--py2', dest='python_version', action='store_const',
+ const=defaults.PYTHON2_VERSION, help="use Python 2 mode")
+ parser.add_argument('-s', '--silent-imports', action='store_true',
+ help="don't follow imports to .py files")
+ parser.add_argument('--silent', action='store_true',
+ help="deprecated name for --silent-imports")
+ parser.add_argument('--almost-silent', action='store_true',
+ help="like --silent-imports but reports the imports as errors")
+ parser.add_argument('--disallow-untyped-calls', action='store_true',
+ help="disallow calling functions without type annotations"
+ " from functions with type annotations")
+ parser.add_argument('--disallow-untyped-defs', action='store_true',
+ help="disallow defining functions without type annotations"
+ " or with incomplete type annotations")
+ parser.add_argument('--check-untyped-defs', action='store_true',
+ help="type check the interior of functions without type annotations")
+ parser.add_argument('--fast-parser', action='store_true',
+ help="enable experimental fast parser")
+ parser.add_argument('-i', '--incremental', action='store_true',
+ help="enable experimental module cache")
+ parser.add_argument('-f', '--dirty-stubs', action='store_true',
+ help="don't warn if typeshed is out of sync")
+ parser.add_argument('--pdb', action='store_true', help="invoke pdb on fatal error")
+ parser.add_argument('--use-python-path', action='store_true',
+ help="an anti-pattern")
+ parser.add_argument('--stats', action='store_true', help="dump stats")
+ parser.add_argument('--inferstats', action='store_true', help="dump type inference stats")
+ parser.add_argument('--custom-typing', metavar='MODULE', help="use a custom typing module")
+
+ report_group = parser.add_argument_group(
+ title='report generation',
+ description='Generate a report in the specified format.')
+ report_group.add_argument('--html-report', metavar='DIR')
+ report_group.add_argument('--old-html-report', metavar='DIR')
+ report_group.add_argument('--xslt-html-report', metavar='DIR')
+ report_group.add_argument('--xml-report', metavar='DIR')
+ report_group.add_argument('--txt-report', metavar='DIR')
+ report_group.add_argument('--xslt-txt-report', metavar='DIR')
+ report_group.add_argument('--linecount-report', metavar='DIR')
+
+ code_group = parser.add_argument_group(title='How to specify the code to type check')
+ code_group.add_argument('-m', '--module', action='append', dest='modules',
+ help="type-check module; can repeat for more modules")
+ # TODO: `mypy -c A -c B` and `mypy -p A -p B` currently silently
+ # ignore A (last option wins). Perhaps -c, -m and -p could just
+ # be command-line flags that modify how we interpret self.files?
+ code_group.add_argument('-c', '--command', help="type-check program passed in as string")
+ code_group.add_argument('-p', '--package', help="type-check all files in a directory")
+ code_group.add_argument('files', nargs='*', help="type-check given files or directories")
+
+ args = parser.parse_args()
+
+ # --use-python-path is no longer supported; explain why.
+ if args.use_python_path:
+ parser.error("Sorry, --use-python-path is no longer supported.\n"
+ "If you are trying this because your code depends on a library module,\n"
+ "you should really investigate how to obtain stubs for that module.\n"
+ "See https://github.com/python/mypy/issues/1411 for more discussion."
+ )
+ # --silent is deprecated; warn about this.
+ if args.silent:
+ print("Warning: --silent is deprecated; use --silent-imports",
+ file=sys.stderr)
+
+ # Check for invalid argument combinations.
+ code_methods = sum(bool(c) for c in [args.modules, args.command, args.package, args.files])
+ if code_methods == 0:
+ parser.error("Missing target module, package, files, or command.")
+ elif code_methods > 1:
+ parser.error("May only specify one of: module, package, files, or command.")
+
+ if args.use_python_path and args.python_version and args.python_version[0] == 2:
+ parser.error('Python version 2 (or --py2) specified, '
+ 'but --use-python-path will search in sys.path of Python 3')
+
+ # Set options.
+ options = Options()
+ options.dirty_stubs = args.dirty_stubs
+ options.python_path = args.use_python_path
+ options.pdb = args.pdb
+ options.custom_typing_module = args.custom_typing
+
+ # Set build flags.
+ if args.python_version is not None:
+ options.pyversion = args.python_version
+
+ if args.verbose:
+ options.build_flags.extend(args.verbose * [build.VERBOSE])
+
+ if args.stats:
+ options.build_flags.append(build.DUMP_TYPE_STATS)
+
+ if args.inferstats:
+ options.build_flags.append(build.DUMP_INFER_STATS)
+
+ if args.silent_imports or args.silent:
+ options.build_flags.append(build.SILENT_IMPORTS)
+ if args.almost_silent:
+ options.build_flags.append(build.SILENT_IMPORTS)
+ options.build_flags.append(build.ALMOST_SILENT)
+
+ if args.disallow_untyped_calls:
+ options.build_flags.append(build.DISALLOW_UNTYPED_CALLS)
+
+ if args.disallow_untyped_defs:
+ options.build_flags.append(build.DISALLOW_UNTYPED_DEFS)
+
+ if args.check_untyped_defs:
+ options.build_flags.append(build.CHECK_UNTYPED_DEFS)
+
+ # experimental
+ if args.fast_parser:
+ options.build_flags.append(build.FAST_PARSER)
+ if args.incremental:
+ options.build_flags.append(build.INCREMENTAL)
+
+ # Set reports.
+ for flag, val in vars(args).items():
+ if flag.endswith('_report') and val is not None:
+ report_type = flag[:-7].replace('_', '-')
+ report_dir = val
+ options.report_dirs[report_type] = report_dir
-def expand_dir(arg: str) -> List[BuildSource]:
- """Convert a directory name to a list of sources to build."""
- dir, mod = crawl_up(arg)
- if not mod:
- # It's a directory without an __init__.py[i].
- # List all the .py[i] files (but not recursively).
- targets = [] # type: List[BuildSource]
- for name in os.listdir(dir):
- stripped = strip_py(name)
- if stripped:
- path = os.path.join(dir, name)
- targets.append(BuildSource(path, stripped, None))
+ # Set target.
+ if args.modules:
+ options.build_flags.append(build.MODULE)
+ targets = [BuildSource(None, m, None) for m in args.modules]
+ return targets, options
+ elif args.package:
+ if os.sep in args.package or os.altsep and os.altsep in args.package:
+ fail("Package name '{}' cannot have a slash in it."
+ .format(args.package))
+ options.build_flags.append(build.MODULE)
+ lib_path = [os.getcwd()] + build.mypy_path()
+ targets = build.find_modules_recursive(args.package, lib_path)
if not targets:
- fail("There are no .py[i] files in directory '{}'".format(arg))
- return targets
-
+ fail("Can't find package '{}'".format(args.package))
+ return targets, options
+ elif args.command:
+ options.build_flags.append(build.PROGRAM_TEXT)
+ return [BuildSource(None, None, args.command)], options
else:
- lib_path = [dir]
- targets = build.find_modules_recursive(mod, lib_path)
- if not targets:
- fail("Found no modules in package '{}'".format(arg))
- return targets
+ targets = []
+ for f in args.files:
+ if f.endswith(PY_EXTENSIONS):
+ targets.append(BuildSource(f, crawl_up(f)[1], None))
+ elif os.path.isdir(f):
+ sub_targets = expand_dir(f)
+ if not sub_targets:
+ fail("There are no .py[i] files in directory '{}'"
+ .format(f))
+ targets.extend(sub_targets)
+ else:
+ targets.append(BuildSource(f, None, None))
+ return targets, options
+
+
+def keyfunc(name: str) -> Tuple[int, str]:
+ """Determine sort order for directory listing.
+
+ The desirable property is foo < foo.pyi < foo.py.
+ """
+ base, suffix = os.path.splitext(name)
+ for i, ext in enumerate(PY_EXTENSIONS):
+ if suffix == ext:
+ return (i, base)
+ return (-1, name)
+
+
+def expand_dir(arg: str, mod_prefix: str = '') -> List[BuildSource]:
+ """Convert a directory name to a list of sources to build."""
+ f = get_init_file(arg)
+ if mod_prefix and not f:
+ return []
+ seen = set() # type: Set[str]
+ sources = []
+ if f and not mod_prefix:
+ top_dir, top_mod = crawl_up(f)
+ mod_prefix = top_mod + '.'
+ if mod_prefix:
+ sources.append(BuildSource(f, mod_prefix.rstrip('.'), None))
+ names = os.listdir(arg)
+ names.sort(key=keyfunc)
+ for name in names:
+ path = os.path.join(arg, name)
+ if os.path.isdir(path):
+ sub_sources = expand_dir(path, mod_prefix + name + '.')
+ if sub_sources:
+ seen.add(name)
+ sources.extend(sub_sources)
+ else:
+ base, suffix = os.path.splitext(name)
+ if base == '__init__':
+ continue
+ if base not in seen and '.' not in base and suffix in PY_EXTENSIONS:
+ seen.add(base)
+ src = BuildSource(path, mod_prefix + base, None)
+ sources.append(src)
+ return sources
def crawl_up(arg: str) -> Tuple[str, str]:
"""Given a .py[i] filename, return (root directory, module).
- We crawl up the path until we find a directory without __init__.py[i].
+ We crawl up the path until we find a directory without
+ __init__.py[i], or until we run out of path components.
"""
dir, mod = os.path.split(arg)
mod = strip_py(mod) or mod
assert '.' not in mod
- while dir and has_init_file(dir):
+ while dir and get_init_file(dir):
dir, base = os.path.split(dir)
if not base:
break
@@ -262,77 +365,19 @@ def strip_py(arg: str) -> Optional[str]:
return None
-def has_init_file(dir: str) -> bool:
- """Return whether a directory contains a file named __init__.py[i]."""
+def get_init_file(dir: str) -> Optional[str]:
+ """Check whether a directory contains a file named __init__.py[i].
+
+ If so, return the file's name (with dir prefixed). If not, return
+ None.
+
+ This prefers .pyi over .py (because of the ordering of PY_EXTENSIONS).
+ """
for ext in PY_EXTENSIONS:
- if os.path.isfile(os.path.join(dir, '__init__' + ext)):
- return True
- return False
-
-
-# Don't generate this from mypy.reports, not all are meant to be public.
-REPORTS = [
- 'html',
- 'old-html',
- 'xslt-html',
- 'xml',
- 'txt',
- 'xslt-txt',
-]
-
-
-def is_report(arg: str) -> bool:
- if arg.startswith('--') and arg.endswith('-report'):
- report_type = arg[2:-7]
- return report_type in REPORTS
- return False
-
-
-def usage(msg: str = None) -> None:
- if msg:
- sys.stderr.write('%s\n' % msg)
- sys.stderr.write("""\
-usage: mypy [option ...] [-c cmd | -m mod | file_or_dir ...]
-Try 'mypy -h' for more information.
-""")
- sys.exit(2)
- else:
- sys.stdout.write("""\
-usage: mypy [option ...] [-c string | -m mod | file_or_dir ...]
-
-Options:
- -h, --help print this help message and exit
- -V, --version show the current version information and exit
- -v, --verbose more verbose messages
- --py2 use Python 2 mode
- --python-version x.y use Python x.y
- -s, --silent-imports don't follow imports to .py files
- --implicit-any behave as though all functions were annotated with Any
- -f, --dirty-stubs don't warn if typeshed is out of sync
- --pdb invoke pdb on fatal error
- --use-python-path search for modules in sys.path of running Python
- --stats dump stats
- --inferstats dump type inference stats
- --custom-typing mod use a custom typing module
- --<fmt>-report dir generate a <fmt> report of type precision under dir/
- <fmt> may be one of: %s
-
-How to specify the code to type-check:
- -m mod type-check module (may be a dotted name)
- -c string type-check program passed in as string
- --package dir type-check all files in a directory
- file ... type-check given files
- dir ... type-check all files in given directories
-
-Environment variables:
- MYPYPATH additional module search path
-""" % ', '.join(REPORTS))
- sys.exit(0)
-
-
-def version() -> None:
- sys.stdout.write("mypy {}\n".format(__version__))
- sys.exit(0)
+ f = os.path.join(dir, '__init__' + ext)
+ if os.path.isfile(f):
+ return f
+ return None
def fail(msg: str) -> None:
diff --git a/mypy/meet.py b/mypy/meet.py
index 1a3c8e2..88b4d4f 100644
--- a/mypy/meet.py
+++ b/mypy/meet.py
@@ -5,7 +5,6 @@ from mypy.types import (
Type, AnyType, TypeVisitor, UnboundType, Void, ErrorType, NoneTyp, TypeVarType,
Instance, CallableType, TupleType, ErasedType, TypeList, UnionType, PartialType, DeletedType
)
-from mypy.sametypes import is_same_type
from mypy.subtypes import is_subtype
from mypy.nodes import TypeInfo
@@ -40,19 +39,43 @@ def meet_simple(s: Type, t: Type, default_right: bool = True) -> Type:
def is_overlapping_types(t: Type, s: Type, use_promotions: bool = False) -> bool:
"""Can a value of type t be a value of type s, or vice versa?
- Note that this effectively checks against erased types, since X[Any] is always
- compatible with X[T].
+ Note that this effectively checks against erased types, since type
+ variables are erased at runtime and the overlapping check is based
+ on runtime behavior.
If use_promitions is True, also consider type promotions (int and
float would only be overlapping if it's True).
+
+ This does not consider multiple inheritance. For example, A and B in
+ the following example are not considered overlapping, even though
+ via C they can be overlapping:
+
+ class A: ...
+ class B: ...
+ class C(A, B): ...
+
+ The rationale is that this case is usually very unlikely as multiple
+    inhreitance is rare. Also, we can't reliably determine whether
+ multiple inheritance actually occurs somewhere in a program, due to
+ stub files hiding implementation details, dynamic loading etc.
+
+ TODO: Don't consider tuples always overlapping.
+ TODO: Don't consider callables always overlapping.
+ TODO: Don't consider type variables with values always overlapping.
"""
+ # Since we are effectively working with the erased types, we only
+ # need to handle occurrences of TypeVarType at the top level.
+ if isinstance(t, TypeVarType):
+ t = t.erase_to_union_or_bound()
+ if isinstance(s, TypeVarType):
+ s = s.erase_to_union_or_bound()
if isinstance(t, Instance):
if isinstance(s, Instance):
# Only consider two classes non-disjoint if one is included in the mro
# of another.
if use_promotions:
# Consider cases like int vs float to be overlapping where
- # there is only a type promition relationship but not proper
+ # there is only a type promotion relationship but not proper
# subclassing.
if t.type._promote and is_overlapping_types(t.type._promote, s):
return True
@@ -76,7 +99,6 @@ def nearest_builtin_ancestor(type: TypeInfo) -> TypeInfo:
return base
else:
return None
- assert False, 'No built-in ancestor found for {}'.format(type.name())
class TypeMeetVisitor(TypeVisitor[Type]):
@@ -136,14 +158,14 @@ class TypeMeetVisitor(TypeVisitor[Type]):
return self.s
def visit_type_var(self, t: TypeVarType) -> Type:
- if isinstance(self.s, TypeVarType) and (cast(TypeVarType, self.s)).id == t.id:
+ if isinstance(self.s, TypeVarType) and self.s.id == t.id:
return self.s
else:
return self.default(self.s)
def visit_instance(self, t: Instance) -> Type:
if isinstance(self.s, Instance):
- si = cast(Instance, self.s)
+ si = self.s
if t.type == si.type:
if is_subtype(t, self.s) or is_subtype(self.s, t):
# Combine type arguments. We could have used join below
@@ -166,19 +188,16 @@ class TypeMeetVisitor(TypeVisitor[Type]):
return self.default(self.s)
def visit_callable_type(self, t: CallableType) -> Type:
- if isinstance(self.s, CallableType) and is_similar_callables(
- t, cast(CallableType, self.s)):
- return combine_similar_callables(t, cast(CallableType, self.s))
+ if isinstance(self.s, CallableType) and is_similar_callables(t, self.s):
+ return combine_similar_callables(t, self.s)
else:
return self.default(self.s)
def visit_tuple_type(self, t: TupleType) -> Type:
- if isinstance(self.s, TupleType) and (
- cast(TupleType, self.s).length() == t.length()):
+ if isinstance(self.s, TupleType) and self.s.length() == t.length():
items = [] # type: List[Type]
for i in range(t.length()):
- items.append(self.meet(t.items[i],
- (cast(TupleType, self.s)).items[i]))
+ items.append(self.meet(t.items[i], self.s.items[i]))
# TODO: What if the fallbacks are different?
return TupleType(items, t.fallback)
else:
diff --git a/mypy/messages.py b/mypy/messages.py
index 83344b4..9d54849 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -11,9 +11,12 @@ from typing import cast, List, Dict, Any, Sequence, Iterable, Tuple
from mypy.errors import Errors
from mypy.types import (
Type, CallableType, Instance, TypeVarType, TupleType, UnionType, Void, NoneTyp, AnyType,
- Overloaded, FunctionLike, DeletedType, PartialType
+ Overloaded, FunctionLike, DeletedType
+)
+from mypy.nodes import (
+ TypeInfo, Context, MypyFile, op_methods, FuncDef, reverse_type_aliases,
+ ARG_STAR, ARG_STAR2
)
-from mypy.nodes import TypeInfo, Context, MypyFile, op_methods, FuncDef, reverse_type_aliases
# Constants that represent simple type checker error message, i.e. messages
@@ -69,10 +72,15 @@ READ_ONLY_PROPERTY_OVERRIDES_READ_WRITE = \
INSTANCE_LAYOUT_CONFLICT = 'Instance layout conflict in multiple inheritance'
FORMAT_REQUIRES_MAPPING = 'Format requires a mapping'
GENERIC_TYPE_NOT_VALID_AS_EXPRESSION = \
- "Generic type not valid as an expression any more (use '# type:' comment instead)"
+ "Generic type is prohibited as a runtime expression (use a type alias or '# type:' comment)"
RETURN_TYPE_CANNOT_BE_CONTRAVARIANT = "Cannot use a contravariant type variable as return type"
FUNCTION_PARAMETER_CANNOT_BE_COVARIANT = "Cannot use a covariant type variable as a parameter"
INCOMPATIBLE_IMPORT_OF = "Incompatible import of"
+FUNCTION_TYPE_EXPECTED = "Function is missing a type annotation"
+RETURN_TYPE_EXPECTED = "Function is missing a return type annotation"
+ARGUMENT_TYPE_EXPECTED = "Function is missing a type annotation for one or more arguments"
+KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE = \
+ 'Keyword argument only valid with "str" key type in call to "dict"'
class MessageBuilder:
@@ -152,7 +160,7 @@ class MessageBuilder:
# If format_simple returns a non-trivial result, use that.
return s
elif isinstance(typ, FunctionLike):
- func = cast(FunctionLike, typ)
+ func = typ
if func.is_type_obj():
# The type of a type object type can be derived from the
# return type (this always works).
@@ -191,7 +199,7 @@ class MessageBuilder:
callable type -> "" (empty string)
"""
if isinstance(typ, Instance):
- itype = cast(Instance, typ)
+ itype = typ
# Get the short name of the type.
if verbosity >= 2:
base_str = itype.type.fullname()
@@ -225,13 +233,13 @@ class MessageBuilder:
return '{}[...]'.format(base_str)
elif isinstance(typ, TypeVarType):
# This is similar to non-generic instance types.
- return '"{}"'.format((cast(TypeVarType, typ)).name)
+ return '"{}"'.format(typ.name)
elif isinstance(typ, TupleType):
# Prefer the name of the fallback class (if not tuple), as it's more informative.
if typ.fallback.type.fullname() != 'builtins.tuple':
return self.format_simple(typ.fallback)
items = []
- for t in (cast(TupleType, typ)).items:
+ for t in typ.items:
items.append(strip_quotes(self.format(t)))
s = '"Tuple[{}]"'.format(', '.join(items))
if len(s) < 40:
@@ -240,7 +248,7 @@ class MessageBuilder:
return 'tuple(length {})'.format(len(items))
elif isinstance(typ, UnionType):
items = []
- for t in (cast(UnionType, typ)).items:
+ for t in typ.items:
items.append(strip_quotes(self.format(t)))
s = '"Union[{}]"'.format(', '.join(items))
if len(s) < 40:
@@ -292,7 +300,7 @@ class MessageBuilder:
messages. Return type Any.
"""
if (isinstance(typ, Instance) and
- (cast(Instance, typ)).type.has_readable_member(member)):
+ typ.type.has_readable_member(member)):
self.fail('Member "{}" is not assignable'.format(member), context)
elif isinstance(typ, Void):
self.check_void(typ, context)
@@ -328,8 +336,7 @@ class MessageBuilder:
# The non-special case: a missing ordinary attribute.
if not self.disable_type_names:
failed = False
- if isinstance(typ, Instance) and cast(Instance, typ).type.names:
- typ = cast(Instance, typ)
+ if isinstance(typ, Instance) and typ.type.names:
alternatives = set(typ.type.names.keys())
matches = [m for m in COMMON_MISTAKES.get(member, []) if m in alternatives]
matches.extend(best_matches(member, alternatives)[:3])
@@ -391,8 +398,13 @@ class MessageBuilder:
self.fail('{} not callable'.format(self.format(typ)), context)
return AnyType()
+ def untyped_function_call(self, callee: CallableType, context: Context) -> Type:
+ name = callee.name if callee.name is not None else '(unknown)'
+ self.fail('call to untyped function {} in typed context'.format(name), context)
+ return AnyType()
+
def incompatible_argument(self, n: int, m: int, callee: CallableType, arg_type: Type,
- context: Context) -> None:
+ arg_kind: int, context: Context) -> None:
"""Report an error about an incompatible argument type.
The argument type is arg_type, argument number is n and the
@@ -457,6 +469,10 @@ class MessageBuilder:
except IndexError: # Varargs callees
expected_type = callee.arg_types[-1]
arg_type_str, expected_type_str = self.format_distinctly(arg_type, expected_type)
+ if arg_kind == ARG_STAR:
+ arg_type_str = '*' + arg_type_str
+ elif arg_kind == ARG_STAR2:
+ arg_type_str = '**' + arg_type_str
msg = 'Argument {} {}has incompatible type {}; expected {}'.format(
n, target, arg_type_str, expected_type_str)
self.fail(msg, context)
@@ -664,8 +680,7 @@ class MessageBuilder:
self.fail('List or tuple expected as variable arguments', context)
def invalid_keyword_var_arg(self, typ: Type, context: Context) -> None:
- if isinstance(typ, Instance) and (
- (cast(Instance, typ)).type.fullname() == 'builtins.dict'):
+ if isinstance(typ, Instance) and (typ.type.fullname() == 'builtins.dict'):
self.fail('Keywords must be strings', context)
else:
self.fail('Argument after ** must be a dictionary',
@@ -725,6 +740,9 @@ class MessageBuilder:
def invalid_method_type(self, sig: CallableType, context: Context) -> None:
self.fail('Invalid method type', context)
+ def invalid_class_method_type(self, sig: CallableType, context: Context) -> None:
+ self.fail('Invalid class method type', context)
+
def incompatible_conditional_function_def(self, defn: FuncDef) -> None:
self.fail('All conditional function variants must have identical '
'signatures', defn)
diff --git a/mypy/moduleinfo.py b/mypy/moduleinfo.py
index e0d3246..6deca6f 100644
--- a/mypy/moduleinfo.py
+++ b/mypy/moduleinfo.py
@@ -224,6 +224,7 @@ third_party_modules = {
'gtk',
'PyQt4',
'PyQt5',
+ 'pylons',
}
# Modules and packages common to Python 2.7 and 3.x.
@@ -294,6 +295,7 @@ common_std_lib_modules = {
'io',
'json',
'keyword',
+ 'lib2to3',
'linecache',
'locale',
'logging',
diff --git a/mypy/nodes.py b/mypy/nodes.py
index 6857a16..cde6e0c 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -5,7 +5,7 @@ import re
from abc import abstractmethod, ABCMeta
from typing import (
- Any, overload, TypeVar, List, Tuple, cast, Set, Dict, Union, Optional
+ Any, TypeVar, List, Tuple, cast, Set, Dict, Union, Optional
)
from mypy.lex import Token
@@ -14,7 +14,7 @@ from mypy.visitor import NodeVisitor
from mypy.util import dump_tagged, short_type
-class Context(metaclass=ABCMeta):
+class Context:
"""Base type for objects that are valid as error message locations."""
@abstractmethod
def get_line(self) -> int: pass
@@ -27,6 +27,8 @@ if False:
T = TypeVar('T')
+JsonDict = Dict[str, Any]
+
# Symbol table node kinds
#
@@ -37,7 +39,7 @@ GDEF = 1 # type: int
MDEF = 2 # type: int
MODULE_REF = 3 # type: int
# Type variable declared using TypeVar(...) has kind UNBOUND_TVAR. It's not
-# valid as a type. A type variable is valid as a type (kind TVAR) within
+# valid as a type. A type variable is valid as a type (kind BOUND_TVAR) within
# (1) a generic class that uses the type variable as a type argument or
# (2) a generic function that refers to the type variable in its signature.
UNBOUND_TVAR = 4 # type: int
@@ -46,6 +48,7 @@ TYPE_ALIAS = 6 # type: int
# Placeholder for a name imported via 'from ... import'. Second phase of
# semantic will replace this the actual imported reference. This is
# needed so that we can detect whether a name has been imported during
+# XXX what?
UNBOUND_IMPORTED = 7 # type: int
@@ -63,6 +66,7 @@ node_kinds = {
TYPE_ALIAS: 'TypeAlias',
UNBOUND_IMPORTED: 'UnboundImported',
}
+inverse_node_kinds = {_kind: _name for _name, _kind in node_kinds.items()}
implicit_module_attrs = {'__name__': '__builtins__.str',
@@ -109,6 +113,21 @@ class Node(Context):
def accept(self, visitor: NodeVisitor[T]) -> T:
raise RuntimeError('Not implemented')
+ # NOTE: Can't use @abstractmethod, since many subclasses of Node
+ # don't implement serialize().
+ def serialize(self) -> Any:
+ raise NotImplementedError('Cannot serialize {} instance'.format(self.__class__.__name__))
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'Node':
+ classname = data['.class']
+ glo = globals()
+ if classname in glo:
+ cl = glo[classname]
+ if issubclass(cl, cls) and 'deserialize' in cl.__dict__:
+ return cl.deserialize(data)
+ raise NotImplementedError('unexpected .class {}'.format(classname))
+
class SymbolNode(Node):
# Nodes that can be stored in a symbol table.
@@ -121,6 +140,10 @@ class SymbolNode(Node):
@abstractmethod
def fullname(self) -> str: pass
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'SymbolNode':
+ return cast(SymbolNode, super().deserialize(data))
+
class MypyFile(SymbolNode):
"""The abstract syntax tree of a single source file."""
@@ -174,6 +197,26 @@ class MypyFile(SymbolNode):
return not (self.path is None) and len(self.path) != 0 \
and os.path.basename(self.path).startswith('__init__.')
+ def serialize(self) -> JsonDict:
+ return {'.class': 'MypyFile',
+ '_name': self._name,
+ '_fullname': self._fullname,
+ 'names': self.names.serialize(self._fullname),
+ 'is_stub': self.is_stub,
+ 'path': self.path,
+ }
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'MypyFile':
+ assert data['.class'] == 'MypyFile', data
+ tree = MypyFile([], [])
+ tree._name = data['_name']
+ tree._fullname = data['_fullname']
+ tree.names = SymbolTable.deserialize(data['names'])
+ tree.is_stub = data['is_stub']
+ tree.path = data['path']
+ return tree
+
class ImportBase(Node):
"""Base class for all import statements."""
@@ -270,6 +313,25 @@ class OverloadedFuncDef(FuncBase):
def accept(self, visitor: NodeVisitor[T]) -> T:
return visitor.visit_overloaded_func_def(self)
+ def serialize(self) -> JsonDict:
+ return {'.class': 'OverloadedFuncDef',
+ 'items': [i.serialize() for i in self.items],
+ 'type': None if self.type is None else self.type.serialize(),
+ 'fullname': self._fullname,
+ 'is_property': self.is_property,
+ }
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'OverloadedFuncDef':
+ assert data['.class'] == 'OverloadedFuncDef'
+ res = OverloadedFuncDef([Decorator.deserialize(d) for d in data['items']])
+ if data.get('type') is not None:
+ res.type = mypy.types.Type.deserialize(data['type'])
+ res._fullname = data['fullname']
+ res.is_property = data['is_property']
+ # NOTE: res.info will be set in the fixup phase.
+ return res
+
class Argument(Node):
"""A single argument in a FuncItem."""
@@ -311,6 +373,25 @@ class Argument(Node):
self.initialization_statement.set_line(self.line)
self.initialization_statement.lvalues[0].set_line(self.line)
+ def serialize(self) -> JsonDict:
+ data = {'.class': 'Argument',
+ 'kind': self.kind,
+ 'variable': self.variable.serialize(),
+ 'type_annotation': (None if self.type_annotation is None
+ else self.type_annotation.serialize()),
+ } # type: JsonDict
+ # TODO: initializer?
+ return data
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'Argument':
+ assert data['.class'] == 'Argument'
+ return Argument(Var.deserialize(data['variable']),
+ (None if data.get('type_annotation') is None
+ else mypy.types.Type.deserialize(data['type_annotation'])),
+ None, # TODO: initializer?
+ kind=data['kind'])
+
class FuncItem(FuncBase):
arguments = [] # type: List[Argument]
@@ -319,7 +400,6 @@ class FuncItem(FuncBase):
# Maximum number of positional arguments, -1 if no explicit limit (*args not included)
max_pos = 0
body = None # type: Block
- is_implicit = False # Implicit dynamic types?
# Is this an overload variant of function with more than one overload variant?
is_overload = False
is_generator = False # Contains a yield statement?
@@ -365,7 +445,7 @@ class FuncDef(FuncItem):
is_conditional = False # Defined conditionally (within block)?
is_abstract = False
is_property = False
- original_def = None # type: Union[FuncDef, Var] # Original conditional definition
+ original_def = None # type: Union[None, FuncDef, Var] # Original conditional definition
def __init__(self,
name: str, # Function name
@@ -384,6 +464,44 @@ class FuncDef(FuncItem):
def is_constructor(self) -> bool:
return self.info is not None and self._name == '__init__'
+ def serialize(self) -> JsonDict:
+ return {'.class': 'FuncDef',
+ 'name': self._name,
+ 'fullname': self._fullname,
+ 'arguments': [a.serialize() for a in self.arguments],
+ 'type': None if self.type is None else self.type.serialize(),
+ 'is_property': self.is_property,
+ 'is_overload': self.is_overload,
+ 'is_generator': self.is_generator,
+ 'is_static': self.is_static,
+ 'is_class': self.is_class,
+ 'is_decorated': self.is_decorated,
+ 'is_conditional': self.is_conditional,
+ 'is_abstract': self.is_abstract,
+ # TODO: Do we need expanded, original_def?
+ }
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'FuncDef':
+ assert data['.class'] == 'FuncDef'
+ body = Block([])
+ ret = FuncDef(data['name'],
+ [Argument.deserialize(a) for a in data['arguments']],
+ body,
+ (None if data['type'] is None
+ else mypy.types.FunctionLike.deserialize(data['type'])))
+ ret._fullname = data['fullname']
+ ret.is_property = data['is_property']
+ ret.is_overload = data['is_overload']
+ ret.is_generator = data['is_generator']
+ ret.is_static = data['is_static']
+ ret.is_class = data['is_class']
+ ret.is_decorated = data['is_decorated']
+ ret.is_conditional = data['is_conditional']
+ ret.is_abstract = data['is_abstract']
+ # NOTE: ret.info is set in the fixup phase.
+ return ret
+
class Decorator(SymbolNode):
"""A decorated function.
@@ -392,7 +510,7 @@ class Decorator(SymbolNode):
"""
func = None # type: FuncDef # Decorated function
- decorators = None # type: List[Node] # Decorators, at least one
+ decorators = None # type: List[Node] # Decorators, at least one # XXX Not true
var = None # type: Var # Represents the decorated function obj
is_overload = False
@@ -412,6 +530,22 @@ class Decorator(SymbolNode):
def accept(self, visitor: NodeVisitor[T]) -> T:
return visitor.visit_decorator(self)
+ def serialize(self) -> JsonDict:
+ return {'.class': 'Decorator',
+ 'func': self.func.serialize(),
+ 'var': self.var.serialize(),
+ 'is_overload': self.is_overload,
+ }
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'Decorator':
+ assert data['.class'] == 'Decorator'
+ dec = Decorator(FuncDef.deserialize(data['func']),
+ [],
+ Var.deserialize(data['var']))
+ dec.is_overload = data['is_overload']
+ return dec
+
class Var(SymbolNode):
"""A variable.
@@ -449,6 +583,37 @@ class Var(SymbolNode):
def accept(self, visitor: NodeVisitor[T]) -> T:
return visitor.visit_var(self)
+ def serialize(self) -> JsonDict:
+ # TODO: Leave default values out?
+ # NOTE: Sometimes self.is_ready is False here, but we don't care.
+ data = {'.class': 'Var',
+ 'name': self._name,
+ 'fullname': self._fullname,
+ 'type': None if self.type is None else self.type.serialize(),
+ 'is_self': self.is_self,
+ 'is_initialized_in_class': self.is_initialized_in_class,
+ 'is_staticmethod': self.is_staticmethod,
+ 'is_classmethod': self.is_classmethod,
+ 'is_property': self.is_property,
+ 'is_settable_property': self.is_settable_property,
+ } # type: JsonDict
+ return data
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'Var':
+ assert data['.class'] == 'Var'
+ name = data['name']
+ type = None if data['type'] is None else mypy.types.Type.deserialize(data['type'])
+ v = Var(name, type)
+ v._fullname = data['fullname']
+ v.is_self = data['is_self']
+ v.is_initialized_in_class = data['is_initialized_in_class']
+ v.is_staticmethod = data['is_staticmethod']
+ v.is_classmethod = data['is_classmethod']
+ v.is_property = data['is_property']
+ v.is_settable_property = data['is_settable_property']
+ return v
+
class ClassDef(Node):
"""Class definition"""
@@ -467,16 +632,16 @@ class ClassDef(Node):
# Built-in/extension class? (single implementation inheritance only)
is_builtinclass = False
- def __init__(self, name: str, defs: 'Block',
+ def __init__(self,
+ name: str,
+ defs: 'Block',
type_vars: List['mypy.types.TypeVarDef'] = None,
base_type_exprs: List[Node] = None,
metaclass: str = None) -> None:
- if not base_type_exprs:
- base_type_exprs = []
self.name = name
self.defs = defs
self.type_vars = type_vars or []
- self.base_type_exprs = base_type_exprs
+ self.base_type_exprs = base_type_exprs or []
self.base_types = [] # Not yet semantically analyzed --> don't know base types
self.metaclass = metaclass
self.decorators = []
@@ -487,6 +652,30 @@ class ClassDef(Node):
def is_generic(self) -> bool:
return self.info.is_generic()
+ def serialize(self) -> JsonDict:
+ # Not serialized: defs, base_type_exprs, decorators
+ return {'.class': 'ClassDef',
+ 'name': self.name,
+ 'fullname': self.fullname,
+ 'type_vars': [v.serialize() for v in self.type_vars],
+ 'base_types': [t.serialize() for t in self.base_types],
+ 'metaclass': self.metaclass,
+ 'is_builtinclass': self.is_builtinclass,
+ }
+
+ @classmethod
+ def deserialize(self, data: JsonDict) -> 'ClassDef':
+ assert data['.class'] == 'ClassDef'
+ res = ClassDef(data['name'],
+ Block([]),
+ [mypy.types.TypeVarDef.deserialize(v) for v in data['type_vars']],
+ metaclass=data['metaclass'],
+ )
+ res.fullname = data['fullname']
+ res.base_types = [mypy.types.Instance.deserialize(t) for t in data['base_types']]
+ res.is_builtinclass = data['is_builtinclass']
+ return res
+
class GlobalDecl(Node):
"""Declaration global x, y, ..."""
@@ -896,12 +1085,32 @@ class NameExpr(RefExpr):
self.name = name
self.literal_hash = ('Var', name,)
- def type_node(self):
- return cast(TypeInfo, self.node)
-
def accept(self, visitor: NodeVisitor[T]) -> T:
return visitor.visit_name_expr(self)
+ def serialize(self) -> JsonDict:
+ # TODO: Find out where and why NameExpr is being serialized (if at all).
+ assert False, "Serializing NameExpr: %s" % (self,)
+ return {'.class': 'NameExpr',
+ 'kind': self.kind,
+ 'node': None if self.node is None else self.node.serialize(),
+ 'fullname': self.fullname,
+ 'is_def': self.is_def,
+ 'name': self.name,
+ 'literal': self.literal,
+ }
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'NameExpr':
+ assert data['.class'] == 'NameExpr'
+ ret = NameExpr(data['name'])
+ ret.kind = data['kind']
+ ret.node = None if data['node'] is None else Node.deserialize(data['node'])
+ ret.fullname = data['fullname']
+ ret.is_def = data['is_def']
+ ret.literal = data['literal']
+ return ret
+
class MemberExpr(RefExpr):
"""Member access expression x.y"""
@@ -1180,7 +1389,7 @@ class FuncExpr(FuncItem):
def expr(self) -> Node:
"""Return the expression (the body) of the lambda."""
- ret = cast(ReturnStmt, self.body.body[0])
+ ret = cast(ReturnStmt, self.body.body[-1])
return ret.expr
def accept(self, visitor: NodeVisitor[T]) -> T:
@@ -1212,8 +1421,8 @@ class DictExpr(Node):
if all(x[0].literal == LITERAL_YES and x[1].literal == LITERAL_YES
for x in items):
self.literal = LITERAL_YES
- self.literal_hash = ('Dict',) + tuple((x[0].literal_hash, x[1].literal_hash)
- for x in items) # type: ignore
+ self.literal_hash = ('Dict',) + tuple(
+ (x[0].literal_hash, x[1].literal_hash) for x in items) # type: ignore
def accept(self, visitor: NodeVisitor[T]) -> T:
return visitor.visit_dict_expr(self)
@@ -1377,6 +1586,9 @@ class TypeVarExpr(SymbolNode):
# Value restriction: only types in the list are valid as values. If the
# list is empty, there is no restriction.
values = None # type: List[mypy.types.Type]
+ # Upper bound: only subtypes of upper_bound are valid as values. By default
+ # this is 'object', meaning no restriction.
+ upper_bound = None # type: mypy.types.Type
# Variance of the type variable. Invariant is the default.
# TypeVar(..., covariant=True) defines a covariant type variable.
# TypeVar(..., contravariant=True) defines a contravariant type
@@ -1385,10 +1597,12 @@ class TypeVarExpr(SymbolNode):
def __init__(self, name: str, fullname: str,
values: List['mypy.types.Type'],
+ upper_bound: 'mypy.types.Type',
variance: int=INVARIANT) -> None:
self._name = name
self._fullname = fullname
self.values = values
+ self.upper_bound = upper_bound
self.variance = variance
def name(self) -> str:
@@ -1400,6 +1614,24 @@ class TypeVarExpr(SymbolNode):
def accept(self, visitor: NodeVisitor[T]) -> T:
return visitor.visit_type_var_expr(self)
+ def serialize(self) -> JsonDict:
+ return {'.class': 'TypeVarExpr',
+ 'name': self._name,
+ 'fullname': self._fullname,
+ 'values': [t.serialize() for t in self.values],
+ 'upper_bound': self.upper_bound.serialize(),
+ 'variance': self.variance,
+ }
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'TypeVarExpr':
+ assert data['.class'] == 'TypeVarExpr'
+ return TypeVarExpr(data['name'],
+ data['fullname'],
+ [mypy.types.Type.deserialize(v) for v in data['values']],
+ mypy.types.Type.deserialize(data['upper_bound']),
+ data['variance'])
+
class TypeAliasExpr(Node):
"""Type alias expression (rvalue)."""
@@ -1513,6 +1745,12 @@ class TypeInfo(SymbolNode):
# Is this a named tuple type?
is_named_tuple = False
+ # Is this a dummy from deserialization?
+ is_dummy = False
+
+ # Alternative to fullname() for 'anonymous' classes.
+ alt_fullname = None # type: Optional[str]
+
def __init__(self, names: 'SymbolTable', defn: ClassDef) -> None:
"""Initialize a TypeInfo."""
self.names = names
@@ -1538,7 +1776,7 @@ class TypeInfo(SymbolNode):
def is_generic(self) -> bool:
"""Is the type generic (i.e. does it have type variables)?"""
- return self.type_vars is not None and len(self.type_vars) > 0
+ return len(self.type_vars) > 0
def get(self, name: str) -> 'SymbolTableNode':
for cls in self.mro:
@@ -1577,7 +1815,7 @@ class TypeInfo(SymbolNode):
if name in cls.names:
node = cls.names[name].node
if isinstance(node, Var):
- return cast(Var, node)
+ return node
else:
return None
return None
@@ -1591,6 +1829,8 @@ class TypeInfo(SymbolNode):
return self.get_var(name)
def get_method(self, name: str) -> FuncBase:
+ if self.mro is None: # Might be because of a previous error.
+ return None
for cls in self.mro:
if name in cls.names:
node = cls.names[name].node
@@ -1605,16 +1845,19 @@ class TypeInfo(SymbolNode):
Raise MroError if cannot determine mro.
"""
- self.mro = linearize_hierarchy(self)
+ mro = linearize_hierarchy(self)
+ assert mro, "Could not produce a MRO at all for %s" % (self,)
+ self.mro = mro
def has_base(self, fullname: str) -> bool:
"""Return True if type has a base type with the specified name.
This can be either via extension or via implementation.
"""
- for cls in self.mro:
- if cls.fullname() == fullname:
- return True
+ if self.mro:
+ for cls in self.mro:
+ if cls.fullname() == fullname:
+ return True
return False
def all_subtypes(self) -> 'Set[TypeInfo]':
@@ -1650,6 +1893,46 @@ class TypeInfo(SymbolNode):
('Names', sorted(self.names.keys()))],
'TypeInfo')
+ def serialize(self) -> Union[str, JsonDict]:
+ # NOTE: This is where all ClassDefs originate, so there shouldn't be duplicates.
+ data = {'.class': 'TypeInfo',
+ 'fullname': self.fullname(),
+ 'alt_fullname': self.alt_fullname,
+ 'names': self.names.serialize(self.alt_fullname or self.fullname()),
+ 'defn': self.defn.serialize(),
+ 'is_abstract': self.is_abstract,
+ 'abstract_attributes': self.abstract_attributes,
+ 'is_enum': self.is_enum,
+ 'fallback_to_any': self.fallback_to_any,
+ 'type_vars': self.type_vars,
+ 'bases': [b.serialize() for b in self.bases],
+ '_promote': None if self._promote is None else self._promote.serialize(),
+ 'tuple_type': None if self.tuple_type is None else self.tuple_type.serialize(),
+ 'is_named_tuple': self.is_named_tuple,
+ }
+ return data
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'TypeInfo':
+ names = SymbolTable.deserialize(data['names'])
+ defn = ClassDef.deserialize(data['defn'])
+ ti = TypeInfo(names, defn)
+ ti._fullname = data['fullname']
+ ti.alt_fullname = data['alt_fullname']
+ # TODO: Is there a reason to reconstruct ti.subtypes?
+ ti.is_abstract = data['is_abstract']
+ ti.abstract_attributes = data['abstract_attributes']
+ ti.is_enum = data['is_enum']
+ ti.fallback_to_any = data['fallback_to_any']
+ ti.type_vars = data['type_vars']
+ ti.bases = [mypy.types.Instance.deserialize(b) for b in data['bases']]
+ ti._promote = (None if data['_promote'] is None
+ else mypy.types.Type.deserialize(data['_promote']))
+ ti.tuple_type = (None if data['tuple_type'] is None
+ else mypy.types.TupleType.deserialize(data['tuple_type']))
+ ti.is_named_tuple = data['is_named_tuple']
+ return ti
+
class SymbolTableNode:
# Kind of node. Possible values:
@@ -1670,12 +1953,15 @@ class SymbolTableNode:
# Module id (e.g. "foo.bar") or None
mod_id = ''
# If this not None, override the type of the 'node' attribute.
- type_override = None # type: mypy.types.Type
+ type_override = None # type: Optional[mypy.types.Type]
# If False, this name won't be imported via 'from <module> import *'.
# This has no effect on names within classes.
module_public = True
+ # For deserialized MODULE_REF nodes, the referenced module name;
+ # for other nodes, optionally the name of the referenced object.
+ cross_ref = None # type: Optional[str]
- def __init__(self, kind: int, node: SymbolNode, mod_id: str = None,
+ def __init__(self, kind: int, node: Optional[SymbolNode], mod_id: str = None,
typ: 'mypy.types.Type' = None, tvar_id: int = 0,
module_public: bool = True) -> None:
self.kind = kind
@@ -1702,7 +1988,7 @@ class SymbolTableNode:
and node.type is not None):
return node.type
elif isinstance(node, Decorator):
- return (cast(Decorator, node)).var.type
+ return node.var.type
else:
return None
@@ -1715,6 +2001,65 @@ class SymbolTableNode:
s += ' : {}'.format(self.type)
return s
+ def serialize(self, prefix: str, name: str) -> JsonDict:
+ """Serialize a SymbolTableNode.
+
+ Args:
+ prefix: full name of the containing module or class; or None
+ name: name of this object relative to the containing object
+ """
+ data = {'.class': 'SymbolTableNode',
+ 'kind': node_kinds[self.kind],
+ } # type: JsonDict
+ if self.tvar_id:
+ data['tvar_id'] = self.tvar_id
+ if not self.module_public:
+ data['module_public'] = False
+ if self.kind == MODULE_REF:
+ assert self.node is not None, "Missing module cross ref in %s for %s" % (prefix, name)
+ data['cross_ref'] = self.node.fullname()
+ else:
+ if self.node is not None:
+ if prefix is not None:
+ # Check whether this is an alias for another object.
+ # If the object's canonical full name differs from
+ # the full name computed from prefix and name,
+ # it's an alias, and we serialize it as a cross ref.
+ if isinstance(self.node, TypeInfo):
+ fullname = self.node.alt_fullname or self.node.fullname()
+ else:
+ fullname = self.node.fullname()
+ if (fullname is not None and '.' in fullname and
+ fullname != prefix + '.' + name):
+ data['cross_ref'] = fullname
+ return data
+ data['node'] = self.node.serialize()
+ if self.type_override is not None:
+ data['type_override'] = self.type_override.serialize()
+ return data
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'SymbolTableNode':
+ assert data['.class'] == 'SymbolTableNode'
+ kind = inverse_node_kinds[data['kind']]
+ if 'cross_ref' in data:
+ # This will be fixed up later.
+ stnode = SymbolTableNode(kind, None)
+ stnode.cross_ref = data['cross_ref']
+ else:
+ node = None
+ if 'node' in data:
+ node = SymbolNode.deserialize(data['node'])
+ typ = None
+ if 'type_override' in data:
+ typ = mypy.types.Type.deserialize(data['type_override'])
+ stnode = SymbolTableNode(kind, node, typ=typ)
+ if 'tvar_id' in data:
+ stnode.tvar_id = data['tvar_id']
+ if 'module_public' in data:
+ stnode.module_public = data['module_public']
+ return stnode
+
class SymbolTable(Dict[str, SymbolTableNode]):
def __str__(self) -> str:
@@ -1733,10 +2078,26 @@ class SymbolTable(Dict[str, SymbolTableNode]):
a[-1] += ')'
return '\n'.join(a)
-
-def clean_up(s: str) -> str:
- # TODO remove
- return re.sub('.*::', '', s)
+ def serialize(self, fullname: str) -> JsonDict:
+ data = {'.class': 'SymbolTable'} # type: JsonDict
+ for key, value in self.items():
+ # Skip __builtins__: it's a reference to the builtins
+ # module that gets added to every module by
+ # SemanticAnalyzer.visit_file(), but it shouldn't be
+ # accessed by users of the module.
+ if key == '__builtins__':
+ continue
+ data[key] = value.serialize(fullname, key)
+ return data
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'SymbolTable':
+ assert data['.class'] == 'SymbolTable'
+ st = SymbolTable()
+ for key, value in data.items():
+ if key != '.class':
+ st[key] = SymbolTableNode.deserialize(value)
+ return st
def function_type(func: FuncBase, fallback: 'mypy.types.Instance') -> 'mypy.types.FunctionLike':
@@ -1761,6 +2122,7 @@ def function_type(func: FuncBase, fallback: 'mypy.types.Instance') -> 'mypy.type
mypy.types.AnyType(),
fallback,
name,
+ implicit=True,
)
@@ -1782,6 +2144,11 @@ def method_type(sig: 'mypy.types.FunctionLike') -> 'mypy.types.FunctionLike':
def method_callable(c: 'mypy.types.CallableType') -> 'mypy.types.CallableType':
+ if c.arg_kinds and c.arg_kinds[0] == ARG_STAR:
+ # The signature is of the form 'def foo(*args, ...)'.
+ # In this case we shouldn't drop the first arg,
+ # since self will be absorbed by the *args.
+ return c
return c.copy_modified(arg_types=c.arg_types[1:],
arg_kinds=c.arg_kinds[1:],
arg_names=c.arg_names[1:])
@@ -1796,8 +2163,12 @@ def linearize_hierarchy(info: TypeInfo) -> List[TypeInfo]:
if info.mro:
return info.mro
bases = info.direct_base_classes()
- return [info] + merge([linearize_hierarchy(base) for base in bases] +
- [bases])
+ lin_bases = []
+ for base in bases:
+ assert base is not None, "Cannot linearize bases for %s %s" % (info.fullname(), bases)
+ lin_bases.append(linearize_hierarchy(base))
+ lin_bases.append(bases)
+ return [info] + merge(lin_bases)
def merge(seqs: List[List[TypeInfo]]) -> List[TypeInfo]:
diff --git a/mypy/parse.py b/mypy/parse.py
index f37b04e..ecf8e21 100644
--- a/mypy/parse.py
+++ b/mypy/parse.py
@@ -20,11 +20,11 @@ from mypy.nodes import (
ClassDef, Decorator, Block, Var, OperatorAssignmentStmt,
ExpressionStmt, AssignmentStmt, ReturnStmt, RaiseStmt, AssertStmt,
DelStmt, BreakStmt, ContinueStmt, PassStmt, GlobalDecl,
- WhileStmt, ForStmt, IfStmt, TryStmt, WithStmt, CastExpr,
+ WhileStmt, ForStmt, IfStmt, TryStmt, WithStmt,
TupleExpr, GeneratorExpr, ListComprehension, ListExpr, ConditionalExpr,
DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr,
FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr, SliceExpr, OpExpr,
- UnaryExpr, FuncExpr, TypeApplication, PrintStmt, ImportBase, ComparisonExpr,
+ UnaryExpr, FuncExpr, PrintStmt, ImportBase, ComparisonExpr,
StarExpr, YieldFromExpr, NonlocalDecl, DictionaryComprehension,
SetComprehension, ComplexExpr, EllipsisExpr, YieldExpr, ExecStmt, Argument,
BackquoteExpr
@@ -32,7 +32,7 @@ from mypy.nodes import (
from mypy import defaults
from mypy import nodes
from mypy.errors import Errors, CompileError
-from mypy.types import Void, Type, CallableType, AnyType, UnboundType
+from mypy.types import Type, CallableType, AnyType, UnboundType
from mypy.parsetype import (
parse_type, parse_types, parse_signature, TypeParseError, parse_str_as_signature
)
@@ -70,7 +70,8 @@ none = Token('') # Empty token
def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
- custom_typing_module: str = None, implicit_any: bool = False) -> MypyFile:
+ custom_typing_module: str = None,
+ fast_parser: bool = False) -> MypyFile:
"""Parse a source file, without doing any semantic analysis.
Return the parse tree. If errors is not provided, raise ParseError
@@ -78,13 +79,20 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
The pyversion (major, minor) argument determines the Python syntax variant.
"""
+ if fast_parser:
+ import mypy.fastparse
+ return mypy.fastparse.parse(source,
+ fnam=fnam,
+ errors=errors,
+ pyversion=pyversion,
+ custom_typing_module=custom_typing_module)
+
is_stub_file = bool(fnam) and fnam.endswith('.pyi')
parser = Parser(fnam,
errors,
pyversion,
custom_typing_module,
- is_stub_file=is_stub_file,
- implicit_any=implicit_any)
+ is_stub_file=is_stub_file)
tree = parser.parse(source)
tree.path = fnam
tree.is_stub = is_stub_file
@@ -116,13 +124,11 @@ class Parser:
ignored_lines = None # type: Set[int]
def __init__(self, fnam: str, errors: Errors, pyversion: Tuple[int, int],
- custom_typing_module: str = None, is_stub_file: bool = False,
- implicit_any = False) -> None:
+ custom_typing_module: str = None, is_stub_file: bool = False) -> None:
self.raise_on_error = errors is None
self.pyversion = pyversion
self.custom_typing_module = custom_typing_module
self.is_stub_file = is_stub_file
- self.implicit_any = implicit_any
if errors is not None:
self.errors = errors
else:
@@ -329,7 +335,7 @@ class Parser:
metaclass = None
try:
- commas, base_types = [], [] # type: List[Token], List[Node]
+ base_types = [] # type: List[Node]
try:
name_tok = self.expect_type(Name)
name = name_tok.string
@@ -341,13 +347,13 @@ class Parser:
while True:
if self.current_str() == ')':
break
- if self.current_str() == 'metaclass':
- metaclass = self.parse_metaclass()
+ if self.peek().string == '=':
+ metaclass = self.parse_class_keywords()
break
base_types.append(self.parse_super_type())
if self.current_str() != ',':
break
- commas.append(self.skip())
+ self.skip()
self.expect(')')
except ParseError:
pass
@@ -360,14 +366,29 @@ class Parser:
self.errors.pop_type()
self.is_class_body = old_is_class_body
+ def parse_class_keywords(self) -> Optional[str]:
+ """Parse the class keyword arguments, keeping the metaclass but
+ ignoring all others. Returns None if the metaclass isn't found.
+ """
+ metaclass = None
+ while True:
+ key = self.expect_type(Name)
+ self.expect('=')
+ if key.string == 'metaclass':
+ metaclass = self.parse_qualified_name()
+ else:
+ # skip the class value
+ self.parse_expression(precedence[','])
+ if self.current_str() != ',':
+ break
+ self.skip()
+ if self.current_str() == ')':
+ break
+ return metaclass
+
def parse_super_type(self) -> Node:
return self.parse_expression(precedence[','])
- def parse_metaclass(self) -> str:
- self.expect('metaclass')
- self.expect('=')
- return self.parse_qualified_name()
-
def parse_decorated_function_or_class(self) -> Node:
decorators = []
no_type_checks = False
@@ -421,7 +442,18 @@ class Parser:
self.errors.report(
def_tok.line, 'Function has duplicate type signatures')
sig = cast(CallableType, comment_type)
- if is_method and len(sig.arg_kinds) < len(arg_kinds):
+ if sig.is_ellipsis_args:
+ # When we encounter an ellipsis, fill in the arg_types with
+ # a bunch of AnyTypes, emulating Callable[..., T]
+ arg_types = [AnyType()] * len(arg_kinds) # type: List[Type]
+ typ = CallableType(
+ arg_types,
+ arg_kinds,
+ arg_names,
+ sig.ret_type,
+ None,
+ line=def_tok.line)
+ elif is_method and len(sig.arg_kinds) < len(arg_kinds):
self.check_argument_kinds(arg_kinds,
[nodes.ARG_POS] + sig.arg_kinds,
def_tok.line)
@@ -450,20 +482,6 @@ class Parser:
if is_error:
return None
- # add implicit anys
- if typ is None and self.implicit_any and not self.is_stub_file:
- ret_type = None # type: Type
- if is_method and name == '__init__':
- ret_type = UnboundType('None', [])
- else:
- ret_type = AnyType()
- typ = CallableType([AnyType() for _ in args],
- arg_kinds,
- [a.variable.name() for a in args],
- ret_type,
- None,
- line=def_tok.line)
-
node = FuncDef(name, args, body, typ)
node.set_line(def_tok)
if typ is not None:
@@ -628,9 +646,10 @@ class Parser:
elif self.current_str() == '(':
arg, extra_stmt, names = self.parse_tuple_arg(len(args))
args.append(arg)
- extra_stmts.append(extra_stmt)
+ if extra_stmt is not None:
+ extra_stmts.append(extra_stmt)
+ has_tuple_arg = True
arg_names.extend(names)
- has_tuple_arg = True
else:
arg, require_named = self.parse_normal_arg(
require_named,
@@ -688,6 +707,10 @@ class Parser:
function body (the second return value).
Return tuple (argument, decomposing assignment, list of names defined).
+
+ Special case: if the argument is just (x) then it's not a tuple;
+ we indicate this by returning (argument, None, ['x']).
+ However, if the argument is (x,) then it *is* a (singleton) tuple.
"""
line = self.current().line
# Generate a new argument name that is very unlikely to clash with anything.
@@ -697,13 +720,14 @@ class Parser:
paren_arg = self.parse_parentheses()
self.verify_tuple_arg(paren_arg)
if isinstance(paren_arg, NameExpr):
- # This isn't a tuple. Revert to a normal argument. We'll still get a no-op
- # assignment below but that's benign.
+ # This isn't a tuple. Revert to a normal argument.
arg_name = paren_arg.name
- rvalue = NameExpr(arg_name)
- rvalue.set_line(line)
- decompose = AssignmentStmt([paren_arg], rvalue)
- decompose.set_line(line)
+ decompose = None
+ else:
+ rvalue = NameExpr(arg_name)
+ rvalue.set_line(line)
+ decompose = AssignmentStmt([paren_arg], rvalue)
+ decompose.set_line(line)
kind = nodes.ARG_POS
initializer = None
if self.current_str() == '=':
@@ -791,13 +815,13 @@ class Parser:
arg_types = [arg.type_annotation for arg in args]
for i in range(len(arg_types)):
if arg_types[i] is None:
- arg_types[i] = AnyType()
+ arg_types[i] = AnyType(implicit=True)
if ret_type is None:
- ret_type = AnyType()
+ ret_type = AnyType(implicit=True)
arg_kinds = [arg.kind for arg in args]
arg_names = [arg.variable.name() for arg in args]
return CallableType(arg_types, arg_kinds, arg_names, ret_type, None, name=None,
- variables=None, bound_vars=[], line=line)
+ variables=None, line=line)
# Parsing statements
@@ -1837,7 +1861,7 @@ class Parser:
self.parse_error_at(self.current())
raise ParseError()
- def parse_error_at(self, tok: Token, skip: bool = True) -> None:
+ def parse_error_at(self, tok: Token, skip: bool = True, reason: Optional[str] = None) -> None:
msg = ''
if isinstance(tok, LexError):
msg = token_repr(tok)
@@ -1845,7 +1869,8 @@ class Parser:
elif isinstance(tok, Indent) or isinstance(tok, Dedent):
msg = 'Inconsistent indentation'
else:
- msg = 'Parse error before {}'.format(token_repr(tok))
+ formatted_reason = ": {}".format(reason) if reason else ""
+ msg = 'Parse error before {}{}'.format(token_repr(tok), formatted_reason)
self.errors.report(tok.line, msg)
@@ -1878,7 +1903,7 @@ class Parser:
try:
typ, self.ind = parse_type(self.tok, self.ind)
except TypeParseError as e:
- self.parse_error_at(e.token)
+ self.parse_error_at(e.token, reason=e.message)
raise ParseError()
return typ
@@ -1908,7 +1933,7 @@ class Parser:
else:
type, index = parse_signature(tokens)
except TypeParseError as e:
- self.parse_error_at(e.token, skip=False)
+ self.parse_error_at(e.token, skip=False, reason=e.message)
return None
if index < len(tokens) - 2:
self.parse_error_at(tokens[index], skip=False)
@@ -1968,7 +1993,7 @@ if __name__ == '__main__':
import sys
def usage():
- print('Usage: parse.py [--py2] [--quiet] FILE [...]')
+ print('Usage: parse.py [--py2] [--quiet] FILE [...]', file=sys.stderr)
sys.exit(2)
args = sys.argv[1:]
diff --git a/mypy/parsetype.py b/mypy/parsetype.py
index 555daa6..73bb12e 100644
--- a/mypy/parsetype.py
+++ b/mypy/parsetype.py
@@ -1,12 +1,12 @@
"""Type parser"""
-from typing import List, Tuple, Union, cast
+from typing import List, Tuple, Union, cast, Optional
from mypy.types import (
- Type, UnboundType, TupleType, UnionType, TypeList, AnyType, CallableType, StarType,
+ Type, UnboundType, TupleType, TypeList, CallableType, StarType,
EllipsisType
)
-from mypy.lex import Token, Name, StrLit, Break, lex
+from mypy.lex import Token, Name, StrLit, lex
from mypy import nodes
@@ -14,10 +14,11 @@ none = Token('') # Empty token
class TypeParseError(Exception):
- def __init__(self, token: Token, index: int) -> None:
+ def __init__(self, token: Token, index: int, message: Optional[str] = None) -> None:
super().__init__()
self.token = token
self.index = index
+ self.message = message
def parse_type(tok: List[Token], index: int) -> Tuple[Type, int]:
@@ -223,6 +224,7 @@ def parse_signature(tokens: List[Token]) -> Tuple[CallableType, int]:
i += 1
arg_types = [] # type: List[Type]
arg_kinds = [] # type: List[int]
+ encountered_ellipsis = False
while tokens[i].string != ')':
if tokens[i].string == '*':
arg_kinds.append(nodes.ARG_STAR)
@@ -235,6 +237,16 @@ def parse_signature(tokens: List[Token]) -> Tuple[CallableType, int]:
arg, i = parse_type(tokens, i)
arg_types.append(arg)
next = tokens[i].string
+
+ # Check for ellipsis. If it exists, assert it's the only arg_type.
+ # Disallow '(..., int) -> None' for example.
+ if isinstance(arg, EllipsisType):
+ encountered_ellipsis = True
+ if encountered_ellipsis and len(arg_types) != 1:
+ raise TypeParseError(tokens[i], i,
+ "Ellipses cannot accompany other argument types"
+ " in function type signature.")
+
if next not in ',)':
raise TypeParseError(tokens[i], i)
if next == ',':
@@ -247,4 +259,5 @@ def parse_signature(tokens: List[Token]) -> Tuple[CallableType, int]:
return CallableType(arg_types,
arg_kinds,
[None] * len(arg_types),
- ret_type, None), i
+ ret_type, None,
+ is_ellipsis_args=encountered_ellipsis), i
diff --git a/mypy/replacetvars.py b/mypy/replacetvars.py
index a7658e8..675cc58 100644
--- a/mypy/replacetvars.py
+++ b/mypy/replacetvars.py
@@ -1,9 +1,6 @@
"""Type operations"""
-import typing
-
-from mypy.lex import Token
-from mypy.types import Type, AnyType, NoneTyp, TypeTranslator, TypeVarType
+from mypy.types import Type, AnyType, TypeTranslator, TypeVarType
def replace_type_vars(typ: Type, func_tvars: bool = True) -> Type:
diff --git a/mypy/report.py b/mypy/report.py
index bf52a73..a4a71bb 100644
--- a/mypy/report.py
+++ b/mypy/report.py
@@ -4,20 +4,21 @@ from abc import ABCMeta, abstractmethod
import cgi
import os
import shutil
+import tokenize
-from typing import Callable, Dict, List, cast
+from typing import Callable, Dict, List, Tuple, cast
-from mypy.types import Type
-from mypy.nodes import MypyFile, Node
+from mypy.nodes import MypyFile, Node, FuncDef
from mypy import stats
+from mypy.traverser import TraverserVisitor
+from mypy.types import Type
reporter_classes = {} # type: Dict[str, Callable[[Reports, str], AbstractReporter]]
class Reports:
- def __init__(self, main_file: str, data_dir: str, report_dirs: Dict[str, str]) -> None:
- self.main_file = main_file
+ def __init__(self, data_dir: str, report_dirs: Dict[str, str]) -> None:
self.data_dir = data_dir
self.reporters = [] # type: List[AbstractReporter]
self.named_reporters = {} # type: Dict[str, AbstractReporter]
@@ -58,6 +59,50 @@ class AbstractReporter(metaclass=ABCMeta):
pass
+class FuncCounterVisitor(TraverserVisitor):
+ def __init__(self) -> None:
+ super().__init__()
+ self.counts = [0, 0]
+
+ def visit_func_def(self, defn: FuncDef):
+ self.counts[defn.type is not None] += 1
+
+
+class LineCountReporter(AbstractReporter):
+ def __init__(self, reports: Reports, output_dir: str) -> None:
+ super().__init__(reports, output_dir)
+ self.counts = {} # type: Dict[str, Tuple[int, int, int, int]]
+
+ stats.ensure_dir_exists(output_dir)
+
+ def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+ physical_lines = len(open(tree.path).readlines())
+
+ func_counter = FuncCounterVisitor()
+ tree.accept(func_counter)
+ unannotated_funcs, annotated_funcs = func_counter.counts
+ total_funcs = annotated_funcs + unannotated_funcs
+
+ imputed_annotated_lines = (physical_lines * annotated_funcs // total_funcs
+ if total_funcs else physical_lines)
+
+ self.counts[tree._fullname] = (imputed_annotated_lines, physical_lines,
+ annotated_funcs, total_funcs)
+
+ def on_finish(self) -> None:
+ counts = sorted(((c, p) for p, c in self.counts.items()),
+ reverse=True) # type: List[Tuple[tuple, str]]
+ total_counts = tuple(sum(c[i] for c, p in counts)
+ for i in range(4))
+ with open(os.path.join(self.output_dir, 'linecount.txt'), 'w') as f:
+ f.write('{:7} {:7} {:6} {:6} total\n'.format(*total_counts))
+ for c, p in counts:
+ f.write('{:7} {:7} {:6} {:6} {}\n'.format(
+ c[0], c[1], c[2], c[3], p))
+
+reporter_classes['linecount'] = LineCountReporter
+
+
class OldHtmlReporter(AbstractReporter):
"""Old HTML reporter.
@@ -97,7 +142,6 @@ class MemoryXmlReporter(AbstractReporter):
super().__init__(reports, output_dir)
- self.main_file = reports.main_file
self.xslt_html_path = os.path.join(reports.data_dir, 'xml', 'mypy-html.xslt')
self.xslt_txt_path = os.path.join(reports.data_dir, 'xml', 'mypy-txt.xslt')
self.css_html_path = os.path.join(reports.data_dir, 'xml', 'mypy-html.css')
@@ -125,7 +169,7 @@ class MemoryXmlReporter(AbstractReporter):
doc = etree.ElementTree(root)
file_info = FileInfo(path, tree._fullname)
- with open(path) as input_file:
+ with tokenize.open(path) as input_file:
for lineno, line_text in enumerate(input_file, 1):
status = visitor.line_map.get(lineno, stats.TYPE_EMPTY)
file_info.counts[status] += 1
@@ -150,7 +194,7 @@ class MemoryXmlReporter(AbstractReporter):
# index_path = os.path.join(self.output_dir, 'index.xml')
output_files = sorted(self.files, key=lambda x: x.module)
- root = etree.Element('mypy-report-index', name=self.main_file)
+ root = etree.Element('mypy-report-index', name='index')
doc = etree.ElementTree(root)
for file_info in output_files:
diff --git a/mypy/sametypes.py b/mypy/sametypes.py
index e42d842..bfe37a6 100644
--- a/mypy/sametypes.py
+++ b/mypy/sametypes.py
@@ -1,4 +1,4 @@
-from typing import List, cast, Sequence
+from typing import Sequence
from mypy.types import (
Type, UnboundType, ErrorType, AnyType, NoneTyp, Void, TupleType, UnionType, CallableType,
@@ -78,17 +78,17 @@ class SameTypeVisitor(TypeVisitor[bool]):
def visit_instance(self, left: Instance) -> bool:
return (isinstance(self.right, Instance) and
- left.type == (cast(Instance, self.right)).type and
- is_same_types(left.args, (cast(Instance, self.right)).args))
+ left.type == self.right.type and
+ is_same_types(left.args, self.right.args))
def visit_type_var(self, left: TypeVarType) -> bool:
return (isinstance(self.right, TypeVarType) and
- left.id == (cast(TypeVarType, self.right)).id)
+ left.id == self.right.id)
def visit_callable_type(self, left: CallableType) -> bool:
# FIX generics
if isinstance(self.right, CallableType):
- cright = cast(CallableType, self.right)
+ cright = self.right
return (is_same_type(left.ret_type, cright.ret_type) and
is_same_types(left.arg_types, cright.arg_types) and
left.arg_names == cright.arg_names and
@@ -100,14 +100,14 @@ class SameTypeVisitor(TypeVisitor[bool]):
def visit_tuple_type(self, left: TupleType) -> bool:
if isinstance(self.right, TupleType):
- return is_same_types(left.items, cast(TupleType, self.right).items)
+ return is_same_types(left.items, self.right.items)
else:
return False
def visit_union_type(self, left: UnionType) -> bool:
# XXX This is a test for syntactic equality, not equivalence
if isinstance(self.right, UnionType):
- return is_same_types(left.items, cast(UnionType, self.right).items)
+ return is_same_types(left.items, self.right.items)
else:
return False
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 2d20c0a..e19681d 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -30,7 +30,10 @@ subdivided into three passes:
it will reject Dict[int]. We don't do this in the second pass,
since we infer the type argument counts of classes during this
pass, and it is possible to refer to classes defined later in a
- file, which would not have the type argument count set yet.
+ file, which would not have the type argument count set yet. This
+ pass also recomputes the method resolution order of each class, in
+ case one of its bases belongs to a module involved in an import
+ loop.
Semantic analysis of types is implemented in module mypy.typeanal.
@@ -41,7 +44,7 @@ TODO: Check if the third pass slows down type checking significantly.
"""
from typing import (
- List, Dict, Set, Tuple, cast, Any, overload, TypeVar, Union, Optional
+ List, Dict, Set, Tuple, cast, Any, overload, TypeVar, Union, Optional, Callable
)
from mypy.nodes import (
@@ -55,7 +58,7 @@ from mypy.nodes import (
SliceExpr, CastExpr, TypeApplication, Context, SymbolTable,
SymbolTableNode, BOUND_TVAR, UNBOUND_TVAR, ListComprehension, GeneratorExpr,
FuncExpr, MDEF, FuncBase, Decorator, SetExpr, TypeVarExpr,
- StrExpr, PrintStmt, ConditionalExpr, PromoteExpr,
+ StrExpr, BytesExpr, PrintStmt, ConditionalExpr, PromoteExpr,
ComparisonExpr, StarExpr, ARG_POS, ARG_NAMED, MroError, type_aliases,
YieldFromExpr, NamedTupleExpr, NonlocalDecl,
SetComprehension, DictionaryComprehension, TYPE_ALIAS, TypeAliasExpr,
@@ -76,6 +79,7 @@ from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError
from mypy.lex import lex
from mypy.parsetype import parse_type
from mypy.sametypes import is_same_type
+from mypy.erasetype import erase_typevars
from mypy import defaults
@@ -169,6 +173,8 @@ class SemanticAnalyzer(NodeVisitor):
# Stack of functions being analyzed
function_stack = None # type: List[FuncItem]
+ # Stack of next available function type variable ids
+ next_function_tvar_id_stack = None # type: List[int]
# Status of postponing analysis of nested function bodies. By using this we
# can have mutually recursive nested functions. Values are FUNCTION_x
@@ -184,8 +190,11 @@ class SemanticAnalyzer(NodeVisitor):
imports = None # type: Set[str] # Imported modules (during phase 2 analysis)
errors = None # type: Errors # Keeps track of generated errors
- def __init__(self, lib_path: List[str], errors: Errors,
- pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION) -> None:
+ def __init__(self,
+ lib_path: List[str],
+ errors: Errors,
+ pyversion: Tuple[int, int],
+ check_untyped_defs: bool) -> None:
"""Construct semantic analyzer.
Use lib_path to search for modules, and report analysis errors
@@ -198,12 +207,14 @@ class SemanticAnalyzer(NodeVisitor):
self.bound_tvars = None
self.tvar_stack = []
self.function_stack = []
+ self.next_function_tvar_id_stack = [-1]
self.block_depth = [0]
self.loop_depth = 0
self.lib_path = lib_path
self.errors = errors
self.modules = {}
self.pyversion = pyversion
+ self.check_untyped_defs = check_untyped_defs
self.postpone_nested_functions_stack = [FUNCTION_BOTH_PHASES]
self.postponed_functions_stack = []
@@ -237,10 +248,12 @@ class SemanticAnalyzer(NodeVisitor):
def visit_func_def(self, defn: FuncDef) -> None:
phase_info = self.postpone_nested_functions_stack[-1]
if phase_info != FUNCTION_SECOND_PHASE:
+ self.function_stack.append(defn)
# First phase of analysis for function.
self.errors.push_function(defn.name())
self.update_function_type_variables(defn)
self.errors.pop_function()
+ self.function_stack.pop()
defn.is_conditional = self.block_depth[-1] > 0
@@ -306,9 +319,10 @@ class SemanticAnalyzer(NodeVisitor):
self.fail('Method must have at least one argument', func)
elif func.type:
sig = cast(FunctionLike, func.type)
- # TODO: A classmethod's first argument should be more
- # precisely typed than Any.
- leading_type = AnyType() if func.is_class else self_type(self.type)
+ if func.is_class:
+ leading_type = self.class_type(self.type)
+ else:
+ leading_type = self_type(self.type)
func.type = replace_implicit_first_type(sig, leading_type)
def is_conditional_func(self, previous: Node, new: FuncDef) -> bool:
@@ -335,7 +349,9 @@ class SemanticAnalyzer(NodeVisitor):
typevars = [(name, tvar) for name, tvar in typevars
if not self.is_defined_type_var(name, defn)]
if typevars:
- defs = [TypeVarDef(tvar[0], -i - 1, tvar[1].values, self.object_type(),
+ next_tvar_id = self.next_function_tvar_id()
+ defs = [TypeVarDef(tvar[0], next_tvar_id - i,
+ tvar[1].values, tvar[1].upper_bound,
tvar[1].variance)
for i, tvar in enumerate(typevars)]
functype.variables = defs
@@ -430,9 +446,17 @@ class SemanticAnalyzer(NodeVisitor):
self.fail("Decorated property not supported", item)
item.func.accept(self)
+ def next_function_tvar_id(self) -> int:
+ return self.next_function_tvar_id_stack[-1]
+
def analyze_function(self, defn: FuncItem) -> None:
is_method = self.is_class_scope()
+
tvarnodes = self.add_func_type_variables_to_symbol_table(defn)
+ next_function_tvar_id = min([self.next_function_tvar_id()] +
+ [n.tvar_id - 1 for n in tvarnodes])
+ self.next_function_tvar_id_stack.append(next_function_tvar_id)
+
if defn.type:
# Signature must be analyzed in the surrounding scope so that
# class-level imported names and type variables are in scope.
@@ -471,7 +495,9 @@ class SemanticAnalyzer(NodeVisitor):
self.postpone_nested_functions_stack.pop()
self.postponed_functions_stack.pop()
+ self.next_function_tvar_id_stack.pop()
disable_typevars(tvarnodes)
+
self.leave()
self.function_stack.pop()
@@ -482,11 +508,11 @@ class SemanticAnalyzer(NodeVisitor):
tt = defn.type
names = self.type_var_names()
items = cast(CallableType, tt).variables
- for i, item in enumerate(items):
+ for item in items:
name = item.name
if name in names:
self.name_already_defined(name, defn)
- node = self.bind_type_var(name, -i - 1, defn)
+ node = self.bind_type_var(name, item.id, defn)
nodes.append(node)
names.add(name)
return nodes
@@ -654,7 +680,7 @@ class SemanticAnalyzer(NodeVisitor):
removed.append(i)
for j, (name, tvar_expr) in enumerate(tvars):
type_vars.append(TypeVarDef(name, j + 1, tvar_expr.values,
- self.object_type(), tvar_expr.variance))
+ tvar_expr.upper_bound, tvar_expr.variance))
if type_vars:
defn.type_vars = type_vars
if defn.info:
@@ -736,22 +762,17 @@ class SemanticAnalyzer(NodeVisitor):
obj = self.object_type()
defn.base_types.insert(0, obj)
defn.info.bases = defn.base_types
+ # Calculate the MRO. It might be incomplete at this point if
+ # the bases of defn include classes imported from other
+ # modules in an import loop. We'll recompute it in ThirdPass.
if not self.verify_base_classes(defn):
defn.info.mro = []
return
- try:
- defn.info.calculate_mro()
- except MroError:
- self.fail("Cannot determine consistent method resolution order "
- '(MRO) for "%s"' % defn.name, defn)
- defn.info.mro = []
- else:
- # If there are cyclic imports, we may be missing 'object' in
- # the MRO. Fix MRO if needed.
- if defn.info.mro[-1].fullname() != 'builtins.object':
- defn.info.mro.append(self.object_type().type)
- # The property of falling back to Any is inherited.
- defn.info.fallback_to_any = any(baseinfo.fallback_to_any for baseinfo in defn.info.mro)
+ calculate_class_mro(defn, self.fail)
+ # If there are cyclic imports, we may be missing 'object' in
+ # the MRO. Fix MRO if needed.
+ if defn.info.mro and defn.info.mro[-1].fullname() != 'builtins.object':
+ defn.info.mro.append(self.object_type().type)
def expr_to_analyzed_type(self, expr: Node) -> Type:
if isinstance(expr, CallExpr):
@@ -808,6 +829,16 @@ class SemanticAnalyzer(NodeVisitor):
def object_type(self) -> Instance:
return self.named_type('__builtins__.object')
+ def class_type(self, info: TypeInfo) -> Type:
+ # Construct a function type whose fallback is cls.
+ from mypy import checkmember # To avoid import cycle.
+ leading_type = checkmember.type_object_type(info, self.builtin_type)
+ if isinstance(leading_type, Overloaded):
+ # Overloaded __init__ is too complex to handle. Plus it's stubs only.
+ return AnyType()
+ else:
+ return leading_type
+
def named_type(self, qualified_name: str, args: List[Type] = None) -> Instance:
sym = self.lookup_qualified(qualified_name, None)
return Instance(cast(TypeInfo, sym.node), args or [])
@@ -912,6 +943,9 @@ class SemanticAnalyzer(NodeVisitor):
if node.fullname in type_aliases:
# Node refers to an aliased type such as typing.List; normalize.
node = self.lookup_qualified(type_aliases[node.fullname], ctx)
+ if node.fullname == 'typing.DefaultDict':
+ self.add_module_symbol('collections', '__mypy_collections__', False, ctx)
+ node = self.lookup_qualified('__mypy_collections__.defaultdict', ctx)
return node
def correct_relative_import(self, node: Union[ImportFrom, ImportAll]) -> str:
@@ -1074,8 +1108,7 @@ class SemanticAnalyzer(NodeVisitor):
elif isinstance(lval.node, Var) and lval.is_def:
# Since the is_def flag is set, this must have been analyzed
# already in the first pass and added to the symbol table.
- v = cast(Var, lval.node)
- assert v.name() in self.globals
+ assert lval.node.name() in self.globals
elif (self.is_func_scope() and lval.name not in self.locals[-1] and
lval.name not in self.global_decls[-1] and
lval.name not in self.nonlocal_decls[-1]):
@@ -1179,7 +1212,7 @@ class SemanticAnalyzer(NodeVisitor):
if isinstance(lvalue, RefExpr):
lvalue.is_def = False
if isinstance(lvalue.node, Var):
- var = cast(Var, lvalue.node)
+ var = lvalue.node
var.type = typ
var.is_ready = True
# If node is not a variable, we'll catch it elsewhere.
@@ -1224,17 +1257,19 @@ class SemanticAnalyzer(NodeVisitor):
n_values = call.arg_kinds[1:].count(ARG_POS)
values = self.analyze_types(call.args[1:1 + n_values])
- variance = self.process_typevar_parameters(call.args[1 + n_values:],
- call.arg_names[1 + n_values:],
- call.arg_kinds[1 + n_values:],
- s)
- if variance is None:
+ res = self.process_typevar_parameters(call.args[1 + n_values:],
+ call.arg_names[1 + n_values:],
+ call.arg_kinds[1 + n_values:],
+ bool(values),
+ s)
+ if res is None:
return
+ variance, upper_bound = res
# Yes, it's a valid type variable definition! Add it to the symbol table.
node = self.lookup(name, s)
node.kind = UNBOUND_TVAR
- TypeVar = TypeVarExpr(name, node.fullname, values, variance)
+ TypeVar = TypeVarExpr(name, node.fullname, values, upper_bound, variance)
TypeVar.line = call.line
call.analyzed = TypeVar
node.node = TypeVar
@@ -1243,7 +1278,7 @@ class SemanticAnalyzer(NodeVisitor):
if len(call.args) < 1:
self.fail("Too few arguments for TypeVar()", context)
return False
- if not isinstance(call.args[0], StrExpr) or not call.arg_kinds[0] == ARG_POS:
+ if not isinstance(call.args[0], (StrExpr, BytesExpr)) or not call.arg_kinds[0] == ARG_POS:
self.fail("TypeVar() expects a string literal as first argument", context)
return False
if cast(StrExpr, call.args[0]).value != name:
@@ -1270,9 +1305,11 @@ class SemanticAnalyzer(NodeVisitor):
def process_typevar_parameters(self, args: List[Node],
names: List[Optional[str]],
kinds: List[int],
- context: Context) -> Optional[int]:
+ has_values: bool,
+ context: Context) -> Optional[Tuple[int, Type]]:
covariant = False
contravariant = False
+ upper_bound = self.object_type() # type: Type
for param_value, param_name, param_kind in zip(args, names, kinds):
if not param_kind == ARG_NAMED:
self.fail("Unexpected argument to TypeVar()", context)
@@ -1298,8 +1335,14 @@ class SemanticAnalyzer(NodeVisitor):
self.fail("TypeVar 'contravariant' may only be 'True'", context)
return None
elif param_name == 'bound':
- self.fail("TypeVar 'bound' argument not supported yet", context)
- return None
+ if has_values:
+ self.fail("TypeVar cannot have both values and an upper bound", context)
+ return None
+ try:
+ upper_bound = self.expr_to_analyzed_type(param_value)
+ except TypeTranslationError:
+ self.fail("TypeVar 'bound' must be a type", param_value)
+ return None
elif param_name == 'values':
# Probably using obsolete syntax with values=(...). Explain the current syntax.
self.fail("TypeVar 'values' argument not supported", context)
@@ -1309,34 +1352,39 @@ class SemanticAnalyzer(NodeVisitor):
else:
self.fail("Unexpected argument to TypeVar(): {}".format(param_name), context)
return None
+
if covariant and contravariant:
self.fail("TypeVar cannot be both covariant and contravariant", context)
return None
elif covariant:
- return COVARIANT
+ variance = COVARIANT
elif contravariant:
- return CONTRAVARIANT
+ variance = CONTRAVARIANT
else:
- return INVARIANT
+ variance = INVARIANT
+ return (variance, upper_bound)
def process_namedtuple_definition(self, s: AssignmentStmt) -> None:
"""Check if s defines a namedtuple; if yes, store the definition in symbol table."""
if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], NameExpr):
return
- named_tuple = self.check_namedtuple(s.rvalue)
+ lvalue = cast(NameExpr, s.lvalues[0])
+ name = lvalue.name
+ named_tuple = self.check_namedtuple(s.rvalue, name)
if named_tuple is None:
return
# Yes, it's a valid namedtuple definition. Add it to the symbol table.
- lvalue = cast(NameExpr, s.lvalues[0])
- name = lvalue.name
node = self.lookup(name, s)
node.kind = GDEF # TODO locally defined namedtuple
# TODO call.analyzed
node.node = named_tuple
- def check_namedtuple(self, node: Node) -> TypeInfo:
+ def check_namedtuple(self, node: Node, var_name: str = None) -> TypeInfo:
"""Check if a call defines a namedtuple.
+ The optional var_name argument is the name of the variable to
+ which this is assigned, if any.
+
If it does, return the corresponding TypeInfo. Return None otherwise.
If the definition is invalid but looks like a namedtuple,
@@ -1351,18 +1399,23 @@ class SemanticAnalyzer(NodeVisitor):
fullname = callee.fullname
if fullname not in ('collections.namedtuple', 'typing.NamedTuple'):
return None
- items, types = self.parse_namedtuple_args(call, fullname)
- if not items:
+ items, types, ok = self.parse_namedtuple_args(call, fullname)
+ if not ok:
# Error. Construct dummy return value.
return self.build_namedtuple_typeinfo('namedtuple', [], [])
else:
+ # Give it a unique name derived from the line number.
name = cast(StrExpr, call.args[0]).value
+ if name != var_name:
+ name += '@' + str(call.line)
info = self.build_namedtuple_typeinfo(name, items, types)
+ # Store it as a global just in case it would remain anonymous.
+ self.globals[name] = SymbolTableNode(GDEF, info, self.cur_mod_id)
call.analyzed = NamedTupleExpr(info).set_line(call.line)
return info
def parse_namedtuple_args(self, call: CallExpr,
- fullname: str) -> Tuple[List[str], List[Type]]:
+ fullname: str) -> Tuple[List[str], List[Type], bool]:
# TODO Share code with check_argument_count in checkexpr.py?
args = call.args
if len(args) < 2:
@@ -1371,34 +1424,35 @@ class SemanticAnalyzer(NodeVisitor):
return self.fail_namedtuple_arg("Too many arguments for namedtuple()", call)
if call.arg_kinds != [ARG_POS, ARG_POS]:
return self.fail_namedtuple_arg("Unexpected arguments to namedtuple()", call)
- if not isinstance(args[0], StrExpr):
+ if not isinstance(args[0], (StrExpr, BytesExpr)):
return self.fail_namedtuple_arg(
"namedtuple() expects a string literal as the first argument", call)
types = [] # type: List[Type]
+ ok = True
if not isinstance(args[1], ListExpr):
- if fullname == 'collections.namedtuple' and isinstance(args[1], StrExpr):
+ if fullname == 'collections.namedtuple' and isinstance(args[1], (StrExpr, BytesExpr)):
str_expr = cast(StrExpr, args[1])
items = str_expr.value.split()
else:
return self.fail_namedtuple_arg(
"List literal expected as the second argument to namedtuple()", call)
else:
- listexpr = cast(ListExpr, args[1])
+ listexpr = args[1]
if fullname == 'collections.namedtuple':
# The fields argument contains just names, with implicit Any types.
- if any(not isinstance(item, StrExpr) for item in listexpr.items):
+ if any(not isinstance(item, (StrExpr, BytesExpr)) for item in listexpr.items):
return self.fail_namedtuple_arg("String literal expected as namedtuple() item",
call)
items = [cast(StrExpr, item).value for item in listexpr.items]
else:
# The fields argument contains (name, type) tuples.
- items, types = self.parse_namedtuple_fields_with_types(listexpr.items, call)
+ items, types, ok = self.parse_namedtuple_fields_with_types(listexpr.items, call)
if not types:
types = [AnyType() for _ in items]
- return items, types
+ return items, types, ok
def parse_namedtuple_fields_with_types(self, nodes: List[Node],
- context: Context) -> Tuple[List[str], List[Type]]:
+ context: Context) -> Tuple[List[str], List[Type], bool]:
items = [] # type: List[str]
types = [] # type: List[Type]
for item in nodes:
@@ -1407,7 +1461,7 @@ class SemanticAnalyzer(NodeVisitor):
return self.fail_namedtuple_arg("Invalid NamedTuple field definition",
item)
name, type_node = item.items
- if isinstance(name, StrExpr):
+ if isinstance(name, (StrExpr, BytesExpr)):
items.append(name.value)
else:
return self.fail_namedtuple_arg("Invalid NamedTuple() field name", item)
@@ -1418,11 +1472,12 @@ class SemanticAnalyzer(NodeVisitor):
types.append(self.anal_type(type))
else:
return self.fail_namedtuple_arg("Tuple expected as NamedTuple() field", item)
- return items, types
+ return items, types, True
- def fail_namedtuple_arg(self, message: str, context: Context) -> Tuple[List[str], List[Type]]:
+ def fail_namedtuple_arg(self, message: str,
+ context: Context) -> Tuple[List[str], List[Type], bool]:
self.fail(message, context)
- return [], []
+ return [], [], False
def build_namedtuple_typeinfo(self, name: str, items: List[str],
types: List[Type]) -> TypeInfo:
@@ -1580,11 +1635,11 @@ class SemanticAnalyzer(NodeVisitor):
def visit_break_stmt(self, s: BreakStmt) -> None:
if self.loop_depth == 0:
- self.fail("'break' outside loop", s)
+ self.fail("'break' outside loop", s, True)
def visit_continue_stmt(self, s: ContinueStmt) -> None:
if self.loop_depth == 0:
- self.fail("'continue' outside loop", s)
+ self.fail("'continue' outside loop", s, True)
def visit_if_stmt(self, s: IfStmt) -> None:
infer_reachability_of_if_statement(s, pyversion=self.pyversion)
@@ -1775,6 +1830,9 @@ class SemanticAnalyzer(NodeVisitor):
if not call.args:
return None
if not all(kind == ARG_NAMED for kind in call.arg_kinds):
+ # Must still accept those args.
+ for a in call.args:
+ a.accept(self)
return None
expr = DictExpr([(StrExpr(key), value)
for key, value in zip(call.arg_names, call.args)])
@@ -1805,11 +1863,9 @@ class SemanticAnalyzer(NodeVisitor):
base = expr.expr
base.accept(self)
# Bind references to module attributes.
- if isinstance(base, RefExpr) and cast(RefExpr,
- base).kind == MODULE_REF:
- file = cast(MypyFile, cast(RefExpr, base).node)
- names = file.names
- n = names.get(expr.name, None)
+ if isinstance(base, RefExpr) and base.kind == MODULE_REF:
+ file = cast(MypyFile, base.node)
+ n = file.names.get(expr.name, None) if file is not None else None
if n:
n = self.normalize_type_alias(n, expr)
if not n:
@@ -1825,7 +1881,7 @@ class SemanticAnalyzer(NodeVisitor):
# one type checker run. If we reported errors here,
# the build would terminate after semantic analysis
# and we wouldn't be able to report any type errors.
- full_name = '%s.%s' % (file.fullname(), expr.name)
+ full_name = '%s.%s' % (file.fullname() if file is not None else None, expr.name)
if full_name in obsolete_name_mapping:
self.fail("Module has no attribute %r (it's now called %r)" % (
expr.name, obsolete_name_mapping[full_name]), expr)
@@ -1848,7 +1904,7 @@ class SemanticAnalyzer(NodeVisitor):
# Translate index to an unanalyzed type.
types = [] # type: List[Type]
if isinstance(expr.index, TupleExpr):
- items = (cast(TupleExpr, expr.index)).items
+ items = expr.index.items
else:
items = [expr.index]
for item in items:
@@ -2151,10 +2207,19 @@ class SemanticAnalyzer(NodeVisitor):
def name_already_defined(self, name: str, ctx: Context) -> None:
self.fail("Name '{}' already defined".format(name), ctx)
- def fail(self, msg: str, ctx: Context) -> None:
+ def fail(self, msg: str, ctx: Context, serious: bool = False) -> None:
+ if (not serious and
+ not self.check_untyped_defs and
+ self.function_stack and
+ self.function_stack[-1].is_dynamic()):
+ return
self.errors.report(ctx.get_line(), msg)
def note(self, msg: str, ctx: Context) -> None:
+ if (not self.check_untyped_defs and
+ self.function_stack and
+ self.function_stack[-1].is_dynamic()):
+ return
self.errors.report(ctx.get_line(), msg, severity='note')
def undefined_name_extra_info(self, fullname: str) -> Optional[str]:
@@ -2214,12 +2279,29 @@ class FirstPass(NodeVisitor):
for d in defs:
d.accept(self)
- # Add implicit definition of 'None' to builtins, as we cannot define a
- # variable with a None type explicitly.
+ # Add implicit definition of literals/keywords to builtins, as we
+ # cannot define a variable with them explicitly.
if mod_id == 'builtins':
- v = Var('None', NoneTyp())
- v._fullname = self.sem.qualified_name('None')
- self.sem.globals['None'] = SymbolTableNode(GDEF, v, self.sem.cur_mod_id)
+ literal_types = [
+ ('None', NoneTyp()),
+ ] # type: List[Tuple[str, Type]]
+
+ # TODO(ddfisher): This guard is only needed because mypy defines
+ # fake builtins for its tests which often don't define bool. If
+ # mypy is fast enough that we no longer need those, this
+ # conditional check should be removed.
+ if 'bool' in self.sem.globals:
+ bool_type = self.sem.named_type('bool')
+ literal_types.extend([
+ ('True', bool_type),
+ ('False', bool_type),
+ ('__debug__', bool_type),
+ ])
+
+ for name, typ in literal_types:
+ v = Var(name, typ)
+ v._fullname = self.sem.qualified_name(name)
+ self.sem.globals[name] = SymbolTableNode(GDEF, v, self.sem.cur_mod_id)
def visit_block(self, b: Block) -> None:
if b.is_unreachable:
@@ -2274,7 +2356,10 @@ class FirstPass(NodeVisitor):
for node in outer_def.defs.body:
if isinstance(node, ClassDef):
node.info = TypeInfo(SymbolTable(), node)
- node.info._fullname = node.info.name()
+ if outer_def.fullname:
+ node.info._fullname = outer_def.fullname + '.' + node.info.name()
+ else:
+ node.info._fullname = node.info.name()
symbol = SymbolTableNode(MDEF, node.info)
outer_def.info.names[node.name] = symbol
self.process_nested_classes(node)
@@ -2306,6 +2391,7 @@ class FirstPass(NodeVisitor):
for n in s.target:
if n:
self.analyze_lvalue(n)
+ s.body.accept(self)
def visit_decorator(self, d: Decorator) -> None:
d.var._fullname = self.sem.qualified_name(d.var.name())
@@ -2338,7 +2424,20 @@ class ThirdPass(TraverserVisitor[None]):
def visit_file(self, file_node: MypyFile, fnam: str) -> None:
self.errors.set_file(fnam)
- file_node.accept(self)
+ self.errors.set_ignored_lines(file_node.ignored_lines)
+ self.accept(file_node)
+ self.errors.set_ignored_lines(set())
+
+ def accept(self, node: Node) -> None:
+ try:
+ node.accept(self)
+ except Exception as err:
+ report_internal_error(err, self.errors.file, node.line)
+
+ def visit_block(self, b: Block) -> None:
+ if b.is_unreachable:
+ return
+ super().visit_block(b)
def visit_func_def(self, fdef: FuncDef) -> None:
self.errors.push_function(fdef.name())
@@ -2349,13 +2448,23 @@ class ThirdPass(TraverserVisitor[None]):
def visit_class_def(self, tdef: ClassDef) -> None:
for type in tdef.info.bases:
self.analyze(type)
+ # Recompute MRO now that we have analyzed all modules, to pick
+ # up superclasses of bases imported from other modules in an
+ # import loop. (Only do so if we succeeded the first time.)
+ if tdef.info.mro:
+ tdef.info.mro = [] # Force recomputation
+ calculate_class_mro(tdef, self.fail)
super().visit_class_def(tdef)
def visit_decorator(self, dec: Decorator) -> None:
"""Try to infer the type of the decorated function.
- This helps us resolve forward references to decorated
- functions during type checking.
+ This lets us resolve references to decorated functions during
+ type checking when there are cyclic imports, as otherwise the
+ type might not be available when we need it.
+
+ This basically uses a simple special-purpose type inference
+ engine just for decorators.
"""
super().visit_decorator(dec)
if dec.var.is_property:
@@ -2381,13 +2490,21 @@ class ThirdPass(TraverserVisitor[None]):
decorator_preserves_type = False
break
if decorator_preserves_type:
- # No non-special decorators left. We can trivially infer the type
+ # No non-identity decorators left. We can trivially infer the type
# of the function here.
dec.var.type = function_type(dec.func, self.builtin_type('function'))
- if dec.decorators and returns_any_if_called(dec.decorators[0]):
- # The outermost decorator will return Any so we know the type of the
- # decorated function.
- dec.var.type = AnyType()
+ if dec.decorators:
+ if returns_any_if_called(dec.decorators[0]):
+ # The outermost decorator will return Any so we know the type of the
+ # decorated function.
+ dec.var.type = AnyType()
+ sig = find_fixed_callable_return(dec.decorators[0])
+ if sig:
+ # The outermost decorator always returns the same kind of function,
+ # so we know that this is the type of the decorated function.
+ orig_sig = function_type(dec.func, self.builtin_type('function'))
+ sig.name = orig_sig.items()[0].name
+ dec.var.type = sig
def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
self.analyze(s.type)
@@ -2458,15 +2575,24 @@ def set_callable_name(sig: Type, fdef: FuncDef) -> Type:
def refers_to_fullname(node: Node, fullname: str) -> bool:
"""Is node a name or member expression with the given full name?"""
- return isinstance(node,
- RefExpr) and cast(RefExpr, node).fullname == fullname
+ return isinstance(node, RefExpr) and node.fullname == fullname
def refers_to_class_or_function(node: Node) -> bool:
"""Does semantically analyzed node refer to a class?"""
return (isinstance(node, RefExpr) and
- isinstance(cast(RefExpr, node).node, (TypeInfo, FuncDef,
- OverloadedFuncDef)))
+ isinstance(node.node, (TypeInfo, FuncDef, OverloadedFuncDef)))
+
+
+def calculate_class_mro(defn: ClassDef, fail: Callable[[str, Context], None]) -> None:
+ try:
+ defn.info.calculate_mro()
+ except MroError:
+ fail("Cannot determine consistent method resolution order "
+ '(MRO) for "%s"' % defn.name, defn)
+ defn.info.mro = []
+ # The property of falling back to Any is inherited.
+ defn.info.fallback_to_any = any(baseinfo.fallback_to_any for baseinfo in defn.info.mro)
def find_duplicate(list: List[T]) -> T:
@@ -2497,6 +2623,8 @@ def remove_imported_names_from_symtable(names: SymbolTable,
"""Remove all imported names from the symbol table of a module."""
removed = [] # type: List[str]
for name, node in names.items():
+ if node.node is None:
+ continue
fullname = node.node.fullname()
prefix = fullname[:fullname.rfind('.')]
if prefix != module:
@@ -2516,7 +2644,7 @@ def infer_reachability_of_if_statement(s: IfStmt,
# This condition is always true, so all of the remaining
# elif/else bodies will never be executed.
for body in s.body[i + 1:]:
- mark_block_unreachable(s.body[i])
+ mark_block_unreachable(body)
if s.else_body:
mark_block_unreachable(s.else_body)
break
@@ -2601,3 +2729,23 @@ def returns_any_if_called(expr: Node) -> bool:
elif isinstance(expr, CallExpr):
return returns_any_if_called(expr.callee)
return False
+
+
+def find_fixed_callable_return(expr: Node) -> Optional[CallableType]:
+ if isinstance(expr, RefExpr):
+ if isinstance(expr.node, FuncDef):
+ typ = expr.node.type
+ if typ:
+ if isinstance(typ, CallableType) and has_no_typevars(typ.ret_type):
+ if isinstance(typ.ret_type, CallableType):
+ return typ.ret_type
+ elif isinstance(expr, CallExpr):
+ t = find_fixed_callable_return(expr.callee)
+ if t:
+ if isinstance(t.ret_type, CallableType):
+ return t.ret_type
+ return None
+
+
+def has_no_typevars(typ: Type) -> bool:
+ return is_same_type(typ, erase_typevars(typ))
diff --git a/mypy/strconv.py b/mypy/strconv.py
index 7cd88f3..7276491 100644
--- a/mypy/strconv.py
+++ b/mypy/strconv.py
@@ -399,14 +399,17 @@ class StrConv(NodeVisitor[str]):
return self.dump([o.expr, ('Types', o.types)], o)
def visit_type_var_expr(self, o):
+ import mypy.types
+ a = []
if o.variance == mypy.nodes.COVARIANT:
- return self.dump(['Variance(COVARIANT)'], o)
+ a += ['Variance(COVARIANT)']
if o.variance == mypy.nodes.CONTRAVARIANT:
- return self.dump(['Variance(CONTRAVARIANT)'], o)
+ a += ['Variance(CONTRAVARIANT)']
if o.values:
- return self.dump([('Values', o.values)], o)
- else:
- return 'TypeVarExpr:{}()'.format(o.line)
+ a += [('Values', o.values)]
+ if not mypy.types.is_named_instance(o.upper_bound, 'builtins.object'):
+ a += ['UpperBound({})'.format(o.upper_bound)]
+ return self.dump(a, o)
def visit_type_alias_expr(self, o):
return 'TypeAliasExpr({})'.format(o.type)
diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index 45647a7..7d87688 100644
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -148,7 +148,7 @@ def load_python_module_info(module: str, interpreter: str) -> Tuple[str, Optiona
try:
output_bytes = subprocess.check_output(cmd_template % code, shell=True)
except subprocess.CalledProcessError:
- print("Can't import module %s" % module)
+ print("Can't import module %s" % module, file=sys.stderr)
sys.exit(1)
output = output_bytes.decode('ascii').strip().splitlines()
module_path = output[0]
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 38958a3..46db522 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -60,6 +60,19 @@ def is_equivalent(a: Type, b: Type,
return is_subtype(a, b, type_parameter_checker) and is_subtype(b, a, type_parameter_checker)
+def satisfies_upper_bound(a: Type, upper_bound: Type) -> bool:
+ """Is 'a' a valid value for a type variable with the given 'upper_bound'?
+
+ Same as is_subtype except that Void is considered to be a subtype of
+ any upper_bound. This is needed in a case like
+
+ def f(g: Callable[[], T]) -> T: ...
+ def h() -> None: ...
+ f(h)
+ """
+ return isinstance(a, Void) or is_subtype(a, upper_bound)
+
+
class SubtypeVisitor(TypeVisitor[bool]):
def __init__(self, right: Type,
@@ -95,6 +108,8 @@ class SubtypeVisitor(TypeVisitor[bool]):
return True
def visit_instance(self, left: Instance) -> bool:
+ if left.type.fallback_to_any:
+ return True
right = self.right
if isinstance(right, Instance):
if left.type._promote and is_subtype(left.type._promote,
@@ -119,7 +134,7 @@ class SubtypeVisitor(TypeVisitor[bool]):
if isinstance(right, TypeVarType):
return left.id == right.id
else:
- return is_named_instance(self.right, 'builtins.object')
+ return is_subtype(left.upper_bound, self.right)
def visit_callable_type(self, left: CallableType) -> bool:
right = self.right
@@ -202,9 +217,18 @@ def is_callable_subtype(left: CallableType, right: CallableType,
# Non-type cannot be a subtype of type.
if right.is_type_obj() and not left.is_type_obj():
return False
- if right.variables:
- # Subtyping is not currently supported for generic function as the supertype.
- return False
+
+ # A callable L is a subtype of a generic callable R if L is a
+ # subtype of every type obtained from R by substituting types for
+ # the variables of R. We can check this by simply leaving the
+ # generic variables of R as type variables, effectively varying
+ # over all possible values.
+
+ # It's okay even if these variables share ids with generic
+ # type variables of L, because generating and solving
+ # constraints for the variables of L to make L a subtype of R
+ # (below) treats type variables on the two sides as independent.
+
if left.variables:
# Apply generic type variables away in left via type inference.
left = unify_generic_callable(left, right, ignore_return=ignore_return)
diff --git a/mypy/treetransform.py b/mypy/treetransform.py
index b0b9572..5ae8cc4 100644
--- a/mypy/treetransform.py
+++ b/mypy/treetransform.py
@@ -127,9 +127,9 @@ class TransformVisitor(NodeVisitor[Node]):
new.info = original.info
new.min_args = original.min_args
new.max_pos = original.max_pos
- new.is_implicit = original.is_implicit
new.is_overload = original.is_overload
new.is_generator = original.is_generator
+ new.line = original.line
def duplicate_inits(self,
inits: List[AssignmentStmt]) -> List[AssignmentStmt]:
@@ -444,7 +444,8 @@ class TransformVisitor(NodeVisitor[Node]):
def visit_type_var_expr(self, node: TypeVarExpr) -> Node:
return TypeVarExpr(node.name(), node.fullname(),
- self.types(node.values), variance=node.variance)
+ self.types(node.values),
+ self.type(node.upper_bound), variance=node.variance)
def visit_type_alias_expr(self, node: TypeAliasExpr) -> TypeAliasExpr:
return TypeAliasExpr(node.type)
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index 444b3e6..6de132d 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -1,6 +1,6 @@
"""Semantic analysis of types"""
-from typing import Callable, cast, List, Tuple, Dict, Any, Union
+from typing import Callable, cast, List, Tuple
from mypy.types import (
Type, UnboundType, TypeVarType, TupleType, UnionType, Instance, AnyType, CallableType,
@@ -8,12 +8,13 @@ from mypy.types import (
EllipsisType
)
from mypy.nodes import (
- GDEF, BOUND_TVAR, TYPE_ALIAS, UNBOUND_IMPORTED,
+ BOUND_TVAR, TYPE_ALIAS, UNBOUND_IMPORTED,
TypeInfo, Context, SymbolTableNode, TypeVarExpr, Var, Node,
- IndexExpr, NameExpr, TupleExpr, RefExpr
+ IndexExpr, RefExpr
)
from mypy.sametypes import is_same_type
from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError
+from mypy.subtypes import satisfies_upper_bound
from mypy import nodes
@@ -86,7 +87,7 @@ class TypeAnalyser(TypeVisitor[Type]):
t.name), t)
tvar_expr = cast(TypeVarExpr, sym.node)
return TypeVarType(t.name, sym.tvar_id, tvar_expr.values,
- self.builtin_type('builtins.object'),
+ tvar_expr.upper_bound,
tvar_expr.variance,
t.line)
elif fullname == 'builtins.None':
@@ -179,9 +180,8 @@ class TypeAnalyser(TypeVisitor[Type]):
def visit_callable_type(self, t: CallableType) -> Type:
return t.copy_modified(arg_types=self.anal_array(t.arg_types),
ret_type=t.ret_type.accept(self),
- fallback=self.builtin_type('builtins.function'),
- variables=self.anal_var_defs(t.variables),
- bound_vars=self.anal_bound_vars(t.bound_vars))
+ fallback=t.fallback or self.builtin_type('builtins.function'),
+ variables=self.anal_var_defs(t.variables))
def visit_tuple_type(self, t: TupleType) -> Type:
if t.implicit:
@@ -208,30 +208,38 @@ class TypeAnalyser(TypeVisitor[Type]):
return AnyType()
def analyze_callable_type(self, t: UnboundType) -> Type:
- if len(t.args) != 2:
- self.fail('Invalid function type', t)
- return AnyType()
- ret_type = t.args[1].accept(self)
fallback = self.builtin_type('builtins.function')
- if isinstance(t.args[0], TypeList):
- # Callable[[ARG, ...], RET] (ordinary callable type)
- args = t.args[0].items
- return CallableType(self.anal_array(args),
- [nodes.ARG_POS] * len(args),
- [None] * len(args),
- ret_type=ret_type,
- fallback=fallback)
- elif isinstance(t.args[0], EllipsisType):
- # Callable[..., RET] (with literal ellipsis; accept arbitrary arguments)
+ if len(t.args) == 0:
+ # Callable (bare)
return CallableType([AnyType(), AnyType()],
[nodes.ARG_STAR, nodes.ARG_STAR2],
[None, None],
- ret_type=ret_type,
- fallback=fallback,
- is_ellipsis_args=True)
- else:
- self.fail('Invalid function type', t)
- return AnyType()
+ ret_type=AnyType(),
+ fallback=fallback)
+ elif len(t.args) == 2:
+ ret_type = t.args[1].accept(self)
+ if isinstance(t.args[0], TypeList):
+ # Callable[[ARG, ...], RET] (ordinary callable type)
+ args = t.args[0].items
+ return CallableType(self.anal_array(args),
+ [nodes.ARG_POS] * len(args),
+ [None] * len(args),
+ ret_type=ret_type,
+ fallback=fallback)
+ elif isinstance(t.args[0], EllipsisType):
+ # Callable[..., RET] (with literal ellipsis; accept arbitrary arguments)
+ return CallableType([AnyType(), AnyType()],
+ [nodes.ARG_STAR, nodes.ARG_STAR2],
+ [None, None],
+ ret_type=ret_type,
+ fallback=fallback,
+ is_ellipsis_args=True)
+ else:
+ self.fail('The first argument to Callable must be a list of types or "..."', t)
+ return AnyType()
+
+ self.fail('Invalid function type', t)
+ return AnyType()
def anal_array(self, a: List[Type]) -> List[Type]:
res = [] # type: List[Type]
@@ -239,13 +247,6 @@ class TypeAnalyser(TypeVisitor[Type]):
res.append(t.accept(self))
return res
- def anal_bound_vars(self,
- a: List[Tuple[int, Type]]) -> List[Tuple[int, Type]]:
- res = [] # type: List[Tuple[int, Type]]
- for id, t in a:
- res.append((id, t.accept(self)))
- return res
-
def anal_var_defs(self, var_defs: List[TypeVarDef]) -> List[TypeVarDef]:
a = [] # type: List[TypeVarDef]
for vd in var_defs:
@@ -323,6 +324,10 @@ class TypeAnalyserPass3(TypeVisitor[None]):
arg_values = [arg]
self.check_type_var_values(info, arg_values,
TypeVar.values, t)
+ if not satisfies_upper_bound(arg, TypeVar.upper_bound):
+ self.fail('Type argument "{}" of "{}" must be '
+ 'a subtype of "{}"'.format(
+ arg, info.name(), TypeVar.upper_bound), t)
for arg in t.args:
arg.accept(self)
diff --git a/mypy/types.py b/mypy/types.py
index ff08d71..1d2c231 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -1,7 +1,7 @@
"""Classes for representing mypy types."""
from abc import abstractmethod
-from typing import Any, TypeVar, List, Tuple, cast, Generic, Set, Sequence, Optional
+from typing import Any, TypeVar, Dict, List, Tuple, cast, Generic, Set, Sequence, Optional
import mypy.nodes
from mypy.nodes import INVARIANT, SymbolNode
@@ -9,6 +9,8 @@ from mypy.nodes import INVARIANT, SymbolNode
T = TypeVar('T')
+JsonDict = Dict[str, Any]
+
class Type(mypy.nodes.Context):
"""Abstract base class for all types."""
@@ -27,18 +29,31 @@ class Type(mypy.nodes.Context):
def __repr__(self) -> str:
return self.accept(TypeStrVisitor())
+ def serialize(self) -> JsonDict:
+ raise NotImplementedError('Cannot serialize {} instance'.format(self.__class__.__name__))
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'Type':
+ classname = data['.class']
+ glo = globals()
+ if classname in glo:
+ cl = glo[classname]
+ if 'deserialize' in cl.__dict__:
+ return cl.deserialize(data)
+ raise NotImplementedError('unexpected .class {}'.format(classname))
+
class TypeVarDef(mypy.nodes.Context):
"""Definition of a single type variable."""
name = ''
id = 0
- values = None # type: List[Type]
+ values = None # type: Optional[List[Type]]
upper_bound = None # type: Type
variance = INVARIANT # type: int
line = 0
- def __init__(self, name: str, id: int, values: List[Type],
+ def __init__(self, name: str, id: int, values: Optional[List[Type]],
upper_bound: Type, variance: int = INVARIANT, line: int = -1) -> None:
self.name = name
self.id = id
@@ -53,9 +68,31 @@ class TypeVarDef(mypy.nodes.Context):
def __repr__(self) -> str:
if self.values:
return '{} in {}'.format(self.name, tuple(self.values))
+ elif not is_named_instance(self.upper_bound, 'builtins.object'):
+ return '{} <: {}'.format(self.name, self.upper_bound)
else:
return self.name
+ def serialize(self) -> JsonDict:
+ return {'.class': 'TypeVarDef',
+ 'name': self.name,
+ 'id': self.id,
+ 'values': None if self.values is None else [v.serialize() for v in self.values],
+ 'upper_bound': self.upper_bound.serialize(),
+ 'variance': self.variance,
+ }
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'TypeVarDef':
+ assert data['.class'] == 'TypeVarDef'
+ return TypeVarDef(data['name'],
+ data['id'],
+ None if data['values'] is None
+ else [Type.deserialize(v) for v in data['values']],
+ Type.deserialize(data['upper_bound']),
+ data['variance'],
+ )
+
class UnboundType(Type):
"""Instance type that has not been bound during semantic analysis."""
@@ -73,6 +110,18 @@ class UnboundType(Type):
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_unbound_type(self)
+ def serialize(self) -> JsonDict:
+ return {'.class': 'UnboundType',
+ 'name': self.name,
+ 'args': [a.serialize() for a in self.args],
+ }
+
+ @classmethod
+ def deserialize(self, data: JsonDict) -> 'UnboundType':
+ assert data['.class'] == 'UnboundType'
+ return UnboundType(data['name'],
+ [Type.deserialize(a) for a in data['args']])
+
class ErrorType(Type):
"""The error type is used as the result of failed type operations."""
@@ -98,13 +147,35 @@ class TypeList(Type):
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_type_list(self)
+ def serialize(self) -> JsonDict:
+ return {'.class': 'TypeList',
+ 'items': [t.serialize() for t in self.items],
+ }
+
+ @classmethod
+ def deserialize(self, data: JsonDict) -> 'TypeList':
+ assert data['.class'] == 'TypeList'
+ return TypeList([Type.deserialize(t) for t in data['items']])
+
class AnyType(Type):
"""The type 'Any'."""
+ def __init__(self, implicit=False, line: int = -1) -> None:
+ super().__init__(line)
+ self.implicit = implicit
+
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_any(self)
+ def serialize(self) -> JsonDict:
+ return {'.class': 'AnyType'}
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'AnyType':
+ assert data['.class'] == 'AnyType'
+ return AnyType()
+
class Void(Type):
"""The return type 'None'.
@@ -125,6 +196,14 @@ class Void(Type):
def with_source(self, source: str) -> 'Void':
return Void(source, self.line)
+ def serialize(self) -> JsonDict:
+ return {'.class': 'Void'}
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'Void':
+ assert data['.class'] == 'Void'
+ return Void()
+
class NoneTyp(Type):
"""The type of 'None'.
@@ -145,6 +224,14 @@ class NoneTyp(Type):
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_none_type(self)
+ def serialize(self) -> JsonDict:
+ return {'.class': 'NoneTyp'}
+
+ @classmethod
+ def deserialize(self, data: JsonDict) -> 'NoneTyp':
+ assert data['.class'] == 'NoneTyp'
+ return NoneTyp()
+
class ErasedType(Type):
"""Placeholder for an erased type.
@@ -172,6 +259,15 @@ class DeletedType(Type):
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_deleted_type(self)
+ def serialize(self) -> JsonDict:
+ return {'.class': 'DeletedType',
+ 'source': self.source}
+
+ @classmethod
+ def deserialize(self, data: JsonDict) -> 'DeletedType':
+ assert data['.class'] == 'DeletedType'
+ return DeletedType(data['source'])
+
class Instance(Type):
"""An instance type of form C[T1, ..., Tn].
@@ -193,6 +289,29 @@ class Instance(Type):
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_instance(self)
+ type_ref = None # type: str
+
+ def serialize(self) -> JsonDict:
+ data = {'.class': 'Instance',
+ } # type: JsonDict
+ assert self.type is not None
+ data['type_ref'] = self.type.alt_fullname or self.type.fullname()
+ if self.args:
+ data['args'] = [arg.serialize() for arg in self.args]
+ return data
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'Instance':
+ assert data['.class'] == 'Instance'
+ args = [] # type: List[Type]
+ if 'args' in data:
+ args_list = data['args']
+ assert isinstance(args_list, list)
+ args = [Type.deserialize(arg) for arg in args_list]
+ inst = Instance(None, args)
+ inst.type_ref = data['type_ref'] # Will be fixed up by fixup.py later.
+ return inst
+
class TypeVarType(Type):
"""A type variable type.
@@ -204,7 +323,7 @@ class TypeVarType(Type):
name = '' # Name of the type variable (for messages and debugging)
id = 0 # 1, 2, ... for type-related, -1, ... for function-related
values = None # type: List[Type] # Value restriction, empty list if no restriction
- upper_bound = None # type: Type # Upper bound for values (currently always 'object')
+ upper_bound = None # type: Type # Upper bound for values
# See comments in TypeVarDef for more about variance.
variance = INVARIANT # type: int
@@ -220,6 +339,30 @@ class TypeVarType(Type):
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_type_var(self)
+ def erase_to_union_or_bound(self) -> Type:
+ if self.values:
+ return UnionType.make_simplified_union(self.values)
+ else:
+ return self.upper_bound
+
+ def serialize(self) -> JsonDict:
+ return {'.class': 'TypeVarType',
+ 'name': self.name,
+ 'id': self.id,
+ 'values': [v.serialize() for v in self.values],
+ 'upper_bound': self.upper_bound.serialize(),
+ 'variance': self.variance,
+ }
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'TypeVarType':
+ assert data['.class'] == 'TypeVarType'
+ return TypeVarType(data['name'],
+ data['id'],
+ [Type.deserialize(v) for v in data['values']],
+ Type.deserialize(data['upper_bound']),
+ data['variance'])
+
class FunctionLike(Type):
"""Abstract base class for function types."""
@@ -227,6 +370,9 @@ class FunctionLike(Type):
@abstractmethod
def is_type_obj(self) -> bool: pass
+ def is_concrete_type_obj(self) -> bool:
+ return self.is_type_obj()
+
@abstractmethod
def type_object(self) -> mypy.nodes.TypeInfo: pass
@@ -239,6 +385,10 @@ class FunctionLike(Type):
# Corresponding instance type (e.g. builtins.type)
fallback = None # type: Instance
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'FunctionLike':
+ return cast(FunctionLike, super().deserialize(data))
+
_dummy = object() # type: Any
@@ -249,33 +399,26 @@ class CallableType(FunctionLike):
arg_types = None # type: List[Type] # Types of function arguments
arg_kinds = None # type: List[int] # mypy.nodes.ARG_ constants
arg_names = None # type: List[str] # None if not a keyword argument
- min_args = 0 # Minimum number of arguments
- is_var_arg = False # Is it a varargs function?
- ret_type = None # type:Type # Return value type
+ min_args = 0 # Minimum number of arguments; derived from arg_kinds
+ is_var_arg = False # Is it a varargs function? derived from arg_kinds
+ ret_type = None # type: Type # Return value type
name = '' # Name (may be None; for error messages)
definition = None # type: SymbolNode # For error messages. May be None.
# Type variables for a generic function
variables = None # type: List[TypeVarDef]
- # Implicit bound values of type variables. These can be either for
- # class type variables or for generic function type variables.
- # For example, the method 'append' of List[int] has implicit value
- # 'int' for the list type variable; the explicit method type is
- # just 'def append(int) -> None', without any type variable. Implicit
- # values are needed for runtime type checking, but they do not
- # affect static type checking.
- #
- # All class type arguments must be stored first, ordered by id,
- # and function type arguments must be stored next, again ordered by id
- # (absolute value this time).
- #
- # Stored as tuples (id, type).
- bound_vars = None # type: List[Tuple[int, Type]]
-
# Is this Callable[..., t] (with literal '...')?
is_ellipsis_args = False
+ # Is this callable constructed for the benefit of a classmethod's 'cls' argument?
+ is_classmethod_class = False
+ # Was this type implicitly generated instead of explicitly specified by the user?
+ implicit = False
+ # Defined for signatures that require special handling (currently only value is 'dict'
+ # for a signature similar to 'dict')
+ special_sig = None # type: Optional[str]
- def __init__(self, arg_types: List[Type],
+ def __init__(self,
+ arg_types: List[Type],
arg_kinds: List[int],
arg_names: List[str],
ret_type: Type,
@@ -283,13 +426,14 @@ class CallableType(FunctionLike):
name: str = None,
definition: SymbolNode = None,
variables: List[TypeVarDef] = None,
- bound_vars: List[Tuple[int, Type]] = None,
line: int = -1,
- is_ellipsis_args: bool = False) -> None:
+ is_ellipsis_args: bool = False,
+ implicit=False,
+ is_classmethod_class=False,
+ special_sig=None,
+ ) -> None:
if variables is None:
variables = []
- if not bound_vars:
- bound_vars = []
self.arg_types = arg_types
self.arg_kinds = arg_kinds
self.arg_names = arg_names
@@ -301,8 +445,9 @@ class CallableType(FunctionLike):
self.name = name
self.definition = definition
self.variables = variables
- self.bound_vars = bound_vars
self.is_ellipsis_args = is_ellipsis_args
+ self.implicit = implicit
+ self.special_sig = special_sig
super().__init__(line)
def copy_modified(self,
@@ -314,9 +459,9 @@ class CallableType(FunctionLike):
name: str = _dummy,
definition: SymbolNode = _dummy,
variables: List[TypeVarDef] = _dummy,
- bound_vars: List[Tuple[int, Type]] = _dummy,
line: int = _dummy,
- is_ellipsis_args: bool = _dummy) -> 'CallableType':
+ is_ellipsis_args: bool = _dummy,
+ special_sig: Optional[str] = _dummy) -> 'CallableType':
return CallableType(
arg_types=arg_types if arg_types is not _dummy else self.arg_types,
arg_kinds=arg_kinds if arg_kinds is not _dummy else self.arg_kinds,
@@ -326,14 +471,19 @@ class CallableType(FunctionLike):
name=name if name is not _dummy else self.name,
definition=definition if definition is not _dummy else self.definition,
variables=variables if variables is not _dummy else self.variables,
- bound_vars=bound_vars if bound_vars is not _dummy else self.bound_vars,
line=line if line is not _dummy else self.line,
is_ellipsis_args=(
is_ellipsis_args if is_ellipsis_args is not _dummy else self.is_ellipsis_args),
+ implicit=self.implicit,
+ is_classmethod_class=self.is_classmethod_class,
+ special_sig=special_sig if special_sig is not _dummy else self.special_sig,
)
def is_type_obj(self) -> bool:
- return self.fallback.type.fullname() == 'builtins.type'
+ return self.fallback.type is not None and self.fallback.type.fullname() == 'builtins.type'
+
+ def is_concrete_type_obj(self) -> bool:
+ return self.is_type_obj() and self.is_classmethod_class
def type_object(self) -> mypy.nodes.TypeInfo:
assert self.is_type_obj()
@@ -370,6 +520,41 @@ class CallableType(FunctionLike):
a.append(tv.id)
return a
+ def serialize(self) -> JsonDict:
+ # TODO: As an optimization, leave out everything related to
+ # generic functions for non-generic functions.
+ return {'.class': 'CallableType',
+ 'arg_types': [(None if t is None else t.serialize())
+ for t in self.arg_types],
+ 'arg_kinds': self.arg_kinds,
+ 'arg_names': self.arg_names,
+ 'ret_type': self.ret_type.serialize(),
+ 'fallback': self.fallback.serialize(),
+ 'name': self.name,
+ # We don't serialize the definition (only used for error messages).
+ 'variables': [v.serialize() for v in self.variables],
+ 'is_ellipsis_args': self.is_ellipsis_args,
+ 'implicit': self.implicit,
+ 'is_classmethod_class': self.is_classmethod_class,
+ }
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'CallableType':
+ assert data['.class'] == 'CallableType'
+ # TODO: Set definition to the containing SymbolNode?
+ return CallableType([(None if t is None else Type.deserialize(t))
+ for t in data['arg_types']],
+ data['arg_kinds'],
+ data['arg_names'],
+ Type.deserialize(data['ret_type']),
+ Instance.deserialize(data['fallback']),
+ name=data['name'],
+ variables=[TypeVarDef.deserialize(v) for v in data['variables']],
+ is_ellipsis_args=data['is_ellipsis_args'],
+ implicit=data['implicit'],
+ is_classmethod_class=data['is_classmethod_class'],
+ )
+
class Overloaded(FunctionLike):
"""Overloaded function type T1, ... Tn, where each Ti is CallableType.
@@ -412,6 +597,16 @@ class Overloaded(FunctionLike):
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_overloaded(self)
+ def serialize(self) -> JsonDict:
+ return {'.class': 'Overloaded',
+ 'items': [t.serialize() for t in self.items()],
+ }
+
+ @classmethod
+ def deserialize(self, data: JsonDict) -> 'Overloaded':
+ assert data['.class'] == 'Overloaded'
+ return Overloaded([CallableType.deserialize(t) for t in data['items']])
+
class TupleType(Type):
"""The tuple type Tuple[T1, ..., Tn] (at least one type argument).
@@ -441,6 +636,20 @@ class TupleType(Type):
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_tuple_type(self)
+ def serialize(self) -> JsonDict:
+ return {'.class': 'TupleType',
+ 'items': [t.serialize() for t in self.items],
+ 'fallback': self.fallback.serialize(),
+ 'implicit': self.implicit,
+ }
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'TupleType':
+ assert data['.class'] == 'TupleType'
+ return TupleType([Type.deserialize(t) for t in data['items']],
+ Instance.deserialize(data['fallback']),
+ implicit=data['implicit'])
+
class StarType(Type):
"""The star type *type_parameter.
@@ -512,10 +721,20 @@ class UnionType(Type):
TODO: Deal with attributes of TupleType etc.
TODO: This should probably be refactored to go elsewhere.
"""
- return all((isinstance(x, UnionType) and cast(UnionType, x).has_readable_member(name)) or
- (isinstance(x, Instance) and cast(Instance, x).type.has_readable_member(name))
+ return all((isinstance(x, UnionType) and x.has_readable_member(name)) or
+ (isinstance(x, Instance) and x.type.has_readable_member(name))
for x in self.items)
+ def serialize(self) -> JsonDict:
+ return {'.class': 'UnionType',
+ 'items': [t.serialize() for t in self.items],
+ }
+
+ @classmethod
+ def deserialize(cls, data: JsonDict) -> 'UnionType':
+ assert data['.class'] == 'UnionType'
+ return UnionType([Type.deserialize(t) for t in data['items']])
+
class PartialType(Type):
"""Type such as List[?] where type arguments are unknown, or partial None type.
@@ -555,6 +774,14 @@ class EllipsisType(Type):
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_ellipsis_type(self)
+ def serialize(self) -> JsonDict:
+ return {'.class': 'EllipsisType'}
+
+ @classmethod
+ def deserialize(self, data: JsonDict) -> 'EllipsisType':
+ assert data['.class'] == 'EllipsisType'
+ return EllipsisType()
+
#
# Visitor-related classes
@@ -679,8 +906,7 @@ class TypeTranslator(TypeVisitor[Type]):
def visit_callable_type(self, t: CallableType) -> Type:
return t.copy_modified(arg_types=self.translate_types(t.arg_types),
ret_type=t.ret_type.accept(self),
- variables=self.translate_variables(t.variables),
- bound_vars=self.translate_bound_vars(t.bound_vars))
+ variables=self.translate_variables(t.variables))
def visit_tuple_type(self, t: TupleType) -> Type:
return TupleType(self.translate_types(t.items),
@@ -699,10 +925,6 @@ class TypeTranslator(TypeVisitor[Type]):
def translate_types(self, types: List[Type]) -> List[Type]:
return [t.accept(self) for t in types]
- def translate_bound_vars(
- self, types: List[Tuple[int, Type]]) -> List[Tuple[int, Type]]:
- return [(id, t.accept(self)) for id, t in types]
-
def translate_variables(self,
variables: List[TypeVarDef]) -> List[TypeVarDef]:
return variables
@@ -726,7 +948,6 @@ class TypeStrVisitor(TypeVisitor[str]):
Do not preserve original formatting.
Notes:
- - Include implicit bound type variables of callables.
- Represent unbound types as Foo? or Foo?[...].
- Represent the NoneTyp type as None.
"""
@@ -763,7 +984,7 @@ class TypeStrVisitor(TypeVisitor[str]):
return "<Deleted '{}'>".format(t.source)
def visit_instance(self, t):
- s = t.type.fullname()
+ s = t.type.fullname() if t.type is not None else '<?>'
if t.erased:
s += '*'
if t.args != []:
@@ -805,13 +1026,6 @@ class TypeStrVisitor(TypeVisitor[str]):
if t.variables:
s = '{} {}'.format(t.variables, s)
- if t.bound_vars != []:
- # Include implicit bound type variables.
- a = []
- for i, bt in t.bound_vars:
- a.append('{}:{}'.format(i, bt))
- s = '[{}] {}'.format(', '.join(a), s)
-
return 'def {}'.format(s)
def visit_overloaded(self, t):
@@ -822,7 +1036,7 @@ class TypeStrVisitor(TypeVisitor[str]):
def visit_tuple_type(self, t):
s = self.list_str(t.items)
- if t.fallback:
+ if t.fallback and t.fallback.type:
fallback_name = t.fallback.type.fullname()
if fallback_name != 'builtins.tuple':
return 'Tuple[{}, fallback={}]'.format(s, t.fallback.accept(self))
@@ -983,4 +1197,5 @@ def replace_leading_arg_type(t: CallableType, self_type: Type) -> CallableType:
def is_named_instance(t: Type, fullname: str) -> bool:
return (isinstance(t, Instance) and
- cast(Instance, t).type.fullname() == fullname)
+ t.type is not None and
+ t.type.fullname() == fullname)
diff --git a/mypy/util.py b/mypy/util.py
index 33f9f3c..d8b10b8 100644
--- a/mypy/util.py
+++ b/mypy/util.py
@@ -7,7 +7,7 @@ from typing import TypeVar, List, Any, Tuple, Optional
T = TypeVar('T')
-ENCODING_RE = re.compile(br'(\s*#.*(\r\n?|\n))?\s*#.*coding[:=]\s*([-\w.]+)')
+ENCODING_RE = re.compile(br'([ \t\v]*#.*(\r\n?|\n))??[ \t\v]*#.*coding[:=][ \t]*([-\w.]+)')
default_python2_interpreter = ['python2', 'python', '/usr/bin/python']
diff --git a/mypy/version.py b/mypy/version.py
index e1424ed..58d168b 100644
--- a/mypy/version.py
+++ b/mypy/version.py
@@ -1 +1 @@
-__version__ = '0.3.1'
+__version__ = '0.4'
diff --git a/mypy/waiter.py b/mypy/waiter.py
index 2d9767a..50a949f 100644
--- a/mypy/waiter.py
+++ b/mypy/waiter.py
@@ -3,14 +3,15 @@
This is used for running mypy tests.
"""
-from typing import Dict, List, Optional, Tuple
+from typing import Dict, List, Optional, Set, Tuple
import os
import pipes
import re
-from subprocess import Popen, PIPE, STDOUT
+from subprocess import Popen, STDOUT
import sys
import tempfile
+import time
class WaiterError(Exception):
@@ -26,9 +27,12 @@ class LazySubprocess:
self.args = args
self.cwd = cwd
self.env = env
+ self.start_time = None # type: float
+ self.end_time = None # type: float
def start(self) -> None:
self.outfile = tempfile.NamedTemporaryFile()
+ self.start_time = time.time()
self.process = Popen(self.args, cwd=self.cwd, env=self.env,
stdout=self.outfile, stderr=STDOUT)
self.pid = self.process.pid
@@ -37,6 +41,7 @@ class LazySubprocess:
"""Update process exit status received via an external os.waitpid() call."""
# Inlined subprocess._handle_exitstatus, it's not a public API.
# TODO(jukka): I'm not quite sure why this is implemented like this.
+ self.end_time = time.time()
process = self.process
assert process.returncode is None
if os.WIFSIGNALED(status):
@@ -63,6 +68,10 @@ class LazySubprocess:
self.outfile.close()
assert not os.path.exists(self.outfile.name)
+ @property
+ def elapsed_time(self) -> float:
+ return self.end_time - self.start_time
+
class Noter:
"""Update stats about running jobs.
@@ -136,6 +145,8 @@ class Waiter:
assert limit > 0
self.xfail = set(xfail)
self._note = None # type: Noter
+ self.times1 = {} # type: Dict[str, float]
+ self.times2 = {} # type: Dict[str, float]
def add(self, cmd: LazySubprocess) -> int:
rv = len(self.queue)
@@ -159,6 +170,14 @@ class Waiter:
self._note.start(num)
self.next += 1
+ def _record_time(self, name: str, elapsed_time: float) -> None:
+ # The names we use are space-separated series of rather arbitrary words.
+ # They tend to start general and get more specific, so use that.
+ name1 = re.sub(' .*', '', name) # First word.
+ self.times1[name1] = elapsed_time + self.times1.get(name1, 0)
+ name2 = re.sub('( .*?) .*', r'\1', name) # First two words.
+ self.times2[name2] = elapsed_time + self.times2.get(name2, 0)
+
def _wait_next(self) -> Tuple[List[str], int, int]:
"""Wait for a single task to finish.
@@ -166,10 +185,12 @@ class Waiter:
"""
pid, status = os.waitpid(-1, 0)
num, cmd = self.current.pop(pid)
+ name = cmd.name
cmd.handle_exit_status(status)
- name = cmd.name
+ self._record_time(cmd.name, cmd.elapsed_time)
+
rc = cmd.wait()
if rc >= 0:
msg = 'EXIT %d' % rc
@@ -223,7 +244,7 @@ class Waiter:
sys.stdout.write(output + '\n')
sys.stdout.flush()
- def run(self) -> None:
+ def run(self) -> int:
if self.verbosity >= -1:
print('%-8s %d' % ('PARALLEL', self.limit))
sys.stdout.flush()
@@ -256,13 +277,15 @@ class Waiter:
print('*** FAILURE ***')
sys.stdout.flush()
if any('XFAIL' not in f for f in all_failures):
- sys.exit(1)
+ return 1
else:
print('SUMMARY all %d tasks and %d tests passed' % (
len(self.queue), total_tests))
print('*** OK ***')
sys.stdout.flush()
+ return 0
+
def parse_test_stats_from_output(output: str, fail_type: Optional[str]) -> Tuple[int, int]:
"""Parse tasks output and determine test counts.
diff --git a/setup.py b/setup.py
index 89554a6..69cbfb2 100644
--- a/setup.py
+++ b/setup.py
@@ -74,6 +74,10 @@ package_dir = {'mypy': 'mypy'}
if sys.version_info < (3, 5, 0):
package_dir[''] = 'lib-typing/3.2'
+scripts = ['scripts/mypy', 'scripts/stubgen']
+if os.name == 'nt':
+ scripts.append('scripts/mypy.bat')
+
setup(name='mypy-lang',
version=version,
description=description,
@@ -86,7 +90,7 @@ setup(name='mypy-lang',
package_dir=package_dir,
py_modules=['typing'] if sys.version_info < (3, 5, 0) else [],
packages=['mypy'],
- scripts=['scripts/mypy', 'scripts/stubgen'],
+ scripts=scripts,
data_files=data_files,
classifiers=classifiers,
)
diff --git a/typeshed/runtests.py b/typeshed/runtests.py
index 603f421..2d13744 100755
--- a/typeshed/runtests.py
+++ b/typeshed/runtests.py
@@ -17,10 +17,13 @@ import re
import sys
import argparse
-parser = argparse.ArgumentParser(description="Test runner for typeshed. Patterns are unanchored regexps on the full path.")
+parser = argparse.ArgumentParser(description="Test runner for typeshed. "
+ "Patterns are unanchored regexps on the full path.")
parser.add_argument('-v', '--verbose', action='count', default=0, help="More output")
parser.add_argument('-n', '--dry-run', action='store_true', help="Don't actually run mypy")
parser.add_argument('-x', '--exclude', type=str, nargs='*', help="Exclude pattern")
+parser.add_argument('-p', '--python-version', type=str, nargs='*',
+ help="These versions only (major[.minor])")
parser.add_argument('filter', type=str, nargs='*', help="Include pattern (default all)")
@@ -28,6 +31,7 @@ def log(args, *varargs):
if args.verbose >= 2:
print(*varargs)
+
def match(args, fn):
if not args.filter and not args.exclude:
log(args, fn, 'accept by default')
@@ -49,6 +53,20 @@ def match(args, fn):
return True
+def libpath(major, minor):
+ versions = ['%d.%d' % (major, minor)
+ for minor in reversed(range(minor + 1))]
+ versions.append(str(major))
+ versions.append('2and3')
+ paths = []
+ for v in versions:
+ for top in ['stdlib', 'third_party']:
+ p = os.path.join(top, v)
+ if os.path.isdir(p):
+ paths.append(p)
+ return paths
+
+
def main():
args = parser.parse_args()
@@ -58,37 +76,62 @@ def main():
print("Cannot import mypy. Did you install it?")
sys.exit(1)
- files2 = []
- files3 = []
- for dir, subdirs, files in os.walk('.'):
- for file in files:
- if file == '__builtin__.pyi':
- continue # Special case (alias for builtins.py).
- if file.endswith('.pyi') or file.endswith('.py'):
- full = os.path.join(dir, file)
- if match(args, full):
- if '/2' in dir:
- files2.append(full)
- if '/3' in dir or '/2and3' in dir:
- files3.append(full)
- if not (files2 or files3):
- print('--- nothing to do ---')
+ versions = [(3, 5), (3, 4), (3, 3), (3, 2), (2, 7)]
+ if args.python_version:
+ versions = [v for v in versions
+ if any(('%d.%d' % v).startswith(av) for av in args.python_version)]
+ if not versions:
+ print("--- no versions selected ---")
+ sys.exit(1)
+
code = 0
- for flags, files in [([], files3), (['--py2'], files2)]:
+ runs = 0
+ for major, minor in versions:
+ roots = libpath(major, minor)
+ files = []
+ seen = {'__builtin__', 'builtins', 'typing'} # Always ignore these.
+ for root in roots:
+ names = os.listdir(root)
+ for name in names:
+ full = os.path.join(root, name)
+ mod, ext = os.path.splitext(name)
+ if mod in seen:
+ continue
+ if ext in ['.pyi', '.py']:
+ if match(args, full):
+ seen.add(mod)
+ files.append(full)
+ elif (os.path.isfile(os.path.join(full, '__init__.pyi')) or
+ os.path.isfile(os.path.join(full, '__init__.py'))):
+ for r, ds, fs in os.walk(full):
+ ds.sort()
+ fs.sort()
+ for f in fs:
+ m, x = os.path.splitext(f)
+ if x in ['.pyi', '.py']:
+ fn = os.path.join(r, f)
+ if match(args, fn):
+ seen.add(mod)
+ files.append(fn)
if files:
+ runs += 1
+ flags = ['--python-version', '%d.%d' % (major, minor)]
sys.argv = ['mypy'] + flags + files
if args.verbose:
- print('running', ' '.join(sys.argv))
+ print("running", ' '.join(sys.argv))
else:
- print('running mypy', ' '.join(flags), '# with', len(files), 'files')
+ print("running mypy", ' '.join(flags), "# with", len(files), "files")
try:
if not args.dry_run:
mypy_main('')
except SystemExit as err:
code = max(code, err.code)
if code:
- print('--- exit status', code, '---')
+ print("--- exit status", code, "---")
sys.exit(code)
+ if not runs:
+ print("--- nothing to do; exit 1 ---")
+ sys.exit(1)
if __name__ == '__main__':
diff --git a/typeshed/stdlib/2.7/ConfigParser.pyi b/typeshed/stdlib/2.7/ConfigParser.pyi
new file mode 100644
index 0000000..f9c7ea4
--- /dev/null
+++ b/typeshed/stdlib/2.7/ConfigParser.pyi
@@ -0,0 +1,103 @@
+from typing import Any, Tuple
+
+__all__ = None # type: list[str]
+DEFAULTSECT = None # type: str
+MAX_INTERPOLATION_DEPTH = None # type: int
+
+class Error(Exception):
+ message = None # type: Any
+ def __init__(self, msg: str = ...) -> None: ...
+ def _get_message(self) -> None: ...
+ def _set_message(self, value: str) -> None: ...
+ def __repr__(self) -> str: ...
+ __str__ = __repr__
+
+class NoSectionError(Error):
+ section = ... # type: str
+ args = ... # type: Tuple[str]
+ def __init__(self, section: str) -> None: ...
+
+class DuplicateSectionError(Error):
+ section = ... # type: str
+ args = ... # type: Tuple[str]
+ def __init__(self, section: str) -> None: ...
+
+class NoOptionError(Error):
+ section = ... # type: str
+ option = ... # type: str
+ args = ... # type: Tuple[str,str]
+ def __init__(self, option: str, section: str) -> None: ...
+
+class InterpolationError(Error):
+ section = ... # type: str
+ option = ... # type: str
+ msg = ... # type: str
+ args = ... # type: Tuple[str,str,str]
+ def __init__(self, option: str, section: str, msg: str) -> None: ...
+
+class InterpolationMissingOptionError(InterpolationError):
+ reference = ... # type: str
+ args = ... # type: Tuple[str,str,str,str]
+ def __init__(self, option: str, section: str, rawval: str, reference: str) -> None: ...
+
+class InterpolationSyntaxError(InterpolationError): ...
+
+class InterpolationDepthError(InterpolationError):
+ def __init__(self, option: str, section: str, rawval: str) -> None: ...
+
+class ParsingError(Error):
+ filename = ... # type: str
+ errors = ... # type: list[Tuple[Any,Any]]
+ args = ... # type: Tuple[str]
+ def __init__(self, filename: str) -> None: ...
+ def append(self, lineno: Any, line: Any) -> None: ...
+
+class MissingSectionHeaderError(ParsingError):
+ lineno = ... # type: Any
+ line = ... # type: Any
+ args = ... # type: Tuple[str,Any,Any]
+ def __init__(self, filename: str, lineno: Any, line: Any) -> None: ...
+
+
+class RawConfigParser:
+ _dict = ... # type: Any
+ _sections = ... # type: dict
+ _defaults = ... # type: dict
+ _optcre = ... # type: Any
+ SECTCRE = ... # type: Any
+ OPTCRE = ... # type: Any
+ OPTCRE_NV = ... # type: Any
+ def __init__(self, defaults: dict[Any,Any] = ..., dict_type: Any = ..., allow_no_value: bool = ...) -> None: ...
+ def defaults(self) -> dict[Any,Any]: ...
+ def sections(self) -> list[str]: ...
+ def add_section(self, section: str) -> None: ...
+ def has_section(self, section: str) -> bool: ...
+ def options(self, section: str) -> list[str]: ...
+ def read(self, filenames: str) -> list[str]: ...
+ def readfp(self, fp: file, filename: str = ...) -> None: ...
+ def get(self, section: str, option: str) -> str: ...
+ def items(self, section: str) -> list[Tuple[Any,Any]]: ...
+ def _get(self, section: str, conv: type, option: str) -> Any: ...
+ def getint(self, section: str, option: str) -> int: ...
+ def getfloat(self, section: str, option: str) -> float: ...
+ _boolean_states = ... # type: dict[str,bool]
+ def getboolean(self, section: str, option: str) -> bool: ...
+ def optionxform(self, optionstr: str) -> str: ...
+ def has_option(self, section: str, option: str) -> bool: ...
+ def set(self, section: str, option: str, value: Any = ...) -> None: ...
+ def write(self, fp: file) -> None: ...
+ def remove_option(self, section: str, option: Any) -> bool: ...
+ def remove_section(self, section: str) -> bool: ...
+ def _read(self, fp: file, fpname: str) -> None: ...
+
+class ConfigParser(RawConfigParser):
+ _KEYCRE = ... # type: Any
+ def get(self, section: str, option: str, raw: bool = ..., vars: dict = ...) -> Any: ...
+ def items(self, section: str, raw: bool = ..., vars: dict = ...) -> list[Tuple[str,Any]]: ...
+ def _interpolate(self, section: str, option: str, rawval: Any, vars: Any) -> str: ...
+ def _interpolation_replace(self, match: Any) -> str: ...
+
+class SafeConfigParser(ConfigParser):
+ _interpvar_re = ... # type: Any
+ def _interpolate(self, section: str, option: str, rawval: Any, vars: Any) -> str: ...
+ def _interpolate_some(self, option: str, accum: list, rest: str, section: str, map: dict, depth: int) -> None: ...
diff --git a/typeshed/stdlib/2.7/Cookie.pyi b/typeshed/stdlib/2.7/Cookie.pyi
new file mode 100644
index 0000000..2aea075
--- /dev/null
+++ b/typeshed/stdlib/2.7/Cookie.pyi
@@ -0,0 +1,44 @@
+# Stubs for Cookie (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class CookieError(Exception): ...
+
+class Morsel(dict):
+ key = ... # type: Any
+ def __init__(self): ...
+ def __setitem__(self, K, V): ...
+ def isReservedKey(self, K): ...
+ value = ... # type: Any
+ coded_value = ... # type: Any
+ def set(self, key, val, coded_val, LegalChars=..., idmap=..., translate=...): ...
+ def output(self, attrs=None, header=''): ...
+ def js_output(self, attrs=None): ...
+ def OutputString(self, attrs=None): ...
+
+class BaseCookie(dict):
+ def value_decode(self, val): ...
+ def value_encode(self, val): ...
+ def __init__(self, input=None): ...
+ def __setitem__(self, key, value): ...
+ def output(self, attrs=None, header='', sep=''): ...
+ def js_output(self, attrs=None): ...
+ def load(self, rawdata): ...
+
+class SimpleCookie(BaseCookie):
+ def value_decode(self, val): ...
+ def value_encode(self, val): ...
+
+class SerialCookie(BaseCookie):
+ def __init__(self, input=None): ...
+ def value_decode(self, val): ...
+ def value_encode(self, val): ...
+
+class SmartCookie(BaseCookie):
+ def __init__(self, input=None): ...
+ def value_decode(self, val): ...
+ def value_encode(self, val): ...
+
+Cookie = ... # type: Any
diff --git a/typeshed/stdlib/2.7/Queue.pyi b/typeshed/stdlib/2.7/Queue.pyi
index c12ee7a..61ba510 100644
--- a/typeshed/stdlib/2.7/Queue.pyi
+++ b/typeshed/stdlib/2.7/Queue.pyi
@@ -1,13 +1,13 @@
# Stubs for Queue (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-from typing import Any
+from typing import Any, TypeVar, Generic
+
+_T = TypeVar('_T')
class Empty(Exception): ...
class Full(Exception): ...
-class Queue:
+class Queue(Generic[_T]):
maxsize = ... # type: Any
mutex = ... # type: Any
not_empty = ... # type: Any
@@ -20,10 +20,10 @@ class Queue:
def qsize(self) -> int: ...
def empty(self) -> bool: ...
def full(self) -> bool: ...
- def put(self, item: Any, block: bool = ..., timeout: float = ...) -> None: ...
- def put_nowait(self, item) -> None: ...
- def get(self, block: bool = ..., timeout: float = ...) -> Any: ...
- def get_nowait(self) -> Any: ...
+ def put(self, item: _T, block: bool = ..., timeout: float = ...) -> None: ...
+ def put_nowait(self, item: _T) -> None: ...
+ def get(self, block: bool = ..., timeout: float = ...) -> _T: ...
+ def get_nowait(self) -> _T: ...
class PriorityQueue(Queue): ...
class LifoQueue(Queue): ...
diff --git a/typeshed/stdlib/2.7/__builtin__.pyi b/typeshed/stdlib/2.7/__builtin__.pyi
index 546202b..e09ebd9 100644
--- a/typeshed/stdlib/2.7/__builtin__.pyi
+++ b/typeshed/stdlib/2.7/__builtin__.pyi
@@ -1,5 +1,8 @@
# Stubs for builtins (Python 2.7)
+# True and False are deliberately omitted because they are keywords in
+# Python 3, and stub files conform to Python 3 syntax.
+
from typing import (
TypeVar, Iterator, Iterable, overload,
Sequence, Mapping, Tuple, List, Any, Dict, Callable, Generic, Set,
@@ -46,8 +49,11 @@ class type:
@overload
def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any]) -> None: ...
# TODO: __new__ may have to be special and not a static method.
- @staticmethod
+ @overload
+ def __new__(cls, o: object) -> type: ...
+ @overload
def __new__(cls, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any]) -> type: ...
+ def __call__(self, *args: Any, **kwds: Any) -> Any: ...
class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
@overload
@@ -399,7 +405,7 @@ class bytearray(Sequence[int]):
def upper(self) -> bytearray: ...
def zfill(self, width: int) -> bytearray: ...
@staticmethod
- def fromhex(self, x: str) -> bytearray: ...
+ def fromhex(x: str) -> bytearray: ...
def __len__(self) -> int: ...
def __iter__(self) -> Iterator[int]: ...
@@ -509,12 +515,14 @@ class list(MutableSequence[_T], Generic[_T]):
def __le__(self, x: List[_T]) -> bool: ...
class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
+ # NOTE: Keyword arguments are special. If they are used, _KT must include
+ # str, but we have no way of enforcing it here.
@overload
- def __init__(self) -> None: ...
+ def __init__(self, **kwargs: _VT) -> None: ...
@overload
- def __init__(self, map: Mapping[_KT, _VT]) -> None: ...
+ def __init__(self, map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
@overload
- def __init__(self, iterable: Iterable[Tuple[_KT, _VT]]) -> None: ... # TODO keyword args
+ def __init__(self, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
def has_key(self, k: _KT) -> bool: ...
def clear(self) -> None: ...
@@ -523,8 +531,10 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
def pop(self, k: _KT, default: _VT = ...) -> _VT: ...
def popitem(self) -> Tuple[_KT, _VT]: ...
def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
- def update(self, m: Union[Mapping[_KT, _VT],
- Iterable[Tuple[_KT, _VT]]]) -> None: ...
+ @overload
+ def update(self, m: Mapping[_KT, _VT]) -> None: ...
+ @overload
+ def update(self, m: Iterable[Tuple[_KT, _VT]]) -> None: ...
def keys(self) -> List[_KT]: ...
def values(self) -> List[_VT]: ...
def items(self) -> List[Tuple[_KT, _VT]]: ...
@@ -629,10 +639,6 @@ class module:
__file__ = ... # type: str
__dict__ = ... # type: Dict[unicode, Any]
-True = ... # type: bool
-False = ... # type: bool
-__debug__ = False
-
long = int
bytes = str
@@ -714,7 +720,10 @@ def quit(code: int = ...) -> None: ...
def range(x: int, y: int = 0, step: int = 1) -> List[int]: ...
def raw_input(prompt: unicode = ...) -> str: ...
-def reduce(function: Callable[[_T, _T], _T], iterable: Iterable[_T], initializer: _T = None) -> _T: ...
+ at overload
+def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], initializer: _T) -> _T: ...
+ at overload
+def reduce(function: Callable[[_T, _T], _T], iterable: Iterable[_T]) -> _T: ...
def reload(module: Any) -> Any: ...
@overload
@@ -812,7 +821,11 @@ class OSError(EnvironmentError): ...
class WindowsError(OSError): ...
class OverflowError(ArithmeticError): ...
class ReferenceError(StandardError): ...
-class SyntaxError(StandardError): ...
+class SyntaxError(StandardError):
+ msg = ... # type: str
+ lineno = ... # type: int
+ offset = ... # type: int
+ text = ... # type: str
class IndentationError(SyntaxError): ...
class TabError(IndentationError): ...
class SystemError(StandardError): ...
@@ -836,7 +849,7 @@ class UnicodeWarning(Warning): ...
class BytesWarning(Warning): ...
class ResourceWarning(Warning): ...
-def eval(s: str) -> Any: ...
+def eval(s: str, globals: Dict[str, Any] = ..., locals: Dict[str, Any] = ...) -> Any: ...
def cmp(x: Any, y: Any) -> int: ...
@@ -852,7 +865,7 @@ class file(BinaryIO):
def __iter__(self) -> Iterator[str]: ...
def read(self, n: int = ...) -> str: ...
def __enter__(self) -> BinaryIO: ...
- def __exit__(self, typ, exc, tb) -> bool: ...
+ def __exit__(self, t: type = None, exc: BaseException = None, tb: Any = None) -> bool: ...
def flush(self) -> None: ...
def fileno(self) -> int: ...
def isatty(self) -> bool: ...
diff --git a/typeshed/stdlib/2.7/__future__.pyi b/typeshed/stdlib/2.7/__future__.pyi
index e863874..065057b 100644
--- a/typeshed/stdlib/2.7/__future__.pyi
+++ b/typeshed/stdlib/2.7/__future__.pyi
@@ -1,4 +1,8 @@
-class _Feature: ...
+from sys import _version_info
+
+class _Feature:
+ def getOptionalRelease(self) -> _version_info: ...
+ def getMandatoryRelease(self) -> _version_info: ...
absolute_import = None # type: _Feature
division = None # type: _Feature
diff --git a/typeshed/stdlib/2.7/_ast.pyi b/typeshed/stdlib/2.7/_ast.pyi
index bd8d259..26f9ce1 100644
--- a/typeshed/stdlib/2.7/_ast.pyi
+++ b/typeshed/stdlib/2.7/_ast.pyi
@@ -1,516 +1,328 @@
-from typing import Any
-from typing import Tuple as TypingTuple
+import typing
+from typing import Optional, Union
-__version__ = ... # type: int
+__version__ = ... # type: str
PyCF_ONLY_AST = ... # type: int
-class AST(object):
- _attributes = ... # type: TypingTuple[str]
- _fields = ... # type: TypingTuple[str]
- def __init__(self, *args, **kwargs) -> None: pass
+identifier = str
-class alias(AST):
- pass
-
-class arguments(AST):
- pass
-
-class boolop(AST):
- pass
-
-class cmpop(AST):
- pass
-
-class comprehension(AST):
- pass
+class AST:
+ _attributes = ... # type: typing.Tuple[str, ...]
+ _fields = ... # type: typing.Tuple[str, ...]
+ def __init__(self, *args, **kwargs) -> None: ...
-class excepthandler(AST):
- pass
-
-class expr(AST):
- pass
+class mod(AST):
+ ...
-class expr_context(AST):
- pass
+class Module(mod):
+ body = ... # type: typing.List[stmt]
-class keyword(AST):
- pass
+class Interactive(mod):
+ body = ... # type: typing.List[stmt]
-class mod(AST):
- pass
+class Expression(mod):
+ body = ... # type: expr
-class operator(AST):
- pass
+class Suite(mod):
+ body = ... # type: typing.List[stmt]
-class slice(AST):
- pass
class stmt(AST):
- pass
-
-class unaryop(AST):
- pass
+ lineno = ... # type: int
+ col_offset = ... # type: int
+class FunctionDef(stmt):
+ name = ... # type: identifier
+ args = ... # type: arguments
+ body = ... # type: typing.List[stmt]
+ decorator_list = ... # type: typing.List[expr]
-class Add(operator):
- def __init__(self) -> None:
- pass
+class ClassDef(stmt):
+ name = ... # type: identifier
+ bases = ... # type: typing.List[expr]
+ body = ... # type: typing.List[stmt]
+ decorator_list = ... # type: typing.List[expr]
-class And(boolop):
- def __init__(self) -> None:
- pass
+class Return(stmt):
+ value = ... # type: Optional[expr]
-class Assert(stmt):
- test = ... # type: Any
- msg = ... # type: Any
- def __init__(self, test = ..., msg = ...) -> None:
- pass
+class Delete(stmt):
+ targets = ... # type: typing.List[expr]
class Assign(stmt):
- targets = ... # type: Any
- value = ... # type: Any
- def __init__(self, targets = ..., value = ...) -> None:
- pass
-
-class Attribute(expr):
- value = ... # type: Any
- attr = ... # type: Any
- ctx = ... # type: Any
- def __init__(self, value = ..., attr = ..., ctx = ...) -> None:
- pass
+ targets = ... # type: typing.List[expr]
+ value = ... # type: expr
class AugAssign(stmt):
- target = ... # type: Any
- op = ... # type: Any
- value = ... # type: Any
- def __init__(self, target = ..., op = ..., value = ...) -> None:
- pass
-
-class AugLoad(expr_context):
- def __init__(self) -> None:
- pass
+ target = ... # type: expr
+ op = ... # type: operator
+ value = ... # type: expr
-class AugStore(expr_context):
- def __init__(self) -> None:
- pass
-
-class BinOp(expr):
- left = ... # type: Any
- op = ... # type: Any
- right = ... # type: Any
- def __init__(self, left = ..., op = ..., right = ...) -> None:
- pass
-
-class BitAnd(operator):
- def __init__(self) -> None:
- pass
-
-class BitOr(operator):
- def __init__(self) -> None:
- pass
-
-class BitXor(operator):
- def __init__(self) -> None:
- pass
+class Print(stmt):
+ dest = ... # type: Optional[expr]
+ values = ... # type: typing.List[expr]
+ nl = ... # type: bool
-class BoolOp(expr):
- op = ... # type: Any
- values = ... # type: Any
- def __init__(self, op = ..., values = ...) -> None:
- pass
+class For(stmt):
+ target = ... # type: expr
+ iter = ... # type: expr
+ body = ... # type: typing.List[stmt]
+ orelse = ... # type: typing.List[stmt]
-class Break(stmt):
- def __init__(self) -> None:
- pass
+class While(stmt):
+ test = ... # type: expr
+ body = ... # type: typing.List[stmt]
+ orelse = ... # type: typing.List[stmt]
-class Call(expr):
- func = ... # type: Any
- args = ... # type: Any
- keywords = ... # type: Any
- starargs = ... # type: Any
- kwargs = ... # type: Any
- def __init__(self, func = ..., args = ..., keywords = ..., starargs = ..., kwargs = ...) -> None:
- pass
+class If(stmt):
+ test = ... # type: expr
+ body = ... # type: typing.List[stmt]
+ orelse = ... # type: typing.List[stmt]
-class ClassDef(stmt):
- name = ... # type: Any
- bases = ... # type: Any
- body = ... # type: Any
- decorator_list = ... # type: Any
- def __init__(self, name = ..., bases = ..., body = ..., decorator_list = ...) -> None:
- pass
+class With(stmt):
+ context_expr = ... # type: expr
+ optional_vars = ... # type: Optional[expr]
+ body = ... # type: typing.List[stmt]
-class Compare(expr):
- left = ... # type: Any
- ops = ... # type: Any
- comparators = ... # type: Any
- def __init__(self, left = ..., ops = ..., comparators = ...) -> None:
- pass
+class Raise(stmt):
+ type = ... # type: Optional[expr]
+ inst = ... # type: Optional[expr]
+ tback = ... # type: Optional[expr]
-class Continue(stmt):
- def __init__(self) -> None:
- pass
+class TryExcept(stmt):
+ body = ... # type: typing.List[stmt]
+ handlers = ... # type: typing.List[ExceptHandler]
+ orelse = ... # type: typing.List[stmt]
-class Del(expr_context):
- def __init__(self) -> None:
- pass
+class TryFinally(stmt):
+ body = ... # type: typing.List[stmt]
+ finalbody = ... # type: typing.List[stmt]
-class Delete(stmt):
- targets = ... # type: Any
- def __init__(self, targets = ...) -> None:
- pass
+class Assert(stmt):
+ test = ... # type: expr
+ msg = ... # type: Optional[expr]
-class Dict(expr):
- keys = ... # type: Any
- values = ... # type: Any
- def __init__(self, keys = ..., values = ...) -> None:
- pass
+class Import(stmt):
+ names = ... # type: typing.List[alias]
-class DictComp(expr):
- key = ... # type: Any
- value = ... # type: Any
- generators = ... # type: Any
- def __init__(self, key = ..., value = ..., generators = ...) -> None:
- pass
-
-class Div(operator):
- def __init__(self) -> None:
- pass
-
-class Ellipsis(slice):
- def __init__(self) -> None:
- pass
-
-class Eq(cmpop):
- def __init__(self) -> None:
- pass
-
-class ExceptHandler(excepthandler):
- type = ... # type: Any
- name = ... # type: Any
- body = ... # type: Any
- def __init__(self, type = ..., name = ..., body = ...) -> None:
- pass
+class ImportFrom(stmt):
+ module = ... # type: Optional[identifier]
+ names = ... # type: typing.List[alias]
+ level = ... # type: Optional[int]
class Exec(stmt):
- body = ... # type: Any
- globals = ... # type: Any
- locals = ... # type: Any
- def __init__(self, body = ..., globals = ..., locals = ...) -> None:
- pass
-
-class Expr(stmt):
- value = ... # type: Any
- def __init__(self, value = ...) -> None:
- pass
-
-class Expression(mod):
- body = ... # type: Any
- def __init__(self, body = ...) -> None:
- pass
-
-class ExtSlice(slice):
- dims = ... # type: Any
- def __init__(self, dims = ...) -> None:
- pass
-
-class FloorDiv(operator):
- def __init__(self) -> None:
- pass
-
-class For(stmt):
- target = ... # type: Any
- iter = ... # type: Any
- body = ... # type: Any
- orelse = ... # type: Any
- def __init__(self, target = ..., iter = ..., body = ..., orelse = ...) -> None:
- pass
-
-class FunctionDef(stmt):
- name = ... # type: Any
- args = ... # type: Any
- body = ... # type: Any
- decorator_list = ... # type: Any
- def __init__(self, name = ..., args = ..., body = ..., decorator_list = ...) -> None:
- pass
-
-class GeneratorExp(expr):
- elt = ... # type: Any
- generators = ... # type: Any
- def __init__(self, elt = ..., generators = ...) -> None:
- pass
+ body = ... # type: expr
+ globals = ... # type: Optional[expr]
+ locals = ... # type: Optional[expr]
class Global(stmt):
- names = ... # type: Any
- def __init__(self, names = ...) -> None:
- pass
+ names = ... # type: typing.List[identifier]
-class Gt(cmpop):
- def __init__(self) -> None:
- pass
+class Expr(stmt):
+ value = ... # type: expr
-class GtE(cmpop):
- def __init__(self) -> None:
- pass
+class Pass(stmt): ...
+class Break(stmt): ...
+class Continue(stmt): ...
-class If(stmt):
- test = ... # type: Any
- body = ... # type: Any
- orelse = ... # type: Any
- def __init__(self, test = ..., body = ..., orelse = ...) -> None:
- pass
-class IfExp(expr):
- test = ... # type: Any
- body = ... # type: Any
- orelse = ... # type: Any
- def __init__(self, test = ..., body = ..., orelse = ...) -> None:
- pass
+class slice(AST):
+ ...
-class Import(stmt):
- names = ... # type: Any
- def __init__(self, names = ...) -> None:
- pass
+_slice = slice # this lets us type the variable named 'slice' below
-class ImportFrom(stmt):
- module = ... # type: Any
- names = ... # type: Any
- level = ... # type: Any
- def __init__(self, module = ..., names = ..., level = ...) -> None:
- pass
+class Slice(slice):
+ lower = ... # type: Optional[expr]
+ upper = ... # type: Optional[expr]
+ step = ... # type: Optional[expr]
-class In(cmpop):
- def __init__(self) -> None:
- pass
+class ExtSlice(slice):
+ dims = ... # type: typing.List[slice]
class Index(slice):
- value = ... # type: Any
- def __init__(self, value = ...) -> None:
- pass
+ value = ... # type: expr
-class Interactive(mod):
- body = ... # type: Any
- def __init__(self, body = ...) -> None:
- pass
+class Ellipsis(slice): ...
-class Invert(unaryop):
- def __init__(self) -> None:
- pass
-class Is(cmpop):
- def __init__(self) -> None:
- pass
+class expr(AST):
+ lineno = ... # type: int
+ col_offset = ... # type: int
-class IsNot(cmpop):
- def __init__(self) -> None:
- pass
+class BoolOp(expr):
+ op = ... # type: boolop
+ values = ... # type: typing.List[expr]
-class LShift(operator):
- def __init__(self) -> None:
- pass
+class BinOp(expr):
+ left = ... # type: expr
+ op = ... # type: operator
+ right = ... # type: expr
-class Lambda(expr):
- args = ... # type: Any
- body = ... # type: Any
- def __init__(self, args = ..., body = ...) -> None:
- pass
+class UnaryOp(expr):
+ op = ... # type: unaryop
+ operand = ... # type: expr
-class List(expr):
- elts = ... # type: Any
- ctx = ... # type: Any
- def __init__(self, elts = ..., ctx = ...) -> None:
- pass
+class Lambda(expr):
+ args = ... # type: arguments
+ body = ... # type: expr
-class ListComp(expr):
- elt = ... # type: Any
- generators = ... # type: Any
- def __init__(self, elt = ..., generators = ...) -> None:
- pass
+class IfExp(expr):
+ test = ... # type: expr
+ body = ... # type: expr
+ orelse = ... # type: expr
-class Load(expr_context):
- def __init__(self) -> None:
- pass
+class Dict(expr):
+ keys = ... # type: typing.List[expr]
+ values = ... # type: typing.List[expr]
-class Lt(cmpop):
- def __init__(self) -> None:
- pass
+class Set(expr):
+ elts = ... # type: typing.List[expr]
-class LtE(cmpop):
- def __init__(self) -> None:
- pass
+class ListComp(expr):
+ elt = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
-class Mod(operator):
- def __init__(self) -> None:
- pass
+class SetComp(expr):
+ elt = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
-class Module(mod):
- body = ... # type: Any
- def __init__(self, body = ...) -> None:
- pass
+class DictComp(expr):
+ key = ... # type: expr
+ value = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
-class Mult(operator):
- def __init__(self) -> None:
- pass
+class GeneratorExp(expr):
+ elt = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
-class Name(expr):
- id = ... # type: Any
- ctx = ... # type: Any
- def __init__(self, id = ..., ctx = ...) -> None:
- pass
+class Yield(expr):
+ value = ... # type: Optional[expr]
-class Not(unaryop):
- def __init__(self) -> None:
- pass
+class Compare(expr):
+ left = ... # type: expr
+ ops = ... # type: typing.List[cmpop]
+ comparators = ... # type: typing.List[expr]
-class NotEq(cmpop):
- def __init__(self) -> None:
- pass
+class Call(expr):
+ func = ... # type: expr
+ args = ... # type: typing.List[expr]
+ keywords = ... # type: typing.List[keyword]
+ starargs = ... # type: Optional[expr]
+ kwargs = ... # type: Optional[expr]
-class NotIn(cmpop):
- def __init__(self) -> None:
- pass
+class Repr(expr):
+ value = ... # type: expr
class Num(expr):
- n = ... # type: Any
- def __init__(self, n = ...) -> None:
- pass
-
-class Or(boolop):
- def __init__(self) -> None:
- pass
+ n = ... # type: Union[int, float]
-class Param(expr_context):
- def __init__(self) -> None:
- pass
+class Str(expr):
+ s = ... # type: str
-class Pass(stmt):
- def __init__(self) -> None:
- pass
+class Attribute(expr):
+ value = ... # type: expr
+ attr = ... # type: identifier
+ ctx = ... # type: expr_context
-class Pow(operator):
- def __init__(self) -> None:
- pass
+class Subscript(expr):
+ value = ... # type: expr
+ slice = ... # type: _slice
+ ctx = ... # type: expr_context
-class Print(stmt):
- dest = ... # type: Any
- values = ... # type: Any
- nl = ... # type: Any
- def __init__(self, dest = ..., values = ..., nl = ...) -> None:
- pass
+class Name(expr):
+ id = ... # type: identifier
+ ctx = ... # type: expr_context
-class RShift(operator):
- def __init__(self) -> None:
- pass
+class List(expr):
+ elts = ... # type: typing.List[expr]
+ ctx = ... # type: expr_context
-class Raise(stmt):
- type = ... # type: Any
- inst = ... # type: Any
- tback = ... # type: Any
- def __init__(self, type = ..., inst = ..., tback = ...) -> None:
- pass
+class Tuple(expr):
+ elts = ... # type: typing.List[expr]
+ ctx = ... # type: expr_context
-class Repr(expr):
- value = ... # type: Any
- def __init__(self, value = ...) -> None:
- pass
-class Return(stmt):
- value = ... # type: Any
- def __init__(self, value = ...) -> None:
- pass
+class expr_context(AST):
+ ...
-class Set(expr):
- elts = ... # type: Any
- def __init__(self, elts = ...) -> None:
- pass
+class AugLoad(expr_context): ...
+class AugStore(expr_context): ...
+class Del(expr_context): ...
+class Load(expr_context): ...
+class Param(expr_context): ...
+class Store(expr_context): ...
-class SetComp(expr):
- elt = ... # type: Any
- generators = ... # type: Any
- def __init__(self, elt = ..., generators = ...) -> None:
- pass
-class Slice(slice):
- lower = ... # type: Any
- upper = ... # type: Any
- step = ... # type: Any
- def __init__(self, lower = ..., upper = ..., step = ...) -> None:
- pass
+class boolop(AST):
+ ...
-class Store(expr_context):
- def __init__(self) -> None:
- pass
+class And(boolop): ...
+class Or(boolop): ...
-class Str(expr):
- s = ... # type: Any
- def __init__(self, s = ...) -> None:
- pass
+class operator(AST):
+ ...
+
+class Add(operator): ...
+class BitAnd(operator): ...
+class BitOr(operator): ...
+class BitXor(operator): ...
+class Div(operator): ...
+class FloorDiv(operator): ...
+class LShift(operator): ...
+class Mod(operator): ...
+class Mult(operator): ...
+class Pow(operator): ...
+class RShift(operator): ...
+class Sub(operator): ...
-class Sub(operator):
- def __init__(self) -> None:
- pass
+class unaryop(AST):
+ ...
-class Subscript(expr):
- value = ... # type: Any
- slice = ... # type: Any
- ctx = ... # type: Any
- def __init__(self, value = ..., slice = ..., ctx = ...) -> None:
- pass
+class Invert(unaryop): ...
+class Not(unaryop): ...
+class UAdd(unaryop): ...
+class USub(unaryop): ...
-class Suite(mod):
- body = ... # type: Any
- def __init__(self, body = ...) -> None:
- pass
+class cmpop(AST):
+ ...
-class TryExcept(stmt):
- body = ... # type: Any
- handlers = ... # type: Any
- orelse = ... # type: Any
- def __init__(self, body = ..., handlers = ..., orelse = ...) -> None:
- pass
+class Eq(cmpop): ...
+class Gt(cmpop): ...
+class GtE(cmpop): ...
+class In(cmpop): ...
+class Is(cmpop): ...
+class IsNot(cmpop): ...
+class Lt(cmpop): ...
+class LtE(cmpop): ...
+class NotEq(cmpop): ...
+class NotIn(cmpop): ...
-class TryFinally(stmt):
- body = ... # type: Any
- finalbody = ... # type: Any
- def __init__(self, body = ..., finalbody = ...) -> None:
- pass
-class Tuple(expr):
- elts = ... # type: Any
- ctx = ... # type: Any
- def __init__(self, elts = ..., ctx = ...) -> None:
- pass
+class comprehension(AST):
+ target = ... # type: expr
+ iter = ... # type: expr
+ ifs = ... # type: typing.List[expr]
-class UAdd(unaryop):
- def __init__(self) -> None:
- pass
-class USub(unaryop):
- def __init__(self) -> None:
- pass
+class ExceptHandler(AST):
+ type = ... # type: Optional[expr]
+ name = ... # type: Optional[expr]
+ body = ... # type: typing.List[stmt]
+ lineno = ... # type: int
+ col_offset = ... # type: int
-class UnaryOp(expr):
- op = ... # type: Any
- operand = ... # type: Any
- def __init__(self, op = ..., operand = ...) -> None:
- pass
-class While(stmt):
- test = ... # type: Any
- body = ... # type: Any
- orelse = ... # type: Any
- def __init__(self, test = ..., body = ..., orelse = ...) -> None:
- pass
+class arguments(AST):
+ args = ... # type: typing.List[expr]
+ vararg = ... # type: Optional[identifier]
+ kwarg = ... # type: Optional[identifier]
+ defaults = ... # type: typing.List[expr]
-class With(stmt):
- context_expr = ... # type: Any
- optional_vars = ... # type: Any
- body = ... # type: Any
- def __init__(self, context_expr = ..., optional_vars = ..., body = ...) -> None:
- pass
+class keyword(AST):
+ arg = ... # type: identifier
+ value = ... # type: expr
-class Yield(expr):
- value = ... # type: Any
- def __init__(self, value = ...) -> None:
- pass
+class alias(AST):
+ name = ... # type: identifier
+ asname = ... # type: Optional[identifier]
diff --git a/typeshed/stdlib/2.7/_functools.pyi b/typeshed/stdlib/2.7/_functools.pyi
index d6245db..555d3e1 100644
--- a/typeshed/stdlib/2.7/_functools.pyi
+++ b/typeshed/stdlib/2.7/_functools.pyi
@@ -1,15 +1,16 @@
"""Stub file for the '_functools' module."""
-from typing import Any, Callable, Dict, Iterator, Optional, TypeVar, Tuple, overload
+from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Tuple, overload
_T = TypeVar("_T")
+_S = TypeVar("_S")
@overload
def reduce(function: Callable[[_T, _T], _T],
- sequence: Iterator[_T]) -> _T: ...
+ sequence: Iterable[_T]) -> _T: ...
@overload
-def reduce(function: Callable[[_T, _T], _T],
- sequence: Iterator[_T], initial: _T) -> _T: ...
+def reduce(function: Callable[[_T, _S], _T],
+ sequence: Iterable[_S], initial: _T) -> _T: ...
class partial(object):
func = ... # type: Callable[..., Any]
diff --git a/typeshed/stdlib/2.7/_weakref.pyi b/typeshed/stdlib/2.7/_weakref.pyi
index d2c457b..9ed024e 100644
--- a/typeshed/stdlib/2.7/_weakref.pyi
+++ b/typeshed/stdlib/2.7/_weakref.pyi
@@ -1,4 +1,6 @@
-from typing import Any, Callable
+from typing import Any, Callable, Generic, Optional, TypeVar
+
+_T = TypeVar('_T')
class CallableProxyType(object): # "weakcallableproxy"
pass
@@ -6,8 +8,11 @@ class CallableProxyType(object): # "weakcallableproxy"
class ProxyType(object): # "weakproxy"
pass
-class ReferenceType(object): # "weakref"
- pass
+class ReferenceType(Generic[_T]):
+ # TODO rest of members
+ def __init__(self, o: _T, callback: Callable[[ReferenceType[_T]],
+ Any] = ...) -> None: ...
+ def __call__(self) -> Optional[_T]: ...
ref = ReferenceType
diff --git a/typeshed/stdlib/2.7/_weakrefset.pyi b/typeshed/stdlib/2.7/_weakrefset.pyi
index a3de693..d0689f1 100644
--- a/typeshed/stdlib/2.7/_weakrefset.pyi
+++ b/typeshed/stdlib/2.7/_weakrefset.pyi
@@ -2,4 +2,4 @@ from typing import Iterator, Any
class WeakSet:
def __iter__(self) -> Iterator[Any]: ...
- def add(self, *args, **kwargs) -> Any: ...
+ def add(self, *args: Any, **kwargs: Any) -> Any: ...
diff --git a/typeshed/stdlib/2.7/abc.pyi b/typeshed/stdlib/2.7/abc.pyi
index 66f7183..0eb26f0 100644
--- a/typeshed/stdlib/2.7/abc.pyi
+++ b/typeshed/stdlib/2.7/abc.pyi
@@ -7,7 +7,7 @@ WeakSet = ... # type: _weakrefset.WeakSet
_InstanceType = ... # type: type
types = ... # type: module
-def abstractmethod(funcobj) -> Any: ...
+def abstractmethod(funcobj: Any) -> Any: ...
class ABCMeta(type):
# TODO: FrozenSet
@@ -18,10 +18,10 @@ class ABCMeta(type):
_abc_negative_cache = ... # type: _weakrefset.WeakSet
_abc_negative_cache_version = ... # type: int
_abc_registry = ... # type: _weakrefset.WeakSet
- def __init__(self, name, bases, namespace: Dict[Any, Any]) -> None: ...
- def __instancecheck__(cls: "ABCMeta", instance) -> Any: ...
- def __subclasscheck__(cls: "ABCMeta", subclass) -> Any: ...
- def _dump_registry(cls: "ABCMeta", *args, **kwargs) -> None: ...
+ def __init__(self, name: str, bases: Tuple[type, ...], namespace: Dict[Any, Any]) -> None: ...
+ def __instancecheck__(cls: "ABCMeta", instance: Any) -> Any: ...
+ def __subclasscheck__(cls: "ABCMeta", subclass: Any) -> Any: ...
+ def _dump_registry(cls: "ABCMeta", *args: Any, **kwargs: Any) -> None: ...
# TODO: subclass: Union["ABCMeta", type, Tuple[type, ...]]
def register(cls: "ABCMeta", subclass: Any) -> None: ...
@@ -30,7 +30,7 @@ class _C:
# TODO: The real abc.abstractproperty inherits from "property".
class abstractproperty(object):
- def __new__(cls, func): ...
+ def __new__(cls, func: Any) -> Any: ...
__doc__ = ... # type: str
__isabstractmethod__ = ... # type: bool
doc = ... # type: Any
diff --git a/typeshed/stdlib/2.7/argparse.pyi b/typeshed/stdlib/2.7/argparse.pyi
index 96b213c..a0b6f80 100644
--- a/typeshed/stdlib/2.7/argparse.pyi
+++ b/typeshed/stdlib/2.7/argparse.pyi
@@ -2,7 +2,7 @@
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
-from typing import Any, Sequence
+from typing import Any, Sequence, Union
SUPPRESS = ... # type: Any
OPTIONAL = ... # type: Any
@@ -125,7 +125,7 @@ class _ActionsContainer:
def get_default(self, dest): ...
def add_argument(self,
*args: str,
- action: str = ...,
+ action: Union[str, Action] = ...,
nargs: str = ...,
const: Any = ...,
default: Any = ...,
@@ -134,7 +134,9 @@ class _ActionsContainer:
required: bool = ...,
help: str = ...,
metavar: str = ...,
- dest: str = ...) -> None: ...
+ dest: str = ...,
+ version: str = ...
+ ) -> None: ...
def add_argument_group(self, *args, **kwargs): ...
def add_mutually_exclusive_group(self, **kwargs): ...
diff --git a/typeshed/stdlib/2.7/ast.pyi b/typeshed/stdlib/2.7/ast.pyi
index dbaf6af..1864d31 100644
--- a/typeshed/stdlib/2.7/ast.pyi
+++ b/typeshed/stdlib/2.7/ast.pyi
@@ -1,40 +1,45 @@
-# Automatically generated by pytype. May contain errors.
+# Python 2.7 ast
-from typing import Any, Tuple, Generator
+import typing
+from typing import Any, Tuple, Iterator, Union
from _ast import (
- AST, alias, arguments, boolop, cmpop, comprehension, excepthandler,
- expr, expr_context, keyword, mod, operator, slice, stmt, unaryop, Add,
- And, Assert, Assign, Attribute, AugAssign, AugLoad, AugStore, BinOp,
- BitAnd, BitOr, BitXor, BoolOp, Break, Call, ClassDef, Compare, Continue,
- Del, Delete, Dict, DictComp, Div, Ellipsis, Eq, ExceptHandler, Exec,
- Expr, Expression, ExtSlice, FloorDiv, For, FunctionDef, GeneratorExp,
- Global, Gt, GtE, If, IfExp, Import, ImportFrom, In, Index, Interactive,
- Invert, Is, IsNot, LShift, Lambda, List, ListComp, Load, Lt, LtE, Mod,
- Module, Mult, Name, Not, NotEq, NotIn, Num, Or, Param, Pass, Pow, Print,
- RShift, Raise, Repr, Return, Set, SetComp, Slice, Store, Str, Sub,
- Subscript, Suite, TryExcept, TryFinally, Tuple, UAdd, USub, UnaryOp,
- While, With, Yield)
-
-__version__ = ... # type: int
+ Add, alias, And, arguments, Assert, Assign, AST, Attribute, AugAssign,
+ AugLoad, AugStore, BinOp, BitAnd, BitOr, BitXor, BoolOp, boolop, Break,
+ Call, ClassDef, cmpop, Compare, comprehension, Continue, Del, Delete, Dict,
+ DictComp, Div, Ellipsis, Eq, ExceptHandler, Exec, Expr, expr, Expression,
+ expr_context, ExtSlice, FloorDiv, For, FunctionDef, GeneratorExp, Global,
+ Gt, GtE, If, IfExp, Import, ImportFrom, In, Index, Interactive, Invert, Is,
+ IsNot, keyword, Lambda, List, ListComp, Load, LShift, Lt, LtE, Mod, mod,
+ Module, Mult, Name, Not, NotEq, NotIn, Num, operator, Or, Param, Pass, Pow,
+ Print, Raise, Repr, Return, RShift, Set, SetComp, Slice, slice, stmt,
+ Store, Str, Sub, Subscript, Suite, TryExcept, TryFinally, Tuple, UAdd,
+ UnaryOp, unaryop, USub, While, With, Yield
+)
+
+__version__ = ... # type: str
PyCF_ONLY_AST = ... # type: int
-def copy_location(new_node, old_node) -> Any: ...
-def dump(node, *args, **kwargs) -> str: ...
-def fix_missing_locations(node) -> Any: ...
-def get_docstring(node, *args, **kwargs) -> Any: ...
-def increment_lineno(node, *args, **kwargs) -> Any: ...
-def iter_child_nodes(node) -> Generator[Any, Any, Any]: ...
-def iter_fields(node) -> Any: ... # TODO: Generator[Tuple[Any, ...]]: ...
-def literal_eval(node_or_string) -> Any: ...
-def parse(source, filename = ..., mode = ..., *args, **kwargs) -> AST: ...
-def walk(node) -> Any: ... # TODO: Generator[Any]: ...
-
-class NodeVisitor(object):
+
+def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> AST: ...
+def copy_location(new_node: AST, old_node: AST) -> AST: ...
+def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ...
+def fix_missing_locations(node: AST) -> AST: ...
+def get_docstring(node: AST, clean: bool = ...) -> str: ...
+def increment_lineno(node: AST, n: int = ...) -> AST: ...
+def iter_child_nodes(node: AST) -> Iterator[AST]: ...
+def iter_fields(node: AST) -> Iterator[typing.Tuple[str, Any]]: ...
+def literal_eval(node_or_string: Union[str, AST]) -> Any: ...
+def walk(node: AST) -> Iterator[AST]: ...
+
+class NodeVisitor():
__doc__ = ... # type: str
- def generic_visit(self, node) -> None: ...
- def visit(self, node) -> Any: ...
+ def visit(self, node: AST) -> Any: ...
+ def generic_visit(self, node: AST) -> None: ...
class NodeTransformer(NodeVisitor):
__doc__ = ... # type: str
- def generic_visit(self, node) -> Any: ...
+ def generic_visit(self, node: AST) -> None: ...
+
+
+
diff --git a/typeshed/stdlib/2.7/bisect.pyi b/typeshed/stdlib/2.7/bisect.pyi
new file mode 100644
index 0000000..5d3f2e5
--- /dev/null
+++ b/typeshed/stdlib/2.7/bisect.pyi
@@ -0,0 +1,6 @@
+def bisect(a: list, x: object, lo: int = ..., hi: int = ...) -> int: ...
+def bisect_left(a: list, x: object, lo: int = ..., hi: int = ...) -> int: ...
+def bisect_right(a: list, x: object, lo: int = ..., hi: int = ...) -> int: ...
+def insort_left(a: list, x: object, lo: int = ..., hi: int = ...) -> int: ...
+def insort_right(a: list, x: object, lo: int = ..., hi: int = ...) -> int: ...
+def insort(a: list, x: object, lo: int = ..., hi: int = ...) -> int: ...
diff --git a/typeshed/stdlib/2.7/builtins.pyi b/typeshed/stdlib/2.7/builtins.pyi
index 546202b..e09ebd9 100644
--- a/typeshed/stdlib/2.7/builtins.pyi
+++ b/typeshed/stdlib/2.7/builtins.pyi
@@ -1,5 +1,8 @@
# Stubs for builtins (Python 2.7)
+# True and False are deliberately omitted because they are keywords in
+# Python 3, and stub files conform to Python 3 syntax.
+
from typing import (
TypeVar, Iterator, Iterable, overload,
Sequence, Mapping, Tuple, List, Any, Dict, Callable, Generic, Set,
@@ -46,8 +49,11 @@ class type:
@overload
def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any]) -> None: ...
# TODO: __new__ may have to be special and not a static method.
- @staticmethod
+ @overload
+ def __new__(cls, o: object) -> type: ...
+ @overload
def __new__(cls, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any]) -> type: ...
+ def __call__(self, *args: Any, **kwds: Any) -> Any: ...
class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
@overload
@@ -399,7 +405,7 @@ class bytearray(Sequence[int]):
def upper(self) -> bytearray: ...
def zfill(self, width: int) -> bytearray: ...
@staticmethod
- def fromhex(self, x: str) -> bytearray: ...
+ def fromhex(x: str) -> bytearray: ...
def __len__(self) -> int: ...
def __iter__(self) -> Iterator[int]: ...
@@ -509,12 +515,14 @@ class list(MutableSequence[_T], Generic[_T]):
def __le__(self, x: List[_T]) -> bool: ...
class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
+ # NOTE: Keyword arguments are special. If they are used, _KT must include
+ # str, but we have no way of enforcing it here.
@overload
- def __init__(self) -> None: ...
+ def __init__(self, **kwargs: _VT) -> None: ...
@overload
- def __init__(self, map: Mapping[_KT, _VT]) -> None: ...
+ def __init__(self, map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
@overload
- def __init__(self, iterable: Iterable[Tuple[_KT, _VT]]) -> None: ... # TODO keyword args
+ def __init__(self, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
def has_key(self, k: _KT) -> bool: ...
def clear(self) -> None: ...
@@ -523,8 +531,10 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
def pop(self, k: _KT, default: _VT = ...) -> _VT: ...
def popitem(self) -> Tuple[_KT, _VT]: ...
def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
- def update(self, m: Union[Mapping[_KT, _VT],
- Iterable[Tuple[_KT, _VT]]]) -> None: ...
+ @overload
+ def update(self, m: Mapping[_KT, _VT]) -> None: ...
+ @overload
+ def update(self, m: Iterable[Tuple[_KT, _VT]]) -> None: ...
def keys(self) -> List[_KT]: ...
def values(self) -> List[_VT]: ...
def items(self) -> List[Tuple[_KT, _VT]]: ...
@@ -629,10 +639,6 @@ class module:
__file__ = ... # type: str
__dict__ = ... # type: Dict[unicode, Any]
-True = ... # type: bool
-False = ... # type: bool
-__debug__ = False
-
long = int
bytes = str
@@ -714,7 +720,10 @@ def quit(code: int = ...) -> None: ...
def range(x: int, y: int = 0, step: int = 1) -> List[int]: ...
def raw_input(prompt: unicode = ...) -> str: ...
-def reduce(function: Callable[[_T, _T], _T], iterable: Iterable[_T], initializer: _T = None) -> _T: ...
+ at overload
+def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], initializer: _T) -> _T: ...
+ at overload
+def reduce(function: Callable[[_T, _T], _T], iterable: Iterable[_T]) -> _T: ...
def reload(module: Any) -> Any: ...
@overload
@@ -812,7 +821,11 @@ class OSError(EnvironmentError): ...
class WindowsError(OSError): ...
class OverflowError(ArithmeticError): ...
class ReferenceError(StandardError): ...
-class SyntaxError(StandardError): ...
+class SyntaxError(StandardError):
+ msg = ... # type: str
+ lineno = ... # type: int
+ offset = ... # type: int
+ text = ... # type: str
class IndentationError(SyntaxError): ...
class TabError(IndentationError): ...
class SystemError(StandardError): ...
@@ -836,7 +849,7 @@ class UnicodeWarning(Warning): ...
class BytesWarning(Warning): ...
class ResourceWarning(Warning): ...
-def eval(s: str) -> Any: ...
+def eval(s: str, globals: Dict[str, Any] = ..., locals: Dict[str, Any] = ...) -> Any: ...
def cmp(x: Any, y: Any) -> int: ...
@@ -852,7 +865,7 @@ class file(BinaryIO):
def __iter__(self) -> Iterator[str]: ...
def read(self, n: int = ...) -> str: ...
def __enter__(self) -> BinaryIO: ...
- def __exit__(self, typ, exc, tb) -> bool: ...
+ def __exit__(self, t: type = None, exc: BaseException = None, tb: Any = None) -> bool: ...
def flush(self) -> None: ...
def fileno(self) -> int: ...
def isatty(self) -> bool: ...
diff --git a/typeshed/stdlib/2.7/cPickle.pyi b/typeshed/stdlib/2.7/cPickle.pyi
index 583cb8b..fb129d1 100644
--- a/typeshed/stdlib/2.7/cPickle.pyi
+++ b/typeshed/stdlib/2.7/cPickle.pyi
@@ -7,10 +7,19 @@ format_version = ... # type: str
class Pickler:
def __init__(self, file: IO[str], protocol: int = ...) -> None: ...
+ def dump(self, obj: Any) -> None: ...
+
+ def clear_memo(self) -> None: ...
+
class Unpickler:
def __init__(self, file: IO[str]) -> None: ...
+ def load(self) -> Any: ...
+
+ def noload(self) -> Any: ...
+
+
def dump(obj: Any, file: IO[str], protocol: int = ...) -> None: ...
def dumps(obj: Any, protocol: int = ...) -> str: ...
def load(file: IO[str]) -> Any: ...
diff --git a/typeshed/stdlib/2.7/calendar.pyi b/typeshed/stdlib/2.7/calendar.pyi
new file mode 100644
index 0000000..d5d1813
--- /dev/null
+++ b/typeshed/stdlib/2.7/calendar.pyi
@@ -0,0 +1,75 @@
+from typing import Any, Iterable, List, Optional, Tuple
+import datetime
+
+LocaleType = Tuple[Optional[str], Optional[str]]
+
+class IllegalMonthError(ValueError):
+ def __init__(self, month: int) -> None: ...
+ def __str__(self) -> str: ...
+
+class IllegalWeekdayError(ValueError):
+ def __init__(self, weekday: int) -> None: ...
+ def __str__(self) -> str: ...
+
+def isleap(year: int) -> bool: ...
+def leapdays(y1: int, y2: int) -> int: ...
+def weekday(year: int, month: int, day: int) -> int: ...
+def monthrange(year: int, month: int) -> Tuple[int, int]: ...
+
+class Calendar(object):
+ def __init__(self, firstweekday: int = 0) -> None: ...
+ def getfirstweekday(self) -> int: ...
+ def setfirstweekday(self, firstweekday: int) -> None: ...
+ def iterweekdays(self) -> Iterable[int]: ...
+ def itermonthdates(self, year: int, month: int) -> Iterable[datetime.date]: ...
+ def itermonthdays2(self, year: int, month: int) -> Iterable[Tuple[int, int]]: ...
+ def itermonthdays(self, year: int, month: int) -> Iterable[int]: ...
+ def monthdatescalendar(self, year: int, month: int) -> List[List[datetime.date]]: ...
+ def monthdays2calendar(self, year: int, month: int) -> List[List[Tuple[int, int]]]: ...
+ def monthdayscalendar(self, year: int, month: int) -> List[List[int]]: ...
+ def yeardatescalendar(self, year: int, width: int = 3) -> List[List[int]]: ...
+ def yeardays2calendar(self, year: int, width: int = 3) -> List[List[Tuple[int, int]]]: ...
+ def yeardayscalendar(self, year: int, width: int = 3) -> List[List[int]]: ...
+
+class TextCalendar(Calendar):
+ def prweek(self, theweek: int, width: int) -> None: ...
+ def formatday(self, day: int, weekday: int, width: int) -> str: ...
+ def formatweek(self, theweek: int, width: int) -> str: ...
+ def formatweekday(self, day: int, width: int) -> str: ...
+ def formatweekheader(self, width: int) -> str: ...
+ def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = ...) -> str: ...
+ def prmonth(self, theyear: int, themonth: int, w: Any=0, l: Any = 0) -> None: ...
+ def formatmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: ...
+ def formatyear(self, theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: ...
+ def pryear(self, theyear: int, w: Any = 0, l: Any = 0, c: Any = 6, m: Any = 3) -> None: ...
+
+class HTMLCalendar(Calendar):
+ def formatday(self, day: int, weekday: int) -> str: ...
+ def formatweek(self, theweek: int) -> str: ...
+ def formatweekday(self, day: int) -> str: ...
+ def formatweekheader(self) -> str: ...
+ def formatmonthname(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ...
+ def formatmonth(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ...
+ def formatyear(self, theyear: int, width: int = 3) -> str: ...
+ def formatyearpage(self, theyear: int, width: int = 3, css: Optional[str] = 'calendar.css', encoding: Optional[str] = ...) -> str: ...
+
+class TimeEncoding:
+ def __init__(self, locale: LocaleType) -> None: ...
+ def __enter__(self) -> LocaleType: ...
+ def __exit__(self, *args) -> None: ...
+
+class LocaleTextCalendar(TextCalendar):
+ def __init__(self, firstweekday: int = 0, locale: Optional[LocaleType] = ...) -> None: ...
+ def formatweekday(self, day: int, width: int) -> str: ...
+ def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = ...) -> str: ...
+
+class LocaleHTMLCalendar(HTMLCalendar):
+ def __init__(self, firstweekday: int = 0, locale: Optional[LocaleType] = ...) -> None: ...
+ def formatweekday(self, day: int) -> str: ...
+ def formatmonthname(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ...
+
+c = ... # type: TextCalendar
+def setfirstweekday(firstweekday: int) -> None: ...
+def format(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ...
+def formatstring(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ...
+def timegm(tuple: Tuple[int, ...]) -> int: ...
diff --git a/typeshed/stdlib/2.7/codecs.pyi b/typeshed/stdlib/2.7/codecs.pyi
index bd576ad..582bf40 100644
--- a/typeshed/stdlib/2.7/codecs.pyi
+++ b/typeshed/stdlib/2.7/codecs.pyi
@@ -188,6 +188,10 @@ class StreamReader(Codec):
class StreamReaderWriter:
def __init__(self, stream: BinaryIO, Reader: _stream_reader_type, Writer: _stream_writer_type, errors: str = ...) -> None:
...
+ def __enter__(self) -> BinaryIO:
+ ...
+ def __exit__(self, typ, exc, tb) -> bool:
+ ...
class StreamRecoder(BinaryIO):
def __init__(self, stream: BinaryIO, encode: _encode_type, decode: _decode_type, Reader: _stream_reader_type, Writer: _stream_writer_type, errors: str = ...) -> None:
diff --git a/typeshed/stdlib/2.7/collections.pyi b/typeshed/stdlib/2.7/collections.pyi
index 69f1367..0d7182e 100644
--- a/typeshed/stdlib/2.7/collections.pyi
+++ b/typeshed/stdlib/2.7/collections.pyi
@@ -63,9 +63,10 @@ class Counter(Dict[_T, int], Generic[_T]):
# it's included so that the signature is compatible with
# Dict.update. Not sure if we should use '# type: ignore' instead
# and omit the type from the union.
- def update(self, m: Union[Mapping[_T, int],
- Iterable[Tuple[_T, int]],
- Iterable[_T]]) -> None: ...
+ @overload
+ def update(self, m: Mapping[_T, int]) -> None: ...
+ @overload
+ def update(self, m: Union[Iterable[_T], Iterable[Tuple[_T, int]]]) -> None: ...
class OrderedDict(Dict[_KT, _VT], Generic[_KT, _VT]):
def popitem(self, last: bool = ...) -> Tuple[_KT, _VT]: ...
diff --git a/typeshed/stdlib/2.7/contextlib.pyi b/typeshed/stdlib/2.7/contextlib.pyi
index af9f0ab..4400098 100644
--- a/typeshed/stdlib/2.7/contextlib.pyi
+++ b/typeshed/stdlib/2.7/contextlib.pyi
@@ -2,14 +2,16 @@
# NOTE: These are incomplete!
-from typing import Any, TypeVar, Generic
-
-# TODO more precise type?
-def contextmanager(func: Any) -> Any: ...
+from typing import Callable, Generic, Iterator, TypeVar
_T = TypeVar('_T')
-class closing(Generic[_T]):
- def __init__(self, thing: _T) -> None: ...
+class ContextManager(Generic[_T]):
def __enter__(self) -> _T: ...
def __exit__(self, *exc_info) -> None: ...
+
+# TODO this doesn't capture the relationship that the returned function's args are the same as func's.
+def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., ContextManager[_T]]: ...
+
+class closing(ContextManager[_T], Generic[_T]):
+ def __init__(self, thing: _T) -> None: ...
diff --git a/typeshed/stdlib/2.7/copy.pyi b/typeshed/stdlib/2.7/copy.pyi
index 237f420..0661cb7 100644
--- a/typeshed/stdlib/2.7/copy.pyi
+++ b/typeshed/stdlib/2.7/copy.pyi
@@ -2,9 +2,9 @@
# NOTE: These are incomplete!
-from typing import TypeVar
+from typing import TypeVar, Dict, Any
_T = TypeVar('_T')
-def deepcopy(x: _T) -> _T: ...
+def deepcopy(x: _T, memo: Dict[Any, Any] = ...) -> _T: ...
def copy(x: _T) -> _T: ...
diff --git a/typeshed/stdlib/2.7/email/_parseaddr.pyi b/typeshed/stdlib/2.7/email/_parseaddr.pyi
new file mode 100644
index 0000000..dcb59d1
--- /dev/null
+++ b/typeshed/stdlib/2.7/email/_parseaddr.pyi
@@ -0,0 +1,44 @@
+# Stubs for email._parseaddr (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+def parsedate_tz(data): ...
+def parsedate(data): ...
+def mktime_tz(data): ...
+def quote(str): ...
+
+class AddrlistClass:
+ specials = ... # type: Any
+ pos = ... # type: Any
+ LWS = ... # type: Any
+ CR = ... # type: Any
+ FWS = ... # type: Any
+ atomends = ... # type: Any
+ phraseends = ... # type: Any
+ field = ... # type: Any
+ commentlist = ... # type: Any
+ def __init__(self, field): ...
+ def gotonext(self): ...
+ def getaddrlist(self): ...
+ def getaddress(self): ...
+ def getrouteaddr(self): ...
+ def getaddrspec(self): ...
+ def getdomain(self): ...
+ def getdelimited(self, beginchar, endchars, allowcomments=True): ...
+ def getquote(self): ...
+ def getcomment(self): ...
+ def getdomainliteral(self): ...
+ def getatom(self, atomends=None): ...
+ def getphraselist(self): ...
+
+class AddressList(AddrlistClass):
+ addresslist = ... # type: Any
+ def __init__(self, field): ...
+ def __len__(self): ...
+ def __add__(self, other): ...
+ def __iadd__(self, other): ...
+ def __sub__(self, other): ...
+ def __isub__(self, other): ...
+ def __getitem__(self, index): ...
diff --git a/typeshed/stdlib/2.7/email/utils.pyi b/typeshed/stdlib/2.7/email/utils.pyi
new file mode 100644
index 0000000..33e8894
--- /dev/null
+++ b/typeshed/stdlib/2.7/email/utils.pyi
@@ -0,0 +1,22 @@
+# Stubs for email.utils (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from email._parseaddr import AddressList as _AddressList
+from email._parseaddr import mktime_tz as mktime_tz
+from email._parseaddr import parsedate as _parsedate
+from email._parseaddr import parsedate_tz as _parsedate_tz
+from quopri import decodestring as _qdecode
+
+def formataddr(pair): ...
+def getaddresses(fieldvalues): ...
+def formatdate(timeval=None, localtime=False, usegmt=False): ...
+def make_msgid(idstring=None): ...
+def parsedate(data): ...
+def parsedate_tz(data): ...
+def parseaddr(addr): ...
+def unquote(str): ...
+def decode_rfc2231(s): ...
+def encode_rfc2231(s, charset=None, language=None): ...
+def decode_params(params): ...
+def collapse_rfc2231_value(value, errors='', fallback_charset=''): ...
diff --git a/typeshed/stdlib/2.7/fcntl.pyi b/typeshed/stdlib/2.7/fcntl.pyi
index 5a1a536..5e7da7f 100644
--- a/typeshed/stdlib/2.7/fcntl.pyi
+++ b/typeshed/stdlib/2.7/fcntl.pyi
@@ -1,4 +1,4 @@
-from typing import Union
+from typing import Any, Union
import io
FASYNC = ... # type: int
@@ -74,12 +74,14 @@ LOCK_WRITE = ... # type: int
_ANYFILE = Union[int, io.IOBase]
-def fcntl(fd: _ANYFILE, op: int, arg: Union[int, str] = ...) -> Union[int, str]: ...
+# TODO All these return either int or bytes depending on the value of
+# cmd (not on the type of arg).
+def fcntl(fd: _ANYFILE, op: int, arg: Union[int, bytes] = ...) -> Any: ...
# TODO: arg: int or read-only buffer interface or read-write buffer interface
-def ioctl(fd: _ANYFILE, op: int, arg: Union[int, str] = ...,
- mutate_flag: bool = ...) -> Union[int, str]: ...
+def ioctl(fd: _ANYFILE, op: int, arg: Union[int, bytes] = ...,
+ mutate_flag: bool = ...) -> Any: ...
def flock(fd: _ANYFILE, op: int) -> None: ...
def lockf(fd: _ANYFILE, op: int, length: int = ..., start: int = ...,
- whence: int = ...) -> Union[int, str]: ...
+ whence: int = ...) -> Any: ...
diff --git a/typeshed/stdlib/2.7/functools.pyi b/typeshed/stdlib/2.7/functools.pyi
index f4ad487..c163764 100644
--- a/typeshed/stdlib/2.7/functools.pyi
+++ b/typeshed/stdlib/2.7/functools.pyi
@@ -3,14 +3,19 @@
# NOTE: These are incomplete!
from abc import ABCMeta, abstractmethod
-from typing import Any, Callable, Generic, Dict, Iterator, Optional, Sequence, Tuple, TypeVar
+from typing import Any, Callable, Generic, Dict, Iterable, Optional, Sequence, Tuple, TypeVar, overload
from collections import namedtuple
_AnyCallable = Callable[..., Any]
_T = TypeVar("_T")
-def reduce(function: Callable[[_T], _T],
- sequence: Iterator[_T], initial: Optional[_T] = ...) -> _T: ...
+_S = TypeVar("_S")
+@overload
+def reduce(function: Callable[[_T, _T], _T],
+ sequence: Iterable[_T]) -> _T: ...
+@overload
+def reduce(function: Callable[[_T, _S], _T],
+ sequence: Iterable[_S], initial: _T) -> _T: ...
WRAPPER_ASSIGNMENTS = ... # type: Sequence[str]
WRAPPER_UPDATES = ... # type: Sequence[str]
diff --git a/typeshed/stdlib/2.7/gc.pyi b/typeshed/stdlib/2.7/gc.pyi
index f5728d9..74dc445 100644
--- a/typeshed/stdlib/2.7/gc.pyi
+++ b/typeshed/stdlib/2.7/gc.pyi
@@ -1,6 +1,7 @@
-"""Stubs for the 'gc' module."""
+# Stubs for gc
+
+from typing import Any, List, Tuple
-from typing import List, Any, Tuple
def enable() -> None: ...
def disable() -> None: ...
@@ -9,7 +10,8 @@ def collect(generation: int = ...) -> int: ...
def set_debug(flags: int) -> None: ...
def get_debug() -> int: ...
def get_objects() -> List[Any]: ...
-def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ...) -> None: ...
+def set_threshold(threshold0: int, threshold1: int = ...,
+ threshold2: int = ...) -> None: ...
def get_count() -> Tuple[int, int, int]: ...
def get_threshold() -> Tuple[int, int, int]: ...
def get_referrers(*objs: Any) -> List[Any]: ...
@@ -18,10 +20,10 @@ def is_tracked(obj: Any) -> bool: ...
garbage = ... # type: List[Any]
-DEBUG_STATS = ... # type: Any
-DEBUG_COLLECTABLE = ... # type: Any
-DEBUG_UNCOLLECTABLE = ... # type: Any
-DEBUG_INSTANCES = ... # type: Any
-DEBUG_OBJECTS = ... # type: Any
-DEBUG_SAVEALL = ... # type: Any
-DEBUG_LEAK = ... # type: Any
+DEBUG_STATS = ... # type: int
+DEBUG_COLLECTABLE = ... # type: int
+DEBUG_UNCOLLECTABLE = ... # type: int
+DEBUG_INSTANCES = ... # type: int
+DEBUG_OBJECTS = ... # type: int
+DEBUG_SAVEALL = ... # type: int
+DEBUG_LEAK = ... # type: int
diff --git a/typeshed/stdlib/2.7/genericpath.pyi b/typeshed/stdlib/2.7/genericpath.pyi
new file mode 100644
index 0000000..85f1c0f
--- /dev/null
+++ b/typeshed/stdlib/2.7/genericpath.pyi
@@ -0,0 +1,14 @@
+# Stubs for genericpath (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+class _unicode: ...
+
+def exists(path): ...
+def isfile(path): ...
+def isdir(s): ...
+def getsize(filename): ...
+def getmtime(filename): ...
+def getatime(filename): ...
+def getctime(filename): ...
+def commonprefix(m): ...
diff --git a/typeshed/stdlib/2.7/hashlib.pyi b/typeshed/stdlib/2.7/hashlib.pyi
index dd7edd3..95f2b82 100644
--- a/typeshed/stdlib/2.7/hashlib.pyi
+++ b/typeshed/stdlib/2.7/hashlib.pyi
@@ -11,7 +11,7 @@ class _hash(object):
def hexdigest(self) -> str: ...
def copy(self) -> _hash: ...
-def new(algo: str = ...) -> _hash: ...
+def new(name: str, data: str = ...) -> _hash: ...
def md5(s: str = ...) -> _hash: ...
def sha1(s: str = ...) -> _hash: ...
diff --git a/typeshed/stdlib/2.7/heapq.pyi b/typeshed/stdlib/2.7/heapq.pyi
new file mode 100644
index 0000000..4a7a65f
--- /dev/null
+++ b/typeshed/stdlib/2.7/heapq.pyi
@@ -0,0 +1,15 @@
+from typing import TypeVar, List, Iterable, Any, Callable
+
+_T = TypeVar('_T')
+
+def cmp_lt(x, y) -> bool: ...
+def heappush(heap: List[_T], item: _T) -> None: ...
+def heappop(heap: List[_T]) -> _T:
+ raise IndexError() # if heap is empty
+def heappushpop(heap: List[_T], item: _T) -> _T: ...
+def heapify(x: List[_T]) -> None: ...
+def heapreplace(heap: List[_T], item: _T) -> _T:
+ raise IndexError() # if heap is empty
+def merge(*iterables: Iterable[_T]) -> Iterable[_T]: ...
+def nlargest(n: int, iterable: Iterable[_T]) -> List[_T]: ...
+def nsmallest(n: int, iterable: Iterable[_T]) -> List[_T]: ...
diff --git a/typeshed/stdlib/2.7/hmac.pyi b/typeshed/stdlib/2.7/hmac.pyi
index eafa030..65282cd 100644
--- a/typeshed/stdlib/2.7/hmac.pyi
+++ b/typeshed/stdlib/2.7/hmac.pyi
@@ -1,6 +1,6 @@
# Stubs for hmac (Python 2)
-from typing import Any
+from typing import Any, AnyStr
class HMAC:
def update(self, msg: str) -> None: ...
@@ -9,3 +9,5 @@ class HMAC:
def copy(self) -> HMAC: ...
def new(key: str, msg: str = ..., digestmod: Any = ...) -> HMAC: ...
+
+def compare_digest(a: AnyStr, b: AnyStr) -> bool: ...
diff --git a/typeshed/stdlib/2.7/httplib.pyi b/typeshed/stdlib/2.7/httplib.pyi
index a8a87d8..c0d1178 100644
--- a/typeshed/stdlib/2.7/httplib.pyi
+++ b/typeshed/stdlib/2.7/httplib.pyi
@@ -62,7 +62,7 @@ class HTTPConnection:
class HTTP:
debuglevel = ... # type: Any
- def __init__(self, host='', port=None, strict=None) -> None: ...
+ def __init__(self, host:str=..., port=None, strict=None) -> None: ...
def connect(self, host=None, port=None): ...
def getfile(self): ...
file = ... # type: Any
@@ -81,7 +81,7 @@ class HTTPSConnection(HTTPConnection):
class HTTPS(HTTP):
key_file = ... # type: Any
cert_file = ... # type: Any
- def __init__(self, host='', port=None, key_file=None, cert_file=None, strict=None, context=None) -> None: ...
+ def __init__(self, host:str=..., port=None, key_file=None, cert_file=None, strict=None, context=None) -> None: ...
class HTTPException(Exception): ...
class NotConnected(HTTPException): ...
diff --git a/typeshed/stdlib/2.7/inspect.pyi b/typeshed/stdlib/2.7/inspect.pyi
index 1ea9fa4..818f525 100644
--- a/typeshed/stdlib/2.7/inspect.pyi
+++ b/typeshed/stdlib/2.7/inspect.pyi
@@ -1,14 +1,53 @@
# TODO incomplete
-from typing import Any, List, Tuple, NamedTuple
+from types import TracebackType, FrameType, ModuleType
+from typing import Any, Callable, List, Optional, Tuple, NamedTuple
-def isgenerator(object: Any) -> bool: ...
+# Types and members
+ModuleInfo = NamedTuple('ModuleInfo', [('name', str),
+ ('suffix', str),
+ ('mode', str),
+ ('module_type', int),
+ ])
+def getmembers(object: object,
+ predicate: Callable[[Any], bool] = ...,
+ ) -> List[Tuple[str, object]]: ...
+def getmoduleinfo(path: str) -> Optional[ModuleInfo]: ...
+def getmodulename(path: str) -> Optional[str]: ...
-class _Frame:
- ...
-_FrameRecord = Tuple[_Frame, str, int, str, List[str], int]
+def ismodule(object: object) -> bool: ...
+def isclass(object: object) -> bool: ...
+def ismethod(object: object) -> bool: ...
+def isfunction(object: object) -> bool: ...
+def isgeneratorfunction(object: object) -> bool: ...
+def isgenerator(object: object) -> bool: ...
+def istraceback(object: object) -> bool: ...
+def isframe(object: object) -> bool: ...
+def iscode(object: object) -> bool: ...
+def isbuiltin(object: object) -> bool: ...
+def isroutine(object: object) -> bool: ...
+def isabstract(object: object) -> bool: ...
+def ismethoddescriptor(object: object) -> bool: ...
+def isdatadescriptor(object: object) -> bool: ...
+def isgetsetdescriptor(object: object) -> bool: ...
+def ismemberdescriptor(object: object) -> bool: ...
-def currentframe() -> _FrameRecord: ...
-def stack(context: int = ...) -> List[_FrameRecord]: ...
+# Retrieving source code
+def getdoc(object: object) -> str: ...
+def getcomments(object: object) -> str: ...
+def getfile(object: object) -> str: ...
+def getmodule(object: object) -> ModuleType: ...
+def getsourcefile(object: object) -> str: ...
+# TODO restrict to "module, class, method, function, traceback, frame,
+# or code object"
+def getsourcelines(object: object) -> Tuple[List[str], int]: ...
+# TODO restrict to "a module, class, method, function, traceback, frame,
+# or code object"
+def getsource(object: object) -> str: ...
+def cleandoc(doc: str) -> str: ...
+
+# Classes and functions
+# TODO make the return type more specific
+def getclasstree(classes: List[type], unique: bool = ...) -> Any: ...
ArgSpec = NamedTuple('ArgSpec', [('args', List[str]),
('varargs', str),
@@ -17,3 +56,21 @@ ArgSpec = NamedTuple('ArgSpec', [('args', List[str]),
])
def getargspec(func: object) -> ArgSpec: ...
+# TODO make the return type more specific
+def getargvalues(frame: FrameType) -> Any: ...
+# TODO formatargspec
+# TODO formatargvalues
+def getmro(cls: type) -> Tuple[type, ...]: ...
+# TODO getcallargs
+
+# The interpreter stack
+# TODO getframeinfo
+# TODO getouterframes
+def getinnerframes(traceback: TracebackType, context: int = ...) -> List[FrameType]:
+ ...
+
+_FrameRecord = Tuple[FrameType, str, int, str, List[str], int]
+
+def currentframe() -> FrameType: ...
+def stack(context: int = ...) -> List[_FrameRecord]: ...
+def trace(context: int = ...) -> List[_FrameRecord]: ...
diff --git a/typeshed/stdlib/2.7/itertools.pyi b/typeshed/stdlib/2.7/itertools.pyi
index fe2fa9c..97623cb 100644
--- a/typeshed/stdlib/2.7/itertools.pyi
+++ b/typeshed/stdlib/2.7/itertools.pyi
@@ -3,7 +3,7 @@
# Based on https://docs.python.org/2/library/itertools.html
from typing import (Iterator, TypeVar, Iterable, overload, Any, Callable, Tuple,
- Union, Sequence)
+ Union, Sequence, Generic)
_T = TypeVar('_T')
_S = TypeVar('_S')
@@ -15,8 +15,14 @@ def cycle(iterable: Iterable[_T]) -> Iterator[_T]: ...
def repeat(object: _T, times: int = ...) -> Iterator[_T]: ...
def accumulate(iterable: Iterable[_T]) -> Iterator[_T]: ...
-def chain(*iterables: Iterable[_T]) -> Iterator[_T]: ...
-# TODO chain.from_Iterable
+
+class chain(Iterator[_T], Generic[_T]):
+ def __init__(self, *iterables: Iterable[_T]) -> None: ...
+ def next(self) -> _T: ...
+ def __iter__(self) -> Iterator[_T]: ...
+ @staticmethod
+ def from_iterable(iterable: Iterable[Iterable[_S]]) -> Iterator[_S]: ...
+
def compress(data: Iterable[_T], selectors: Iterable[Any]) -> Iterator[_T]: ...
def dropwhile(predicate: Callable[[_T], Any],
iterable: Iterable[_T]) -> Iterator[_T]: ...
diff --git a/typeshed/stdlib/2.7/linecache.pyi b/typeshed/stdlib/2.7/linecache.pyi
new file mode 100644
index 0000000..9859788
--- /dev/null
+++ b/typeshed/stdlib/2.7/linecache.pyi
@@ -0,0 +1,7 @@
+# Stubs for linecache (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+def getline(filename, lineno, module_globals=None): ...
+def clearcache(): ...
+def checkcache(filename=None): ...
diff --git a/typeshed/stdlib/2.7/locale.pyi b/typeshed/stdlib/2.7/locale.pyi
new file mode 100644
index 0000000..b79617e
--- /dev/null
+++ b/typeshed/stdlib/2.7/locale.pyi
@@ -0,0 +1,33 @@
+# Stubs for locale (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from _locale import *
+
+class _unicode: ...
+
+CHAR_MAX = ... # type: Any
+LC_ALL = ... # type: Any
+LC_COLLATE = ... # type: Any
+LC_CTYPE = ... # type: Any
+LC_MESSAGES = ... # type: Any
+LC_MONETARY = ... # type: Any
+LC_NUMERIC = ... # type: Any
+LC_TIME = ... # type: Any
+Error = ... # type: Any
+
+def localeconv(): ...
+def strcoll(a, b): ...
+def strxfrm(s): ...
+def format(percent, value, grouping=False, monetary=False, *additional): ...
+def format_string(f, val, grouping=False): ...
+def currency(val, symbol=True, grouping=False, international=False): ...
+def str(val): ...
+def atof(string, func=...): ...
+def atoi(str): ...
+def normalize(localename): ...
+def getdefaultlocale(envvars=...): ...
+def getlocale(category=...): ...
+def resetlocale(category=...): ...
+def getpreferredencoding(do_setlocale=True): ...
diff --git a/typeshed/stdlib/2.7/logging/__init__.pyi b/typeshed/stdlib/2.7/logging/__init__.pyi
index e4489d1..377e800 100644
--- a/typeshed/stdlib/2.7/logging/__init__.pyi
+++ b/typeshed/stdlib/2.7/logging/__init__.pyi
@@ -2,7 +2,7 @@
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
-from typing import Any, Dict, Optional, Sequence, Tuple, overload
+from typing import Any, Dict, Optional, Sequence, Tuple, overload, Union
CRITICAL = 0
FATAL = 0
@@ -170,15 +170,15 @@ class Logger(Filterer):
disabled = ... # type: Any
def __init__(self, name: str, level: int = ...) -> None: ...
def setLevel(self, level: int) -> None: ...
- def debug(self, msg: str, *args, **kwargs) -> None: ...
- def info(self, msg: str, *args, **kwargs) -> None: ...
- def warning(self, msg: str, *args, **kwargs) -> None: ...
- def warn(self, msg: str, *args, **kwargs) -> None: ...
- def error(self, msg: str, *args, **kwargs) -> None: ...
- def exception(self, msg: str, *args, **kwargs) -> None: ...
- def critical(self, msg: str, *args, **kwargs) -> None: ...
+ def debug(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
+ def info(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
+ def warning(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
+ def warn(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
+ def error(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
+ def exception(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
+ def critical(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
fatal = ... # type: Any
- def log(self, level: int, msg: str, *args, **kwargs) -> None: ...
+ def log(self, level: int, msg: Union[str, unicode], *args, **kwargs) -> None: ...
def findCaller(self) -> Tuple[str, int, str]: ...
def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=..., extra=...,
sinfo=...): ...
@@ -199,14 +199,14 @@ class LoggerAdapter:
extra = ... # type: Any
def __init__(self, logger, extra) -> None: ...
def process(self, msg, kwargs): ...
- def debug(self, msg: str, *args, **kwargs) -> None: ...
- def info(self, msg: str, *args, **kwargs) -> None: ...
- def warning(self, msg: str, *args, **kwargs) -> None: ...
- def warn(self, msg: str, *args, **kwargs) -> None: ...
- def error(self, msg: str, *args, **kwargs) -> None: ...
- def exception(self, msg: str, *args, **kwargs) -> None: ...
- def critical(self, msg: str, *args, **kwargs) -> None: ...
- def log(self, level: int, msg: str, *args, **kwargs) -> None: ...
+ def debug(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
+ def info(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
+ def warning(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
+ def warn(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
+ def error(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
+ def exception(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
+ def critical(self, msg: Union[str, unicode], *args, **kwargs) -> None: ...
+ def log(self, level: int, msg: Union[str, unicode], *args, **kwargs) -> None: ...
def isEnabledFor(self, level: int) -> bool: ...
def setLevel(self, level: int) -> None: ...
def getEffectiveLevel(self) -> int: ...
@@ -218,16 +218,16 @@ def critical(msg: str, *args, **kwargs) -> None: ...
fatal = ... # type: Any
-def error(msg: str, *args, **kwargs) -> None: ...
+def error(msg: Union[str, unicode], *args, **kwargs) -> None: ...
@overload
-def exception(msg: str, *args, **kwargs) -> None: ...
+def exception(msg: Union[str, unicode], *args, **kwargs) -> None: ...
@overload
def exception(exception: Exception, *args, **kwargs) -> None: ...
-def warning(msg: str, *args, **kwargs) -> None: ...
-def warn(msg: str, *args, **kwargs) -> None: ...
-def info(msg: str, *args, **kwargs) -> None: ...
-def debug(msg: str, *args, **kwargs) -> None: ...
-def log(level: int, msg: str, *args, **kwargs) -> None: ...
+def warning(msg: Union[str, unicode], *args, **kwargs) -> None: ...
+def warn(msg: Union[str, unicode], *args, **kwargs) -> None: ...
+def info(msg: Union[str, unicode], *args, **kwargs) -> None: ...
+def debug(msg: Union[str, unicode], *args, **kwargs) -> None: ...
+def log(level: int, msg: Union[str, unicode], *args, **kwargs) -> None: ...
def disable(level: int) -> None: ...
class NullHandler(Handler):
diff --git a/typeshed/stdlib/2.7/mimetypes.pyi b/typeshed/stdlib/2.7/mimetypes.pyi
new file mode 100644
index 0000000..a06fcd9
--- /dev/null
+++ b/typeshed/stdlib/2.7/mimetypes.pyi
@@ -0,0 +1,26 @@
+# Stubs for mimetypes (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class MimeTypes:
+ encodings_map = ... # type: Any
+ suffix_map = ... # type: Any
+ types_map = ... # type: Any
+ types_map_inv = ... # type: Any
+ def __init__(self, filenames=..., strict=True): ...
+ def add_type(self, type, ext, strict=True): ...
+ def guess_type(self, url, strict=True): ...
+ def guess_all_extensions(self, type, strict=True): ...
+ def guess_extension(self, type, strict=True): ...
+ def read(self, filename, strict=True): ...
+ def readfp(self, fp, strict=True): ...
+ def read_windows_registry(self, strict=True): ...
+
+def guess_type(url, strict=True): ...
+def guess_all_extensions(type, strict=True): ...
+def guess_extension(type, strict=True): ...
+def add_type(type, ext, strict=True): ...
+def init(files=None): ...
+def read_mime_types(file): ...
diff --git a/typeshed/stdlib/2.7/multiprocessing/__init__.pyi b/typeshed/stdlib/2.7/multiprocessing/__init__.pyi
new file mode 100644
index 0000000..91754a3
--- /dev/null
+++ b/typeshed/stdlib/2.7/multiprocessing/__init__.pyi
@@ -0,0 +1,32 @@
+# Stubs for multiprocessing (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from multiprocessing.process import Process as Process, current_process as current_process, active_children as active_children
+from multiprocessing.util import SUBDEBUG as SUBDEBUG, SUBWARNING as SUBWARNING
+
+class ProcessError(Exception): ...
+class BufferTooShort(ProcessError): ...
+class TimeoutError(ProcessError): ...
+class AuthenticationError(ProcessError): ...
+
+def Manager(): ...
+def Pipe(duplex=True): ...
+def cpu_count(): ...
+def freeze_support(): ...
+def get_logger(): ...
+def log_to_stderr(level=None): ...
+def allow_connection_pickling(): ...
+def Lock(): ...
+def RLock(): ...
+def Condition(lock=None): ...
+def Semaphore(value=1): ...
+def BoundedSemaphore(value=1): ...
+def Event(): ...
+def Queue(maxsize=0): ...
+def JoinableQueue(maxsize=0): ...
+def Pool(processes=None, initializer=None, initargs=..., maxtasksperchild=None): ...
+def RawValue(typecode_or_type, *args): ...
+def RawArray(typecode_or_type, size_or_initializer): ...
+def Value(typecode_or_type, *args, **kwds): ...
+def Array(typecode_or_type, size_or_initializer, **kwds): ...
diff --git a/typeshed/stdlib/2.7/multiprocessing/process.pyi b/typeshed/stdlib/2.7/multiprocessing/process.pyi
new file mode 100644
index 0000000..a4650cf
--- /dev/null
+++ b/typeshed/stdlib/2.7/multiprocessing/process.pyi
@@ -0,0 +1,39 @@
+# Stubs for multiprocessing.process (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+def current_process(): ...
+def active_children(): ...
+
+class Process:
+ def __init__(self, group=None, target=None, name=None, args=..., kwargs=...): ...
+ def run(self): ...
+ def start(self): ...
+ def terminate(self): ...
+ def join(self, timeout=None): ...
+ def is_alive(self): ...
+ @property
+ def name(self): ...
+ @name.setter
+ def name(self, name): ...
+ @property
+ def daemon(self): ...
+ @daemon.setter
+ def daemon(self, daemonic): ...
+ @property
+ def authkey(self): ...
+ @authkey.setter
+ def authkey(self, authkey): ...
+ @property
+ def exitcode(self): ...
+ @property
+ def ident(self): ...
+ pid = ... # type: Any
+
+class AuthenticationString(bytes):
+ def __reduce__(self): ...
+
+class _MainProcess(Process):
+ def __init__(self): ...
diff --git a/typeshed/stdlib/2.7/multiprocessing/util.pyi b/typeshed/stdlib/2.7/multiprocessing/util.pyi
new file mode 100644
index 0000000..d52860c
--- /dev/null
+++ b/typeshed/stdlib/2.7/multiprocessing/util.pyi
@@ -0,0 +1,33 @@
+# Stubs for multiprocessing.util (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+import threading
+
+SUBDEBUG = ... # type: Any
+SUBWARNING = ... # type: Any
+
+def sub_debug(msg, *args): ...
+def debug(msg, *args): ...
+def info(msg, *args): ...
+def sub_warning(msg, *args): ...
+def get_logger(): ...
+def log_to_stderr(level=None): ...
+def get_temp_dir(): ...
+def register_after_fork(obj, func): ...
+
+class Finalize:
+ def __init__(self, obj, callback, args=..., kwargs=None, exitpriority=None): ...
+ def __call__(self, wr=None): ...
+ def cancel(self): ...
+ def still_active(self): ...
+
+def is_exiting(): ...
+
+class ForkAwareThreadLock:
+ def __init__(self): ...
+
+class ForkAwareLocal(threading.local):
+ def __init__(self): ...
+ def __reduce__(self): ...
diff --git a/typeshed/stdlib/2.7/optparse.pyi b/typeshed/stdlib/2.7/optparse.pyi
new file mode 100644
index 0000000..e315ac4
--- /dev/null
+++ b/typeshed/stdlib/2.7/optparse.pyi
@@ -0,0 +1,256 @@
+# Generated by pytype, with only minor tweaks. Might be incomplete.
+
+from typing import Any, Optional, List, Callable, Tuple, Dict, Iterable, Union
+
+# See https://groups.google.com/forum/#!topic/python-ideas/gA1gdj3RZ5g
+Text = Union[str, unicode]
+
+NO_DEFAULT = ... # type: Tuple[str, ...]
+SUPPRESS_HELP = ... # type: str
+SUPPRESS_USAGE = ... # type: str
+
+def check_builtin(option: Option, opt, value: Text) -> Any: ...
+def check_choice(option: Option, opt, value) -> Any: ...
+def isbasestring(x) -> bool: ...
+
+class OptParseError(Exception):
+ msg = ... # type: Any
+ def __init__(self, msg) -> None: ...
+
+class BadOptionError(OptParseError):
+ __doc__ = ... # type: str
+ opt_str = ... # type: Any
+ def __init__(self, opt_str) -> None: ...
+
+class AmbiguousOptionError(BadOptionError):
+ possibilities = ... # type: Any
+ def __init__(self, opt_str, possibilities) -> None: ...
+
+class OptionError(OptParseError):
+ msg = ... # type: Any
+ option_id = ... # type: str
+ def __init__(self, msg, option: Option) -> None: ...
+
+class OptionConflictError(OptionError): ...
+
+class HelpFormatter:
+ NO_DEFAULT_VALUE = ... # type: str
+ _long_opt_fmt = ... # type: Union[str, unicode]
+ _short_opt_fmt = ... # type: Union[str, unicode]
+ current_indent = ... # type: int
+ default_tag = ... # type: str
+ help_position = ... # type: Any
+ help_width = ... # type: Any
+ indent_increment = ... # type: Any
+ level = ... # type: int
+ max_help_position = ... # type: int
+ option_strings = ... # type: Dict[Option, str]
+ parser = ... # type: Any
+ short_first = ... # type: Any
+ width = ... # type: Any
+ def __init__(self, indent_increment, max_help_position, width, short_first) -> None: ...
+ def _format_text(self, text: Text) -> Text: ...
+ def dedent(self) -> None: ...
+ def expand_default(self, option: Option) -> Text: ...
+ def format_description(self, description) -> Any: ...
+ def format_epilog(self, epilog) -> Any: ...
+ def format_heading(self, heading) -> Any: ...
+ def format_option(self, option: Any) -> str: ...
+ def format_option_strings(self, option: Any) -> Any: ...
+ def format_usage(self, usage) -> Any: ...
+ def indent(self) -> None: ...
+ def set_long_opt_delimiter(self, delim) -> None: ...
+ def set_parser(self, parser) -> None: ...
+ def set_short_opt_delimiter(self, delim) -> None: ...
+ def store_option_strings(self, parser) -> None: ...
+
+class IndentedHelpFormatter(HelpFormatter):
+ __doc__ = ... # type: str
+ _long_opt_fmt = ... # type: str
+ _short_opt_fmt = ... # type: str
+ current_indent = ... # type: int
+ default_tag = ... # type: str
+ help_position = ... # type: int
+ help_width = ... # type: Optional[int]
+ indent_increment = ... # type: Any
+ level = ... # type: int
+ max_help_position = ... # type: int
+ option_strings = ... # type: Dict[Any, Any]
+ parser = ... # type: Optional[OptionParser]
+ short_first = ... # type: Any
+ width = ... # type: Any
+ def __init__(self, *args, **kwargs) -> None: ...
+ def format_heading(self, heading) -> str: ...
+ def format_usage(self, usage) -> str: ...
+
+class Option:
+ ACTIONS = ... # type: Tuple[str, ...]
+ ALWAYS_TYPED_ACTIONS = ... # type: Tuple[str, ...]
+ ATTRS = ... # type: List[str]
+ CHECK_METHODS = ... # type: Union[None, List[Callable]]
+ CONST_ACTIONS = ... # type: Tuple[str, ...]
+ STORE_ACTIONS = ... # type: Tuple[str, ...]
+ TYPED_ACTIONS = ... # type: Tuple[str, ...]
+ TYPES = ... # type: Tuple[str, ...]
+ TYPE_CHECKER = ... # type: Dict[str, Callable]
+ __doc__ = ... # type: str
+ _long_opts = ... # type: List[Text]
+ _short_opts = ... # type: List[Text]
+ action = ... # type: str
+ dest = ... # type: Any
+ nargs = ... # type: int
+ type = ... # type: Any
+ def __init__(self, *args, **kwargs) -> None: ...
+ def __repr__(self) -> str: ...
+ def __str__(self) -> str: ...
+ def _check_action(self) -> None: ...
+ def _check_callback(self) -> None: ...
+ def _check_choice(self) -> None: ...
+ def _check_const(self) -> None: ...
+ def _check_dest(self) -> None: ...
+ def _check_nargs(self) -> None: ...
+ def _check_opt_strings(self, opts) -> Any: ...
+ def _check_type(self) -> None: ...
+ def _set_attrs(self, attrs: Dict[str, Any]) -> None: ...
+ def _set_opt_strings(self, opts) -> None: ...
+ def check_value(self, opt, value) -> Any: ...
+ def convert_value(self, opt, value) -> Any: ...
+ def get_opt_string(self) -> Text: ...
+ def process(self, opt, value, values: Any, parser: OptionParser) -> int: ...
+ def take_action(self, action, dest, opt, value, values, parser: OptionParser) -> int: ...
+ def takes_value(self) -> bool: ...
+
+make_option = Option
+
+class OptionContainer:
+ _long_opt = ... # type: Dict[Text, Any]
+ _short_opt = ... # type: Dict[Text, Any]
+ conflict_handler = ... # type: Any
+ defaults = ... # type: Dict[Text, Any]
+ description = ... # type: Any
+ option_class = ... # type: Any
+ def __init__(self, option_class, conflict_handler, description) -> None: ...
+ def _check_conflict(self, option: Any) -> None: ...
+ def _create_option_mappings(self) -> None: ...
+ def _share_option_mappings(self, parser) -> None: ...
+ def add_option(self, *args, **kwargs) -> Any: ...
+ def add_options(self, option_list) -> None: ...
+ def destroy(self) -> None: ...
+ def format_description(self, formatter: Optional[HelpFormatter]) -> Any: ...
+ def format_help(self, formatter: HelpFormatter) -> str: ...
+ def format_option_help(self, formatter: Optional[HelpFormatter]) -> str: ...
+ def get_description(self) -> Any: ...
+ def get_option(self, opt_str) -> Optional[Option]: ...
+ def has_option(self, opt_str) -> bool: ...
+ def remove_option(self, opt_str) -> None: ...
+ def set_conflict_handler(self, handler) -> None: ...
+ def set_description(self, description) -> None: ...
+
+class OptionGroup(OptionContainer):
+ _long_opt = ... # type: Dict[Any, Any]
+ _short_opt = ... # type: Dict[Any, Any]
+ conflict_handler = ... # type: Any
+ defaults = ... # type: Dict[Text, Any]
+ description = ... # type: Any
+ option_class = ... # type: Any
+ option_list = ... # type: List
+ parser = ... # type: Any
+ title = ... # type: Any
+ def __init__(self, parser, title, *args, **kwargs) -> None: ...
+ def _create_option_list(self) -> None: ...
+ def format_help(self, formatter: HelpFormatter) -> Any: ...
+ def set_title(self, title) -> None: ...
+
+class OptionParser(OptionContainer):
+ __doc__ = ... # type: str
+ _long_opt = ... # type: Dict[Text, Any]
+ _short_opt = ... # type: Dict[Any, Any]
+ allow_interspersed_args = ... # type: bool
+ conflict_handler = ... # type: Any
+ defaults = ... # type: Dict[Any, Any]
+ description = ... # type: Text
+ epilog = ... # type: Any
+ formatter = ... # type: HelpFormatter
+ largs = ... # type: Union[None, List[Text]]
+ option_class = ... # type: Callable
+ option_groups = ... # type: List[OptionGroup]
+ option_list = ... # type: List[Any]
+ process_default_values = ... # type: Any
+ prog = ... # type: Any
+ rargs = ... # type: Optional[List[Any]]
+ standard_option_list = ... # type: List
+ usage = ... # type: Optional[Text]
+ values = ... # type: Any
+ version = ... # type: Text
+ def __init__(self, *args, **kwargs) -> None: ...
+ def _add_help_option(self) -> None: ...
+ def _add_version_option(self) -> None: ...
+ def _create_option_list(self) -> None: ...
+ def _get_all_options(self) -> List[Any]: ...
+ def _get_args(self, args: Iterable) -> List[Any]: ...
+ def _get_encoding(self, file) -> Any: ...
+ def _init_parsing_state(self) -> None: ...
+ def _match_long_opt(self, opt) -> Any: ...
+ def _populate_option_list(self, option_list, *args, **kwargs) -> None: ...
+ def _process_args(self, largs: List[Text], rargs: List, values: Values) -> None: ...
+ def _process_long_opt(self, rargs: List, values) -> None: ...
+ def _process_short_opts(self, rargs: List, values) -> None: ...
+ def add_option_group(self, *args, **kwargs) -> OptionParser: ...
+ def check_values(self, values, args) -> Tuple[Any, ...]: ...
+ def disable_interspersed_args(self) -> None: ...
+ def enable_interspersed_args(self) -> None: ...
+ def error(self, msg) -> None: ...
+ def exit(self, *args, **kwargs) -> None: ...
+ def expand_prog_name(self, s: Optional[Text]) -> Any: ...
+ def format_epilog(self, formatter: Union[HelpFormatter, OptionParser, None]) -> Any: ...
+ def format_help(self, *args, **kwargs) -> str: ...
+ def format_option_help(self, *args, **kwargs) -> str: ...
+ def get_default_values(self) -> Values: ...
+ def get_option_group(self, opt_str) -> Any: ...
+ def get_prog_name(self) -> Any: ...
+ def get_usage(self) -> Text: ...
+ def get_version(self) -> Any: ...
+ def parse_args(self, *args, **kwargs) -> Tuple[Any, ...]: ...
+ def print_help(self, *args, **kwargs) -> None: ...
+ def print_usage(self, *args, **kwargs) -> None: ...
+ def print_version(self, *args, **kwargs) -> None: ...
+ def set_default(self, dest, value) -> None: ...
+ def set_defaults(self, *args, **kwargs) -> None: ...
+ def set_process_default_values(self, process) -> None: ...
+ def set_usage(self, usage: Text) -> None: ...
+
+class OptionValueError(OptParseError):
+ __doc__ = ... # type: str
+ msg = ... # type: Any
+
+class TitledHelpFormatter(HelpFormatter):
+ __doc__ = ... # type: str
+ _long_opt_fmt = ... # type: str
+ _short_opt_fmt = ... # type: str
+ current_indent = ... # type: int
+ default_tag = ... # type: str
+ help_position = ... # type: int
+ help_width = ... # type: None
+ indent_increment = ... # type: Any
+ level = ... # type: int
+ max_help_position = ... # type: int
+ option_strings = ... # type: Dict
+ parser = ... # type: None
+ short_first = ... # type: Any
+ width = ... # type: Any
+ def __init__(self, *args, **kwargs) -> None: ...
+ def format_heading(self, heading) -> str: ...
+ def format_usage(self, usage) -> str: ...
+
+class Values:
+ def __cmp__(self, other) -> int: ...
+ def __init__(self, *args, **kwargs) -> None: ...
+ def __repr__(self) -> str: ...
+ def __str__(self) -> str: ...
+ def _update(self, dict: Dict[str, Any], mode) -> None: ...
+ def _update_careful(self, dict: Dict[str, Any]) -> None: ...
+ def _update_loose(self, dict) -> None: ...
+ def ensure_value(self, attr, value) -> Any: ...
+ def read_file(self, filename, *args, **kwargs) -> None: ...
+ def read_module(self, modname, *args, **kwargs) -> None: ...
+
diff --git a/typeshed/stdlib/2.7/os/__init__.pyi b/typeshed/stdlib/2.7/os/__init__.pyi
index 8a71f72..55145c8 100644
--- a/typeshed/stdlib/2.7/os/__init__.pyi
+++ b/typeshed/stdlib/2.7/os/__init__.pyi
@@ -1,11 +1,17 @@
# created from https://docs.python.org/2/library/os.html
-from typing import List, Tuple, Union, Sequence, Mapping, IO, Any, Optional, AnyStr, MutableMapping, Iterator
+from typing import (
+ List, Tuple, Union, Sequence, Mapping, IO, Any, Optional, AnyStr, Iterator, MutableMapping, Dict
+)
import os.path as path
error = OSError
name = ... # type: str
-environ = ... # type: MutableMapping[str, str]
+
+class _Environ(MutableMapping[str, str]):
+ def copy(self) -> Dict[str, str]: ...
+
+environ = ... # type: _Environ
def chdir(path: unicode) -> None: ...
def fchdir(fd: int) -> None: ...
@@ -185,7 +191,7 @@ def nice(increment: int) -> int: ...
# TODO: plock, popen*, spawn*, P_*
-def startfile(path: unicode, operation: str) -> None: ...
+def startfile(path: unicode, operation: str = ...) -> None: ... # Windows only
def system(command: unicode) -> int: ...
def times() -> Tuple[float, float, float, float, float]: ...
def wait() -> Tuple[int, int]: ... # Unix only
diff --git a/typeshed/stdlib/2.7/pdb.pyi b/typeshed/stdlib/2.7/pdb.pyi
new file mode 100644
index 0000000..1c375fe
--- /dev/null
+++ b/typeshed/stdlib/2.7/pdb.pyi
@@ -0,0 +1,30 @@
+# Stub for pdb (incomplete, only some global functions)
+
+from typing import Any, Dict
+
+def run(statement: str,
+ globals: Dict[str, Any] = None,
+ locals: Dict[str, Any] = None) -> None:
+ ...
+
+def runeval(expression: str,
+ globals: Dict[str, Any] = None,
+ locals: Dict[str, Any] = None) -> Any:
+ ...
+
+def runctx(statement: str,
+ globals: Dict[str, Any],
+ locals: Dict[str, Any]) -> None:
+ ...
+
+def runcall(*args: Any, **kwds: Any) -> Any:
+ ...
+
+def set_trace() -> None:
+ ...
+
+def post_mortem(t: Any = None) -> None:
+ ...
+
+def pm() -> None:
+ ...
diff --git a/typeshed/stdlib/2.7/pickle.pyi b/typeshed/stdlib/2.7/pickle.pyi
index 1e47e02..1b5b1d6 100644
--- a/typeshed/stdlib/2.7/pickle.pyi
+++ b/typeshed/stdlib/2.7/pickle.pyi
@@ -1,8 +1,38 @@
# Stubs for pickle (Python 2)
-from typing import Any, IO
+from typing import Any, BinaryIO
-def dump(obj: Any, file: IO[str], protocol: int = ...) -> None: ...
-def dumps(obj: Any, protocol: int = ...) -> str: ...
-def load(file: IO[str]) -> Any: ...
-def loads(str: str) -> Any: ...
+
+HIGHEST_PROTOCOL = ... # type: int
+
+
+def dump(obj: Any, file: BinaryIO, protocol: int = ...) -> None: ...
+def dumps(obj: Any, protocol: int = ...) -> bytes: ...
+def load(file: BinaryIO) -> Any: ...
+def loads(string: bytes) -> Any: ...
+
+
+class PickleError(Exception):
+ pass
+
+
+class PicklingError(PickleError):
+ pass
+
+
+class UnpicklingError(PickleError):
+ pass
+
+
+class Pickler:
+ def __init__(self, file: BinaryIO, protocol: int = ...) -> None: ...
+
+ def dump(self, obj: Any) -> None: ...
+
+ def clear_memo(self) -> None: ...
+
+
+class Unpickler:
+ def __init__(self, file: BinaryIO) -> None: ...
+
+ def load(self) -> Any: ...
diff --git a/typeshed/stdlib/2.7/pipes.pyi b/typeshed/stdlib/2.7/pipes.pyi
index 6cfb94e..d5f5291 100644
--- a/typeshed/stdlib/2.7/pipes.pyi
+++ b/typeshed/stdlib/2.7/pipes.pyi
@@ -4,10 +4,10 @@ class Template:
def __init__(self) -> None: ...
def reset(self) -> None: ...
def clone(self) -> Template: ...
- def debug(flag: bool) -> None: ...
- def append(cmd: str, kind: str) -> None: ...
- def prepend(cmd: str, kind: str) -> None: ...
- def open(file: str, mode: str) -> IO[Any]: ...
- def copy(infile: str, outfile: str) -> None: ...
+ def debug(self, flag: bool) -> None: ...
+ def append(self, cmd: str, kind: str) -> None: ...
+ def prepend(self, cmd: str, kind: str) -> None: ...
+ def open(self, file: str, mode: str) -> IO[Any]: ...
+ def copy(self, infile: str, outfile: str) -> None: ...
def quote(s: str) -> str: ...
diff --git a/typeshed/stdlib/2.7/posixpath.pyi b/typeshed/stdlib/2.7/posixpath.pyi
new file mode 100644
index 0000000..5b6d64d
--- /dev/null
+++ b/typeshed/stdlib/2.7/posixpath.pyi
@@ -0,0 +1,50 @@
+# Stubs for posixpath (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from genericpath import *
+
+curdir = ... # type: Any
+pardir = ... # type: Any
+extsep = ... # type: Any
+sep = ... # type: Any
+pathsep = ... # type: Any
+defpath = ... # type: Any
+altsep = ... # type: Any
+devnull = ... # type: Any
+
+def normcase(s): ...
+def isabs(s): ...
+def join(a, *p): ...
+def split(p): ...
+def splitext(p): ...
+def splitdrive(p): ...
+def basename(p): ...
+def dirname(p): ...
+def islink(path): ...
+def lexists(path): ...
+def samefile(f1, f2): ...
+def sameopenfile(fp1, fp2): ...
+def samestat(s1, s2): ...
+def ismount(path): ...
+def walk(top, func, arg): ...
+def expanduser(path): ...
+def expandvars(path): ...
+def normpath(path): ...
+def abspath(path): ...
+def realpath(filename): ...
+
+supports_unicode_filenames = ... # type: Any
+
+def relpath(path, start=...): ...
+
+# Names in __all__ with no definition:
+# commonprefix
+# exists
+# getatime
+# getctime
+# getmtime
+# getsize
+# isdir
+# isfile
diff --git a/typeshed/stdlib/2.7/pprint.pyi b/typeshed/stdlib/2.7/pprint.pyi
index 2eb27d6..4c9819a 100644
--- a/typeshed/stdlib/2.7/pprint.pyi
+++ b/typeshed/stdlib/2.7/pprint.pyi
@@ -4,7 +4,7 @@
from typing import IO, Any
-def pprint(object: Any, stream: IO[Any] = ..., indent: int = ..., width: int = ...,
+def pprint(object: Any, stream: IO[str] = ..., indent: int = ..., width: int = ...,
depth: int = ...) -> None: ...
def pformat(object, indent=..., width=..., depth=...): ...
def saferepr(object): ...
@@ -13,7 +13,7 @@ def isrecursive(object): ...
class PrettyPrinter:
def __init__(self, indent: int = ..., width: int = ..., depth: int = ...,
- stream: IO[Any] = ...) -> None: ...
+ stream: IO[str] = ...) -> None: ...
def pprint(self, object): ...
def pformat(self, object): ...
def isrecursive(self, object): ...
diff --git a/typeshed/stdlib/2.7/quopri.pyi b/typeshed/stdlib/2.7/quopri.pyi
new file mode 100644
index 0000000..93ac393
--- /dev/null
+++ b/typeshed/stdlib/2.7/quopri.pyi
@@ -0,0 +1,8 @@
+# Stubs for quopri (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+def encode(input, output, quotetabs, header=0): ...
+def encodestring(s, quotetabs=0, header=0): ...
+def decode(input, output, header=0): ...
+def decodestring(s, header=0): ...
diff --git a/typeshed/stdlib/2.7/runpy.pyi b/typeshed/stdlib/2.7/runpy.pyi
new file mode 100644
index 0000000..ecc7f47
--- /dev/null
+++ b/typeshed/stdlib/2.7/runpy.pyi
@@ -0,0 +1,21 @@
+# Stubs for runpy (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class _TempModule:
+ mod_name = ... # type: Any
+ module = ... # type: Any
+ def __init__(self, mod_name): ...
+ def __enter__(self): ...
+ def __exit__(self, *args): ...
+
+class _ModifiedArgv0:
+ value = ... # type: Any
+ def __init__(self, value): ...
+ def __enter__(self): ...
+ def __exit__(self, *args): ...
+
+def run_module(mod_name, init_globals=None, run_name=None, alter_sys=False): ...
+def run_path(path_name, init_globals=None, run_name=None): ...
diff --git a/typeshed/stdlib/2.7/select.pyi b/typeshed/stdlib/2.7/select.pyi
index f17b22d..6af6846 100644
--- a/typeshed/stdlib/2.7/select.pyi
+++ b/typeshed/stdlib/2.7/select.pyi
@@ -66,7 +66,7 @@ POLLWRBAND = ... # type: int
POLLWRNORM = ... # type: int
def poll() -> epoll: ...
-def select(rlist, wlist, xlist, timeout: Optional[int]) -> Tuple[List, List, List]: ...
+def select(rlist, wlist, xlist, timeout: float = None) -> Tuple[List, List, List]: ...
class error(Exception): ...
diff --git a/typeshed/stdlib/2.7/shelve.pyi b/typeshed/stdlib/2.7/shelve.pyi
new file mode 100644
index 0000000..f5a92b9
--- /dev/null
+++ b/typeshed/stdlib/2.7/shelve.pyi
@@ -0,0 +1,33 @@
+from typing import Any, Dict, Iterator, List, Optional, Tuple
+import collections
+
+
+class Shelf(collections.MutableMapping):
+ def __init__(self, dict: Dict[Any, Any], protocol: Optional[int] = None, writeback: bool = ..., keyencoding: str = 'utf-8') -> None: ...
+ def __iter__(self) -> Iterator[str]: ...
+ def keys(self) -> List[Any]: ...
+ def __len__(self) -> int: ...
+ def has_key(self, key: Any) -> bool: ...
+ def __contains__(self, key: Any) -> bool: ...
+ def get(self, key: Any, default: Any = None) -> Any: ...
+ def __getitem__(self, key: Any) -> Any: ...
+ def __setitem__(self, key: Any, value: Any) -> None: ...
+ def __delitem__(self, key: Any) -> None: ...
+ def __enter__(self) -> Shelf: ...
+ def __exit__(self, type: Any, value: Any, traceback: Any) -> None: ...
+ def close(self) -> None: ...
+ def __del__(self) -> None: ...
+ def sync(self) -> None: ...
+
+class BsdDbShelf(Shelf):
+ def __init__(self, dict: Dict[Any, Any], protocol: Optional[int] = None, writeback: bool = ..., keyencoding: str = 'utf-8') -> None: ...
+ def set_location(self, key: Any) -> Tuple[str, Any]: ...
+ def next(self) -> Tuple[str, Any]: ...
+ def previous(self) -> Tuple[str, Any]: ...
+ def first(self) -> Tuple[str, Any]: ...
+ def last(self) -> Tuple[str, Any]: ...
+
+class DbfilenameShelf(Shelf):
+ def __init__(self, filename: str, flag: str = 'c', protocol: Optional[int] = None, writeback: bool = ...) -> None: ...
+
+def open(filename: str, flag: str = 'c', protocol: Optional[int] = None, writeback: bool = ...) -> DbfilenameShelf: ...
diff --git a/typeshed/stdlib/2.7/socket.pyi b/typeshed/stdlib/2.7/socket.pyi
index 807c0f1..93864c3 100644
--- a/typeshed/stdlib/2.7/socket.pyi
+++ b/typeshed/stdlib/2.7/socket.pyi
@@ -6,7 +6,7 @@
# see: http://nullege.com/codes/search/socket
# adapted for Python 2.7 by Michal Pokorny
-from typing import Any, Tuple, overload, List, Optional, Union
+from typing import Any, Tuple, List, Optional, Union, overload
# ----- variables and constants -----
@@ -283,24 +283,10 @@ class socket:
# --- methods ---
# second tuple item is an address
def accept(self) -> Tuple['socket', Any]: ...
-
- @overload
- def bind(self, address: tuple) -> None: ...
- @overload
- def bind(self, address: str) -> None: ...
-
+ def bind(self, address: Union[tuple, str]) -> None: ...
def close(self) -> None: ...
-
- @overload
- def connect(self, address: tuple) -> None: ...
- @overload
- def connect(self, address: str) -> None: ...
-
- @overload
- def connect_ex(self, address: tuple) -> int: ...
- @overload
- def connect_ex(self, address: str) -> int: ...
-
+ def connect(self, address: Union[tuple, str]) -> None: ...
+ def connect_ex(self, address: Union[tuple, str]) -> int: ...
def detach(self) -> int: ...
def fileno(self) -> int: ...
@@ -309,9 +295,9 @@ class socket:
def getsockname(self) -> Any: ...
@overload
- def getsockopt(self, level: int, optname: str) -> str: ...
+ def getsockopt(self, level: int, optname: str) -> int: ...
@overload
- def getsockopt(self, level: int, optname: str, buflen: int) -> str: ...
+ def getsockopt(self, level: int, optname: str, buflen: int) -> bytes: ...
def gettimeout(self) -> float: ...
def ioctl(self, control: object,
@@ -333,21 +319,10 @@ class socket:
def send(self, data: str, flags=...) -> int: ...
def sendall(self, data: str, flags=...) -> Any:
... # return type: None on success
-
- @overload
- def sendto(self, data: str, address: tuple, flags: int = ...) -> int: ...
- @overload
- def sendto(self, data: str, address: str, flags: int = ...) -> int: ...
-
+ def sendto(self, data: str, address: Union[tuple, str], flags: int = ...) -> int: ...
def setblocking(self, flag: bool) -> None: ...
- # TODO None valid for the value argument
- def settimeout(self, value: float) -> None: ...
-
- @overload
- def setsockopt(self, level: int, optname: str, value: int) -> None: ...
- @overload
- def setsockopt(self, level: int, optname: str, value: str) -> None: ...
-
+ def settimeout(self, value: Union[float, None]) -> None: ...
+ def setsockopt(self, level: int, optname: str, value: Union[int, str]) -> None: ...
def shutdown(self, how: int) -> None: ...
@@ -383,6 +358,5 @@ def inet_aton(ip_string: str) -> str: ... # ret val 4 bytes in length
def inet_ntoa(packed_ip: str) -> str: ...
def inet_pton(address_family: int, ip_string: str) -> str: ...
def inet_ntop(address_family: int, packed_ip: str) -> str: ...
-# TODO the timeout may be None
-def getdefaulttimeout() -> float: ...
+def getdefaulttimeout() -> Union[float, None]: ...
def setdefaulttimeout(timeout: float) -> None: ...
diff --git a/typeshed/stdlib/2.7/sqlite3/dbapi2.pyi b/typeshed/stdlib/2.7/sqlite3/dbapi2.pyi
index aa423c2..42a7f30 100644
--- a/typeshed/stdlib/2.7/sqlite3/dbapi2.pyi
+++ b/typeshed/stdlib/2.7/sqlite3/dbapi2.pyi
@@ -1,15 +1,17 @@
-# Stubs for sqlite3.dbapi2 (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
+# Filip Hron <filip.hron at gmail.com>
+# based heavily on Andrey Vlasovskikh's python-skeletons https://github.com/JetBrains/python-skeletons/blob/master/sqlite3.py
-from typing import Any
+from typing import Any, Union, List
+from numbers import Integral
+from datetime import time, datetime
+from collections import Iterable
-paramstyle = ... # type: Any
-threadsafety = ... # type: Any
-apilevel = ... # type: Any
-Date = ... # type: Any
-Time = ... # type: Any
-Timestamp = ... # type: Any
+paramstyle = ... # type: str
+threadsafety = ... # type: int
+apilevel = ... # type: str
+Date = ... # type: datetime
+Time = ... # type: time
+Timestamp = ... # type: datetime
def DateFromTicks(ticks): ...
def TimeFromTicks(ticks): ...
@@ -61,18 +63,26 @@ converters = ... # type: Any
sqlite_version = ... # type: str
version = ... # type: str
+# TODO: adapt needs to get probed
def adapt(obj, protocol, alternate): ...
-def complete_statement(sql): ...
-def connect(*args, **kwargs): ...
-def enable_callback_tracebacks(flag): ...
-def enable_shared_cache(do_enable): ...
-def register_adapter(type, callable): ...
-def register_converter(typename, callable): ...
+def complete_statement(sql: str) -> bool: ...
+def connect(database: Union[bytes, unicode],
+ timeout: float = ... ,
+ detect_types: int = ...,
+ isolation_level: Union[str, None] = ...,
+ check_same_thread: bool = ...,
+ factory: Union[Connection, None] = ...,
+ cached_statements: int = ...) -> Connection: ...
+def enable_callback_tracebacks(flag: bool) -> None: ...
+def enable_shared_cache(do_enable: int) -> None: ...
+def register_adapter(type: type, callable: Any) -> None: ...
+# TODO: sqlite3.register_converter.__doc__ specifies callable as unknown
+def register_converter(typename: str, callable: bytes) -> None: ...
class Cache:
- def __init__(self, *args, **kwargs): ...
- def display(self, *args, **kwargs): ...
- def get(self, *args, **kwargs): ...
+ def __init__(self, *args, **kwargs) -> None: ...
+ def display(self, *args, **kwargs) -> None: ...
+ def get(self, *args, **kwargs) -> None: ...
class Connection:
DataError = ... # type: Any
@@ -91,20 +101,25 @@ class Connection:
text_factory = ... # type: Any
total_changes = ... # type: Any
def __init__(self, *args, **kwargs): ...
- def close(self, *args, **kwargs): ...
- def commit(self, *args, **kwargs): ...
- def create_aggregate(self, *args, **kwargs): ...
- def create_collation(self, *args, **kwargs): ...
- def create_function(self, *args, **kwargs): ...
- def cursor(self, *args, **kwargs): ...
- def execute(self, *args, **kwargs): ...
- def executemany(self, *args, **kwargs): ...
- def executescript(self, *args, **kwargs): ...
- def interrupt(self, *args, **kwargs): ...
- def iterdump(self, *args, **kwargs): ...
+ def close(self) -> None: ...
+ def commit(self) -> None: ...
+ def create_aggregate(self, name: str, num_params: int, aggregate_class: type) -> None: ...
+ def create_collation(self, name: str, callable: Any) -> None: ...
+ def create_function(self, name: str, num_params: int, func: Any) -> None: ...
+ def cursor(self, cursorClass: Union[type, None] = ...) -> Cursor: ...
+ def execute(self, sql: str, parameters: Iterable = ...) -> Cursor: ...
+ # TODO: please check in executemany() if seq_of_parameters type is possible like this
+ def executemany(self, sql: str, seq_of_parameters: Iterable[Iterable]) -> Cursor: ...
+ def executescript(self, sql_script: Union[bytes, unicode]) -> Cursor: ...
+ def interrupt(self, *args, **kwargs) -> None: ...
+ def iterdump(self, *args, **kwargs) -> None: ...
def rollback(self, *args, **kwargs): ...
- def set_authorizer(self, *args, **kwargs): ...
- def set_progress_handler(self, *args, **kwargs): ...
+ # TODO: set_authorizer(authorzer_callback)
+ # see https://docs.python.org/2/library/sqlite3.html#sqlite3.Connection.set_authorizer
+ # returns [SQLITE_OK, SQLITE_DENY, SQLITE_IGNORE] so perhaps int
+ def set_authorizer(self, *args, **kwargs) -> None: ...
+ # set_progress_handler(handler, n) -> see https://docs.python.org/2/library/sqlite3.html#sqlite3.Connection.set_progress_handler
+ def set_progress_handler(self, *args, **kwargs) -> None: ...
def set_trace_callback(self, *args, **kwargs): ...
def __call__(self, *args, **kwargs): ...
def __enter__(self, *args, **kwargs): ...
@@ -117,14 +132,17 @@ class Cursor:
lastrowid = ... # type: Any
row_factory = ... # type: Any
rowcount = ... # type: Any
+ # TODO: Cursor class accepts exactly 1 argument
+ # required type is sqlite3.Connection (which is imported as _Connection)
+ # however, the name of the __init__ variable is unknown
def __init__(self, *args, **kwargs): ...
def close(self, *args, **kwargs): ...
- def execute(self, *args, **kwargs): ...
- def executemany(self, *args, **kwargs): ...
- def executescript(self, *args, **kwargs): ...
- def fetchall(self, *args, **kwargs): ...
- def fetchmany(self, *args, **kwargs): ...
- def fetchone(self, *args, **kwargs): ...
+ def execute(self, sql: str, parameters: Iterable = ...) -> Cursor: ...
+ def executemany(self, sql: str, seq_of_parameters: Iterable[Iterable]) -> Cursor: ...
+ def executescript(self, sql_script: Union[bytes, unicode]) -> Cursor: ...
+ def fetchall(self) -> List[tuple]: ...
+ def fetchmany(self, size: Integral = ...) -> List[tuple]: ...
+ def fetchone(self) -> Union[tuple, None]: ...
def setinputsizes(self, *args, **kwargs): ...
def setoutputsize(self, *args, **kwargs): ...
def __iter__(self): ...
diff --git a/typeshed/stdlib/2.7/ssl.pyi b/typeshed/stdlib/2.7/ssl.pyi
index c6ca2fa..103fc45 100644
--- a/typeshed/stdlib/2.7/ssl.pyi
+++ b/typeshed/stdlib/2.7/ssl.pyi
@@ -1,5 +1,190 @@
-# Stubs for ssl (incomplete)
+# Stubs for ssl (Python 2)
-import socket
+from typing import Any
+from socket import socket, error as socket_error
+from collections import namedtuple
-class SSLError(socket.error): ...
+class SSLError(OSError): ...
+class SSLEOFError(SSLError): ...
+class SSLSyscallError(SSLError): ...
+class SSLWantReadError(SSLError): ...
+class SSLWantWriteError(SSLError): ...
+class SSLZeroReturnError(SSLError): ...
+
+OPENSSL_VERSION = ... # type: str
+OPENSSL_VERSION_INFO = ... # type: Any
+OPENSSL_VERSION_NUMBER = ... # type: int
+
+VERIFY_CRL_CHECK_CHAIN = ... # type: int
+VERIFY_CRL_CHECK_LEAF = ... # type: int
+VERIFY_DEFAULT = ... # type: int
+VERIFY_X509_STRICT = ... # type: int
+
+ALERT_DESCRIPTION_ACCESS_DENIED = ... # type: int
+ALERT_DESCRIPTION_BAD_CERTIFICATE = ... # type: int
+ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE = ... # type: int
+ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE = ... # type: int
+ALERT_DESCRIPTION_BAD_RECORD_MAC = ... # type: int
+ALERT_DESCRIPTION_CERTIFICATE_EXPIRED = ... # type: int
+ALERT_DESCRIPTION_CERTIFICATE_REVOKED = ... # type: int
+ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN = ... # type: int
+ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE = ... # type: int
+ALERT_DESCRIPTION_CLOSE_NOTIFY = ... # type: int
+ALERT_DESCRIPTION_DECODE_ERROR = ... # type: int
+ALERT_DESCRIPTION_DECOMPRESSION_FAILURE = ... # type: int
+ALERT_DESCRIPTION_DECRYPT_ERROR = ... # type: int
+ALERT_DESCRIPTION_HANDSHAKE_FAILURE = ... # type: int
+ALERT_DESCRIPTION_ILLEGAL_PARAMETER = ... # type: int
+ALERT_DESCRIPTION_INSUFFICIENT_SECURITY = ... # type: int
+ALERT_DESCRIPTION_INTERNAL_ERROR = ... # type: int
+ALERT_DESCRIPTION_NO_RENEGOTIATION = ... # type: int
+ALERT_DESCRIPTION_PROTOCOL_VERSION = ... # type: int
+ALERT_DESCRIPTION_RECORD_OVERFLOW = ... # type: int
+ALERT_DESCRIPTION_UNEXPECTED_MESSAGE = ... # type: int
+ALERT_DESCRIPTION_UNKNOWN_CA = ... # type: int
+ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY = ... # type: int
+ALERT_DESCRIPTION_UNRECOGNIZED_NAME = ... # type: int
+ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE = ... # type: int
+ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION = ... # type: int
+ALERT_DESCRIPTION_USER_CANCELLED = ... # type: int
+
+OP_ALL = ... # type: int
+OP_CIPHER_SERVER_PREFERENCE = ... # type: int
+OP_NO_COMPRESSION = ... # type: int
+OP_NO_SSLv2 = ... # type: int
+OP_NO_SSLv3 = ... # type: int
+OP_NO_TLSv1 = ... # type: int
+OP_NO_TLSv1_1 = ... # type: int
+OP_NO_TLSv1_2 = ... # type: int
+OP_SINGLE_DH_USE = ... # type: int
+OP_SINGLE_ECDH_USE = ... # type: int
+
+SSL_ERROR_EOF = ... # type: int
+SSL_ERROR_INVALID_ERROR_CODE = ... # type: int
+SSL_ERROR_SSL = ... # type: int
+SSL_ERROR_SYSCALL = ... # type: int
+SSL_ERROR_WANT_CONNECT = ... # type: int
+SSL_ERROR_WANT_READ = ... # type: int
+SSL_ERROR_WANT_WRITE = ... # type: int
+SSL_ERROR_WANT_X509_LOOKUP = ... # type: int
+SSL_ERROR_ZERO_RETURN = ... # type: int
+
+CERT_NONE = ... # type: int
+CERT_OPTIONAL = ... # type: int
+CERT_REQUIRED = ... # type: int
+
+PROTOCOL_SSLv23 = ... # type: int
+PROTOCOL_SSLv3 = ... # type: int
+PROTOCOL_TLSv1 = ... # type: int
+PROTOCOL_TLSv1_1 = ... # type: int
+PROTOCOL_TLSv1_2 = ... # type: int
+
+HAS_ECDH = ... # type: bool
+HAS_NPN = ... # type: bool
+HAS_SNI = ... # type: bool
+
+CHANNEL_BINDING_TYPES = ... # type: Any
+
+class CertificateError(ValueError): ...
+
+def match_hostname(cert, hostname): ...
+
+DefaultVerifyPaths = namedtuple('DefaultVerifyPaths', 'cafile capath openssl_cafile_env openssl_cafile openssl_capath_env openssl_capath')
+
+def get_default_verify_paths(): ...
+
+class _ASN1Object:
+ def __new__(cls, oid): ...
+ @classmethod
+ def fromnid(cls, nid): ...
+ @classmethod
+ def fromname(cls, name): ...
+
+class Purpose(_ASN1Object):
+ SERVER_AUTH = ... # type: Purpose
+ CLIENT_AUTH = ... # type: Purpose
+
+class _SSLContext:
+ check_hostname = ... # type: Any
+ options = ... # type: Any
+ verify_flags = ... # type: Any
+ verify_mode = ... # type: Any
+ def __init__(self, *args, **kwargs) -> None: ...
+ def _set_npn_protocols(self, *args, **kwargs): ...
+ def _wrap_socket(self, *args, **kwargs): ...
+ def cert_store_stats(self): ...
+ def get_ca_certs(self, binary_form=...): ...
+ def load_cert_chain(self, *args, **kwargs): ...
+ def load_dh_params(self, *args, **kwargs): ...
+ def load_verify_locations(self, *args, **kwargs): ...
+ def session_stats(self, *args, **kwargs): ...
+ def set_ciphers(self, *args, **kwargs): ...
+ def set_default_verify_paths(self, *args, **kwargs): ...
+ def set_ecdh_curve(self, *args, **kwargs): ...
+ def set_servername_callback(self, method): ...
+
+class SSLContext(_SSLContext):
+ def __new__(cls, protocol, *args, **kwargs): ...
+ protocol = ... # type: Any
+ def __init__(self, protocol): ...
+ def wrap_socket(self, sock, server_side=False, do_handshake_on_connect=True, suppress_ragged_eofs=True, server_hostname=None): ...
+ def set_npn_protocols(self, npn_protocols): ...
+ def set_alpn_protocols(self, alpn_protocols): ...
+ def load_default_certs(self, purpose=...): ...
+
+def create_default_context(purpose=..., cafile=None, capath=None, cadata=None): ...
+
+class SSLSocket(socket):
+ keyfile = ... # type: Any
+ certfile = ... # type: Any
+ cert_reqs = ... # type: Any
+ ssl_version = ... # type: Any
+ ca_certs = ... # type: Any
+ ciphers = ... # type: Any
+ server_side = ... # type: Any
+ server_hostname = ... # type: Any
+ do_handshake_on_connect = ... # type: Any
+ suppress_ragged_eofs = ... # type: Any
+ def __init__(self, sock=None, keyfile=None, certfile=None, server_side=False, cert_reqs=..., ssl_version=..., ca_certs=None, do_handshake_on_connect=True, family=..., type=..., proto=0, fileno=None, suppress_ragged_eofs=True, npn_protocols=None, ciphers=None, server_hostname=None, _context=None): ...
+ @property
+ def context(self): ...
+ @context.setter
+ def context(self, ctx): ...
+ def dup(self): ...
+ def read(self, len=0, buffer=None): ...
+ def write(self, data): ...
+ def getpeercert(self, binary_form=False): ...
+ def selected_npn_protocol(self): ...
+ def selected_alpn_protocol(self): ...
+ def cipher(self): ...
+ def compression(self): ...
+ def send(self, data, flags=0): ...
+ def sendto(self, data, flags_or_addr, addr=None): ...
+ def sendall(self, data, flags=0): ...
+ def recv(self, buflen=1024, flags=0): ...
+ def recv_into(self, buffer, nbytes=None, flags=0): ...
+ def recvfrom(self, buflen=1024, flags=0): ...
+ def recvfrom_into(self, buffer, nbytes=None, flags=0): ...
+ def pending(self): ...
+ def shutdown(self, how): ...
+ def close(self): ...
+ def unwrap(self): ...
+ def do_handshake(self, block=False): ...
+ def connect(self, addr): ...
+ def connect_ex(self, addr): ...
+ def accept(self): ...
+ def makefile(self, mode='', bufsize=-1): ...
+ def get_channel_binding(self, cb_type=''): ...
+ def version(self): ...
+
+def wrap_socket(sock, keyfile=None, certfile=None, server_side=False, cert_reqs=..., ssl_version=..., ca_certs=None, do_handshake_on_connect=True, suppress_ragged_eofs=True, ciphers=None): ...
+def cert_time_to_seconds(cert_time): ...
+
+PEM_HEADER = ... # type: Any
+PEM_FOOTER = ... # type: Any
+
+def DER_cert_to_PEM_cert(der_cert_bytes): ...
+def PEM_cert_to_DER_cert(pem_cert_string): ...
+def get_server_certificate(addr, ssl_version=..., ca_certs=None): ...
+def get_protocol_name(protocol_code): ...
+def sslwrap_simple(sock, keyfile=None, certfile=None): ...
diff --git a/typeshed/stdlib/2.7/subprocess.pyi b/typeshed/stdlib/2.7/subprocess.pyi
index eaa5892..6053a4e 100644
--- a/typeshed/stdlib/2.7/subprocess.pyi
+++ b/typeshed/stdlib/2.7/subprocess.pyi
@@ -29,7 +29,7 @@ class CalledProcessError(Exception):
cmd = ... # type: str
output = ... # type: str # May be None
- def __init__(self, returncode: int, cmd: str, output: str) -> None: ...
+ def __init__(self, returncode: int, cmd: str, output: str = ...) -> None: ...
class Popen:
stdin = ... # type: Optional[IO[Any]]
@@ -57,9 +57,9 @@ class Popen:
def poll(self) -> int: ...
def wait(self) -> int: ...
# Return str/bytes
- def communicate(self, input: str = ...) -> Tuple[str, str]: ...
+ def communicate(self, input: Union[str, unicode] = ...) -> Tuple[str, str]: ...
def send_signal(self, signal: int) -> None: ...
- def terminatate(self) -> None: ...
+ def terminate(self) -> None: ...
def kill(self) -> None: ...
def __enter__(self) -> 'Popen': ...
def __exit__(self, type, value, traceback) -> bool: ...
diff --git a/typeshed/stdlib/2.7/time.pyi b/typeshed/stdlib/2.7/time.pyi
index 8a564c8..79f2526 100644
--- a/typeshed/stdlib/2.7/time.pyi
+++ b/typeshed/stdlib/2.7/time.pyi
@@ -1,19 +1,22 @@
"""Stub file for the 'time' module."""
# See https://docs.python.org/2/library/time.html
-from typing import NamedTuple, Tuple, Union
+from typing import NamedTuple, Tuple, Union, Any
# ----- variables and constants -----
-accept2dyear = False
+accept2dyear = False
altzone = 0
daylight = 0
timezone = 0
tzname = ... # type: Tuple[str, str]
-struct_time = NamedTuple('struct_time',
- [('tm_year', int), ('tm_mon', int), ('tm_mday', int),
- ('tm_hour', int), ('tm_min', int), ('tm_sec', int),
- ('tm_wday', int), ('tm_yday', int), ('tm_isdst', int)])
+class struct_time(NamedTuple('_struct_time',
+ [('tm_year', int), ('tm_mon', int), ('tm_mday', int),
+ ('tm_hour', int), ('tm_min', int), ('tm_sec', int),
+ ('tm_wday', int), ('tm_yday', int), ('tm_isdst', int)])):
+ def __init__(self, o: Tuple[int, int, int,
+ int, int, int,
+ int, int, int], _arg: Any = ...) -> None: ...
_TIME_TUPLE = Tuple[int, int, int, int, int, int, int, int, int]
diff --git a/typeshed/stdlib/2.7/traceback.pyi b/typeshed/stdlib/2.7/traceback.pyi
index b407107..4101c20 100644
--- a/typeshed/stdlib/2.7/traceback.pyi
+++ b/typeshed/stdlib/2.7/traceback.pyi
@@ -1,17 +1,19 @@
from typing import Any, IO, AnyStr, Callable, Tuple, List
from types import TracebackType, FrameType
+ExtractTbResult = List[Tuple[str, int, str, str]]
+
def print_tb(traceback: TracebackType, limit: int = ..., file: IO[str] = ...) -> None: ...
def print_exception(type: type, value: Exception, limit: int = ..., file: IO[str] = ...) -> None: ...
def print_exc(limit: int = ..., file: IO[str] = ...) -> None: ...
def format_exc(limit: int = ...) -> str: ...
def print_last(limit: int = ..., file: IO[str] = ...) -> None: ...
-def print_stack(f: FrameType, limit: int = ..., file: IO[AnyStr] = ...) -> None: ...
-def extract_tb(f: TracebackType, limit: int = ...) -> List[Tuple[str, int, str, str]]: ...
-def extract_stack(f: FrameType = ..., limit: int = ...) -> None: ...
-def format_list(list: List[Tuple[str, int, str, str]]) -> str: ...
+def print_stack(f: FrameType = ..., limit: int = ..., file: IO[AnyStr] = ...) -> None: ...
+def extract_tb(f: TracebackType, limit: int = ...) -> ExtractTbResult: ...
+def extract_stack(f: FrameType = ..., limit: int = ...) -> ExtractTbResult: ...
+def format_list(list: ExtractTbResult) -> List[str]: ...
def format_exception_only(type: type, value: List[str]) -> str: ...
-def format_exception(type: type, value: List[str], tb: TracebackType, limit: int) -> str: ...
-def format_tb(f: TracebackType, limit: int = ...) -> str: ...
-def format_stack(f: FrameType = ..., limit: int = ...) -> str: ...
+def format_exception(type: type, value: List[str], tb: TracebackType, limit: int = ...) -> str: ...
+def format_tb(f: TracebackType, limit: int = ...) -> List[str]: ...
+def format_stack(f: FrameType = ..., limit: int = ...) -> List[str]: ...
def tb_lineno(tb: TracebackType) -> AnyStr: ...
diff --git a/typeshed/stdlib/2.7/typing.pyi b/typeshed/stdlib/2.7/typing.pyi
index a04ad24..77873df 100644
--- a/typeshed/stdlib/2.7/typing.pyi
+++ b/typeshed/stdlib/2.7/typing.pyi
@@ -20,13 +20,14 @@ NamedTuple = object()
class TypeAlias:
# Class for defining generic aliases for library types.
- def __init__(self, target_type) -> None: ...
- def __getitem__(self, typeargs): ...
+ def __init__(self, target_type: type) -> None: ...
+ def __getitem__(self, typeargs: Any) -> Any: ...
Union = TypeAlias(object)
Optional = TypeAlias(object)
List = TypeAlias(object)
Dict = TypeAlias(object)
+DefaultDict = TypeAlias(object)
Set = TypeAlias(object)
# Predefined type variables.
@@ -84,7 +85,7 @@ class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]):
def send(self, value: _T_contra) -> _T_co:...
@abstractmethod
- def throw(self, typ: BaseException, val: Any=None, tb=None) -> None:...
+ def throw(self, typ: BaseException, val: Any = None, tb: Any = None) -> None:...
@abstractmethod
def close(self) -> None:...
@@ -166,6 +167,7 @@ class Mapping(Sized, Iterable[_KT], Container[_KT], Generic[_KT, _VT]):
def iterkeys(self) -> Iterator[_KT]: ...
def itervalues(self) -> Iterator[_VT]: ...
def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ...
+ def __contains__(self, o: object) -> bool: ...
class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]):
@abstractmethod
@@ -177,8 +179,12 @@ class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]):
def pop(self, k: _KT, default: _VT = ...) -> _VT: ...
def popitem(self) -> Tuple[_KT, _VT]: ...
def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
- def update(self, m: Union[Mapping[_KT, _VT],
- Iterable[Tuple[_KT, _VT]]]) -> None: ...
+ @overload
+ def update(self, m: Mapping[_KT, _VT]) -> None: ...
+ @overload
+ def update(self, m: Iterable[Tuple[_KT, _VT]]) -> None: ...
+
+Text = unicode
class IO(Iterable[AnyStr], Generic[AnyStr]):
# TODO detach
@@ -227,7 +233,7 @@ class IO(Iterable[AnyStr], Generic[AnyStr]):
@abstractmethod
def __enter__(self) -> 'IO[AnyStr]': ...
@abstractmethod
- def __exit__(self, type, value, traceback) -> bool: ...
+ def __exit__(self, t: type, value: Any, traceback: Any) -> bool: ...
class BinaryIO(IO[str]):
# TODO readinto
diff --git a/typeshed/stdlib/2.7/urllib.pyi b/typeshed/stdlib/2.7/urllib.pyi
index 080f88a..8b129cf 100644
--- a/typeshed/stdlib/2.7/urllib.pyi
+++ b/typeshed/stdlib/2.7/urllib.pyi
@@ -128,7 +128,7 @@ def quote(s: str, safe=...) -> str: ...
def quote_plus(s: str, safe=...) -> str: ...
def urlencode(query: Union[Sequence[Tuple[Any, Any]], Mapping[Any, Any]], doseq=...) -> str: ...
-def getproxies() -> Mapping[str, str]: ... # type: Any
+def getproxies() -> Mapping[str, str]: ...
def proxy_bypass(host): ...
# Names in __all__ with no definition:
diff --git a/typeshed/stdlib/2.7/urllib2.pyi b/typeshed/stdlib/2.7/urllib2.pyi
index 966e6a8..471b493 100644
--- a/typeshed/stdlib/2.7/urllib2.pyi
+++ b/typeshed/stdlib/2.7/urllib2.pyi
@@ -15,8 +15,8 @@ class Request(object):
origin_req_host= ...
unredirected_hdrs = ...
- def __init__(self, url: str, data: str, headers: Dict[str, str],
- origin_req_host: str, unverifiable: bool) -> None: ...
+ def __init__(self, url: str, data: str = None, headers: Dict[str, str] = ...,
+ origin_req_host: str = None, unverifiable: bool = ...) -> None: ...
def __getattr__(self, attr): ...
def get_method(self) -> str: ...
def add_data(self, data) -> None: ...
@@ -33,12 +33,13 @@ class Request(object):
def add_header(self, key: str, val: str) -> None: ...
def add_unredirected_header(self, key: str, val: str) -> None: ...
def has_header(self, header_name: str) -> bool: ...
- def get_header(self, header_name: str, default: str) -> str: ...
+ def get_header(self, header_name: str, default: str = None) -> str: ...
def header_items(self): ...
class OpenerDirector(object): ...
-def urlopen(url, data, timeout): ...
+def urlopen(url, data=None, timeout=..., cafile=None, capath=None, cadefault=False,
+ context=None): ...
def install_opener(opener): ...
def build_opener(*handlers): ...
@@ -65,7 +66,7 @@ class HTTPRedirectHandler(BaseHandler):
class ProxyHandler(BaseHandler):
- def __init__(self, proxies): ...
+ def __init__(self, proxies=None): ...
def proxy_open(self, req, proxy, type): ...
class HTTPPasswordMgr:
@@ -78,7 +79,7 @@ class HTTPPasswordMgr:
class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): ...
class AbstractBasicAuthHandler:
- def __init__(self, password_mgr): ...
+ def __init__(self, password_mgr=None): ...
def reset_retry_count(self): ...
def http_error_auth_reqed(self, authreq, host, req, headers): ...
def retry_http_basic_auth(self, host, req, realm): ...
@@ -92,7 +93,7 @@ class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
def http_error_407(self, req, fp, code, msg, headers): ...
class AbstractDigestAuthHandler:
- def __init__(self, passwd): ...
+ def __init__(self, passwd=None): ...
def reset_retry_count(self): ...
def http_error_auth_reqed(self, auth_header, host, req, headers): ...
def retry_http_digest_auth(self, req, auth): ...
@@ -121,11 +122,12 @@ class HTTPHandler(AbstractHTTPHandler):
http_request = AbstractHTTPHandler.do_request_
class HTTPSHandler(AbstractHTTPHandler):
+ def __init__(self, debuglevel=0, context=None): ...
def https_open(self, req): ...
https_request = AbstractHTTPHandler.do_request_
class HTTPCookieProcessor(BaseHandler):
- def __init__(self, cookiejar): ...
+ def __init__(self, cookiejar=None): ...
def http_request(self, request): ...
def http_response(self, request, response): ...
diff --git a/typeshed/stdlib/2.7/weakref.pyi b/typeshed/stdlib/2.7/weakref.pyi
new file mode 100644
index 0000000..93fd9e8
--- /dev/null
+++ b/typeshed/stdlib/2.7/weakref.pyi
@@ -0,0 +1,74 @@
+# Stubs for weakref (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+import UserDict
+from _weakref import (
+ getweakrefcount as getweakrefcount,
+ getweakrefs as getweakrefs,
+ ref as ref,
+ proxy as proxy,
+ CallableProxyType as CallableProxyType,
+ ProxyType as ProxyType,
+ ReferenceType as ReferenceType
+)
+from _weakrefset import WeakSet as WeakSet
+from exceptions import ReferenceError as ReferenceError
+
+ProxyTypes = ... # type: Any
+
+class WeakValueDictionary(UserDict.UserDict):
+ def __init__(self, *args, **kw): ...
+ def __getitem__(self, key): ...
+ def __delitem__(self, key): ...
+ def __contains__(self, key): ...
+ def has_key(self, key): ...
+ def __setitem__(self, key, value): ...
+ def clear(self): ...
+ def copy(self): ...
+ __copy__ = ... # type: Any
+ def __deepcopy__(self, memo): ...
+ def get(self, key, default=None): ...
+ def items(self): ...
+ def iteritems(self): ...
+ def iterkeys(self): ...
+ __iter__ = ... # type: Any
+ def itervaluerefs(self): ...
+ def itervalues(self): ...
+ def popitem(self): ...
+ def pop(self, key, *args): ...
+ def setdefault(self, key, default=None): ...
+ def update(self, dict=None, **kwargs): ...
+ def valuerefs(self): ...
+ def values(self): ...
+
+class KeyedRef(ReferenceType):
+ key = ... # type: Any
+ def __new__(type, ob, callback, key): ...
+ def __init__(self, ob, callback, key): ...
+
+class WeakKeyDictionary(UserDict.UserDict):
+ data = ... # type: Any
+ def __init__(self, dict=None): ...
+ def __delitem__(self, key): ...
+ def __getitem__(self, key): ...
+ def __setitem__(self, key, value): ...
+ def copy(self): ...
+ __copy__ = ... # type: Any
+ def __deepcopy__(self, memo): ...
+ def get(self, key, default=None): ...
+ def has_key(self, key): ...
+ def __contains__(self, key): ...
+ def items(self): ...
+ def iteritems(self): ...
+ def iterkeyrefs(self): ...
+ def iterkeys(self): ...
+ __iter__ = ... # type: Any
+ def itervalues(self): ...
+ def keyrefs(self): ...
+ def keys(self): ...
+ def popitem(self): ...
+ def pop(self, key, *args): ...
+ def setdefault(self, key, default=None): ...
+ def update(self, dict=None, **kwargs): ...
diff --git a/typeshed/stdlib/3/bz2.pyi b/typeshed/stdlib/2.7/wsgiref/__init__.pyi
similarity index 100%
copy from typeshed/stdlib/3/bz2.pyi
copy to typeshed/stdlib/2.7/wsgiref/__init__.pyi
diff --git a/typeshed/stdlib/2.7/wsgiref/validate.pyi b/typeshed/stdlib/2.7/wsgiref/validate.pyi
new file mode 100644
index 0000000..1fb10b4
--- /dev/null
+++ b/typeshed/stdlib/2.7/wsgiref/validate.pyi
@@ -0,0 +1,47 @@
+# Stubs for wsgiref.validate (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class WSGIWarning(Warning): ...
+
+def validator(application): ...
+
+class InputWrapper:
+ input = ... # type: Any
+ def __init__(self, wsgi_input): ...
+ def read(self, *args): ...
+ def readline(self): ...
+ def readlines(self, *args): ...
+ def __iter__(self): ...
+ def close(self): ...
+
+class ErrorWrapper:
+ errors = ... # type: Any
+ def __init__(self, wsgi_errors): ...
+ def write(self, s): ...
+ def flush(self): ...
+ def writelines(self, seq): ...
+ def close(self): ...
+
+class WriteWrapper:
+ writer = ... # type: Any
+ def __init__(self, wsgi_writer): ...
+ def __call__(self, s): ...
+
+class PartialIteratorWrapper:
+ iterator = ... # type: Any
+ def __init__(self, wsgi_iterator): ...
+ def __iter__(self): ...
+
+class IteratorWrapper:
+ original_iterator = ... # type: Any
+ iterator = ... # type: Any
+ closed = ... # type: Any
+ check_start_response = ... # type: Any
+ def __init__(self, wsgi_iterator, check_start_response): ...
+ def __iter__(self): ...
+ def next(self): ...
+ def close(self): ...
+ def __del__(self): ...
diff --git a/typeshed/stdlib/2.7/xml/etree/ElementInclude.pyi b/typeshed/stdlib/2.7/xml/etree/ElementInclude.pyi
new file mode 100644
index 0000000..a9e04f2
--- /dev/null
+++ b/typeshed/stdlib/2.7/xml/etree/ElementInclude.pyi
@@ -0,0 +1,19 @@
+# Stubs for xml.etree.ElementInclude (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Union, Optional, Callable
+from .ElementTree import Element
+
+XINCLUDE = ... # type: str
+XINCLUDE_INCLUDE = ... # type: str
+XINCLUDE_FALLBACK = ... # type: str
+
+class FatalIncludeError(SyntaxError): ...
+
+def default_loader(href: Union[str, bytes, int], parse: str, encoding: Optional[str]=...) -> Union[str, Element]: ...
+
+# TODO: loader is of type default_loader ie it takes a callable that has the
+# same signature as default_loader. But default_loader has a keyword argument
+# Which can't be represented using Callable...
+def include(elem: Element, loader: Callable[..., Union[str, Element]]=...) -> None: ...
diff --git a/typeshed/stdlib/2.7/xml/etree/ElementPath.pyi b/typeshed/stdlib/2.7/xml/etree/ElementPath.pyi
new file mode 100644
index 0000000..e17e5bb
--- /dev/null
+++ b/typeshed/stdlib/2.7/xml/etree/ElementPath.pyi
@@ -0,0 +1,35 @@
+# Stubs for xml.etree.ElementPath (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Pattern, Dict, Generator, Tuple, List, Union, TypeVar, Callable, Optional
+from .ElementTree import Element
+
+xpath_tokenizer_re = ... # type: Pattern
+
+_token = Tuple[str, str]
+_next = Callable[[], _token]
+_callback = Callable[['_SelectorContext', List[Element]], Generator[Element, None, None]]
+
+def xpath_tokenizer(pattern: str, namespaces: Dict[str, str]=...) -> Generator[_token, None, None]: ...
+def get_parent_map(context: '_SelectorContext') -> Dict[Element, Element]: ...
+def prepare_child(next: _next, token: _token) -> _callback: ...
+def prepare_star(next: _next, token: _token) -> _callback: ...
+def prepare_self(next: _next, token: _token) -> _callback: ...
+def prepare_descendant(next: _next, token: _token) -> _callback: ...
+def prepare_parent(next: _next, token: _token) -> _callback: ...
+def prepare_predicate(next: _next, token: _token) -> _callback: ...
+
+ops = ... # type: Dict[str, Callable[[_next, _token], _callback]]
+
+class _SelectorContext:
+ parent_map = ... # type: Dict[Element, Element]
+ root = ... # type: Element
+ def __init__(self, root: Element) -> None: ...
+
+_T = TypeVar('_T')
+
+def iterfind(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+def find(elem: Element, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
+def findall(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+def findtext(elem: Element, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
diff --git a/typeshed/stdlib/2.7/xml/etree/ElementTree.pyi b/typeshed/stdlib/2.7/xml/etree/ElementTree.pyi
new file mode 100644
index 0000000..b212430
--- /dev/null
+++ b/typeshed/stdlib/2.7/xml/etree/ElementTree.pyi
@@ -0,0 +1,116 @@
+# Stubs for xml.etree.ElementTree (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, AnyStr, Union, IO, Callable, Dict, List, Tuple, Sequence, Iterator, TypeVar, Optional, Generator
+import io
+
+VERSION = ... # type: str
+
+_Ss = TypeVar('_Ss', str, bytes)
+_T = TypeVar('_T')
+_str_or_bytes = Union[str, bytes]
+
+class _SimpleElementPath:
+ def find(self, element: 'Element', tag: _str_or_bytes, namespaces: Any=...) -> Optional['Element']: ...
+ def findtext(self, element: 'Element', tag: _str_or_bytes, default: _T=..., namespaces: Any=...) -> Union[str, bytes, _T]: ...
+ def iterfind(self, element: 'Element', tag: _str_or_bytes, namespaces: Any=...) -> Generator['Element', None, None]: ...
+ def findall(self, element: 'Element', tag: _str_or_bytes, namespaces: Any=...) -> List['Element']: ...
+
+class ParseError(SyntaxError): ...
+
+def iselement(element: 'Element') -> bool: ...
+
+class Element:
+ tag = ... # type: _str_or_bytes
+ attrib = ... # type: Dict[_str_or_bytes, _str_or_bytes]
+ text = ... # type: Optional[_str_or_bytes]
+ tail = ... # type: Optional[_str_or_bytes]
+ def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> None: ...
+ def append(self, element: 'Element') -> None: ...
+ def clear(self) -> None: ...
+ def copy(self) -> 'Element': ...
+ def extend(self, elements: Sequence['Element']) -> None: ...
+ def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional['Element']: ...
+ def findall(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
+ def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
+ def get(self, key: AnyStr, default: _T=...) -> Union[AnyStr, _T]: ...
+ def getchildren(self) -> List['Element']: ...
+ def getiterator(self, tag: Union[str, AnyStr]=...) -> List['Element']: ...
+ def insert(self, index: int, element: 'Element') -> None: ...
+ def items(self) -> List[Tuple[AnyStr, AnyStr]]: ...
+ def iter(self, tag: Union[str, AnyStr]=...) -> Generator['Element', None, None]: ...
+ def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
+ def itertext(self) -> Generator[str, None, None]: ...
+ def keys(self) -> List[AnyStr]: ...
+ def makeelement(self, tag: _Ss, attrib: Dict[_Ss, _Ss]) -> 'Element': ...
+ def remove(self, element: 'Element') -> None: ...
+ def set(self, key: AnyStr, value: AnyStr) -> None: ...
+ def __bool__(self) -> bool: ...
+ def __delitem__(self, index: int) -> None: ...
+ def __getitem__(self, index) -> 'Element': ...
+ def __len__(self) -> int: ...
+ def __setitem__(self, index: int, element: 'Element') -> None: ...
+
+def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> Element: ...
+def Comment(text: _str_or_bytes=...) -> Element: ...
+def ProcessingInstruction(target: str, text: str=...) -> Element: ...
+
+PI = ... # type: Callable[..., Element]
+
+class QName:
+ text = ... # type: str
+ def __init__(self, text_or_uri: str, tag: str=...) -> None: ...
+
+
+_file_or_filename = Union[str, bytes, int, IO[Any]]
+
+class ElementTree:
+ def __init__(self, element: Element=..., file: _file_or_filename=...) -> None: ...
+ def getroot(self) -> Element: ...
+ def parse(self, source: _file_or_filename, parser: 'XMLParser'=...) -> Element: ...
+ def iter(self, tag: Union[str, AnyStr]=...) -> Generator[Element, None, None]: ...
+ def getiterator(self, tag: Union[str, AnyStr]=...) -> List[Element]: ...
+ def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
+ def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
+ def findall(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+ def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+ def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: str=..., method: str=...) -> None: ...
+ def write_c14n(self, file: _file_or_filename) -> None: ...
+
+def register_namespace(prefix: str, uri: str) -> None: ...
+def tostring(element: Element, encoding: str=..., method: str=...) -> str: ...
+def tostringlist(element: Element, encoding: str=..., method: str=...) -> List[str]: ...
+def dump(elem: Element) -> None: ...
+def parse(source: _file_or_filename, parser: 'XMLParser'=...) -> ElementTree: ...
+def iterparse(source: _file_or_filename, events: Sequence[str]=..., parser: 'XMLParser'=...) -> Iterator[Tuple[str, Element]]: ...
+
+class _IterParseIterator:
+ root = ... # type: Any
+ def __init__(self, source: _file_or_filename, events: Sequence[str], parser: 'XMLParser', close_source: bool=...) -> None: ...
+ def next(self) -> Tuple[str, Element]: ...
+ def __iter__(self) -> _IterParseIterator: ...
+
+def XML(text: AnyStr, parser: 'XMLParser'=...) -> Element: ...
+def XMLID(text: AnyStr, parser: 'XMLParser'=...) -> Tuple[Element, Dict[str, Element]]: ...
+# TODO-improve this type
+fromstring = ... # type: Callable[..., Element]
+def fromstringlist(sequence: Sequence[AnyStr], parser: 'XMLParser'=...) -> Element: ...
+
+class TreeBuilder:
+ def __init__(self, element_factory: Callable[[AnyStr, Dict[AnyStr, AnyStr]], Element]=...) -> None: ...
+ def close(self) -> Element: ...
+ def data(self, data: AnyStr) -> None: ...
+ def start(self, tag: AnyStr, attrs: Dict[AnyStr, AnyStr]) -> Element: ...
+ def end(self, tag: AnyStr) -> Element: ...
+
+class XMLParser:
+ parser = ... # type: Any
+ target = ... # type: TreeBuilder
+ # TODO-what is entity used for???
+ entity = ... # type: Any
+ version = ... # type: str
+ def __init__(self, html: int=..., target: TreeBuilder=..., encoding: str=...) -> None: ...
+ def doctype(self, name: str, pubid: str, system: str) -> None: ...
+ def close(self) -> Any: ... # TODO-most of the time, this will be Element, but it can be anything target.close() returns
+ def feed(self, data: AnyStr)-> None: ...
diff --git a/typeshed/stdlib/3/bz2.pyi b/typeshed/stdlib/2.7/xml/etree/__init__.pyi
similarity index 100%
copy from typeshed/stdlib/3/bz2.pyi
copy to typeshed/stdlib/2.7/xml/etree/__init__.pyi
diff --git a/typeshed/stdlib/2.7/xml/etree/cElementTree.pyi b/typeshed/stdlib/2.7/xml/etree/cElementTree.pyi
new file mode 100644
index 0000000..a6f4274
--- /dev/null
+++ b/typeshed/stdlib/2.7/xml/etree/cElementTree.pyi
@@ -0,0 +1,5 @@
+# Stubs for xml.etree.cElementTree (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from xml.etree.ElementTree import *
diff --git a/typeshed/stdlib/2.7/zlib.pyi b/typeshed/stdlib/2.7/zlib.pyi
index d6a2c8c..d3c479e 100644
--- a/typeshed/stdlib/2.7/zlib.pyi
+++ b/typeshed/stdlib/2.7/zlib.pyi
@@ -1,7 +1,5 @@
# Stubs for zlib (Python 2.7)
-class error(Exception): ...
-
DEFLATED = ... # type: int
DEF_MEM_LEVEL = ... # type: int
MAX_WBITS = ... # type: int
@@ -17,20 +15,28 @@ Z_HUFFMAN_ONLY = ... # type: int
Z_NO_FLUSH = ... # type: int
Z_SYNC_FLUSH = ... # type: int
-def adler32(data: str, value: int = ...) -> int: ...
-def compress(data: str, level: int = ...) -> str: ...
-def crc32(data: str, value: int = ...) -> int: ...
-def decompress(data: str, wbits: int = ..., bufsize: int = ...) -> str: ...
-class compressobj:
- def __init__(self, level: int = ..., method: int = ..., wbits: int = ..., memlevel: int = ...,
- strategy: int = ...) -> None: ...
- def copy(self) -> "compressobj": ...
+class error(Exception): ...
+
+
+class Compress:
def compress(self, data: str) -> str: ...
- def flush(self) -> None: ...
+ def flush(self) -> str: ...
+ def copy(self) -> "Compress": ...
+
-class decompressobj:
- def __init__(self, wbits: int = ...) -> None: ...
- def copy(self) -> "decompressobj": ...
+class Decompress:
+ unused_data = ... # type: str
+ unconsumed_tail = ... # type: str
def decompress(self, data: str, max_length: int = ...) -> str: ...
- def flush(self) -> None: ...
+ def flush(self) -> str: ...
+ def copy(self) -> "Decompress": ...
+
+
+def adler32(data: str, value: int = ...) -> int: ...
+def compress(data: str, level: int = ...) -> str: ...
+def compressobj(level: int = ..., method: int = ..., wbits: int = ...,
+ memlevel: int = ..., strategy: int = ...) -> Compress: ...
+def crc32(data: str, value: int = ...) -> int: ...
+def decompress(data: str, wbits: int = ..., bufsize: int = ...) -> str: ...
+def decompressobj(wbits: int = ...) -> Decompress: ...
diff --git a/typeshed/stdlib/2and3/bz2.pyi b/typeshed/stdlib/2and3/bz2.pyi
new file mode 100644
index 0000000..31db7b7
--- /dev/null
+++ b/typeshed/stdlib/2and3/bz2.pyi
@@ -0,0 +1,6 @@
+# Stubs for bz2
+
+# TODO: This stub is incomplete
+
+def compress(data: bytes, compresslevel: int = ...) -> bytes: ...
+def decompress(data: bytes) -> bytes: ...
diff --git a/typeshed/stdlib/2and3/math.pyi b/typeshed/stdlib/2and3/math.pyi
index 44fb75c..2595566 100644
--- a/typeshed/stdlib/2and3/math.pyi
+++ b/typeshed/stdlib/2and3/math.pyi
@@ -7,6 +7,9 @@ import sys
e = ... # type: float
pi = ... # type: float
+if sys.version_info >= (3, 5):
+ inf = ... # type: float
+ nan = ... # type: float
def acos(x: float) -> float: ...
def acosh(x: float) -> float: ...
@@ -31,7 +34,11 @@ def fmod(x: float, y: float) -> float: ...
def frexp(x: float) -> Tuple[float, int]: ...
def fsum(iterable: Iterable) -> float: ...
def gamma(x: float) -> float: ...
+if sys.version_info >= (3, 5):
+ def gcd(a: int, b: int) -> int: ...
def hypot(x: float, y: float) -> float: ...
+if sys.version_info >= (3, 5):
+ def isclose(a: float, b: float, rel_tol: float = ..., abs_tol: float = ...) -> bool: ...
def isinf(x: float) -> bool: ...
if sys.version_info[0] >= 3:
def isfinite(x: float) -> bool: ...
@@ -41,6 +48,8 @@ def lgamma(x: float) -> float: ...
def log(x: float, base: float = ...) -> float: ...
def log10(x: float) -> float: ...
def log1p(x: float) -> float: ...
+if sys.version_info >= (3, 3):
+ def log2(x: float) -> float: ...
def modf(x: float) -> Tuple[float, float]: ...
def pow(x: float, y: float) -> float: ...
def radians(x: float) -> float: ...
diff --git a/typeshed/stdlib/2and3/operator.pyi b/typeshed/stdlib/2and3/operator.pyi
index ae5c4ba..125ec67 100644
--- a/typeshed/stdlib/2and3/operator.pyi
+++ b/typeshed/stdlib/2and3/operator.pyi
@@ -1,6 +1,6 @@
# Stubs for operator
-from typing import Any, Callable
+from typing import Any, Callable, overload, Tuple
def __abs__(a: Any) -> Any: ...
def __add__(a: Any, b: Any) -> Any: ...
@@ -114,14 +114,22 @@ def truediv(a: Any, b: Any) -> Any: ...
def truth(x: Any) -> bool: ...
def xor(a: Any, b: Any) -> Any: ...
-# Unsupported feature: "If more than one attribute is requested,
-# returns a tuple of attributes."
+# Unsupported: more than 3 attributes.
# Unsupported: on Python 2 the parameter type should be `basestring`.
-def attrgetter(attr: str) -> Callable[[Any], Any]: ...
+@overload
+def attrgetter(attr1: str) -> Callable[[Any], Any]: ...
+@overload
+def attrgetter(attr1: str, attr2: str) -> Callable[[Any], Tuple[Any, Any]]: ...
+@overload
+def attrgetter(attr1: str, attr2: str, attr3: str) -> Callable[[Any], Tuple[Any, Any, Any]]: ...
-# Unsupported feature: "If multiple items are specified, returns a
-# tuple of lookup values."
-def itemgetter(item: Any) -> Callable[[Any], Any]: ...
+# Unsupported: more than 3 items.
+@overload
+def itemgetter(item1: Any) -> Callable[[Any], Any]: ...
+@overload
+def itemgetter(item1: Any, item2: Any) -> Callable[[Any], Tuple[Any, Any]]: ...
+@overload
+def itemgetter(item1: Any, item2: Any, item3: Any) -> Callable[[Any], Tuple[Any, Any, Any]]: ...
# Unsupported: on Python 2 the parameter type should be `basestring`.
def methodcaller(name: str, *args, **kwargs) -> Callable[[Any], Any]: ...
diff --git a/typeshed/stdlib/3/warnings.pyi b/typeshed/stdlib/2and3/warnings.pyi
similarity index 100%
rename from typeshed/stdlib/3/warnings.pyi
rename to typeshed/stdlib/2and3/warnings.pyi
diff --git a/typeshed/stdlib/3/bz2.pyi b/typeshed/stdlib/3.2/xml/__init__.pyi
similarity index 100%
copy from typeshed/stdlib/3/bz2.pyi
copy to typeshed/stdlib/3.2/xml/__init__.pyi
diff --git a/typeshed/stdlib/3.2/xml/etree/ElementInclude.pyi b/typeshed/stdlib/3.2/xml/etree/ElementInclude.pyi
new file mode 100644
index 0000000..a9e04f2
--- /dev/null
+++ b/typeshed/stdlib/3.2/xml/etree/ElementInclude.pyi
@@ -0,0 +1,19 @@
+# Stubs for xml.etree.ElementInclude (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Union, Optional, Callable
+from .ElementTree import Element
+
+XINCLUDE = ... # type: str
+XINCLUDE_INCLUDE = ... # type: str
+XINCLUDE_FALLBACK = ... # type: str
+
+class FatalIncludeError(SyntaxError): ...
+
+def default_loader(href: Union[str, bytes, int], parse: str, encoding: Optional[str]=...) -> Union[str, Element]: ...
+
+# TODO: loader is of type default_loader ie it takes a callable that has the
+# same signature as default_loader. But default_loader has a keyword argument
+# Which can't be represented using Callable...
+def include(elem: Element, loader: Callable[..., Union[str, Element]]=...) -> None: ...
diff --git a/typeshed/stdlib/3.2/xml/etree/ElementPath.pyi b/typeshed/stdlib/3.2/xml/etree/ElementPath.pyi
new file mode 100644
index 0000000..e17e5bb
--- /dev/null
+++ b/typeshed/stdlib/3.2/xml/etree/ElementPath.pyi
@@ -0,0 +1,35 @@
+# Stubs for xml.etree.ElementPath (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Pattern, Dict, Generator, Tuple, List, Union, TypeVar, Callable, Optional
+from .ElementTree import Element
+
+xpath_tokenizer_re = ... # type: Pattern
+
+_token = Tuple[str, str]
+_next = Callable[[], _token]
+_callback = Callable[['_SelectorContext', List[Element]], Generator[Element, None, None]]
+
+def xpath_tokenizer(pattern: str, namespaces: Dict[str, str]=...) -> Generator[_token, None, None]: ...
+def get_parent_map(context: '_SelectorContext') -> Dict[Element, Element]: ...
+def prepare_child(next: _next, token: _token) -> _callback: ...
+def prepare_star(next: _next, token: _token) -> _callback: ...
+def prepare_self(next: _next, token: _token) -> _callback: ...
+def prepare_descendant(next: _next, token: _token) -> _callback: ...
+def prepare_parent(next: _next, token: _token) -> _callback: ...
+def prepare_predicate(next: _next, token: _token) -> _callback: ...
+
+ops = ... # type: Dict[str, Callable[[_next, _token], _callback]]
+
+class _SelectorContext:
+ parent_map = ... # type: Dict[Element, Element]
+ root = ... # type: Element
+ def __init__(self, root: Element) -> None: ...
+
+_T = TypeVar('_T')
+
+def iterfind(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+def find(elem: Element, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
+def findall(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+def findtext(elem: Element, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
diff --git a/typeshed/stdlib/3.2/xml/etree/ElementTree.pyi b/typeshed/stdlib/3.2/xml/etree/ElementTree.pyi
new file mode 100644
index 0000000..a78606d
--- /dev/null
+++ b/typeshed/stdlib/3.2/xml/etree/ElementTree.pyi
@@ -0,0 +1,120 @@
+# Stubs for xml.etree.ElementTree (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, AnyStr, Union, IO, Callable, Dict, List, Tuple, Sequence, Iterator, TypeVar, Optional, KeysView, ItemsView, Generator
+import io
+
+VERSION = ... # type: str
+
+_Ss = TypeVar('_Ss', str, bytes)
+_T = TypeVar('_T')
+_str_or_bytes = Union[str, bytes]
+
+class _SimpleElementPath:
+ def find(self, element: 'Element', tag: _str_or_bytes, namespaces: Any=...) -> Optional['Element']: ...
+ def findtext(self, element: 'Element', tag: _str_or_bytes, default: _T=..., namespaces: Any=...) -> Union[str, bytes, _T]: ...
+ def iterfind(self, element: 'Element', tag: _str_or_bytes, namespaces: Any=...) -> Generator['Element', None, None]: ...
+ def findall(self, element: 'Element', tag: _str_or_bytes, namespaces: Any=...) -> List['Element']: ...
+
+class ParseError(SyntaxError): ...
+
+def iselement(element: 'Element') -> bool: ...
+
+class Element:
+ tag = ... # type: _str_or_bytes
+ attrib = ... # type: Dict[_str_or_bytes, _str_or_bytes]
+ text = ... # type: Optional[_str_or_bytes]
+ tail = ... # type: Optional[_str_or_bytes]
+ def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> None: ...
+ def append(self, subelement: 'Element') -> None: ...
+ def clear(self) -> None: ...
+ def copy(self) -> 'Element': ...
+ def extend(self, elements: Sequence['Element']) -> None: ...
+ def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional['Element']: ...
+ def findall(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
+ def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
+ def get(self, key: AnyStr, default: _T=...) -> Union[AnyStr, _T]: ...
+ def getchildren(self) -> List['Element']: ...
+ def getiterator(self, tag: Union[str, AnyStr]=...) -> List['Element']: ...
+ def insert(self, index: int, subelement: 'Element') -> None: ...
+ def items(self) -> ItemsView[AnyStr, AnyStr]: ...
+ def iter(self, tag: Union[str, AnyStr]=...) -> Generator['Element', None, None]: ...
+ def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
+ def itertext(self) -> Generator[str, None, None]: ...
+ def keys(self) -> KeysView[AnyStr]: ...
+ def makeelement(self, tag: _Ss, attrib: Dict[_Ss, _Ss]) -> 'Element': ...
+ def remove(self, subelement: 'Element') -> None: ...
+ def set(self, key: AnyStr, value: AnyStr) -> None: ...
+ def __bool__(self) -> bool: ...
+ def __delitem__(self, index: int) -> None: ...
+ def __getitem__(self, index) -> 'Element': ...
+ def __len__(self) -> int: ...
+ def __setitem__(self, index: int, element: 'Element') -> None: ...
+
+def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> Element: ...
+def Comment(text: _str_or_bytes=...) -> Element: ...
+def ProcessingInstruction(target: str, text: str=...) -> Element: ...
+
+PI = ... # type: Callable[..., Element]
+
+class QName:
+ text = ... # type: str
+ def __init__(self, text_or_uri: str, tag: str=...) -> None: ...
+
+
+_file_or_filename = Union[str, bytes, int, IO[Any]]
+
+class ElementTree:
+ def __init__(self, element: Element=..., file: _file_or_filename=...) -> None: ...
+ def getroot(self) -> Element: ...
+ def parse(self, source: _file_or_filename, parser: 'XMLParser'=...) -> Element: ...
+ def iter(self, tag: Union[str, AnyStr]=...) -> Generator[Element, None, None]: ...
+ def getiterator(self, tag: Union[str, AnyStr]=...) -> List[Element]: ...
+ def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
+ def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
+ def findall(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+ def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+ def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: str=..., method: str=...) -> None: ...
+ def write_c14n(self, file: _file_or_filename) -> None: ...
+
+def register_namespace(prefix: str, uri: str) -> None: ...
+def tostring(element: Element, encoding: str=..., method: str=...) -> str: ...
+
+def tostringlist(element: Element, encoding: str=..., method: str=...) -> List[str]: ...
+def dump(elem: Element) -> None: ...
+def parse(source: _file_or_filename, parser: 'XMLParser'=...) -> ElementTree: ...
+def iterparse(source: _file_or_filename, events: Sequence[str]=..., parser: 'XMLParser'=...) -> Iterator[Tuple[str, Element]]: ...
+
+
+class _IterParseIterator:
+ root = ... # type: Any
+ def __init__(self, source: _file_or_filename, events: Sequence[str], parser: 'XMLParser', close_source: bool=...) -> None: ...
+ def __next__(self) -> Tuple[str, Element]: ...
+ def __iter__(self) -> _IterParseIterator: ...
+
+def XML(text: AnyStr, parser: 'XMLParser'=...) -> Element: ...
+def XMLID(text: AnyStr, parser: 'XMLParser'=...) -> Tuple[Element, Dict[str, Element]]: ...
+
+# TODO-improve this type
+fromstring = ... # type: Callable[..., Element]
+
+def fromstringlist(sequence: Sequence[AnyStr], parser: 'XMLParser'=...) -> Element: ...
+
+class TreeBuilder:
+ def __init__(self, element_factory: Callable[[AnyStr, Dict[AnyStr, AnyStr]], Element]=...) -> None: ...
+ def close(self) -> Element: ...
+ def data(self, data: AnyStr) -> None: ...
+ def start(self, tag: AnyStr, attrs: Dict[AnyStr, AnyStr]) -> Element: ...
+ def end(self, tag: AnyStr) -> Element: ...
+
+class XMLParser:
+ parser = ... # type: Any
+ target = ... # type: TreeBuilder
+ # TODO-what is entity used for???
+ entity = ... # type: Any
+ version = ... # type: str
+ def __init__(self, html: int=..., target: TreeBuilder=..., encoding: str=...) -> None: ...
+ def doctype(self, name: str, pubid: str, system: str) -> None: ...
+ def close(self) -> Any: ... # TODO-most of the time, this will be Element, but it can be anything target.close() returns
+ def feed(self, data: AnyStr)-> None: ...
diff --git a/typeshed/stdlib/3/bz2.pyi b/typeshed/stdlib/3.2/xml/etree/__init__.pyi
similarity index 100%
copy from typeshed/stdlib/3/bz2.pyi
copy to typeshed/stdlib/3.2/xml/etree/__init__.pyi
diff --git a/typeshed/stdlib/3.2/xml/etree/cElementTree.pyi b/typeshed/stdlib/3.2/xml/etree/cElementTree.pyi
new file mode 100644
index 0000000..a6f4274
--- /dev/null
+++ b/typeshed/stdlib/3.2/xml/etree/cElementTree.pyi
@@ -0,0 +1,5 @@
+# Stubs for xml.etree.cElementTree (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from xml.etree.ElementTree import *
diff --git a/typeshed/stdlib/3/bz2.pyi b/typeshed/stdlib/3.3/xml/__init__.pyi
similarity index 100%
copy from typeshed/stdlib/3/bz2.pyi
copy to typeshed/stdlib/3.3/xml/__init__.pyi
diff --git a/typeshed/stdlib/3.3/xml/etree/ElementInclude.pyi b/typeshed/stdlib/3.3/xml/etree/ElementInclude.pyi
new file mode 100644
index 0000000..a9e04f2
--- /dev/null
+++ b/typeshed/stdlib/3.3/xml/etree/ElementInclude.pyi
@@ -0,0 +1,19 @@
+# Stubs for xml.etree.ElementInclude (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Union, Optional, Callable
+from .ElementTree import Element
+
+XINCLUDE = ... # type: str
+XINCLUDE_INCLUDE = ... # type: str
+XINCLUDE_FALLBACK = ... # type: str
+
+class FatalIncludeError(SyntaxError): ...
+
+def default_loader(href: Union[str, bytes, int], parse: str, encoding: Optional[str]=...) -> Union[str, Element]: ...
+
+# TODO: loader is of type default_loader ie it takes a callable that has the
+# same signature as default_loader. But default_loader has a keyword argument
+# Which can't be represented using Callable...
+def include(elem: Element, loader: Callable[..., Union[str, Element]]=...) -> None: ...
diff --git a/typeshed/stdlib/3.3/xml/etree/ElementPath.pyi b/typeshed/stdlib/3.3/xml/etree/ElementPath.pyi
new file mode 100644
index 0000000..e17e5bb
--- /dev/null
+++ b/typeshed/stdlib/3.3/xml/etree/ElementPath.pyi
@@ -0,0 +1,35 @@
+# Stubs for xml.etree.ElementPath (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Pattern, Dict, Generator, Tuple, List, Union, TypeVar, Callable, Optional
+from .ElementTree import Element
+
+xpath_tokenizer_re = ... # type: Pattern
+
+_token = Tuple[str, str]
+_next = Callable[[], _token]
+_callback = Callable[['_SelectorContext', List[Element]], Generator[Element, None, None]]
+
+def xpath_tokenizer(pattern: str, namespaces: Dict[str, str]=...) -> Generator[_token, None, None]: ...
+def get_parent_map(context: '_SelectorContext') -> Dict[Element, Element]: ...
+def prepare_child(next: _next, token: _token) -> _callback: ...
+def prepare_star(next: _next, token: _token) -> _callback: ...
+def prepare_self(next: _next, token: _token) -> _callback: ...
+def prepare_descendant(next: _next, token: _token) -> _callback: ...
+def prepare_parent(next: _next, token: _token) -> _callback: ...
+def prepare_predicate(next: _next, token: _token) -> _callback: ...
+
+ops = ... # type: Dict[str, Callable[[_next, _token], _callback]]
+
+class _SelectorContext:
+ parent_map = ... # type: Dict[Element, Element]
+ root = ... # type: Element
+ def __init__(self, root: Element) -> None: ...
+
+_T = TypeVar('_T')
+
+def iterfind(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+def find(elem: Element, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
+def findall(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+def findtext(elem: Element, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
diff --git a/typeshed/stdlib/3.3/xml/etree/ElementTree.pyi b/typeshed/stdlib/3.3/xml/etree/ElementTree.pyi
new file mode 100644
index 0000000..652322f
--- /dev/null
+++ b/typeshed/stdlib/3.3/xml/etree/ElementTree.pyi
@@ -0,0 +1,122 @@
+# Stubs for xml.etree.ElementTree (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, AnyStr, Union, IO, Callable, Dict, List, Tuple, Sequence, Iterator, TypeVar, Optional, KeysView, ItemsView, Generator
+import io
+
+VERSION = ... # type: str
+
+class ParseError(SyntaxError): ...
+
+def iselement(element: 'Element') -> bool: ...
+
+_Ss = TypeVar('_Ss', str, bytes)
+_T = TypeVar('_T')
+_str_or_bytes = Union[str, bytes]
+
+class Element:
+ tag = ... # type: _str_or_bytes
+ attrib = ... # type: Dict[_str_or_bytes, _str_or_bytes]
+ text = ... # type: Optional[_str_or_bytes]
+ tail = ... # type: Optional[_str_or_bytes]
+ def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> None: ...
+ def append(self, subelement: 'Element') -> None: ...
+ def clear(self) -> None: ...
+ def copy(self) -> 'Element': ...
+ def extend(self, elements: Sequence['Element']) -> None: ...
+ def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional['Element']: ...
+ def findall(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
+ def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
+ def get(self, key: AnyStr, default: _T=...) -> Union[AnyStr, _T]: ...
+ def getchildren(self) -> List['Element']: ...
+ def getiterator(self, tag: Union[str, AnyStr]=...) -> List['Element']: ...
+ def insert(self, index: int, subelement: 'Element') -> None: ...
+ def items(self) -> ItemsView[AnyStr, AnyStr]: ...
+ def iter(self, tag: Union[str, AnyStr]=...) -> Generator['Element', None, None]: ...
+ def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
+ def itertext(self) -> Generator[str, None, None]: ...
+ def keys(self) -> KeysView[AnyStr]: ...
+ def makeelement(self, tag: _Ss, attrib: Dict[_Ss, _Ss]) -> 'Element': ...
+ def remove(self, subelement: 'Element') -> None: ...
+ def set(self, key: AnyStr, value: AnyStr) -> None: ...
+ def __bool__(self) -> bool: ...
+ def __delitem__(self, index: int) -> None: ...
+ def __getitem__(self, index) -> 'Element': ...
+ def __len__(self) -> int: ...
+ def __setitem__(self, index: int, element: 'Element') -> None: ...
+
+def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> Element: ...
+def Comment(text: _str_or_bytes=...) -> Element: ...
+def ProcessingInstruction(target: str, text: str=...) -> Element: ...
+
+PI = ... # type: Callable[..., Element]
+
+class QName:
+ text = ... # type: str
+ def __init__(self, text_or_uri: str, tag: str=...) -> None: ...
+
+
+_file_or_filename = Union[str, bytes, int, IO[Any]]
+
+class ElementTree:
+ def __init__(self, element: Element=..., file: _file_or_filename=...) -> None: ...
+ def getroot(self) -> Element: ...
+ def parse(self, source: _file_or_filename, parser: 'XMLParser'=...) -> Element: ...
+ def iter(self, tag: Union[str, AnyStr]=...) -> Generator[Element, None, None]: ...
+ def getiterator(self, tag: Union[str, AnyStr]=...) -> List[Element]: ...
+ def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
+ def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
+ def findall(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+ def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+ def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: str=..., method: str=...) -> None: ...
+ def write_c14n(self, file: _file_or_filename) -> None: ...
+
+def register_namespace(prefix: str, uri: str) -> None: ...
+def tostring(element: Element, encoding: str=..., method: str=...) -> str: ...
+
+class _ListDataStream(io.BufferedIOBase):
+ lst = ... # type: List[str]
+ def __init__(self, lst) -> None: ...
+ def writable(self) -> bool: ...
+ def seekable(self) -> bool: ...
+ def write(self, b: str) -> None: ...
+ def tell(self) -> int: ...
+
+def tostringlist(element: Element, encoding: str=..., method: str=...) -> List[str]: ...
+def dump(elem: Element) -> None: ...
+def parse(source: _file_or_filename, parser: 'XMLParser'=...) -> ElementTree: ...
+def iterparse(source: _file_or_filename, events: Sequence[str]=..., parser: 'XMLParser'=...) -> Iterator[Tuple[str, Element]]: ...
+
+
+class _IterParseIterator:
+ root = ... # type: Any
+ def __init__(self, source: _file_or_filename, events: Sequence[str], parser: 'XMLParser', close_source: bool=...) -> None: ...
+ def __next__(self) -> Tuple[str, Element]: ...
+ def __iter__(self) -> _IterParseIterator: ...
+
+def XML(text: AnyStr, parser: 'XMLParser'=...) -> Element: ...
+def XMLID(text: AnyStr, parser: 'XMLParser'=...) -> Tuple[Element, Dict[str, Element]]: ...
+
+# TODO-improve this type
+fromstring = ... # type: Callable[..., Element]
+
+def fromstringlist(sequence: Sequence[AnyStr], parser: 'XMLParser'=...) -> Element: ...
+
+class TreeBuilder:
+ def __init__(self, element_factory: Callable[[AnyStr, Dict[AnyStr, AnyStr]], Element]=...) -> None: ...
+ def close(self) -> Element: ...
+ def data(self, data: AnyStr) -> None: ...
+ def start(self, tag: AnyStr, attrs: Dict[AnyStr, AnyStr]) -> Element: ...
+ def end(self, tag: AnyStr) -> Element: ...
+
+class XMLParser:
+ parser = ... # type: Any
+ target = ... # type: TreeBuilder
+ # TODO-what is entity used for???
+ entity = ... # type: Any
+ version = ... # type: str
+ def __init__(self, html: int=..., target: TreeBuilder=..., encoding: str=...) -> None: ...
+ def doctype(self, name: str, pubid: str, system: str) -> None: ...
+ def close(self) -> Any: ... # TODO-most of the time, this will be Element, but it can be anything target.close() returns
+ def feed(self, data: AnyStr)-> None: ...
diff --git a/typeshed/stdlib/3/bz2.pyi b/typeshed/stdlib/3.3/xml/etree/__init__.pyi
similarity index 100%
copy from typeshed/stdlib/3/bz2.pyi
copy to typeshed/stdlib/3.3/xml/etree/__init__.pyi
diff --git a/typeshed/stdlib/3.3/xml/etree/cElementTree.pyi b/typeshed/stdlib/3.3/xml/etree/cElementTree.pyi
new file mode 100644
index 0000000..a6f4274
--- /dev/null
+++ b/typeshed/stdlib/3.3/xml/etree/cElementTree.pyi
@@ -0,0 +1,5 @@
+# Stubs for xml.etree.cElementTree (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from xml.etree.ElementTree import *
diff --git a/typeshed/stdlib/3.4/asyncio/__init__.pyi b/typeshed/stdlib/3.4/asyncio/__init__.pyi
index e22015e..171ab01 100644
--- a/typeshed/stdlib/3.4/asyncio/__init__.pyi
+++ b/typeshed/stdlib/3.4/asyncio/__init__.pyi
@@ -1,9 +1,40 @@
"""The asyncio package, tracking PEP 3156."""
+from asyncio.coroutines import (
+ coroutine as coroutine,
+ iscoroutinefunction as iscoroutinefunction,
+ iscoroutine as iscoroutine,
+)
+from asyncio.protocols import (
+ BaseProtocol as BaseProtocol,
+ Protocol as Protocol,
+ DatagramProtocol as DatagramProtocol,
+ SubprocessProtocol as SubprocessProtocol,
+)
+from asyncio.streams import (
+ StreamReader as StreamReader,
+ StreamWriter as StreamWriter,
+ StreamReaderProtocol as StreamReaderProtocol,
+ open_connection as open_connection,
+ start_server as start_server,
+ IncompleteReadError as IncompleteReadError,
+ LimitOverrunError as LimitOverrunError,
+)
+from asyncio.subprocess import (
+ create_subprocess_exec as create_subprocess_exec,
+ create_subprocess_shell as create_subprocess_shell,
+)
+from asyncio.transports import (
+ BaseTransport as BaseTransport,
+ ReadTransport as ReadTransport,
+ WriteTransport as WriteTransport,
+ Transport as Transport,
+ DatagramTransport as DatagramTransport,
+ SubprocessTransport as SubprocessTransport,
+)
from asyncio.futures import (
Future as Future,
)
from asyncio.tasks import (
- coroutine as coroutine,
sleep as sleep,
Task as Task,
FIRST_COMPLETED as FIRST_COMPLETED,
@@ -15,6 +46,7 @@ from asyncio.tasks import (
from asyncio.events import (
AbstractEventLoopPolicy as AbstractEventLoopPolicy,
AbstractEventLoop as AbstractEventLoop,
+ AbstractServer as AbstractServer,
Handle as Handle,
get_event_loop as get_event_loop,
)
@@ -27,7 +59,4 @@ from asyncio.queues import (
QueueEmpty as QueueEmpty,
)
-__all__ = (futures.__all__ +
- tasks.__all__ +
- events.__all__ +
- queues.__all__)
+__all__ = ... # type: str
diff --git a/typeshed/stdlib/3.4/asyncio/coroutines.pyi b/typeshed/stdlib/3.4/asyncio/coroutines.pyi
new file mode 100644
index 0000000..455e36d
--- /dev/null
+++ b/typeshed/stdlib/3.4/asyncio/coroutines.pyi
@@ -0,0 +1,9 @@
+from typing import Callable, Any, TypeVar
+
+__all__ = ... # type: str
+
+_T = TypeVar('_T')
+
+def coroutine(func: _T) -> _T: ...
+def iscoroutinefunction(func: Callable[..., Any]) -> bool: ...
+def iscoroutine(obj: Any) -> bool: ...
diff --git a/typeshed/stdlib/3.4/asyncio/events.pyi b/typeshed/stdlib/3.4/asyncio/events.pyi
index afdd639..beaa637 100644
--- a/typeshed/stdlib/3.4/asyncio/events.pyi
+++ b/typeshed/stdlib/3.4/asyncio/events.pyi
@@ -1,16 +1,9 @@
from typing import Any, Awaitable, TypeVar, List, Callable, Tuple, Union, Dict, Generator
from abc import ABCMeta, abstractmethod
from asyncio.futures import Future
+from asyncio.coroutines import coroutine
-# __all__ = ['AbstractServer',
-# 'TimerHandle',
-# 'get_event_loop_policy', 'set_event_loop_policy',
-# 'set_event_loop', 'new_event_loop',
-# 'get_child_watcher', 'set_child_watcher',
-# ]
-
-
-__all__ = ['AbstractEventLoopPolicy', 'AbstractEventLoop', 'Handle', 'get_event_loop']
+__all__ = ... # type: str
_T = TypeVar('_T')
@@ -29,6 +22,10 @@ class Handle:
def cancel(self) -> None: ...
def _run(self) -> None: ...
+class AbstractServer:
+ def close(self) -> None: ...
+ @coroutine
+ def wait_closed(self) -> None: ...
class AbstractEventLoop(metaclass=ABCMeta):
@abstractmethod
diff --git a/typeshed/stdlib/3.4/asyncio/futures.pyi b/typeshed/stdlib/3.4/asyncio/futures.pyi
index 37e72a1..318f9c1 100644
--- a/typeshed/stdlib/3.4/asyncio/futures.pyi
+++ b/typeshed/stdlib/3.4/asyncio/futures.pyi
@@ -1,10 +1,7 @@
from typing import Any, Union, Callable, TypeVar, List, Generic, Iterable, Generator
-from asyncio.events import AbstractEventLoop
-# __all__ = ['CancelledError', 'TimeoutError',
-# 'InvalidStateError',
-# 'wrap_future',
-# ]
-__all__ = ['Future']
+from .events import AbstractEventLoop
+
+__all__ = ... # type: str
_T = TypeVar('_T')
diff --git a/typeshed/stdlib/3.4/asyncio/protocols.pyi b/typeshed/stdlib/3.4/asyncio/protocols.pyi
new file mode 100644
index 0000000..8cae805
--- /dev/null
+++ b/typeshed/stdlib/3.4/asyncio/protocols.pyi
@@ -0,0 +1,24 @@
+from typing import AnyStr
+
+__all__ = ... # type: str
+
+from asyncio import transports
+
+class BaseProtocol:
+ def connection_made(self, transport: transports.BaseTransport) -> None: ...
+ def connection_lost(self, exc: Exception) -> None: ...
+ def pause_writing(self) -> None: ...
+ def resume_writing(self) -> None: ...
+
+class Protocol(BaseProtocol):
+ def data_received(self, data: AnyStr) -> None: ...
+ def eof_received(self) -> bool: ...
+
+class DatagramProtocol(BaseProtocol):
+ def datagram_received(self, data: AnyStr, addr: str) -> None: ...
+ def error_received(self, exc: Exception) -> None: ...
+
+class SubprocessProtocol(BaseProtocol):
+ def pipe_data_received(self, fd: int, data: AnyStr) -> None: ...
+ def pipe_connection_lost(self, fd: int, exc: Exception) -> None: ...
+ def process_exited(self) -> None: ...
diff --git a/typeshed/stdlib/3.4/asyncio/queues.pyi b/typeshed/stdlib/3.4/asyncio/queues.pyi
index 720864a..edbb183 100644
--- a/typeshed/stdlib/3.4/asyncio/queues.pyi
+++ b/typeshed/stdlib/3.4/asyncio/queues.pyi
@@ -1,10 +1,9 @@
from typing import TypeVar, Generic
-__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'JoinableQueue',
- 'QueueFull', 'QueueEmpty']
+__all__ = ... # type: str
from asyncio.events import AbstractEventLoop
-from .tasks import coroutine
+from .coroutines import coroutine
from .futures import Future
diff --git a/typeshed/stdlib/3.4/asyncio/streams.pyi b/typeshed/stdlib/3.4/asyncio/streams.pyi
new file mode 100644
index 0000000..a30a950
--- /dev/null
+++ b/typeshed/stdlib/3.4/asyncio/streams.pyi
@@ -0,0 +1,101 @@
+from typing import Iterable, Tuple, Callable, Any, AnyStr
+
+ClientConnectedCallback = Callable[[Tuple[StreamReader, StreamWriter]], None]
+import socket
+
+from . import coroutines
+from . import events
+from . import protocols
+from . import transports
+
+__all__ = ... # type: str
+
+class IncompleteReadError(EOFError):
+ def __init__(self, partial: str, expected: int) -> None: ...
+
+class LimitOverrunError(Exception):
+ def __init__(self, message: str, consumed: int) -> None: ...
+
+@coroutines.coroutine
+def open_connection(
+ host: str = ...,
+ port: int = ...,
+ *,
+ loop: events.AbstractEventLoop = ...,
+ limit: int = ...,
+ **kwds: Any) -> Tuple[StreamReader, StreamWriter]: ...
+
+@coroutines.coroutine
+def start_server(
+ client_connected_cb: ClientConnectedCallback,
+ host: str = ...,
+ port: int = ...,
+ *,
+ loop: events.AbstractEventLoop = ...,
+ limit: int = ...,
+ **kwds: Any) -> events.AbstractServer: ...
+
+if hasattr(socket, 'AF_UNIX'):
+ @coroutines.coroutine
+ def open_unix_connection(
+ path: str = ...,
+ *,
+ loop: events.AbstractEventLoop = ...,
+ limit: int = ...,
+ **kwds: Any): ...
+
+ @coroutines.coroutine
+ def start_unix_server(
+ client_connected_cb: ClientConnectedCallback,
+ path: str = ...,
+ *,
+ loop: int = ...,
+ limit: int = ...,
+ **kwds: Any) -> events.AbstractServer: ...
+
+class FlowControlMixin(protocols.Protocol): ...
+
+class StreamReaderProtocol(FlowControlMixin, protocols.Protocol):
+ def __init__(self,
+ stream_reader: StreamReader,
+ client_connected_cb: ClientConnectedCallback = ...,
+ loop: events.AbstractEventLoop = ...) -> None: ...
+ def connection_made(self, transport: transports.BaseTransport) -> None: ...
+ def connection_lost(self, exc: Exception) -> None: ...
+ def data_received(self, data: AnyStr) -> None: ...
+ def eof_received(self) -> bool: ...
+
+class StreamWriter:
+ def __init__(self,
+ transport: transports.BaseTransport,
+ protocol: protocols.BaseProtocol,
+ reader: StreamReader,
+ loop: events.AbstractEventLoop) -> None: ...
+ @property
+ def transport(self) -> transports.BaseTransport: ...
+ def write(self, data: AnyStr) -> None: ...
+ def writelines(self, data: Iterable[str]) -> None: ...
+ def write_eof(self) -> None: ...
+ def can_write_eof(self) -> bool: ...
+ def close(self) -> None: ...
+ def get_extra_info(self, name: Any, default: Any = ...) -> Any: ...
+ def drain(self) -> None: ...
+
+class StreamReader:
+ def __init__(self,
+ limit: int = ...,
+ loop: events.AbstractEventLoop = ...) -> None: ...
+ def exception(self) -> Exception: ...
+ def set_exception(self, exc: Exception) -> None: ...
+ def set_transport(self, transport: transports.BaseTransport) -> None: ...
+ def feed_eof(self) -> None: ...
+ def at_eof(self) -> bool: ...
+ def feed_data(self, data: AnyStr): ...
+ @coroutines.coroutine
+ def readline(self) -> str: ...
+ @coroutines.coroutine
+ def readuntil(self, separator=b'\n') -> str: ...
+ @coroutines.coroutine
+ def read(self, n=-1) -> str: ...
+ @coroutines.coroutine
+ def readexactly(self, n) -> str: ...
diff --git a/typeshed/stdlib/3.4/asyncio/subprocess.pyi b/typeshed/stdlib/3.4/asyncio/subprocess.pyi
new file mode 100644
index 0000000..4ea6deb
--- /dev/null
+++ b/typeshed/stdlib/3.4/asyncio/subprocess.pyi
@@ -0,0 +1,60 @@
+from typing import Any, AnyStr, Tuple, Union
+
+__all__ = ... # type: str
+
+from asyncio import events
+from asyncio import protocols
+from asyncio import streams
+from asyncio import transports
+from asyncio.coroutines import coroutine
+
+
+PIPE = ... # type: int
+STDOUT = ... # type: int
+DEVNULL = ... # type: int
+
+class SubprocessStreamProtocol(streams.FlowControlMixin,
+ protocols.SubprocessProtocol):
+ def __init__(self, limit: int, loop: events.AbstractEventLoop) -> None: ...
+ def connection_made(self, transport: transports.BaseTransport) -> None: ...
+ def pipe_data_received(self, fd: int, data: AnyStr) -> None: ...
+ def pipe_connection_lost(self, fd: int, exc: Exception): ...
+ def process_exited(self) -> None: ...
+
+
+class Process:
+ def __init__(self,
+ transport: transports.BaseTransport,
+ protocol: protocols.BaseProtocol,
+ loop: events.AbstractEventLoop) -> None: ...
+ @property
+ def returncode(self) -> int: ...
+ @coroutine
+ def wait(self) -> int: ...
+ def send_signal(self, signal: int) -> None: ...
+ def terminate(self) -> None: ...
+ def kill(self) -> None: ...
+ @coroutine
+ def communicate(self, input: bytes = ...) -> Tuple[bytes, bytes]: ...
+
+
+ at coroutine
+def create_subprocess_shell(
+ *Args: Union[str, bytes], # Union used instead of AnyStr due to mypy issue #1236
+ stdin: int = ...,
+ stdout: int = ...,
+ stderr: int = ...,
+ loop: events.AbstractEventLoop = ...,
+ limit: int = ...,
+ **kwds: Any): ...
+
+ at coroutine
+def create_subprocess_exec(
+ program: Union[str, bytes], # Union used instead of AnyStr due to mypy issue #1236
+ *args: Any,
+ stdin: int = ...,
+ stdout: int = ...,
+ stderr: int = ...,
+ loop: events.AbstractEventLoop = ...,
+ limit: int = ...,
+ **kwds: Any) -> Process: ...
diff --git a/typeshed/stdlib/3.4/asyncio/tasks.pyi b/typeshed/stdlib/3.4/asyncio/tasks.pyi
index 3adebe1..b4649d3 100644
--- a/typeshed/stdlib/3.4/asyncio/tasks.pyi
+++ b/typeshed/stdlib/3.4/asyncio/tasks.pyi
@@ -1,20 +1,14 @@
from typing import Any, Iterable, TypeVar, Set, Dict, List, TextIO, Union, Tuple, Generic, Callable, Generator
-from asyncio.events import AbstractEventLoop
-from asyncio.futures import Future
-# __all__ = ['iscoroutinefunction', 'iscoroutine',
-# 'as_completed', 'async',
-# 'gather', 'shield',
-# ]
-__all__ = ['coroutine', 'Task', 'sleep',
- 'FIRST_COMPLETED', 'FIRST_EXCEPTION', 'ALL_COMPLETED',
- 'wait', 'wait_for']
+__all__ = ... # type: str
+
+from .events import AbstractEventLoop
+from .futures import Future
FIRST_EXCEPTION = 'FIRST_EXCEPTION'
FIRST_COMPLETED = 'FIRST_COMPLETED'
ALL_COMPLETED = 'ALL_COMPLETED'
_T = TypeVar('_T')
-def coroutine(f: _T) -> _T: ... # Here comes and go a function
def sleep(delay: float, result: _T = ..., loop: AbstractEventLoop = ...) -> Future[_T]: ...
def wait(fs: List[Task[_T]], *, loop: AbstractEventLoop = ...,
timeout: float = ..., return_when: str = ...) -> Future[Tuple[Set[Future[_T]], Set[Future[_T]]]]: ...
diff --git a/typeshed/stdlib/3.4/asyncio/transports.pyi b/typeshed/stdlib/3.4/asyncio/transports.pyi
new file mode 100644
index 0000000..b2f2191
--- /dev/null
+++ b/typeshed/stdlib/3.4/asyncio/transports.pyi
@@ -0,0 +1,37 @@
+from typing import Dict, Any, TypeVar, Mapping, List
+
+__all__ = ... # type: str
+
+class BaseTransport:
+ def __init__(self, extra: Mapping[Any, Any] = ...) -> None: ...
+ def get_extra_info(self, name: Any, default: Any = ...) -> Any: ...
+ def is_closing(self) -> bool: ...
+ def close(self) -> None: ...
+
+class ReadTransport(BaseTransport):
+ def pause_reading(self) -> None: ...
+ def resume_reading(self) -> None: ...
+
+class WriteTransport(BaseTransport):
+ def set_write_buffer_limits(
+ self, high: int = ..., low: int = ...) -> None: ...
+ def get_write_buffer_size(self) -> int: ...
+ def write(self, data: Any) -> None: ...
+ def writelines(self, list_of_data: List[Any]): ...
+ def write_eof(self) -> None: ...
+ def can_write_eof(self) -> bool: ...
+ def abort(self) -> None: ...
+
+class Transport(ReadTransport, WriteTransport): ...
+
+class DatagramTransport(BaseTransport):
+ def sendto(self, data: Any, addr: str = ...) -> None: ...
+ def abort(self) -> None: ...
+
+class SubprocessTransport(BaseTransport):
+ def get_pid(self) -> int: ...
+ def get_returncode(self) -> int: ...
+ def get_pipe_transport(self, fd: int) -> BaseTransport: ...
+ def send_signal(self, signal: int) -> int: ...
+ def terminate(self) -> None: ...
+ def kill(self) -> None: ...
diff --git a/typeshed/stdlib/3/bz2.pyi b/typeshed/stdlib/3.4/xml/__init__.pyi
similarity index 100%
copy from typeshed/stdlib/3/bz2.pyi
copy to typeshed/stdlib/3.4/xml/__init__.pyi
diff --git a/typeshed/stdlib/3.4/xml/etree/ElementInclude.pyi b/typeshed/stdlib/3.4/xml/etree/ElementInclude.pyi
new file mode 100644
index 0000000..a9e04f2
--- /dev/null
+++ b/typeshed/stdlib/3.4/xml/etree/ElementInclude.pyi
@@ -0,0 +1,19 @@
+# Stubs for xml.etree.ElementInclude (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Union, Optional, Callable
+from .ElementTree import Element
+
+XINCLUDE = ... # type: str
+XINCLUDE_INCLUDE = ... # type: str
+XINCLUDE_FALLBACK = ... # type: str
+
+class FatalIncludeError(SyntaxError): ...
+
+def default_loader(href: Union[str, bytes, int], parse: str, encoding: Optional[str]=...) -> Union[str, Element]: ...
+
+# TODO: loader is of type default_loader ie it takes a callable that has the
+# same signature as default_loader, but default_loader has a keyword argument,
+# which can't be represented using Callable...
+def include(elem: Element, loader: Callable[..., Union[str, Element]]=...) -> None: ...
diff --git a/typeshed/stdlib/3.4/xml/etree/ElementPath.pyi b/typeshed/stdlib/3.4/xml/etree/ElementPath.pyi
new file mode 100644
index 0000000..e17e5bb
--- /dev/null
+++ b/typeshed/stdlib/3.4/xml/etree/ElementPath.pyi
@@ -0,0 +1,35 @@
+# Stubs for xml.etree.ElementPath (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Pattern, Dict, Generator, Tuple, List, Union, TypeVar, Callable, Optional
+from .ElementTree import Element
+
+xpath_tokenizer_re = ... # type: Pattern
+
+_token = Tuple[str, str]
+_next = Callable[[], _token]
+_callback = Callable[['_SelectorContext', List[Element]], Generator[Element, None, None]]
+
+def xpath_tokenizer(pattern: str, namespaces: Dict[str, str]=...) -> Generator[_token, None, None]: ...
+def get_parent_map(context: '_SelectorContext') -> Dict[Element, Element]: ...
+def prepare_child(next: _next, token: _token) -> _callback: ...
+def prepare_star(next: _next, token: _token) -> _callback: ...
+def prepare_self(next: _next, token: _token) -> _callback: ...
+def prepare_descendant(next: _next, token: _token) -> _callback: ...
+def prepare_parent(next: _next, token: _token) -> _callback: ...
+def prepare_predicate(next: _next, token: _token) -> _callback: ...
+
+ops = ... # type: Dict[str, Callable[[_next, _token], _callback]]
+
+class _SelectorContext:
+ parent_map = ... # type: Dict[Element, Element]
+ root = ... # type: Element
+ def __init__(self, root: Element) -> None: ...
+
+_T = TypeVar('_T')
+
+def iterfind(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+def find(elem: Element, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
+def findall(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+def findtext(elem: Element, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
diff --git a/typeshed/stdlib/3.4/xml/etree/ElementTree.pyi b/typeshed/stdlib/3.4/xml/etree/ElementTree.pyi
new file mode 100644
index 0000000..73b49df
--- /dev/null
+++ b/typeshed/stdlib/3.4/xml/etree/ElementTree.pyi
@@ -0,0 +1,127 @@
+# Stubs for xml.etree.ElementTree (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, AnyStr, Union, IO, Callable, Dict, List, Tuple, Sequence, Iterator, TypeVar, Optional, KeysView, ItemsView, Generator
+import io
+
+VERSION = ... # type: str
+
+class ParseError(SyntaxError): ...
+
+def iselement(element: 'Element') -> bool: ...
+
+_Ss = TypeVar('_Ss', str, bytes)
+_T = TypeVar('_T')
+_str_or_bytes = Union[str, bytes]
+
+class Element:
+ tag = ... # type: _str_or_bytes
+ attrib = ... # type: Dict[_str_or_bytes, _str_or_bytes]
+ text = ... # type: Optional[_str_or_bytes]
+ tail = ... # type: Optional[_str_or_bytes]
+ def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> None: ...
+ def append(self, subelement: 'Element') -> None: ...
+ def clear(self) -> None: ...
+ def copy(self) -> 'Element': ...
+ def extend(self, elements: Sequence['Element']) -> None: ...
+ def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional['Element']: ...
+ def findall(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
+ def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
+ def get(self, key: AnyStr, default: _T=...) -> Union[AnyStr, _T]: ...
+ def getchildren(self) -> List['Element']: ...
+ def getiterator(self, tag: Union[str, AnyStr]=...) -> List['Element']: ...
+ def insert(self, index: int, subelement: 'Element') -> None: ...
+ def items(self) -> ItemsView[AnyStr, AnyStr]: ...
+ def iter(self, tag: Union[str, AnyStr]=...) -> Generator['Element', None, None]: ...
+ def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
+ def itertext(self) -> Generator[str, None, None]: ...
+ def keys(self) -> KeysView[AnyStr]: ...
+ def makeelement(self, tag: _Ss, attrib: Dict[_Ss, _Ss]) -> 'Element': ...
+ def remove(self, subelement: 'Element') -> None: ...
+ def set(self, key: AnyStr, value: AnyStr) -> None: ...
+ def __bool__(self) -> bool: ...
+ def __delitem__(self, index: int) -> None: ...
+ def __getitem__(self, index) -> 'Element': ...
+ def __len__(self) -> int: ...
+ def __setitem__(self, index: int, element: 'Element') -> None: ...
+
+def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> Element: ...
+def Comment(text: _str_or_bytes=...) -> Element: ...
+def ProcessingInstruction(target: str, text: str=...) -> Element: ...
+
+PI = ... # type: Callable[..., Element]
+
+class QName:
+ text = ... # type: str
+ def __init__(self, text_or_uri: str, tag: str=...) -> None: ...
+
+
+_file_or_filename = Union[str, bytes, int, IO[Any]]
+
+class ElementTree:
+ def __init__(self, element: Element=..., file: _file_or_filename=...) -> None: ...
+ def getroot(self) -> Element: ...
+ def parse(self, source: _file_or_filename, parser: 'XMLParser'=...) -> Element: ...
+ def iter(self, tag: Union[str, AnyStr]=...) -> Generator[Element, None, None]: ...
+ def getiterator(self, tag: Union[str, AnyStr]=...) -> List[Element]: ...
+ def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
+ def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
+ def findall(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+ def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+ def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: str=..., method: str=..., *, short_empty_elements: bool=...) -> None: ...
+ def write_c14n(self, file: _file_or_filename) -> None: ...
+
+def register_namespace(prefix: str, uri: str) -> None: ...
+def tostring(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> str: ...
+
+class _ListDataStream(io.BufferedIOBase):
+ lst = ... # type: List[str]
+ def __init__(self, lst) -> None: ...
+ def writable(self) -> bool: ...
+ def seekable(self) -> bool: ...
+ def write(self, b: str) -> None: ...
+ def tell(self) -> int: ...
+
+def tostringlist(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> List[str]: ...
+def dump(elem: Element) -> None: ...
+def parse(source: _file_or_filename, parser: 'XMLParser'=...) -> ElementTree: ...
+def iterparse(source: _file_or_filename, events: Sequence[str]=..., parser: 'XMLParser'=...) -> Iterator[Tuple[str, Element]]: ...
+
+class XMLPullParser:
+ def __init__(self, events: Sequence[str]=..., *, _parser: 'XMLParser'=...) -> None: ...
+ def feed(self, data: bytes) -> None: ...
+ def close(self) -> None: ...
+ def read_events(self) -> Iterator[Tuple[str, Element]]: ...
+
+class _IterParseIterator:
+ root = ... # type: Any
+ def __init__(self, source: _file_or_filename, events: Sequence[str], parser: 'XMLParser', close_source: bool=...) -> None: ...
+ def __next__(self) -> Tuple[str, Element]: ...
+ def __iter__(self) -> _IterParseIterator: ...
+
+def XML(text: AnyStr, parser: 'XMLParser'=...) -> Element: ...
+def XMLID(text: AnyStr, parser: 'XMLParser'=...) -> Tuple[Element, Dict[str, Element]]: ...
+
+# TODO: improve this type
+fromstring = ... # type: Callable[..., Element]
+
+def fromstringlist(sequence: Sequence[AnyStr], parser: 'XMLParser'=...) -> Element: ...
+
+class TreeBuilder:
+ def __init__(self, element_factory: Callable[[AnyStr, Dict[AnyStr, AnyStr]], Element]=...) -> None: ...
+ def close(self) -> Element: ...
+ def data(self, data: AnyStr) -> None: ...
+ def start(self, tag: AnyStr, attrs: Dict[AnyStr, AnyStr]) -> Element: ...
+ def end(self, tag: AnyStr) -> Element: ...
+
+class XMLParser:
+ parser = ... # type: Any
+ target = ... # type: TreeBuilder
+ # TODO: what is entity used for?
+ entity = ... # type: Any
+ version = ... # type: str
+ def __init__(self, html: int=..., target: TreeBuilder=..., encoding: str=...) -> None: ...
+ def doctype(self, name: str, pubid: str, system: str) -> None: ...
+ def close(self) -> Any: ... # TODO-most of the time, this will be Element, but it can be anything target.close() returns
+ def feed(self, data: AnyStr)-> None: ...
diff --git a/typeshed/stdlib/3/bz2.pyi b/typeshed/stdlib/3.4/xml/etree/__init__.pyi
similarity index 100%
copy from typeshed/stdlib/3/bz2.pyi
copy to typeshed/stdlib/3.4/xml/etree/__init__.pyi
diff --git a/typeshed/stdlib/3.4/xml/etree/cElementTree.pyi b/typeshed/stdlib/3.4/xml/etree/cElementTree.pyi
new file mode 100644
index 0000000..a6f4274
--- /dev/null
+++ b/typeshed/stdlib/3.4/xml/etree/cElementTree.pyi
@@ -0,0 +1,5 @@
+# Stubs for xml.etree.cElementTree (Python 3.4)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from xml.etree.ElementTree import *
diff --git a/typeshed/stdlib/3/bz2.pyi b/typeshed/stdlib/3.5/xml/__init__.pyi
similarity index 100%
copy from typeshed/stdlib/3/bz2.pyi
copy to typeshed/stdlib/3.5/xml/__init__.pyi
diff --git a/typeshed/stdlib/3.5/xml/etree/ElementInclude.pyi b/typeshed/stdlib/3.5/xml/etree/ElementInclude.pyi
new file mode 100644
index 0000000..a9e04f2
--- /dev/null
+++ b/typeshed/stdlib/3.5/xml/etree/ElementInclude.pyi
@@ -0,0 +1,19 @@
+# Stubs for xml.etree.ElementInclude (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Union, Optional, Callable
+from .ElementTree import Element
+
+XINCLUDE = ... # type: str
+XINCLUDE_INCLUDE = ... # type: str
+XINCLUDE_FALLBACK = ... # type: str
+
+class FatalIncludeError(SyntaxError): ...
+
+def default_loader(href: Union[str, bytes, int], parse: str, encoding: Optional[str]=...) -> Union[str, Element]: ...
+
+# TODO: loader is of type default_loader ie it takes a callable that has the
+# same signature as default_loader, but default_loader has a keyword argument,
+# which can't be represented using Callable...
+def include(elem: Element, loader: Callable[..., Union[str, Element]]=...) -> None: ...
diff --git a/typeshed/stdlib/3.5/xml/etree/ElementPath.pyi b/typeshed/stdlib/3.5/xml/etree/ElementPath.pyi
new file mode 100644
index 0000000..e17e5bb
--- /dev/null
+++ b/typeshed/stdlib/3.5/xml/etree/ElementPath.pyi
@@ -0,0 +1,35 @@
+# Stubs for xml.etree.ElementPath (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Pattern, Dict, Generator, Tuple, List, Union, TypeVar, Callable, Optional
+from .ElementTree import Element
+
+xpath_tokenizer_re = ... # type: Pattern
+
+_token = Tuple[str, str]
+_next = Callable[[], _token]
+_callback = Callable[['_SelectorContext', List[Element]], Generator[Element, None, None]]
+
+def xpath_tokenizer(pattern: str, namespaces: Dict[str, str]=...) -> Generator[_token, None, None]: ...
+def get_parent_map(context: '_SelectorContext') -> Dict[Element, Element]: ...
+def prepare_child(next: _next, token: _token) -> _callback: ...
+def prepare_star(next: _next, token: _token) -> _callback: ...
+def prepare_self(next: _next, token: _token) -> _callback: ...
+def prepare_descendant(next: _next, token: _token) -> _callback: ...
+def prepare_parent(next: _next, token: _token) -> _callback: ...
+def prepare_predicate(next: _next, token: _token) -> _callback: ...
+
+ops = ... # type: Dict[str, Callable[[_next, _token], _callback]]
+
+class _SelectorContext:
+ parent_map = ... # type: Dict[Element, Element]
+ root = ... # type: Element
+ def __init__(self, root: Element) -> None: ...
+
+_T = TypeVar('_T')
+
+def iterfind(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+def find(elem: Element, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
+def findall(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+def findtext(elem: Element, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
diff --git a/typeshed/stdlib/3.5/xml/etree/ElementTree.pyi b/typeshed/stdlib/3.5/xml/etree/ElementTree.pyi
new file mode 100644
index 0000000..73b49df
--- /dev/null
+++ b/typeshed/stdlib/3.5/xml/etree/ElementTree.pyi
@@ -0,0 +1,127 @@
+# Stubs for xml.etree.ElementTree (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, AnyStr, Union, IO, Callable, Dict, List, Tuple, Sequence, Iterator, TypeVar, Optional, KeysView, ItemsView, Generator
+import io
+
+VERSION = ... # type: str
+
+class ParseError(SyntaxError): ...
+
+def iselement(element: 'Element') -> bool: ...
+
+_Ss = TypeVar('_Ss', str, bytes)
+_T = TypeVar('_T')
+_str_or_bytes = Union[str, bytes]
+
+class Element:
+ tag = ... # type: _str_or_bytes
+ attrib = ... # type: Dict[_str_or_bytes, _str_or_bytes]
+ text = ... # type: Optional[_str_or_bytes]
+ tail = ... # type: Optional[_str_or_bytes]
+ def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> None: ...
+ def append(self, subelement: 'Element') -> None: ...
+ def clear(self) -> None: ...
+ def copy(self) -> 'Element': ...
+ def extend(self, elements: Sequence['Element']) -> None: ...
+ def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional['Element']: ...
+ def findall(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
+ def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
+ def get(self, key: AnyStr, default: _T=...) -> Union[AnyStr, _T]: ...
+ def getchildren(self) -> List['Element']: ...
+ def getiterator(self, tag: Union[str, AnyStr]=...) -> List['Element']: ...
+ def insert(self, index: int, subelement: 'Element') -> None: ...
+ def items(self) -> ItemsView[AnyStr, AnyStr]: ...
+ def iter(self, tag: Union[str, AnyStr]=...) -> Generator['Element', None, None]: ...
+ def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
+ def itertext(self) -> Generator[str, None, None]: ...
+ def keys(self) -> KeysView[AnyStr]: ...
+ def makeelement(self, tag: _Ss, attrib: Dict[_Ss, _Ss]) -> 'Element': ...
+ def remove(self, subelement: 'Element') -> None: ...
+ def set(self, key: AnyStr, value: AnyStr) -> None: ...
+ def __bool__(self) -> bool: ...
+ def __delitem__(self, index: int) -> None: ...
+ def __getitem__(self, index) -> 'Element': ...
+ def __len__(self) -> int: ...
+ def __setitem__(self, index: int, element: 'Element') -> None: ...
+
+def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> Element: ...
+def Comment(text: _str_or_bytes=...) -> Element: ...
+def ProcessingInstruction(target: str, text: str=...) -> Element: ...
+
+PI = ... # type: Callable[..., Element]
+
+class QName:
+ text = ... # type: str
+ def __init__(self, text_or_uri: str, tag: str=...) -> None: ...
+
+
+_file_or_filename = Union[str, bytes, int, IO[Any]]
+
+class ElementTree:
+ def __init__(self, element: Element=..., file: _file_or_filename=...) -> None: ...
+ def getroot(self) -> Element: ...
+ def parse(self, source: _file_or_filename, parser: 'XMLParser'=...) -> Element: ...
+ def iter(self, tag: Union[str, AnyStr]=...) -> Generator[Element, None, None]: ...
+ def getiterator(self, tag: Union[str, AnyStr]=...) -> List[Element]: ...
+ def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
+ def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
+ def findall(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+ def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+ def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: str=..., method: str=..., *, short_empty_elements: bool=...) -> None: ...
+ def write_c14n(self, file: _file_or_filename) -> None: ...
+
+def register_namespace(prefix: str, uri: str) -> None: ...
+def tostring(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> str: ...
+
+class _ListDataStream(io.BufferedIOBase):
+ lst = ... # type: List[str]
+ def __init__(self, lst) -> None: ...
+ def writable(self) -> bool: ...
+ def seekable(self) -> bool: ...
+ def write(self, b: str) -> None: ...
+ def tell(self) -> int: ...
+
+def tostringlist(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> List[str]: ...
+def dump(elem: Element) -> None: ...
+def parse(source: _file_or_filename, parser: 'XMLParser'=...) -> ElementTree: ...
+def iterparse(source: _file_or_filename, events: Sequence[str]=..., parser: 'XMLParser'=...) -> Iterator[Tuple[str, Element]]: ...
+
+class XMLPullParser:
+ def __init__(self, events: Sequence[str]=..., *, _parser: 'XMLParser'=...) -> None: ...
+ def feed(self, data: bytes) -> None: ...
+ def close(self) -> None: ...
+ def read_events(self) -> Iterator[Tuple[str, Element]]: ...
+
+class _IterParseIterator:
+ root = ... # type: Any
+ def __init__(self, source: _file_or_filename, events: Sequence[str], parser: 'XMLParser', close_source: bool=...) -> None: ...
+ def __next__(self) -> Tuple[str, Element]: ...
+ def __iter__(self) -> _IterParseIterator: ...
+
+def XML(text: AnyStr, parser: 'XMLParser'=...) -> Element: ...
+def XMLID(text: AnyStr, parser: 'XMLParser'=...) -> Tuple[Element, Dict[str, Element]]: ...
+
+# TODO: improve this type
+fromstring = ... # type: Callable[..., Element]
+
+def fromstringlist(sequence: Sequence[AnyStr], parser: 'XMLParser'=...) -> Element: ...
+
+class TreeBuilder:
+ def __init__(self, element_factory: Callable[[AnyStr, Dict[AnyStr, AnyStr]], Element]=...) -> None: ...
+ def close(self) -> Element: ...
+ def data(self, data: AnyStr) -> None: ...
+ def start(self, tag: AnyStr, attrs: Dict[AnyStr, AnyStr]) -> Element: ...
+ def end(self, tag: AnyStr) -> Element: ...
+
+class XMLParser:
+ parser = ... # type: Any
+ target = ... # type: TreeBuilder
+ # TODO: what is entity used for?
+ entity = ... # type: Any
+ version = ... # type: str
+ def __init__(self, html: int=..., target: TreeBuilder=..., encoding: str=...) -> None: ...
+ def doctype(self, name: str, pubid: str, system: str) -> None: ...
+ def close(self) -> Any: ... # TODO-most of the time, this will be Element, but it can be anything target.close() returns
+ def feed(self, data: AnyStr)-> None: ...
diff --git a/typeshed/stdlib/3/bz2.pyi b/typeshed/stdlib/3.5/xml/etree/__init__.pyi
similarity index 100%
copy from typeshed/stdlib/3/bz2.pyi
copy to typeshed/stdlib/3.5/xml/etree/__init__.pyi
diff --git a/typeshed/stdlib/3.5/xml/etree/cElementTree.pyi b/typeshed/stdlib/3.5/xml/etree/cElementTree.pyi
new file mode 100644
index 0000000..a6f4274
--- /dev/null
+++ b/typeshed/stdlib/3.5/xml/etree/cElementTree.pyi
@@ -0,0 +1,5 @@
+# Stubs for xml.etree.cElementTree (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from xml.etree.ElementTree import *
diff --git a/typeshed/stdlib/3/__future__.pyi b/typeshed/stdlib/3/__future__.pyi
index 01265e8..2414069 100644
--- a/typeshed/stdlib/3/__future__.pyi
+++ b/typeshed/stdlib/3/__future__.pyi
@@ -1,4 +1,8 @@
-class _Feature: ...
+from sys import _version_info
+
+class _Feature:
+ def getOptionalRelease(self) -> _version_info: ...
+ def getMandatoryRelease(self) -> _version_info: ...
absolute_import = ... # type: _Feature
division = ... # type: _Feature
diff --git a/typeshed/stdlib/3/_ast.pyi b/typeshed/stdlib/3/_ast.pyi
new file mode 100644
index 0000000..23bf799
--- /dev/null
+++ b/typeshed/stdlib/3/_ast.pyi
@@ -0,0 +1,358 @@
+# Python 3.5 _ast
+import typing
+from typing import Any, Optional, Union
+
+PyCF_ONLY_AST = ... # type: int
+
+identifier = str
+
+class AST:
+ _attributes = ... # type: typing.Tuple[str, ...]
+ _fields = ... # type: typing.Tuple[str, ...]
+ def __init__(self, *args, **kwargs) -> None: ...
+
+class mod(AST):
+ ...
+
+class Module(mod):
+ body = ... # type: typing.List[stmt]
+
+class Interactive(mod):
+ body = ... # type: typing.List[stmt]
+
+class Expression(mod):
+ body = ... # type: expr
+
+class Suite(mod):
+ body = ... # type: typing.List[stmt]
+
+
+class stmt(AST):
+ lineno = ... # type: int
+ col_offset = ... # type: int
+
+class FunctionDef(stmt):
+ name = ... # type: identifier
+ args = ... # type: arguments
+ body = ... # type: typing.List[stmt]
+ decorator_list = ... # type: typing.List[expr]
+ returns = ... # type: Optional[expr]
+
+class AsyncFunctionDef(stmt):
+ name = ... # type: identifier
+ args = ... # type: arguments
+ body = ... # type: typing.List[stmt]
+ decorator_list = ... # type: typing.List[expr]
+ returns = ... # type: Optional[expr]
+
+class ClassDef(stmt):
+ name = ... # type: identifier
+ bases = ... # type: typing.List[expr]
+ keywords = ... # type: typing.List[keyword]
+ body = ... # type: typing.List[stmt]
+ decorator_list = ... # type: typing.List[expr]
+
+class Return(stmt):
+ value = ... # type: Optional[expr]
+
+class Delete(stmt):
+ targets = ... # type: typing.List[expr]
+
+class Assign(stmt):
+ targets = ... # type: typing.List[expr]
+ value = ... # type: expr
+
+class AugAssign(stmt):
+ target = ... # type: expr
+ op = ... # type: operator
+ value = ... # type: expr
+
+class For(stmt):
+ target = ... # type: expr
+ iter = ... # type: expr
+ body = ... # type: typing.List[stmt]
+ orelse = ... # type: typing.List[stmt]
+
+class AsyncFor(stmt):
+ target = ... # type: expr
+ iter = ... # type: expr
+ body = ... # type: typing.List[stmt]
+ orelse = ... # type: typing.List[stmt]
+
+class While(stmt):
+ test = ... # type: expr
+ body = ... # type: typing.List[stmt]
+ orelse = ... # type: typing.List[stmt]
+
+class If(stmt):
+ test = ... # type: expr
+ body = ... # type: typing.List[stmt]
+ orelse = ... # type: typing.List[stmt]
+
+class With(stmt):
+ items = ... # type: typing.List[withitem]
+ body = ... # type: typing.List[stmt]
+
+class AsyncWith(stmt):
+ items = ... # type: typing.List[withitem]
+ body = ... # type: typing.List[stmt]
+
+class Raise(stmt):
+ exc = ... # type: Optional[expr]
+ cause = ... # type: Optional[expr]
+
+class Try(stmt):
+ body = ... # type: typing.List[stmt]
+ handlers = ... # type: typing.List[ExceptHandler]
+ orelse = ... # type: typing.List[stmt]
+ finalbody = ... # type: typing.List[stmt]
+
+class Assert(stmt):
+ test = ... # type: expr
+ msg = ... # type: Optional[expr]
+
+class Import(stmt):
+ names = ... # type: typing.List[alias]
+
+class ImportFrom(stmt):
+ module = ... # type: Optional[identifier]
+ names = ... # type: typing.List[alias]
+ level = ... # type: Optional[int]
+
+class Global(stmt):
+ names = ... # type: typing.List[identifier]
+
+class Nonlocal(stmt):
+ names = ... # type: typing.List[identifier]
+
+class Expr(stmt):
+ value = ... # type: expr
+
+class Pass(stmt): ...
+class Break(stmt): ...
+class Continue(stmt): ...
+
+
+class slice(AST):
+ ...
+
+_slice = slice # this lets us type the variable named 'slice' below
+
+class Slice(slice):
+ lower = ... # type: Optional[expr]
+ upper = ... # type: Optional[expr]
+ step = ... # type: Optional[expr]
+
+class ExtSlice(slice):
+ dims = ... # type: typing.List[slice]
+
+class Index(slice):
+ value = ... # type: expr
+
+
+class expr(AST):
+ lineno = ... # type: int
+ col_offset = ... # type: int
+
+class BoolOp(expr):
+ op = ... # type: boolop
+ values = ... # type: typing.List[expr]
+
+class BinOp(expr):
+ left = ... # type: expr
+ op = ... # type: operator
+ right = ... # type: expr
+
+class UnaryOp(expr):
+ op = ... # type: unaryop
+ operand = ... # type: expr
+
+class Lambda(expr):
+ args = ... # type: arguments
+ body = ... # type: expr
+
+class IfExp(expr):
+ test = ... # type: expr
+ body = ... # type: expr
+ orelse = ... # type: expr
+
+class Dict(expr):
+ keys = ... # type: typing.List[expr]
+ values = ... # type: typing.List[expr]
+
+class Set(expr):
+ elts = ... # type: typing.List[expr]
+
+class ListComp(expr):
+ elt = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
+
+class SetComp(expr):
+ elt = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
+
+class DictComp(expr):
+ key = ... # type: expr
+ value = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
+
+class GeneratorExp(expr):
+ elt = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
+
+class Await(expr):
+ value = ... # type: expr
+
+class Yield(expr):
+ value = ... # type: Optional[expr]
+
+class YieldFrom(expr):
+ value = ... # type: expr
+
+class Compare(expr):
+ left = ... # type: expr
+ ops = ... # type: typing.List[cmpop]
+ comparators = ... # type: typing.List[expr]
+
+class Call(expr):
+ func = ... # type: expr
+ args = ... # type: typing.List[expr]
+ keywords = ... # type: typing.List[keyword]
+
+class Num(expr):
+ n = ... # type: Union[int, float]
+
+class Str(expr):
+ s = ... # type: str
+
+class Bytes(expr):
+ s = ... # type: bytes
+
+class NameConstant(expr):
+ value = ... # type: Any
+
+class Ellipsis(expr): ...
+
+class Attribute(expr):
+ value = ... # type: expr
+ attr = ... # type: identifier
+ ctx = ... # type: expr_context
+
+class Subscript(expr):
+ value = ... # type: expr
+ slice = ... # type: _slice
+ ctx = ... # type: expr_context
+
+class Starred(expr):
+ value = ... # type: expr
+ ctx = ... # type: expr_context
+
+class Name(expr):
+ id = ... # type: identifier
+ ctx = ... # type: expr_context
+
+class List(expr):
+ elts = ... # type: typing.List[expr]
+ ctx = ... # type: expr_context
+
+class Tuple(expr):
+ elts = ... # type: typing.List[expr]
+ ctx = ... # type: expr_context
+
+
+class expr_context(AST):
+ ...
+
+class AugLoad(expr_context): ...
+class AugStore(expr_context): ...
+class Del(expr_context): ...
+class Load(expr_context): ...
+class Param(expr_context): ...
+class Store(expr_context): ...
+
+
+class boolop(AST):
+ ...
+
+class And(boolop): ...
+class Or(boolop): ...
+
+class operator(AST):
+ ...
+
+class Add(operator): ...
+class BitAnd(operator): ...
+class BitOr(operator): ...
+class BitXor(operator): ...
+class Div(operator): ...
+class FloorDiv(operator): ...
+class LShift(operator): ...
+class Mod(operator): ...
+class Mult(operator): ...
+class MatMult(operator): ...
+class Pow(operator): ...
+class RShift(operator): ...
+class Sub(operator): ...
+
+class unaryop(AST):
+ ...
+
+class Invert(unaryop): ...
+class Not(unaryop): ...
+class UAdd(unaryop): ...
+class USub(unaryop): ...
+
+class cmpop(AST):
+ ...
+
+class Eq(cmpop): ...
+class Gt(cmpop): ...
+class GtE(cmpop): ...
+class In(cmpop): ...
+class Is(cmpop): ...
+class IsNot(cmpop): ...
+class Lt(cmpop): ...
+class LtE(cmpop): ...
+class NotEq(cmpop): ...
+class NotIn(cmpop): ...
+
+
+class comprehension(AST):
+ target = ... # type: expr
+ iter = ... # type: expr
+ ifs = ... # type: typing.List[expr]
+
+
+class ExceptHandler(AST):
+ type = ... # type: Optional[expr]
+ name = ... # type: Optional[identifier]
+ body = ... # type: typing.List[stmt]
+ lineno = ... # type: int
+ col_offset = ... # type: int
+
+
+class arguments(AST):
+ args = ... # type: typing.List[arg]
+ vararg = ... # type: Optional[arg]
+ kwonlyargs = ... # type: typing.List[arg]
+ kw_defaults = ... # type: typing.List[expr]
+ kwarg = ... # type: Optional[arg]
+ defaults = ... # type: typing.List[expr]
+
+class arg(AST):
+ arg = ... # type: identifier
+ annotation = ... # type: Optional[expr]
+ lineno = ... # type: int
+ col_offset = ... # type: int
+
+class keyword(AST):
+ arg = ... # type: Optional[identifier]
+ value = ... # type: expr
+
+class alias(AST):
+ name = ... # type: identifier
+ asname = ... # type: Optional[identifier]
+
+class withitem(AST):
+ context_expr = ... # type: expr
+ optional_vars = ... # type: Optional[expr]
diff --git a/typeshed/stdlib/3/_compression.pyi b/typeshed/stdlib/3/_compression.pyi
new file mode 100644
index 0000000..16893b9
--- /dev/null
+++ b/typeshed/stdlib/3/_compression.pyi
@@ -0,0 +1,20 @@
+# Stubs for _compression (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+import io
+
+BUFFER_SIZE = ... # type: Any
+
+class BaseStream(io.BufferedIOBase): ...
+
+class DecompressReader(io.RawIOBase):
+ def readable(self): ...
+ def __init__(self, fp, decomp_factory, trailing_error=..., **decomp_args): ...
+ def close(self): ...
+ def seekable(self): ...
+ def readinto(self, b): ...
+ def read(self, size=-1): ...
+ def seek(self, offset, whence=...): ...
+ def tell(self): ...
diff --git a/typeshed/stdlib/3/_curses.pyi b/typeshed/stdlib/3/_curses.pyi
new file mode 100644
index 0000000..11f3a7c
--- /dev/null
+++ b/typeshed/stdlib/3/_curses.pyi
@@ -0,0 +1,295 @@
+# Stubs for _curses (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+ALL_MOUSE_EVENTS = ... # type: int
+A_ALTCHARSET = ... # type: int
+A_ATTRIBUTES = ... # type: int
+A_BLINK = ... # type: int
+A_BOLD = ... # type: int
+A_CHARTEXT = ... # type: int
+A_COLOR = ... # type: int
+A_DIM = ... # type: int
+A_HORIZONTAL = ... # type: int
+A_INVIS = ... # type: int
+A_LEFT = ... # type: int
+A_LOW = ... # type: int
+A_NORMAL = ... # type: int
+A_PROTECT = ... # type: int
+A_REVERSE = ... # type: int
+A_RIGHT = ... # type: int
+A_STANDOUT = ... # type: int
+A_TOP = ... # type: int
+A_UNDERLINE = ... # type: int
+A_VERTICAL = ... # type: int
+BUTTON1_CLICKED = ... # type: int
+BUTTON1_DOUBLE_CLICKED = ... # type: int
+BUTTON1_PRESSED = ... # type: int
+BUTTON1_RELEASED = ... # type: int
+BUTTON1_TRIPLE_CLICKED = ... # type: int
+BUTTON2_CLICKED = ... # type: int
+BUTTON2_DOUBLE_CLICKED = ... # type: int
+BUTTON2_PRESSED = ... # type: int
+BUTTON2_RELEASED = ... # type: int
+BUTTON2_TRIPLE_CLICKED = ... # type: int
+BUTTON3_CLICKED = ... # type: int
+BUTTON3_DOUBLE_CLICKED = ... # type: int
+BUTTON3_PRESSED = ... # type: int
+BUTTON3_RELEASED = ... # type: int
+BUTTON3_TRIPLE_CLICKED = ... # type: int
+BUTTON4_CLICKED = ... # type: int
+BUTTON4_DOUBLE_CLICKED = ... # type: int
+BUTTON4_PRESSED = ... # type: int
+BUTTON4_RELEASED = ... # type: int
+BUTTON4_TRIPLE_CLICKED = ... # type: int
+BUTTON_ALT = ... # type: int
+BUTTON_CTRL = ... # type: int
+BUTTON_SHIFT = ... # type: int
+COLOR_BLACK = ... # type: int
+COLOR_BLUE = ... # type: int
+COLOR_CYAN = ... # type: int
+COLOR_GREEN = ... # type: int
+COLOR_MAGENTA = ... # type: int
+COLOR_RED = ... # type: int
+COLOR_WHITE = ... # type: int
+COLOR_YELLOW = ... # type: int
+ERR = ... # type: int
+KEY_A1 = ... # type: int
+KEY_A3 = ... # type: int
+KEY_B2 = ... # type: int
+KEY_BACKSPACE = ... # type: int
+KEY_BEG = ... # type: int
+KEY_BREAK = ... # type: int
+KEY_BTAB = ... # type: int
+KEY_C1 = ... # type: int
+KEY_C3 = ... # type: int
+KEY_CANCEL = ... # type: int
+KEY_CATAB = ... # type: int
+KEY_CLEAR = ... # type: int
+KEY_CLOSE = ... # type: int
+KEY_COMMAND = ... # type: int
+KEY_COPY = ... # type: int
+KEY_CREATE = ... # type: int
+KEY_CTAB = ... # type: int
+KEY_DC = ... # type: int
+KEY_DL = ... # type: int
+KEY_DOWN = ... # type: int
+KEY_EIC = ... # type: int
+KEY_END = ... # type: int
+KEY_ENTER = ... # type: int
+KEY_EOL = ... # type: int
+KEY_EOS = ... # type: int
+KEY_EXIT = ... # type: int
+KEY_F0 = ... # type: int
+KEY_F1 = ... # type: int
+KEY_F10 = ... # type: int
+KEY_F11 = ... # type: int
+KEY_F12 = ... # type: int
+KEY_F13 = ... # type: int
+KEY_F14 = ... # type: int
+KEY_F15 = ... # type: int
+KEY_F16 = ... # type: int
+KEY_F17 = ... # type: int
+KEY_F18 = ... # type: int
+KEY_F19 = ... # type: int
+KEY_F2 = ... # type: int
+KEY_F20 = ... # type: int
+KEY_F21 = ... # type: int
+KEY_F22 = ... # type: int
+KEY_F23 = ... # type: int
+KEY_F24 = ... # type: int
+KEY_F25 = ... # type: int
+KEY_F26 = ... # type: int
+KEY_F27 = ... # type: int
+KEY_F28 = ... # type: int
+KEY_F29 = ... # type: int
+KEY_F3 = ... # type: int
+KEY_F30 = ... # type: int
+KEY_F31 = ... # type: int
+KEY_F32 = ... # type: int
+KEY_F33 = ... # type: int
+KEY_F34 = ... # type: int
+KEY_F35 = ... # type: int
+KEY_F36 = ... # type: int
+KEY_F37 = ... # type: int
+KEY_F38 = ... # type: int
+KEY_F39 = ... # type: int
+KEY_F4 = ... # type: int
+KEY_F40 = ... # type: int
+KEY_F41 = ... # type: int
+KEY_F42 = ... # type: int
+KEY_F43 = ... # type: int
+KEY_F44 = ... # type: int
+KEY_F45 = ... # type: int
+KEY_F46 = ... # type: int
+KEY_F47 = ... # type: int
+KEY_F48 = ... # type: int
+KEY_F49 = ... # type: int
+KEY_F5 = ... # type: int
+KEY_F50 = ... # type: int
+KEY_F51 = ... # type: int
+KEY_F52 = ... # type: int
+KEY_F53 = ... # type: int
+KEY_F54 = ... # type: int
+KEY_F55 = ... # type: int
+KEY_F56 = ... # type: int
+KEY_F57 = ... # type: int
+KEY_F58 = ... # type: int
+KEY_F59 = ... # type: int
+KEY_F6 = ... # type: int
+KEY_F60 = ... # type: int
+KEY_F61 = ... # type: int
+KEY_F62 = ... # type: int
+KEY_F63 = ... # type: int
+KEY_F7 = ... # type: int
+KEY_F8 = ... # type: int
+KEY_F9 = ... # type: int
+KEY_FIND = ... # type: int
+KEY_HELP = ... # type: int
+KEY_HOME = ... # type: int
+KEY_IC = ... # type: int
+KEY_IL = ... # type: int
+KEY_LEFT = ... # type: int
+KEY_LL = ... # type: int
+KEY_MARK = ... # type: int
+KEY_MAX = ... # type: int
+KEY_MESSAGE = ... # type: int
+KEY_MIN = ... # type: int
+KEY_MOUSE = ... # type: int
+KEY_MOVE = ... # type: int
+KEY_NEXT = ... # type: int
+KEY_NPAGE = ... # type: int
+KEY_OPEN = ... # type: int
+KEY_OPTIONS = ... # type: int
+KEY_PPAGE = ... # type: int
+KEY_PREVIOUS = ... # type: int
+KEY_PRINT = ... # type: int
+KEY_REDO = ... # type: int
+KEY_REFERENCE = ... # type: int
+KEY_REFRESH = ... # type: int
+KEY_REPLACE = ... # type: int
+KEY_RESET = ... # type: int
+KEY_RESIZE = ... # type: int
+KEY_RESTART = ... # type: int
+KEY_RESUME = ... # type: int
+KEY_RIGHT = ... # type: int
+KEY_SAVE = ... # type: int
+KEY_SBEG = ... # type: int
+KEY_SCANCEL = ... # type: int
+KEY_SCOMMAND = ... # type: int
+KEY_SCOPY = ... # type: int
+KEY_SCREATE = ... # type: int
+KEY_SDC = ... # type: int
+KEY_SDL = ... # type: int
+KEY_SELECT = ... # type: int
+KEY_SEND = ... # type: int
+KEY_SEOL = ... # type: int
+KEY_SEXIT = ... # type: int
+KEY_SF = ... # type: int
+KEY_SFIND = ... # type: int
+KEY_SHELP = ... # type: int
+KEY_SHOME = ... # type: int
+KEY_SIC = ... # type: int
+KEY_SLEFT = ... # type: int
+KEY_SMESSAGE = ... # type: int
+KEY_SMOVE = ... # type: int
+KEY_SNEXT = ... # type: int
+KEY_SOPTIONS = ... # type: int
+KEY_SPREVIOUS = ... # type: int
+KEY_SPRINT = ... # type: int
+KEY_SR = ... # type: int
+KEY_SREDO = ... # type: int
+KEY_SREPLACE = ... # type: int
+KEY_SRESET = ... # type: int
+KEY_SRIGHT = ... # type: int
+KEY_SRSUME = ... # type: int
+KEY_SSAVE = ... # type: int
+KEY_SSUSPEND = ... # type: int
+KEY_STAB = ... # type: int
+KEY_SUNDO = ... # type: int
+KEY_SUSPEND = ... # type: int
+KEY_UNDO = ... # type: int
+KEY_UP = ... # type: int
+OK = ... # type: int
+REPORT_MOUSE_POSITION = ... # type: int
+_C_API = ... # type: Any
+version = ... # type: bytes
+
+def baudrate(*args, **kwargs): ...
+def beep(*args, **kwargs): ...
+def can_change_color(*args, **kwargs): ...
+def cbreak(*args, **kwargs): ...
+def color_content(*args, **kwargs): ...
+def color_pair(*args, **kwargs): ...
+def curs_set(*args, **kwargs): ...
+def def_prog_mode(*args, **kwargs): ...
+def def_shell_mode(*args, **kwargs): ...
+def delay_output(*args, **kwargs): ...
+def doupdate(*args, **kwargs): ...
+def echo(*args, **kwargs): ...
+def endwin(*args, **kwargs): ...
+def erasechar(*args, **kwargs): ...
+def filter(*args, **kwargs): ...
+def flash(*args, **kwargs): ...
+def flushinp(*args, **kwargs): ...
+def getmouse(*args, **kwargs): ...
+def getsyx(*args, **kwargs): ...
+def getwin(*args, **kwargs): ...
+def halfdelay(*args, **kwargs): ...
+def has_colors(*args, **kwargs): ...
+def has_ic(*args, **kwargs): ...
+def has_il(*args, **kwargs): ...
+def has_key(*args, **kwargs): ...
+def init_color(*args, **kwargs): ...
+def init_pair(*args, **kwargs): ...
+def initscr(*args, **kwargs): ...
+def intrflush(*args, **kwargs): ...
+def is_term_resized(*args, **kwargs): ...
+def isendwin(*args, **kwargs): ...
+def keyname(*args, **kwargs): ...
+def killchar(*args, **kwargs): ...
+def longname(*args, **kwargs): ...
+def meta(*args, **kwargs): ...
+def mouseinterval(*args, **kwargs): ...
+def mousemask(*args, **kwargs): ...
+def napms(*args, **kwargs): ...
+def newpad(*args, **kwargs): ...
+def newwin(*args, **kwargs): ...
+def nl(*args, **kwargs): ...
+def nocbreak(*args, **kwargs): ...
+def noecho(*args, **kwargs): ...
+def nonl(*args, **kwargs): ...
+def noqiflush(*args, **kwargs): ...
+def noraw(*args, **kwargs): ...
+def pair_content(*args, **kwargs): ...
+def pair_number(*args, **kwargs): ...
+def putp(*args, **kwargs): ...
+def qiflush(*args, **kwargs): ...
+def raw(*args, **kwargs): ...
+def reset_prog_mode(*args, **kwargs): ...
+def reset_shell_mode(*args, **kwargs): ...
+def resetty(*args, **kwargs): ...
+def resize_term(*args, **kwargs): ...
+def resizeterm(*args, **kwargs): ...
+def savetty(*args, **kwargs): ...
+def setsyx(*args, **kwargs): ...
+def setupterm(*args, **kwargs): ...
+def start_color(*args, **kwargs): ...
+def termattrs(*args, **kwargs): ...
+def termname(*args, **kwargs): ...
+def tigetflag(*args, **kwargs): ...
+def tigetnum(*args, **kwargs): ...
+def tigetstr(*args, **kwargs): ...
+def tparm(*args, **kwargs): ...
+def typeahead(*args, **kwargs): ...
+def unctrl(*args, **kwargs): ...
+def unget_wch(*args, **kwargs): ...
+def ungetch(*args, **kwargs): ...
+def ungetmouse(*args, **kwargs): ...
+def update_lines_cols(*args, **kwargs): ...
+def use_default_colors(*args, **kwargs): ...
+def use_env(*args, **kwargs): ...
+
+class error(Exception): ...
diff --git a/typeshed/stdlib/3/_operator.pyi b/typeshed/stdlib/3/_operator.pyi
new file mode 100644
index 0000000..0f64f95
--- /dev/null
+++ b/typeshed/stdlib/3/_operator.pyi
@@ -0,0 +1,71 @@
+# Stubs for _operator (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+def _compare_digest(*args, **kwargs): ...
+def abs(a): ...
+def add(a, b): ...
+def and_(a, b): ...
+def concat(a, b): ...
+def contains(a, b): ...
+def countOf(a, b): ...
+def delitem(a, b): ...
+def eq(a, b): ...
+def floordiv(a, b): ...
+def ge(a, b): ...
+def getitem(a, b): ...
+def gt(a, b): ...
+def iadd(*args, **kwargs): ...
+def iand(*args, **kwargs): ...
+def iconcat(*args, **kwargs): ...
+def ifloordiv(*args, **kwargs): ...
+def ilshift(*args, **kwargs): ...
+def imatmul(*args, **kwargs): ...
+def imod(*args, **kwargs): ...
+def imul(*args, **kwargs): ...
+def index(a): ...
+def indexOf(a, b): ...
+def inv(a): ...
+def invert(a): ...
+def ior(*args, **kwargs): ...
+def ipow(*args, **kwargs): ...
+def irshift(*args, **kwargs): ...
+def is_(a, b): ...
+def is_not(a, b): ...
+def isub(*args, **kwargs): ...
+def itruediv(*args, **kwargs): ...
+def ixor(*args, **kwargs): ...
+def le(a, b): ...
+def length_hint(obj, default=0): ...
+def lshift(a, b): ...
+def lt(a, b): ...
+def matmul(a, b): ...
+def mod(a, b): ...
+def mul(a, b): ...
+def ne(a, b): ...
+def neg(a): ...
+def not_(a): ...
+def or_(a, b): ...
+def pos(a): ...
+def pow(a, b): ...
+def rshift(a, b): ...
+def setitem(a, b, c): ...
+def sub(a, b): ...
+def truediv(a, b): ...
+def truth(a): ...
+def xor(a, b): ...
+
+class attrgetter:
+ def __init__(self, *args, **kwargs): ...
+ def __call__(self, *args, **kwargs): ...
+ def __reduce__(self): ...
+
+class itemgetter:
+ def __init__(self, *args, **kwargs): ...
+ def __call__(self, *args, **kwargs): ...
+ def __reduce__(self): ...
+
+class methodcaller:
+ def __init__(self, *args, **kwargs): ...
+ def __call__(self, *args, **kwargs): ...
+ def __reduce__(self): ...
diff --git a/typeshed/stdlib/3/abc.pyi b/typeshed/stdlib/3/abc.pyi
index 1e32756..d24b258 100644
--- a/typeshed/stdlib/3/abc.pyi
+++ b/typeshed/stdlib/3/abc.pyi
@@ -1,6 +1,8 @@
+from typing import Any
# Stubs for abc.
# Thesee definitions have special processing in type checker.
-class ABCMeta: ...
+class ABCMeta:
+ def register(cls: "ABCMeta", subclass: Any) -> None: ...
abstractmethod = object()
abstractproperty = object()
diff --git a/typeshed/stdlib/3/argparse.pyi b/typeshed/stdlib/3/argparse.pyi
index 08ea33c..02328cd 100644
--- a/typeshed/stdlib/3/argparse.pyi
+++ b/typeshed/stdlib/3/argparse.pyi
@@ -2,7 +2,7 @@
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
-from typing import Any, Sequence
+from typing import Any, Sequence, Union
SUPPRESS = ... # type: Any
OPTIONAL = ... # type: Any
@@ -118,7 +118,7 @@ class _ActionsContainer:
def get_default(self, dest): ...
def add_argument(self,
*args: str,
- action: str = ...,
+ action: Union[str, Action] = ...,
nargs: str = ...,
const: Any = ...,
default: Any = ...,
@@ -127,7 +127,8 @@ class _ActionsContainer:
required: bool = ...,
help: str = ...,
metavar: str = ...,
- dest: str = ...
+ dest: str = ...,
+ version: str = ...
) -> None: ...
def add_argument_group(self, *args, **kwargs): ...
def add_mutually_exclusive_group(self, **kwargs): ...
diff --git a/typeshed/stdlib/3/ast.pyi b/typeshed/stdlib/3/ast.pyi
new file mode 100644
index 0000000..ac80368
--- /dev/null
+++ b/typeshed/stdlib/3/ast.pyi
@@ -0,0 +1,42 @@
+# Python 3.5 ast
+
+import typing
+from typing import Any, Union, Iterator
+
+from _ast import (
+ Add, alias, And, arg, arguments, Assert, Assign, AST, AsyncFor,
+ AsyncFunctionDef, AsyncWith, Attribute, AugAssign, AugLoad, AugStore,
+ Await, BinOp, BitAnd, BitOr, BitXor, BoolOp, boolop, Break, Bytes, Call,
+ ClassDef, cmpop, Compare, comprehension, Continue, Del, Delete, Dict,
+ DictComp, Div, Ellipsis, Eq, ExceptHandler, Expr, expr, Expression,
+ expr_context, ExtSlice, FloorDiv, For, FunctionDef, GeneratorExp, Global,
+ Gt, GtE, If, IfExp, Import, ImportFrom, In, Index, Interactive, Invert, Is,
+ IsNot, keyword, Lambda, List, ListComp, Load, LShift, Lt, LtE, MatMult,
+ Mod, mod, Module, Mult, Name, NameConstant, Nonlocal, Not, NotEq, NotIn,
+ Num, operator, Or, Param, Pass, Pow, Raise, Return, RShift, Set, SetComp,
+ Slice, slice, Starred, stmt, Store, Str, Sub, Subscript, Suite, Try, Tuple,
+ UAdd, UnaryOp, unaryop, USub, While, With, withitem, Yield, YieldFrom
+)
+
+class NodeVisitor():
+ __doc__ = ... # type: str
+ def visit(self, node: AST) -> Any: ...
+ def generic_visit(self, node: AST) -> None: ...
+
+class NodeTransformer(NodeVisitor):
+ __doc__ = ... # type: str
+ def generic_visit(self, node: AST) -> None: ...
+
+def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> AST: ...
+def copy_location(new_node: AST, old_node: AST) -> AST: ...
+def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ...
+def fix_missing_locations(node: AST) -> AST: ...
+def get_docstring(node: AST, clean: bool = ...) -> str: ...
+def increment_lineno(node: AST, n: int = ...) -> AST: ...
+def iter_child_nodes(node: AST) -> Iterator[AST]: ...
+def iter_fields(node: AST) -> Iterator[typing.Tuple[str, Any]]: ...
+def literal_eval(node_or_string: Union[str, AST]) -> Any: ...
+def walk(node: AST) -> Iterator[AST]: ...
+
+PyCF_ONLY_AST = ... # type: int
+
diff --git a/typeshed/stdlib/3/builtins.pyi b/typeshed/stdlib/3/builtins.pyi
index 7a6a499..f7e8562 100644
--- a/typeshed/stdlib/3/builtins.pyi
+++ b/typeshed/stdlib/3/builtins.pyi
@@ -28,6 +28,7 @@ property = object()
class object:
__doc__ = ... # type: str
__class__ = ... # type: type
+ __dict__ = ... # type: Dict[str, Any]
def __init__(self) -> None: ...
def __new__(cls) -> Any: ...
@@ -44,9 +45,15 @@ class type:
__module__ = ... # type: str
__dict__ = ... # type: Dict[str, Any]
+ @overload
def __init__(self, o: object) -> None: ...
- @staticmethod
+ @overload
+ def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any]) -> None: ...
+ @overload
+ def __new__(cls, o: object) -> type: ...
+ @overload
def __new__(cls, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any]) -> type: ...
+ def __call__(self, *args: Any, **kwds: Any) -> Any: ...
class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
def __init__(self, x: Union[SupportsInt, str, bytes] = None, base: int = None) -> None: ...
@@ -221,10 +228,10 @@ class str(Sequence[str]):
def zfill(self, width: int) -> str: ...
@staticmethod
@overload
- def maketrans(self, x: Union[Dict[int, Any], Dict[str, Any]]) -> Dict[int, Any]: ...
+ def maketrans(x: Union[Dict[int, Any], Dict[str, Any]]) -> Dict[int, Any]: ...
@staticmethod
@overload
- def maketrans(self, x: str, y: str, z: str = ...) -> Dict[int, Any]: ...
+ def maketrans(x: str, y: str, z: str = ...) -> Dict[int, Any]: ...
def __getitem__(self, i: Union[int, slice]) -> str: ...
def __add__(self, s: str) -> str: ...
@@ -405,7 +412,7 @@ class bytearray(MutableSequence[int], ByteString):
class memoryview():
# TODO arg can be any obj supporting the buffer protocol
- def __init__(self, bytearray) -> None: ...
+ def __init__(self, b: bytearray) -> None: ...
class bool(int, SupportsInt, SupportsFloat):
def __init__(self, o: object = ...) -> None: ...
@@ -485,20 +492,24 @@ class list(MutableSequence[_T], Generic[_T]):
def __le__(self, x: List[_T]) -> bool: ...
class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
+ # NOTE: Keyword arguments are special. If they are used, _KT must include
+ # str, but we have no way of enforcing it here.
@overload
- def __init__(self) -> None: ...
+ def __init__(self, **kwargs: _VT) -> None: ...
@overload
- def __init__(self, map: Mapping[_KT, _VT]) -> None: ...
+ def __init__(self, map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
@overload
- def __init__(self, iterable: Iterable[Tuple[_KT, _VT]]) -> None: ... # TODO keyword args
+ def __init__(self, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
def clear(self) -> None: ...
def copy(self) -> Dict[_KT, _VT]: ...
def get(self, k: _KT, default: _VT = None) -> _VT: ...
def pop(self, k: _KT, default: _VT = None) -> _VT: ...
def popitem(self) -> Tuple[_KT, _VT]: ...
def setdefault(self, k: _KT, default: _VT = None) -> _VT: ...
- def update(self, m: Union[Mapping[_KT, _VT],
- Iterable[Tuple[_KT, _VT]]]) -> None: ...
+ @overload
+ def update(self, m: Mapping[_KT, _VT]) -> None: ...
+ @overload
+ def update(self, m: Iterable[Tuple[_KT, _VT]]) -> None: ...
def keys(self) -> KeysView[_KT]: ...
def values(self) -> ValuesView[_VT]: ...
def items(self) -> ItemsView[_KT, _VT]: ...
@@ -605,10 +616,6 @@ class module:
__file__ = ... # type: str
__dict__ = ... # type: Dict[str, Any]
-True = ... # type: bool
-False = ... # type: bool
-__debug__ = False
-
NotImplemented = ... # type: Any
def abs(n: SupportsAbs[_T]) -> _T: ...
@@ -779,7 +786,11 @@ class WindowsError(OSError): ...
class OverflowError(ArithmeticError): ...
class ReferenceError(Exception): ...
class StopIteration(Exception): ...
-class SyntaxError(Exception): ...
+class SyntaxError(Exception):
+ msg = ... # type: str
+ lineno = ... # type: int
+ offset = ... # type: int
+ text = ... # type: str
class IndentationError(SyntaxError): ...
class TabError(IndentationError): ...
class SystemError(Exception): ...
diff --git a/typeshed/stdlib/3/calendar.pyi b/typeshed/stdlib/3/calendar.pyi
index c0bfc73..b5de564 100644
--- a/typeshed/stdlib/3/calendar.pyi
+++ b/typeshed/stdlib/3/calendar.pyi
@@ -1,15 +1,75 @@
-# Stubs for calendar
+from typing import Any, Iterable, List, Optional, Tuple
+import datetime
-# NOTE: These are incomplete!
+LocaleType = Tuple[Optional[str], Optional[str]]
-from typing import overload, Tuple
+class IllegalMonthError(ValueError):
+ def __init__(self, month: int) -> None: ...
+ def __str__(self) -> str: ...
-# TODO actually, any number of items larger than 5 is fine
- at overload
-def timegm(t: Tuple[int, int, int, int, int, int]) -> int: ...
- at overload
-def timegm(t: Tuple[int, int, int, int, int, int, int]) -> int: ...
- at overload
-def timegm(t: Tuple[int, int, int, int, int, int, int, int]) -> int: ...
- at overload
-def timegm(t: Tuple[int, int, int, int, int, int, int, int, int]) -> int: ...
+class IllegalWeekdayError(ValueError):
+ def __init__(self, weekday: int) -> None: ...
+ def __str__(self) -> str: ...
+
+def isleap(year: int) -> bool: ...
+def leapdays(y1: int, y2: int) -> int: ...
+def weekday(year: int, month: int, day: int) -> int: ...
+def monthrange(year: int, month: int) -> Tuple[int, int]: ...
+
+class Calendar(object):
+ def __init__(self, firstweekday: int = 0) -> None: ...
+ def getfirstweekday(self) -> int: ...
+ def setfirstweekday(self, firstweekday: int) -> None: ...
+ def iterweekdays(self) -> Iterable[int]: ...
+ def itermonthdates(self, year: int, month: int) -> Iterable[datetime.date]: ...
+ def itermonthdays2(self, year: int, month: int) -> Iterable[Tuple[int, int]]: ...
+ def itermonthdays(self, year: int, month: int) -> Iterable[int]: ...
+ def monthdatescalendar(self, year: int, month: int) -> List[List[datetime.date]]: ...
+ def monthdays2calendar(self, year: int, month: int) -> List[List[Tuple[int, int]]]: ...
+ def monthdayscalendar(self, year: int, month: int) -> List[List[int]]: ...
+ def yeardatescalendar(self, year: int, width: int = 3) -> List[List[int]]: ...
+ def yeardays2calendar(self, year: int, width: int = 3) -> List[List[Tuple[int, int]]]: ...
+ def yeardayscalendar(self, year: int, width: int = 3) -> List[List[int]]: ...
+
+class TextCalendar(Calendar):
+ def prweek(self, theweek: int, width: int) -> None: ...
+ def formatday(self, day: int, weekday: int, width: int) -> str: ...
+ def formatweek(self, theweek: int, width: int) -> str: ...
+ def formatweekday(self, day: int, width: int) -> str: ...
+ def formatweekheader(self, width: int) -> str: ...
+ def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = ...) -> str: ...
+ def prmonth(self, theyear: int, themonth: int, w: Any=0, l: Any = 0) -> None: ...
+ def formatmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: ...
+ def formatyear(self, theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: ...
+ def pryear(self, theyear: int, w: Any = 0, l: Any = 0, c: Any = 6, m: Any = 3) -> None: ...
+
+class HTMLCalendar(Calendar):
+ def formatday(self, day: int, weekday: int) -> str: ...
+ def formatweek(self, theweek: int) -> str: ...
+ def formatweekday(self, day: int) -> str: ...
+ def formatweekheader(self) -> str: ...
+ def formatmonthname(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ...
+ def formatmonth(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ...
+ def formatyear(self, theyear: int, width: int = 3) -> str: ...
+ def formatyearpage(self, theyear: int, width: int = 3, css: Optional[str] = 'calendar.css', encoding: Optional[str] = ...) -> str: ...
+
+class different_locale:
+ def __init__(self, locale: LocaleType) -> None: ...
+ def __enter__(self) -> LocaleType: ...
+ def __exit__(self, *args) -> None: ...
+
+class LocaleTextCalendar(TextCalendar):
+ def __init__(self, firstweekday: int = 0, locale: Optional[LocaleType] = ...) -> None: ...
+ def formatweekday(self, day: int, width: int) -> str: ...
+ def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = ...) -> str: ...
+
+class LocaleHTMLCalendar(HTMLCalendar):
+ def __init__(self, firstweekday: int = 0, locale: Optional[LocaleType] = ...) -> None: ...
+ def formatweekday(self, day: int) -> str: ...
+ def formatmonthname(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ...
+
+c = ... # type: TextCalendar
+def setfirstweekday(firstweekday: int) -> None: ...
+def format(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ...
+def formatstring(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ...
+def timegm(tuple: Tuple[int, ...]) -> int: ...
diff --git a/typeshed/stdlib/3/collections.pyi b/typeshed/stdlib/3/collections/__init__.pyi
similarity index 96%
rename from typeshed/stdlib/3/collections.pyi
rename to typeshed/stdlib/3/collections/__init__.pyi
index 4939e92..fcf3b2f 100644
--- a/typeshed/stdlib/3/collections.pyi
+++ b/typeshed/stdlib/3/collections/__init__.pyi
@@ -102,9 +102,10 @@ class Counter(Dict[_T, int], Generic[_T]):
# it's included so that the signature is compatible with
# Dict.update. Not sure if we should use '# type: ignore' instead
# and omit the type from the union.
- def update(self, m: Union[Mapping[_T, int],
- Iterable[Tuple[_T, int]],
- Iterable[_T]]) -> None: ...
+ @overload
+ def update(self, m: Mapping[_T, int]) -> None: ...
+ @overload
+ def update(self, m: Union[Iterable[_T], Iterable[Tuple[_T, int]]]) -> None: ...
class OrderedDict(Dict[_KT, _VT], Generic[_KT, _VT]):
def popitem(self, last: bool = ...) -> Tuple[_KT, _VT]: ...
diff --git a/typeshed/stdlib/3/bz2.pyi b/typeshed/stdlib/3/concurrent/__init__.pyi
similarity index 100%
copy from typeshed/stdlib/3/bz2.pyi
copy to typeshed/stdlib/3/concurrent/__init__.pyi
diff --git a/typeshed/stdlib/3/concurrent/futures/__init__.pyi b/typeshed/stdlib/3/concurrent/futures/__init__.pyi
new file mode 100644
index 0000000..91cf274
--- /dev/null
+++ b/typeshed/stdlib/3/concurrent/futures/__init__.pyi
@@ -0,0 +1,7 @@
+# Stubs for concurrent.futures (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from ._base import *
+from .thread import *
+from .process import *
diff --git a/typeshed/stdlib/3/concurrent/futures/_base.pyi b/typeshed/stdlib/3/concurrent/futures/_base.pyi
new file mode 100644
index 0000000..19a23e8
--- /dev/null
+++ b/typeshed/stdlib/3/concurrent/futures/_base.pyi
@@ -0,0 +1,81 @@
+# Stubs for concurrent.futures._base (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from collections import namedtuple
+
+FIRST_COMPLETED = ... # type: Any
+FIRST_EXCEPTION = ... # type: Any
+ALL_COMPLETED = ... # type: Any
+PENDING = ... # type: Any
+RUNNING = ... # type: Any
+CANCELLED = ... # type: Any
+CANCELLED_AND_NOTIFIED = ... # type: Any
+FINISHED = ... # type: Any
+LOGGER = ... # type: Any
+
+class Error(Exception): ...
+class CancelledError(Error): ...
+class TimeoutError(Error): ...
+
+class _Waiter:
+ event = ... # type: Any
+ finished_futures = ... # type: Any
+ def __init__(self): ...
+ def add_result(self, future): ...
+ def add_exception(self, future): ...
+ def add_cancelled(self, future): ...
+
+class _AsCompletedWaiter(_Waiter):
+ lock = ... # type: Any
+ def __init__(self): ...
+ def add_result(self, future): ...
+ def add_exception(self, future): ...
+ def add_cancelled(self, future): ...
+
+class _FirstCompletedWaiter(_Waiter):
+ def add_result(self, future): ...
+ def add_exception(self, future): ...
+ def add_cancelled(self, future): ...
+
+class _AllCompletedWaiter(_Waiter):
+ num_pending_calls = ... # type: Any
+ stop_on_exception = ... # type: Any
+ lock = ... # type: Any
+ def __init__(self, num_pending_calls, stop_on_exception): ...
+ def add_result(self, future): ...
+ def add_exception(self, future): ...
+ def add_cancelled(self, future): ...
+
+class _AcquireFutures:
+ futures = ... # type: Any
+ def __init__(self, futures): ...
+ def __enter__(self): ...
+ def __exit__(self, *args): ...
+
+def as_completed(fs, timeout=None): ...
+
+DoneAndNotDoneFutures = namedtuple('DoneAndNotDoneFutures', 'done not_done')
+
+def wait(fs, timeout=None, return_when=...): ...
+
+class Future:
+ def __init__(self): ...
+ def cancel(self): ...
+ def cancelled(self): ...
+ def running(self): ...
+ def done(self): ...
+ def add_done_callback(self, fn): ...
+ def result(self, timeout=None): ...
+ def exception(self, timeout=None): ...
+ def set_running_or_notify_cancel(self): ...
+ def set_result(self, result): ...
+ def set_exception(self, exception): ...
+
+class Executor:
+ def submit(self, fn, *args, **kwargs): ...
+ def map(self, fn, *iterables, timeout=None, chunksize=1): ...
+ def shutdown(self, wait=True): ...
+ def __enter__(self): ...
+ def __exit__(self, exc_type, exc_val, exc_tb): ...
diff --git a/typeshed/stdlib/3/concurrent/futures/process.pyi b/typeshed/stdlib/3/concurrent/futures/process.pyi
new file mode 100644
index 0000000..605cd80
--- /dev/null
+++ b/typeshed/stdlib/3/concurrent/futures/process.pyi
@@ -0,0 +1,46 @@
+# Stubs for concurrent.futures.process (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from . import _base
+
+EXTRA_QUEUED_CALLS = ... # type: Any
+
+class _RemoteTraceback(Exception):
+ tb = ... # type: Any
+ def __init__(self, tb): ...
+
+class _ExceptionWithTraceback:
+ exc = ... # type: Any
+ tb = ... # type: Any
+ def __init__(self, exc, tb): ...
+ def __reduce__(self): ...
+
+class _WorkItem:
+ future = ... # type: Any
+ fn = ... # type: Any
+ args = ... # type: Any
+ kwargs = ... # type: Any
+ def __init__(self, future, fn, args, kwargs): ...
+
+class _ResultItem:
+ work_id = ... # type: Any
+ exception = ... # type: Any
+ result = ... # type: Any
+ def __init__(self, work_id, exception=None, result=None): ...
+
+class _CallItem:
+ work_id = ... # type: Any
+ fn = ... # type: Any
+ args = ... # type: Any
+ kwargs = ... # type: Any
+ def __init__(self, work_id, fn, args, kwargs): ...
+
+class BrokenProcessPool(RuntimeError): ...
+
+class ProcessPoolExecutor(_base.Executor):
+ def __init__(self, max_workers=None): ...
+ def submit(self, fn, *args, **kwargs): ...
+ def map(self, fn, *iterables, timeout=None, chunksize=1): ...
+ def shutdown(self, wait=True): ...
diff --git a/typeshed/stdlib/3/concurrent/futures/thread.pyi b/typeshed/stdlib/3/concurrent/futures/thread.pyi
new file mode 100644
index 0000000..f8242ff
--- /dev/null
+++ b/typeshed/stdlib/3/concurrent/futures/thread.pyi
@@ -0,0 +1,19 @@
+# Stubs for concurrent.futures.thread (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from . import _base
+
+class _WorkItem:
+ future = ... # type: Any
+ fn = ... # type: Any
+ args = ... # type: Any
+ kwargs = ... # type: Any
+ def __init__(self, future, fn, args, kwargs): ...
+ def run(self): ...
+
+class ThreadPoolExecutor(_base.Executor):
+ def __init__(self, max_workers=None): ...
+ def submit(self, fn, *args, **kwargs): ...
+ def shutdown(self, wait=True): ...
diff --git a/typeshed/stdlib/3/configparser.pyi b/typeshed/stdlib/3/configparser.pyi
new file mode 100644
index 0000000..584da44
--- /dev/null
+++ b/typeshed/stdlib/3/configparser.pyi
@@ -0,0 +1,166 @@
+# Stubs for configparser
+
+# Based on http://docs.python.org/3.5/library/configparser.html and on
+# reading configparser.py.
+
+from typing import (MutableMapping, Mapping, Dict, Sequence, List,
+ Iterable, Iterator, Callable, Any, TextIO)
+# Types used only in type comments
+from typing import Optional, Tuple # noqa
+
+# Internal type aliases
+_section = Dict[str, str]
+_parser = MutableMapping[str, _section]
+_converters = Dict[str, Callable[[str], Any]]
+
+
+DEFAULTSECT = ... # type: str
+
+
+class Interpolation:
+ def before_get(self, parser: _parser,
+ section: str,
+ option: str,
+ value: str,
+ defaults: _section) -> str: ...
+
+ def before_set(self, parser: _parser,
+ section: str,
+ option: str,
+ value: str) -> str: ...
+
+ def before_read(self, parser: _parser,
+ section: str,
+ option: str,
+ value: str) -> str: ...
+
+ def before_write(self, parser: _parser,
+ section: str,
+ option: str,
+ value: str) -> str: ...
+
+
+class BasicInterpolation(Interpolation):
+ pass
+
+
+class ExtendedInterpolation(Interpolation):
+ pass
+
+
+class ConfigParser(_parser):
+ def __init__(self,
+ defaults: _section = None,
+ dict_type: Mapping[str, str] = ...,
+ allow_no_value: bool = ...,
+ delimiters: Sequence[str] = ...,
+ comment_prefixes: Sequence[str] = ...,
+ inline_comment_prefixes: Sequence[str] = None,
+ strict: bool = ...,
+ empty_lines_in_values: bool = ...,
+ default_section: str = ...,
+ interpolation: Interpolation = None,
+ converters: _converters = {}) -> None: ...
+
+ def __len__(self) -> int: ...
+
+ def __getitem__(self, section: str) -> _section: ...
+
+ def __setitem__(self, section: str, options: _section) -> None: ...
+
+ def __delitem__(self, section: str) -> None: ...
+
+ def __iter__(self) -> Iterator[str]: ...
+
+ def defaults(self) -> _section: ...
+
+ def sections(self) -> List[str]: ...
+
+ def add_section(self, section: str) -> None: ...
+
+ def has_section(self, section: str) -> bool: ...
+
+ def options(self, section: str) -> List[str]: ...
+
+ def has_option(self, section: str, option: str) -> bool: ...
+
+ def read(self, filenames: Sequence[str],
+ encoding: str = None) -> List[str]: ...
+
+ def read_file(self, f: Iterable[str], source: str = None) -> None: ...
+
+ def read_string(self, string: str, source: str = ...) -> None: ...
+
+ def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]],
+ source: str = ...) -> None: ...
+
+ def getint(self, section: str, option: str) -> int: ...
+
+ def getfloat(self, section: str, option: str) -> float: ...
+
+ def getboolean(self, section: str, option: str) -> bool: ...
+
+ def set(self, section: str, option: str, value: str) -> None: ...
+
+ def write(self,
+ fileobject: TextIO,
+ space_around_delimiters: bool = True) -> None: ...
+
+ def remove_option(self, section: str, option: str) -> bool: ...
+
+ def remove_section(self, section: str) -> bool: ...
+
+ def optionxform(self, option: str) -> str: ...
+
+
+class Error(Exception):
+ pass
+
+
+class NoSectionError(Error):
+ pass
+
+
+class DuplicateSectionError(Error):
+ section = ... # type: str
+ source = ... # type: Optional[str]
+ lineno = ... # type: Optional[int]
+
+
+class DuplicateOptionError(Error):
+ section = ... # type: str
+ option = ... # type: str
+ source = ... # type: Optional[str]
+ lineno = ... # type: Optional[int]
+
+
+class NoOptionError(Error):
+ section = ... # type: str
+ option = ... # type: str
+
+
+class InterpolationError(Error):
+ section = ... # type: str
+ option = ... # type: str
+
+
+class InterpolationDepthError(InterpolationError):
+ pass
+
+
+class InterpolationMissingOptionError(InterpolationError):
+ reference = ... # type: str
+
+
+class InterpolationSyntaxError(InterpolationError):
+ pass
+
+
+class ParsingError(Error):
+ source = ... # type: str
+ errors = ... # type: Sequence[Tuple[int, str]]
+
+
+class MissingSectionHeaderError(ParsingError):
+ lineno = ... # type: int
+ line = ... # type: str
diff --git a/typeshed/stdlib/3/contextlib.pyi b/typeshed/stdlib/3/contextlib.pyi
index af9f0ab..4400098 100644
--- a/typeshed/stdlib/3/contextlib.pyi
+++ b/typeshed/stdlib/3/contextlib.pyi
@@ -2,14 +2,16 @@
# NOTE: These are incomplete!
-from typing import Any, TypeVar, Generic
-
-# TODO more precise type?
-def contextmanager(func: Any) -> Any: ...
+from typing import Callable, Generic, Iterator, TypeVar
_T = TypeVar('_T')
-class closing(Generic[_T]):
- def __init__(self, thing: _T) -> None: ...
+class ContextManager(Generic[_T]):
def __enter__(self) -> _T: ...
def __exit__(self, *exc_info) -> None: ...
+
+# TODO this doesn't capture the relationship that the returned function's args are the same as func's.
+def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., ContextManager[_T]]: ...
+
+class closing(ContextManager[_T], Generic[_T]):
+ def __init__(self, thing: _T) -> None: ...
diff --git a/typeshed/stdlib/3/copy.pyi b/typeshed/stdlib/3/copy.pyi
index 237f420..0661cb7 100644
--- a/typeshed/stdlib/3/copy.pyi
+++ b/typeshed/stdlib/3/copy.pyi
@@ -2,9 +2,9 @@
# NOTE: These are incomplete!
-from typing import TypeVar
+from typing import TypeVar, Dict, Any
_T = TypeVar('_T')
-def deepcopy(x: _T) -> _T: ...
+def deepcopy(x: _T, memo: Dict[Any, Any] = ...) -> _T: ...
def copy(x: _T) -> _T: ...
diff --git a/typeshed/stdlib/3/curses/__init__.pyi b/typeshed/stdlib/3/curses/__init__.pyi
new file mode 100644
index 0000000..d7cb787
--- /dev/null
+++ b/typeshed/stdlib/3/curses/__init__.pyi
@@ -0,0 +1,12 @@
+# Stubs for curses (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from _curses import *
+# Stubgen imports a python version of has_key only if it's not present
+# in _curses (which it is in this stub)
+# from .has_key import has_key as has_key
+
+def initscr(): ...
+def start_color(): ...
+def wrapper(func, *args, **kwds): ...
diff --git a/typeshed/stdlib/3/fcntl.pyi b/typeshed/stdlib/3/fcntl.pyi
index 4db5947..a50fa0d 100644
--- a/typeshed/stdlib/3/fcntl.pyi
+++ b/typeshed/stdlib/3/fcntl.pyi
@@ -1,11 +1,96 @@
# Stubs for fcntl
-
-# NOTE: These are incomplete!
-
+from typing import Any, IO, Union
import typing
-FD_CLOEXEC = 0
-F_GETFD = 0
-F_SETFD = 0
+FASYNC = ... # type: int
+FD_CLOEXEC = ... # type: int
+DN_ACCESS = ... # type: int
+DN_ATTRIB = ... # type: int
+DN_CREATE = ... # type: int
+DN_DELETE = ... # type: int
+DN_MODIFY = ... # type: int
+DN_MULTISHOT = ... # type: int
+DN_RENAME = ... # type: int
+F_DUPFD = ... # type: int
+F_DUPFD_CLOEXEC = ... # type: int
+F_FULLFSYNC = ... # type: int
+F_EXLCK = ... # type: int
+F_GETFD = ... # type: int
+F_GETFL = ... # type: int
+F_GETLEASE = ... # type: int
+F_GETLK = ... # type: int
+F_GETLK64 = ... # type: int
+F_GETOWN = ... # type: int
+F_NOCACHE = ... # type: int
+F_GETSIG = ... # type: int
+F_NOTIFY = ... # type: int
+F_RDLCK = ... # type: int
+F_SETFD = ... # type: int
+F_SETFL = ... # type: int
+F_SETLEASE = ... # type: int
+F_SETLK = ... # type: int
+F_SETLK64 = ... # type: int
+F_SETLKW = ... # type: int
+F_SETLKW64 = ... # type: int
+F_SETOWN = ... # type: int
+F_SETSIG = ... # type: int
+F_SHLCK = ... # type: int
+F_UNLCK = ... # type: int
+F_WRLCK = ... # type: int
+I_ATMARK = ... # type: int
+I_CANPUT = ... # type: int
+I_CKBAND = ... # type: int
+I_FDINSERT = ... # type: int
+I_FIND = ... # type: int
+I_FLUSH = ... # type: int
+I_FLUSHBAND = ... # type: int
+I_GETBAND = ... # type: int
+I_GETCLTIME = ... # type: int
+I_GETSIG = ... # type: int
+I_GRDOPT = ... # type: int
+I_GWROPT = ... # type: int
+I_LINK = ... # type: int
+I_LIST = ... # type: int
+I_LOOK = ... # type: int
+I_NREAD = ... # type: int
+I_PEEK = ... # type: int
+I_PLINK = ... # type: int
+I_POP = ... # type: int
+I_PUNLINK = ... # type: int
+I_PUSH = ... # type: int
+I_RECVFD = ... # type: int
+I_SENDFD = ... # type: int
+I_SETCLTIME = ... # type: int
+I_SETSIG = ... # type: int
+I_SRDOPT = ... # type: int
+I_STR = ... # type: int
+I_SWROPT = ... # type: int
+I_UNLINK = ... # type: int
+LOCK_EX = ... # type: int
+LOCK_MAND = ... # type: int
+LOCK_NB = ... # type: int
+LOCK_READ = ... # type: int
+LOCK_RW = ... # type: int
+LOCK_SH = ... # type: int
+LOCK_UN = ... # type: int
+LOCK_WRITE = ... # type: int
+
+_AnyFile = Union[int, IO[Any]]
-def fcntl(fd: int, op: int, arg: int = ...) -> int: ...
+# TODO All these return either int or bytes depending on the value of
+# cmd (not on the type of arg).
+def fcntl(fd: _AnyFile,
+ cmd: int,
+ arg: Union[int, bytes] = ...) -> Any: ...
+# TODO This function accepts any object supporting a buffer interface,
+# as arg, is there a better way to express this than bytes?
+def ioctl(fd: _AnyFile,
+ request: int,
+ arg: Union[int, bytes] = ...,
+ mutate_flag: bool = ...) -> Any: ...
+def flock(fd: _AnyFile, operation: int) -> None: ...
+def lockf(fd: _AnyFile,
+ cmd: int,
+ len: int = ...,
+ start: int = ...,
+ whence: int = ...) -> Any: ...
diff --git a/typeshed/stdlib/3/fileinput.pyi b/typeshed/stdlib/3/fileinput.pyi
new file mode 100644
index 0000000..7d68c27
--- /dev/null
+++ b/typeshed/stdlib/3/fileinput.pyi
@@ -0,0 +1,48 @@
+from typing import Iterable, Callable, IO, AnyStr, Generic, Any, Union, Iterator
+
+
+def input(
+ files: Union[str, Iterable[str]]=None,
+ inplace: bool=...,
+ backup: str=...,
+ bufsize: int=...,
+ mode: str=...,
+ openhook: Callable[[str, str], IO[AnyStr]]=...) -> Iterable[AnyStr]: ...
+
+
+def close() -> None: ...
+def nextfile() -> None: ...
+def filename() -> str: ...
+def lineno() -> int: ...
+def isfirstline() -> bool: ...
+def isstdin() -> bool: ...
+
+class FileInput(Iterable[AnyStr], Generic[AnyStr]):
+ def __init__(
+ self,
+ files: Union[str, Iterable[str]]=None,
+ inplace: bool=...,
+ backup: str=...,
+ bufsize: int=...,
+ mode: str=...,
+ openhook: Callable[[str, str], IO[AnyStr]]=...
+ ) -> None: ...
+
+ def __del__(self) -> None: ...
+ def close(self) -> None: ...
+ def __enter__(self) -> 'FileInput[AnyStr]': ...
+ def __exit__(self, type: Any, value: Any, traceback: Any) -> None: ...
+ def __iter__(self) -> Iterator[AnyStr]: ...
+ def __next__(self) -> AnyStr: ...
+ def __getitem__(self, i) -> AnyStr: ...
+ def nextfile(self) -> None: ...
+ def readline(self) -> AnyStr: ...
+ def filename(self) -> str: ...
+ def lineno(self) -> int: ...
+ def filelineno(self) -> int: ...
+ def fileno(self) -> int: ...
+ def isfirstline(self) -> bool: ...
+ def isstdin(self) -> bool: ...
+
+def hook_compressed(filename: str, mode: str) -> IO[AnyStr]: ...
+def hook_encoded(encoding: str) -> IO[AnyStr]: ...
diff --git a/typeshed/stdlib/3/functools.pyi b/typeshed/stdlib/3/functools.pyi
index e8563b5..0982cfe 100644
--- a/typeshed/stdlib/3/functools.pyi
+++ b/typeshed/stdlib/3/functools.pyi
@@ -3,14 +3,19 @@
# NOTE: These are incomplete!
from abc import ABCMeta, abstractmethod
-from typing import Any, Callable, Generic, Dict, Iterator, Optional, Sequence, Tuple, TypeVar, NamedTuple
+from typing import Any, Callable, Generic, Dict, Iterable, Optional, Sequence, Tuple, TypeVar, NamedTuple, overload
from collections import namedtuple
_AnyCallable = Callable[..., Any]
_T = TypeVar("_T")
-def reduce(function: Callable[[_T], _T],
- sequence: Iterator[_T], initial: Optional[_T] = ...) -> _T: ...
+_S = TypeVar("_S")
+@overload
+def reduce(function: Callable[[_T, _S], _T],
+ sequence: Iterable[_S], initial: _T) -> _T: ...
+@overload
+def reduce(function: Callable[[_T, _T], _T],
+ sequence: Iterable[_T]) -> _T: ...
class CacheInfo(NamedTuple('CacheInfo', [
diff --git a/typeshed/stdlib/3/gc.pyi b/typeshed/stdlib/3/gc.pyi
index 7f45cdb..4d00dbe 100644
--- a/typeshed/stdlib/3/gc.pyi
+++ b/typeshed/stdlib/3/gc.pyi
@@ -1,10 +1,28 @@
# Stubs for gc
-# NOTE: These are incomplete!
+from typing import Any, Dict, List, Tuple
-import typing
-def collect(generation: int = ...) -> int: ...
+DEBUG_COLLECTABLE = ... # type: int
+DEBUG_LEAK = ... # type: int
+DEBUG_SAVEALL = ... # type: int
+DEBUG_STATS = ... # type: int
+DEBUG_UNCOLLECTABLE = ... # type: int
+callbacks = ... # type: List[Any]
+garbage = ... # type: List[Any]
+
+def collect(generations: int = ...) -> int: ...
def disable() -> None: ...
def enable() -> None: ...
+def get_count() -> Tuple[int, int, int]: ...
+def get_debug() -> int: ...
+def get_objects() -> List[Any]: ...
+def get_referents(*objs: Any) -> List[Any]: ...
+def get_referrers(*objs: Any) -> List[Any]: ...
+def get_stats() -> List[Dict[str, Any]]: ...
+def get_threshold() -> Tuple[int, int, int]: ...
+def is_tracked(obj: Any) -> bool: ...
def isenabled() -> bool: ...
+def set_debug(flags: int) -> None: ...
+def set_threshold(threshold0: int, threshold1: int = ...,
+ threshold2: int = ...) -> None: ...
diff --git a/typeshed/stdlib/3/getpass.pyi b/typeshed/stdlib/3/getpass.pyi
index 5938d61..96c5428 100644
--- a/typeshed/stdlib/3/getpass.pyi
+++ b/typeshed/stdlib/3/getpass.pyi
@@ -1,5 +1,13 @@
# Stubs for getpass
-# NOTE: These are incomplete!
+from typing import TextIO
+
+
+def getpass(prompt: str = ..., stream: TextIO = None) -> str: ...
+
def getuser() -> str: ...
+
+
+class GetPassWarning(UserWarning):
+ pass
diff --git a/typeshed/stdlib/3/gzip.pyi b/typeshed/stdlib/3/gzip.pyi
new file mode 100644
index 0000000..9d771fc
--- /dev/null
+++ b/typeshed/stdlib/3/gzip.pyi
@@ -0,0 +1,51 @@
+# Stubs for gzip (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+import _compression
+
+def open(filename, mode='', compresslevel=9, encoding=None, errors=None, newline=None): ...
+
+class _PaddedFile:
+ file = ... # type: Any
+ def __init__(self, f, prepend=b''): ...
+ def read(self, size): ...
+ def prepend(self, prepend=b''): ...
+ def seek(self, off): ...
+ def seekable(self): ...
+
+class GzipFile(_compression.BaseStream):
+ myfileobj = ... # type: Any
+ mode = ... # type: Any
+ name = ... # type: Any
+ compress = ... # type: Any
+ fileobj = ... # type: Any
+ def __init__(self, filename=None, mode=None, compresslevel=9, fileobj=None, mtime=None): ...
+ @property
+ def filename(self): ...
+ @property
+ def mtime(self): ...
+ crc = ... # type: Any
+ def write(self, data): ...
+ def read(self, size=-1): ...
+ def read1(self, size=-1): ...
+ def peek(self, n): ...
+ @property
+ def closed(self): ...
+ def close(self): ...
+ def flush(self, zlib_mode=...): ...
+ def fileno(self): ...
+ def rewind(self): ...
+ def readable(self): ...
+ def writable(self): ...
+ def seekable(self): ...
+ def seek(self, offset, whence=...): ...
+ def readline(self, size=-1): ...
+
+class _GzipReader(_compression.DecompressReader):
+ def __init__(self, fp): ...
+ def read(self, size=-1): ...
+
+def compress(data, compresslevel=9): ...
+def decompress(data): ...
diff --git a/typeshed/stdlib/3/hashlib.pyi b/typeshed/stdlib/3/hashlib.pyi
index ee8b235..4d3709b 100644
--- a/typeshed/stdlib/3/hashlib.pyi
+++ b/typeshed/stdlib/3/hashlib.pyi
@@ -1,11 +1,17 @@
# Stubs for hashlib
-# NOTE: These are incomplete!
-
from abc import abstractmethod, ABCMeta
-import typing
+from typing import AbstractSet
class Hash(metaclass=ABCMeta):
+ digest_size = ... # type: int
+ block_size = ... # type: int
+
+ # [Python documentation note] Changed in version 3.4: The name attribute has
+ # been present in CPython since its inception, but until Python 3.4 was not
+ # formally specified, so may not exist on some platforms
+ name = ... # type: str
+
@abstractmethod
def update(self, arg: bytes) -> None: ...
@abstractmethod
@@ -23,3 +29,12 @@ def sha384(arg: bytes = ...) -> Hash: ...
def sha512(arg: bytes = ...) -> Hash: ...
def new(name: str, data: bytes = ...) -> Hash: ...
+
+# New in version 3.2
+algorithms_guaranteed = ... # type: AbstractSet[str]
+algorithms_available = ... # type: AbstractSet[str]
+
+# New in version 3.4
+# TODO The documentation says "password and salt are interpreted as buffers of
+# bytes", should we declare something other than bytes here?
+def pbkdf2_hmac(name: str, password: bytes, salt: bytes, rounds: int, dklen: int = ...) -> bytes: ...
diff --git a/typeshed/stdlib/3/hmac.pyi b/typeshed/stdlib/3/hmac.pyi
new file mode 100644
index 0000000..4c7972b
--- /dev/null
+++ b/typeshed/stdlib/3/hmac.pyi
@@ -0,0 +1,27 @@
+# Stubs for hmac (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from _operator import _compare_digest as compare_digest
+
+trans_5C = ... # type: Any
+trans_36 = ... # type: Any
+digest_size = ... # type: Any
+
+class HMAC:
+ blocksize = ... # type: Any
+ digest_cons = ... # type: Any
+ outer = ... # type: Any
+ inner = ... # type: Any
+ digest_size = ... # type: Any
+ block_size = ... # type: Any
+ def __init__(self, key, msg=None, digestmod=None): ...
+ @property
+ def name(self): ...
+ def update(self, msg): ...
+ def copy(self): ...
+ def digest(self): ...
+ def hexdigest(self): ...
+
+def new(key, msg=None, digestmod=None): ...
diff --git a/typeshed/stdlib/3/http/cookies.pyi b/typeshed/stdlib/3/http/cookies.pyi
new file mode 100644
index 0000000..e7e7855
--- /dev/null
+++ b/typeshed/stdlib/3/http/cookies.pyi
@@ -0,0 +1,46 @@
+# Stubs for http.cookies (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class CookieError(Exception): ...
+
+class Morsel(dict):
+ def __init__(self): ...
+ @property
+ def key(self): ...
+ @key.setter
+ def key(self, key): ...
+ @property
+ def value(self): ...
+ @value.setter
+ def value(self, value): ...
+ @property
+ def coded_value(self): ...
+ @coded_value.setter
+ def coded_value(self, coded_value): ...
+ def __setitem__(self, K, V): ...
+ def setdefault(self, key, val=None): ...
+ def __eq__(self, morsel): ...
+ __ne__ = ... # type: Any
+ def copy(self): ...
+ def update(self, values): ...
+ def isReservedKey(self, K): ...
+ def set(self, key, val, coded_val, LegalChars=...): ...
+ def output(self, attrs=None, header=''): ...
+ def js_output(self, attrs=None): ...
+ def OutputString(self, attrs=None): ...
+
+class BaseCookie(dict):
+ def value_decode(self, val): ...
+ def value_encode(self, val): ...
+ def __init__(self, input=None): ...
+ def __setitem__(self, key, value): ...
+ def output(self, attrs=None, header='', sep=''): ...
+ def js_output(self, attrs=None): ...
+ def load(self, rawdata): ...
+
+class SimpleCookie(BaseCookie):
+ def value_decode(self, val): ...
+ def value_encode(self, val): ...
diff --git a/typeshed/stdlib/3/inspect.pyi b/typeshed/stdlib/3/inspect.pyi
index abbc7f6..fa75175 100644
--- a/typeshed/stdlib/3/inspect.pyi
+++ b/typeshed/stdlib/3/inspect.pyi
@@ -1,26 +1,146 @@
# Stubs for inspect
-from typing import Any, Tuple, List, Dict, Callable, NamedTuple
-from types import FrameType
+from typing import (AbstractSet, Any, Tuple, List, Dict, Callable, Generator,
+ Mapping, MutableMapping, NamedTuple, Optional, Sequence, Union,
+ )
+from types import FrameType, ModuleType, TracebackType
-_object = object
+#
+# Types and members
+#
+ModuleInfo = NamedTuple('ModuleInfo', [('name', str),
+ ('suffix', str),
+ ('mode', str),
+ ('module_type', int),
+ ])
+def getmembers(object: object,
+ predicate: Callable[[Any], bool] = ...,
+ ) -> List[Tuple[str, object]]: ...
+def getmoduleinfo(path: str) -> Optional[ModuleInfo]: ...
+def getmodulename(path: str) -> Optional[str]: ...
-def getmembers(obj: object, predicate: Callable[[Any], bool]) -> List[Tuple[str, object]]: ...
+def ismodule(object: object) -> bool: ...
+def isclass(object: object) -> bool: ...
+def ismethod(object: object) -> bool: ...
+def isfunction(object: object) -> bool: ...
+def isgeneratorfunction(object: object) -> bool: ...
+def isgenerator(object: object) -> bool: ...
-def isclass(obj: object) -> bool: ...
+# Python 3.5+
+def iscoroutinefunction(object: object) -> bool: ...
+def iscoroutine(object: object) -> bool: ...
+def isawaitable(object: object) -> bool: ...
-# namedtuple('Attribute', 'name kind defining_class object')
-class Attribute(tuple):
- name = ... # type: str
- kind = ... # type: str
- defining_class = ... # type: type
- object = ... # type: _object
+def istraceback(object: object) -> bool: ...
+def isframe(object: object) -> bool: ...
+def iscode(object: object) -> bool: ...
+def isbuiltin(object: object) -> bool: ...
+def isroutine(object: object) -> bool: ...
+def isabstract(object: object) -> bool: ...
+def ismethoddescriptor(object: object) -> bool: ...
+def isdatadescriptor(object: object) -> bool: ...
+def isgetsetdescriptor(object: object) -> bool: ...
+def ismemberdescriptor(object: object) -> bool: ...
-def classify_class_attrs(cls: type) -> List[Attribute]: ...
+#
+# Retrieving source code
+#
+def getdoc(object: object) -> str: ...
+def getcomments(object: object) -> str: ...
+def getfile(object: object) -> str: ...
+def getmodule(object: object) -> ModuleType: ...
+def getsourcefile(object: object) -> str: ...
+# TODO restrict to "module, class, method, function, traceback, frame,
+# or code object"
+def getsourcelines(object: object) -> Tuple[List[str], int]: ...
+# TODO restrict to "a module, class, method, function, traceback, frame,
+# or code object"
+def getsource(object: object) -> str: ...
def cleandoc(doc: str) -> str: ...
-def getsourcelines(obj: object) -> Tuple[List[str], int]: ...
+
+#
+# Introspecting callables with the Signature object (Python 3.3+)
+#
+def signature(callable: Callable[..., Any],
+ *,
+ follow_wrapped: bool = True) -> 'Signature': ...
+
+class Signature:
+ def __init__(self,
+ parameters: Optional[Sequence['Parameter']] = ...,
+ *,
+ return_annotation: Any = ...) -> None: ...
+ # TODO: can we be more specific here?
+ empty = ... # type: object
+
+ parameters = ... # type: Mapping[str, 'Parameter']
+
+ # TODO: can we be more specific here?
+ return_annotation = ... # type: Any
+
+ def bind(self, *args: Any, **kwargs: Any) -> 'BoundArguments': ...
+ def bind_partial(self, *args: Any, **kwargs: Any) -> 'BoundArguments': ...
+ def replace(self,
+ *,
+ parameters: Optional[Sequence['Parameter']] = ...,
+ return_annotation: Any = ...) -> 'Signature': ...
+
+ # Python 3.5+
+ @classmethod
+ def from_callable(cls,
+ obj: Callable[..., Any],
+ *,
+ follow_wrapped: bool = True) -> 'Signature': ...
+
+# The name is the same as the enum's name in CPython
+class _ParameterKind: pass
+
+class Parameter:
+ def __init__(self,
+ name: str,
+ kind: _ParameterKind,
+ *,
+ default: Any = ...,
+ annotation: Any = ...) -> None: ...
+ empty = ... # type: Any
+ name = ... # type: str
+ default = ... # type: Any
+ annotation = ... # type: Any
+
+ kind = ... # type: _ParameterKind
+ POSITIONAL_ONLY = ... # type: _ParameterKind
+ POSITIONAL_OR_KEYWORD = ... # type: _ParameterKind
+ VAR_POSITIONAL = ... # type: _ParameterKind
+ KEYWORD_ONLY = ... # type: _ParameterKind
+ VAR_KEYWORD = ... # type: _ParameterKind
+
+ def replace(self,
+ *,
+ name: Optional[str] = ...,
+ kind: Optional[_ParameterKind] = ...,
+ default: Any = ...,
+ annotation: Any = ...) -> 'Parameter': ...
+
+class BoundArguments:
+ arguments = ... # type: MutableMapping[str, Any]
+ args = ... # type: Tuple[Any, ...]
+ kwargs = ... # type: Dict[str, Any]
+ signature = ... # type: Signature
+
+ # Python 3.5+
+ def apply_defaults(self) -> None: ...
+
+
+#
+# Classes and functions
+#
+
+# TODO: The actual return type should be List[_ClassTreeItem] but mypy doesn't
+# seem to be supporting this at the moment:
+# _ClassTreeItem = Union[List['_ClassTreeItem'], Tuple[type, Tuple[type, ...]]]
+def getclasstree(classes: List[type], unique: bool = ...) -> Any: ...
ArgSpec = NamedTuple('ArgSpec', [('args', List[str]),
('varargs', str),
@@ -41,4 +161,129 @@ FullArgSpec = NamedTuple('FullArgSpec', [('args', List[str]),
def getfullargspec(func: object) -> FullArgSpec: ...
-def stack() -> List[Tuple[FrameType, str, int, str, List[str], int]]: ...
+# TODO make the field types more specific here
+ArgInfo = NamedTuple('ArgInfo', [('args', List[str]),
+ ('varargs', Optional[str]),
+ ('keywords', Optional[str]),
+ ('locals', Dict[str, Any]),
+ ])
+
+def getargvalues(frame: FrameType) -> ArgInfo: ...
+def formatargspec(args: List[str],
+ varargs: Optional[str] = ...,
+ varkw: Optional[str] = ...,
+ defaults: Optional[Tuple[Any]] = ...,
+ kwonlyargs: Optional[List[str]] = ...,
+ kwonlydefaults: Optional[Dict[str, Any]] = ...,
+ annotations: Optional[Dict[str, Any]] = ...,
+ formatarg: Optional[Callable[[str], str]] = ...,
+ formatvarargs: Optional[Callable[[str], str]] = ...,
+ formatvarkw: Optional[Callable[[str], str]] = ...,
+ formatvalue: Optional[Callable[[Any], str]] = ...,
+ formatreturns: Optional[Callable[[Any], str]] = ...,
+ formatannotations: Optional[Callable[[Any], str]] = ...,
+ ) -> str: ...
+def formatargvalues(args: List[str],
+ varargs: Optional[str] = ...,
+ varkw: Optional[str] = ...,
+ locals: Optional[Dict[str, Any]] = ...,
+ formatarg: Optional[Callable[[str], str]] = ...,
+ formatvarargs: Optional[Callable[[str], str]] = ...,
+ formatvarkw: Optional[Callable[[str], str]] = ...,
+ formatvalue: Optional[Callable[[Any], str]] = ...,
+ ) -> str: ...
+def getmro(cls: type) -> Tuple[type, ...]: ...
+
+# Python 3.2+
+def getcallargs(func: Callable[..., Any],
+ *args: Any,
+ **kwds: Any) -> Dict[str, Any]: ...
+
+
+# Python 3.3+
+ClosureVars = NamedTuple('ClosureVars', [('nonlocals', Mapping[str, Any]),
+ ('globals', Mapping[str, Any]),
+ ('builtins', Mapping[str, Any]),
+ ('unbound', AbstractSet[str]),
+ ])
+def getclosurevars(func: Callable[..., Any]) -> ClosureVars: ...
+
+# Python 3.4+
+def unwrap(func: Callable[..., Any],
+ *,
+ stop: Callable[[Any], Any]) -> Any: ...
+
+
+#
+# The interpreter stack
+#
+
+# Python 3.5+ (functions returning it used to return regular tuples)
+FrameInfo = NamedTuple('FrameInfo', [('frame', FrameType),
+ ('filename', str),
+ ('lineno', int),
+ ('function', str),
+ ('code_context', List[str]),
+ ('index', int),
+ ])
+
+# TODO make the frame type more specific
+def getframeinfo(frame: Any, context: int = 1) -> FrameInfo: ...
+def getouterframes(frame: Any, context: int = 1) -> List[FrameInfo]: ...
+def getinnerframes(traceback: TracebackType, context: int = 1) -> List[FrameInfo]:
+ ...
+def currentframe() -> Optional[FrameType]: ...
+def stack(context: int = 1) -> List[FrameInfo]: ...
+def trace(context: int = 1) -> List[FrameInfo]: ...
+
+#
+# Fetching attributes statically
+#
+
+# Python 3.2+
+def getattr_static(obj: object, attr: str, default: Optional[Any] = ...) -> Any: ...
+
+
+#
+# Current State of Generators and Coroutines
+#
+
+# TODO In the next two blocks of code, can we be more specific regarding the
+# type of the "enums"?
+
+# Python 3.2+
+GEN_CREATED = ... # type: str
+GEN_RUNNING = ... # type: str
+GEN_SUSPENDED = ... # type: str
+GEN_CLOSED = ... # type: str
+def getgeneratorstate(generator: Generator[Any, Any, Any]) -> str: ...
+
+# Python 3.5+
+CORO_CREATED = ... # type: str
+CORO_RUNNING = ... # type: str
+CORO_SUSPENDED = ... # type: str
+CORO_CLOSED = ... # type: str
+# TODO can we be more specific than "object"?
+def getcoroutinestate(coroutine: object) -> str: ...
+
+# Python 3.3+
+def getgeneratorlocals(generator: Generator[Any, Any, Any]) -> Dict[str, Any]: ...
+
+# Python 3.5+
+# TODO can we be more specific than "object"?
+def getcoroutinelocals(coroutine: object) -> Dict[str, Any]: ...
+
+
+#
+# The following seems undocumented but it was already present in this file
+#
+_object = object
+
+# namedtuple('Attribute', 'name kind defining_class object')
+class Attribute(tuple):
+ name = ... # type: str
+ kind = ... # type: str
+ defining_class = ... # type: type
+ object = ... # type: _object
+
+def classify_class_attrs(cls: type) -> List[Attribute]: ...
diff --git a/typeshed/stdlib/3/io.pyi b/typeshed/stdlib/3/io.pyi
index ca410f0..2cc7ee9 100644
--- a/typeshed/stdlib/3/io.pyi
+++ b/typeshed/stdlib/3/io.pyi
@@ -90,7 +90,7 @@ class BytesIO(BinaryIO):
def __iter__(self) -> Iterator[bytes]: ...
def __enter__(self) -> 'BytesIO': ...
- def __exit__(self, type, value, traceback) -> bool: ...
+ def __exit__(self, t: type = None, value: BaseException = None, traceback: Any = None) -> bool: ...
class StringIO(TextIO):
def __init__(self, initial_value: str = ...,
@@ -117,7 +117,7 @@ class StringIO(TextIO):
def __iter__(self) -> Iterator[str]: ...
def __enter__(self) -> 'StringIO': ...
- def __exit__(self, type, value, traceback) -> bool: ...
+ def __exit__(self, t: type = None, value: BaseException = None, traceback: Any = None) -> bool: ...
class TextIOWrapper(TextIO):
# TODO: This is actually a base class of _io._TextIOBase.
@@ -147,4 +147,4 @@ class TextIOWrapper(TextIO):
def __iter__(self) -> Iterator[str]: ...
def __enter__(self) -> StringIO: ...
- def __exit__(self, type, value, traceback) -> bool: ...
+ def __exit__(self, t: type = None, value: BaseException = None, traceback: Any = None) -> bool: ...
diff --git a/typeshed/stdlib/3/itertools.pyi b/typeshed/stdlib/3/itertools.pyi
index cb219b4..e24bf77 100644
--- a/typeshed/stdlib/3/itertools.pyi
+++ b/typeshed/stdlib/3/itertools.pyi
@@ -3,7 +3,7 @@
# Based on http://docs.python.org/3.2/library/itertools.html
from typing import (Iterator, TypeVar, Iterable, overload, Any, Callable, Tuple,
- Union, Sequence)
+ Union, Sequence, Generic)
_T = TypeVar('_T')
_S = TypeVar('_S')
@@ -18,8 +18,14 @@ def repeat(object: _T) -> Iterator[_T]: ...
def repeat(object: _T, times: int) -> Iterator[_T]: ...
def accumulate(iterable: Iterable[_T]) -> Iterator[_T]: ...
-def chain(*iterables: Iterable[_T]) -> Iterator[_T]: ...
-# TODO chain.from_Iterable
+
+class chain(Iterator[_T], Generic[_T]):
+ def __init__(self, *iterables: Iterable[_T]) -> None: ...
+ def __next__(self) -> _T: ...
+ def __iter__(self) -> Iterator[_T]: ...
+ @staticmethod
+ def from_iterable(iterable: Iterable[Iterable[_S]]) -> Iterator[_S]: ...
+
def compress(data: Iterable[_T], selectors: Iterable[Any]) -> Iterator[_T]: ...
def dropwhile(predicate: Callable[[_T], Any],
iterable: Iterable[_T]) -> Iterator[_T]: ...
diff --git a/typeshed/stdlib/3/json.pyi b/typeshed/stdlib/3/json.pyi
index aec903f..31a8120 100644
--- a/typeshed/stdlib/3/json.pyi
+++ b/typeshed/stdlib/3/json.pyi
@@ -1,4 +1,4 @@
-from typing import Any, IO, Optional, Tuple, Callable, Dict, List
+from typing import Any, IO, Optional, Tuple, Callable, Dict, List, Union
class JSONDecodeError(object):
def dumps(self, obj: Any) -> str: ...
@@ -12,7 +12,7 @@ def dumps(obj: Any,
check_circular: bool = ...,
allow_nan: bool = ...,
cls: Any = ...,
- indent: Optional[int] = ...,
+ indent: Union[None, int, str] = ...,
separators: Optional[Tuple[str, str]] = ...,
default: Optional[Callable[[Any], Any]] = ...,
sort_keys: bool = ...,
@@ -25,7 +25,7 @@ def dump(obj: Any,
check_circular: bool = ...,
allow_nan: bool = ...,
cls: Any = ...,
- indent: Optional[int] = ...,
+ indent: Union[None, int, str] = ...,
separators: Optional[Tuple[str, str]] = ...,
default: Optional[Callable[[Any], Any]] = ...,
sort_keys: bool = ...,
diff --git a/typeshed/stdlib/3/mimetypes.pyi b/typeshed/stdlib/3/mimetypes.pyi
new file mode 100644
index 0000000..a014447
--- /dev/null
+++ b/typeshed/stdlib/3/mimetypes.pyi
@@ -0,0 +1,26 @@
+# Stubs for mimetypes (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class MimeTypes:
+ encodings_map = ... # type: Any
+ suffix_map = ... # type: Any
+ types_map = ... # type: Any
+ types_map_inv = ... # type: Any
+ def __init__(self, filenames=..., strict=True): ...
+ def add_type(self, type, ext, strict=True): ...
+ def guess_type(self, url, strict=True): ...
+ def guess_all_extensions(self, type, strict=True): ...
+ def guess_extension(self, type, strict=True): ...
+ def read(self, filename, strict=True): ...
+ def readfp(self, fp, strict=True): ...
+ def read_windows_registry(self, strict=True): ...
+
+def guess_type(url, strict=True): ...
+def guess_all_extensions(type, strict=True): ...
+def guess_extension(type, strict=True): ...
+def add_type(type, ext, strict=True): ...
+def init(files=None): ...
+def read_mime_types(file): ...
diff --git a/typeshed/stdlib/3/multiprocessing/__init__.pyi b/typeshed/stdlib/3/multiprocessing/__init__.pyi
index 32cf8f7..f3b5bb3 100644
--- a/typeshed/stdlib/3/multiprocessing/__init__.pyi
+++ b/typeshed/stdlib/3/multiprocessing/__init__.pyi
@@ -10,3 +10,6 @@ class Queue():
class Value():
def __init__(typecode_or_type: str, *args: Any, lock: bool = ...) -> None: ...
+
+# ----- multiprocessing function stubs -----
+def cpu_count() -> int: ...
diff --git a/typeshed/stdlib/3/multiprocessing/pool.pyi b/typeshed/stdlib/3/multiprocessing/pool.pyi
index 2f3075a..32fe4f7 100644
--- a/typeshed/stdlib/3/multiprocessing/pool.pyi
+++ b/typeshed/stdlib/3/multiprocessing/pool.pyi
@@ -2,5 +2,35 @@
# NOTE: These are incomplete!
+from typing import Any, Callable, Dict, Iterable, List, Sequence
+
+class AsyncResult():
+ def get(self, timeout: float = -1) -> Any: ...
+ def wait(self, timeout: float = -1) -> None: ...
+ def ready(self) -> bool: ...
+ def successful(self) -> bool: ...
+
class ThreadPool():
def __init__(self, processes: int = ...) -> None: ...
+ def apply_async(self, func: Callable[..., Any],
+ args: Sequence[Any]=(),
+ kwds: Dict[str, Any]={},
+ callback: Callable[..., None] = None) -> AsyncResult: ...
+ def apply(self, func: Callable[..., Any],
+ args: Sequence[Any]=(),
+ kwds: Dict[str, Any]={}) -> Any: ...
+ def map(self, func: Callable[..., Any],
+ iterable: Iterable[Any]=()) -> List[Any]: ...
+ def map_async(self, func: Callable[..., Any],
+ iterable: Iterable[Any] = (),
+ chunksize: int = -1,
+ callback: Callable[..., None] = None) -> AsyncResult: ...
+ def imap(self, func: Callable[..., Any],
+ iterable: Iterable[Any]=()) -> Iterable[Any]: ...
+    def imap_unordered(self, func: Callable[..., Any],
+                       iterable: Iterable[Any] = (),
+                       chunksize: int = -1,
+                       ) -> Iterable[Any]: ...
+ def close(self) -> None: ...
+ def terminate(self) -> None: ...
+ def join(self) -> None: ...
diff --git a/typeshed/stdlib/3/os/__init__.pyi b/typeshed/stdlib/3/os/__init__.pyi
index 0abf313..4352a12 100644
--- a/typeshed/stdlib/3/os/__init__.pyi
+++ b/typeshed/stdlib/3/os/__init__.pyi
@@ -62,7 +62,7 @@ W_OK = 0
X_OK = 0
class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]):
- def copy(self) -> _Environ[AnyStr]: ...
+ def copy(self) -> Dict[AnyStr, AnyStr]: ...
environ = ... # type: _Environ[str]
environb = ... # type: _Environ[bytes]
diff --git a/typeshed/stdlib/3/pdb.pyi b/typeshed/stdlib/3/pdb.pyi
new file mode 100644
index 0000000..1c375fe
--- /dev/null
+++ b/typeshed/stdlib/3/pdb.pyi
@@ -0,0 +1,30 @@
+# Stub for pdb (incomplete, only some global functions)
+
+from typing import Any, Dict, Optional
+
+def run(statement: str,
+        globals: Optional[Dict[str, Any]] = None,
+        locals: Optional[Dict[str, Any]] = None) -> None:
+    ...
+
+def runeval(expression: str,
+            globals: Optional[Dict[str, Any]] = None,
+            locals: Optional[Dict[str, Any]] = None) -> Any:
+    ...
+
+def runctx(statement: str,
+ globals: Dict[str, Any],
+ locals: Dict[str, Any]) -> None:
+ ...
+
+def runcall(*args: Any, **kwds: Any) -> Any:
+ ...
+
+def set_trace() -> None:
+ ...
+
+def post_mortem(t: Any = None) -> None:
+ ...
+
+def pm() -> None:
+ ...
diff --git a/typeshed/stdlib/3/pickle.pyi b/typeshed/stdlib/3/pickle.pyi
index b3dcd07..e6a14b2 100644
--- a/typeshed/stdlib/3/pickle.pyi
+++ b/typeshed/stdlib/3/pickle.pyi
@@ -1,12 +1,67 @@
# Stubs for pickle
-# NOTE: These are incomplete!
+from typing import Any, IO, Union, Tuple, Callable, Optional, Iterator
+# Imports used in type comments only.
+from typing import Mapping # noqa
+
+HIGHEST_PROTOCOL = ... # type: int
+DEFAULT_PROTOCOL = ... # type: int
+
+
+def dump(obj: Any, file: IO[bytes], protocol: Optional[int] = None, *,
+ fix_imports: bool = ...) -> None: ...
-from typing import Any, IO
def dumps(obj: Any, protocol: int = ..., *,
fix_imports: bool = ...) -> bytes: ...
-def loads(p: bytes, *, fix_imports: bool = ...,
+
+
+def loads(bytes_object: bytes, *, fix_imports: bool = ...,
encoding: str = ..., errors: str = ...) -> Any: ...
+
+
def load(file: IO[bytes], *, fix_imports: bool = ..., encoding: str = ...,
errors: str = ...) -> Any: ...
+
+
+class PickleError(Exception):
+ pass
+
+
+class PicklingError(PickleError):
+ pass
+
+
+class UnpicklingError(PickleError):
+ pass
+
+
+_reducedtype = Union[str,
+ Tuple[Callable[..., Any], Tuple],
+ Tuple[Callable[..., Any], Tuple, Any],
+ Tuple[Callable[..., Any], Tuple, Any,
+ Optional[Iterator]],
+ Tuple[Callable[..., Any], Tuple, Any,
+ Optional[Iterator], Optional[Iterator]]]
+
+
+class Pickler:
+ dispatch_table = ... # type: Mapping[type, Callable[[Any], _reducedtype]]
+
+    def __init__(self, file: IO[bytes], protocol: Optional[int] = None, *,
+ fix_imports: bool = ...) -> None: ...
+
+ def dump(self, obj: Any) -> None: ...
+
+ def persistent_id(self, obj: Any) -> Any: ...
+
+
+class Unpickler:
+ def __init__(self, file: IO[bytes], *, fix_imports: bool = ...,
+ encoding: str = ..., errors: str = ...) -> None: ...
+
+ def load(self) -> Any: ...
+
+ def persistent_load(self, pid: Any) -> Any: ...
+
+ def find_class(self, module: str, name: str) -> Any: ...
diff --git a/typeshed/stdlib/3/pprint.pyi b/typeshed/stdlib/3/pprint.pyi
index 95804e9..b846ff9 100644
--- a/typeshed/stdlib/3/pprint.pyi
+++ b/typeshed/stdlib/3/pprint.pyi
@@ -2,11 +2,11 @@
# Based on http://docs.python.org/3.2/library/pprint.html
-from typing import Any, Dict, Tuple, TextIO
+from typing import Any, Dict, Tuple, IO
def pformat(o: object, indent: int = ..., width: int = ...,
depth: int = ...) -> str: ...
-def pprint(o: object, stream: TextIO = ..., indent: int = ..., width: int = ...,
+def pprint(o: object, stream: IO[str] = ..., indent: int = ..., width: int = ...,
depth: int = ...) -> None: ...
def isreadable(o: object) -> bool: ...
def isrecursive(o: object) -> bool: ...
@@ -14,7 +14,7 @@ def saferepr(o: object) -> str: ...
class PrettyPrinter:
def __init__(self, indent: int = ..., width: int = ..., depth: int = ...,
- stream: TextIO = ...) -> None: ...
+ stream: IO[str] = ...) -> None: ...
def pformat(self, o: object) -> str: ...
def pprint(self, o: object) -> None: ...
def isreadable(self, o: object) -> bool: ...
diff --git a/typeshed/stdlib/3/runpy.pyi b/typeshed/stdlib/3/runpy.pyi
new file mode 100644
index 0000000..f7c257a
--- /dev/null
+++ b/typeshed/stdlib/3/runpy.pyi
@@ -0,0 +1,21 @@
+# Stubs for runpy (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class _TempModule:
+ mod_name = ... # type: Any
+ module = ... # type: Any
+ def __init__(self, mod_name): ...
+ def __enter__(self): ...
+ def __exit__(self, *args): ...
+
+class _ModifiedArgv0:
+ value = ... # type: Any
+ def __init__(self, value): ...
+ def __enter__(self): ...
+ def __exit__(self, *args): ...
+
+def run_module(mod_name, init_globals=None, run_name=None, alter_sys=False): ...
+def run_path(path_name, init_globals=None, run_name=None): ...
diff --git a/typeshed/stdlib/3/shelve.pyi b/typeshed/stdlib/3/shelve.pyi
new file mode 100644
index 0000000..ab6b2d9
--- /dev/null
+++ b/typeshed/stdlib/3/shelve.pyi
@@ -0,0 +1,31 @@
+from typing import Any, Dict, Iterator, Optional, Tuple
+import collections
+
+
+class Shelf(collections.MutableMapping):
+ def __init__(self, dict: Dict[bytes, Any], protocol: Optional[int] = None, writeback: bool = ..., keyencoding: str = 'utf-8') -> None: ...
+ def __iter__(self) -> Iterator[str]: ...
+ def __len__(self) -> int: ...
+ def __contains__(self, key: Any) -> bool: ... # key should be str, but it would conflict with superclass's type signature
+ def get(self, key: str, default: Any = None) -> Any: ...
+ def __getitem__(self, key: str) -> Any: ...
+ def __setitem__(self, key: str, value: Any) -> None: ...
+ def __delitem__(self, key: str) -> None: ...
+ def __enter__(self) -> Shelf: ...
+ def __exit__(self, type: Any, value: Any, traceback: Any) -> None: ...
+ def close(self) -> None: ...
+ def __del__(self) -> None: ...
+ def sync(self) -> None: ...
+
+class BsdDbShelf(Shelf):
+ def __init__(self, dict: Dict[bytes, Any], protocol: Optional[int] = None, writeback: bool = ..., keyencoding: str = 'utf-8') -> None: ...
+ def set_location(self, key: Any) -> Tuple[str, Any]: ...
+ def next(self) -> Tuple[str, Any]: ...
+ def previous(self) -> Tuple[str, Any]: ...
+ def first(self) -> Tuple[str, Any]: ...
+ def last(self) -> Tuple[str, Any]: ...
+
+class DbfilenameShelf(Shelf):
+ def __init__(self, filename: str, flag: str = 'c', protocol: Optional[int] = None, writeback: bool = ...) -> None: ...
+
+def open(filename: str, flag: str = 'c', protocol: Optional[int] = None, writeback: bool = ...) -> DbfilenameShelf: ...
diff --git a/typeshed/stdlib/3/socket.pyi b/typeshed/stdlib/3/socket.pyi
index d2f3c97..dba7b52 100644
--- a/typeshed/stdlib/3/socket.pyi
+++ b/typeshed/stdlib/3/socket.pyi
@@ -5,7 +5,7 @@
# see: http://hg.python.org/cpython/file/3d0686d90f55/Lib/socket.py
# see: http://nullege.com/codes/search/socket
-from typing import Any, Tuple, overload, List
+from typing import Any, Tuple, Union, List, overload
# ----- variables and constants -----
@@ -282,24 +282,10 @@ class socket:
# --- methods ---
# second tuple item is an address
def accept(self) -> Tuple['socket', Any]: ...
-
- @overload
- def bind(self, address: tuple) -> None: ...
- @overload
- def bind(self, address: str) -> None: ...
-
+ def bind(self, address: Union[tuple, str]) -> None: ...
def close(self) -> None: ...
-
- @overload
- def connect(self, address: tuple) -> None: ...
- @overload
- def connect(self, address: str) -> None: ...
-
- @overload
- def connect_ex(self, address: tuple) -> int: ...
- @overload
- def connect_ex(self, address: str) -> int: ...
-
+ def connect(self, address: Union[tuple, str]) -> None: ...
+ def connect_ex(self, address: Union[tuple, str]) -> int: ...
def detach(self) -> int: ...
def fileno(self) -> int: ...
@@ -308,7 +294,7 @@ class socket:
def getsockname(self) -> Any: ...
@overload
- def getsockopt(self, level: int, optname: str) -> bytes: ...
+ def getsockopt(self, level: int, optname: str) -> int: ...
@overload
def getsockopt(self, level: int, optname: str, buflen: int) -> bytes: ...
@@ -332,21 +318,10 @@ class socket:
def send(self, data: bytes, flags=...) -> int: ...
def sendall(self, data: bytes, flags=...) -> Any:
... # return type: None on success
-
- @overload
- def sendto(self, data: bytes, address: tuple, flags: int = ...) -> int: ...
- @overload
- def sendto(self, data: bytes, address: str, flags: int = ...) -> int: ...
-
+ def sendto(self, data: bytes, address: Union[tuple, str], flags: int = ...) -> int: ...
def setblocking(self, flag: bool) -> None: ...
- # TODO None valid for the value argument
- def settimeout(self, value: float) -> None: ...
-
- @overload
- def setsockopt(self, level: int, optname: str, value: int) -> None: ...
- @overload
- def setsockopt(self, level: int, optname: str, value: bytes) -> None: ...
-
+ def settimeout(self, value: Union[float, None]) -> None: ...
+ def setsockopt(self, level: int, optname: str, value: Union[int, bytes]) -> None: ...
def shutdown(self, how: int) -> None: ...
@@ -382,6 +357,5 @@ def inet_aton(ip_string: str) -> bytes: ... # ret val 4 bytes in length
def inet_ntoa(packed_ip: bytes) -> str: ...
def inet_pton(address_family: int, ip_string: str) -> bytes: ...
def inet_ntop(address_family: int, packed_ip: bytes) -> str: ...
-# TODO the timeout may be None
-def getdefaulttimeout() -> float: ...
+def getdefaulttimeout() -> Union[float, None]: ...
def setdefaulttimeout(timeout: float) -> None: ...
diff --git a/typeshed/stdlib/3/sqlite3/__init__.pyi b/typeshed/stdlib/3/sqlite3/__init__.pyi
new file mode 100644
index 0000000..28bc3ba
--- /dev/null
+++ b/typeshed/stdlib/3/sqlite3/__init__.pyi
@@ -0,0 +1,5 @@
+# Stubs for sqlite3 (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from sqlite3.dbapi2 import *
diff --git a/typeshed/stdlib/2.7/sqlite3/dbapi2.pyi b/typeshed/stdlib/3/sqlite3/dbapi2.pyi
similarity index 67%
copy from typeshed/stdlib/2.7/sqlite3/dbapi2.pyi
copy to typeshed/stdlib/3/sqlite3/dbapi2.pyi
index aa423c2..b75a0f8 100644
--- a/typeshed/stdlib/2.7/sqlite3/dbapi2.pyi
+++ b/typeshed/stdlib/3/sqlite3/dbapi2.pyi
@@ -1,15 +1,17 @@
-# Stubs for sqlite3.dbapi2 (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
+# Filip Hron <filip.hron at gmail.com>
+# based heavily on Andrey Vlasovskikh's python-skeletons https://github.com/JetBrains/python-skeletons/blob/master/sqlite3.py
-from typing import Any
+from typing import Any, Union, List, AnyStr
+from numbers import Integral
+from datetime import time, datetime
+from collections import Iterable
-paramstyle = ... # type: Any
-threadsafety = ... # type: Any
-apilevel = ... # type: Any
-Date = ... # type: Any
-Time = ... # type: Any
-Timestamp = ... # type: Any
+paramstyle = ... # type: str
+threadsafety = ... # type: int
+apilevel = ... # type: str
+Date = ... # type: datetime
+Time = ... # type: time
+Timestamp = ... # type: datetime
def DateFromTicks(ticks): ...
def TimeFromTicks(ticks): ...
@@ -61,18 +63,26 @@ converters = ... # type: Any
sqlite_version = ... # type: str
version = ... # type: str
+# TODO: adapt needs to get probed
def adapt(obj, protocol, alternate): ...
-def complete_statement(sql): ...
-def connect(*args, **kwargs): ...
-def enable_callback_tracebacks(flag): ...
-def enable_shared_cache(do_enable): ...
-def register_adapter(type, callable): ...
-def register_converter(typename, callable): ...
+def complete_statement(sql: str) -> bool: ...
+def connect(database: Union[bytes, str],
+ timeout: float = ... ,
+ detect_types: int = ...,
+ isolation_level: Union[str, None] = ...,
+ check_same_thread: bool = ...,
+ factory: Union[Connection, None] = ...,
+ cached_statements: int = ...) -> Connection: ...
+def enable_callback_tracebacks(flag: bool) -> None: ...
+def enable_shared_cache(do_enable: int) -> None: ...
+def register_adapter(type: type, callable: Any) -> None: ...
+# TODO: sqlite3.register_converter.__doc__ specifies callable as unknown
+def register_converter(typename: str, callable: bytes) -> None: ...
class Cache:
- def __init__(self, *args, **kwargs): ...
- def display(self, *args, **kwargs): ...
- def get(self, *args, **kwargs): ...
+ def __init__(self, *args, **kwargs) -> None: ...
+ def display(self, *args, **kwargs) -> None: ...
+ def get(self, *args, **kwargs) -> None: ...
class Connection:
DataError = ... # type: Any
@@ -91,20 +101,25 @@ class Connection:
text_factory = ... # type: Any
total_changes = ... # type: Any
def __init__(self, *args, **kwargs): ...
- def close(self, *args, **kwargs): ...
- def commit(self, *args, **kwargs): ...
- def create_aggregate(self, *args, **kwargs): ...
- def create_collation(self, *args, **kwargs): ...
- def create_function(self, *args, **kwargs): ...
- def cursor(self, *args, **kwargs): ...
- def execute(self, *args, **kwargs): ...
- def executemany(self, *args, **kwargs): ...
- def executescript(self, *args, **kwargs): ...
- def interrupt(self, *args, **kwargs): ...
- def iterdump(self, *args, **kwargs): ...
+ def close(self) -> None: ...
+ def commit(self) -> None: ...
+ def create_aggregate(self, name: str, num_params: int, aggregate_class: type) -> None: ...
+ def create_collation(self, name: str, callable: Any) -> None: ...
+ def create_function(self, name: str, num_params: int, func: Any) -> None: ...
+    def cursor(self, cursorClass: Union[type, None] = ...) -> Cursor: ...
+ def execute(self, sql: str, parameters: Iterable = ...) -> Cursor: ...
+ # TODO: please check in executemany() if seq_of_parameters type is possible like this
+ def executemany(self, sql: str, seq_of_parameters: Iterable[Iterable]) -> Cursor: ...
+    def executescript(self, sql_script: Union[bytes, str]) -> Cursor: ...
+ def interrupt(self, *args, **kwargs) -> None: ...
+ def iterdump(self, *args, **kwargs) -> None: ...
def rollback(self, *args, **kwargs): ...
- def set_authorizer(self, *args, **kwargs): ...
- def set_progress_handler(self, *args, **kwargs): ...
+ # TODO: set_authorizer(authorzer_callback)
+ # see https://docs.python.org/2/library/sqlite3.html#sqlite3.Connection.set_authorizer
+ # returns [SQLITE_OK, SQLITE_DENY, SQLITE_IGNORE] so perhaps int
+ def set_authorizer(self, *args, **kwargs) -> None: ...
+ # set_progress_handler(handler, n) -> see https://docs.python.org/2/library/sqlite3.html#sqlite3.Connection.set_progress_handler
+ def set_progress_handler(self, *args, **kwargs) -> None: ...
def set_trace_callback(self, *args, **kwargs): ...
def __call__(self, *args, **kwargs): ...
def __enter__(self, *args, **kwargs): ...
@@ -117,14 +132,17 @@ class Cursor:
lastrowid = ... # type: Any
row_factory = ... # type: Any
rowcount = ... # type: Any
+ # TODO: Cursor class accepts exactly 1 argument
+ # required type is sqlite3.Connection (which is imported as _Connection)
+ # however, the name of the __init__ variable is unknown
def __init__(self, *args, **kwargs): ...
def close(self, *args, **kwargs): ...
- def execute(self, *args, **kwargs): ...
- def executemany(self, *args, **kwargs): ...
- def executescript(self, *args, **kwargs): ...
- def fetchall(self, *args, **kwargs): ...
- def fetchmany(self, *args, **kwargs): ...
- def fetchone(self, *args, **kwargs): ...
+ def execute(self, sql: str, parameters: Iterable = ...) -> Cursor: ...
+    def executemany(self, sql: str, seq_of_parameters: Iterable[Iterable]) -> Cursor: ...
+    def executescript(self, sql_script: Union[bytes, str]) -> Cursor: ...
+ def fetchall(self) -> List[tuple]: ...
+ def fetchmany(self, size: Integral = ...) -> List[tuple]: ...
+ def fetchone(self) -> Union[tuple, None]: ...
def setinputsizes(self, *args, **kwargs): ...
def setoutputsize(self, *args, **kwargs): ...
def __iter__(self): ...
diff --git a/typeshed/stdlib/3/subprocess.pyi b/typeshed/stdlib/3/subprocess.pyi
index 03725bb..c606c33 100644
--- a/typeshed/stdlib/3/subprocess.pyi
+++ b/typeshed/stdlib/3/subprocess.pyi
@@ -29,7 +29,7 @@ class CalledProcessError(Exception):
cmd = ... # type: str
output = b'' # May be None
- def __init__(self, returncode: int, cmd: str, output: str) -> None: ...
+ def __init__(self, returncode: int, cmd: str, output: str = ...) -> None: ...
class Popen:
stdin = ... # type: IO[Any]
@@ -62,7 +62,7 @@ class Popen:
# Return str/bytes
def communicate(self, input=...) -> Tuple[Any, Any]: ...
def send_signal(self, signal: int) -> None: ...
- def terminatate(self) -> None: ...
+ def terminate(self) -> None: ...
def kill(self) -> None: ...
def __enter__(self) -> 'Popen': ...
def __exit__(self, type, value, traceback) -> bool: ...
diff --git a/typeshed/stdlib/3/tokenize.pyi b/typeshed/stdlib/3/tokenize.pyi
new file mode 100644
index 0000000..947c195
--- /dev/null
+++ b/typeshed/stdlib/3/tokenize.pyi
@@ -0,0 +1,99 @@
+# Stubs for tokenize (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, Union, TextIO
+from builtins import open as _builtin_open
+from token import *
+
+COMMENT = ... # type: Any
+NL = ... # type: Any
+ENCODING = ... # type: Any
+
+class TokenInfo:
+ @property
+ def exact_type(self): ...
+
+class TokenError(Exception): ...
+class StopTokenizing(Exception): ...
+
+class Untokenizer:
+ tokens = ... # type: Any
+ prev_row = ... # type: Any
+ prev_col = ... # type: Any
+ encoding = ... # type: Any
+ def __init__(self): ...
+ def add_whitespace(self, start): ...
+ def untokenize(self, iterable): ...
+ def compat(self, token, iterable): ...
+
+def untokenize(iterable): ...
+def detect_encoding(readline): ...
+def tokenize(readline): ...
+
+def open(filename: Union[str, bytes, int]) -> TextIO: ...
+
+# Names in __all__ with no definition:
+# AMPER
+# AMPEREQUAL
+# ASYNC
+# AT
+# ATEQUAL
+# AWAIT
+# CIRCUMFLEX
+# CIRCUMFLEXEQUAL
+# COLON
+# COMMA
+# DEDENT
+# DOT
+# DOUBLESLASH
+# DOUBLESLASHEQUAL
+# DOUBLESTAR
+# DOUBLESTAREQUAL
+# ELLIPSIS
+# ENDMARKER
+# EQEQUAL
+# EQUAL
+# ERRORTOKEN
+# GREATER
+# GREATEREQUAL
+# INDENT
+# ISEOF
+# ISNONTERMINAL
+# ISTERMINAL
+# LBRACE
+# LEFTSHIFT
+# LEFTSHIFTEQUAL
+# LESS
+# LESSEQUAL
+# LPAR
+# LSQB
+# MINEQUAL
+# MINUS
+# NAME
+# NEWLINE
+# NOTEQUAL
+# NT_OFFSET
+# NUMBER
+# N_TOKENS
+# OP
+# PERCENT
+# PERCENTEQUAL
+# PLUS
+# PLUSEQUAL
+# RARROW
+# RBRACE
+# RIGHTSHIFT
+# RIGHTSHIFTEQUAL
+# RPAR
+# RSQB
+# SEMI
+# SLASH
+# SLASHEQUAL
+# STAR
+# STAREQUAL
+# STRING
+# TILDE
+# VBAR
+# VBAREQUAL
+# tok_name
diff --git a/typeshed/stdlib/3/traceback.pyi b/typeshed/stdlib/3/traceback.pyi
index 21c2564..9d4d2fd 100644
--- a/typeshed/stdlib/3/traceback.pyi
+++ b/typeshed/stdlib/3/traceback.pyi
@@ -1,5 +1,6 @@
# Stubs for traceback
+from typing import List
from types import TracebackType
import typing
@@ -8,9 +9,10 @@ def format_exception_only(etype, value): ...
def format_exception(type: type, value: List[str], tb: TracebackType, limit: int, chain: bool) -> str: ...
def format_tb(traceback): ...
def print_exc(limit=..., file=..., chain=...): ...
-def format_exc(limit: int, chain: bool = ...) -> str: ...
+def format_exc(limit: int = ..., chain: bool = ...) -> str: ...
def extract_stack(f=..., limit=...): ...
def extract_tb(traceback, limit=...): ...
def format_list(list): ...
+def format_stack(f=..., limit=...) -> List[str]: ...
# TODO add more
diff --git a/typeshed/stdlib/3/typing.pyi b/typeshed/stdlib/3/typing.pyi
index 1f31e8a..8bb7606 100644
--- a/typeshed/stdlib/3/typing.pyi
+++ b/typeshed/stdlib/3/typing.pyi
@@ -21,13 +21,14 @@ no_type_check = object()
class TypeAlias:
# Class for defining generic aliases for library types.
- def __init__(self, target_type) -> None: ...
- def __getitem__(self, typeargs): ...
+ def __init__(self, target_type: type) -> None: ...
+ def __getitem__(self, typeargs: Any) -> Any: ...
Union = TypeAlias(object)
Optional = TypeAlias(object)
List = TypeAlias(object)
Dict = TypeAlias(object)
+DefaultDict = TypeAlias(object)
Set = TypeAlias(object)
# Predefined type variables.
@@ -102,7 +103,7 @@ class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]):
def send(self, value: _T_contra) -> _T_co:...
@abstractmethod
- def throw(self, typ: BaseException, val: Any=None, tb=None) -> None:...
+ def throw(self, typ: BaseException, val: Any = None, tb: Any = None) -> None:...
@abstractmethod
def close(self) -> None:...
@@ -211,6 +212,8 @@ class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]):
def __contains__(self, o: object) -> bool: ...
def __iter__(self) -> Iterator[_VT_co]: ...
+# TODO: ContextManager (only if contextlib.AbstractContextManager exists)
+
class Mapping(Iterable[_KT], Container[_KT], Sized, Generic[_KT, _VT]):
# TODO: Value type should be covariant, but currently we can't give a good signature for
# get if this is the case.
@@ -233,8 +236,22 @@ class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]):
def pop(self, k: _KT, default: _VT = ...) -> _VT: ...
def popitem(self) -> Tuple[_KT, _VT]: ...
def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
- def update(self, m: Union[Mapping[_KT, _VT],
- Iterable[Tuple[_KT, _VT]]]) -> None: ...
+ # 'update' used to take a Union, but using overloading is better.
+ # The second overloaded type here is a bit too general, because
+ # Mapping[Tuple[_KT, _VT], W] is a subclass of Iterable[Tuple[_KT, _VT]],
+ # but will always have the behavior of the first overloaded type
+ # at runtime, leading to keys of a mix of types _KT and Tuple[_KT, _VT].
+ # We don't currently have any way of forcing all Mappings to use
+ # the first overload, but by using overloading rather than a Union,
+ # mypy will commit to using the first overload when the argument is
+ # known to be a Mapping with unknown type parameters, which is closer
+ # to the behavior we want. See mypy issue #1430.
+ @overload
+ def update(self, m: Mapping[_KT, _VT]) -> None: ...
+ @overload
+ def update(self, m: Iterable[Tuple[_KT, _VT]]) -> None: ...
+
+Text = str
class IO(Iterable[AnyStr], Generic[AnyStr]):
# TODO detach
@@ -284,7 +301,7 @@ class IO(Iterable[AnyStr], Generic[AnyStr]):
@abstractmethod
def __enter__(self) -> 'IO[AnyStr]': ...
@abstractmethod
- def __exit__(self, type, value, traceback) -> bool: ...
+ def __exit__(self, t: type = None, value: BaseException = None, traceback: Any = None) -> bool: ...
class BinaryIO(IO[bytes]):
# TODO readinto
diff --git a/typeshed/stdlib/3/weakref.pyi b/typeshed/stdlib/3/weakref.pyi
index 08d31ac..6e62f0b 100644
--- a/typeshed/stdlib/3/weakref.pyi
+++ b/typeshed/stdlib/3/weakref.pyi
@@ -13,11 +13,11 @@ _VT = TypeVar('_VT')
class ReferenceType(Generic[_T]):
# TODO rest of members
- def __call__(self) -> Optional[_T]:
- ...
+    def __init__(self, o: _T, callback: Callable[[ReferenceType[_T]],
+                                                 Any] = ...) -> None: ...
+ def __call__(self) -> Optional[_T]: ...
-def ref(o: _T, callback: Callable[[ReferenceType[_T]],
- Any] = ...) -> ReferenceType[_T]: ...
+ref = ReferenceType
# TODO callback
def proxy(object: _T) -> _T: ...
@@ -69,3 +69,52 @@ class WeakValueDictionary(Generic[_KT, _VT]):
# TODO return type
def valuerefs(self) -> Iterable[Any]: ...
+
+
+class WeakKeyDictionary(Generic[_KT, _VT]):
+ # TODO tuple iterable argument?
+ @overload
+ def __init__(self) -> None: ...
+ @overload
+ def __init__(self, map: Mapping[_KT, _VT]) -> None: ...
+
+ def __len__(self) -> int: ...
+ def __getitem__(self, k: _KT) -> _VT: ...
+ def __setitem__(self, k: _KT, v: _VT) -> None: ...
+ def __delitem__(self, v: _KT) -> None: ...
+ def __contains__(self, o: object) -> bool: ...
+ def __iter__(self) -> Iterator[_KT]: ...
+ def __str__(self) -> str: ...
+
+ def clear(self) -> None: ...
+ def copy(self) -> Dict[_KT, _VT]: ...
+
+ @overload
+ def get(self, k: _KT) -> _VT: ...
+ @overload
+ def get(self, k: _KT, default: _VT) -> _VT: ...
+
+ @overload
+ def pop(self, k: _KT) -> _VT: ...
+ @overload
+ def pop(self, k: _KT, default: _VT) -> _VT: ...
+
+ def popitem(self) -> Tuple[_KT, _VT]: ...
+
+ @overload
+ def setdefault(self, k: _KT) -> _VT: ...
+ @overload
+ def setdefault(self, k: _KT, default: _VT) -> _VT: ...
+
+ @overload
+ def update(self, m: Mapping[_KT, _VT]) -> None: ...
+ @overload
+ def update(self, m: Iterable[Tuple[_KT, _VT]]) -> None: ...
+
+ # NOTE: incompatible with Mapping
+ def keys(self) -> Iterator[_KT]: ...
+ def values(self) -> Iterator[_VT]: ...
+ def items(self) -> Iterator[Tuple[_KT, _VT]]: ...
+
+ # TODO return type
+ def valuerefs(self) -> Iterable[Any]: ...
diff --git a/typeshed/stdlib/3/bz2.pyi b/typeshed/stdlib/3/wsgiref/__init__.pyi
similarity index 100%
copy from typeshed/stdlib/3/bz2.pyi
copy to typeshed/stdlib/3/wsgiref/__init__.pyi
diff --git a/typeshed/stdlib/3/wsgiref/validate.pyi b/typeshed/stdlib/3/wsgiref/validate.pyi
new file mode 100644
index 0000000..ecdb252
--- /dev/null
+++ b/typeshed/stdlib/3/wsgiref/validate.pyi
@@ -0,0 +1,47 @@
+# Stubs for wsgiref.validate (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class WSGIWarning(Warning): ...
+
+def validator(application): ...
+
+class InputWrapper:
+ input = ... # type: Any
+ def __init__(self, wsgi_input): ...
+ def read(self, *args): ...
+ def readline(self, *args): ...
+ def readlines(self, *args): ...
+ def __iter__(self): ...
+ def close(self): ...
+
+class ErrorWrapper:
+ errors = ... # type: Any
+ def __init__(self, wsgi_errors): ...
+ def write(self, s): ...
+ def flush(self): ...
+ def writelines(self, seq): ...
+ def close(self): ...
+
+class WriteWrapper:
+ writer = ... # type: Any
+ def __init__(self, wsgi_writer): ...
+ def __call__(self, s): ...
+
+class PartialIteratorWrapper:
+ iterator = ... # type: Any
+ def __init__(self, wsgi_iterator): ...
+ def __iter__(self): ...
+
+class IteratorWrapper:
+ original_iterator = ... # type: Any
+ iterator = ... # type: Any
+ closed = ... # type: Any
+ check_start_response = ... # type: Any
+ def __init__(self, wsgi_iterator, check_start_response): ...
+ def __iter__(self): ...
+ def __next__(self): ...
+ def close(self): ...
+ def __del__(self): ...
diff --git a/typeshed/stdlib/3/xml/etree/ElementInclude.pyi b/typeshed/stdlib/3/xml/etree/ElementInclude.pyi
index 3369c9e..11198ce 100644
--- a/typeshed/stdlib/3/xml/etree/ElementInclude.pyi
+++ b/typeshed/stdlib/3/xml/etree/ElementInclude.pyi
@@ -2,13 +2,18 @@
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
-from typing import Any
+from typing import Union, Optional, Callable
+from .ElementTree import _ElementInterface
-XINCLUDE = ... # type: Any
-XINCLUDE_INCLUDE = ... # type: Any
-XINCLUDE_FALLBACK = ... # type: Any
+XINCLUDE = ... # type: str
+XINCLUDE_INCLUDE = ... # type: str
+XINCLUDE_FALLBACK = ... # type: str
class FatalIncludeError(SyntaxError): ...
-def default_loader(href, parse, encoding=...): ...
-def include(elem, loader=...): ...
+def default_loader(href: Union[str, bytes, int], parse: str, encoding: Optional[str]=...) -> Union[str, _ElementInterface]: ...
+
+# TODO: loader is of type default_loader ie it takes a callable that has the
+# same signature as default_loader. But default_loader has a keyword argument
+# Which can't be represented using Callable...
+def include(elem: _ElementInterface, loader: Callable[..., Union[str, _ElementInterface]]=...) -> None: ...
diff --git a/typeshed/stdlib/3/xml/etree/ElementPath.pyi b/typeshed/stdlib/3/xml/etree/ElementPath.pyi
index dee3bb8..e0cb256 100644
--- a/typeshed/stdlib/3/xml/etree/ElementPath.pyi
+++ b/typeshed/stdlib/3/xml/etree/ElementPath.pyi
@@ -2,27 +2,24 @@
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
-from typing import Any
-
-xpath_tokenizer_re = ... # type: Any
-
-def xpath_tokenizer(pattern, namespaces=...): ...
-def get_parent_map(context): ...
-def prepare_child(next, token): ...
-def prepare_star(next, token): ...
-def prepare_self(next, token): ...
-def prepare_descendant(next, token): ...
-def prepare_parent(next, token): ...
-def prepare_predicate(next, token): ...
-
-ops = ... # type: Any
-
-class _SelectorContext:
- parent_map = ... # type: Any
- root = ... # type: Any
- def __init__(self, root) -> None: ...
-
-def iterfind(elem, path, namespaces=...): ...
-def find(elem, path, namespaces=...): ...
-def findall(elem, path, namespaces=...): ...
-def findtext(elem, path, default=..., namespaces=...): ...
+from typing import Tuple, List, Union, TypeVar, Callable, Optional
+from .ElementTree import _ElementInterface
+
+xpath_tokenizer_re = ... # type: Callable[..., List[Tuple[str, str]]]
+
+
+class xpath_descendant_or_self: ...
+
+_T = TypeVar('_T')
+
+class Path:
+ def __init__(self, path: str) -> None: ...
+ def find(self, element: _ElementInterface) -> Optional[_ElementInterface]: ...
+ def findtext(self, element: _ElementInterface, default: _T=...) -> Union[str, _T]: ...
+ def findall(self, element: _ElementInterface) -> List[_ElementInterface]: ...
+
+def find(element: _ElementInterface, path: str) -> Optional[_ElementInterface]: ...
+
+def findtext(element: _ElementInterface, path: str, default: _T=...) -> Union[str, _T]: ...
+
+def findall(element: _ElementInterface, path: str) -> List[_ElementInterface]: ...
diff --git a/typeshed/stdlib/3/xml/etree/ElementTree.pyi b/typeshed/stdlib/3/xml/etree/ElementTree.pyi
index 74cc977..f673f97 100644
--- a/typeshed/stdlib/3/xml/etree/ElementTree.pyi
+++ b/typeshed/stdlib/3/xml/etree/ElementTree.pyi
@@ -2,126 +2,97 @@
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
-from typing import Any
+from typing import Any, AnyStr, Union, IO, Callable, Dict, List, Tuple, Sequence, Iterator, TypeVar, Optional, KeysView, ItemsView, Generator
import io
-VERSION = ... # type: Any
-
-class ParseError(SyntaxError): ...
-
-def iselement(element): ...
-
-class Element:
- def __init__(self, tag, attrib=..., **extra) -> None: ...
- def append(self, *args, **kwargs): ...
- def clear(self, *args, **kwargs): ...
- def extend(self, *args, **kwargs): ...
- def find(self, *args, **kwargs): ...
- def findall(self, *args, **kwargs): ...
- def findtext(self, match, default=..., namespaces=...): ...
- def get(self, *args, **kwargs): ...
- def getchildren(self): ...
- def getiterator(self, tag=...): ...
- def insert(self, *args, **kwargs): ...
- def items(self, *args, **kwargs): ...
- def iter(self, *args, **kwargs): ...
- def iterfind(self, match, namespaces=...): ...
- def itertext(self): ...
- def keys(self): ...
- def makeelement(self, tag, attrib): ...
- def remove(self, *args, **kwargs): ...
- def set(self, *args, **kwargs): ...
- def __copy__(self): ...
- def __deepcopy__(self): ...
- def __delattr__(self, name): ...
- def __delitem__(self, name): ...
- def __getitem__(self, name): ...
- def __getstate__(self): ...
- def __len__(self): ...
- def __setattr__(self, name, value): ...
- def __setitem__(self, index, object): ...
- def __setstate__(self, state): ...
- def __sizeof__(self): ...
-
-def SubElement(parent, tag, attrib=..., **extra): ...
-def Comment(text=...): ...
-def ProcessingInstruction(target, text=...): ...
-
-PI = ... # type: Any
+VERSION = ... # type: str
+
+_Ss = TypeVar('_Ss', str, bytes)
+_T = TypeVar('_T')
+_str_or_bytes = Union[str, bytes]
+
+class _ElementInterface:
+ tag = ... # type: _str_or_bytes
+ attrib = ... # type: Dict[_str_or_bytes, _str_or_bytes]
+ text = ... # type: Optional[_str_or_bytes]
+ tail = ... # type: Optional[_str_or_bytes]
+ def __init__(self, tag: Union[AnyStr, Callable[..., '_ElementInterface']], attrib: Dict[AnyStr, AnyStr]) -> None: ...
+ def makeelement(self, tag: _Ss, attrib: Dict[_Ss, _Ss]) -> '_ElementInterface': ...
+ def __len__(self) -> int: ...
+ def __getitem__(self, index: int) -> '_ElementInterface': ...
+ def __setitem__(self, index: int, element: '_ElementInterface') -> None: ...
+ def __delitem__(self, index: int) -> None: ...
+ def __getslice__(self, start: int, stop: int) -> Sequence['_ElementInterface']: ...
+ def __setslice__(self, start: int, stop: int, elements: Sequence['_ElementInterface']) -> None: ...
+ def __delslice__(self, start: int, stop: int) -> None: ...
+ def append(self, element: '_ElementInterface') -> None: ...
+ def insert(self, index: int, element: '_ElementInterface') -> None: ...
+ def remove(self, element: '_ElementInterface') -> None: ...
+ def getchildren(self) -> List['_ElementInterface']: ...
+ def find(self, path: str) -> Optional['_ElementInterface']: ...
+ def findtext(self, path: str, default: _T=...) -> Union[str, _T]: ...
+ def findall(self, path: str) -> List['_ElementInterface']: ...
+ def clear(self) -> None: ...
+ def get(self, key: AnyStr, default: _T=...) -> Union[AnyStr, _T]: ...
+ def set(self, key: AnyStr, value: AnyStr) -> None: ...
+ def keys(self) -> KeysView[AnyStr]: ...
+ def items(self) -> ItemsView[AnyStr, AnyStr]: ...
+ def getiterator(self, tag: Union[str, AnyStr]=...) -> List['_ElementInterface']: ...
+
+def Element(tag: Union[AnyStr, Callable[..., _ElementInterface]], attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> _ElementInterface: ...
+def SubElement(parent: _ElementInterface, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: Dict[str, AnyStr]) -> _ElementInterface: ...
+def Comment(text: _str_or_bytes=...) -> _ElementInterface: ...
+def ProcessingInstruction(target: str, text: str=...) -> _ElementInterface: ...
+
+PI = ... # type: Callable[..., _ElementInterface]
class QName:
- text = ... # type: Any
- def __init__(self, text_or_uri, tag=...) -> None: ...
- def __hash__(self): ...
- def __le__(self, other): ...
- def __lt__(self, other): ...
- def __ge__(self, other): ...
- def __gt__(self, other): ...
- def __eq__(self, other): ...
- def __ne__(self, other): ...
+ text = ... # type: str
+ def __init__(self, text_or_uri: str, tag: str=...) -> None: ...
+
+
+_file_or_filename = Union[str, bytes, int, IO[Any]]
class ElementTree:
- def __init__(self, element=..., file=...) -> None: ...
- def getroot(self): ...
- def parse(self, source, parser=...): ...
- def iter(self, tag=...): ...
- def getiterator(self, tag=...): ...
- def find(self, path, namespaces=...): ...
- def findtext(self, path, default=..., namespaces=...): ...
- def findall(self, path, namespaces=...): ...
- def iterfind(self, path, namespaces=...): ...
- def write(self, file_or_filename, encoding=..., xml_declaration=..., default_namespace=..., method=..., *, short_empty_elements=...): ...
- def write_c14n(self, file): ...
-
-def register_namespace(prefix, uri): ...
-def tostring(element, encoding=..., method=..., *, short_empty_elements=...): ...
-
-class _ListDataStream(io.BufferedIOBase):
- lst = ... # type: Any
- def __init__(self, lst) -> None: ...
- def writable(self): ...
- def seekable(self): ...
- def write(self, b): ...
- def tell(self): ...
-
-def tostringlist(element, encoding=..., method=..., *, short_empty_elements=...): ...
-def dump(elem): ...
-def parse(source, parser=...): ...
-def iterparse(source, events=..., parser=...): ...
-
-class XMLPullParser:
- def __init__(self, events=..., *, _parser=...) -> None: ...
- def feed(self, data): ...
- def close(self): ...
- def read_events(self): ...
-
-class _IterParseIterator:
- root = ... # type: Any
- def __init__(self, source, events, parser, close_source=...) -> None: ...
- def __next__(self): ...
- def __iter__(self): ...
-
-def XML(text, parser=...): ...
-def XMLID(text, parser=...): ...
-
-fromstring = ... # type: Any
-
-def fromstringlist(sequence, parser=...): ...
+ def __init__(self, element: _ElementInterface=..., file: _file_or_filename=...) -> None: ...
+ def getroot(self) -> _ElementInterface: ...
+ def parse(self, source: _file_or_filename, parser: 'XMLTreeBuilder'=...) -> _ElementInterface: ...
+ def getiterator(self, tag: Union[str, AnyStr]=...) -> List[_ElementInterface]: ...
+ def find(self, path: str) -> Optional[_ElementInterface]: ...
+ def findtext(self, path: str, default: _T=...) -> Union[_T, str]: ...
+ def findall(self, path: str) -> List[_ElementInterface]: ...
+ def write(self, file_or_filename: _file_or_filename, encoding: str=...) -> None: ...
+
+def iselement(element: _ElementInterface) -> bool: ...
+def dump(elem: _ElementInterface) -> None: ...
+def fixtag(tag: Union[str, QName], namespaces: Dict[str, str]) -> Tuple[str, Optional[str]]: ...
+def parse(source: _file_or_filename, parser: 'XMLTreeBuilder'=...) -> ElementTree: ...
+
+
+class iterparse:
+ def __init__(self, source: _file_or_filename, events: Sequence[str]=...) -> None: ...
+ # TODO-figure out this type...
+ def __next__(self) -> Tuple[str, _ElementInterface]: ...
+
+def XML(text: AnyStr) -> _ElementInterface: ...
+def XMLID(text: AnyStr) -> Tuple[_ElementInterface, Dict[str, _ElementInterface]]: ...
+
+# TODO-improve this type
+fromstring = ... # type: Callable[..., _ElementInterface]
+
+def tostring(element: _ElementInterface, encoding: str=...) -> AnyStr: ...
class TreeBuilder:
- def __init__(self, element_factory=...) -> None: ...
- def close(self): ...
- def data(self, data): ...
- def start(self, tag, attrs): ...
- def end(self, tag): ...
-
-class XMLParser:
- target = ... # type: Any
+ def __init__(self, element_factory: Callable[[AnyStr, Dict[AnyStr, AnyStr]], _ElementInterface]=...) -> None: ...
+ def close(self) -> _ElementInterface: ...
+ def data(self, data: AnyStr) -> None: ...
+ def start(self, tag: AnyStr, attrs: Dict[AnyStr, AnyStr]) -> _ElementInterface: ...
+ def end(self, tag: AnyStr) -> _ElementInterface: ...
+
+class XMLTreeBuilder:
+ # TODO-what is entity used for???
entity = ... # type: Any
- version = ... # type: Any
- def __init__(self, html=..., target=..., encoding=...) -> None: ...
- def _parse_whole(self, *args, **kwargs): ...
- def _setevents(self, *args, **kwargs): ...
- def close(self, *args, **kwargs): ...
- def doctype(self, name, pubid, system): ...
- def feed(self, data): ...
+ def __init__(self, html: int=..., target: TreeBuilder=...) -> None: ...
+ def doctype(self, name: str, pubid: str, system: str) -> None: ...
+ def close(self) -> Any: ... # TODO-most of the time, this will be Element, but it can be anything target.close() returns
+    def feed(self, data: AnyStr) -> None: ...
diff --git a/typeshed/stdlib/3/zlib.pyi b/typeshed/stdlib/3/zlib.pyi
index 8aefc0e..6dda282 100644
--- a/typeshed/stdlib/3/zlib.pyi
+++ b/typeshed/stdlib/3/zlib.pyi
@@ -1,8 +1,4 @@
-# Stubs for zlib (Python 3.4)
-#
-# NOTE: This stub was automatically generated by stubgen.
-
-# TODO: Compress and Decompress classes are not published by the module.
+# Stubs for zlib
DEFLATED = ... # type: int
DEF_BUF_SIZE = ... # type: int
@@ -21,12 +17,30 @@ Z_HUFFMAN_ONLY = ... # type: int
Z_NO_FLUSH = ... # type: int
Z_SYNC_FLUSH = ... # type: int
-def adler32(data, value=...) -> int: ...
-def compress(data, level: int = ...): ...
-def compressobj(level=..., method=..., wbits=..., memlevel=...,
- strategy=..., zdict=...): ...
-def crc32(data, value=...) -> int: ...
-def decompress(data, wbits=..., bufsize=...): ...
-def decompressobj(wbits=..., zdict=...): ...
class error(Exception): ...
+
+
+class Compress:
+ def compress(self, data: bytes) -> bytes: ...
+ def flush(self, mode: int = ...) -> bytes: ...
+ def copy(self) -> "Compress": ...
+
+
+class Decompress:
+ unused_data = ... # type: bytes
+ unconsumed_tail = ... # type: bytes
+ eof = ... # type: bool
+ def decompress(self, data: bytes, max_length: int = ...) -> bytes: ...
+ def flush(self, length: int = ...) -> bytes: ...
+ def copy(self) -> "Decompress": ...
+
+
+def adler32(data: bytes, value: int = ...) -> int: ...
+def compress(data: bytes, level: int = ...) -> bytes: ...
+def compressobj(level: int = ..., method: int = ..., wbits: int = ...,
+ memlevel: int = ..., strategy: int = ...,
+ zdict: bytes = ...) -> Compress: ...
+def crc32(data: bytes, value: int = ...) -> int: ...
+def decompress(data: bytes, wbits: int = ..., bufsize: int = ...) -> bytes: ...
+def decompressobj(wbits: int = ..., zdict: bytes = ...) -> Decompress: ...
diff --git a/typeshed/third_party/2.7/selenium/webdriver/remote/webdriver.pyi b/typeshed/third_party/2.7/selenium/webdriver/remote/webdriver.pyi
new file mode 100644
index 0000000..6706cf6
--- /dev/null
+++ b/typeshed/third_party/2.7/selenium/webdriver/remote/webdriver.pyi
@@ -0,0 +1,111 @@
+# Stubs for selenium.webdriver.remote.webdriver (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, Dict, List, Optional
+from .mobile import Mobile as Mobile
+from selenium.webdriver.remote.webelement import WebElement
+from selenium.webdriver.remote.errorhandler import ErrorHandler
+from selenium.webdriver.remote.file_detector import FileDetector
+
+Capabilities = Dict[basestring, Any]
+ExecuteResult = Dict[basestring, Any] # containing 'success', 'value', 'sessionId'
+
+class WebDriver:
+ command_executor = ... # type: basestring
+ session_id = ... # type: Any
+ capabilities = ... # type: Capabilities
+ error_handler = ... # type: ErrorHandler
+ file_detector = ... # type: FileDetector
+ def __init__(self,
+ command_executor: basestring='',
+ desired_capabilities: Capabilities=None,
+ browser_profile=None,
+ proxy=None,
+ keep_alive:bool=False
+ ) -> None: ...
+ @property
+ def mobile(self) -> Mobile: ...
+ @property
+ def name(self) -> basestring: ...
+ def start_client(self): ...
+ def stop_client(self): ...
+ w3c = ... # type: Any
+ def start_session(self, desired_capabilities, browser_profile=None): ...
+ def create_web_element(self, element_id: basestring) -> WebElement: ...
+ def execute(self, driver_command: basestring, params: Optional[Dict[basestring, Any]]=None) -> ExecuteResult: ...
+ def get(self, url: basestring) -> None: ...
+ @property
+ def title(self) -> basestring: ...
+ def find_element_by_id(self, id_: basestring) -> WebElement: ...
+ def find_elements_by_id(self, id_: basestring) -> List[WebElement]: ...
+ def find_element_by_xpath(self, xpath: basestring) -> WebElement: ...
+ def find_elements_by_xpath(self, xpath: basestring) -> List[WebElement]: ...
+ def find_element_by_link_text(self, link_text: basestring) -> WebElement: ...
+ def find_elements_by_link_text(self, text: basestring) -> List[WebElement]: ...
+ def find_element_by_partial_link_text(self, link_text: basestring) -> WebElement: ...
+ def find_elements_by_partial_link_text(self, link_text: basestring) -> List[WebElement]: ...
+ def find_element_by_name(self, name: basestring) -> WebElement: ...
+ def find_elements_by_name(self, name: basestring) -> List[WebElement]: ...
+ def find_element_by_tag_name(self, name: basestring) -> WebElement: ...
+ def find_elements_by_tag_name(self, name: basestring) -> List[WebElement]: ...
+ def find_element_by_class_name(self, name: basestring) -> WebElement: ...
+ def find_elements_by_class_name(self, name: basestring) -> List[WebElement]: ...
+ def find_element_by_css_selector(self, css_selector: basestring) -> WebElement: ...
+ def find_elements_by_css_selector(self, css_selector: basestring) -> List[WebElement]: ...
+ def execute_script(self, script, *args): ...
+ def execute_async_script(self, script, *args): ...
+ @property
+ def current_url(self) -> basestring: ...
+ @property
+ def page_source(self): ...
+ def close(self): ...
+ def quit(self): ...
+ @property
+ def current_window_handle(self): ...
+ @property
+ def window_handles(self): ...
+ def maximize_window(self): ...
+ @property
+ def switch_to(self): ...
+ def switch_to_active_element(self): ...
+ def switch_to_window(self, window_name): ...
+ def switch_to_frame(self, frame_reference): ...
+ def switch_to_default_content(self): ...
+ def switch_to_alert(self): ...
+ def back(self): ...
+ def forward(self): ...
+ def refresh(self): ...
+ def get_cookies(self): ...
+ def get_cookie(self, name): ...
+ def delete_cookie(self, name): ...
+ def delete_all_cookies(self): ...
+ def add_cookie(self, cookie_dict): ...
+ def implicitly_wait(self, time_to_wait): ...
+ def set_script_timeout(self, time_to_wait): ...
+ def set_page_load_timeout(self, time_to_wait): ...
+ def find_element(self, by=..., value=None): ...
+ def find_elements(self, by=..., value=None): ...
+ @property
+ def desired_capabilities(self): ...
+ def get_screenshot_as_file(self, filename): ...
+ save_screenshot = ... # type: Any
+ def get_screenshot_as_png(self): ...
+ def get_screenshot_as_base64(self): ...
+ def set_window_size(self, width, height, windowHandle=''): ...
+ def get_window_size(self, windowHandle=''): ...
+ def set_window_position(self, x, y, windowHandle=''): ...
+ def get_window_position(self, windowHandle=''): ...
+ @property
+ def file_detector(self): ...
+ @file_detector.setter
+ def file_detector(self, detector): ...
+ @property
+ def orientation(self): ...
+ @orientation.setter
+ def orientation(self, value): ...
+ @property
+ def application_cache(self): ...
+ @property
+ def log_types(self): ...
+ def get_log(self, log_type): ...
diff --git a/typeshed/third_party/2.7/selenium/webdriver/remote/webelement.pyi b/typeshed/third_party/2.7/selenium/webdriver/remote/webelement.pyi
new file mode 100644
index 0000000..05dfae0
--- /dev/null
+++ b/typeshed/third_party/2.7/selenium/webdriver/remote/webelement.pyi
@@ -0,0 +1,65 @@
+# Stubs for selenium.webdriver.remote.webelement (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from selenium.webdriver.remote.webdriver import WebDriver
+from typing import Any, Optional, Dict, List
+
+SizeDict = Dict[str, int] # containing "height", "width"
+PointDict = Dict[str, int] # containing "x", "y"
+
+class WebElement:
+ def __init__(self, parent: WebDriver, id_: Optional[basestring], w3c: bool=False) -> None: ...
+ @property
+ def tag_name(self) -> basestring: ...
+ @property
+ def text(self) -> Optional[basestring]: ...
+ def click(self) -> None: ...
+ def submit(self) -> None: ...
+ def clear(self) -> None: ...
+ def get_attribute(self, name: basestring) -> Optional[basestring]: ...
+ def is_selected(self) -> bool: ...
+ def is_enabled(self) -> bool: ...
+
+ def find_element_by_id(self, id_: basestring) -> WebElement: ...
+ def find_elements_by_id(self, id_: basestring) -> List[WebElement]: ...
+ def find_element_by_name(self, name: basestring) -> WebElement: ...
+ def find_elements_by_name(self, name: basestring) -> List[WebElement]: ...
+ def find_element_by_link_text(self, link_text: basestring) -> WebElement: ...
+ def find_elements_by_link_text(self, link_text: basestring) -> List[WebElement]: ...
+ def find_element_by_partial_link_text(self, link_text: basestring) -> WebElement: ...
+ def find_elements_by_partial_link_text(self, link_text: basestring) -> List[WebElement]: ...
+ def find_element_by_tag_name(self, name: basestring) -> WebElement: ...
+ def find_elements_by_tag_name(self, name: basestring) -> List[WebElement]: ...
+ def find_element_by_xpath(self, xpath: basestring) -> WebElement: ...
+ def find_elements_by_xpath(self, xpath: basestring) -> List[WebElement]: ...
+ def find_element_by_class_name(self, name: basestring) -> WebElement: ...
+ def find_elements_by_class_name(self, name: basestring) -> List[WebElement]: ...
+ def find_element_by_css_selector(self, css_selector: basestring) -> WebElement: ...
+ def find_elements_by_css_selector(self, css_selector: basestring) -> List[WebElement]: ...
+
+    def send_keys(self, *value: basestring) -> None: ...
+ def is_displayed(self) -> bool: ...
+ @property
+ def location_once_scrolled_into_view(self): ...
+ @property
+ def size(self) -> SizeDict: ...
+ def value_of_css_property(self, property_name): ...
+ @property
+ def location(self) -> PointDict: ...
+ @property
+ def rect(self): ...
+ @property
+ def screenshot_as_base64(self): ...
+ @property
+ def screenshot_as_png(self): ...
+ def screenshot(self, filename: basestring): ...
+ @property
+ def parent(self) -> WebDriver: ...
+ @property
+ def id(self) -> Optional[basestring]: ...
+ def __eq__(self, element: object) -> bool: ...
+ def __ne__(self, element: object) -> bool: ...
+    def find_element(self, by: basestring=..., value: Optional[basestring]=None) -> WebElement: ...
+    def find_elements(self, by: basestring=..., value: Optional[basestring]=None) -> List[WebElement]: ...
+ def __hash__(self) -> int: ...
diff --git a/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/__init__.pyi b/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/__init__.pyi
index 2faf87d..846c3b4 100644
--- a/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/__init__.pyi
+++ b/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/__init__.pyi
@@ -15,10 +15,8 @@ DATETIME = base.DATETIME
DECIMAL = base.DECIMAL
DOUBLE = base.DOUBLE
ENUM = base.ENUM
-DECIMAL = base.DECIMAL
FLOAT = base.FLOAT
INTEGER = base.INTEGER
-INTEGER = base.INTEGER
LONGBLOB = base.LONGBLOB
LONGTEXT = base.LONGTEXT
MEDIUMBLOB = base.MEDIUMBLOB
@@ -39,4 +37,4 @@ TINYTEXT = base.TINYTEXT
VARBINARY = base.VARBINARY
VARCHAR = base.VARCHAR
YEAR = base.YEAR
-dialect = base.dialect
+## dialect = base.dialect
diff --git a/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/base.pyi b/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/base.pyi
index 1443c26..ac967d8 100644
--- a/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/base.pyi
+++ b/typeshed/third_party/2.7/sqlalchemy/dialects/mysql/base.pyi
@@ -9,10 +9,10 @@ from ... import util
from ... import types
sqltypes = sql.sqltypes
-compiler = sql.compiler
-reflection = engine.reflection
-default = engine.default
-topological = util.topological
+## compiler = sql.compiler
+## reflection = engine.reflection
+## default = engine.default
+## topological = util.topological
DATE = types.DATE
BOOLEAN = types.BOOLEAN
BLOB = types.BLOB
@@ -28,15 +28,21 @@ class _NumericType:
zerofill = ... # type: Any
def __init__(self, unsigned=..., zerofill=..., **kw) -> None: ...
-class _FloatType(_NumericType, sqltypes.Float):
+class _FloatType(_NumericType,
+ ## sqltypes.Float
+ ):
scale = ... # type: Any
def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
-class _IntegerType(_NumericType, sqltypes.Integer):
+class _IntegerType(_NumericType,
+ ## sqltypes.Integer
+ ):
display_width = ... # type: Any
def __init__(self, display_width=..., **kw) -> None: ...
-class _StringType(sqltypes.String):
+class _StringType(object,
+ ## sqltypes.String
+ ):
charset = ... # type: Any
ascii = ... # type: Any
unicode = ... # type: Any
@@ -44,14 +50,21 @@ class _StringType(sqltypes.String):
national = ... # type: Any
def __init__(self, charset=..., collation=..., ascii=..., binary=..., unicode=..., national=..., **kw) -> None: ...
-class _MatchType(sqltypes.Float, sqltypes.MatchType):
+class _MatchType(object,
+ ## sqltypes.Float,
+ ## sqltypes.MatchType
+ ):
def __init__(self, **kw) -> None: ...
-class NUMERIC(_NumericType, sqltypes.NUMERIC):
+class NUMERIC(_NumericType,
+ ## sqltypes.NUMERIC
+ ):
__visit_name__ = ... # type: Any
def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
-class DECIMAL(_NumericType, sqltypes.DECIMAL):
+class DECIMAL(_NumericType,
+ ## sqltypes.DECIMAL
+ ):
__visit_name__ = ... # type: Any
def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
@@ -59,20 +72,28 @@ class DOUBLE(_FloatType):
__visit_name__ = ... # type: Any
def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
-class REAL(_FloatType, sqltypes.REAL):
+class REAL(_FloatType,
+ ## sqltypes.REAL
+ ):
__visit_name__ = ... # type: Any
def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
-class FLOAT(_FloatType, sqltypes.FLOAT):
+class FLOAT(_FloatType,
+ ## sqltypes.FLOAT
+ ):
__visit_name__ = ... # type: Any
def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
def bind_processor(self, dialect): ...
-class INTEGER(_IntegerType, sqltypes.INTEGER):
+class INTEGER(_IntegerType,
+ ## sqltypes.INTEGER
+ ):
__visit_name__ = ... # type: Any
def __init__(self, display_width=..., **kw) -> None: ...
-class BIGINT(_IntegerType, sqltypes.BIGINT):
+class BIGINT(_IntegerType,
+ ## sqltypes.BIGINT
+ ):
__visit_name__ = ... # type: Any
def __init__(self, display_width=..., **kw) -> None: ...
@@ -84,38 +105,52 @@ class TINYINT(_IntegerType):
__visit_name__ = ... # type: Any
def __init__(self, display_width=..., **kw) -> None: ...
-class SMALLINT(_IntegerType, sqltypes.SMALLINT):
+class SMALLINT(_IntegerType,
+ ## sqltypes.SMALLINT
+ ):
__visit_name__ = ... # type: Any
def __init__(self, display_width=..., **kw) -> None: ...
-class BIT(sqltypes.TypeEngine):
+class BIT(object,
+ ## sqltypes.TypeEngine
+ ):
__visit_name__ = ... # type: Any
length = ... # type: Any
def __init__(self, length=...) -> None: ...
def result_processor(self, dialect, coltype): ...
-class TIME(sqltypes.TIME):
+class TIME(object,
+ ## sqltypes.TIME
+ ):
__visit_name__ = ... # type: Any
fsp = ... # type: Any
def __init__(self, timezone=..., fsp=...) -> None: ...
def result_processor(self, dialect, coltype): ...
-class TIMESTAMP(sqltypes.TIMESTAMP):
+class TIMESTAMP(object,
+ ## sqltypes.TIMESTAMP
+ ):
__visit_name__ = ... # type: Any
fsp = ... # type: Any
def __init__(self, timezone=..., fsp=...) -> None: ...
-class DATETIME(sqltypes.DATETIME):
+class DATETIME(object,
+ ## sqltypes.DATETIME
+ ):
__visit_name__ = ... # type: Any
fsp = ... # type: Any
def __init__(self, timezone=..., fsp=...) -> None: ...
-class YEAR(sqltypes.TypeEngine):
+class YEAR(object,
+ ## sqltypes.TypeEngine
+ ):
__visit_name__ = ... # type: Any
display_width = ... # type: Any
def __init__(self, display_width=...) -> None: ...
-class TEXT(_StringType, sqltypes.TEXT):
+class TEXT(_StringType,
+ ## sqltypes.TEXT
+ ):
__visit_name__ = ... # type: Any
def __init__(self, length=..., **kw) -> None: ...
@@ -131,34 +166,50 @@ class LONGTEXT(_StringType):
__visit_name__ = ... # type: Any
def __init__(self, **kwargs) -> None: ...
-class VARCHAR(_StringType, sqltypes.VARCHAR):
+class VARCHAR(_StringType,
+ ## sqltypes.VARCHAR
+ ):
__visit_name__ = ... # type: Any
def __init__(self, length=..., **kwargs) -> None: ...
-class CHAR(_StringType, sqltypes.CHAR):
+class CHAR(_StringType,
+ ## sqltypes.CHAR
+ ):
__visit_name__ = ... # type: Any
def __init__(self, length=..., **kwargs) -> None: ...
-class NVARCHAR(_StringType, sqltypes.NVARCHAR):
+class NVARCHAR(_StringType,
+ ## sqltypes.NVARCHAR
+ ):
__visit_name__ = ... # type: Any
def __init__(self, length=..., **kwargs) -> None: ...
-class NCHAR(_StringType, sqltypes.NCHAR):
+class NCHAR(_StringType,
+ ## sqltypes.NCHAR
+ ):
__visit_name__ = ... # type: Any
def __init__(self, length=..., **kwargs) -> None: ...
-class TINYBLOB(sqltypes._Binary):
+class TINYBLOB(object,
+ ## sqltypes._Binary
+ ):
__visit_name__ = ... # type: Any
-class MEDIUMBLOB(sqltypes._Binary):
+class MEDIUMBLOB(object,
+ ## sqltypes._Binary
+ ):
__visit_name__ = ... # type: Any
-class LONGBLOB(sqltypes._Binary):
+class LONGBLOB(object,
+ ## sqltypes._Binary
+ ):
__visit_name__ = ... # type: Any
class _EnumeratedValues(_StringType): ...
-class ENUM(sqltypes.Enum, _EnumeratedValues):
+class ENUM(## sqltypes.Enum,
+ _EnumeratedValues
+ ):
__visit_name__ = ... # type: Any
strict = ... # type: Any
def __init__(self, *enums, **kw) -> None: ...
@@ -208,10 +259,14 @@ MSInteger = ... # type: Any
colspecs = ... # type: Any
ischema_names = ... # type: Any
-class MySQLExecutionContext(default.DefaultExecutionContext):
+class MySQLExecutionContext(object,
+ ## default.DefaultExecutionContext
+ ):
def should_autocommit_text(self, statement): ...
-class MySQLCompiler(compiler.SQLCompiler):
+class MySQLCompiler(object,
+ ## compiler.SQLCompiler
+ ):
render_table_with_column_in_update_from = ... # type: Any
extract_map = ... # type: Any
def visit_random_func(self, fn, **kw): ...
@@ -233,7 +288,9 @@ class MySQLCompiler(compiler.SQLCompiler):
def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): ...
def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw): ...
-class MySQLDDLCompiler(compiler.DDLCompiler):
+class MySQLDDLCompiler(object,
+ ## compiler.DDLCompiler
+ ):
def create_table_constraints(self, table, **kw): ...
def get_column_specification(self, column, **kw): ...
def post_create_table(self, table): ...
@@ -243,7 +300,9 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
def visit_drop_constraint(self, drop): ...
def define_constraint_match(self, constraint): ...
-class MySQLTypeCompiler(compiler.GenericTypeCompiler):
+class MySQLTypeCompiler(object,
+ ## compiler.GenericTypeCompiler
+ ):
def visit_NUMERIC(self, type_, **kw): ...
def visit_DECIMAL(self, type_, **kw): ...
def visit_DOUBLE(self, type_, **kw): ...
@@ -279,11 +338,15 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
def visit_SET(self, type_, **kw): ...
def visit_BOOLEAN(self, type, **kw): ...
-class MySQLIdentifierPreparer(compiler.IdentifierPreparer):
+class MySQLIdentifierPreparer(object,
+ ## compiler.IdentifierPreparer
+ ):
reserved_words = ... # type: Any
def __init__(self, dialect, server_ansiquotes=..., **kw) -> None: ...
-class MySQLDialect(default.DefaultDialect):
+class MySQLDialect(object,
+ ## default.DefaultDialect
+ ):
name = ... # type: Any
supports_alter = ... # type: Any
supports_native_boolean = ... # type: Any
diff --git a/typeshed/third_party/2.7/sqlalchemy/engine/base.pyi b/typeshed/third_party/2.7/sqlalchemy/engine/base.pyi
new file mode 100644
index 0000000..3b89813
--- /dev/null
+++ b/typeshed/third_party/2.7/sqlalchemy/engine/base.pyi
@@ -0,0 +1,3 @@
+# Dummy until I figure out something better.
+class Connectable:
+ pass
diff --git a/typeshed/third_party/2.7/sqlalchemy/engine/strategies.pyi b/typeshed/third_party/2.7/sqlalchemy/engine/strategies.pyi
index 726741d..372643c 100644
--- a/typeshed/third_party/2.7/sqlalchemy/engine/strategies.pyi
+++ b/typeshed/third_party/2.7/sqlalchemy/engine/strategies.pyi
@@ -3,7 +3,8 @@
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from typing import Any
-import base
+
+from . import base
strategies = ... # type: Any
@@ -26,7 +27,6 @@ class MockEngineStrategy(EngineStrategy):
name = ... # type: Any
def create(self, name_or_url, executor, **kwargs): ...
class MockConnection(base.Connectable):
- execute = ... # type: Any
def __init__(self, dialect, execute) -> None: ...
engine = ... # type: Any
dialect = ... # type: Any
diff --git a/typeshed/third_party/2.7/sqlalchemy/engine/url.pyi b/typeshed/third_party/2.7/sqlalchemy/engine/url.pyi
index fa68a57..b422ec9 100644
--- a/typeshed/third_party/2.7/sqlalchemy/engine/url.pyi
+++ b/typeshed/third_party/2.7/sqlalchemy/engine/url.pyi
@@ -5,7 +5,7 @@
from typing import Any
from .. import dialects
-registry = dialects.registry
+## registry = dialects.registry
class URL:
drivername = ... # type: Any
diff --git a/typeshed/third_party/2.7/sqlalchemy/orm/__init__.pyi b/typeshed/third_party/2.7/sqlalchemy/orm/__init__.pyi
index 5a76703..bc1fe4b 100644
--- a/typeshed/third_party/2.7/sqlalchemy/orm/__init__.pyi
+++ b/typeshed/third_party/2.7/sqlalchemy/orm/__init__.pyi
@@ -3,54 +3,54 @@
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from typing import Any
-from . import mapper
-from . import interfaces
-from . import deprecated_interfaces
-from . import util
-from . import properties
-from . import relationships
-from . import descriptor_props
+## from . import mapper
+## from . import interfaces
+## from . import deprecated_interfaces
+## from . import util
+## from . import properties
+## from . import relationships
+## from . import descriptor_props
from . import session
-from . import scoping
-from . import query
+## from . import scoping
+## from . import query
from ..util import langhelpers
-from . import strategy_options
+## from . import strategy_options
-Mapper = mapper.Mapper
-class_mapper = mapper.class_mapper
-configure_mappers = mapper.configure_mappers
-reconstructor = mapper.reconstructor
-validates = mapper.validates
-EXT_CONTINUE = interfaces.EXT_CONTINUE
-EXT_STOP = interfaces.EXT_STOP
-PropComparator = interfaces.PropComparator
-MapperExtension = deprecated_interfaces.MapperExtension
-SessionExtension = deprecated_interfaces.SessionExtension
-AttributeExtension = deprecated_interfaces.AttributeExtension
-aliased = util.aliased
-join = util.join
-object_mapper = util.object_mapper
-outerjoin = util.outerjoin
-polymorphic_union = util.polymorphic_union
-was_deleted = util.was_deleted
-with_parent = util.with_parent
-with_polymorphic = util.with_polymorphic
-ColumnProperty = properties.ColumnProperty
-RelationshipProperty = relationships.RelationshipProperty
-ComparableProperty = descriptor_props.ComparableProperty
-CompositeProperty = descriptor_props.CompositeProperty
-SynonymProperty = descriptor_props.SynonymProperty
-foreign = relationships.foreign
-remote = relationships.remote
+## Mapper = mapper.Mapper
+## class_mapper = mapper.class_mapper
+## configure_mappers = mapper.configure_mappers
+## reconstructor = mapper.reconstructor
+## validates = mapper.validates
+## EXT_CONTINUE = interfaces.EXT_CONTINUE
+## EXT_STOP = interfaces.EXT_STOP
+## PropComparator = interfaces.PropComparator
+## MapperExtension = deprecated_interfaces.MapperExtension
+## SessionExtension = deprecated_interfaces.SessionExtension
+## AttributeExtension = deprecated_interfaces.AttributeExtension
+## aliased = util.aliased
+## join = util.join
+## object_mapper = util.object_mapper
+## outerjoin = util.outerjoin
+## polymorphic_union = util.polymorphic_union
+## was_deleted = util.was_deleted
+## with_parent = util.with_parent
+## with_polymorphic = util.with_polymorphic
+## ColumnProperty = properties.ColumnProperty
+## RelationshipProperty = relationships.RelationshipProperty
+## ComparableProperty = descriptor_props.ComparableProperty
+## CompositeProperty = descriptor_props.CompositeProperty
+## SynonymProperty = descriptor_props.SynonymProperty
+## foreign = relationships.foreign
+## remote = relationships.remote
Session = session.Session
-object_session = session.object_session
+object_session = Session.object_session
sessionmaker = session.sessionmaker
-make_transient = session.make_transient
-make_transient_to_detached = session.make_transient_to_detached
-scoped_session = scoping.scoped_session
-AliasOption = query.AliasOption
-Query = query.Query
-Bundle = query.Bundle
+## make_transient = session.make_transient
+## make_transient_to_detached = session.make_transient_to_detached
+## scoped_session = scoping.scoped_session
+## AliasOption = query.AliasOption
+## Query = query.Query
+## Bundle = query.Bundle
public_factory = langhelpers.public_factory
def create_session(bind=..., **kwargs): ...
@@ -87,7 +87,7 @@ immediateload = ... # type: Any
noload = ... # type: Any
defaultload = ... # type: Any
-Load = strategy_options.Load
+## Load = strategy_options.Load
def eagerload(*args, **kwargs): ...
def eagerload_all(*args, **kwargs): ...
diff --git a/typeshed/third_party/2.7/tornado/locks.pyi b/typeshed/third_party/2.7/tornado/locks.pyi
new file mode 100644
index 0000000..3ad107c
--- /dev/null
+++ b/typeshed/third_party/2.7/tornado/locks.pyi
@@ -0,0 +1,50 @@
+# Stubs for tornado.locks (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+class _TimeoutGarbageCollector:
+ def __init__(self): ...
+
+class Condition(_TimeoutGarbageCollector):
+ io_loop = ... # type: Any
+ def __init__(self): ...
+ def wait(self, timeout=None): ...
+ def notify(self, n=1): ...
+ def notify_all(self): ...
+
+class Event:
+ def __init__(self): ...
+ def is_set(self): ...
+ def set(self): ...
+ def clear(self): ...
+ def wait(self, timeout=None): ...
+
+class _ReleasingContextManager:
+ def __init__(self, obj): ...
+ def __enter__(self): ...
+ def __exit__(self, exc_type, exc_val, exc_tb): ...
+
+class Semaphore(_TimeoutGarbageCollector):
+ def __init__(self, value=1): ...
+ def release(self): ...
+ def acquire(self, timeout=None): ...
+ def __enter__(self): ...
+ __exit__ = ... # type: Any
+ def __aenter__(self): ...
+ def __aexit__(self, typ, value, tb): ...
+
+class BoundedSemaphore(Semaphore):
+ def __init__(self, value=1): ...
+ def release(self): ...
+
+class Lock:
+ def __init__(self): ...
+ def acquire(self, timeout=None): ...
+ def release(self): ...
+ def __enter__(self): ...
+ __exit__ = ... # type: Any
+ def __aenter__(self): ...
+ def __aexit__(self, typ, value, tb): ...
+
diff --git a/typeshed/third_party/2.7/tornado/testing.pyi b/typeshed/third_party/2.7/tornado/testing.pyi
new file mode 100644
index 0000000..f32ca85
--- /dev/null
+++ b/typeshed/third_party/2.7/tornado/testing.pyi
@@ -0,0 +1,64 @@
+# Stubs for tornado.testing (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+import unittest
+import logging
+
+AsyncHTTPClient = ... # type: Any
+gen = ... # type: Any
+HTTPServer = ... # type: Any
+IOLoop = ... # type: Any
+netutil = ... # type: Any
+SimpleAsyncHTTPClient = ... # type: Any
+
+def get_unused_port(): ...
+def bind_unused_port(): ...
+
+class AsyncTestCase(unittest.TestCase):
+ def __init__(self, *args, **kwargs): ...
+ io_loop = ... # type: Any
+ def setUp(self): ...
+ def tearDown(self): ...
+ def get_new_ioloop(self): ...
+ def run(self, result=None): ...
+ def stop(self, _arg=None, **kwargs): ...
+ def wait(self, condition=None, timeout=5): ...
+
+class AsyncHTTPTestCase(AsyncTestCase):
+ http_client = ... # type: Any
+ http_server = ... # type: Any
+ def setUp(self): ...
+ def get_http_client(self): ...
+ def get_http_server(self): ...
+ def get_app(self): ...
+ def fetch(self, path, **kwargs): ...
+ def get_httpserver_options(self): ...
+ def get_http_port(self): ...
+ def get_protocol(self): ...
+ def get_url(self, path): ...
+ def tearDown(self): ...
+
+class AsyncHTTPSTestCase(AsyncHTTPTestCase):
+ def get_http_client(self): ...
+ def get_httpserver_options(self): ...
+ def get_ssl_options(self): ...
+ def get_protocol(self): ...
+
+def gen_test(f): ...
+
+class LogTrapTestCase(unittest.TestCase):
+ def run(self, result=None): ...
+
+class ExpectLog(logging.Filter):
+ logger = ... # type: Any
+ regex = ... # type: Any
+ required = ... # type: Any
+ matched = ... # type: Any
+ def __init__(self, logger, regex, required=True): ...
+ def filter(self, record): ...
+ def __enter__(self): ...
+ def __exit__(self, typ, value, tb): ...
+
+def main(**kwargs): ...
diff --git a/typeshed/stdlib/3/bz2.pyi b/typeshed/third_party/3/dateutil/__init__.pyi
similarity index 100%
copy from typeshed/stdlib/3/bz2.pyi
copy to typeshed/third_party/3/dateutil/__init__.pyi
diff --git a/typeshed/third_party/3/dateutil/parser.pyi b/typeshed/third_party/3/dateutil/parser.pyi
new file mode 100644
index 0000000..34c314e
--- /dev/null
+++ b/typeshed/third_party/3/dateutil/parser.pyi
@@ -0,0 +1,52 @@
+from typing import Dict, List, Tuple, Optional, Callable, Union, IO, Any
+from datetime import datetime
+
+__all__ = ... # type: List[str]
+
+
+class parserinfo(object):
+ JUMP = ... # type: List[str]
+ WEEKDAYS = ... # type: List[Tuple[str, str]]
+ MONTHS = ... # type: List[Tuple[str, str]]
+ HMS = ... # type: List[Tuple[str, str, str]]
+ AMPM = ... # type: List[Tuple[str, str, str]]
+ UTCZONE = ... # type: List[str]
+ PERTAIN = ... # type: List[str]
+ TZOFFSET = ... # type: Dict[str, int]
+
+ def __init__(self, dayfirst: bool=..., yearfirst: bool=...) -> None: ...
+ def jump(self, name: str) -> bool: ...
+ def weekday(self, name: str) -> str: ...
+ def month(self, name: str) -> str: ...
+ def hms(self, name: str) -> str: ...
+ def ampm(self, name: str) -> str: ...
+ def pertain(self, name: str) -> bool: ...
+ def utczone(self, name: str) -> bool: ...
+ def tzoffset(self, name: str) -> int: ...
+ def convertyear(self, year: int) -> int: ...
+ def validate(self, year: datetime) -> bool: ...
+
+
+class parser(object):
+ def __init__(self, info: parserinfo=...) -> None: ...
+
+ def parse(
+ self,
+ timestr: Union[str, bytes, IO[Any]],
+ default: Optional[datetime],
+ ignoretz: bool=...,
+ tzinfos =...,
+ ) -> datetime: ...
+
+DEFAULTPARSER = ... # type: parser
+
+
+def parse(timestr, parserinfo: parserinfo=..., **kwargs) -> datetime:
+ ...
+
+
+class _tzparser(object):
+ ...
+
+
+DEFAULTTZPARSER = ... # type: _tzparser
diff --git a/typeshed/third_party/3/pkg_resources/__init__.pyi b/typeshed/third_party/3/pkg_resources/__init__.pyi
new file mode 100644
index 0000000..ef58019
--- /dev/null
+++ b/typeshed/third_party/3/pkg_resources/__init__.pyi
@@ -0,0 +1,508 @@
+# Stubs for pkg_resources (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+# NOTE: Still a little bit incomplete.
+
+from typing import (Any, List, Callable, Union, Tuple, Iterator, Iterable,
+ Dict, Optional, Pattern)
+import io
+from collections import namedtuple
+import zipfile
+
+from ._vendor.packaging import version
+from ._vendor.packaging.specifiers import SpecifierSet
+
+
+SetuptoolsVersionType = Union['SetuptoolsVersion', 'SetuptoolsLegacyVersion']
+StrOrSequenceOfLines = Union[str, Iterable[str]]
+
+PackageOrRequirementType = Union[str, 'Requirement']
+
+# TODO
+LoaderType = Any
+ModuleType = Any
+ProviderFactoryType = Callable[[ModuleType], 'IResourceProvider']
+
+# entry point funcs types
+EntryPointFuncsDist = Union['Distribution', 'Requirement', str]
+EntryPointFuncsGroup = Dict[str, 'EntryPoint']
+EntryPointFuncsMap = Dict[str, EntryPointFuncsGroup]
+
+OnChangeCallback = Callable[['Distribution'], None]
+InstallerCallback = Callable[['Requirement'], 'Distribution']
+FindPluginsOutput = Tuple[List['Distribution'], Dict['Distribution', 'ResolutionError']]
+
+ImporterClassType = Any
+FinderCallable = Callable[[ImporterClassType, str, bool], Iterator['Distribution']]
+ImporterType = Any
+NamespaceHandlerCallable = Callable[[ImporterType, str, str, ModuleType], str]
+
+EPAttrsType = Tuple[str, ...]
+EPExtrasType = Tuple[str, ...]
+
+require = ... # type: Optional[Callable[..., List[Distribution]]]
+working_set = ... # type: Optional[WorkingSet]
+
+class PEP440Warning(RuntimeWarning): ...
+
+class _SetuptoolsVersionMixin:
+ def __hash__(self) -> int: ...
+ def __lt__(self, other) -> bool: ...
+ def __le__(self, other) -> bool: ...
+ def __eq__(self, other) -> bool: ...
+ def __ge__(self, other) -> bool: ...
+ def __gt__(self, other) -> bool: ...
+ def __ne__(self, other) -> bool: ...
+ def __getitem__(self, key: Any) -> Any: ...
+ def __iter__(self) -> Iterator[str]: ...
+
+
+class SetuptoolsVersion(_SetuptoolsVersionMixin, version.Version): ...
+class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin, version.LegacyVersion): ...
+
+
+def parse_version(v: str) -> SetuptoolsVersionType: ...
+
+class ResolutionError(Exception): ...
+
+class VersionConflict(ResolutionError):
+ @property
+ def dist(self) -> Distribution: ...
+ @property
+ def req(self) -> Requirement: ...
+ def report(self) -> str: ...
+ # TODO: fill required_by
+ def with_context(self, required_by) -> Union['VersionConflict', 'ContextualVersionConflict']: ...
+
+
+class ContextualVersionConflict(VersionConflict):
+ @property
+ # TODO: fill required_by
+ def required_by(self): ...
+
+# TODO
+class DistributionNotFound(ResolutionError):
+ @property
+ def req(self) -> Requirement: ...
+ @property
+ def requirers(self): ...
+ @property
+ def requirers_str(self) -> str: ...
+ def report(self) -> str: ...
+
+class UnknownExtra(ResolutionError): ...
+
+EGG_DIST = ... # type: int
+BINARY_DIST = ... # type: int
+SOURCE_DIST = ... # type: int
+CHECKOUT_DIST = ... # type: int
+DEVELOP_DIST = ... # type: int
+
+# TODO
+def register_loader_type(loader_type: LoaderType,
+ provider_factory: ProviderFactoryType) -> None: ...
+def get_provider(moduleOrReq: PackageOrRequirementType) -> Union['IResourceProvider', 'Distribution']: ...
+
+get_platform = ... # type: Callable[[], str]
+
+def compatible_platforms(provided: Optional[str], required: Optional[str]) -> bool: ...
+def run_script(dist_spec, script_name: str) -> None: ...
+
+run_main = ... # type: Any
+
+def get_distribution(dist: EntryPointFuncsDist) -> 'Distribution': ...
+def load_entry_point(dist: EntryPointFuncsDist, group: str, name: str) -> 'EntryPoint': ...
+def get_entry_map(dist: EntryPointFuncsDist,
+ group: Optional[str] = None) -> EntryPointFuncsMap: ...
+def get_entry_info(dist: EntryPointFuncsDist, group: str, name: str) -> Optional['EntryPoint']: ...
+
+# TODO
+class IMetadataProvider:
+ def has_metadata(name): ...
+ def get_metadata(name): ...
+ def get_metadata_lines(name): ...
+ def metadata_isdir(name): ...
+ def metadata_listdir(name): ...
+ def run_script(script_name, namespace): ...
+
+# TODO
+class IResourceProvider(IMetadataProvider):
+ def get_resource_filename(manager, resource_name): ...
+ def get_resource_stream(manager, resource_name): ...
+ def get_resource_string(manager, resource_name): ...
+ def has_resource(resource_name): ...
+ def resource_isdir(resource_name): ...
+ def resource_listdir(resource_name): ...
+
+class WorkingSet:
+ entries = ... # type: List[str]
+ entry_keys = ... # type: Dict[str, List[str]]
+ by_key = ... # type: Dict[str, Distribution]
+ callbacks = ... # type: List[OnChangeCallback]
+
+ def __init__(self, entries: List[str] = None) -> None: ...
+ def add_entry(self, entry: str) -> None: ...
+ def __contains__(self, dist: Distribution) -> bool: ...
+ def find(self, req: Requirement) -> Distribution: ...
+ def iter_entry_points(self, group: str,
+ name: str = None) -> Iterator[EntryPoint]: ...
+ # TODO: add type RequirementsType and add here
+ def run_script(self, requires, script_name: str) -> None: ...
+ def __iter__(self) -> Iterator[Distribution]: ...
+ def add(self, dist: Distribution,
+ entry: str = None,
+ insert: bool = ...,
+ replace: bool = ...) -> None: ...
+ def resolve(self, requirements: Iterable[Requirement],
+ env: Environment = None,
+ installer: Optional[InstallerCallback] = None,
+ replace_conflicting: bool = ...) -> List[Distribution]: ...
+ def find_plugins(self, plugin_env: Environment,
+ full_env: Optional[Environment] = None,
+ installer: Optional[InstallerCallback] = None,
+ fallback: bool = ...) -> FindPluginsOutput: ...
+ # TODO: check requirements type
+ def require(self, *requirements: StrOrSequenceOfLines) -> List[Distribution]: ...
+ def subscribe(self, callback: OnChangeCallback) -> None: ...
+
+class Environment:
+ platform = ... # type: str
+ python = ... # type: str
+ def __init__(self, search_path: Iterable[str] = None,
+ platform: str = ...,
+ python: str = ...) -> None: ...
+ def can_add(self, dist: Distribution) -> bool: ...
+ def remove(self, dist: Distribution) -> None: ...
+ def scan(self, search_path: Optional[Iterable[str]] = None) -> None: ...
+ def __getitem__(self, project_name: str) -> List[Distribution]: ...
+ def add(self, dist: Distribution) -> None: ...
+ def best_match(self, req: Requirement,
+ working_set: WorkingSet,
+ installer: Optional[InstallerCallback] = None) -> Optional[Distribution]: ...
+ def obtain(self, requirement: Requirement,
+ installer: Optional[InstallerCallback] = None) -> Optional[Distribution]: ...
+ def __iter__(self) -> Iterator[str]: ...
+ def __iadd__(self, other: Union[Distribution, 'Environment']) -> 'Environment': ...
+ def __add__(self, other: Union[Distribution, 'Environment']) -> 'Environment': ...
+
+AvailableDistributions = ... # type: Environment
+
+class ExtractionError(RuntimeError): ...
+
+class ResourceManager:
+ extraction_path = ... # type: Any
+ cached_files = ... # type: Any
+ def __init__(self) -> None: ...
+ def resource_exists(self, package_or_requirement: PackageOrRequirementType,
+ resource_name: str) -> bool: ...
+ def resource_isdir(self, package_or_requirement: PackageOrRequirementType,
+ resource_name: str) -> bool: ...
+ def resource_filename(self, package_or_requirement: PackageOrRequirementType,
+ resource_name: str) -> str: ...
+ # TODO: return type
+ def resource_stream(self, package_or_requirement: PackageOrRequirementType,
+ resource_name: str): ...
+ # TODO: return type
+ def resource_string(self, package_or_requirement: PackageOrRequirementType,
+ resource_name: str): ...
+ def resource_listdir(self, package_or_requirement: PackageOrRequirementType,
+ resource_name: str) -> List[str]: ...
+ def extraction_error(self) -> None: ...
+ def get_cache_path(self, archive_name: str,
+ names: Iterable[str] = ...) -> str: ...
+ def postprocess(self, tempname: str, filename: str) -> None: ...
+ def set_extraction_path(self, path: str) -> None: ...
+ def cleanup_resources(self, force: bool = ...) -> List[str]: ...
+
+def get_default_cache() -> str: ...
+
+def safe_name(name: str) -> str: ...
+
+def safe_version(version: str) -> str: ...
+
+def safe_extra(extra: str) -> str: ...
+
+def to_filename(name: str) -> str: ...
+
+def invalid_marker(text: str) -> Union[SyntaxError, bool]: ...
+
+def evaluate_marker(text: str, extra=None) -> bool: ...
+
+
+class NullProvider:
+ egg_name = ... # type: Optional[str]
+ egg_info = ... # type: Optional[str]
+ loader = ... # type: Optional[LoaderType]
+ module_path = ... # type: Optional[str]
+ # TODO: init param
+ def __init__(self, module) -> None: ...
+ def get_resource_filename(self, manager: ResourceManager, resource_name: str) -> str: ...
+ # TODO: return type
+ def get_resource_stream(self, manager: ResourceManager, resource_name: str) -> io.BytesIO: ...
+ # TODO: return type
+ def get_resource_string(self, manager: ResourceManager, resource_name: str): ...
+ def has_resource(self, resource_name: str) -> bool: ...
+ def has_metadata(self, name: str) -> bool: ...
+ def get_metadata(self, name: str) -> str: ...
+ def get_metadata_lines(self, name: str) -> Iterator[str]: ...
+ def resource_isdir(self, resource_name: str) -> bool: ...
+ def metadata_isdir(self, name: str) -> bool: ...
+ def resource_listdir(self, resource_name: str) -> List[str]: ...
+ def metadata_listdir(self, name: str) -> List[str]: ...
+ def run_script(self, script_name: str, namespace: Dict[str, Any]) -> None: ...
+
+
+class EggProvider(NullProvider):
+ # TODO: module type
+ def __init__(self, module) -> None: ...
+
+
+class DefaultProvider(EggProvider):
+ # TODO: return type
+ def get_resource_stream(self, manager: ResourceManager, resource_name: str): ...
+
+
+class EmptyProvider(NullProvider):
+ module_path = ... # type: Optional[str]
+ def __init__(self) -> None: ...
+
+
+empty_provider = ... # type: EmptyProvider
+
+
+class ZipManifests(dict):
+ @classmethod
+ def build(cls, path): ...
+ load = ... # type: Any
+
+
+class MemoizedZipManifests(ZipManifests): ...
+
+
+manifest_mod = namedtuple('manifest_mod', 'manifest mtime')
+def load(self, path): ...
+
+
+class ContextualZipFile(zipfile.ZipFile):
+ def __enter__(self): ...
+ def __exit__(self, type, value, traceback): ...
+ def __new__(cls, *args, **kwargs): ...
+
+
+class ZipProvider(EggProvider):
+ eagers = ... # type: Optional[List[str]]
+
+ zip_pre = ... # type: str
+
+ def __init__(self, module) -> None: ...
+
+ @property
+ def zipinfo(self): ...
+
+ def get_resource_filename(self, manager: ResourceManager, resource_name: str) -> str: ...
+
+class FileMetadata(EmptyProvider):
+ path = ... # type: Any
+
+ def __init__(self, path) -> None: ...
+
+ def has_metadata(self, name: str) -> bool: ...
+ # TODO
+ def get_metadata(self, name: str): ...
+
+ def get_metadata_lines(self, name): ...
+
+
+class PathMetadata(DefaultProvider):
+ module_path = ... # type: Optional[str]
+
+ egg_info = ... # type: Optional[str]
+
+ def __init__(self, path: str, egg_info: str) -> None: ...
+
+
+class EggMetadata(ZipProvider):
+ zip_pre = ... # type: str
+
+ loader = ... # type: Optional[LoaderType]
+
+ module_path = ... # type: Optional[str]
+
+ def __init__(self, importer: ImporterType) -> None: ...
+
+def register_finder(importer_type: ImporterClassType,
+ distribution_finder: FinderCallable): ...
+
+def find_distributions(path_item: str,
+ only: bool = ...) -> Iterator['Distribution']: ...
+
+def register_namespace_handler(importer_type: ImporterClassType,
+ namespace_handler: NamespaceHandlerCallable): ...
+
+def declare_namespace(packageName: str) -> None: ...
+
+# TODO:
+def fixup_namespace_packages(path_item, parent=None): ...
+
+def normalize_path(filename: str) -> str: ...
+
+def yield_lines(strs: StrOrSequenceOfLines) -> Iterator[str]: ...
+
+class EntryPoint:
+ name = ... # type: str
+ module_name = ... # type: str
+ attrs = ... # type: EPAttrsType
+ extras = ... # type: EPExtrasType
+ dist = ... # type: Optional['Distribution']
+ def __init__(self, name: str, module_name: str,
+ attrs: EPAttrsType=...,
+ extras: EPExtrasType=...,
+ dist: Optional['Distribution'] = None) -> None: ...
+ def load(self,
+ require: bool = ...,
+ *args, **kwargs) -> Any: ...
+ def resolve(self) -> Any: ...
+ def require(self,
+ env: Optional[Environment] = None,
+ installer: Optional[InstallerCallback] = None) -> None: ...
+ pattern = ... # type: Pattern
+ @classmethod
+ def parse(cls, src: str,
+ dist: Optional['Distribution'] = None) -> 'EntryPoint': ...
+ @classmethod
+ def parse_group(cls, group: str, lines: StrOrSequenceOfLines,
+ dist: Optional['Distribution'] = None) -> EntryPointFuncsGroup: ...
+ @classmethod
+ def parse_map(cls,
+ data: Union[Dict[str, StrOrSequenceOfLines], StrOrSequenceOfLines],
+ dist: Optional['Distribution'] = None) -> EntryPointFuncsMap: ...
+
+class Distribution:
+ PKG_INFO = ... # type: str
+ project_name = ... # type: str
+ py_version = ... # type: str
+ platform = ... # type: Optional[str]
+ location = ... # type: str
+ precedence = ... # type: int
+
+ def __init__(self,
+ location: Optional[str] = None,
+ metadata: Optional[IResourceProvider] = None,
+ project_name: Optional[str] = None,
+ version: Optional[str] = None,
+ py_version: str = ...,
+ platform: Optional[str] = None,
+ precedence: int = ...) -> None: ...
+
+ @classmethod
+ def from_location(cls,
+ location: str,
+ basename: str,
+ metadata: Optional[IResourceProvider] = None,
+ **kw) -> 'Distribution': ...
+
+ # TODO: add exact tuple form
+ @property
+ def hashcmp(self) -> Tuple[SetuptoolsVersionType, int, str, str, str, str]: ...
+ def __hash__(self) -> int: ...
+ def __lt__(self, other: 'Distribution') -> bool: ...
+ def __le__(self, other: 'Distribution') -> bool: ...
+ def __gt__(self, other: 'Distribution') -> bool: ...
+ def __ge__(self, other: 'Distribution') -> bool: ...
+ def __eq__(self, other: Any) -> bool: ...
+ def __ne__(self, other: Any) -> bool: ...
+
+ @property
+ def key(self) -> str: ...
+
+ @property
+ def parsed_version(self) -> SetuptoolsVersionType: ...
+
+ @property
+ def version(self) -> str: ...
+
+ def requires(self,
+ extras: Iterable[str] = ...) -> List[Requirement]: ...
+
+ def activate(self,
+ path: Optional[Iterable[str]] = None) -> None: ...
+
+ def egg_name(self) -> str: ...
+
+ def __getattr__(self, attr: str) -> Any: ...
+
+ @classmethod
+ def from_filename(cls, filename: str,
+ metadata: Optional[IResourceProvider] = None, **kw) -> 'Distribution': ...
+
+ def as_requirement(self) -> Requirement: ...
+
+ def load_entry_point(self, group: str, name: str) -> Any: ...
+
+ def get_entry_map(self,
+ group: Optional[str] = None) -> EntryPointFuncsMap: ...
+
+ def get_entry_info(self, group: str, name: str) -> Optional[EntryPoint]: ...
+
+ def insert_on(self, path: List[str],
+ loc: Optional[str] = None,
+ replace: bool = ...) -> None: ...
+
+ def check_version_conflict(self) -> None: ...
+
+ def has_version(self) -> bool: ...
+
+ def clone(self, **kw) -> 'Distribution': ...
+
+ @property
+ def extras(self) -> List[str]: ...
+
+
+class EggInfoDistribution(Distribution): ...
+
+
+class DistInfoDistribution(Distribution):
+ PKG_INFO = ... # type: str
+ EQEQ = ... # type: Pattern
+
+
+class RequirementParseError(ValueError): ...
+
+
+def parse_requirements(strs) -> Iterator['Requirement']: ...
+
+
+class Requirement:
+ project_name = ... # type: str
+ key = ... # type: str
+ specifier = ... # type: SpecifierSet
+ specs = ... # type: List[Tuple[str, str]]
+    extras = ... # type: Tuple[str, ...]
+ hashCmp = ... # type: Tuple[str, SpecifierSet, frozenset]
+
+ def __init__(self, project_name: str, specs: List[Tuple[str, str]],
+                 extras: Tuple[str, ...]) -> None: ...
+ def __eq__(self, other: Any) -> bool: ...
+ def __ne__(self, other: Any) -> bool: ...
+ def __contains__(self, item: Union[Distribution, str]) -> bool: ...
+ def __hash__(self) -> int: ...
+
+ @staticmethod
+ def parse(s: StrOrSequenceOfLines) -> 'Requirement': ...
+
+def ensure_directory(path: str) -> None: ...
+
+ContentType = List[str]
+def split_sections(s) -> Iterator[Tuple[Optional[str], ContentType]]: ...
+
+# Names in __all__ with no definition:
+# add_activation_listener
+# cleanup_resources
+# iter_entry_points
+# resource_exists
+# resource_filename
+# resource_isdir
+# resource_listdir
+# resource_stream
+# resource_string
+# set_extraction_path
diff --git a/typeshed/stdlib/3/bz2.pyi b/typeshed/third_party/3/pkg_resources/_vendor/__init__.py
similarity index 100%
rename from typeshed/stdlib/3/bz2.pyi
rename to typeshed/third_party/3/pkg_resources/_vendor/__init__.py
diff --git a/typeshed/third_party/3/pkg_resources/_vendor/packaging/__init__.pyi b/typeshed/third_party/3/pkg_resources/_vendor/packaging/__init__.pyi
new file mode 100644
index 0000000..6ccb56e
--- /dev/null
+++ b/typeshed/third_party/3/pkg_resources/_vendor/packaging/__init__.pyi
@@ -0,0 +1,4 @@
+# Stubs for packaging (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
diff --git a/typeshed/third_party/3/pkg_resources/_vendor/packaging/specifiers.pyi b/typeshed/third_party/3/pkg_resources/_vendor/packaging/specifiers.pyi
new file mode 100644
index 0000000..df128e0
--- /dev/null
+++ b/typeshed/third_party/3/pkg_resources/_vendor/packaging/specifiers.pyi
@@ -0,0 +1,58 @@
+# Stubs for packaging.specifiers (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+import abc
+
+class InvalidSpecifier(ValueError): ...
+
+class BaseSpecifier:
+ def __hash__(self): ...
+ def __eq__(self, other): ...
+ def __ne__(self, other): ...
+ @abc.abstractproperty
+ def prereleases(self): ...
+ @prereleases.setter
+ def prereleases(self, value): ...
+ def contains(self, item, prereleases=None): ...
+ def filter(self, iterable, prereleases=None): ...
+
+class _IndividualSpecifier(BaseSpecifier):
+ def __init__(self, spec:str=..., prereleases=None) -> None: ...
+ def __hash__(self): ...
+ def __eq__(self, other): ...
+ def __ne__(self, other): ...
+ @property
+ def operator(self): ...
+ @property
+ def version(self): ...
+ @property
+ def prereleases(self): ...
+ @prereleases.setter
+ def prereleases(self, value): ...
+ def __contains__(self, item): ...
+ def contains(self, item, prereleases=None): ...
+ def filter(self, iterable, prereleases=None): ...
+
+class LegacySpecifier(_IndividualSpecifier): ...
+
+class Specifier(_IndividualSpecifier):
+ @property
+ def prereleases(self): ...
+ @prereleases.setter
+ def prereleases(self, value): ...
+
+class SpecifierSet(BaseSpecifier):
+ def __init__(self, specifiers:str=..., prereleases=None) -> None: ...
+ def __hash__(self): ...
+ def __and__(self, other): ...
+ def __eq__(self, other): ...
+ def __ne__(self, other): ...
+ def __len__(self): ...
+ def __iter__(self): ...
+ @property
+ def prereleases(self): ...
+ @prereleases.setter
+ def prereleases(self, value): ...
+ def __contains__(self, item): ...
+ def contains(self, item, prereleases=None): ...
+ def filter(self, iterable, prereleases=None): ...
diff --git a/typeshed/third_party/3/pkg_resources/_vendor/packaging/version.pyi b/typeshed/third_party/3/pkg_resources/_vendor/packaging/version.pyi
new file mode 100644
index 0000000..8f5cd41
--- /dev/null
+++ b/typeshed/third_party/3/pkg_resources/_vendor/packaging/version.pyi
@@ -0,0 +1,49 @@
+# Stubs for packaging.version (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from collections import namedtuple
+
+_Version = namedtuple('_Version', ['epoch', 'release', 'dev', 'pre', 'post', 'local'])
+
+def parse(version): ...
+
+class InvalidVersion(ValueError): ...
+
+class _BaseVersion:
+ def __hash__(self): ...
+ def __lt__(self, other): ...
+ def __le__(self, other): ...
+ def __eq__(self, other): ...
+ def __ge__(self, other): ...
+ def __gt__(self, other): ...
+ def __ne__(self, other): ...
+
+class LegacyVersion(_BaseVersion):
+ def __init__(self, version): ...
+ @property
+ def public(self): ...
+ @property
+ def base_version(self): ...
+ @property
+ def local(self): ...
+ @property
+ def is_prerelease(self): ...
+ @property
+ def is_postrelease(self): ...
+
+VERSION_PATTERN = ... # type: Any
+
+class Version(_BaseVersion):
+ def __init__(self, version): ...
+ @property
+ def public(self): ...
+ @property
+ def base_version(self): ...
+ @property
+ def local(self): ...
+ @property
+ def is_prerelease(self): ...
+ @property
+ def is_postrelease(self): ...
diff --git a/typeshed/third_party/3/typed_ast/__init__.pyi b/typeshed/third_party/3/typed_ast/__init__.pyi
new file mode 100644
index 0000000..92e1216
--- /dev/null
+++ b/typeshed/third_party/3/typed_ast/__init__.pyi
@@ -0,0 +1,2 @@
+# This module is a fork of the CPython 2.7 and 3.5 ast modules with PEP 484 support.
+# See: https://github.com/dropbox/typed_ast
diff --git a/typeshed/third_party/3/typed_ast/ast27.pyi b/typeshed/third_party/3/typed_ast/ast27.pyi
new file mode 100644
index 0000000..40b44da
--- /dev/null
+++ b/typeshed/third_party/3/typed_ast/ast27.pyi
@@ -0,0 +1,361 @@
+import typing
+from typing import Any, Optional, Union, Generic, Iterator
+
+class NodeVisitor():
+ __doc__ = ... # type: str
+ def visit(self, node: AST) -> Any: ...
+ def generic_visit(self, node: AST) -> None: ...
+
+class NodeTransformer(NodeVisitor):
+ __doc__ = ... # type: str
+ def generic_visit(self, node: AST) -> None: ...
+
+def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> AST: ...
+def copy_location(new_node: AST, old_node: AST) -> AST: ...
+def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ...
+def fix_missing_locations(node: AST) -> AST: ...
+def get_docstring(node: AST, clean: bool = ...) -> str: ...
+def increment_lineno(node: AST, n: int = ...) -> AST: ...
+def iter_child_nodes(node: AST) -> Iterator[AST]: ...
+def iter_fields(node: AST) -> Iterator[typing.Tuple[str, Any]]: ...
+def literal_eval(node_or_string: Union[str, AST]) -> Any: ...
+def walk(node: AST) -> Iterator[AST]: ...
+
+PyCF_ONLY_AST = ... # type: int
+
+# ast classes
+
+identifier = str
+
+class AST:
+ _attributes = ... # type: typing.Tuple[str, ...]
+ _fields = ... # type: typing.Tuple[str, ...]
+ def __init__(self, *args, **kwargs) -> None: ...
+
+class mod(AST):
+ ...
+
+class Module(mod):
+ body = ... # type: typing.List[stmt]
+ type_ignores = ... # type: typing.List[TypeIgnore]
+
+class Interactive(mod):
+ body = ... # type: typing.List[stmt]
+
+class Expression(mod):
+ body = ... # type: expr
+
+class FunctionType(mod):
+ argtypes = ... # type: typing.List[expr]
+ returns = ... # type: expr
+
+class Suite(mod):
+ body = ... # type: typing.List[stmt]
+
+
+class stmt(AST):
+ lineno = ... # type: int
+ col_offset = ... # type: int
+
+class FunctionDef(stmt):
+ name = ... # type: identifier
+ args = ... # type: arguments
+ body = ... # type: typing.List[stmt]
+ decorator_list = ... # type: typing.List[expr]
+ type_comment = ... # type: Optional[str]
+
+class ClassDef(stmt):
+ name = ... # type: identifier
+ bases = ... # type: typing.List[expr]
+ body = ... # type: typing.List[stmt]
+ decorator_list = ... # type: typing.List[expr]
+
+class Return(stmt):
+ value = ... # type: Optional[expr]
+
+class Delete(stmt):
+ targets = ... # type: typing.List[expr]
+
+class Assign(stmt):
+ targets = ... # type: typing.List[expr]
+ value = ... # type: expr
+ type_comment = ... # type: Optional[str]
+
+class AugAssign(stmt):
+ target = ... # type: expr
+ op = ... # type: operator
+ value = ... # type: expr
+
+class Print(stmt):
+ dest = ... # type: Optional[expr]
+ values = ... # type: typing.List[expr]
+ nl = ... # type: bool
+
+class For(stmt):
+ target = ... # type: expr
+ iter = ... # type: expr
+ body = ... # type: typing.List[stmt]
+ orelse = ... # type: typing.List[stmt]
+ type_comment = ... # type: Optional[str]
+
+class While(stmt):
+ test = ... # type: expr
+ body = ... # type: typing.List[stmt]
+ orelse = ... # type: typing.List[stmt]
+
+class If(stmt):
+ test = ... # type: expr
+ body = ... # type: typing.List[stmt]
+ orelse = ... # type: typing.List[stmt]
+
+class With(stmt):
+ context_expr = ... # type: expr
+ optional_vars = ... # type: Optional[expr]
+ body = ... # type: typing.List[stmt]
+ type_comment = ... # type: Optional[str]
+
+class Raise(stmt):
+ type = ... # type: Optional[expr]
+ inst = ... # type: Optional[expr]
+ tback = ... # type: Optional[expr]
+
+class TryExcept(stmt):
+ body = ... # type: typing.List[stmt]
+ handlers = ... # type: typing.List[ExceptHandler]
+ orelse = ... # type: typing.List[stmt]
+
+class TryFinally(stmt):
+ body = ... # type: typing.List[stmt]
+ finalbody = ... # type: typing.List[stmt]
+
+class Assert(stmt):
+ test = ... # type: expr
+ msg = ... # type: Optional[expr]
+
+class Import(stmt):
+ names = ... # type: typing.List[alias]
+
+class ImportFrom(stmt):
+ module = ... # type: Optional[identifier]
+ names = ... # type: typing.List[alias]
+ level = ... # type: Optional[int]
+
+class Exec(stmt):
+ body = ... # type: expr
+ globals = ... # type: Optional[expr]
+ locals = ... # type: Optional[expr]
+
+class Global(stmt):
+ names = ... # type: typing.List[identifier]
+
+class Expr(stmt):
+ value = ... # type: expr
+
+class Pass(stmt): ...
+class Break(stmt): ...
+class Continue(stmt): ...
+
+
+class slice(AST):
+ ...
+
+_slice = slice # this lets us type the variable named 'slice' below
+
+class Slice(slice):
+ lower = ... # type: Optional[expr]
+ upper = ... # type: Optional[expr]
+ step = ... # type: Optional[expr]
+
+class ExtSlice(slice):
+ dims = ... # type: typing.List[slice]
+
+class Index(slice):
+ value = ... # type: expr
+
+class Ellipsis(slice): ...
+
+
+class expr(AST):
+ lineno = ... # type: int
+ col_offset = ... # type: int
+
+class BoolOp(expr):
+ op = ... # type: boolop
+ values = ... # type: typing.List[expr]
+
+class BinOp(expr):
+ left = ... # type: expr
+ op = ... # type: operator
+ right = ... # type: expr
+
+class UnaryOp(expr):
+ op = ... # type: unaryop
+ operand = ... # type: expr
+
+class Lambda(expr):
+ args = ... # type: arguments
+ body = ... # type: expr
+
+class IfExp(expr):
+ test = ... # type: expr
+ body = ... # type: expr
+ orelse = ... # type: expr
+
+class Dict(expr):
+ keys = ... # type: typing.List[expr]
+ values = ... # type: typing.List[expr]
+
+class Set(expr):
+ elts = ... # type: typing.List[expr]
+
+class ListComp(expr):
+ elt = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
+
+class SetComp(expr):
+ elt = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
+
+class DictComp(expr):
+ key = ... # type: expr
+ value = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
+
+class GeneratorExp(expr):
+ elt = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
+
+class Yield(expr):
+ value = ... # type: Optional[expr]
+
+class Compare(expr):
+ left = ... # type: expr
+ ops = ... # type: typing.List[cmpop]
+ comparators = ... # type: typing.List[expr]
+
+class Call(expr):
+ func = ... # type: expr
+ args = ... # type: typing.List[expr]
+ keywords = ... # type: typing.List[keyword]
+ starargs = ... # type: Optional[expr]
+ kwargs = ... # type: Optional[expr]
+
+class Repr(expr):
+ value = ... # type: expr
+
+class Num(expr):
+ n = ... # type: Union[int, float]
+
+class Str(expr):
+ s = ... # type: str
+
+class Attribute(expr):
+ value = ... # type: expr
+ attr = ... # type: identifier
+ ctx = ... # type: expr_context
+
+class Subscript(expr):
+ value = ... # type: expr
+ slice = ... # type: _slice
+ ctx = ... # type: expr_context
+
+class Name(expr):
+ id = ... # type: identifier
+ ctx = ... # type: expr_context
+
+class List(expr):
+ elts = ... # type: typing.List[expr]
+ ctx = ... # type: expr_context
+
+class Tuple(expr):
+ elts = ... # type: typing.List[expr]
+ ctx = ... # type: expr_context
+
+
+class expr_context(AST):
+ ...
+
+class AugLoad(expr_context): ...
+class AugStore(expr_context): ...
+class Del(expr_context): ...
+class Load(expr_context): ...
+class Param(expr_context): ...
+class Store(expr_context): ...
+
+
+class boolop(AST):
+ ...
+
+class And(boolop): ...
+class Or(boolop): ...
+
+class operator(AST):
+ ...
+
+class Add(operator): ...
+class BitAnd(operator): ...
+class BitOr(operator): ...
+class BitXor(operator): ...
+class Div(operator): ...
+class FloorDiv(operator): ...
+class LShift(operator): ...
+class Mod(operator): ...
+class Mult(operator): ...
+class Pow(operator): ...
+class RShift(operator): ...
+class Sub(operator): ...
+
+class unaryop(AST):
+ ...
+
+class Invert(unaryop): ...
+class Not(unaryop): ...
+class UAdd(unaryop): ...
+class USub(unaryop): ...
+
+class cmpop(AST):
+ ...
+
+class Eq(cmpop): ...
+class Gt(cmpop): ...
+class GtE(cmpop): ...
+class In(cmpop): ...
+class Is(cmpop): ...
+class IsNot(cmpop): ...
+class Lt(cmpop): ...
+class LtE(cmpop): ...
+class NotEq(cmpop): ...
+class NotIn(cmpop): ...
+
+
+class comprehension(AST):
+ target = ... # type: expr
+ iter = ... # type: expr
+ ifs = ... # type: typing.List[expr]
+
+
+class ExceptHandler(AST):
+ type = ... # type: Optional[expr]
+ name = ... # type: Optional[expr]
+ body = ... # type: typing.List[stmt]
+ lineno = ... # type: int
+ col_offset = ... # type: int
+
+
+class arguments(AST):
+ args = ... # type: typing.List[expr]
+ vararg = ... # type: Optional[identifier]
+ kwarg = ... # type: Optional[identifier]
+ defaults = ... # type: typing.List[expr]
+
+class keyword(AST):
+ arg = ... # type: identifier
+ value = ... # type: expr
+
+class alias(AST):
+ name = ... # type: identifier
+ asname = ... # type: Optional[identifier]
+
+
+class TypeIgnore(AST):
+ lineno = ... # type: int
diff --git a/typeshed/third_party/3/typed_ast/ast35.pyi b/typeshed/third_party/3/typed_ast/ast35.pyi
new file mode 100644
index 0000000..738ac7d
--- /dev/null
+++ b/typeshed/third_party/3/typed_ast/ast35.pyi
@@ -0,0 +1,393 @@
+import typing
+from typing import Any, Optional, Union, Generic, Iterator
+
+# Base visitor: visit() dispatches to a visit_<ClassName> method if defined,
+# otherwise falls back to generic_visit(), which visits all child nodes.
+class NodeVisitor():
+ __doc__ = ... # type: str
+ def visit(self, node: AST) -> Any: ...
+ def generic_visit(self, node: AST) -> None: ...
+
class NodeTransformer(NodeVisitor):
    """Visitor subclass that rewrites the tree in place.

    visit_* methods return the replacement node (or None to remove it).
    """
    __doc__ = ... # type: str
    # generic_visit returns the (possibly modified) node so transformations
    # can be chained; the upstream '-> None' annotation made every
    # 'node = transformer.visit(node)' call appear to produce None.
    def generic_visit(self, node: AST) -> AST: ...
+
# Module-level helper functions, mirroring the stdlib 'ast' module API.
def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> AST: ...
def copy_location(new_node: AST, old_node: AST) -> AST: ...
def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ...
def fix_missing_locations(node: AST) -> AST: ...
# get_docstring returns None when the node has no docstring, so the result
# type is Optional[str]; the upstream annotation of plain 'str' was wrong.
def get_docstring(node: AST, clean: bool = ...) -> Optional[str]: ...
def increment_lineno(node: AST, n: int = ...) -> AST: ...
def iter_child_nodes(node: AST) -> Iterator[AST]: ...
def iter_fields(node: AST) -> Iterator[typing.Tuple[str, Any]]: ...
def literal_eval(node_or_string: Union[str, AST]) -> Any: ...
def walk(node: AST) -> Iterator[AST]: ...
+
+# compile() flag: return the AST instead of compiled code.
+PyCF_ONLY_AST = ... # type: int
+
+# ast classes
+
+# Type alias: AST 'identifier' fields are plain strings.
+identifier = str
+
+# Root of the node hierarchy. _fields/_attributes name the child slots and
+# positional attributes; the constructor accepts them as positional or
+# keyword arguments.
+class AST:
+ _attributes = ... # type: typing.Tuple[str, ...]
+ _fields = ... # type: typing.Tuple[str, ...]
+ def __init__(self, *args, **kwargs) -> None: ...
+
+# Top-level (module-ish) nodes produced by the different parse modes.
+class mod(AST):
+ ...
+
+class Module(mod):
+ body = ... # type: typing.List[stmt]
+ # typed_ast extension: '# type: ignore' comments found in the module.
+ type_ignores = ... # type: typing.List[TypeIgnore]
+
+class Interactive(mod):
+ body = ... # type: typing.List[stmt]
+
+class Expression(mod):
+ body = ... # type: expr
+
+# typed_ast extension: result of parsing a function's signature type comment.
+class FunctionType(mod):
+ argtypes = ... # type: typing.List[expr]
+ returns = ... # type: expr
+
+class Suite(mod):
+ body = ... # type: typing.List[stmt]
+
+
+# Statement nodes. All statements carry source position attributes.
+# The 'type_comment' fields are typed_ast extensions holding the raw text of
+# a PEP 484 '# type: ...' comment attached to the statement, if any.
+class stmt(AST):
+ lineno = ... # type: int
+ col_offset = ... # type: int
+
+class FunctionDef(stmt):
+ name = ... # type: identifier
+ args = ... # type: arguments
+ body = ... # type: typing.List[stmt]
+ decorator_list = ... # type: typing.List[expr]
+ returns = ... # type: Optional[expr]
+ type_comment = ... # type: Optional[str]
+
+class AsyncFunctionDef(stmt):
+ name = ... # type: identifier
+ args = ... # type: arguments
+ body = ... # type: typing.List[stmt]
+ decorator_list = ... # type: typing.List[expr]
+ returns = ... # type: Optional[expr]
+ type_comment = ... # type: Optional[str]
+
+class ClassDef(stmt):
+ name = ... # type: identifier
+ bases = ... # type: typing.List[expr]
+ keywords = ... # type: typing.List[keyword]
+ body = ... # type: typing.List[stmt]
+ decorator_list = ... # type: typing.List[expr]
+
+class Return(stmt):
+ value = ... # type: Optional[expr]
+
+class Delete(stmt):
+ targets = ... # type: typing.List[expr]
+
+class Assign(stmt):
+ targets = ... # type: typing.List[expr]
+ value = ... # type: expr
+ type_comment = ... # type: Optional[str]
+
+class AugAssign(stmt):
+ target = ... # type: expr
+ op = ... # type: operator
+ value = ... # type: expr
+
+class For(stmt):
+ target = ... # type: expr
+ iter = ... # type: expr
+ body = ... # type: typing.List[stmt]
+ orelse = ... # type: typing.List[stmt]
+ type_comment = ... # type: Optional[str]
+
+class AsyncFor(stmt):
+ target = ... # type: expr
+ iter = ... # type: expr
+ body = ... # type: typing.List[stmt]
+ orelse = ... # type: typing.List[stmt]
+
+class While(stmt):
+ test = ... # type: expr
+ body = ... # type: typing.List[stmt]
+ orelse = ... # type: typing.List[stmt]
+
+class If(stmt):
+ test = ... # type: expr
+ body = ... # type: typing.List[stmt]
+ orelse = ... # type: typing.List[stmt]
+
+class With(stmt):
+ items = ... # type: typing.List[withitem]
+ body = ... # type: typing.List[stmt]
+ type_comment = ... # type: Optional[str]
+
+class AsyncWith(stmt):
+ items = ... # type: typing.List[withitem]
+ body = ... # type: typing.List[stmt]
+
+class Raise(stmt):
+ exc = ... # type: Optional[expr]
+ cause = ... # type: Optional[expr]
+
+class Try(stmt):
+ body = ... # type: typing.List[stmt]
+ handlers = ... # type: typing.List[ExceptHandler]
+ orelse = ... # type: typing.List[stmt]
+ finalbody = ... # type: typing.List[stmt]
+
+class Assert(stmt):
+ test = ... # type: expr
+ msg = ... # type: Optional[expr]
+
+class Import(stmt):
+ names = ... # type: typing.List[alias]
+
+class ImportFrom(stmt):
+ module = ... # type: Optional[identifier]
+ names = ... # type: typing.List[alias]
+ level = ... # type: Optional[int]
+
+class Global(stmt):
+ names = ... # type: typing.List[identifier]
+
+class Nonlocal(stmt):
+ names = ... # type: typing.List[identifier]
+
+# An expression evaluated for its side effects (expression statement).
+class Expr(stmt):
+ value = ... # type: expr
+
+class Pass(stmt): ...
+class Break(stmt): ...
+class Continue(stmt): ...
+
+
+# Subscript index nodes. The class is named 'slice' by the ASDL grammar,
+# shadowing the builtin within this module.
+class slice(AST):
+ ...
+
+_slice = slice # this lets us type the variable named 'slice' below
+
+class Slice(slice):
+ lower = ... # type: Optional[expr]
+ upper = ... # type: Optional[expr]
+ step = ... # type: Optional[expr]
+
+# Multi-dimensional subscript, e.g. a[1:2, 3].
+class ExtSlice(slice):
+ dims = ... # type: typing.List[slice]
+
+class Index(slice):
+ value = ... # type: expr
+
+
+# Expression nodes. All expressions carry source position attributes.
+class expr(AST):
+ lineno = ... # type: int
+ col_offset = ... # type: int
+
+class BoolOp(expr):
+ op = ... # type: boolop
+ values = ... # type: typing.List[expr]
+
+class BinOp(expr):
+ left = ... # type: expr
+ op = ... # type: operator
+ right = ... # type: expr
+
+class UnaryOp(expr):
+ op = ... # type: unaryop
+ operand = ... # type: expr
+
+class Lambda(expr):
+ args = ... # type: arguments
+ body = ... # type: expr
+
+# Conditional expression: 'body if test else orelse'.
+class IfExp(expr):
+ test = ... # type: expr
+ body = ... # type: expr
+ orelse = ... # type: expr
+
+# Dict display; keys[i] pairs with values[i].
+class Dict(expr):
+ keys = ... # type: typing.List[expr]
+ values = ... # type: typing.List[expr]
+
+class Set(expr):
+ elts = ... # type: typing.List[expr]
+
+class ListComp(expr):
+ elt = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
+
+class SetComp(expr):
+ elt = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
+
+class DictComp(expr):
+ key = ... # type: expr
+ value = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
+
+class GeneratorExp(expr):
+ elt = ... # type: expr
+ generators = ... # type: typing.List[comprehension]
+
+class Await(expr):
+ value = ... # type: expr
+
+class Yield(expr):
+ value = ... # type: Optional[expr]
+
+class YieldFrom(expr):
+ value = ... # type: expr
+
+# Chained comparison: left ops[0] comparators[0] ops[1] comparators[1] ...
+class Compare(expr):
+ left = ... # type: expr
+ ops = ... # type: typing.List[cmpop]
+ comparators = ... # type: typing.List[expr]
+
+class Call(expr):
+ func = ... # type: expr
+ args = ... # type: typing.List[expr]
+ keywords = ... # type: typing.List[keyword]
+
class Num(expr):
    # Numeric literal. Python 3 complex literals (e.g. 1j) also produce Num
    # nodes, so the value can be int, float, or complex; the upstream
    # Union[int, float] rejected valid complex constants.
    n = ... # type: Union[int, float, complex]
+
+class Str(expr):
+ s = ... # type: str
+
+class Bytes(expr):
+ s = ... # type: bytes
+
+# Literal True, False, or None.
+class NameConstant(expr):
+ value = ... # type: Any
+
+class Ellipsis(expr): ...
+
+# Attribute access: value.attr
+class Attribute(expr):
+ value = ... # type: expr
+ attr = ... # type: identifier
+ ctx = ... # type: expr_context
+
+# Subscription: value[slice]; 'slice' field typed via the _slice alias
+# because the class name shadows it.
+class Subscript(expr):
+ value = ... # type: expr
+ slice = ... # type: _slice
+ ctx = ... # type: expr_context
+
+class Starred(expr):
+ value = ... # type: expr
+ ctx = ... # type: expr_context
+
+class Name(expr):
+ id = ... # type: identifier
+ ctx = ... # type: expr_context
+
+class List(expr):
+ elts = ... # type: typing.List[expr]
+ ctx = ... # type: expr_context
+
+class Tuple(expr):
+ elts = ... # type: typing.List[expr]
+ ctx = ... # type: expr_context
+
+
+# Expression-context markers stored in each expression node's 'ctx' field
+# (load, store, delete, aug-assign, or parameter position).
+class expr_context(AST):
+ ...
+
+class AugLoad(expr_context): ...
+class AugStore(expr_context): ...
+class Del(expr_context): ...
+class Load(expr_context): ...
+class Param(expr_context): ...
+class Store(expr_context): ...
+
+
+# Boolean operators ('and' / 'or') carried by BoolOp nodes.
+class boolop(AST):
+ ...
+
+class And(boolop): ...
+class Or(boolop): ...
+
+# Binary arithmetic/bitwise operators carried by BinOp and AugAssign nodes.
+class operator(AST):
+ ...
+
+class Add(operator): ...
+class BitAnd(operator): ...
+class BitOr(operator): ...
+class BitXor(operator): ...
+class Div(operator): ...
+class FloorDiv(operator): ...
+class LShift(operator): ...
+class Mod(operator): ...
+class Mult(operator): ...
+# Matrix multiplication '@' (new in Python 3.5; absent from ast27).
+class MatMult(operator): ...
+class Pow(operator): ...
+class RShift(operator): ...
+class Sub(operator): ...
+
+# Unary operators carried by UnaryOp nodes.
+class unaryop(AST):
+ ...
+
+class Invert(unaryop): ...
+class Not(unaryop): ...
+class UAdd(unaryop): ...
+class USub(unaryop): ...
+
+# Comparison operators carried by Compare nodes.
+class cmpop(AST):
+ ...
+
+class Eq(cmpop): ...
+class Gt(cmpop): ...
+class GtE(cmpop): ...
+class In(cmpop): ...
+class Is(cmpop): ...
+class IsNot(cmpop): ...
+class Lt(cmpop): ...
+class LtE(cmpop): ...
+class NotEq(cmpop): ...
+class NotIn(cmpop): ...
+
+
+# One 'for ... in ... [if ...]' clause of a comprehension or generator
+# expression.
+class comprehension(AST):
+ target = ... # type: expr
+ iter = ... # type: expr
+ ifs = ... # type: typing.List[expr]
+
+
+# A single 'except' clause. Unlike ast27, 'name' is a plain identifier
+# because Python 3 only allows 'except E as name:'.
+class ExceptHandler(AST):
+ type = ... # type: Optional[expr]
+ name = ... # type: Optional[identifier]
+ body = ... # type: typing.List[stmt]
+ lineno = ... # type: int
+ col_offset = ... # type: int
+
+
+# Formal parameter list (Python 3 form: keyword-only args; parameters are
+# 'arg' nodes rather than bare identifiers, allowing annotations).
+class arguments(AST):
+ args = ... # type: typing.List[arg]
+ vararg = ... # type: Optional[arg]
+ kwonlyargs = ... # type: typing.List[arg]
+ kw_defaults = ... # type: typing.List[expr]
+ kwarg = ... # type: Optional[arg]
+ defaults = ... # type: typing.List[expr]
+
+# A single named parameter, optionally annotated.
+class arg(AST):
+ arg = ... # type: identifier
+ annotation = ... # type: Optional[expr]
+ lineno = ... # type: int
+ col_offset = ... # type: int
+
+# A keyword argument in a call; arg is None for a '**kwargs' unpacking.
+class keyword(AST):
+ arg = ... # type: Optional[identifier]
+ value = ... # type: expr
+
+# One 'name [as asname]' entry of an import statement.
+class alias(AST):
+ name = ... # type: identifier
+ asname = ... # type: Optional[identifier]
+
+# One 'expr [as target]' item of a with statement.
+class withitem(AST):
+ context_expr = ... # type: expr
+ optional_vars = ... # type: Optional[expr]
+
+
+# typed_ast extension: records the line of a '# type: ignore' comment
+# (these are collected in Module.type_ignores; not part of stdlib ast).
+class TypeIgnore(AST):
+ lineno = ... # type: int
diff --git a/typeshed/third_party/3/typed_ast/conversions.pyi b/typeshed/third_party/3/typed_ast/conversions.pyi
new file mode 100644
index 0000000..53fcc32
--- /dev/null
+++ b/typeshed/third_party/3/typed_ast/conversions.pyi
@@ -0,0 +1,4 @@
+from . import ast27
+from . import ast35
+
+# Convert a tree of Python 2 (ast27) nodes into the Python 3 (ast35) node types.
+def py2to3(ast: ast27.AST) -> ast35.AST: ...
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/mypy.git
More information about the debian-med-commit
mailing list