[med-svn] [mypy] 01/01: New upstream version 0.511
Michael Crusoe
misterc-guest at moszumanska.debian.org
Sat Jun 24 16:50:41 UTC 2017
This is an automated email from the git hooks/post-receive script.
misterc-guest pushed a commit to annotated tag upstream/0.511
in repository mypy.
commit 9adeeea5a9b4d032aa4b9718f9858d52c16922df
Author: Michael R. Crusoe <michael.crusoe at gmail.com>
Date: Sat Jun 24 08:49:16 2017 -0700
New upstream version 0.511
---
.gitignore | 29 -
.gitmodules | 3 -
.travis.yml | 20 -
CONTRIBUTING.md | 152 -
CREDITS | 101 -
LICENSE | 228 --
PKG-INFO | 2 +-
README.md | 310 --
appveyor.yml | 32 -
build-requirements.txt | 2 -
conftest.py | 3 -
docs/Makefile | 177 -
docs/README.md | 49 -
docs/make.bat | 242 --
docs/requirements-docs.txt | 2 -
docs/source/additional_features.rst | 9 -
docs/source/basics.rst | 194 --
docs/source/builtin_types.rst | 37 -
docs/source/casts.rst | 39 -
docs/source/cheat_sheet.rst | 241 --
docs/source/cheat_sheet_py3.rst | 288 --
docs/source/class_basics.rst | 157 -
docs/source/command_line.rst | 413 ---
docs/source/common_issues.rst | 386 ---
docs/source/conf.py | 268 --
docs/source/config_file.rst | 184 --
docs/source/duck_type_compatibility.rst | 40 -
docs/source/dynamic_typing.rst | 86 -
docs/source/faq.rst | 270 --
docs/source/function_overloading.rst | 60 -
docs/source/generics.rst | 390 ---
docs/source/getting_started.rst | 24 -
docs/source/index.rst | 42 -
docs/source/introduction.rst | 30 -
docs/source/kinds_of_types.rst | 1002 ------
docs/source/python2.rst | 136 -
docs/source/python36.rst | 100 -
docs/source/revision_history.rst | 176 -
docs/source/supported_python_features.rst | 20 -
docs/source/type_inference_and_annotations.rst | 172 -
extensions/README.md | 6 -
extensions/mypy_extensions.py | 97 -
extensions/setup.py | 44 -
lib-typing/2.7/setup.py | 46 -
lib-typing/2.7/test_typing.py | 1629 ----------
lib-typing/2.7/typing.py | 2016 ------------
lib-typing/3.2/test_typing.py | 2063 ------------
lib-typing/3.2/typing.py | 2160 -------------
misc/actions_stubs.py | 111 -
misc/analyze_cache.py | 189 --
misc/async_matrix.py | 120 -
misc/fix_annotate.py | 219 --
misc/incremental_checker.py | 356 --
misc/macs.el | 22 -
misc/perf_checker.py | 93 -
misc/remove-eol-whitespace.sh | 8 -
misc/test_case_to_actual.py | 71 -
misc/touch_checker.py | 151 -
misc/variadics.py | 54 -
mypy.egg-info/PKG-INFO | 2 +-
mypy.egg-info/SOURCES.txt | 584 +---
mypy.egg-info/requires.txt | 2 +-
mypy/api.py | 12 +-
mypy/applytype.py | 7 +-
mypy/binder.py | 64 +-
mypy/build.py | 212 +-
mypy/checker.py | 1127 ++++---
mypy/checkexpr.py | 349 +-
mypy/checkmember.py | 61 +-
mypy/checkstrformat.py | 105 +-
mypy/constraints.py | 60 +-
mypy/defaults.py | 2 +-
mypy/erasetype.py | 20 +-
mypy/errors.py | 157 +-
mypy/expandtype.py | 13 +-
mypy/exprtotype.py | 93 +-
mypy/fastparse.py | 523 +--
mypy/fastparse2.py | 67 +-
mypy/fixup.py | 73 +-
mypy/indirection.py | 11 +-
mypy/infer.py | 4 +-
mypy/join.py | 103 +-
mypy/lex.py | 904 ------
mypy/main.py | 129 +-
mypy/maptype.py | 9 +-
mypy/meet.py | 126 +-
mypy/messages.py | 117 +-
mypy/moduleinfo.py | 6 +
mypy/myunit/__init__.py | 380 ---
mypy/myunit/__main__.py | 18 -
mypy/nodes.py | 494 ++-
mypy/options.py | 25 +-
mypy/parse.py | 2047 +-----------
mypy/parsetype.py | 249 --
mypy/report.py | 59 +-
mypy/sametypes.py | 11 +-
mypy/semanal.py | 1152 +++++--
mypy/sharedparse.py | 1 +
mypy/solve.py | 25 +-
mypy/stats.py | 9 +-
mypy/strconv.py | 104 +-
mypy/stubgen.py | 55 +-
mypy/stubgenc.py | 13 +-
mypy/stubutil.py | 3 +-
mypy/subtypes.py | 325 +-
mypy/test/__init__.py | 0
mypy/test/collect.py | 0
mypy/test/config.py | 19 -
mypy/test/data.py | 483 ---
mypy/test/helpers.py | 285 --
mypy/test/testargs.py | 18 -
mypy/test/testcheck.py | 332 --
mypy/test/testcmdline.py | 104 -
mypy/test/testextensions.py | 125 -
mypy/test/testgraph.py | 69 -
mypy/test/testinfer.py | 223 --
mypy/test/testlex.py | 466 ---
mypy/test/testmoduleinfo.py | 14 -
mypy/test/testparse.py | 79 -
mypy/test/testpythoneval.py | 135 -
mypy/test/testreports.py | 40 -
mypy/test/testsemanal.py | 224 --
mypy/test/testsolve.py | 156 -
mypy/test/teststubgen.py | 186 --
mypy/test/testsubtypes.py | 208 --
mypy/test/testtransform.py | 85 -
mypy/test/testtypegen.py | 128 -
mypy/test/testtypes.py | 847 -----
mypy/test/update.py | 0
mypy/traverser.py | 48 +-
mypy/treetransform.py | 48 +-
mypy/tvar_scope.py | 82 +
mypy/typeanal.py | 418 ++-
mypy/typefixture.py | 9 +-
mypy/types.py | 562 ++--
mypy/typevars.py | 2 +-
mypy/util.py | 58 +-
mypy/version.py | 2 +-
mypy/visitor.py | 137 +-
mypy/waiter.py | 108 +-
mypy_self_check.ini | 8 -
mypy_strict_optional.ini | 5 -
pinfer/.gitignore | 3 -
pinfer/LICENSE | 27 -
pinfer/README | 47 -
pinfer/__init__.py | 0
pinfer/inspect3.py | 122 -
pinfer/p.py | 83 -
pinfer/pinfer.py | 686 ----
pinfer/test_pinfer.py | 302 --
pinfer/test_pinfer3.py | 31 -
pinfer/unparse.py | 610 ----
pinfer/unparse3.py | 610 ----
pytest.ini | 14 -
runtests.py | 428 ---
scripts/dumpmodule.py | 161 +
scripts/stubtest.py | 205 ++
setup.cfg | 10 +-
setup.py | 9 +-
test-data/.flake8 | 21 -
test-data/samples/bottles.py | 13 -
test-data/samples/class.py | 18 -
test-data/samples/cmdline.py | 8 -
test-data/samples/crawl.py | 863 -----
test-data/samples/crawl2.py | 852 -----
test-data/samples/dict.py | 8 -
test-data/samples/fib.py | 5 -
test-data/samples/files.py | 14 -
test-data/samples/for.py | 4 -
test-data/samples/generators.py | 24 -
test-data/samples/greet.py | 8 -
test-data/samples/guess.py | 32 -
test-data/samples/hello.py | 2 -
test-data/samples/input.py | 3 -
test-data/samples/itertool.py | 16 -
test-data/samples/readme.txt | 25 -
test-data/samples/regexp.py | 7 -
test-data/stdlib-samples/3.2/base64.py | 411 ---
test-data/stdlib-samples/3.2/fnmatch.py | 112 -
test-data/stdlib-samples/3.2/genericpath.py | 112 -
test-data/stdlib-samples/3.2/getopt.py | 220 --
test-data/stdlib-samples/3.2/glob.py | 84 -
.../3.2/incomplete/logging/__init__.py | 1873 -----------
.../3.2/incomplete/urllib/__init__.py | 0
.../stdlib-samples/3.2/incomplete/urllib/parse.py | 980 ------
test-data/stdlib-samples/3.2/posixpath.py | 466 ---
test-data/stdlib-samples/3.2/pprint.py | 380 ---
test-data/stdlib-samples/3.2/random.py | 743 -----
test-data/stdlib-samples/3.2/shutil.py | 790 -----
test-data/stdlib-samples/3.2/subprocess.py | 1703 ----------
test-data/stdlib-samples/3.2/tempfile.py | 717 -----
test-data/stdlib-samples/3.2/test/__init__.py | 0
test-data/stdlib-samples/3.2/test/randv2_32.pck | 633 ----
test-data/stdlib-samples/3.2/test/randv2_64.pck | 633 ----
test-data/stdlib-samples/3.2/test/randv3.pck | 633 ----
.../3.2/test/subprocessdata/fd_status.py | 24 -
.../3.2/test/subprocessdata/input_reader.py | 7 -
.../stdlib-samples/3.2/test/subprocessdata/qcat.py | 7 -
.../3.2/test/subprocessdata/qgrep.py | 10 -
.../3.2/test/subprocessdata/sigchild_ignore.py | 6 -
test-data/stdlib-samples/3.2/test/support.py | 1602 ---------
test-data/stdlib-samples/3.2/test/test_base64.py | 267 --
test-data/stdlib-samples/3.2/test/test_fnmatch.py | 93 -
.../stdlib-samples/3.2/test/test_genericpath.py | 313 --
test-data/stdlib-samples/3.2/test/test_getopt.py | 190 --
test-data/stdlib-samples/3.2/test/test_glob.py | 122 -
.../stdlib-samples/3.2/test/test_posixpath.py | 531 ---
test-data/stdlib-samples/3.2/test/test_pprint.py | 488 ---
test-data/stdlib-samples/3.2/test/test_random.py | 533 ---
test-data/stdlib-samples/3.2/test/test_set.py | 1884 -----------
test-data/stdlib-samples/3.2/test/test_shutil.py | 978 ------
.../stdlib-samples/3.2/test/test_subprocess.py | 1764 ----------
test-data/stdlib-samples/3.2/test/test_tempfile.py | 1122 -------
test-data/stdlib-samples/3.2/test/test_textwrap.py | 601 ----
.../stdlib-samples/3.2/test/tf_inherit_check.py | 25 -
test-data/stdlib-samples/3.2/textwrap.py | 391 ---
test-data/unit/check-abstract.test | 734 -----
test-data/unit/check-async-await.test | 393 ---
test-data/unit/check-basic.test | 310 --
test-data/unit/check-bound.test | 203 --
test-data/unit/check-callable.test | 345 --
test-data/unit/check-class-namedtuple.test | 378 ---
test-data/unit/check-classes.test | 2761 ----------------
test-data/unit/check-columns.test | 68 -
test-data/unit/check-dynamic-typing.test | 676 ----
test-data/unit/check-expressions.test | 1652 ----------
test-data/unit/check-fastparse.test | 301 --
test-data/unit/check-flags.test | 305 --
test-data/unit/check-functions.test | 1666 ----------
test-data/unit/check-generic-subtyping.test | 746 -----
test-data/unit/check-generics.test | 1462 ---------
test-data/unit/check-ignore.test | 218 --
test-data/unit/check-incremental.test | 1780 ----------
test-data/unit/check-inference-context.test | 880 -----
test-data/unit/check-inference.test | 1765 ----------
test-data/unit/check-isinstance.test | 1330 --------
test-data/unit/check-kwargs.test | 339 --
test-data/unit/check-lists.test | 72 -
test-data/unit/check-modules.test | 1408 --------
test-data/unit/check-multiple-inheritance.test | 242 --
test-data/unit/check-namedtuple.test | 429 ---
test-data/unit/check-newsyntax.test | 100 -
test-data/unit/check-newtype.test | 324 --
test-data/unit/check-optional.test | 546 ----
test-data/unit/check-overloading.test | 759 -----
test-data/unit/check-python2.test | 242 --
test-data/unit/check-selftype.test | 358 ---
test-data/unit/check-semanal-error.test | 81 -
test-data/unit/check-statements.test | 1451 ---------
test-data/unit/check-super.test | 109 -
test-data/unit/check-tuples.test | 927 ------
test-data/unit/check-type-aliases.test | 74 -
test-data/unit/check-type-checks.test | 113 -
test-data/unit/check-type-promotion.test | 39 -
test-data/unit/check-typeddict.test | 462 ---
test-data/unit/check-typevar-values.test | 505 ---
test-data/unit/check-underscores.test | 16 -
test-data/unit/check-unions.test | 219 --
test-data/unit/check-unreachable-code.test | 459 ---
test-data/unit/check-unsupported.test | 15 -
test-data/unit/check-varargs.test | 592 ----
test-data/unit/check-warnings.test | 132 -
test-data/unit/cmdline.test | 479 ---
test-data/unit/fixtures/__new__.pyi | 14 -
test-data/unit/fixtures/alias.pyi | 12 -
test-data/unit/fixtures/args.pyi | 29 -
test-data/unit/fixtures/async_await.pyi | 9 -
test-data/unit/fixtures/bool.pyi | 15 -
test-data/unit/fixtures/callable.pyi | 26 -
test-data/unit/fixtures/classmethod.pyi | 22 -
test-data/unit/fixtures/complex.pyi | 11 -
test-data/unit/fixtures/dict.pyi | 35 -
test-data/unit/fixtures/exception.pyi | 13 -
test-data/unit/fixtures/for.pyi | 19 -
test-data/unit/fixtures/function.pyi | 10 -
test-data/unit/fixtures/isinstance.pyi | 22 -
test-data/unit/fixtures/isinstancelist.pyi | 44 -
test-data/unit/fixtures/list.pyi | 30 -
test-data/unit/fixtures/module.pyi | 18 -
test-data/unit/fixtures/module_all.pyi | 15 -
test-data/unit/fixtures/module_all_python2.pyi | 16 -
test-data/unit/fixtures/ops.pyi | 58 -
test-data/unit/fixtures/primitives.pyi | 17 -
test-data/unit/fixtures/property.pyi | 17 -
test-data/unit/fixtures/python2.pyi | 18 -
test-data/unit/fixtures/set.pyi | 21 -
test-data/unit/fixtures/slice.pyi | 13 -
test-data/unit/fixtures/staticmethod.pyi | 19 -
test-data/unit/fixtures/transform.pyi | 30 -
test-data/unit/fixtures/tuple-simple.pyi | 20 -
test-data/unit/fixtures/tuple.pyi | 29 -
test-data/unit/fixtures/union.pyi | 18 -
test-data/unit/lib-stub/__builtin__.pyi | 27 -
test-data/unit/lib-stub/abc.pyi | 3 -
test-data/unit/lib-stub/builtins.pyi | 23 -
test-data/unit/lib-stub/collections.pyi | 3 -
test-data/unit/lib-stub/mypy_extensions.pyi | 6 -
test-data/unit/lib-stub/sys.pyi | 2 -
test-data/unit/lib-stub/types.pyi | 4 -
test-data/unit/lib-stub/typing.pyi | 90 -
test-data/unit/parse-errors.test | 448 ---
test-data/unit/parse-python2.test | 399 ---
test-data/unit/parse.test | 3386 --------------------
test-data/unit/python2eval.test | 474 ---
test-data/unit/pythoneval-asyncio.test | 486 ---
test-data/unit/pythoneval-enum.test | 134 -
test-data/unit/pythoneval.test | 1214 -------
test-data/unit/semanal-abstractclasses.test | 119 -
test-data/unit/semanal-basic.test | 459 ---
test-data/unit/semanal-classes.test | 623 ----
test-data/unit/semanal-errors.test | 1336 --------
test-data/unit/semanal-expressions.test | 395 ---
test-data/unit/semanal-modules.test | 877 -----
test-data/unit/semanal-namedtuple.test | 177 -
test-data/unit/semanal-python2.test | 76 -
test-data/unit/semanal-statements.test | 929 ------
test-data/unit/semanal-symtable.test | 52 -
test-data/unit/semanal-typealiases.test | 440 ---
test-data/unit/semanal-typeddict.test | 81 -
test-data/unit/semanal-typeinfo.test | 80 -
test-data/unit/semanal-types.test | 1465 ---------
test-data/unit/stubgen.test | 565 ----
test-data/unit/typexport-basic.test | 1159 -------
test-requirements.txt | 9 -
tmp-test-dirs/.gitignore | 4 -
typeshed/stdlib/2/ConfigParser.pyi | 22 +-
typeshed/stdlib/2/SimpleHTTPServer.pyi | 16 +
typeshed/stdlib/2/StringIO.pyi | 8 +-
typeshed/stdlib/2/__builtin__.pyi | 130 +-
typeshed/stdlib/2/__future__.pyi | 13 -
typeshed/stdlib/2/_codecs.pyi | 55 -
typeshed/stdlib/2/_io.pyi | 30 +-
typeshed/stdlib/2/_sre.pyi | 6 +-
typeshed/stdlib/2/_struct.pyi | 8 +-
typeshed/stdlib/2/_warnings.pyi | 6 +-
typeshed/stdlib/2/abc.pyi | 14 +-
typeshed/stdlib/2/array.pyi | 40 +-
typeshed/stdlib/2/base64.pyi | 25 -
typeshed/stdlib/2/binascii.pyi | 21 -
typeshed/stdlib/2/builtins.pyi | 130 +-
typeshed/stdlib/2/cStringIO.pyi | 10 +-
typeshed/stdlib/2/collections.pyi | 29 +-
typeshed/stdlib/2/compileall.pyi | 13 +-
typeshed/stdlib/2/copy.pyi | 10 -
typeshed/stdlib/2/csv.pyi | 8 +-
typeshed/stdlib/2/datetime.pyi | 27 +-
typeshed/stdlib/2/encodings/utf_8.pyi | 6 +-
typeshed/stdlib/2/fnmatch.pyi | 12 +-
typeshed/stdlib/2/hashlib.pyi | 26 +-
typeshed/stdlib/2/inspect.pyi | 42 +-
typeshed/stdlib/2/io.pyi | 27 +-
typeshed/stdlib/2/itertools.pyi | 36 +-
typeshed/stdlib/2/macpath.pyi | 50 +
typeshed/stdlib/2/md5.pyi | 7 +-
typeshed/stdlib/2/ntpath.pyi | 50 +
typeshed/stdlib/2/nturl2path.pyi | 4 +
typeshed/stdlib/2/optparse.pyi | 249 --
typeshed/stdlib/2/os/__init__.pyi | 467 ++-
typeshed/stdlib/2/os/path.pyi | 45 +-
typeshed/stdlib/2/os2emxpath.pyi | 50 +
typeshed/stdlib/2/pickle.pyi | 10 +-
typeshed/stdlib/2/posix.pyi | 2 +-
typeshed/stdlib/2/pprint.pyi | 24 -
typeshed/stdlib/2/pydoc.pyi | 181 ++
typeshed/stdlib/2/quopri.pyi | 8 -
typeshed/stdlib/2/repr.pyi | 31 +
typeshed/stdlib/2/sets.pyi | 61 +
typeshed/stdlib/2/socket.pyi | 362 ---
typeshed/stdlib/2/ssl.pyi | 6 +-
typeshed/stdlib/2/string.pyi | 6 +-
typeshed/stdlib/2/struct.pyi | 28 -
typeshed/stdlib/2/subprocess.pyi | 56 +-
typeshed/stdlib/2/symbol.pyi | 91 +
typeshed/stdlib/2/tempfile.pyi | 6 +-
typeshed/stdlib/2/time.pyi | 4 +-
typeshed/stdlib/2/token.pyi | 62 -
typeshed/stdlib/2/tokenize.pyi | 4 -
typeshed/stdlib/2/types.pyi | 14 +-
typeshed/stdlib/2/typing.pyi | 49 +-
typeshed/stdlib/2/unicodedata.pyi | 40 -
typeshed/stdlib/2/unittest.pyi | 13 +-
typeshed/stdlib/2/urllib2.pyi | 77 +-
typeshed/stdlib/2/uuid.pyi | 36 -
typeshed/stdlib/2/xmlrpclib.pyi | 199 ++
typeshed/stdlib/2/xxsubtype.pyi | 17 -
typeshed/stdlib/2/zlib.pyi | 42 -
typeshed/stdlib/{3 => 2and3}/__future__.pyi | 0
typeshed/stdlib/2and3/_bisect.pyi | 14 +-
typeshed/stdlib/2and3/_codecs.pyi | 74 +
typeshed/stdlib/{2 => 2and3}/_random.pyi | 6 +-
typeshed/stdlib/2and3/asynchat.pyi | 6 +-
typeshed/stdlib/2and3/asyncore.pyi | 2 +-
typeshed/stdlib/{3 => 2and3}/base64.pyi | 8 +-
typeshed/stdlib/2and3/binascii.pyi | 45 +
typeshed/stdlib/2and3/binhex.pyi | 48 +
typeshed/stdlib/2and3/cgi.pyi | 119 +
typeshed/stdlib/2and3/cmd.pyi | 41 +
typeshed/stdlib/{2 => 2and3}/codecs.pyi | 64 +-
typeshed/stdlib/2and3/contextlib.pyi | 7 +-
typeshed/stdlib/2and3/copy.pyi | 14 +
typeshed/stdlib/2and3/dis.pyi | 75 +
typeshed/stdlib/2and3/distutils/core.pyi | 15 +-
typeshed/stdlib/2and3/filecmp.pyi | 48 +
typeshed/stdlib/2and3/fractions.pyi | 6 +-
typeshed/stdlib/2and3/ftplib.pyi | 134 +
typeshed/stdlib/2and3/hmac.pyi | 2 +
typeshed/stdlib/2and3/lib2to3/__init__.pyi | 1 +
typeshed/stdlib/2and3/lib2to3/pgen2/__init__.pyi | 10 +
typeshed/stdlib/2and3/lib2to3/pgen2/driver.pyi | 24 +
typeshed/stdlib/2and3/lib2to3/pgen2/grammar.pyi | 29 +
typeshed/stdlib/2and3/lib2to3/pgen2/literals.pyi | 9 +
typeshed/stdlib/2and3/lib2to3/pgen2/parse.pyi | 29 +
typeshed/stdlib/2and3/lib2to3/pgen2/pgen.pyi | 49 +
typeshed/stdlib/2and3/lib2to3/pgen2/token.pyi | 73 +
typeshed/stdlib/2and3/lib2to3/pgen2/tokenize.pyi | 30 +
typeshed/stdlib/2and3/lib2to3/pygram.pyi | 116 +
typeshed/stdlib/2and3/lib2to3/pytree.pyi | 86 +
typeshed/stdlib/2and3/locale.pyi | 2 +-
typeshed/stdlib/2and3/logging/__init__.pyi | 114 +-
typeshed/stdlib/2and3/logging/handlers.pyi | 2 +-
typeshed/stdlib/2and3/mmap.pyi | 8 +-
typeshed/stdlib/{3 => 2and3}/opcode.pyi | 13 +-
typeshed/stdlib/2and3/optparse.pyi | 226 ++
typeshed/stdlib/2and3/pickletools.pyi | 145 +
typeshed/stdlib/2and3/plistlib.pyi | 2 +-
typeshed/stdlib/2and3/poplib.pyi | 75 +
typeshed/stdlib/{3 => 2and3}/pprint.pyi | 3 +-
typeshed/stdlib/2and3/pstats.pyi | 12 +-
typeshed/stdlib/2and3/pty.pyi | 20 +
typeshed/stdlib/2and3/py_compile.pyi | 20 +
typeshed/stdlib/{3 => 2and3}/pyclbr.pyi | 2 +-
typeshed/stdlib/2and3/quopri.pyi | 8 +
typeshed/stdlib/2and3/sched.pyi | 29 +
typeshed/stdlib/2and3/smtpd.pyi | 87 +
typeshed/stdlib/2and3/sndhdr.pyi | 25 +
typeshed/stdlib/{3 => 2and3}/socket.pyi | 96 +-
typeshed/stdlib/{2 => 2and3}/spwd.pyi | 4 +-
typeshed/stdlib/2and3/stringprep.pyi | 23 +
typeshed/stdlib/2and3/struct.pyi | 40 +
typeshed/stdlib/2and3/sunau.pyi | 88 +
typeshed/stdlib/2and3/symtable.pyi | 45 +
typeshed/stdlib/2and3/sysconfig.pyi | 19 +
typeshed/stdlib/2and3/tabnanny.pyi | 22 +
typeshed/stdlib/2and3/tarfile.pyi | 6 +-
typeshed/stdlib/2and3/telnetlib.pyi | 115 +
typeshed/stdlib/2and3/timeit.pyi | 33 +
typeshed/stdlib/2and3/token.pyi | 71 +
typeshed/stdlib/2and3/trace.pyi | 35 +
typeshed/stdlib/2and3/traceback.pyi | 8 +-
typeshed/stdlib/2and3/tty.pyi | 13 +
typeshed/stdlib/2and3/unicodedata.pyi | 38 +
typeshed/stdlib/2and3/uu.pyi | 10 +
typeshed/stdlib/2and3/uuid.pyi | 87 +
typeshed/stdlib/2and3/wave.pyi | 75 +
typeshed/stdlib/2and3/webbrowser.pyi | 42 +-
typeshed/stdlib/2and3/xdrlib.pyi | 56 +
typeshed/stdlib/2and3/xml/etree/ElementTree.pyi | 136 +-
typeshed/stdlib/2and3/xml/sax/__init__.pyi | 27 +-
typeshed/stdlib/{3 => 2and3}/zlib.pyi | 25 +-
typeshed/stdlib/3.4/asyncio/__init__.pyi | 8 +
typeshed/stdlib/3.4/asyncio/events.pyi | 111 +-
typeshed/stdlib/3.4/asyncio/futures.pyi | 3 +
typeshed/stdlib/3.4/asyncio/locks.pyi | 20 +-
typeshed/stdlib/3.4/asyncio/queues.pyi | 28 +-
typeshed/stdlib/3.4/asyncio/streams.pyi | 6 +-
typeshed/stdlib/3.4/asyncio/subprocess.pyi | 17 +-
typeshed/stdlib/3.4/asyncio/tasks.pyi | 2 +-
typeshed/stdlib/3.4/enum.pyi | 25 +-
typeshed/stdlib/3.4/pathlib.pyi | 20 +-
typeshed/stdlib/3.4/statistics.pyi | 24 +
typeshed/stdlib/3.4/tracemalloc.pyi | 65 +
typeshed/stdlib/3.5/zipapp.pyi | 11 +
typeshed/stdlib/3/_codecs.pyi | 51 -
typeshed/stdlib/3/_imp.pyi | 22 +
typeshed/stdlib/3/_importlib_modulespec.pyi | 5 +-
typeshed/stdlib/3/_markupbase.pyi | 2 +-
typeshed/stdlib/3/_operator.pyi | 128 +-
typeshed/stdlib/3/_posixsubprocess.pyi | 9 +-
typeshed/stdlib/3/_random.pyi | 12 -
typeshed/stdlib/3/_warnings.pyi | 6 +-
typeshed/stdlib/3/abc.pyi | 21 +-
typeshed/stdlib/3/binascii.pyi | 26 -
typeshed/stdlib/3/builtins.pyi | 196 +-
typeshed/stdlib/3/calendar.pyi | 2 +-
typeshed/stdlib/3/cgi.pyi | 4 -
typeshed/stdlib/3/codecs.pyi | 194 --
typeshed/stdlib/3/collections/__init__.pyi | 63 +-
typeshed/stdlib/3/collections/abc.pyi | 1 +
typeshed/stdlib/3/compileall.pyi | 18 +
typeshed/stdlib/3/concurrent/futures/_base.pyi | 4 +-
typeshed/stdlib/3/copy.pyi | 10 -
typeshed/stdlib/3/datetime.pyi | 2 +-
typeshed/stdlib/3/difflib.pyi | 2 +-
typeshed/stdlib/3/dis.pyi | 63 -
typeshed/stdlib/3/email/message.pyi | 2 +-
typeshed/stdlib/3/encodings/utf_8.pyi | 6 +-
typeshed/stdlib/3/fileinput.pyi | 29 +-
typeshed/stdlib/3/hashlib.pyi | 24 +-
typeshed/stdlib/3/http/__init__.pyi | 114 +-
typeshed/stdlib/3/http/client.pyi | 4 +
typeshed/stdlib/3/imp.pyi | 55 +-
typeshed/stdlib/3/inspect.pyi | 4 +-
typeshed/stdlib/3/io.pyi | 4 +-
typeshed/stdlib/3/itertools.pyi | 16 +-
typeshed/stdlib/3/json.pyi | 88 -
typeshed/stdlib/3/json/__init__.pyi | 51 +
typeshed/stdlib/3/json/decoder.pyi | 28 +
typeshed/stdlib/3/json/encoder.pyi | 20 +
typeshed/stdlib/3/linecache.pyi | 6 +-
typeshed/stdlib/3/macpath.pyi | 46 +
typeshed/stdlib/3/nntplib.pyi | 104 +
typeshed/stdlib/3/ntpath.pyi | 46 +
typeshed/stdlib/3/nturl2path.pyi | 2 +
typeshed/stdlib/3/os/__init__.pyi | 213 +-
typeshed/stdlib/3/os/path.pyi | 56 +-
typeshed/stdlib/3/queue.pyi | 2 +-
typeshed/stdlib/3/reprlib.pyi | 37 +
typeshed/stdlib/3/shlex.pyi | 4 +-
typeshed/stdlib/3/shutil.pyi | 96 +-
typeshed/stdlib/3/sqlite3/dbapi2.pyi | 19 +-
typeshed/stdlib/3/ssl.pyi | 4 +-
typeshed/stdlib/3/string.pyi | 6 +-
typeshed/stdlib/3/struct.pyi | 30 -
typeshed/stdlib/3/subprocess.pyi | 306 +-
typeshed/stdlib/3/symbol.pyi | 98 +
typeshed/stdlib/3/sysconfig.pyi | 8 -
typeshed/stdlib/3/tempfile.pyi | 117 +-
typeshed/stdlib/3/time.pyi | 8 +-
typeshed/stdlib/3/token.pyi | 63 -
typeshed/stdlib/3/tokenize.pyi | 47 +-
typeshed/stdlib/3/types.pyi | 2 +-
typeshed/stdlib/3/typing.pyi | 51 +-
typeshed/stdlib/3/unicodedata.pyi | 37 -
typeshed/stdlib/3/unittest/__init__.pyi | 48 +-
typeshed/stdlib/3/unittest/mock.pyi | 140 +-
typeshed/stdlib/3/urllib/parse.pyi | 19 +-
typeshed/stdlib/3/urllib/request.pyi | 12 +-
typeshed/stdlib/3/urllib/response.pyi | 3 +
typeshed/stdlib/3/uuid.pyi | 73 -
typeshed/tests/mypy_selftest.py | 27 +
typeshed/tests/mypy_test.py | 4 +-
.../third_party/2/concurrent/futures/__init__.pyi | 14 +-
typeshed/third_party/2/dateutil/relativedelta.pyi | 49 +-
typeshed/third_party/2/dateutil/tz/tz.pyi | 2 +-
.../third_party/2/google/protobuf/descriptor.pyi | 2 +-
typeshed/third_party/2/requests/__init__.pyi | 39 -
typeshed/third_party/2/requests/adapters.pyi | 72 -
typeshed/third_party/2/requests/api.pyi | 26 -
typeshed/third_party/2/requests/cookies.pyi | 61 -
typeshed/third_party/2/requests/models.pyi | 135 -
.../2/requests/packages/urllib3/__init__.pyi | 12 -
.../2/requests/packages/urllib3/connection.pyi | 51 -
.../2/requests/packages/urllib3/connectionpool.pyi | 87 -
.../packages/ssl_match_hostname/__init__.pyi | 1 -
.../2/requests/packages/urllib3/response.pyi | 58 -
.../2/requests/packages/urllib3/util/__init__.pyi | 6 -
typeshed/third_party/2/requests/sessions.pyi | 106 -
typeshed/third_party/2/six/moves/__init__.pyi | 53 +-
typeshed/third_party/2/six/moves/cPickle.pyi | 6 -
.../third_party/2/six/moves/urllib/__init__.pyi | 5 -
typeshed/third_party/2/six/moves/urllib/error.pyi | 5 -
typeshed/third_party/2/six/moves/urllib/parse.pyi | 54 +-
.../third_party/2/six/moves/urllib/request.pyi | 6 +-
.../third_party/2/six/moves/urllib/response.pyi | 6 +-
.../third_party/2/six/moves/urllib/robotparser.pyi | 5 -
typeshed/third_party/2/six/moves/urllib_error.pyi | 10 -
typeshed/third_party/2/six/moves/urllib_parse.pyi | 28 -
.../third_party/2/six/moves/urllib_request.pyi | 40 -
.../third_party/2/six/moves/urllib_response.pyi | 11 -
.../third_party/2/six/moves/urllib_robotparser.pyi | 8 -
typeshed/third_party/2/werkzeug/wrappers.pyi | 117 +-
typeshed/third_party/2and3/boto/kms/__init__.pyi | 9 +
typeshed/third_party/2and3/boto/kms/exceptions.pyi | 21 +
typeshed/third_party/2and3/boto/kms/layer1.pyi | 41 +
typeshed/third_party/2and3/boto/s3/bucket.pyi | 2 +-
.../third_party/2and3/characteristic/__init__.pyi | 4 +-
typeshed/third_party/2and3/jinja2/environment.pyi | 10 +
typeshed/third_party/2and3/mypy_extensions.pyi | 12 +-
typeshed/third_party/2and3/pytz/__init__.pyi | 33 +-
.../third_party/{3 => 2and3}/requests/__init__.pyi | 0
.../third_party/{3 => 2and3}/requests/adapters.pyi | 8 +-
typeshed/third_party/2and3/requests/api.pyi | 26 +
.../third_party/{2 => 2and3}/requests/auth.pyi | 0
.../third_party/{2 => 2and3}/requests/compat.pyi | 0
.../third_party/{3 => 2and3}/requests/cookies.pyi | 10 +-
.../{2 => 2and3}/requests/exceptions.pyi | 0
.../third_party/{2 => 2and3}/requests/hooks.pyi | 0
.../third_party/{3 => 2and3}/requests/models.pyi | 8 +-
.../{2 => 2and3}/requests/packages/__init__.pyi | 0
.../requests/packages/urllib3/__init__.pyi | 0
.../requests/packages/urllib3/_collections.pyi | 0
.../requests/packages/urllib3/connection.pyi | 19 +-
.../requests/packages/urllib3/connectionpool.pyi | 0
.../requests/packages/urllib3/contrib/__init__.pyi | 0
.../requests/packages/urllib3/exceptions.pyi | 0
.../requests/packages/urllib3/fields.pyi | 0
.../requests/packages/urllib3/filepost.pyi | 0
.../packages/urllib3/packages/__init__.pyi | 0
.../packages/ssl_match_hostname/__init__.pyi | 0
.../ssl_match_hostname/_implementation.pyi | 0
.../requests/packages/urllib3/poolmanager.pyi | 0
.../requests/packages/urllib3/request.pyi | 0
.../requests/packages/urllib3/response.pyi | 6 +-
.../requests/packages/urllib3/util/__init__.pyi | 0
.../requests/packages/urllib3/util/connection.pyi | 0
.../requests/packages/urllib3/util/request.pyi | 0
.../requests/packages/urllib3/util/response.pyi | 0
.../requests/packages/urllib3/util/retry.pyi | 0
.../requests/packages/urllib3/util/ssl_.pyi | 0
.../requests/packages/urllib3/util/timeout.pyi | 0
.../requests/packages/urllib3/util/url.pyi | 0
.../third_party/{3 => 2and3}/requests/sessions.pyi | 46 +-
.../{2 => 2and3}/requests/status_codes.pyi | 0
.../{2 => 2and3}/requests/structures.pyi | 6 +-
.../third_party/{2 => 2and3}/requests/utils.pyi | 0
typeshed/third_party/2and3/sqlalchemy/__init__.pyi | 124 -
.../2and3/sqlalchemy/databases/__init__.pyi | 12 -
.../2and3/sqlalchemy/databases/mysql.pyi | 1 -
.../2and3/sqlalchemy/dialects/__init__.pyi | 12 -
.../2and3/sqlalchemy/dialects/mysql/__init__.pyi | 40 -
.../2and3/sqlalchemy/dialects/mysql/base.pyi | 413 ---
.../2and3/sqlalchemy/engine/__init__.pyi | 11 -
.../third_party/2and3/sqlalchemy/engine/base.pyi | 21 -
.../2and3/sqlalchemy/engine/strategies.pyi | 39 -
.../third_party/2and3/sqlalchemy/engine/url.pyi | 27 -
typeshed/third_party/2and3/sqlalchemy/exc.pyi | 77 -
.../third_party/2and3/sqlalchemy/inspection.pyi | 5 -
typeshed/third_party/2and3/sqlalchemy/log.pyi | 14 -
.../third_party/2and3/sqlalchemy/orm/__init__.pyi | 95 -
.../third_party/2and3/sqlalchemy/orm/session.pyi | 93 -
typeshed/third_party/2and3/sqlalchemy/orm/util.pyi | 12 -
typeshed/third_party/2and3/sqlalchemy/pool.pyi | 118 -
typeshed/third_party/2and3/sqlalchemy/schema.pyi | 50 -
.../third_party/2and3/sqlalchemy/sql/__init__.pyi | 66 -
.../2and3/sqlalchemy/sql/annotation.pyi | 11 -
typeshed/third_party/2and3/sqlalchemy/sql/base.pyi | 42 -
typeshed/third_party/2and3/sqlalchemy/sql/ddl.pyi | 25 -
typeshed/third_party/2and3/sqlalchemy/sql/dml.pyi | 20 -
.../third_party/2and3/sqlalchemy/sql/elements.pyi | 93 -
.../2and3/sqlalchemy/sql/expression.pyi | 87 -
.../third_party/2and3/sqlalchemy/sql/functions.pyi | 47 -
.../third_party/2and3/sqlalchemy/sql/naming.pyi | 1 -
.../third_party/2and3/sqlalchemy/sql/operators.pyi | 99 -
.../third_party/2and3/sqlalchemy/sql/schema.pyi | 126 -
.../2and3/sqlalchemy/sql/selectable.pyi | 76 -
.../third_party/2and3/sqlalchemy/sql/sqltypes.pyi | 57 -
.../third_party/2and3/sqlalchemy/sql/type_api.pyi | 16 -
.../third_party/2and3/sqlalchemy/sql/visitors.pyi | 33 -
typeshed/third_party/2and3/sqlalchemy/types.pyi | 51 -
.../third_party/2and3/sqlalchemy/util/__init__.pyi | 133 -
.../2and3/sqlalchemy/util/_collections.pyi | 214 --
.../third_party/2and3/sqlalchemy/util/compat.pyi | 67 -
.../2and3/sqlalchemy/util/deprecations.pyi | 13 -
.../2and3/sqlalchemy/util/langhelpers.pyi | 134 -
.../third_party/{2 => 2and3}/yaml/__init__.pyi | 2 +-
.../third_party/{2 => 2and3}/yaml/composer.pyi | 0
.../third_party/{2 => 2and3}/yaml/constructor.pyi | 0
typeshed/third_party/{2 => 2and3}/yaml/dumper.pyi | 0
typeshed/third_party/{2 => 2and3}/yaml/emitter.pyi | 0
typeshed/third_party/{2 => 2and3}/yaml/error.pyi | 0
typeshed/third_party/{2 => 2and3}/yaml/events.pyi | 0
typeshed/third_party/{2 => 2and3}/yaml/loader.pyi | 0
typeshed/third_party/{2 => 2and3}/yaml/nodes.pyi | 0
typeshed/third_party/{2 => 2and3}/yaml/parser.pyi | 0
typeshed/third_party/{2 => 2and3}/yaml/reader.pyi | 0
.../third_party/{2 => 2and3}/yaml/representer.pyi | 0
.../third_party/{2 => 2and3}/yaml/resolver.pyi | 0
typeshed/third_party/{2 => 2and3}/yaml/scanner.pyi | 0
.../third_party/{2 => 2and3}/yaml/serializer.pyi | 0
typeshed/third_party/{2 => 2and3}/yaml/tokens.pyi | 0
typeshed/third_party/3.6/click/core.pyi | 10 +-
typeshed/third_party/3.6/click/decorators.pyi | 32 +-
typeshed/third_party/3.6/click/termui.pyi | 8 +-
typeshed/third_party/3.6/click/types.pyi | 22 +-
typeshed/third_party/3.6/click/utils.pyi | 8 +-
typeshed/third_party/3/dateutil/parser.pyi | 10 +-
typeshed/third_party/3/dateutil/relativedelta.pyi | 6 +-
typeshed/third_party/3/dateutil/tz/_common.pyi | 2 +-
typeshed/third_party/3/dateutil/tz/tz.pyi | 4 +-
typeshed/third_party/3/enum.pyi | 25 +-
typeshed/third_party/3/itsdangerous.pyi | 97 +-
typeshed/third_party/3/lxml/etree.pyi | 135 +-
typeshed/third_party/3/requests/api.pyi | 26 -
typeshed/third_party/3/requests/auth.pyi | 41 -
typeshed/third_party/3/requests/compat.pyi | 6 -
typeshed/third_party/3/requests/exceptions.pyi | 26 -
typeshed/third_party/3/requests/hooks.pyi | 8 -
.../third_party/3/requests/packages/__init__.pyi | 8 -
.../3/requests/packages/urllib3/_collections.pyi | 51 -
.../requests/packages/urllib3/contrib/__init__.pyi | 3 -
.../3/requests/packages/urllib3/exceptions.pyi | 54 -
.../3/requests/packages/urllib3/fields.pyi | 16 -
.../3/requests/packages/urllib3/filepost.pyi | 19 -
.../packages/urllib3/packages/__init__.pyi | 3 -
.../ssl_match_hostname/_implementation.pyi | 7 -
.../3/requests/packages/urllib3/poolmanager.pyi | 31 -
.../3/requests/packages/urllib3/request.pyi | 13 -
.../requests/packages/urllib3/util/connection.pyi | 11 -
.../3/requests/packages/urllib3/util/request.pyi | 12 -
.../3/requests/packages/urllib3/util/response.pyi | 5 -
.../3/requests/packages/urllib3/util/retry.pyi | 36 -
.../3/requests/packages/urllib3/util/timeout.pyi | 24 -
.../3/requests/packages/urllib3/util/url.pyi | 26 -
typeshed/third_party/3/requests/status_codes.pyi | 8 -
typeshed/third_party/3/requests/structures.pyi | 12 -
typeshed/third_party/3/requests/utils.pyi | 52 -
typeshed/third_party/3/six/moves/__init__.pyi | 60 +-
typeshed/third_party/3/six/moves/cPickle.pyi | 6 -
.../third_party/3/six/moves/urllib/__init__.pyi | 5 -
typeshed/third_party/3/six/moves/urllib/error.pyi | 5 -
typeshed/third_party/3/six/moves/urllib/parse.pyi | 47 +-
.../third_party/3/six/moves/urllib/request.pyi | 69 +-
.../third_party/3/six/moves/urllib/response.pyi | 7 +
.../third_party/3/six/moves/urllib/robotparser.pyi | 5 -
typeshed/third_party/3/six/moves/urllib_error.pyi | 10 -
typeshed/third_party/3/six/moves/urllib_parse.pyi | 20 -
.../third_party/3/six/moves/urllib_request.pyi | 41 -
.../third_party/3/six/moves/urllib_response.pyi | 1 -
.../third_party/3/six/moves/urllib_robotparser.pyi | 8 -
typeshed/third_party/3/typed_ast/__init__.pyi | 4 +-
.../3/typed_ast/{ast35.pyi => ast3.pyi} | 26 +-
typeshed/third_party/3/typed_ast/conversions.pyi | 4 +-
typeshed/third_party/3/werkzeug/wrappers.pyi | 117 +-
723 files changed, 12276 insertions(+), 105046 deletions(-)
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index 731180e..0000000
--- a/.gitignore
+++ /dev/null
@@ -1,29 +0,0 @@
-build/
-__pycache__
-*.py[cod]
-*~
-@*
-/build
-/env
-docs/build/
-*.iml
-/out/
-.venv/
-.mypy_cache/
-.incremental_checker_cache.json
-.cache
-
-# Packages
-*.egg
-*.egg-info
-
-# IDEs
-.idea
-*.swp
-
-# Operating Systems
-.DS_store
-
-# Coverage Files
-htmlcov
-.coverage*
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index 6b366ad..0000000
--- a/.gitmodules
+++ /dev/null
@@ -1,3 +0,0 @@
-[submodule "typeshed"]
- path = typeshed
- url = http://github.com/python/typeshed
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 4c25d0e..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-sudo: false
-language: python
-python:
- - "3.3"
- - "3.4"
- # Specifically request 3.5.1 because we need to be compatible with that.
- - "3.5.1"
- - "3.6"
- - "3.7-dev"
- # Pypy build is disabled because it doubles the travis build time, and it rarely fails
- # unless one of the other builds fails.
- # - "pypy3"
-
-install:
- - pip install -r test-requirements.txt
- - python setup.py install
-
-script:
- - python runtests.py -x lint
- - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8; fi
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
deleted file mode 100644
index c01af3b..0000000
--- a/CONTRIBUTING.md
+++ /dev/null
@@ -1,152 +0,0 @@
-Contributing to Mypy
-====================
-
-Welcome! Mypy is a community project that aims to work for a wide
-range of Python users and Python codebases. If you're trying Mypy on
-your Python code, your experience and what you can contribute are
-important to the project's success.
-
-
-Getting started, building, and testing
---------------------------------------
-
-If you haven't already, take a look at the project's
-[README.md file](README.md)
-and the [Mypy documentation](http://mypy.readthedocs.io/en/latest/),
-and try adding type annotations to your file and type-checking it with Mypy.
-
-
-Discussion
-----------
-
-If you've run into behavior in Mypy you don't understand, or you're
-having trouble working out a good way to apply it to your code, or
-you've found a bug or would like a feature it doesn't have, we want to
-hear from you!
-
-Our main forum for discussion is the project's [GitHub issue
-tracker](https://github.com/python/mypy/issues). This is the right
-place to start a discussion of any of the above or most any other
-topic concerning the project.
-
-For less formal discussion we have a chat room on
-[gitter.im](https://gitter.im/python/mypy). Some Mypy core developers
-are almost always present; feel free to find us there and we're happy
-to chat. Substantive technical discussion will be directed to the
-issue tracker.
-
-(We also have an IRC channel, `#python-mypy` on irc.freenode.net.
-This is lightly used; we have mostly switched to the gitter room
-mentioned above.)
-
-#### Code of Conduct
-
-Everyone participating in the Mypy community, and in particular in our
-issue tracker, pull requests, and IRC channel, is expected to treat
-other people with respect and more generally to follow the guidelines
-articulated in the [Python Community Code of
-Conduct](https://www.python.org/psf/codeofconduct/).
-
-
-Submitting Changes
-------------------
-
-Even more excellent than a good bug report is a fix for a bug, or the
-implementation of a much-needed new feature. (*) We'd love to have
-your contributions.
-
-(*) If your new feature will be a lot of work, we recommend talking to
- us early -- see below.
-
-We use the usual GitHub pull-request flow, which may be familiar to
-you if you've contributed to other projects on GitHub. For the mechanics,
-see [our git and GitHub workflow help page](https://github.com/python/mypy/wiki/Using-Git-And-GitHub),
-or [GitHub's own documentation](https://help.github.com/articles/using-pull-requests/).
-
-Anyone interested in Mypy may review your code. One of the Mypy core
-developers will merge your pull request when they think it's ready.
-For every pull request, we aim to promptly either merge it or say why
-it's not yet ready; if you go a few days without a reply, please feel
-free to ping the thread by adding a new comment.
-
-At present the core developers are (alphabetically):
-* David Fisher (@ddfisher)
-* Jukka Lehtosalo (@JukkaL)
-* Greg Price (@gnprice)
-* Guido van Rossum (@gvanrossum)
-
-
-Preparing Changes
------------------
-
-Before you begin: if your change will be a significant amount of work
-to write, we highly recommend starting by opening an issue laying out
-what you want to do. That lets a conversation happen early in case
-other contributors disagree with what you'd like to do or have ideas
-that will help you do it.
-
-The best pull requests are focused, clearly describe what they're for
-and why they're correct, and contain tests for whatever changes they
-make to the code's behavior. As a bonus these are easiest for someone
-to review, which helps your pull request get merged quickly! Standard
-advice about good pull requests for open-source projects applies; we
-have [our own writeup](https://github.com/python/mypy/wiki/Good-Pull-Request)
-of this advice.
-
-See also our [coding conventions](https://github.com/python/mypy/wiki/Code-Conventions) --
-which consist mainly of a reference to
-[PEP 8](https://www.python.org/dev/peps/pep-0008/) -- for the code you
-put in the pull request.
-
-You may also find other pages in the
-[Mypy developer guide](https://github.com/python/mypy/wiki/Developer-Guides)
-helpful in developing your change.
-
-
-Issue-tracker conventions
--------------------------
-
-We aim to reply to all new issues promptly. We'll assign a milestone
-to help us track which issues we intend to get to when, and may apply
-labels to carry some other information. Here's what our milestones
-and labels mean.
-
-### Task priority and sizing
-
-We use GitHub "labels" ([see our
-list](https://github.com/python/mypy/labels)) to roughly order what we
-want to do soon and less soon. There are two dimensions taken into
-account: **priority** (does it matter to our users) and **size** (how
-long will it take to complete).
-
-Bugs that aren't a huge deal but do matter to users and don't seem
-like a lot of work to fix generally will be dealt with sooner; things
-that will take longer may go further out.
-
-We are trying to keep the backlog at a manageable size; an issue that is
-unlikely to be acted upon in the foreseeable future is going to be
-respectfully closed. This doesn't mean the issue is not important, but
-rather reflects the limits of the team.
-
-The **question** label is for issue threads where a user is asking a
-question but it isn't yet clear that it represents something to actually
-change. We use the issue tracker as the preferred venue for such
-questions, even when they aren't literally issues, to keep down the
-number of distinct discussion venues anyone needs to track. These might
-evolve into a bug or feature request.
-
-Issues **without a priority or size** haven't been triaged. We aim to
-triage all new issues promptly, but there are some issues from previous
-years that we haven't yet re-reviewed since adopting these conventions.
-
-### Other labels
-
-* **needs discussion**: This issue needs agreement on some kind of
- design before it makes sense to implement it, and it either doesn't
- yet have a design or doesn't yet have agreement on one.
-* **feature**, **bug**, **crash**, **refactoring**, **documentation**:
- These classify the user-facing impact of the change. Specifically
- "refactoring" means there should be no user-facing effect.
-* **topic-** labels group issues touching a similar aspect of the
- project, for example PEP 484 compatibility, a specific command-line
- option or dependency.
diff --git a/CREDITS b/CREDITS
deleted file mode 100644
index d4fe9ee..0000000
--- a/CREDITS
+++ /dev/null
@@ -1,101 +0,0 @@
-Credits
--------
-
-Lead developer:
-
- Jukka Lehtosalo <jukka.lehtosalo at iki.fi>
-
-Core team:
-
- Guido <guido at dropbox.com>
- David Fisher <ddfisher at dropbox.com>
- Greg Price <gregprice at dropbox.com>
-
-Contributors (in alphabetical order, including typeshed):
-
- Tim Abbott
- Steven Allen (@Stebalien)
- Della Anjeh
- Reid Barton (@rwbarton)
- Matthias Bussonnier
- Anup Chenthamarakshan
- Kyle Consalus
- Ian Cordasco
- ctcutler
- Ben Darnell
- Miguel Garcia (@rockneurotiko)
- Mihnea Giurgea
- Ryan Gonzalez (@kirbyfan64)
- James Guthrie
- Jared Hance
- Ashley Hewson (@ashleyh)
- icoxfog417
- Bob Ippolito (@etrepum)
- ismail-s
- Sander Kersten (@spkersten)
- Matthias Kramm
- Ian Kronquist (@iankronquist)
- Yuval Langer
- Howard Lee
- Tad Leonard
- Li Haoyi
- Darjus Loktevic
- Ben Longbons
- Florian Ludwig (@FlorianLudwig)
- Robert T. McGibbon
- Ron Murawski <ron at horizonchess.com>
- Motoki Naruse
- Jared Pochtar (@jaredp)
- Michal Pokorný
- Eric Price (@ecprice)
- Brodie Rao
- Sebastian Reuße
- Sebastian Riikonen
- Seo Sanghyeon
- Marek Sapota
- Gigi Sayfan
- Vlad Shcherbina
- Anders Schuller
- Daniel Shaulov
- David Shea
- Vita Smid
- Schuyler Smith
- Marcell Vazquez-Chanlatte (@mvcisback)
- Prayag Verma
- Igor Vuk (@ivuk)
- Jeff Walden (@jswalden)
- Michael Walter
- Jing Wang
- Wen Zhang
- Roy Williams
- wizzardx
- Matthew Wright
- Yuanchao Zhu (@yczhu)
- Gennadiy Zlobin (@gennad)
-
-Additional thanks to:
-
- Alex Allain
- Max Bolingbroke
- Peter Calvert
- Kannan Goundan
- Kathy Gray
- David J Greaves
- Riitta Ikonen
- Terho Ikonen
- Stephen Kell
- Łukasz Langa
- Laura Lehtosalo
- Peter Ludemann
- Seppo Mattila
- Robin Message
- Alan Mycroft
- Dominic Orchard
- Pekka Rapinoja
- Matt Robben
- Satnam Singh
- Juha Sorva
- Clay Sweetser
- Jorma Tarhio
- Jussi Tuovila
- Andrey Vlasovskikh
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index 8145cc3..0000000
--- a/LICENSE
+++ /dev/null
@@ -1,228 +0,0 @@
-Mypy is licensed under the terms of the MIT license, reproduced below.
-
-= = = = =
-
-The MIT License
-
-Copyright (c) 2015-2016 Jukka Lehtosalo and contributors
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
-= = = = =
-
-Portions of mypy are licensed under different licenses. The files
-under stdlib-samples and lib-typing are licensed under the PSF 2
-License, reproduced below.
-
-= = = = =
-
-PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
---------------------------------------------
-
-1. This LICENSE AGREEMENT is between the Python Software Foundation
-("PSF"), and the Individual or Organization ("Licensee") accessing and
-otherwise using this software ("Python") in source or binary form and
-its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, PSF hereby
-grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
-analyze, test, perform and/or display publicly, prepare derivative works,
-distribute, and otherwise use Python alone or in any derivative version,
-provided, however, that PSF's License Agreement and PSF's notice of copyright,
-i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python
-alone or in any derivative version prepared by Licensee.
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python.
-
-4. PSF is making Python available to Licensee on an "AS IS"
-basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. Nothing in this License Agreement shall be deemed to create any
-relationship of agency, partnership, or joint venture between PSF and
-Licensee. This License Agreement does not grant permission to use PSF
-trademarks or trade name in a trademark sense to endorse or promote
-products or services of Licensee, or any third party.
-
-8. By copying, installing or otherwise using Python, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-
-
-BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
--------------------------------------------
-
-BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
-
-1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
-office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
-Individual or Organization ("Licensee") accessing and otherwise using
-this software in source or binary form and its associated
-documentation ("the Software").
-
-2. Subject to the terms and conditions of this BeOpen Python License
-Agreement, BeOpen hereby grants Licensee a non-exclusive,
-royalty-free, world-wide license to reproduce, analyze, test, perform
-and/or display publicly, prepare derivative works, distribute, and
-otherwise use the Software alone or in any derivative version,
-provided, however, that the BeOpen Python License is retained in the
-Software, alone or in any derivative version prepared by Licensee.
-
-3. BeOpen is making the Software available to Licensee on an "AS IS"
-basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
-SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
-AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
-DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-5. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-6. This License Agreement shall be governed by and interpreted in all
-respects by the law of the State of California, excluding conflict of
-law provisions. Nothing in this License Agreement shall be deemed to
-create any relationship of agency, partnership, or joint venture
-between BeOpen and Licensee. This License Agreement does not grant
-permission to use BeOpen trademarks or trade names in a trademark
-sense to endorse or promote products or services of Licensee, or any
-third party. As an exception, the "BeOpen Python" logos available at
-http://www.pythonlabs.com/logos.html may be used according to the
-permissions granted on that web page.
-
-7. By copying, installing or otherwise using the software, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-
-
-CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
----------------------------------------
-
-1. This LICENSE AGREEMENT is between the Corporation for National
-Research Initiatives, having an office at 1895 Preston White Drive,
-Reston, VA 20191 ("CNRI"), and the Individual or Organization
-("Licensee") accessing and otherwise using Python 1.6.1 software in
-source or binary form and its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, CNRI
-hereby grants Licensee a nonexclusive, royalty-free, world-wide
-license to reproduce, analyze, test, perform and/or display publicly,
-prepare derivative works, distribute, and otherwise use Python 1.6.1
-alone or in any derivative version, provided, however, that CNRI's
-License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
-1995-2001 Corporation for National Research Initiatives; All Rights
-Reserved" are retained in Python 1.6.1 alone or in any derivative
-version prepared by Licensee. Alternately, in lieu of CNRI's License
-Agreement, Licensee may substitute the following text (omitting the
-quotes): "Python 1.6.1 is made available subject to the terms and
-conditions in CNRI's License Agreement. This Agreement together with
-Python 1.6.1 may be located on the Internet using the following
-unique, persistent identifier (known as a handle): 1895.22/1013. This
-Agreement may also be obtained from a proxy server on the Internet
-using the following URL: http://hdl.handle.net/1895.22/1013".
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python 1.6.1 or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python 1.6.1.
-
-4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
-basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. This License Agreement shall be governed by the federal
-intellectual property law of the United States, including without
-limitation the federal copyright law, and, to the extent such
-U.S. federal law does not apply, by the law of the Commonwealth of
-Virginia, excluding Virginia's conflict of law provisions.
-Notwithstanding the foregoing, with regard to derivative works based
-on Python 1.6.1 that incorporate non-separable material that was
-previously distributed under the GNU General Public License (GPL), the
-law of the Commonwealth of Virginia shall govern this License
-Agreement only as to issues arising under or with respect to
-Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
-License Agreement shall be deemed to create any relationship of
-agency, partnership, or joint venture between CNRI and Licensee. This
-License Agreement does not grant permission to use CNRI trademarks or
-trade name in a trademark sense to endorse or promote products or
-services of Licensee, or any third party.
-
-8. By clicking on the "ACCEPT" button where indicated, or by copying,
-installing or otherwise using Python 1.6.1, Licensee agrees to be
-bound by the terms and conditions of this License Agreement.
-
- ACCEPT
-
-
-CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
---------------------------------------------------
-
-Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
-The Netherlands. All rights reserved.
-
-Permission to use, copy, modify, and distribute this software and its
-documentation for any purpose and without fee is hereby granted,
-provided that the above copyright notice appear in all copies and that
-both that copyright notice and this permission notice appear in
-supporting documentation, and that the name of Stichting Mathematisch
-Centrum or CWI not be used in advertising or publicity pertaining to
-distribution of the software without specific, written prior
-permission.
-
-STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
-THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
-FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
-OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-= = = = =
\ No newline at end of file
diff --git a/PKG-INFO b/PKG-INFO
index 236181d..135e4bd 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: mypy
-Version: 0.480.dev0
+Version: 0.511
Summary: Optional static typing for Python
Home-page: http://www.mypy-lang.org/
Author: Jukka Lehtosalo
diff --git a/README.md b/README.md
deleted file mode 100644
index 159038d..0000000
--- a/README.md
+++ /dev/null
@@ -1,310 +0,0 @@
-Mypy: Optional Static Typing for Python
-=======================================
-
-[![Build Status](https://travis-ci.org/python/mypy.svg)](https://travis-ci.org/python/mypy)
-[![Chat at https://gitter.im/python/mypy](https://badges.gitter.im/python/mypy.svg)](https://gitter.im/python/mypy?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
-
-
-Got a question? File an issue!
-------------------------------
-
-We don't have a mailing list; but we are always happy to answer
-questions on [gitter chat](https://gitter.im/python/mypy) or filed as
-issues in our trackers:
-
-- [mypy tracker](https://github.com/python/mypy/issues)
- for mypy issues
-- [typeshed tracker](https://github.com/python/typeshed/issues)
- for issues with specific modules
-- [typing tracker](https://github.com/python/typing/issues)
- for discussion of new type system features (PEP 484 changes) and
- runtime bugs in the typing module
-
-What is mypy?
--------------
-
-Mypy is an optional static type checker for Python. You can add type
-hints to your Python programs using the standard for type
-annotations introduced in Python 3.5 ([PEP 484](https://www.python.org/dev/peps/pep-0484/)), and use mypy to
-type check them statically. Find bugs in your programs without even
-running them!
-
-The type annotation standard has also been backported to earlier
-Python 3.x versions. Mypy supports Python 3.3 and later.
-
-For Python 2.7, you can add annotations as comments (this is also
-specified in [PEP 484](https://www.python.org/dev/peps/pep-0484/)).
-
-You can mix dynamic and static typing in your programs. You can always
-fall back to dynamic typing when static typing is not convenient, such
-as for legacy code.
-
-Here is a small example to whet your appetite:
-
-```python
-from typing import Iterator
-
-def fib(n: int) -> Iterator[int]:
-    a, b = 0, 1
-    while a < n:
-        yield a
-        a, b = b, a + b
-```
-
-Mypy is in development; some features are missing and there are bugs.
-See 'Development status' below.
-
-
-Requirements
-------------
-
-You need Python 3.3 or later to run mypy. You can have multiple Python
-versions (2.x and 3.x) installed on the same system without problems.
-
-In Ubuntu, Mint and Debian you can install Python 3 like this:
-
- $ sudo apt-get install python3 python3-pip
-
-For other Linux flavors, OS X and Windows, packages are available at
-
- http://www.python.org/getit/
-
-
-Quick start
------------
-
-Mypy can be installed using pip:
-
- $ python3 -m pip install -U mypy
-
-If you want to run the latest version of the code, you can install from git:
-
- $ python3 -m pip install -U git+git://github.com/python/mypy.git
-
-
-Now, if Python on your system is configured properly (else see
-"Troubleshooting" below), you can type-check the [statically typed parts] of a
-program like this:
-
- $ mypy PROGRAM
-
-You can always use a Python interpreter to run your statically typed
-programs, even if they have type errors:
-
- $ python3 PROGRAM
-
-[statically typed parts]: http://mypy.readthedocs.io/en/latest/basics.html#function-signatures
-
-
-Web site and documentation
---------------------------
-
-Documentation and additional information is available at the web site:
-
- http://www.mypy-lang.org/
-
-Or you can jump straight to the documentation:
-
- http://mypy.readthedocs.io/
-
-
-Troubleshooting
----------------
-
-Depending on your configuration, you may have to run `pip3` like
-this:
-
- $ python3 -m pip install -U mypy
-
-Except on Windows, it's best to always use the `--fast-parser`
-option to mypy; this requires installing `typed-ast`:
-
- $ python3 -m pip install -U typed-ast
-
-If the `mypy` command isn't found after installation: After either
-`pip3 install` or `setup.py install`, the `mypy` script and
-dependencies, including the `typing` module, will be installed to
-system-dependent locations. Sometimes the script directory will not
-be in `PATH`, and you have to add the target directory to `PATH`
-manually or create a symbolic link to the script. In particular, on
-Mac OS X, the script may be installed under `/Library/Frameworks`:
-
- /Library/Frameworks/Python.framework/Versions/<version>/bin
-
-In Windows, the script is generally installed in
-`\PythonNN\Scripts`. So, type check a program like this (replace
-`\Python34` with your Python installation path):
-
- C:\>\Python34\python \Python34\Scripts\mypy PROGRAM
-
-### Working with `virtualenv`
-
-If you are using [`virtualenv`](https://virtualenv.pypa.io/en/stable/),
-make sure you are running a python3 environment. Installing via `pip3`
-in a v2 environment will not configure the environment to run installed
-modules from the command line.
-
- $ python3 -m pip install -U virtualenv
- $ python3 -m virtualenv env
-
-
-Quick start for contributing to mypy
-------------------------------------
-
-If you want to contribute, first clone the mypy git repository:
-
- $ git clone --recurse-submodules https://github.com/python/mypy.git
-
-From the mypy directory, use pip to install mypy:
-
- $ cd mypy
- $ python3 -m pip install -U .
-
-Replace `python3` with your Python 3 interpreter. You may have to do
-the above as root. For example, in Ubuntu:
-
- $ sudo python3 -m pip install -U .
-
-Now you can use the `mypy` program just as above. In case of trouble
-see "Troubleshooting" above.
-
-The mypy wiki contains some useful information for contributors:
-
- https://github.com/python/mypy/wiki/Developer-Guides
-
-Working with the git version of mypy
-------------------------------------
-
-mypy contains a submodule, "typeshed". See http://github.com/python/typeshed.
-This submodule contains types for the Python standard library.
-
-Due to the way git submodules work, you'll have to do
-```
- git submodule update typeshed
-```
-whenever you change branches, merge, rebase, or pull.
-
-(It's possible to automate this: Search Google for "git hook update submodule")
-
-Running tests and linting
--------------------------
-
-First install any additional dependencies needed for testing:
-
- $ python3 -m pip install -U -r test-requirements.txt
-
-To run all tests, run the script `runtests.py` in the mypy repository:
-
- $ ./runtests.py
-
-Note that some tests will be disabled for older python versions.
-
-This will run all tests, including integration and regression tests,
-and will type check mypy and verify that all stubs are valid.
-
-You can run a subset of test suites by passing positive or negative
-filters:
-
- $ ./runtests.py lex parse -x lint -x stub
-
-For example, to run unit tests only, which run pretty quickly:
-
- $ ./runtests.py unit-test pytest
-
-The unit test suites are driven by a mixture of test frameworks:
-mypy's own `myunit` framework, and `pytest`, which we're in the
-process of migrating to. For finer control over which unit tests are
-run and how, you can run `py.test` or `scripts/myunit` directly, or
-pass inferior arguments via `-a`:
-
- $ py.test mypy/test/testcheck.py -v -k MethodCall
- $ ./runtests.py -v 'pytest mypy/test/testcheck' -a -v -a -k -a MethodCall
-
- $ PYTHONPATH=$PWD scripts/myunit -m mypy.test.testlex -v '*backslash*'
- $ ./runtests.py mypy.test.testlex -a -v -a '*backslash*'
-
-You can also run the type checker for manual testing without
-installing anything by setting up the Python module search path
-suitably (the lib-typing/3.2 path entry is not needed for Python 3.5
-or when you have manually installed the `typing` module):
-
- $ export PYTHONPATH=$PWD:$PWD/lib-typing/3.2
- $ python<version> -m mypy PROGRAM.py
-
-You can add the entry scripts to PATH for a single python3 version:
-
- $ export PATH=$PWD/scripts:$PATH
- $ mypy PROGRAM.py
-
-You can check a module or string instead of a file:
-
- $ mypy PROGRAM.py
- $ mypy -m MODULE
- $ mypy -c 'import MODULE'
-
-To run the linter:
-
- $ ./runtests.py lint
-
-
-Coverage reports
-----------------
-
-There is an experimental feature to generate coverage reports. To use
-this feature, you need to `pip install -U lxml`. This is an extension
-module and requires various library headers to install; on a
-Debian-derived system the command
- `apt-get install python3-dev libxml2-dev libxslt1-dev`
-may provide the necessary dependencies.
-
-To use the feature, pass e.g. `--txt-report "$(mktemp -d)"`.
-
-
-Development status
-------------------
-
-Mypy is a work in progress and is not yet production quality, though
-mypy development has been done using mypy for a while!
-
-Here are some of the more significant Python features not supported
-right now (but all of these will improve):
-
- - properties with setters not supported
- - limited metaclass support
- - only a subset of Python standard library modules are supported, and some
- only partially
- - 3rd party module support is limited
-
-The current development focus is to have a good coverage of Python
-features and the standard library (both 3.x and 2.7).
-
-
-Issue tracker
--------------
-
-Please report any bugs and enhancement ideas using the mypy issue
-tracker:
-
- https://github.com/python/mypy/issues
-
-Feel free to also ask questions on the tracker.
-
-
-Help wanted
------------
-
-Any help in testing, development, documentation and other tasks is
-highly appreciated and useful to the project. There are tasks for
-contributors of all experience levels. If you're just getting started,
-check out the
-[difficulty/easy](https://github.com/python/mypy/labels/difficulty%2Feasy)
-label.
-
-For more details, see the file [CONTRIBUTING.md](CONTRIBUTING.md).
-
-
-License
--------
-
-Mypy is licensed under the terms of the MIT License (see the file
-LICENSE).
diff --git a/appveyor.yml b/appveyor.yml
deleted file mode 100644
index 80fd0f8..0000000
--- a/appveyor.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-environment:
- matrix:
-
- - PYTHON: "C:\\Python35"
- PYTHON_VERSION: "3.5.1"
- PYTHON_ARCH: "32"
-
- - PYTHON: "C:\\Python35-x64"
- PYTHON_VERSION: "3.5.1"
- PYTHON_ARCH: "64"
-
- - PYTHON: "C:\\Python36"
- PYTHON_VERSION: "3.6.x"
- PYTHON_ARCH: "32"
-
- - PYTHON: "C:\\Python36-x64"
- PYTHON_VERSION: "3.6.x"
- PYTHON_ARCH: "64"
-
-
-install:
- - "%PYTHON%\\python.exe -m pip install -r test-requirements.txt"
- - "git submodule update --init typeshed"
- - "%PYTHON%\\python.exe setup.py -q install"
-
-build: off
-
-test_script:
- # Ignore lint (it's run separately below), reports (since we don't have lxml),
- # and cmdline (since one of its tests depends on lxml)
- - "%PYTHON%\\python.exe runtests.py -x lint -x reports -x cmdline"
- - ps: if ($env:PYTHON_VERSION -Match "3.6.x" -And $env:PYTHON_ARCH -Match "64") { iex "$env:PYTHON\\python.exe -m flake8" }
diff --git a/build-requirements.txt b/build-requirements.txt
deleted file mode 100644
index 0a8547b..0000000
--- a/build-requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-setuptools
-wheel
diff --git a/conftest.py b/conftest.py
deleted file mode 100644
index 9673db2..0000000
--- a/conftest.py
+++ /dev/null
@@ -1,3 +0,0 @@
-pytest_plugins = [
- 'mypy.test.data',
-]
diff --git a/docs/Makefile b/docs/Makefile
deleted file mode 100644
index be69e9d..0000000
--- a/docs/Makefile
+++ /dev/null
@@ -1,177 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS =
-SPHINXBUILD = sphinx-build
-PAPER =
-BUILDDIR = build
-
-# User-friendly check for sphinx-build
-ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
-$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
-endif
-
-# Internal variables.
-PAPEROPT_a4 = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
-# the i18n builder cannot share the environment and doctrees with the others
-I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
-
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
-
-help:
- @echo "Please use \`make <target>' where <target> is one of"
- @echo " html to make standalone HTML files"
- @echo " dirhtml to make HTML files named index.html in directories"
- @echo " singlehtml to make a single large HTML file"
- @echo " pickle to make pickle files"
- @echo " json to make JSON files"
- @echo " htmlhelp to make HTML files and a HTML help project"
- @echo " qthelp to make HTML files and a qthelp project"
- @echo " devhelp to make HTML files and a Devhelp project"
- @echo " epub to make an epub"
- @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
- @echo " latexpdf to make LaTeX files and run them through pdflatex"
- @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
- @echo " text to make text files"
- @echo " man to make manual pages"
- @echo " texinfo to make Texinfo files"
- @echo " info to make Texinfo files and run them through makeinfo"
- @echo " gettext to make PO message catalogs"
- @echo " changes to make an overview of all changed/added/deprecated items"
- @echo " xml to make Docutils-native XML files"
- @echo " pseudoxml to make pseudoxml-XML files for display purposes"
- @echo " linkcheck to check all external links for integrity"
- @echo " doctest to run all doctests embedded in the documentation (if enabled)"
-
-clean:
- rm -rf $(BUILDDIR)/*
-
-html:
- $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
- @echo
- @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-dirhtml:
- $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
- @echo
- @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-
-singlehtml:
- $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
- @echo
- @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
-
-pickle:
- $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
- @echo
- @echo "Build finished; now you can process the pickle files."
-
-json:
- $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
- @echo
- @echo "Build finished; now you can process the JSON files."
-
-htmlhelp:
- $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
- @echo
- @echo "Build finished; now you can run HTML Help Workshop with the" \
- ".hhp project file in $(BUILDDIR)/htmlhelp."
-
-qthelp:
- $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
- @echo
- @echo "Build finished; now you can run "qcollectiongenerator" with the" \
- ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
- @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Mypy.qhcp"
- @echo "To view the help file:"
- @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Mypy.qhc"
-
-devhelp:
- $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
- @echo
- @echo "Build finished."
- @echo "To view the help file:"
- @echo "# mkdir -p $$HOME/.local/share/devhelp/Mypy"
- @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Mypy"
- @echo "# devhelp"
-
-epub:
- $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
- @echo
- @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
-
-latex:
- $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
- @echo
- @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
- @echo "Run \`make' in that directory to run these through (pdf)latex" \
- "(use \`make latexpdf' here to do that automatically)."
-
-latexpdf:
- $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
- @echo "Running LaTeX files through pdflatex..."
- $(MAKE) -C $(BUILDDIR)/latex all-pdf
- @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-latexpdfja:
- $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
- @echo "Running LaTeX files through platex and dvipdfmx..."
- $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
- @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-text:
- $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
- @echo
- @echo "Build finished. The text files are in $(BUILDDIR)/text."
-
-man:
- $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
- @echo
- @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
-
-texinfo:
- $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
- @echo
- @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
- @echo "Run \`make' in that directory to run these through makeinfo" \
- "(use \`make info' here to do that automatically)."
-
-info:
- $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
- @echo "Running Texinfo files through makeinfo..."
- make -C $(BUILDDIR)/texinfo info
- @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
-
-gettext:
- $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
- @echo
- @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
-
-changes:
- $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
- @echo
- @echo "The overview file is in $(BUILDDIR)/changes."
-
-linkcheck:
- $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
- @echo
- @echo "Link check complete; look for any errors in the above output " \
- "or in $(BUILDDIR)/linkcheck/output.txt."
-
-doctest:
- $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
- @echo "Testing of doctests in the sources finished, look at the " \
- "results in $(BUILDDIR)/doctest/output.txt."
-
-xml:
- $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
- @echo
- @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
-
-pseudoxml:
- $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
- @echo
- @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/docs/README.md b/docs/README.md
deleted file mode 100644
index 2122eef..0000000
--- a/docs/README.md
+++ /dev/null
@@ -1,49 +0,0 @@
-Mypy Documentation
-==================
-
-What's this?
-------------
-
-This directory contains the source code for Mypy documentation (under `source/`)
-and build scripts. The documentation uses Sphinx and reStructuredText. We use
-`sphinx-rtd-theme` as the documentation theme.
-
-Building the documentation
---------------------------
-
-Install Sphinx and other dependencies (i.e. theme) needed for the documentation.
-From the `docs` directory, use `pip`:
-
-```
-$ pip install -r requirements-docs.txt
-```
-
-Build the documentation like this:
-
-```
-$ make html
-```
-
-The built documentation will be placed in the `docs/build` directory. Open
-`docs/build/html/index.html` to view the documentation.
-
-Helpful documentation build commands
-------------------------------------
-
-Clean the documentation build:
-
-```
-$ make clean
-```
-
-Test and check the links found in the documentation:
-
-```
-$ make linkcheck
-```
-
-Documentation on Read The Docs
-------------------------------
-
-The mypy documentation is hosted on Read The Docs, and the latest version
-can be found at https://mypy.readthedocs.io/en/latest.
diff --git a/docs/make.bat b/docs/make.bat
deleted file mode 100755
index 1e3d843..0000000
--- a/docs/make.bat
+++ /dev/null
@@ -1,242 +0,0 @@
-@ECHO OFF
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
- set SPHINXBUILD=sphinx-build
-)
-set BUILDDIR=build
-set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source
-set I18NSPHINXOPTS=%SPHINXOPTS% source
-if NOT "%PAPER%" == "" (
- set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
- set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
-)
-
-if "%1" == "" goto help
-
-if "%1" == "help" (
- :help
- echo.Please use `make ^<target^>` where ^<target^> is one of
- echo. html to make standalone HTML files
- echo. dirhtml to make HTML files named index.html in directories
- echo. singlehtml to make a single large HTML file
- echo. pickle to make pickle files
- echo. json to make JSON files
- echo. htmlhelp to make HTML files and a HTML help project
- echo. qthelp to make HTML files and a qthelp project
- echo. devhelp to make HTML files and a Devhelp project
- echo. epub to make an epub
- echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
- echo. text to make text files
- echo. man to make manual pages
- echo. texinfo to make Texinfo files
- echo. gettext to make PO message catalogs
- echo. changes to make an overview over all changed/added/deprecated items
- echo. xml to make Docutils-native XML files
- echo. pseudoxml to make pseudoxml-XML files for display purposes
- echo. linkcheck to check all external links for integrity
- echo. doctest to run all doctests embedded in the documentation if enabled
- goto end
-)
-
-if "%1" == "clean" (
- for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
- del /q /s %BUILDDIR%\*
- goto end
-)
-
-
-%SPHINXBUILD% 2> nul
-if errorlevel 9009 (
- echo.
- echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
- echo.installed, then set the SPHINXBUILD environment variable to point
- echo.to the full path of the 'sphinx-build' executable. Alternatively you
- echo.may add the Sphinx directory to PATH.
- echo.
- echo.If you don't have Sphinx installed, grab it from
- echo.http://sphinx-doc.org/
- exit /b 1
-)
-
-if "%1" == "html" (
- %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The HTML pages are in %BUILDDIR%/html.
- goto end
-)
-
-if "%1" == "dirhtml" (
- %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
- goto end
-)
-
-if "%1" == "singlehtml" (
- %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
- goto end
-)
-
-if "%1" == "pickle" (
- %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can process the pickle files.
- goto end
-)
-
-if "%1" == "json" (
- %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can process the JSON files.
- goto end
-)
-
-if "%1" == "htmlhelp" (
- %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can run HTML Help Workshop with the ^
-.hhp project file in %BUILDDIR%/htmlhelp.
- goto end
-)
-
-if "%1" == "qthelp" (
- %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can run "qcollectiongenerator" with the ^
-.qhcp project file in %BUILDDIR%/qthelp, like this:
- echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Mypy.qhcp
- echo.To view the help file:
- echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Mypy.qhc
- goto end
-)
-
-if "%1" == "devhelp" (
- %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished.
- goto end
-)
-
-if "%1" == "epub" (
- %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The epub file is in %BUILDDIR%/epub.
- goto end
-)
-
-if "%1" == "latex" (
- %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
- goto end
-)
-
-if "%1" == "latexpdf" (
- %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
- cd %BUILDDIR%/latex
- make all-pdf
- cd %BUILDDIR%/..
- echo.
- echo.Build finished; the PDF files are in %BUILDDIR%/latex.
- goto end
-)
-
-if "%1" == "latexpdfja" (
- %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
- cd %BUILDDIR%/latex
- make all-pdf-ja
- cd %BUILDDIR%/..
- echo.
- echo.Build finished; the PDF files are in %BUILDDIR%/latex.
- goto end
-)
-
-if "%1" == "text" (
- %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The text files are in %BUILDDIR%/text.
- goto end
-)
-
-if "%1" == "man" (
- %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The manual pages are in %BUILDDIR%/man.
- goto end
-)
-
-if "%1" == "texinfo" (
- %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
- goto end
-)
-
-if "%1" == "gettext" (
- %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
- goto end
-)
-
-if "%1" == "changes" (
- %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
- if errorlevel 1 exit /b 1
- echo.
- echo.The overview file is in %BUILDDIR%/changes.
- goto end
-)
-
-if "%1" == "linkcheck" (
- %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
- if errorlevel 1 exit /b 1
- echo.
- echo.Link check complete; look for any errors in the above output ^
-or in %BUILDDIR%/linkcheck/output.txt.
- goto end
-)
-
-if "%1" == "doctest" (
- %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
- if errorlevel 1 exit /b 1
- echo.
- echo.Testing of doctests in the sources finished, look at the ^
-results in %BUILDDIR%/doctest/output.txt.
- goto end
-)
-
-if "%1" == "xml" (
- %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The XML files are in %BUILDDIR%/xml.
- goto end
-)
-
-if "%1" == "pseudoxml" (
- %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
- goto end
-)
-
-:end
diff --git a/docs/requirements-docs.txt b/docs/requirements-docs.txt
deleted file mode 100644
index d20641e..0000000
--- a/docs/requirements-docs.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Sphinx >= 1.4.4
-sphinx-rtd-theme >= 0.1.9
diff --git a/docs/source/additional_features.rst b/docs/source/additional_features.rst
deleted file mode 100644
index b9dd07f..0000000
--- a/docs/source/additional_features.rst
+++ /dev/null
@@ -1,9 +0,0 @@
-Additional features
--------------------
-
-Several mypy features are not currently covered by this tutorial,
-including the following:
-
-- inheritance between generic classes
-- compatibility and subtyping of generic types, including covariance of generic types
-- ``super()``
diff --git a/docs/source/basics.rst b/docs/source/basics.rst
deleted file mode 100644
index 572364d..0000000
--- a/docs/source/basics.rst
+++ /dev/null
@@ -1,194 +0,0 @@
-Basics
-======
-
-This chapter introduces some core concepts of mypy, including function
-annotations, the ``typing`` module and library stubs. Read it carefully,
-as the rest of the documentation may not make much sense otherwise.
-
-Function signatures
-*******************
-
-A function without a type annotation is considered dynamically typed:
-
-.. code-block:: python
-
- def greeting(name):
- return 'Hello, {}'.format(name)
-
-You can declare the signature of a function using the Python 3
-annotation syntax (Python 2 is discussed later in :ref:`python2`).
-This makes the function statically typed, and causes the type
-checker to report type errors within the function.
-
-Here's a version of the above function that is statically typed and
-will be type checked:
-
-.. code-block:: python
-
- def greeting(name: str) -> str:
- return 'Hello, {}'.format(name)
-
-If a function does not explicitly return a value we give the return
-type as ``None``. Using a ``None`` result in a statically typed
-context results in a type check error:
-
-.. code-block:: python
-
- def p() -> None:
- print('hello')
-
- a = p() # Type check error: p has None return value
-
-Arguments with default values can be annotated as follows:
-
-.. code-block:: python
-
- def greeting(name: str, prefix: str = 'Mr.') -> str:
- return 'Hello, {} {}'.format(prefix, name)
-
-Mixing dynamic and static typing
-********************************
-
-Mixing dynamic and static typing within a single file is often
-useful. For example, if you are migrating existing Python code to
-static typing, it may be easiest to do this incrementally, such as by
-migrating a few functions at a time. Also, when prototyping a new
-feature, you may decide to first implement the relevant code using
-dynamic typing and only add type signatures later, when the code is
-more stable.
-
-.. code-block:: python
-
- def f():
- 1 + 'x' # No static type error (dynamically typed)
-
- def g() -> None:
- 1 + 'x' # Type check error (statically typed)
-
-.. note::
-
- The earlier stages of analysis performed by mypy, known as semantic
- analysis, may report errors even for dynamically typed functions.
- However, you should not rely on this, as it may change in the future.
-
-The typing module
-*****************
-
-The ``typing`` module contains many definitions that are useful in
-statically typed code. You typically use ``from ... import`` to import
-them (we'll explain ``Iterable`` later in this document):
-
-.. code-block:: python
-
- from typing import Iterable
-
- def greet_all(names: Iterable[str]) -> None:
- for name in names:
- print('Hello, {}'.format(name))
-
-For brevity, we often omit the ``typing`` import in code examples, but
-you should always include it in modules that contain statically typed
-code.
-
-The presence or absence of the ``typing`` module does not affect
-whether your code is type checked; it is only required when you use
-one or more special features it defines.
-
-Type checking programs
-**********************
-
-You can type check a program by using the ``mypy`` tool, which is
-basically a linter -- it checks your program for errors without actually
-running it::
-
- $ mypy program.py
-
-All errors reported by mypy are essentially warnings that you are free
-to ignore, if you so wish.
-
-The next chapter explains how to download and install mypy:
-:ref:`getting-started`.
-
-More command line options are documented in :ref:`command-line`.
-
-.. note::
-
- Depending on how mypy is configured, you may have to explicitly use
- the Python 3 interpreter to run mypy. The mypy tool is an ordinary
- mypy (and so also Python) program. For example::
-
- $ python3 -m mypy program.py
-
-.. _library-stubs:
-
-Library stubs and the Typeshed repo
-***********************************
-
-In order to type check code that uses library modules such as those
-included in the Python standard library, you need to have library
-*stubs*. A library stub defines a skeleton of the public interface
-of the library, including classes, variables and functions, and
-their types, but with dummy function bodies.
-
-For example, consider this code:
-
-.. code-block:: python
-
- x = chr(4)
-
-Without a library stub, the type checker would have no way of
-inferring the type of ``x`` and checking that the argument to ``chr``
-has a valid type. Mypy incorporates the `typeshed
-<https://github.com/python/typeshed>`_ project, which contains library
-stubs for the Python builtins and the standard library. The stub for
-the builtins contains a definition like this for ``chr``:
-
-.. code-block:: python
-
- def chr(code: int) -> str: ...
-
-In stub files we don't care about the function bodies, so we use
-an ellipsis instead. That ``...`` is three literal dots!
-
-Mypy complains if it can't find a stub (or a real module) for a
-library module that you import. You can create a stub easily; here is
-an overview:
-
-* Write a stub file for the library and store it as a ``.pyi`` file in
- the same directory as the library module.
-* Alternatively, put your stubs (``.pyi`` files) in a directory
- reserved for stubs (e.g., ``myproject/stubs``). In this case you
- have to set the environment variable ``MYPYPATH`` to refer to the
- directory. For example::
-
- $ export MYPYPATH=~/work/myproject/stubs
-
-Use the normal Python file name conventions for modules, e.g. ``csv.pyi``
-for module ``csv``. Use a subdirectory with ``__init__.pyi`` for packages.
-
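-For example, a stub for a hypothetical third-party module
-``frobnicate`` (the module and its functions are invented here purely
-for illustration) could be saved as ``frobnicate.pyi``:
-
-.. code-block:: python
-
-    # Contents of frobnicate.pyi: signatures only, bodies are ellipses
-    from typing import List
-
-    def frobnicate(items: List[int], repeat: int = ...) -> List[int]: ...
-
-    class Frobnicator:
-        def __init__(self, name: str) -> None: ...
-        def run(self) -> int: ...
-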
-If a directory contains both a ``.py`` and a ``.pyi`` file for the
-same module, the ``.pyi`` file takes precedence. This way you can
-easily add annotations for a module even if you don't want to modify
-the source code. This can be useful, for example, if you use 3rd party
-open source libraries in your program (and there are no stubs in
-typeshed yet).
-
-That's it! Now you can access the module in mypy programs and type check
-code that uses the library. If you write a stub for a library module,
-consider making it available for other programmers that use mypy
-by contributing it back to the typeshed repo.
-
-There is more information about creating stubs in the
-`mypy wiki <https://github.com/python/mypy/wiki/Creating-Stubs-For-Python-Modules>`_.
-The following sections explain the kinds of type annotations you can use
-in your programs and stub files.
-
-.. note::
-
- You may be tempted to point ``MYPYPATH`` to the standard library or
- to the ``site-packages`` directory where your 3rd party packages
- are installed. This is almost always a bad idea -- you will likely
- get tons of error messages about code you didn't write and that
- mypy can't analyze all that well yet, and in the worst case
- scenario mypy may crash due to some construct in a 3rd party
- package that it didn't expect.
diff --git a/docs/source/builtin_types.rst b/docs/source/builtin_types.rst
deleted file mode 100644
index 4426df7..0000000
--- a/docs/source/builtin_types.rst
+++ /dev/null
@@ -1,37 +0,0 @@
-Built-in types
-==============
-
-These are examples of some of the most common built-in types:
-
-=================== ===============================
-Type Description
-=================== ===============================
-``int`` integer of arbitrary size
-``float`` floating point number
-``bool`` boolean value
-``str`` unicode string
-``bytes`` 8-bit string
-``object`` an arbitrary object (``object`` is the common base class)
-``List[str]`` list of ``str`` objects
-``Dict[str, int]`` dictionary from ``str`` keys to ``int`` values
-``Iterable[int]`` iterable object containing ints
-``Sequence[bool]`` sequence of booleans
-``Any`` dynamically typed value with an arbitrary type
-=================== ===============================
-
-The type ``Any`` and type constructors ``List``, ``Dict``,
-``Iterable`` and ``Sequence`` are defined in the ``typing`` module.
-
-The type ``Dict`` is a *generic* class, signified by type arguments within
-``[...]``. For example, ``Dict[int, str]`` is a dictionary from integers to
-strings, and ``Dict[Any, Any]`` is a dictionary of dynamically typed
-(arbitrary) values and keys. ``List`` is another generic class. ``Dict`` and
-``List`` are aliases for the built-ins ``dict`` and ``list``, respectively.
-
-``Iterable`` and ``Sequence`` are generic abstract base classes that
-correspond to Python protocols. For example, a ``str`` object or a
-``List[str]`` object is valid
-when ``Iterable[str]`` or ``Sequence[str]`` is expected. Note that even though
-they are similar to abstract base classes defined in ``abc.collections``
-(formerly ``collections``), they are not identical, since the built-in
-collection type objects do not support indexing.
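-
-As a short sketch of how these types appear in annotations (the
-function names below are made up for illustration):
-
-.. code-block:: python
-
-    from typing import Dict, Iterable, Sequence
-
-    def total_scores(scores: Dict[str, int]) -> int:
-        # Dict[str, int] maps str keys to int values.
-        return sum(scores.values())
-
-    def first_word(words: Sequence[str]) -> str:
-        # Any sequence of strings is accepted, e.g. a list or a tuple.
-        return words[0]
-
-    def announce(names: Iterable[str]) -> None:
-        # Both a List[str] and a plain str are valid Iterable[str] values.
-        for name in names:
-            print(name)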
diff --git a/docs/source/casts.rst b/docs/source/casts.rst
deleted file mode 100644
index 900ee0c..0000000
--- a/docs/source/casts.rst
+++ /dev/null
@@ -1,39 +0,0 @@
-.. _casts:
-
-Casts
-=====
-
-Mypy supports type casts that are usually used to coerce a statically
-typed value to a subtype. Unlike languages such as Java or C#,
-however, mypy casts are only used as hints for the type checker, and they
-don't perform a runtime type check. Use the function ``cast`` to perform a
-cast:
-
-.. code-block:: python
-
- from typing import cast, List
-
- o = [1] # type: object
- x = cast(List[int], o) # OK
- y = cast(List[str], o) # OK (cast performs no actual runtime check)
-
-To support runtime checking of casts such as the above, we'd have to check
-the types of all list items, which would be very inefficient for large lists.
-Use assertions if you want to
-perform an actual runtime check. Casts are used to silence spurious
-type checker warnings and give the type checker a little help when it can't
-quite understand what is going on.
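-
-For instance, a minimal sketch of the assertion approach (the function
-name is invented for illustration):
-
-.. code-block:: python
-
-    def first_int(items: object) -> int:
-        assert isinstance(items, list)  # real runtime check; also narrows the type for mypy
-        return items[0]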
-
-You don't need a cast for expressions with type ``Any``, or when
-assigning to a variable with type ``Any``, as was explained earlier.
-You can also use ``Any`` as the cast target type -- this lets you perform
-any operations on the result. For example:
-
-.. code-block:: python
-
- from typing import cast, Any
-
- x = 1
- x + 'x' # Type check error
- y = cast(Any, x)
- y + 'x' # Type check OK (runtime error)
diff --git a/docs/source/cheat_sheet.rst b/docs/source/cheat_sheet.rst
deleted file mode 100644
index b93a063..0000000
--- a/docs/source/cheat_sheet.rst
+++ /dev/null
@@ -1,241 +0,0 @@
-.. _cheat-sheet-py2:
-
-Mypy syntax cheat sheet (Python 2)
-==================================
-
-This document is a quick cheat sheet showing how the `PEP 484 <https://www.python.org/dev/peps/pep-0484/>`_ type
-language represents various common types in Python 2.
-
-.. note::
-
- Technically many of the type annotations shown below are redundant,
- because mypy can derive them from the type of the expression. So
- many of the examples have a dual purpose: show how to write the
- annotation, and show the inferred types.
-
-
-Built-in types
-**************
-
-.. code-block:: python
-
- from typing import List, Set, Dict, Tuple, Text, Optional
-
- # For simple built-in types, just use the name of the type.
- x = 1 # type: int
- x = 1.0 # type: float
- x = True # type: bool
- x = "test" # type: str
- x = u"test" # type: unicode
-
- # For collections, the name of the type is capitalized, and the
- # name of the type inside the collection is in brackets.
- x = [1] # type: List[int]
- x = set([6, 7]) # type: Set[int]
-
- # For mappings, we need the types of both keys and values.
- x = dict(field=2.0) # type: Dict[str, float]
-
- # For tuples, we specify the types of all the elements.
- x = (3, "yes", 7.5) # type: Tuple[int, str, float]
-
- # For textual data, use Text.
- # This is `unicode` in Python 2 and `str` in Python 3.
- x = ["string", u"unicode"] # type: List[Text]
-
- # Use Optional for values that could be None.
- input_str = f() # type: Optional[str]
- if input_str is not None:
- print input_str
-
-
-Functions
-*********
-
-.. code-block:: python
-
- from typing import Callable, Iterable, Union, Optional, List
-
- # This is how you annotate a function definition.
- def stringify(num):
- # type: (int) -> str
- """Your function docstring goes here after the type definition."""
- return str(num)
-
- # This function has no parameters and also returns nothing. Annotations
- # can also be placed on the same line as their function headers.
- def greet_world(): # type: () -> None
- print "Hello, world!"
-
- # And here's how you specify multiple arguments.
- def plus(num1, num2):
- # type: (int, int) -> int
- return num1 + num2
-
- # Add type annotations for kwargs as though they were positional args.
- def f(num1, my_float=3.5):
- # type: (int, float) -> float
- return num1 + my_float
-
- # An argument can be declared positional-only by giving it a name
- # starting with two underscores:
- def quux(__x):
- # type: (int) -> None
- pass
- quux(3) # Fine
- quux(__x=3) # Error
-
- # This is how you annotate a function value.
- x = f # type: Callable[[int, float], float]
-
- # A generator function that yields ints is secretly just a function that
- # returns an iterable (see below) of ints, so that's how we annotate it.
- def f(n):
- # type: (int) -> Iterable[int]
- i = 0
- while i < n:
- yield i
- i += 1
-
- # There's alternative syntax for functions with many arguments.
- def send_email(address, # type: Union[str, List[str]]
- sender, # type: str
- cc, # type: Optional[List[str]]
- bcc, # type: Optional[List[str]]
- subject='',
- body=None # type: List[str]
- ):
- # type: (...) -> bool
- <code>
-
-
-When you're puzzled or when things are complicated
-**************************************************
-
-.. code-block:: python
-
- from typing import Union, Any, List, cast
-
- # To find out what type mypy infers for an expression anywhere in
- # your program, wrap it in reveal_type. Mypy will print an error
- # message with the type; remove it again before running the code.
- reveal_type(1) # -> error: Revealed type is 'builtins.int'
-
- # Use Union when something could be one of a few types.
- x = [3, 5, "test", "fun"] # type: List[Union[int, str]]
-
- # Use Any if you don't know the type of something or it's too
- # dynamic to write a type for.
- x = mystery_function() # type: Any
-
- # This is how to deal with varargs.
- # This makes each positional arg and each keyword arg a 'str'.
- def call(self, *args, **kwargs):
- # type: (*str, **str) -> str
- request = make_request(*args, **kwargs)
- return self.do_api_query(request)
-
-
- # Use `ignore` to suppress type-checking on a given line, when your
- # code confuses mypy or runs into an outright bug in mypy.
- # Good practice is to comment every `ignore` with a bug link
- # (in mypy, typeshed, or your own code) or an explanation of the issue.
- x = confusing_function() # type: ignore # https://github.com/python/mypy/issues/1167
-
- # cast is a helper function for mypy that allows for guidance of how to convert types.
- # it does not cast at runtime
- a = [4]
- b = cast(List[int], a) # passes fine
- c = cast(List[str], a) # passes fine (no runtime check)
- reveal_type(c) # -> error: Revealed type is 'builtins.list[builtins.str]'
- print(c) # -> [4] the object is not cast
-
- # TODO: explain "Need type annotation for variable" when
- # initializing with None or an empty container
-
-
-Standard duck types
-*******************
-
-In typical Python code, many functions that can take a list or a dict
-as an argument only need their argument to be somehow "list-like" or
-"dict-like". A specific meaning of "list-like" or "dict-like" (or
-something-else-like) is called a "duck type", and several duck types
-that are common in idiomatic Python are standardized.
-
-.. code-block:: python
-
- from typing import Mapping, MutableMapping, Sequence, Iterable, List, Set
-
- # Use Iterable for generic iterables (anything usable in `for`),
- # and Sequence where a sequence (supporting `len` and `__getitem__`) is required.
- def f(iterable_of_ints):
- # type: (Iterable[int]) -> List[str]
- return [str(x) for x in iterable_of_ints]
- f(range(1, 3))
-
- # Mapping describes a dict-like object (with `__getitem__`) that we won't mutate,
- # and MutableMapping one (with `__setitem__`) that we might.
- def f(my_dict):
- # type: (Mapping[int, str]) -> List[int]
- return list(my_dict.keys())
- f({3: 'yes', 4: 'no'})
- def f(my_mapping):
- # type: (MutableMapping[int, str]) -> Set[str]
- my_mapping[5] = 'maybe'
- return set(my_mapping.values())
- f({3: 'yes', 4: 'no'})
-
-
-Classes
-*******
-
-.. code-block:: python
-
- class MyClass(object):
-
- # For instance methods, omit `self` from the type comment.
- def my_method(self, num, str1):
- # type: (int, str) -> str
- return num * str1
-
- # The __init__ method doesn't return anything, so it gets return
- # type None just like any other method that doesn't return anything.
- def __init__(self):
- # type: () -> None
- pass
-
- # User-defined classes are written with just their own names.
- x = MyClass() # type: MyClass
-
-
-Other stuff
-***********
-
-.. code-block:: python
-
- import sys
- import re
- # typing.Match describes regex matches from the re module.
- from typing import Match, AnyStr, IO
- x = re.match(r'[0-9]+', "15") # type: Match[str]
-
- # Use AnyStr for functions that should accept any kind of string
- # without allowing different kinds of strings to mix.
- def concat(a, b):
- # type: (AnyStr, AnyStr) -> AnyStr
- return a + b
- concat(u"foo", u"bar") # type: unicode
- concat(b"foo", b"bar") # type: bytes
-
- # Use IO[] for functions that should accept or return any
- # object that comes from an open() call. The IO[] does not
- # distinguish between reading, writing or other modes.
- def get_sys_IO(mode='w'):
- # type: (str) -> IO[str]
- if mode == 'w':
- return sys.stdout
- elif mode == 'r':
- return sys.stdin
- else:
- return sys.stdout
-
- # TODO: add TypeVar and a simple generic function
-
diff --git a/docs/source/cheat_sheet_py3.rst b/docs/source/cheat_sheet_py3.rst
deleted file mode 100644
index 964e78e..0000000
--- a/docs/source/cheat_sheet_py3.rst
+++ /dev/null
@@ -1,288 +0,0 @@
-.. _cheat-sheet-py3:
-
-Mypy syntax cheat sheet (Python 3)
-==================================
-
-This document is a quick cheat sheet showing how the `PEP 484 <https://www.python.org/dev/peps/pep-0484/>`_ type
-language represents various common types in Python 3. Unless otherwise noted, the syntax is valid on all versions of Python 3.
-
-.. note::
-
- Technically many of the type annotations shown below are redundant,
- because mypy can derive them from the type of the expression. So
- many of the examples have a dual purpose: show how to write the
- annotation, and show the inferred types.
-
-
-Built-in types
-**************
-
-.. code-block:: python
-
- from typing import List, Set, Dict, Tuple, Text, Optional, AnyStr
-
- # For simple built-in types, just use the name of the type.
- x = 1 # type: int
- x = 1.0 # type: float
- x = True # type: bool
- x = "test" # type: str
- x = u"test" # type: str
- x = b"test" # type: bytes
-
- # For collections, the name of the type is capitalized, and the
- # name of the type inside the collection is in brackets.
- x = [1] # type: List[int]
- x = {6, 7} # type: Set[int]
-
- # For mappings, we need the types of both keys and values.
- x = {'field': 2.0} # type: Dict[str, float]
-
- # For tuples, we specify the types of all the elements.
- x = (3, "yes", 7.5) # type: Tuple[int, str, float]
-
- # For textual data, use Text.
- # This is `unicode` in Python 2 and `str` in Python 3.
- x = ["string", u"unicode"] # type: List[Text]
-
-
-
- # Use Optional for values that could be None.
- input_str = f() # type: Optional[str]
- if input_str is not None:
- print(input_str)
-
-
-Functions
-*********
-
-Python 3 introduces an annotation syntax for function declarations in `PEP 3107 <https://www.python.org/dev/peps/pep-3107/>`_.
-
-.. code-block:: python
-
- from typing import Callable, Iterable, Union, Optional, List
-
- # This is how you annotate a function definition.
- def stringify(num: int) -> str:
- return str(num)
-
- # And here's how you specify multiple arguments.
- def plus(num1: int, num2: int) -> int:
- return num1 + num2
-
- # Add type annotations for kwargs as though they were positional args.
- def f(num1: int, my_float: float = 3.5) -> float:
- return num1 + my_float
-
- # An argument can be declared positional-only by giving it a name
- # starting with two underscores:
- def quux(__x: int) -> None:
- pass
- quux(3) # Fine
- quux(__x=3) # Error
-
- # This is how you annotate a function value.
- x = f # type: Callable[[int, float], float]
-
- # A generator function that yields ints is secretly just a function that
- # returns an iterable (see below) of ints, so that's how we annotate it.
- def f(n: int) -> Iterable[int]:
- i = 0
- while i < n:
- yield i
- i += 1
-
- # For a function with many arguments, you can of course split it over multiple lines
- def send_email(address: Union[str, List[str]],
- sender: str,
- cc: Optional[List[str]],
- bcc: Optional[List[str]],
- subject='',
- body: List[str] = None
- ) -> bool:
-
- ...
-
-
-When you're puzzled or when things are complicated
-**************************************************
-
-.. code-block:: python
-
- from typing import Union, Any, List, cast
-
- # To find out what type mypy infers for an expression anywhere in
- # your program, wrap it in reveal_type. Mypy will print an error
- # message with the type; remove it again before running the code.
- reveal_type(1) # -> error: Revealed type is 'builtins.int'
-
- # Use Union when something could be one of a few types.
- x = [3, 5, "test", "fun"] # type: List[Union[int, str]]
-
- # Use Any if you don't know the type of something or it's too
- # dynamic to write a type for.
- x = mystery_function() # type: Any
-
- # Use `ignore` to suppress type-checking on a given line, when your
- # code confuses mypy or runs into an outright bug in mypy.
- # Good practice is to comment every `ignore` with a bug link
- # (in mypy, typeshed, or your own code) or an explanation of the issue.
- x = confusing_function() # type: ignore # https://github.com/python/mypy/issues/1167
-
- # cast is a helper function for mypy that allows for guidance of how to convert types.
- # it does not cast at runtime
- a = [4]
- b = cast(List[int], a) # passes fine
- c = cast(List[str], a) # passes fine (no runtime check)
- reveal_type(c) # -> error: Revealed type is 'builtins.list[builtins.str]'
- print(c) # -> [4] the object is not cast
-
- # TODO: explain "Need type annotation for variable" when
- # initializing with None or an empty container
-
-
-Standard duck types
-*******************
-
-In typical Python code, many functions that can take a list or a dict
-as an argument only need their argument to be somehow "list-like" or
-"dict-like". A specific meaning of "list-like" or "dict-like" (or
-something-else-like) is called a "duck type", and several duck types
-that are common in idiomatic Python are standardized.
-
-.. code-block:: python
-
- from typing import Mapping, MutableMapping, Sequence, Iterable, List, Set
-
- # Use Iterable for generic iterables (anything usable in `for`),
- # and Sequence where a sequence (supporting `len` and `__getitem__`) is required.
- def f(iterable_of_ints: Iterable[int]) -> List[str]:
- return [str(x) for x in iterable_of_ints]
- f(range(1, 3))
-
- # Mapping describes a dict-like object (with `__getitem__`) that we won't mutate,
- # and MutableMapping one (with `__setitem__`) that we might.
- def f(my_dict: Mapping[int, str]) -> List[int]:
- return list(my_dict.keys())
- f({3: 'yes', 4: 'no'})
- def f(my_mapping: MutableMapping[int, str]) -> Set[str]:
- my_mapping[5] = 'maybe'
- return set(my_mapping.values())
- f({3: 'yes', 4: 'no'})
-
-
-Classes
-*******
-
-.. code-block:: python
-
- class MyClass:
- # The __init__ method doesn't return anything, so it gets return
- # type None just like any other method that doesn't return anything.
- def __init__(self) -> None:
- ...
- # For instance methods, omit the type for `self`.
- def my_method(self, num: int, str1: str) -> str:
- return num * str1
-
-
-
- # User-defined classes are written with just their own names.
- x = MyClass() # type: MyClass
-
-
-Other stuff
-***********
-
-.. code-block:: python
-
- import sys
- import re
- # typing.Match describes regex matches from the re module.
- from typing import Match, AnyStr, IO
- x = re.match(r'[0-9]+', "15") # type: Match[str]
-
- # You can use AnyStr to indicate that any string type will work
- # but not to mix types
- def full_name(first: AnyStr, last: AnyStr) -> AnyStr:
- return first+last
- full_name('Jon','Doe') # same str ok
- full_name(b'Bill', b'Bit') # same binary ok
- full_name(b'Terry', 'Trouble') # different str types, fails
-
- # Use IO[] for functions that should accept or return any
- # object that comes from an open() call. The IO[] does not
- # distinguish between reading, writing or other modes.
- def get_sys_IO(mode='w') -> IO[str]:
- if mode == 'w':
- return sys.stdout
- elif mode == 'r':
- return sys.stdin
- else:
- return sys.stdout
-
- # Forward references are useful if you want to reference a class before it is defined
-
- def f(foo: A) -> int: # this will fail
- ...
-
- class A:
- ...
-
- # however, using the string 'A', it will pass as long as there is a class of that name later on
- def f(foo: 'A') -> int:
- ...
-
- # TODO: add TypeVar and a simple generic function
-
-Variable Annotation in Python 3.6 with PEP 526
-**********************************************
-
-Python 3.6 brings new syntax for annotating variables with `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_.
-Mypy brings limited support for PEP 526 annotations.
-
-
-.. code-block:: python
-
- from typing import ClassVar, List, Tuple
-
- # annotating a variable is similar to annotating function arguments
- name: str = "Eric Idle"
-
- # class instances can be annotated as follows
- mc: MyClass = MyClass()
-
- # tuple packing can be done as follows
- tu: Tuple[str, ...] = ('a', 'b', 'c')
-
- # annotations are not checked at runtime
- year: int = '1972' # error in type checking, but works at runtime
-
- # these are all equivalent
- hour = 24 # type: int
- hour: int; hour = 24
- hour: int = 24
-
- # you do not (!) need to initialize a variable to annotate it
- a: int # ok for type checking and runtime
-
- # which is useful in conditional branches
- child: bool
- if age < 18:
- child = True
- else:
- child = False
-
- # annotations for classes are for instance variables (those created in __init__ or __new__)
- class Battery:
- charge_percent: int = 100 # this is an instance variable with a default value
- capacity: int # an instance variable without a default
-
- # you can use the ClassVar annotation to make the variable a class variable instead of an instance variable.
- class Car:
- seats: ClassVar[int] = 4
- passengers: ClassVar[List[str]]
-
- # You can also declare the type of an attribute in __init__
- class Box:
- def __init__(self) -> None:
- self.items: List[str] = []
-
-Please see :ref:`python-36` for more on mypy's compatibility with Python 3.6's new features.
diff --git a/docs/source/class_basics.rst b/docs/source/class_basics.rst
deleted file mode 100644
index dc778d3..0000000
--- a/docs/source/class_basics.rst
+++ /dev/null
@@ -1,157 +0,0 @@
-Class basics
-============
-
-Instance and class attributes
-*****************************
-
-The mypy type checker detects if you are trying to access a missing
-attribute, which is a very common programming error. For this to work
-correctly, instance and class attributes must be defined or
-initialized within the class. Mypy infers the types of attributes:
-
-.. code-block:: python
-
- class A:
- def __init__(self, x: int) -> None:
- self.x = x # Attribute x of type int
-
- a = A(1)
- a.x = 2 # OK
- a.y = 3 # Error: A has no attribute y
-
-This is a bit like each class having an implicitly defined
-``__slots__`` attribute. This is only enforced during type
-checking and not when your program is running.
-
-You can declare types of variables in the class body explicitly using
-a type comment:
-
-.. code-block:: python
-
- class A:
- x = None # type: List[int] # Declare attribute x of type List[int]
-
- a = A()
- a.x = [1] # OK
-
-As in Python, a variable defined in the class body can be used as a class
-or an instance variable.
-
-Similarly, you can give explicit types to instance variables defined
-in a method:
-
-.. code-block:: python
-
- class A:
- def __init__(self) -> None:
- self.x = [] # type: List[int]
-
- def f(self) -> None:
- self.y = 0 # type: Any
-
-You can only define an instance variable within a method if you assign
-to it explicitly using ``self``:
-
-.. code-block:: python
-
- class A:
- def __init__(self) -> None:
- self.y = 1 # Define y
- a = self
- a.x = 1 # Error: x not defined
-
-Overriding statically typed methods
-***********************************
-
-When overriding a statically typed method, mypy checks that the
-override has a compatible signature:
-
-.. code-block:: python
-
- class A:
- def f(self, x: int) -> None:
- ...
-
- class B(A):
- def f(self, x: str) -> None: # Error: type of x incompatible
- ...
-
- class C(A):
- def f(self, x: int, y: int) -> None: # Error: too many arguments
- ...
-
- class D(A):
- def f(self, x: int) -> None: # OK
- ...
-
-.. note::
-
- You can also vary return types **covariantly** in overriding. For
- example, you could override the return type ``object`` with a subtype
- such as ``int``.
-
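-A minimal sketch of the covariant-return case mentioned in the note
-(using new example classes):
-
-.. code-block:: python
-
-    class Base:
-        def f(self) -> object:
-            return 'abc'
-
-    class Derived(Base):
-        def f(self) -> int:  # OK: int is a subtype of object
-            return 0
-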
-You can also override a statically typed method with a dynamically
-typed one. This allows dynamically typed code to override methods
-defined in library classes without worrying about their type
-signatures.
-
-There is no runtime enforcement that the method override returns a
-value that is compatible with the original return type, since
-annotations have no effect at runtime:
-
-.. code-block:: python
-
- class A:
- def inc(self, x: int) -> int:
- return x + 1
-
- class B(A):
- def inc(self, x): # Override, dynamically typed
- return 'hello'
-
- b = B()
- print(b.inc(1)) # hello
- a = b # type: A
- print(a.inc(1)) # hello
-
-Abstract base classes and multiple inheritance
-**********************************************
-
-Mypy uses Python abstract base classes for protocol types. There are
-several built-in abstract base classes (for example,
-``Sequence``, ``Iterable`` and ``Iterator``). You can define abstract
-base classes using the ``abc.ABCMeta`` metaclass and the
-``abc.abstractmethod`` function decorator.
-
-.. code-block:: python
-
- from abc import ABCMeta, abstractmethod
- import typing
-
- class A(metaclass=ABCMeta):
- @abstractmethod
- def foo(self, x: int) -> None: pass
-
- @abstractmethod
- def bar(self) -> str: pass
-
- class B(A):
- def foo(self, x: int) -> None: ...
- def bar(self) -> str:
- return 'x'
-
- a = A() # Error: A is abstract
- b = B() # OK
-
-Unlike most Python code, abstract base classes are likely to play a
-significant role in many complex mypy programs.
-
-A class can inherit any number of classes, both abstract and
-concrete. As with normal overrides, a dynamically typed method can
-implement a statically typed abstract method defined in an abstract
-base class.
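-
-A minimal sketch of that last point, continuing the ``A`` example above:
-
-.. code-block:: python
-
-    class C(A):
-        # Dynamically typed implementations of the abstract methods
-        # are accepted.
-        def foo(self, x):
-            print(x)
-
-        def bar(self):
-            return 'y'
-
-    c = C()  # OK: C implements all abstract methods of A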
-
-.. note::
-
- There are also plans to support more Python-style "duck typing" in
- the type system. The details are still open.
diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst
deleted file mode 100644
index 3ada442..0000000
--- a/docs/source/command_line.rst
+++ /dev/null
@@ -1,413 +0,0 @@
-.. _command-line:
-
-The mypy command line
-=====================
-
-This section documents many of mypy's command line flags. A quick
-summary of command line flags can always be printed using the ``-h``
-flag (or its long form ``--help``)::
-
- $ mypy -h
- usage: mypy [-h] [-v] [-V] [--python-version x.y] [--platform PLATFORM] [-2]
- [--ignore-missing-imports]
- [--follow-imports {normal,silent,skip,error}]
- [--disallow-untyped-calls] [--disallow-untyped-defs]
- [--check-untyped-defs] [--disallow-subclassing-any]
- [--warn-incomplete-stub] [--warn-redundant-casts]
- [--warn-no-return] [--warn-unused-ignores] [--show-error-context]
- [--fast-parser] [-i] [--cache-dir DIR] [--strict-optional]
- [--strict-optional-whitelist [GLOB [GLOB ...]]] [--strict]
- [--junit-xml JUNIT_XML] [--pdb] [--show-traceback] [--stats]
- [--inferstats] [--custom-typing MODULE]
- [--custom-typeshed-dir DIR] [--scripts-are-modules]
- [--config-file CONFIG_FILE] [--show-column-numbers]
- [--find-occurrences CLASS.MEMBER] [--strict-boolean]
- [--cobertura-xml-report DIR] [--html-report DIR]
- [--linecount-report DIR] [--linecoverage-report DIR]
- [--memory-xml-report DIR] [--old-html-report DIR]
- [--txt-report DIR] [--xml-report DIR] [--xslt-html-report DIR]
- [--xslt-txt-report DIR] [-m MODULE] [-c PROGRAM_TEXT] [-p PACKAGE]
- [files [files ...]]
-
- (etc., too long to show everything here)
-
-Specifying files and directories to be checked
-**********************************************
-
-You've already seen ``mypy program.py`` as a way to type check the
-file ``program.py``. More generally you can pass any number of files
-and directories on the command line and they will all be type checked
-together.
-
-- Files ending in ``.py`` (and stub files ending in ``.pyi``) are
- checked as Python modules.
-
-- Files not ending in ``.py`` or ``.pyi`` are assumed to be Python
- scripts and checked as such.
-
-- Directories representing Python packages (i.e. containing a
- ``__init__.py[i]`` file) are checked as Python packages; all
- submodules and subpackages will be checked (subpackages must
- themselves have a ``__init__.py[i]`` file).
-
-- Directories that don't represent Python packages (i.e. not directly
- containing an ``__init__.py[i]`` file) are checked as follows:
-
- - All ``*.py[i]`` files contained directly therein are checked as
- toplevel Python modules;
-
- - All packages contained directly therein (i.e. immediate
- subdirectories with an ``__init__.py[i]`` file) are checked as
- toplevel Python packages.
-
-One more thing about checking modules and packages: if the directory
-*containing* a module or package specified on the command line has an
-``__init__.py[i]`` file, mypy assigns these an absolute module name by
-crawling up the path until no ``__init__.py[i]`` file is found. For
-example, suppose we run the command ``mypy foo/bar/baz.py`` where
-``foo/bar/__init__.py`` exists but ``foo/__init__.py`` does not. Then
-the module name assumed is ``bar.baz`` and the directory ``foo`` is
-added to mypy's module search path. On the other hand, if
-``foo/bar/__init__.py`` did not exist, ``foo/bar`` would be added to
-the module search path instead, and the module name assumed is just
-``baz``.
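-
-As a rough sketch of this crawling behavior (an illustration only, not
-mypy's actual implementation; the helper name is made up):
-
-.. code-block:: python
-
-    import os
-
-    def absolute_module_name(path: str) -> str:
-        # Hypothetical helper, for illustration only.
-        # Crawl up from e.g. 'foo/bar/baz.py' while __init__.py[i] files exist.
-        directory, filename = os.path.split(path)
-        parts = [os.path.splitext(filename)[0]]
-        while (os.path.isfile(os.path.join(directory, '__init__.py')) or
-               os.path.isfile(os.path.join(directory, '__init__.pyi'))):
-            directory, parent = os.path.split(directory)
-            parts.append(parent)
-        # The remaining directory is added to the module search path.
-        return '.'.join(reversed(parts))  # e.g. 'bar.baz'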
-
-If a script (a file not ending in ``.py[i]``) is processed, the module
-name assumed is always ``__main__`` (matching the behavior of the
-Python interpreter).
-
-Other ways of specifying code to be checked
-*******************************************
-
-The flag ``-m`` (long form: ``--module``) lets you specify a module
-name to be found using the default module search path. The module
-name may contain dots. For example::
-
- $ mypy -m html.parser
-
-will type check the module ``html.parser`` (this happens to be a
-library stub).
-
-The flag ``-p`` (long form: ``--package``) is similar to ``-m`` but
-you give it a package name and it will type check all submodules and
-subpackages (recursively) of that package. (If you pass a package
-name to ``-m`` it will just type check the package's ``__init__.py``
-and anything imported from there.) For example::
-
- $ mypy -p html
-
-will type check the entire ``html`` package (of library stubs).
-
-Finally the flag ``-c`` (long form: ``--command``) will take a string
-from the command line and type check it as a small program. For
-example::
-
- $ mypy -c 'x = [1, 2]; print(x())'
-
-will type check that little program (and complain that ``List[int]``
-is not callable).
-
-Reading a list of files from a file
-***********************************
-
-Finally, any command-line argument starting with ``@`` reads additional
-command-line arguments from the file following the ``@`` character.
-This is primarily useful if you have a file containing a list of files
-that you want to be type-checked: instead of using shell syntax like::
-
- mypy $(cat file_of_files)
-
-you can use this instead::
-
- mypy @file_of_files
-
-Such a file can also contain other flags, but a preferred way of
-reading flags (not files) from a file is to use a
-:ref:`configuration file <config-file>`.
-
-
-.. _finding-imports:
-
-How imports are found
-*********************
-
-When mypy encounters an `import` statement it tries to find the module
-on the file system, similar to the way Python finds it.
-However, there are some differences.
-
-First, mypy has its own search path.
-This is computed from the following items:
-
-- The ``MYPYPATH`` environment variable
- (a colon-separated list of directories).
-- The directories containing the sources given on the command line
- (see below).
-- The relevant directories of the
- `typeshed <https://github.com/python/typeshed>`_ repo.
-
-For sources given on the command line, the path is adjusted by crawling
-up from the given file or package to the nearest directory that does not
-contain an ``__init__.py`` or ``__init__.pyi`` file.
-
-Second, mypy searches for stub files in addition to regular Python files
-and packages.
-The rules for searching a module ``foo`` are as follows:
-
-- The search looks in each of the directories in the search path
- (see above) until a match is found.
-- If a package named ``foo`` is found (i.e. a directory
- ``foo`` containing an ``__init__.py`` or ``__init__.pyi`` file)
- that's a match.
-- If a stub file named ``foo.pyi`` is found, that's a match.
-- If a Python module named ``foo.py`` is found, that's a match.
-
-These matches are tried in order, so that if multiple matches are found
-in the same directory on the search path
-(e.g. a package and a Python file, or a stub file and a Python file)
-the first one in the above list wins.
-
-In particular, if a Python file and a stub file are both present in the
-same directory on the search path, only the stub file is used.
-(However, if the files are in different directories, the one found
-in the earlier directory is used.)
-
-NOTE: These rules are relevant to the following section too:
-the ``--follow-imports`` flag described below is applied *after* the
-above algorithm has determined which package, stub or module to use.
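-
-The precedence rules above can be summarized with a rough sketch (again an
-illustration only, not mypy's actual lookup code; the function name is made up):
-
-.. code-block:: python
-
-    import os
-    from typing import List
-
-    def find_module(name: str, search_path: List[str]) -> str:
-        # Illustration only -- not mypy's real implementation.
-        for directory in search_path:
-            package = os.path.join(directory, name)
-            # 1. A package (a directory with __init__.py[i]) wins first.
-            if (os.path.isfile(os.path.join(package, '__init__.py')) or
-                    os.path.isfile(os.path.join(package, '__init__.pyi'))):
-                return package
-            # 2. Then a stub file.
-            stub = os.path.join(directory, name + '.pyi')
-            if os.path.isfile(stub):
-                return stub
-            # 3. Then a plain Python module.
-            module = os.path.join(directory, name + '.py')
-            if os.path.isfile(module):
-                return module
-        raise ImportError(name)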
-
-.. _follow-imports:
-
-Following imports or not?
-*************************
-
-When you're first attacking a large existing codebase with mypy, you
-may only want to check selected files. For example, you may only want
-to check those files to which you have already added annotations.
-This is easily accomplished using a shell pipeline like this::
-
- mypy $(find . -name \*.py | xargs grep -l '# type:')
-
-(While there are many improvements possible to make this example more
-robust, this is not the place for a tutorial in shell programming.)
-
-However, by default mypy doggedly tries to :ref:`follow imports
-<finding-imports>`. This may cause several types of problems that you
-may want to silence during your initial conquest:
-
-- Your code may import library modules for which no stub files exist
- yet. This can cause a lot of errors like the following::
-
- main.py:1: error: No library stub file for standard library module 'antigravity'
- main.py:2: error: No library stub file for module 'flask'
- main.py:3: error: Cannot find module named 'sir_not_appearing_in_this_film'
-
- If you see only a few of these you may be able to silence them by
- putting ``# type: ignore`` on the respective ``import`` statements,
- but it's usually easier to silence all such errors by using
- :ref:`--ignore-missing-imports <ignore-missing-imports>`.
-
-- Your project's directory structure may hinder mypy in finding
- certain modules that are part of your project, e.g. modules hidden
- away in a subdirectory that's not a package. You can usually deal
- with this by setting the ``MYPYPATH`` variable (see
- :ref:`finding-imports`).
-
-- When following imports, mypy may find a module that's part of your
- project which you haven't annotated yet; mypy may then report errors
- for the top level code in that module (where the top level includes
- class bodies and function/method default values). Here the
- ``--follow-imports`` flag comes in handy.
-
-The ``--follow-imports`` flag takes a mandatory string value that can
-take one of four values. It only applies to modules for which a
-``.py`` file is found (but no corresponding ``.pyi`` stub file) and
-that are not given on the command line. Passing a package or
-directory on the command line implies all modules in that package or
-directory. The four possible values are:
-
-- ``normal`` (the default) follow imports normally and type check all
- top level code (as well as the bodies of all functions and methods
- with at least one type annotation in the signature).
-
-- ``silent`` follow imports normally and even "type check" them
- normally, but *suppress any error messages*. This is typically the
- best option for a new codebase.
-
-- ``skip`` *don't* follow imports, silently replacing the module (and
- everything imported *from* it) with an object of type ``Any``.
- (This option used to be known as ``--silent-imports`` and while it
- is very powerful it can also cause hard-to-debug errors, hence the
- recommendation of using ``silent`` instead.)
-
-- ``error`` the same behavior as ``skip`` but not quite as silent --
- it flags the import as an error, like this::
-
- main.py:1: note: Import of 'submodule' ignored
- main.py:1: note: (Using --follow-imports=error, module not passed on command line)
-
-
-Additional command line flags
-*****************************
-
-Here are some more useful flags:
-
-.. _ignore-missing-imports:
-
-- ``--ignore-missing-imports`` suppresses error messages about imports
- that cannot be resolved (see :ref:`follow-imports` for some examples).
-
-- ``--strict-optional`` enables experimental strict checking of ``Optional[...]``
- types and ``None`` values. Without this option, mypy doesn't generally check the
- use of ``None`` values -- they are valid everywhere. See :ref:`strict_optional` for
- more about this feature.
-
-- ``--strict-optional-whitelist`` attempts to suppress strict Optional-related
- errors in non-whitelisted files. Takes an arbitrary number of globs as the
- whitelist. This option is intended to be used to incrementally roll out
- ``--strict-optional`` to a large codebase that already has mypy annotations.
- However, this flag comes with some significant caveats. It does not suppress
- all errors caused by turning on ``--strict-optional``, only most of them, so
- there may still be a bit of upfront work to be done before it can be used in
- CI. It will also suppress some errors that would be caught in a
- non-strict-Optional run. Therefore, when using this flag, you should also
- re-check your code without ``--strict-optional`` to ensure new type errors
- are not introduced.
-
-- ``--disallow-untyped-defs`` reports an error whenever it encounters
- a function definition without type annotations.
-
-- ``--check-untyped-defs`` is less severe than the previous option --
- it type checks the body of every function, regardless of whether it
- has type annotations. (By default the bodies of functions without
- annotations are not type checked.) It will assume all arguments
- have type ``Any`` and always infer ``Any`` as the return type.
-
-- ``--disallow-untyped-calls`` reports an error whenever a function
- with type annotations calls a function defined without annotations
- (a short sketch illustrating these untyped-code flags follows this list).
-
-.. _disallow-subclassing-any:
-
-- ``--disallow-subclassing-any`` reports an error whenever a class
- subclasses a value of type ``Any``. This may occur when the base
- class is imported from a module that doesn't exist (when using
- :ref:`--ignore-missing-imports <ignore-missing-imports>`) or is
- ignored due to :ref:`--follow-imports=skip <follow-imports>` or a
- ``# type: ignore`` comment on the ``import`` statement. Since the
- module is silenced, the imported class is given a type of ``Any``.
- By default mypy will assume that the subclass correctly inherited
- the base class even though that may not actually be the case. This
- flag makes mypy raise an error instead.
-
-- ``--incremental`` is an experimental option that enables incremental
- type checking. When enabled, mypy caches results from previous runs
- to speed up type checking. Incremental mode can help when most parts
- of your program haven't changed since the previous mypy run.
-
-- ``--fast-parser`` enables an experimental parser implemented in C that
- is faster than the default parser and supports multi-line comment
- function annotations (see :ref:`multi_line_annotation` for the details).
-
-- ``--python-version X.Y`` will make mypy typecheck your code as if it were
- run under Python version X.Y. Without this option, mypy will default to using
- whatever version of Python is running mypy. Note that the ``-2`` and
- ``--py2`` flags are aliases for ``--python-version 2.7``. See
- :ref:`version_and_platform_checks` for more about this feature.
-
-- ``--platform PLATFORM`` will make mypy typecheck your code as if it were
- run under the given operating system. Without this option, mypy will
- default to using whatever operating system you are currently using. See
- :ref:`version_and_platform_checks` for more about this feature.
-
-- ``--show-column-numbers`` will add column offsets to error messages,
- for example, the following indicates an error in line 12, column 9
- (note that column offsets are 0-based):
-
- .. code-block:: text
-
- main.py:12:9: error: Unsupported operand types for / ("int" and "str")
-
-- ``--scripts-are-modules`` will give command line arguments that
- appear to be scripts (i.e. files whose name does not end in ``.py``)
- a module name derived from the script name rather than the fixed
- name ``__main__``. This allows checking more than one script in a
- single mypy invocation. (The default ``__main__`` is technically
- more correct, but if you have many scripts that import a large
- package, the behavior enabled by this flag is often more
- convenient.)
-
-- ``--custom-typeshed-dir DIR`` specifies the directory where mypy looks for
- typeshed stubs, instead of the typeshed that ships with mypy. This is
- primarily intended to make it easier to test typeshed changes before
- submitting them upstream, but also allows you to use a forked version of
- typeshed.
-
-.. _config-file-flag:
-
-- ``--config-file CONFIG_FILE`` causes configuration settings to be
- read from the given file. By default settings are read from ``mypy.ini``
- in the current directory. Settings override mypy's built-in defaults
- and command line flags can override settings. See :ref:`config-file`
- for the syntax of configuration files.
-
-- ``--junit-xml JUNIT_XML`` will make mypy generate a JUnit XML test
- result document with type checking results. This can make it easier
- to integrate mypy with continuous integration (CI) tools.
-
-- ``--find-occurrences CLASS.MEMBER`` will make mypy print out all
- usages of a class member based on static type information. This
- feature is experimental.
-
-- ``--cobertura-xml-report DIR`` causes mypy to generate a Cobertura
- XML type checking coverage report.
-
-- ``--warn-no-return`` causes mypy to generate errors for missing return
- statements on some execution paths. Mypy doesn't generate these errors
- for functions with ``None`` or ``Any`` return types. Mypy
- also currently ignores functions with an empty body or a body that is
- just ellipsis (``...``), since these can be valid as abstract methods.
-
-- ``--strict-boolean`` will make using non-boolean expressions in conditions
- an error. This means ``if x`` and ``while x`` are disallowed when ``x`` has any
- type other than ``bool``. Instead use explicit checks like ``if x > 0`` or
- ``while x is not None``.
-
-- ``--strict`` mode enables all optional error checking flags. You can see the
- list of flags enabled by strict mode in the full ``mypy -h`` output.
-
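-To make the untyped-code flags above a bit more concrete, here is a small
-sketch (the function names are made up; exact error wording may differ
-between releases):
-
-.. code-block:: python
-
-    # Hypothetical functions, for illustration only.
-    def get_port(config):                 # flagged by --disallow-untyped-defs
-        return config['port']
-
-    def connect(host: str) -> None:       # annotated, so its body is checked
-        port = get_port({'port': 8080})   # flagged by --disallow-untyped-calls
-
-With ``--check-untyped-defs`` the body of ``get_port`` would be type checked
-as well, with each argument treated as having type ``Any``.
-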
-For the remaining flags you can read the full ``mypy -h`` output.
-
-.. note::
-
- Command line flags are liable to change between releases.
-
-.. _integrating-mypy:
-
-Integrating mypy into another Python application
-************************************************
-
-It is possible to integrate mypy into another Python 3 application by
-importing ``mypy.api`` and calling the ``run`` function with a parameter of type ``List[str]``, containing
-what normally would have been the command line arguments to mypy.
-
-Function ``run`` returns a ``Tuple[str, str, int]``, namely
-``(<normal_report>, <error_report>, <exit_status>)``, in which ``<normal_report>``
-is what mypy normally writes to ``sys.stdout``, ``<error_report>`` is what mypy
-normally writes to ``sys.stderr`` and ``exit_status`` is the exit status mypy normally
-returns to the operating system.
-
-A trivial example of using the api is the following::
-
-    import sys
-    from mypy import api
-
-    result = api.run(sys.argv[1:])
-
-    if result[0]:
-        print('\nType checking report:\n')
-        print(result[0])  # stdout
-
-    if result[1]:
-        print('\nError report:\n')
-        print(result[1])  # stderr
-
-    print('\nExit status:', result[2])
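-
-If you want type errors to fail a larger script or build step, you can forward
-mypy's exit status. A small sketch building on the example above (the file name
-and the flag choice are arbitrary):
-
-.. code-block:: python
-
-    import sys
-    from mypy import api
-
-    # 'program.py' and --ignore-missing-imports are arbitrary examples.
-    stdout, stderr, exit_status = api.run(['--ignore-missing-imports', 'program.py'])
-    sys.stdout.write(stdout)
-    sys.stderr.write(stderr)
-    sys.exit(exit_status)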
diff --git a/docs/source/common_issues.rst b/docs/source/common_issues.rst
deleted file mode 100644
index 3477d90..0000000
--- a/docs/source/common_issues.rst
+++ /dev/null
@@ -1,386 +0,0 @@
-.. _common_issues:
-
-Common issues
-=============
-
-This section has examples of cases when you need to update your code
-to use static typing, and ideas for working around issues if mypy
-doesn't work as expected. Statically typed code is often identical to
-normal Python code, but sometimes you need to do things slightly
-differently.
-
-Can't install mypy using pip
-----------------------------
-
-If installation fails, you've probably hit one of these issues:
-
-* Mypy needs Python 3.3 or later to run.
-* You may have to run pip like this:
- ``python3 -m pip install mypy``.
-
-.. _annotations_needed:
-
-No errors reported for obviously wrong code
--------------------------------------------
-
-There are several common reasons why obviously wrong code is not
-flagged as an error.
-
-- **The function containing the error is not annotated.** Functions that
- do not have any annotations (neither for any argument nor for the
- return type) are not type-checked, and even the most blatant type
- errors (e.g. ``2 + 'a'``) pass silently. The solution is to add
- annotations.
-
- Example:
-
- .. code-block:: python
-
-    def foo(a):
-        return '(' + a.split() + ')'  # No error!
-
- This gives no error even though ``a.split()`` is "obviously" a list
- (the author probably meant ``a.strip()``). The error is reported
- once you add annotations:
-
- .. code-block:: python
-
-    def foo(a: str) -> str:
-        return '(' + a.split() + ')'
-        # error: Unsupported operand types for + ("str" and List[str])
-
- If you don't know what types to add, you can use ``Any``, but beware:
-
-- **One of the values involved has type ``Any``.** Extending the above
- example, if we were to leave out the annotation for ``a``, we'd get
- no error:
-
- .. code-block:: python
-
-    def foo(a) -> str:
-        return '(' + a.split() + ')'  # No error!
-
- The reason is that if the type of ``a`` is unknown, the type of
- ``a.split()`` is also unknown, so it is inferred as having type
- ``Any``, and it is no error to add a string to an ``Any``.
-
- If you're having trouble debugging such situations,
- :ref:`reveal_type() <reveal-type>` might come in handy.
-
- Note that sometimes library stubs have imprecise type information,
- e.g. the ``pow()`` builtin returns ``Any`` (see `typeshed issue 285
- <https://github.com/python/typeshed/issues/285>`_ for the reason).
-
-- **Some imports may be silently ignored**. Another source of
- unexpected ``Any`` values are the :ref:`"--ignore-missing-imports"
- <ignore-missing-imports>` and :ref:`"--follow-imports=skip"
- <follow-imports>` flags. When you use ``--ignore-missing-imports``,
- any imported module that cannot be found is silently replaced with
- ``Any``. When using ``--follow-imports=skip`` the same is true for
- modules for which a ``.py`` file is found but that are not specified
- on the command line. (If a ``.pyi`` stub is found it is always
- processed normally, regardless of the value of
- ``--follow-imports``.) To help debug the former situation (no
- module found at all) leave out ``--ignore-missing-imports``; to get
- clarity about the latter use ``--follow-imports=error``. You can
- read up about these and other useful flags in :ref:`command-line`.
-
-.. _silencing_checker:
-
-Spurious errors and locally silencing the checker
--------------------------------------------------
-
-You can use a ``# type: ignore`` comment to silence the type checker
-on a particular line. For example, let's say our code is using
-the C extension module ``frobnicate``, and there's no stub available.
-Mypy will complain about this, as it has no information about the
-module:
-
-.. code-block:: python
-
- import frobnicate # Error: No module "frobnicate"
- frobnicate.start()
-
-You can add a ``# type: ignore`` comment to tell mypy to ignore this
-error:
-
-.. code-block:: python
-
- import frobnicate # type: ignore
- frobnicate.start() # Okay!
-
-The second line is now fine, since the ignore comment causes the name
-``frobnicate`` to get an implicit ``Any`` type.
-
-.. note::
-
- The ``# type: ignore`` comment will only assign the implicit ``Any``
- type if mypy cannot find information about that particular module. So,
- if we did have a stub available for ``frobnicate`` then mypy would
- ignore the ``# type: ignore`` comment and typecheck the stub as usual.
-
-Types of empty collections
---------------------------
-
-You often need to specify the type when you assign an empty list or
-dict to a new variable, as mentioned earlier:
-
-.. code-block:: python
-
- a = [] # type: List[int]
-
-Without the annotation mypy can't always figure out the
-precise type of ``a``.
-
-You can use a simple empty list literal in a dynamically typed function (as the
-type of ``a`` would be implicitly ``Any`` and need not be inferred), if the type
-of the variable has been declared or inferred before, or if you perform a simple
-modification operation in the same scope (such as ``append`` for a list):
-
-.. code-block:: python
-
-    a = []  # Okay because followed by append, inferred type List[int]
-    for i in range(n):
-        a.append(i * i)
-
-However, in more complex cases an explicit type annotation can be
-required (mypy will tell you this). Often the annotation can
-make your code easier to understand, so it doesn't only help mypy but
-everybody who is reading the code!
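-
-The same applies to other empty collections, for example an empty dict (a
-small sketch; the variable name is arbitrary):
-
-.. code-block:: python
-
-    from typing import Dict
-
-    # 'counts' is just an arbitrary example name.
-    counts = {}  # type: Dict[str, int]
-    counts['spam'] = 1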
-
-Redefinitions with incompatible types
--------------------------------------
-
-Each name within a function only has a single 'declared' type. You can
-reuse for loop indices etc., but if you want to use a variable with
-multiple types within a single function, you may need to declare it
-with the ``Any`` type.
-
-.. code-block:: python
-
-    def f() -> None:
-        n = 1
-        ...
-        n = 'x'  # Type error: n has type int
-
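-If you really need one name to hold values of multiple types, declaring it
-with the ``Any`` type (as mentioned above) silences the error, at the cost of
-losing type checking for that variable. A minimal sketch:
-
-.. code-block:: python
-
-    from typing import Any
-
-    def f() -> None:
-        n = 1  # type: Any
-        ...
-        n = 'x'  # OK, n was declared as Any
-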
-.. note::
-
- This limitation could be lifted in a future mypy
- release.
-
-Note that you can redefine a variable with a more *precise* or a more
-concrete type. For example, you can redefine a sequence (which does
-not support ``sort()``) as a list and sort it in-place:
-
-.. code-block:: python
-
-    def f(x: Sequence[int]) -> None:
-        # Type of x is Sequence[int] here; we don't know the concrete type.
-        x = list(x)
-        # Type of x is List[int] here.
-        x.sort()  # Okay!
-
-Declaring a supertype as variable type
---------------------------------------
-
-Sometimes the inferred type is a subtype (subclass) of the desired
-type. The type inference uses the first assignment to infer the type
-of a name (assume here that ``Shape`` is the base class of both
-``Circle`` and ``Triangle``):
-
-.. code-block:: python
-
-    shape = Circle()  # Infer shape to be Circle
-    ...
-    shape = Triangle()  # Type error: Triangle is not a Circle
-
-You can just give an explicit type for the variable in cases such as the
-above example:
-
-.. code-block:: python
-
-    shape = Circle()  # type: Shape  # The variable shape can be any Shape,
-                                     # not just Circle
-    ...
-    shape = Triangle()  # OK
-
-Complex type tests
-------------------
-
-Mypy can usually infer the types correctly when using ``isinstance()``
-type tests, but for other kinds of checks you may need to add an
-explicit type cast:
-
-.. code-block:: python
-
-    def f(o: object) -> None:
-        if type(o) is int:
-            o = cast(int, o)
-            g(o + 1)  # This would be an error without the cast
-            ...
-        else:
-            ...
-
-.. note::
-
- Note that the ``object`` type used in the above example is similar
- to ``Object`` in Java: it only supports operations defined for *all*
- objects, such as equality and ``isinstance()``. The type ``Any``,
- in contrast, supports all operations, even if they may fail at
- runtime. The cast above would have been unnecessary if the type of
- ``o`` was ``Any``.
-
-Mypy can't infer the type of ``o`` after the ``type()`` check
-because it only knows about ``isinstance()`` (and the latter is better
-style anyway). We can write the above code without a cast by using
-``isinstance()``:
-
-.. code-block:: python
-
-    def f(o: object) -> None:
-        if isinstance(o, int):  # Mypy understands isinstance checks
-            g(o + 1)  # Okay; type of o is inferred as int here
-            ...
-
-Type inference in mypy is designed to work well in common cases, to be
-predictable and to let the type checker give useful error
-messages. More powerful type inference strategies often have complex
-and difficult-to-predict failure modes and could result in very
-confusing error messages. The tradeoff is that you as a programmer
-sometimes have to give the type checker a little help.
-
-.. _version_and_platform_checks:
-
-Python version and system platform checks
------------------------------------------
-
-Mypy supports the ability to perform Python version checks and platform
-checks (e.g. Windows vs Posix), ignoring code paths that won't be run on
-the targeted Python version or platform. This allows you to more effectively
-typecheck code that supports multiple versions of Python or multiple operating
-systems.
-
-More specifically, mypy will understand the use of ``sys.version_info`` and
-``sys.platform`` checks within ``if/elif/else`` statements. For example:
-
-.. code-block:: python
-
-    import sys
-
-    # Distinguishing between different versions of Python:
-    if sys.version_info >= (3, 5):
-        ...  # Python 3.5+ specific definitions and imports
-    elif sys.version_info[0] >= 3:
-        ...  # Python 3 specific definitions and imports
-    else:
-        ...  # Python 2 specific definitions and imports
-
-    # Distinguishing between different operating systems:
-    if sys.platform.startswith("linux"):
-        ...  # Linux-specific code
-    elif sys.platform == "darwin":
-        ...  # Mac-specific code
-    elif sys.platform == "win32":
-        ...  # Windows-specific code
-    else:
-        ...  # Other systems
-
-.. note::
-
- Mypy currently does not support more complex checks, and does not assign
- any special meaning when assigning a ``sys.version_info`` or ``sys.platform``
- check to a variable. This may change in future versions of mypy.
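-
-For example, the following pattern is *not* understood (a sketch of the
-limitation described in the note; the flag name is made up):
-
-.. code-block:: python
-
-    import sys
-
-    PY35 = sys.version_info >= (3, 5)  # PY35 is a made-up name
-
-    if PY35:
-        # Mypy does not narrow on the stored flag; this branch is checked
-        # regardless of the targeted Python version.
-        ...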
-
-By default, mypy will use your current version of Python and your current
-operating system as default values for ``sys.version_info`` and
-``sys.platform``.
-
-To target a different Python version, use the ``--python-version X.Y`` flag.
-For example, to verify your code typechecks if it were run using Python 2, pass
-in ``--python-version 2.7`` from the command line. Note that you do not need
-to have Python 2.7 installed to perform this check.
-
-To target a different operating system, use the ``--platform PLATFORM`` flag.
-For example, to verify your code typechecks if it were run in Windows, pass
-in ``--platform win32``. See the documentation for
-`sys.platform <https://docs.python.org/3/library/sys.html#sys.platform>`_
-for examples of valid platform parameters.
-
-.. _reveal-type:
-
-Displaying the type of an expression
-------------------------------------
-
-You can use ``reveal_type(expr)`` to ask mypy to display the inferred
-static type of an expression. This can be useful when you don't quite
-understand how mypy handles a particular piece of code. Example:
-
-.. code-block:: python
-
- reveal_type((1, 'hello')) # Revealed type is 'Tuple[builtins.int, builtins.str]'
-
-.. note::
-
- ``reveal_type`` is only understood by mypy and doesn't exist
- in Python. If you try to run your program, you'll have to remove
- any ``reveal_type`` calls before you can run your code.
- ``reveal_type`` is always available and you don't need to import it.
-
-.. _import-cycles:
-
-Import cycles
--------------
-
-An import cycle occurs where module A imports module B and module B
-imports module A (perhaps indirectly, e.g. ``A -> B -> C -> A``).
-Sometimes in order to add type annotations you have to add extra
-imports to a module and those imports cause cycles that didn't exist
-before. If those cycles become a problem when running your program,
-there's a trick: if the import is only needed for type annotations in
-forward references (string literals) or comments, you can write the
-imports inside ``if TYPE_CHECKING:`` so that they are not executed at runtime.
-Example:
-
-File ``foo.py``:
-
-.. code-block:: python
-
-    from typing import List, TYPE_CHECKING
-
-    if TYPE_CHECKING:
-        import bar
-
-    def listify(arg: 'bar.BarClass') -> 'List[bar.BarClass]':
-        return [arg]
-
-File ``bar.py``:
-
-.. code-block:: python
-
-    from typing import List
-    from foo import listify
-
-    class BarClass:
-        def listifyme(self) -> 'List[BarClass]':
-            return listify(self)
-
-.. note::
-
- The ``TYPE_CHECKING`` constant defined by the ``typing`` module
- is ``False`` at runtime but ``True`` while type checking.
-
-Python 3.5.1 doesn't have ``typing.TYPE_CHECKING``. An alternative is
-to define a constant named ``MYPY`` that has the value ``False``
-at runtime. Mypy considers it to be ``True`` when type checking.
-Here's the above example modified to use ``MYPY``:
-
-.. code-block:: python
-
-    from typing import List
-
-    MYPY = False
-    if MYPY:
-        import bar
-
-    def listify(arg: 'bar.BarClass') -> 'List[bar.BarClass]':
-        return [arg]
diff --git a/docs/source/conf.py b/docs/source/conf.py
deleted file mode 100644
index cf64842..0000000
--- a/docs/source/conf.py
+++ /dev/null
@@ -1,268 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Mypy documentation build configuration file, created by
-# sphinx-quickstart on Sun Sep 14 19:50:35 2014.
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath('../..'))
-
-from mypy.version import __version__ as mypy_version
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = []
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix of source filenames.
-source_suffix = '.rst'
-
-# The encoding of source files.
-#source_encoding = 'utf-8-sig'
-
-# The master toctree document.
-master_doc = 'index'
-
-# General information about the project.
-project = u'Mypy'
-copyright = u'2016, Jukka Lehtosalo'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-version = mypy_version.split('-')[0]
-# The full version, including alpha/beta/rc tags.
-release = mypy_version
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-#today = ''
-# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = []
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-#default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-#add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-#show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-#keep_warnings = False
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-try:
- import sphinx_rtd_theme
-except:
- html_theme = 'default'
-else:
- html_theme = 'sphinx_rtd_theme'
- html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-#html_theme_options = {}
-
-# Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# "<project> v<release> documentation".
-#html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-#html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-#html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-#html_static_path = ['_static']
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-#html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-#html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-#html_additional_pages = {}
-
-# If false, no module index is generated.
-#html_domain_indices = True
-
-# If false, no index is generated.
-#html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-#html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'Mypydoc'
-
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-#'papersize': 'letterpaper',
-
-# The font size ('10pt', '11pt' or '12pt').
-#'pointsize': '10pt',
-
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
-latex_documents = [
- ('index', 'Mypy.tex', u'Mypy Documentation',
- u'Jukka', 'manual'),
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-#latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#latex_use_parts = False
-
-# If true, show page references after internal links.
-#latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-#latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
- ('index', 'mypy', u'Mypy Documentation',
- [u'Jukka Lehtosalo'], 1)
-]
-
-# If true, show URL addresses after external links.
-#man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
- ('index', 'Mypy', u'Mypy Documentation',
- u'Jukka', 'Mypy', 'One line description of project.',
- 'Miscellaneous'),
-]
-
-# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
-
-# If false, no module index is generated.
-#texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
-
-# If true, do not generate a @detailmenu in the "Top" node's menu.
-#texinfo_no_detailmenu = False
-
-rst_prolog = '.. |...| unicode:: U+2026 .. ellipsis\n'
diff --git a/docs/source/config_file.rst b/docs/source/config_file.rst
deleted file mode 100644
index 001f6c9..0000000
--- a/docs/source/config_file.rst
+++ /dev/null
@@ -1,184 +0,0 @@
-.. _config-file:
-
-The mypy configuration file
-===========================
-
-Mypy supports reading configuration settings from a file. By default
-it uses the file ``mypy.ini`` in the current directory; the
-``--config-file`` command-line flag can be used to read a different
-file instead (see :ref:`--config-file <config-file-flag>`).
-
-Most flags correspond closely to :ref:`command-line flags
-<command-line>` but there are some differences in flag names and some
-flags may take a different value based on the module being processed.
-
-The configuration file format is the usual
-`ini file <https://docs.python.org/3.6/library/configparser.html>`_
-format. It should contain section names in square brackets and flag
-settings of the form `NAME = VALUE`. Comments start with ``#``
-characters.
-
-- A section named ``[mypy]`` must be present. This specifies
- the global flags.
-
-- Additional sections named ``[mypy-PATTERN1,PATTERN2,...]`` may be
- present, where ``PATTERN1``, ``PATTERN2`` etc. are `fnmatch patterns
- <https://docs.python.org/3.6/library/fnmatch.html>`_
- separated by commas. These sections specify additional flags that
- only apply to *modules* whose name matches at least one of the patterns.
-
-Global flags
-************
-
-The following global flags may only be set in the global section
-(``[mypy]``).
-
-- ``python_version`` (string) specifies the Python version used to
- parse and check the target program. The format is ``DIGIT.DIGIT``
- for example ``2.7``. The default is the version of the Python
- interpreter used to run mypy.
-
-- ``platform`` (string) specifies the OS platform for the target
- program, for example ``darwin`` or ``win32`` (meaning OS X or
- Windows, respectively). The default is the current platform as
- revealed by Python's ``sys.platform`` variable.
-
-- ``custom_typing_module`` (string) specifies the name of an
- alternative module which is to be considered equivalent to the
- ``typing`` module.
-
-- ``custom_typeshed_dir`` (string) specifies the name of an
- alternative directory which is used to look for stubs instead of the
- default ``typeshed`` directory.
-
-- ``mypy_path`` (string) specifies the paths to use, after trying the paths
- from ``MYPYPATH`` environment variable. Useful if you'd like to keep stubs
- in your repo, along with the config file.
-
-- ``warn_incomplete_stub`` (Boolean, default False) warns for missing
- type annotation in typeshed. This is only relevant in combination
- with ``check_untyped_defs``.
-
-- ``warn_redundant_casts`` (Boolean, default False) warns about
- casting an expression to its inferred type.
-
-- ``warn_unused_ignores`` (Boolean, default False) warns about
- unneeded ``# type: ignore`` comments.
-
-- ``strict_optional`` (Boolean, default False) enables experimental
- strict Optional checks.
-
-- ``scripts_are_modules`` (Boolean, default False) makes script ``x``
- become module ``x`` instead of ``__main__``. This is useful when
- checking multiple scripts in a single run.
-
-- ``verbosity`` (integer, default 0) controls how much debug output
- will be generated. Higher numbers are more verbose.
-
-- ``pdb`` (Boolean, default False) invokes pdb on fatal error.
-
-- ``show_traceback`` (Boolean, default False) shows traceback on fatal
- error.
-
-- ``dump_type_stats`` (Boolean, default False) dumps stats about type
- definitions.
-
-- ``dump_inference_stats`` (Boolean, default False) dumps stats about
- type inference.
-
-- ``fast_parser`` (Boolean, default False) enables the experimental
- fast parser.
-
-- ``incremental`` (Boolean, default False) enables the experimental
- module cache.
-
-- ``cache_dir`` (string, default ``.mypy_cache``) stores module cache
- info in the given folder in incremental mode.
-
-- ``show_error_context`` (Boolean, default False) shows
- context notes before errors.
-
-- ``show_column_numbers`` (Boolean, default False) shows column numbers in
- error messages.
-
-
-.. _per-module-flags:
-
-Per-module flags
-****************
-
-The following flags may vary per module. They may also be specified in
-the global section; the global section provides defaults which are
-overridden by the pattern sections matching the module name.
-
-.. note::
-
- If multiple pattern sections match a module they are processed in
- unspecified order.
-
-- ``follow_imports`` (string, default ``normal``) directs what to do
- with imports when the imported module is found as a ``.py`` file and
- not part of the files, modules and packages on the command line.
- The four possible values are ``normal``, ``silent``, ``skip`` and
- ``error``. For explanations see the discussion for the
- :ref:`--follow-imports <follow-imports>` command line flag. Note
- that if pattern matching is used, the pattern should match the name
- of the *imported* module, not the module containing the import
- statement.
-
-- ``ignore_missing_imports`` (Boolean, default False) suppress error
- messages about imports that cannot be resolved. Note that if
- pattern matching is used, the pattern should match the name of the
- *imported* module, not the module containing the import statement.
-
-- ``silent_imports`` (Boolean, deprecated) equivalent to
- ``follow_imports=skip`` plus ``ignore_missing_imports=True``.
-
-- ``almost_silent`` (Boolean, deprecated) equivalent to
- ``follow_imports=skip``.
-
-- ``disallow_untyped_calls`` (Boolean, default False) disallows
- calling functions without type annotations from functions with type
- annotations.
-
-- ``disallow_untyped_defs`` (Boolean, default False) disallows
- defining functions without type annotations or with incomplete type
- annotations.
-
-- ``check_untyped_defs`` (Boolean, default False) type-checks the
- interior of functions without type annotations.
-
-- ``debug_cache`` (Boolean, default False) writes the incremental
- cache JSON files using a more readable, but slower format.
-
-- ``show_none_errors`` (Boolean, default True) shows errors related
- to strict ``None`` checking, if the global ``strict_optional`` flag
- is enabled.
-
-- ``ignore_errors`` (Boolean, default False) ignores all non-fatal
- errors.
-
-- ``warn_no_return`` (Boolean, default False) shows errors for
- missing return statements on some execution paths.
-
-Example
-*******
-
-You might put this in your ``mypy.ini`` file at the root of your repo:
-
-.. code-block:: text
-
- [mypy]
- python_version = 2.7
- [mypy-foo.*]
- disallow_untyped_defs = True
-
-This automatically sets ``--python-version 2.7`` (a.k.a. ``--py2``)
-for all mypy runs in this tree, and also selectively turns on the
-``--disallow-untyped-defs`` flag for all modules in the ``foo``
-package. This issues an error for function definitions without
-type annotations in that subdirectory only.
-
-.. note::
-
- Configuration flags are liable to change between releases.
diff --git a/docs/source/duck_type_compatibility.rst b/docs/source/duck_type_compatibility.rst
deleted file mode 100644
index a128b69..0000000
--- a/docs/source/duck_type_compatibility.rst
+++ /dev/null
@@ -1,40 +0,0 @@
-Duck type compatibility
------------------------
-
-In Python, certain types are compatible even though they aren't subclasses of
-each other. For example, ``int`` objects are valid whenever ``float`` objects
-are expected. Mypy supports this idiom via *duck type compatibility*. As of
-now, this is only supported for a small set of built-in types:
-
-* ``int`` is duck type compatible with ``float`` and ``complex``.
-* ``float`` is duck type compatible with ``complex``.
-* In Python 2, ``str`` is duck type compatible with ``unicode``.
-
-.. note::
-
- Mypy support for Python 2 is still work in progress.
-
-For example, mypy considers an ``int`` object to be valid whenever a
-``float`` object is expected. Thus code like this is nice and clean
-and also behaves as expected:
-
-.. code-block:: python
-
-    import math
-
-    def degrees_to_radians(degrees: float) -> float:
-        return math.pi * degrees / 180
-
-    n = 90  # Inferred type 'int'
-    print(degrees_to_radians(n))  # Okay!
-
-.. note::
-
- Note that in Python 2 a ``str`` object with non-ASCII characters is
- often *not valid* when a unicode string is expected. The mypy type
- system does not consider a string with non-ASCII values as a
- separate type so some programs with this kind of error will
- silently pass type checking. In Python 3 ``str`` and ``bytes`` are
- separate, unrelated types and this kind of error is easy to
- detect. This is a good reason for preferring Python 3 over Python 2!
-
- See :ref:`text-and-anystr` for details on how to enforce that a
- value must be a unicode string in a cross-compatible way.
diff --git a/docs/source/dynamic_typing.rst b/docs/source/dynamic_typing.rst
deleted file mode 100644
index ba76442..0000000
--- a/docs/source/dynamic_typing.rst
+++ /dev/null
@@ -1,86 +0,0 @@
-.. _dynamic_typing:
-
-
-Dynamically typed code
-======================
-
-As mentioned earlier, bodies of functions that don't have any
-explicit types in their function annotation are dynamically typed
-(operations are checked at runtime). Code outside functions is
-statically typed by default, and types of variables are inferred. This
-usually does the right thing, but you can also make any variable
-dynamically typed by defining it explicitly with the type ``Any``:
-
-.. code-block:: python
-
- from typing import Any
-
- s = 1 # Statically typed (type int)
- d = 1 # type: Any # Dynamically typed (type Any)
- s = 'x' # Type check error
- d = 'x' # OK
-
-Operations on Any values
-------------------------
-
-You can do anything using a value with type ``Any``, and the type checker
-does not complain:
-
-.. code-block:: python
-
-    def f(x: Any) -> int:
-        # All of these are valid!
-        x.foobar(1, y=2)
-        print(x[3] + 'f')
-        if x:
-            x.z = x(2)
-        open(x).read()
-        return x
-
-Values derived from an ``Any`` value also often have the type ``Any``
-implicitly, as mypy can't infer a more precise result type. For
-example, if you get the attribute of an ``Any`` value or call a
-``Any`` value the result is ``Any``:
-
-.. code-block:: python
-
-    def f(x: Any) -> None:
-        y = x.foo()  # y has type Any
-        y.bar()      # Okay as well!
-
-``Any`` types may propagate through your program, making type checking
-less effective, unless you are careful.
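-
-For example (a small sketch; ``get_config`` is a made-up function):
-
-.. code-block:: python
-
-    from typing import Any
-
-    def get_config() -> Any:  # made-up function returning Any
-        ...
-
-    port = get_config()  # port silently gets type Any
-    port.upper()         # no error reported, even if this fails at runtime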
-
-Any vs. object
---------------
-
-The type ``object`` is another type that can have an instance of arbitrary
-type as a value. Unlike ``Any``, ``object`` is an ordinary static type (it
-is similar to ``Object`` in Java), and only operations valid for *all*
-types are accepted for ``object`` values. These are all valid:
-
-.. code-block:: python
-
-    def f(o: object) -> None:
-        if o:
-            print(o)
-        print(isinstance(o, int))
-        o = 2
-        o = 'foo'
-
-These are, however, flagged as errors, since not all objects support these
-operations:
-
-.. code-block:: python
-
-    def f(o: object) -> None:
-        o.foo()  # Error!
-        o + 2    # Error!
-        open(o)  # Error!
-        n = 1  # type: int
-        n = o  # Error!
-
-You can use ``cast()`` (see chapter :ref:`casts`) or ``isinstance`` to
-go from a general type such as ``object`` to a more specific
-type (subtype) such as ``int``. ``cast()`` is not needed with
-dynamically typed values (values with type ``Any``).
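-
-A minimal sketch of both approaches (the function name is made up):
-
-.. code-block:: python
-
-    from typing import cast
-
-    def add_one(o: object) -> int:  # hypothetical example function
-        if isinstance(o, int):      # mypy narrows o to int in this branch
-            return o + 1
-        n = cast(int, o)            # no runtime check; we just assert the type
-        return n + 1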
diff --git a/docs/source/faq.rst b/docs/source/faq.rst
deleted file mode 100644
index 9fd73b4..0000000
--- a/docs/source/faq.rst
+++ /dev/null
@@ -1,270 +0,0 @@
-Frequently Asked Questions
-==========================
-
-Why have both dynamic and static typing?
-****************************************
-
-Dynamic typing can be flexible, powerful, convenient and easy. But
-it's not always the best approach; there are good reasons why many
-developers choose to use statically typed languages.
-
-Here are some potential benefits of mypy-style static typing:
-
-- Static typing can make programs easier to understand and
- maintain. Type declarations can serve as machine-checked
- documentation. This is important as code is typically read much more
- often than modified, and this is especially important for large and
- complex programs.
-
-- Static typing can help you find bugs earlier and with less testing
- and debugging. Especially in large and complex projects this can be
- a major time-saver.
-
-- Static typing can help you find difficult-to-find bugs before your
- code goes into production. This can improve reliability and reduce
- the number of security issues.
-
-- Static typing makes it practical to build very useful development
- tools that can improve programming productivity or software quality,
- including IDEs with precise and reliable code completion, static
- analysis tools, etc.
-
-- You can get the benefits of both dynamic and static typing in a
- single language. Dynamic typing can be perfect for a small project
- or for writing the UI of your program, for example. As your program
- grows, you can adapt tricky application logic to static typing to
- help maintenance.
-
-See also the `front page <http://www.mypy-lang.org>`_ of the mypy web
-site.
-
-Would my project benefit from static typing?
-********************************************
-
-For many projects dynamic typing is perfectly fine (we think that
-Python is a great language). But sometimes your projects demand bigger
-guns, and that's when mypy may come in handy.
-
-If some of these ring true for your projects, mypy (and static typing)
-may be useful:
-
-- Your project is large or complex.
-
-- Your codebase must be maintained for a long time.
-
-- Multiple developers are working on the same code.
-
-- Running tests takes a lot of time or work (type checking may help
- you find errors early in development, reducing the number of testing
- iterations).
-
-- Some project members (devs or management) don't like dynamic typing,
- but others prefer dynamic typing and Python syntax. Mypy could be a
- solution that everybody finds easy to accept.
-
-- You want to future-proof your project even if currently none of the
- above really apply.
-
-Can I use mypy to type check my existing Python code?
-*****************************************************
-
-It depends. Compatibility is pretty good, but some Python features are
-not yet implemented or fully supported. The ultimate goal is to make
-using mypy practical for most Python code. Code that uses complex
-introspection or metaprogramming may be impractical to type check, but
-it should still be possible to use static typing in other parts of a
-program.
-
-Will static typing make my programs run faster?
-***********************************************
-
-Mypy only does static type checking and it does not improve
-performance. It has a minimal performance impact. In the future, there
-could be other tools that can compile statically typed mypy code to C
-modules or to efficient JVM bytecode, for example, but this is outside
-the scope of the mypy project. It may also be possible to modify
-existing Python VMs to take advantage of static type information, but
-whether this is feasible is still unknown. This is nontrivial since
-the runtime types do not necessarily correspond to the static types.
-
-How do I type check my Python 2 code?
-*************************************
-
-You can use a `comment-based function annotation syntax
-<https://www.python.org/dev/peps/pep-0484/#suggested-syntax-for-python-2-7-and-straddling-code>`_
-and use the ``--py2`` command-line option to type check your Python 2 code.
-You'll also need to install ``typing`` for Python 2 via ``pip install typing``.
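-
-For example, a Python 2 function using the comment-based syntax looks like
-this (a small sketch):
-
-.. code-block:: python
-
-    from typing import List
-
-    def scale(values, factor):  # made-up example function
-        # type: (List[float], float) -> List[float]
-        return [v * factor for v in values]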
-
-Is mypy free?
-*************
-
-Yes. Mypy is free software, and it can also be used for commercial and
-proprietary projects. Mypy is available under the MIT license.
-
-Why not use structural subtyping?
-*********************************
-
-Mypy primarily uses `nominal subtyping
-<https://en.wikipedia.org/wiki/Nominative_type_system>`_ instead of
-`structural subtyping
-<https://en.wikipedia.org/wiki/Structural_type_system>`_. Some argue
-that structural subtyping is better suited for languages with duck
-typing such as Python.
-
-Here are some reasons why mypy uses nominal subtyping:
-
-1. It is easy to generate short and informative error messages when
- using a nominal type system. This is especially important when
- using type inference.
-
-2. Python's isinstance tests are basically nominal, and they are
- widely used in programs. It is not clear how to support isinstance
- in a purely structural type system while remaining compatible with
- Python idioms.
-
-3. Many programmers are already familiar with nominal subtyping and it
- has been successfully used in languages such as Java, C++ and
- C#. Only a few languages use structural subtyping.
-
-However, structural subtyping can also be useful. Structural subtyping
-is likely to be added to mypy in the future, even though we
-expect that most mypy programs will still primarily use nominal
-subtyping.
-
-I like Python and I have no need for static typing
-**************************************************
-
-That wasn't really a question, was it? Mypy is not aimed at replacing
-Python. The goal is to give more options for Python programmers, to
-make Python a more competitive alternative to other statically typed
-languages in large projects, to improve programmer productivity and to
-improve software quality.
-
-How are mypy programs different from normal Python?
-***************************************************
-
-Since you use a vanilla Python implementation to run mypy programs,
-mypy programs are also Python programs. The type checker may give
-warnings for some valid Python code, but the code is still always
-runnable. Also, some Python features and syntax are still not
-supported by mypy, but this is gradually improving.
-
-The obvious difference is the availability of static type
-checking. The section :ref:`common_issues` mentions some
-modifications to Python code that may be required to make code type
-check without errors. Also, your code must make attributes explicit and
-use an explicit protocol representation. For example, you may want to
-subclass an Abstract Base Class such as ``typing.Iterable``.
-
-Mypy will support modular, efficient type checking, and this seems to
-rule out type checking some language features, such as arbitrary
-runtime addition of methods. However, it is likely that many of these
-features will be supported in a restricted form (for example, runtime
-modification is only supported for classes or methods registered as
-dynamic or 'patchable').
-
-How is mypy different from PyPy?
-********************************
-
-*This answer relates to PyPy as a Python implementation. See also the answer related to RPython below.*
-
-Mypy and PyPy are orthogonal. Mypy does static type checking, i.e. it
-is basically a linter, but static typing has no runtime effect,
-whereas the PyPy is an Python implementation. You can use PyPy to run
-mypy programs.
-
-How is mypy different from Cython?
-**********************************
-
-`Cython <http://cython.org/>`_ is a variant of Python that supports
-compilation to CPython C modules. It can give major speedups to
-certain classes of programs compared to CPython, and it provides
-static typing (though this is different from mypy). Mypy differs in
-the following aspects, among others:
-
-- Cython is much more focused on performance than mypy. Mypy is only
- about static type checking, and increasing performance is not a
- direct goal.
-
-- The mypy syntax is arguably simpler and more "Pythonic" (no
- cdef/cpdef, etc.) for statically typed code.
-
-- The mypy syntax is compatible with Python. Mypy programs are normal
- Python programs that can be run using any Python
- implementation. Cython has many incompatible extensions to Python
- syntax, and Cython programs generally cannot be run without first
- compiling them to CPython extension modules via C. Cython also has a
- pure Python mode, but it seems to support only a subset of Cython
- functionality, and the syntax is quite verbose.
-
-- Mypy has a different set of type system features. For example, mypy
- has genericity (parametric polymorphism), function types and
- bidirectional type inference, which are not supported by
- Cython. (Cython has fused types that are different from but related to
- mypy generics. Mypy also has a similar feature, type variables with
- value restrictions, as an extension of generics.)
-
-- The mypy type checker knows about the static types of many Python
- stdlib modules and can effectively type check code that uses them.
-
-- Cython supports accessing C functions directly and many features are
- defined in terms of translating them to C or C++. Mypy just uses
- Python semantics, and mypy does not deal with accessing C library
- functionality.
-
-How is mypy different from Nuitka?
-**********************************
-
-`Nuitka <http://nuitka.net/>`_ is a static compiler that can translate
-Python programs to C++. Nuitka integrates with the CPython
-runtime. Nuitka has additional future goals, such as using type
-inference and whole-program analysis to further speed up code. Here
-are some differences:
-
-- Nuitka is primarily focused on speeding up Python code. Mypy focuses
- on static type checking and facilitating better tools.
-
-- Whole-program analysis tends to be slow and scale poorly to large or
- complex programs. It is still unclear if Nuitka can solve these
- issues. Mypy does not use whole-program analysis and will support
- modular type checking (though this has not been implemented yet).
-
-How is mypy different from RPython or Shed Skin?
-************************************************
-
-`RPython <http://doc.pypy.org/en/latest/coding-guide.html>`_ and `Shed
-Skin <http://shed-skin.blogspot.co.uk/>`_ are basically statically
-typed subsets of Python. Mypy does the following important things
-differently:
-
-- RPython is primarily designed for implementing virtual machines;
- mypy is a general-purpose tool.
-
-- Mypy supports both static and dynamic typing. Dynamically typed and
- statically typed code can be freely mixed and can interact
- seamlessly.
-
-- Mypy aims to support (in the future) fast and modular type
- checking. Both RPython and Shed Skin use whole-program type
- inference which is very slow, does not scale well to large programs
- and often produces confusing error messages. Mypy can support
- modularity since it only uses local type inference; static type
- checking depends on having type annotations for function
- signatures.
-
-- Mypy will support introspection, dynamic loading of code and many
- other dynamic language features (though using these may make static
- typing less effective). RPython and Shed Skin only support a
- restricted Python subset without several of these features.
-
-- Mypy supports user-defined generic types.
-
-Mypy is a cool project. Can I help?
-***********************************
-
-Any help is much appreciated! `Contact
-<http://www.mypy-lang.org/contact.html>`_ the developers if you would
-like to contribute. Any help related to development, design,
-publicity, documentation, testing, web site maintenance, financing,
-etc. can be helpful. You can learn a lot by contributing, and anybody
-can help, even beginners! However, some knowledge of compilers and/or
-type systems is essential if you want to work on mypy internals.
diff --git a/docs/source/function_overloading.rst b/docs/source/function_overloading.rst
deleted file mode 100644
index b55cddd..0000000
--- a/docs/source/function_overloading.rst
+++ /dev/null
@@ -1,60 +0,0 @@
-Function overloading in stubs
-=============================
-
-Sometimes you have a library function that seems to call for two or
-more signatures. That's okay -- you can define multiple *overloaded*
-instances of a function with the same name but different signatures in
-a stub file (this feature is not supported for user code, at least not
-yet) using the ``@overload`` decorator. For example, we can define an
-``abs`` function that works for both ``int`` and ``float`` arguments:
-
-.. code-block:: python
-
- # This is a stub file!
-
- from typing import overload
-
- @overload
- def abs(n: int) -> int: pass
-
- @overload
- def abs(n: float) -> float: pass
-
-Note that we can't use ``Union[int, float]`` as the argument type,
-since this wouldn't allow us to express that the return
-type depends on the argument type.
-
-Now if we import ``abs`` as defined in the above library stub, we can
-write code like this, and the types are inferred correctly:
-
-.. code-block:: python
-
- n = abs(-2) # 2 (int)
- f = abs(-1.5) # 1.5 (float)
-
-Overloaded function variants are still ordinary Python functions and
-they still define a single runtime object. The following code is
-thus valid:
-
-.. code-block:: python
-
- my_abs = abs
- my_abs(-2) # 2 (int)
- my_abs(-1.5) # 1.5 (float)
-
-The overload variants must be adjacent in the code. This makes code
-clearer, as you don't have to hunt for overload variants across the
-file.
-
-.. note::
-
- As generic type variables are erased at runtime when constructing
- instances of generic types, an overloaded function cannot have
- variants that only differ in a generic type argument,
- e.g. ``List[int]`` versus ``List[str]``.
-
-.. note::
-
- If you are writing a regular module rather than a stub, you can
- often use a type variable with a value restriction to represent
- functions like ``abs`` above (see :ref:`type-variable-value-restriction`).
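-
-   For instance, a minimal sketch in a regular module (the names ``NumT``
-   and ``my_abs`` are illustrative):
-
-   .. code-block:: python
-
-      from typing import TypeVar
-
-      NumT = TypeVar('NumT', int, float)
-
-      def my_abs(n: NumT) -> NumT:
-          return n if n >= 0 else -n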
diff --git a/docs/source/generics.rst b/docs/source/generics.rst
deleted file mode 100644
index f6c0640..0000000
--- a/docs/source/generics.rst
+++ /dev/null
@@ -1,390 +0,0 @@
-Generics
-========
-
-Defining generic classes
-************************
-
-The built-in collection classes are generic classes. Generic types
-have one or more type parameters, which can be arbitrary types. For
-example, ``Dict[int, str]`` has the type parameters ``int`` and
-``str``, and ``List[int]`` has a type parameter ``int``.
-
-Programs can also define new generic classes. Here is a very simple
-generic class that represents a stack:
-
-.. code-block:: python
-
- from typing import TypeVar, Generic, List
-
- T = TypeVar('T')
-
- class Stack(Generic[T]):
- def __init__(self) -> None:
- # Create an empty list with items of type T
- self.items = [] # type: List[T]
-
- def push(self, item: T) -> None:
- self.items.append(item)
-
- def pop(self) -> T:
- return self.items.pop()
-
- def empty(self) -> bool:
- return not self.items
-
-The ``Stack`` class can be used to represent a stack of any type:
-``Stack[int]``, ``Stack[Tuple[int, str]]``, etc.
-
-Using ``Stack`` is similar to built-in container types:
-
-.. code-block:: python
-
- # Construct an empty Stack[int] instance
- stack = Stack[int]()
- stack.push(2)
- stack.pop()
- stack.push('x') # Type error
-
-Type inference works for user-defined generic types as well:
-
-.. code-block:: python
-
- def process(stack: Stack[int]) -> None: ...
-
- process(Stack()) # Argument has inferred type Stack[int]
-
-Construction of instances of generic types is also type checked:
-
-.. code-block:: python
-
- class Box(Generic[T]):
- def __init__(self, content: T) -> None:
- self.content = content
-
- Box(1) # OK, inferred type is Box[int]
- Box[int](1) # Also OK
- s = 'some string'
- Box[int](s) # Type error
-
-Generic class internals
-***********************
-
-You may wonder what happens at runtime when you index
-``Stack``. Actually, indexing ``Stack`` returns essentially a copy
-of ``Stack`` that returns instances of the original class on
-instantiation:
-
->>> print(Stack)
-__main__.Stack
->>> print(Stack[int])
-__main__.Stack[int]
->>> print(Stack[int]().__class__)
-__main__.Stack
-
-Note that built-in types ``list``, ``dict`` and so on do not support
-indexing in Python. This is why we have the aliases ``List``, ``Dict``
-and so on in the ``typing`` module. Indexing these aliases gives
-you a class that directly inherits from the target class in Python:
-
->>> from typing import List
->>> List[int]
-typing.List[int]
->>> List[int].__bases__
-(<class 'list'>, typing.MutableSequence)
-
-Generic types can be instantiated or subclassed like ordinary classes,
-but the above examples illustrate that type variables are erased at
-runtime. Generic ``Stack`` instances are just ordinary
-Python objects, and they have no extra runtime overhead or magic due
-to being generic, other than a metaclass that overloads the indexing
-operator.
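-
-For example, here is a minimal sketch of subclassing, reusing the ``Stack``
-class defined above (the ``IntStack`` name is illustrative):
-
-.. code-block:: python
-
-    class IntStack(Stack[int]):
-        """A stack that only holds int items."""
-
-    stack = IntStack()
-    stack.push(2)      # OK
-    stack.push('x')    # Type error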
-
-.. _generic-functions:
-
-Generic functions
-*****************
-
-Generic type variables can also be used to define generic functions:
-
-.. code-block:: python
-
- from typing import TypeVar, Sequence
-
- T = TypeVar('T') # Declare type variable
-
- def first(seq: Sequence[T]) -> T: # Generic function
- return seq[0]
-
-As with generic classes, the type variable can be replaced with any
-type. That means ``first`` can be used with any sequence type, and the
-return type is derived from the sequence item type. For example:
-
-.. code-block:: python
-
- # Assume first defined as above.
-
- s = first('foo') # s has type str.
- n = first([1, 2, 3]) # n has type int.
-
-Note also that a single definition of a type variable (such as ``T``
-above) can be used in multiple generic functions or classes. In this
-example we use the same type variable in two generic functions:
-
-.. code-block:: python
-
- from typing import TypeVar, Sequence
-
- T = TypeVar('T') # Declare type variable
-
- def first(seq: Sequence[T]) -> T:
- return seq[0]
-
- def last(seq: Sequence[T]) -> T:
- return seq[-1]
-
-.. _generic-methods-and-generic-self:
-
-Generic methods and generic self
-********************************
-
-You can also define generic methods — just use a type variable in the
-method signature that is different from class type variables. In particular,
-``self`` may also be generic, allowing a method to return the most precise
-type known at the point of access.
-
-.. note::
-
- This feature is experimental. Checking code with type annotations for self
- arguments is still not fully implemented. Mypy may disallow valid code or
- allow unsafe code.
-
-In this way, for example, you can typecheck chaining of setter methods:
-
-.. code-block:: python
-
- from typing import TypeVar
-
- T = TypeVar('T', bound='Shape')
-
- class Shape:
- def set_scale(self: T, scale: float) -> T:
- self.scale = scale
- return self
-
- class Circle(Shape):
- def set_radius(self, r: float) -> 'Circle':
- self.radius = r
- return self
-
- class Square(Shape):
- def set_width(self, w: float) -> 'Square':
- self.width = w
- return self
-
- circle = Circle().set_scale(0.5).set_radius(2.7) # type: Circle
- square = Square().set_scale(0.5).set_width(3.2) # type: Square
-
-Without using generic ``self``, the last two lines could not be type-checked properly.
-
-Other uses are factory methods, such as copy and deserialization.
-For class methods, you can also define generic ``cls``, using ``Type[T]``:
-
-.. code-block:: python
-
- from typing import TypeVar, Tuple, Type
-
- T = TypeVar('T', bound='Friend')
-
- class Friend:
- other = None # type: Friend
-
- @classmethod
- def make_pair(cls: Type[T]) -> Tuple[T, T]:
- a, b = cls(), cls()
- a.other = b
- b.other = a
- return a, b
-
- class SuperFriend(Friend):
- pass
-
- a, b = SuperFriend.make_pair()
-
-Note that when overriding a method with generic ``self``, you must either
-return a generic ``self`` too, or return an instance of the current class.
-In the latter case, you must implement this method in all future subclasses.
-
-Note also that mypy cannot always verify that the implementation of a copy
-or a deserialization method returns the actual type of self. Therefore
-you may need to silence mypy inside these methods (but not at the call site),
-possibly by making use of the ``Any`` type.
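-
-For example, here is a minimal sketch of such a method (the ``Node`` class
-and ``clone`` method are illustrative, not part of any library):
-
-.. code-block:: python
-
-    import copy
-    from typing import TypeVar, cast
-
-    T = TypeVar('T', bound='Node')
-
-    class Node:
-        def clone(self: T) -> T:
-            # Mypy cannot always verify that the copy really has the runtime
-            # type of self, so cast() silences it inside the method while
-            # callers still get the precise type T.
-            return cast(T, copy.copy(self))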
-
-.. _type-variable-value-restriction:
-
-Type variables with value restriction
-*************************************
-
-By default, a type variable can be replaced with any type. However, sometimes
-it's useful to have a type variable that can only have some specific types
-as its value. A typical example is a type variable that can only have values
-``str`` and ``bytes``:
-
-.. code-block:: python
-
- from typing import TypeVar
-
- AnyStr = TypeVar('AnyStr', str, bytes)
-
-This is actually such a common type variable that ``AnyStr`` is
-defined in ``typing`` and we don't need to define it ourselves.
-
-We can use ``AnyStr`` to define a function that can concatenate
-two strings or bytes objects, but it can't be called with other
-argument types:
-
-.. code-block:: python
-
- from typing import AnyStr
-
- def concat(x: AnyStr, y: AnyStr) -> AnyStr:
- return x + y
-
- concat('a', 'b') # Okay
- concat(b'a', b'b') # Okay
- concat(1, 2) # Error!
-
-Note that this is different from a union type, since combinations
-of ``str`` and ``bytes`` are not accepted:
-
-.. code-block:: python
-
- concat('string', b'bytes') # Error!
-
-In this case, this is exactly what we want, since it's not possible
-to concatenate a string and a bytes object! The type checker
-will reject this function:
-
-.. code-block:: python
-
- from typing import Union
-
- def union_concat(x: Union[str, bytes], y: Union[str, bytes]) -> Union[str, bytes]:
- return x + y # Error: can't concatenate str and bytes
-
-Another interesting special case is calling ``concat()`` with a
-subtype of ``str``:
-
-.. code-block:: python
-
- class S(str): pass
-
- ss = concat(S('foo'), S('bar'))
-
-You may expect that the type of ``ss`` is ``S``, but the type is
-actually ``str``: a subtype gets promoted to one of the valid values
-for the type variable, which in this case is ``str``. This is thus
-subtly different from *bounded quantification* in languages such as
-Java, where the return type would be ``S``. The way mypy implements
-this is correct for ``concat``, since ``concat`` actually returns a
-``str`` instance in the above example:
-
-.. code-block:: python
-
- >>> print(type(ss))
- <class 'str'>
-
-You can also use a ``TypeVar`` with a restricted set of possible
-values when defining a generic class. For example, mypy uses the type
-``typing.Pattern[AnyStr]`` for the return value of ``re.compile``,
-since regular expressions can be based on a string or a bytes pattern.
-
-.. _type-variable-upper-bound:
-
-Type variables with upper bounds
-********************************
-
-A type variable can also be restricted to having values that are
-subtypes of a specific type. This type is called the upper bound of
-the type variable, and is specified with the ``bound=...`` keyword
-argument to ``TypeVar``.
-
-.. code-block:: python
-
- from typing import TypeVar, SupportsAbs
-
- T = TypeVar('T', bound=SupportsAbs[float])
-
-In the definition of a generic function that uses such a type variable
-``T``, the type represented by ``T`` is assumed to be a subtype of
-its upper bound, so the function can use methods of the upper bound on
-values of type ``T``.
-
-.. code-block:: python
-
- def largest_in_absolute_value(*xs: T) -> T:
- return max(xs, key=abs) # Okay, because T is a subtype of SupportsAbs[float].
-
-In a call to such a function, the type ``T`` must be replaced by a
-type that is a subtype of its upper bound. Continuing the example
-above,
-
-.. code-block:: python
-
- largest_in_absolute_value(-3.5, 2) # Okay, has type float.
- largest_in_absolute_value(5+6j, 7) # Okay, has type complex.
- largest_in_absolute_value('a', 'b') # Error: 'str' is not a subtype of SupportsAbs[float].
-
-Type parameters of generic classes may also have upper bounds, which
-restrict the valid values for the type parameter in the same way.
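-
-For example, a minimal sketch of a generic class with a bounded type
-parameter, reusing the bounded ``T`` defined above (the class name is
-illustrative):
-
-.. code-block:: python
-
-    from typing import Generic
-
-    class AbsTracker(Generic[T]):
-        def __init__(self, first: T) -> None:
-            self.largest = first
-
-        def update(self, value: T) -> None:
-            # abs() is available because T is bounded by SupportsAbs[float].
-            if abs(value) > abs(self.largest):
-                self.largest = value
-
-    tracker = AbsTracker(-3.5)   # T is inferred as float
-    tracker.update(2.0)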
-
-A type variable may not have both a value restriction (see
-:ref:`type-variable-value-restriction`) and an upper bound.
-
-.. _declaring-decorators:
-
-Declaring decorators
-********************
-
-One common application of type variable upper bounds is in declaring a
-decorator that preserves the signature of the function it decorates,
-regardless of that signature. Here's a complete example:
-
-.. code-block:: python
-
- from typing import Any, Callable, TypeVar, Tuple, cast
-
- FuncType = Callable[..., Any]
- F = TypeVar('F', bound=FuncType)
-
- # A decorator that preserves the signature.
- def my_decorator(func: F) -> F:
- def wrapper(*args, **kwds):
- print("Calling", func)
- return func(*args, **kwds)
- return cast(F, wrapper)
-
- # A decorated function.
- @my_decorator
- def foo(a: int) -> str:
- return str(a)
-
- # Another.
- @my_decorator
- def bar(x: float, y: float) -> Tuple[float, float, bool]:
- return (x, y, x > y)
-
- a = foo(12)
- reveal_type(a) # str
- b = bar(3.14, 0)
- reveal_type(b) # Tuple[float, float, bool]
- foo('x') # Type check error: incompatible type "str"; expected "int"
-
-From the final block we see that the signatures of the decorated
-functions ``foo()`` and ``bar()`` are the same as those of the original
-functions (before the decorator is applied).
-
-The bound on ``F`` is used so that calling the decorator on a
-non-function (e.g. ``my_decorator(1)``) will be rejected.
-
-Also note that the ``wrapper()`` function is not type-checked. Wrapper
-functions are typically small enough that this is not a big
-problem. This is also the reason for the ``cast()`` call in the
-``return`` statement in ``my_decorator()``. See :ref:`casts`.
diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst
deleted file mode 100644
index a41c125..0000000
--- a/docs/source/getting_started.rst
+++ /dev/null
@@ -1,24 +0,0 @@
-.. _getting-started:
-
-Getting started
-===============
-
-Installation
-************
-
-Mypy requires Python 3.3 or later. Once you've `installed Python 3 <https://www.python.org/downloads/>`_, you can install mypy with:
-
-.. code-block:: text
-
- $ python3 -m pip install mypy
-
-Installing from source
-**********************
-
-To install mypy from source, clone the github repository and then run pip install locally:
-
-.. code-block:: text
-
- $ git clone https://github.com/python/mypy.git
- $ cd mypy
- $ sudo python3 -m pip install --upgrade .
diff --git a/docs/source/index.rst b/docs/source/index.rst
deleted file mode 100644
index 90cc749..0000000
--- a/docs/source/index.rst
+++ /dev/null
@@ -1,42 +0,0 @@
-.. Mypy documentation master file, created by
- sphinx-quickstart on Sun Sep 14 19:50:35 2014.
- You can adapt this file completely to your liking, but it should at least
- contain the root `toctree` directive.
-
-Welcome to Mypy documentation!
-==============================
-
-Mypy is a static type checker for Python.
-
-.. toctree::
- :maxdepth: 2
-
- introduction
- basics
- getting_started
- builtin_types
- python2
- type_inference_and_annotations
- kinds_of_types
- class_basics
- dynamic_typing
- function_overloading
- casts
- duck_type_compatibility
- common_issues
- generics
- supported_python_features
- additional_features
- command_line
- config_file
- python36
- faq
- cheat_sheet
- cheat_sheet_py3
- revision_history
-
-Indices and tables
-==================
-
-* :ref:`genindex`
-* :ref:`search`
diff --git a/docs/source/introduction.rst b/docs/source/introduction.rst
deleted file mode 100644
index 3bcd0ad..0000000
--- a/docs/source/introduction.rst
+++ /dev/null
@@ -1,30 +0,0 @@
-Introduction
-============
-
-Mypy is a static type checker for Python. If you sprinkle your code
-with type annotations, mypy can type check your code and find common bugs.
-As mypy is a static analyzer, or a lint-like tool, your code's type
-annotations are just hints and don't interfere when running your program.
-You run your program with a standard Python interpreter, and the annotations
-are treated primarily as comments.
-
-Using the Python 3 function annotation syntax (using the PEP 484 notation) or
-a comment-based annotation syntax for Python 2 code, you will be able to
-efficiently annotate your code and use mypy to check the code for common
-errors. Mypy has a powerful, easy-to-use, type system with modern features
-such as type inference, generics, function types, tuple types and
-union types.
-
-As a developer, you decide how to use mypy in your workflow. You can always
-escape to dynamic typing as mypy's approach to static typing doesn't restrict
-what you can do in your programs. Using mypy will make your programs easier to
-debug, maintain, and understand.
-
-This documentation provides a short introduction to mypy. It will help you
-get started writing statically typed code. Knowledge of Python and a
-statically typed object-oriented language, such as Java, are assumed.
-
-.. note::
-
- Mypy is still experimental. There will be changes
- that break backward compatibility.
diff --git a/docs/source/kinds_of_types.rst b/docs/source/kinds_of_types.rst
deleted file mode 100644
index dc639d9..0000000
--- a/docs/source/kinds_of_types.rst
+++ /dev/null
@@ -1,1002 +0,0 @@
-Kinds of types
-==============
-
-User-defined types
-******************
-
-Each class is also a type. Any instance of a subclass is also
-compatible with all superclasses. All values are compatible with the
-``object`` type (and also the ``Any`` type).
-
-.. code-block:: python
-
- class A:
- def f(self) -> int: # Type of self inferred (A)
- return 2
-
- class B(A):
- def f(self) -> int:
- return 3
- def g(self) -> int:
- return 4
-
- a = B() # type: A # OK (explicit type for a; override type inference)
- print(a.f()) # 3
- a.g() # Type check error: A has no method g
-
-The Any type
-************
-
-A value with the ``Any`` type is dynamically typed. Mypy doesn't know
-anything about the possible runtime types of such value. Any
-operations are permitted on the value, and the operations are checked
-at runtime, similar to normal Python code without type annotations.
-
-``Any`` is compatible with every other type, and vice versa. No
-implicit type check is inserted when assigning a value of type ``Any``
-to a variable with a more precise type:
-
-.. code-block:: python
-
- a = None # type: Any
- s = '' # type: str
- a = 2 # OK
- s = a # OK
-
-Declared (and inferred) types are *erased* at runtime. They are
-basically treated as comments, and thus the above code does not
-generate a runtime error, even though ``s`` gets an ``int`` value when
-the program is run. Note that the declared type of ``s`` is actually
-``str``!
-
-If you do not annotate a function's return value or argument types, these
-default to ``Any``:
-
-.. code-block:: python
-
- def show_heading(s) -> None:
- print('=== ' + s + ' ===') # No static type checking, as s has type Any
-
- show_heading(1) # OK (runtime error only; mypy won't generate an error)
-
-You should give a statically typed function an explicit ``None``
-return type even if it doesn't return a value, as this lets mypy catch
-additional type errors:
-
-.. code-block:: python
-
- def wait(t: float): # Implicit Any return value
- print('Waiting...')
- time.sleep(t)
-
- if wait(2) > 1: # Mypy doesn't catch this error!
- ...
-
-If we had used an explicit ``None`` return type, mypy would have caught
-the error:
-
-.. code-block:: python
-
- def wait(t: float) -> None:
- print('Waiting...')
- time.sleep(t)
-
- if wait(2) > 1: # Error: can't compare None and int
- ...
-
-The ``Any`` type is discussed in more detail in section :ref:`dynamic_typing`.
-
-.. note::
-
- A function without any types in the signature is dynamically
- typed. The body of a dynamically typed function is not checked
- statically, and local variables have implicit ``Any`` types.
- This makes it easier to migrate legacy Python code to mypy, as
- mypy won't complain about dynamically typed functions.
-
-.. _tuple-types:
-
-Tuple types
-***********
-
-The type ``Tuple[T1, ..., Tn]`` represents a tuple with the item types ``T1``, ..., ``Tn``:
-
-.. code-block:: python
-
- def f(t: Tuple[int, str]) -> None:
- t = 1, 'foo' # OK
- t = 'foo', 1 # Type check error
-
-A tuple type of this kind has a fixed number of items (2 in
-the above example). Tuples can also be used as immutable,
-varying-length sequences. You can use the type ``Tuple[T, ...]`` (with
-a literal ``...`` -- it's part of the syntax) for this
-purpose. Example:
-
-.. code-block:: python
-
- def print_squared(t: Tuple[int, ...]) -> None:
- for n in t:
- print(n, n ** 2)
-
- print_squared(()) # OK
- print_squared((1, 3, 5)) # OK
- print_squared([1, 2]) # Error: only a tuple is valid
-
-.. note::
-
- Usually it's a better idea to use ``Sequence[T]`` instead of ``Tuple[T, ...]``, as
- ``Sequence`` is also compatible with lists and other non-tuple sequences.
-
-.. note::
-
- ``Tuple[...]`` is not valid as a base class outside stub files. This is a
- limitation of the ``typing`` module. One way to work around
- this is to use a named tuple as a base class (see section :ref:`named-tuples`).
-
-.. _callable-types:
-
-Callable types (and lambdas)
-****************************
-
-You can pass around function objects and bound methods in statically
-typed code. The type of a function that accepts arguments ``A1``, ..., ``An``
-and returns ``Rt`` is ``Callable[[A1, ..., An], Rt]``. Example:
-
-.. code-block:: python
-
- from typing import Callable
-
- def twice(i: int, next: Callable[[int], int]) -> int:
- return next(next(i))
-
- def add(i: int) -> int:
- return i + 1
-
- print(twice(3, add)) # 5
-
-You can only have positional arguments, and only ones without default
-values, in callable types. These cover the vast majority of uses of
-callable types, but sometimes this isn't quite enough. Mypy recognizes
-a special form ``Callable[..., T]`` (with a literal ``...``) which can
-be used in less typical cases. It is compatible with arbitrary
-callable objects that return a type compatible with ``T``, independent
-of the number, types or kinds of arguments. Mypy lets you call such
-callable values with arbitrary arguments, without any checking -- in
-this respect they are treated similar to a ``(*args: Any, **kwargs:
-Any)`` function signature. Example:
-
-.. code-block:: python
-
- from typing import Callable
-
- def arbitrary_call(f: Callable[..., int]) -> int:
- return f('x') + f(y=2) # OK
-
- arbitrary_call(ord) # No static error, but fails at runtime
- arbitrary_call(open) # Error: does not return an int
- arbitrary_call(1) # Error: 'int' is not callable
-
-Lambdas are also supported. The lambda argument and return value types
-cannot be given explicitly; they are always inferred based on context
-using bidirectional type inference:
-
-.. code-block:: python
-
- l = map(lambda x: x + 1, [1, 2, 3]) # Infer x as int and l as Iterator[int]
-
-If you want to give the argument or return value types explicitly, use
-an ordinary, perhaps nested function definition.
-
-.. _union-types:
-
-Union types
-***********
-
-Python functions often accept values of two or more different
-types. You can use overloading to model this in statically typed code,
-but union types can make code like this easier to write.
-
-Use the ``Union[T1, ..., Tn]`` type constructor to construct a union
-type. For example, the type ``Union[int, str]`` is compatible with
-both integers and strings. You can use an ``isinstance()`` check to
-narrow down the type to a specific type:
-
-.. code-block:: python
-
- from typing import Union
-
- def f(x: Union[int, str]) -> None:
- x + 1 # Error: str + int is not valid
- if isinstance(x, int):
- # Here type of x is int.
- x + 1 # OK
- else:
- # Here type of x is str.
- x + 'a' # OK
-
- f(1) # OK
- f('x') # OK
- f(1.1) # Error
-
-.. _optional:
-
-The type of None and optional types
-***********************************
-
-Mypy treats the type of ``None`` as special. ``None`` is a valid value
-for every type, which resembles ``null`` in Java. Unlike Java, mypy
-doesn't treat primitive types
-specially: ``None`` is also valid for primitive types such as ``int``
-and ``float``.
-
-.. note::
-
- See :ref:`strict_optional` for an experimental mode which allows
- mypy to check ``None`` values precisely.
-
-When initializing a variable as ``None``, ``None`` is usually an
-empty place-holder value, and the actual value has a different type.
-This is why you need to annotate an attribute in a case like this:
-
-.. code-block:: python
-
- class A:
- def __init__(self) -> None:
- self.count = None # type: int
-
-Mypy will complain if you omit the type annotation, as it wouldn't be
-able to infer a non-trivial type for the ``count`` attribute
-otherwise.
-
-Mypy generally uses the first assignment to a variable to
-infer the type of the variable. However, if you assign both a ``None``
-value and a non-``None`` value in the same scope, mypy can often do
-the right thing:
-
-.. code-block:: python
-
- def f(i: int) -> None:
- n = None # Inferred type int because of the assignment below
- if i > 0:
- n = i
- ...
-
-Often it's useful to know whether a variable can be
-``None``. For example, this function accepts a ``None`` argument,
-but it's not obvious from its signature:
-
-.. code-block:: python
-
- def greeting(name: str) -> str:
- if name:
- return 'Hello, {}'.format(name)
- else:
- return 'Hello, stranger'
-
- print(greeting('Python')) # Okay!
- print(greeting(None)) # Also okay!
-
-Mypy lets you use ``Optional[t]`` to document that ``None`` is a
-valid argument type:
-
-.. code-block:: python
-
- from typing import Optional
-
- def greeting(name: Optional[str]) -> str:
- if name:
- return 'Hello, {}'.format(name)
- else:
- return 'Hello, stranger'
-
-Mypy treats this as semantically equivalent to the previous example,
-since ``None`` is implicitly valid for any type, but it's much more
-useful for a programmer who is reading the code. You can equivalently
-use ``Union[str, None]``, but ``Optional`` is shorter and more
-idiomatic.
-
-.. note::
-
- ``None`` is also used as the return type for functions that don't
- return a value, i.e. that implicitly return ``None``. Mypy doesn't
- use ``NoneType`` for this, since it would
- look awkward, even though that is the real name of the type of ``None``
- (try ``type(None)`` in the interactive interpreter to see for yourself).
-
-.. _strict_optional:
-
-Experimental strict optional type and None checking
-***************************************************
-
-Currently, ``None`` is a valid value for each type, similar to
-``null`` or ``NULL`` in many languages. However, you can use the
-experimental ``--strict-optional`` command line option to tell mypy
-that types should not include ``None``
-by default. The ``Optional`` type modifier is then used to define
-a type variant that includes ``None``, such as ``Optional[int]``:
-
-.. code-block:: python
-
- from typing import Optional
-
- def f() -> Optional[int]:
- return None # OK
-
- def g() -> int:
- ...
- return None # Error: None not compatible with int
-
-Also, most operations will not be allowed on unguarded ``None``
-or ``Optional`` values:
-
-.. code-block:: python
-
- def f(x: Optional[int]) -> int:
- return x + 1 # Error: Cannot add None and int
-
-Instead, an explicit ``None`` check is required. Mypy has
-powerful type inference that lets you use regular Python
-idioms to guard against ``None`` values. For example, mypy
-recognizes ``is None`` checks:
-
-.. code-block:: python
-
- def f(x: Optional[int]) -> int:
- if x is None:
- return 0
- else:
- # The inferred type of x is just int here.
- return x + 1
-
-Mypy will infer the type of ``x`` to be ``int`` in the else block due to the
-check against ``None`` in the if condition.
-
-.. note::
-
- ``--strict-optional`` is experimental and still has known issues.
-
-Class name forward references
-*****************************
-
-Python does not allow references to a class object before the class is
-defined. Thus this code does not work as expected:
-
-.. code-block:: python
-
- def f(x: A) -> None: # Error: Name A not defined
- ...
-
- class A:
- ...
-
-In cases like these you can enter the type as a string literal — this
-is a *forward reference*:
-
-.. code-block:: python
-
- def f(x: 'A') -> None: # OK
- ...
-
- class A:
- ...
-
-Of course, instead of using a string literal type, you could move the
-function definition after the class definition. This is not always
-desirable or even possible, though.
-
-Any type can be entered as a string literal, and you can combine
-string-literal types with non-string-literal types freely:
-
-.. code-block:: python
-
- def f(a: List['A']) -> None: ... # OK
- def g(n: 'int') -> None: ... # OK, though not useful
-
- class A: pass
-
-String literal types are never needed in ``# type:`` comments.
-
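-For example (a minimal sketch; the names are illustrative):
-
-.. code-block:: python
-
-    from typing import List
-
-    def f() -> None:
-        # No quotes are needed around the forward reference in a type comment.
-        xs = []  # type: List[A]
-
-    class A:
-        pass
-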
-String literal types must be defined (or imported) later *in the same
-module*. They cannot be used to leave cross-module references
-unresolved. (For dealing with import cycles, see
-:ref:`import-cycles`.)
-
-.. _type-aliases:
-
-Type aliases
-************
-
-In certain situations, type names may end up being long and painful to type:
-
-.. code-block:: python
-
- def f() -> Union[List[Dict[Tuple[int, str], Set[int]]], Tuple[str, List[str]]]:
- ...
-
-When cases like this arise, you can define a type alias by simply
-assigning the type to a variable:
-
-.. code-block:: python
-
- AliasType = Union[List[Dict[Tuple[int, str], Set[int]]], Tuple[str, List[str]]]
-
- # Now we can use AliasType in place of the full name:
-
- def f() -> AliasType:
- ...
-
-Type aliases can be generic. In that case they can be used in two ways:
-subscripted aliases are equivalent to the original types with the type
-variables substituted (the number of type arguments must match the number
-of free type variables in the generic alias), and unsubscripted aliases are
-treated as the original types with the free variables replaced with
-``Any``. Examples (following `PEP 484
-<https://www.python.org/dev/peps/pep-0484/#type-aliases>`_):
-
-.. code-block:: python
-
- from typing import TypeVar, Iterable, Tuple, Union, Callable
- S = TypeVar('S')
- TInt = Tuple[int, S]
- UInt = Union[S, int]
- CBack = Callable[..., S]
-
- def response(query: str) -> UInt[str]: # Same as Union[str, int]
- ...
- def activate(cb: CBack[S]) -> S: # Same as Callable[..., S]
- ...
- table_entry: TInt # Same as Tuple[int, Any]
-
- T = TypeVar('T', int, float, complex)
- Vec = Iterable[Tuple[T, T]]
-
- def inproduct(v: Vec[T]) -> T:
- return sum(x*y for x, y in v)
-
- def dilate(v: Vec[T], scale: T) -> Vec[T]:
- return ((x * scale, y * scale) for x, y in v)
-
- v1: Vec[int] = [] # Same as Iterable[Tuple[int, int]]
- v2: Vec = [] # Same as Iterable[Tuple[Any, Any]]
- v3: Vec[int, int] = [] # Error: Invalid alias, too many type arguments!
-
-Type aliases can be imported from modules like any other names. An alias can
-target another alias (although building long chains of aliases is not
-recommended, since this impedes code readability and defeats the purpose of
-using aliases). Continuing the previous examples:
-
-.. code-block:: python
-
- from typing import TypeVar, Generic, Optional
- from first_example import AliasType
- from second_example import Vec
-
- def fun() -> AliasType:
- ...
-
- T = TypeVar('T')
- class NewVec(Generic[T], Vec[T]):
- ...
- for i, j in NewVec[int]():
- ...
-
- OIntVec = Optional[Vec[int]]
-
-.. note::
-
- A type alias does not create a new type. It's just a shorthand notation for
- another type -- it's equivalent to the target type. For generic type aliases
- this means that variance of type variables used for alias definition does not
- apply to aliases. A parameterized generic alias is treated simply as an original
- type with the corresponding type variables substituted.
-
-.. _newtypes:
-
-NewTypes
-********
-
-(Freely after `PEP 484
-<https://www.python.org/dev/peps/pep-0484/#newtype-helper-function>`_.)
-
-There are also situations where a programmer might want to avoid logical errors by
-creating simple classes. For example:
-
-.. code-block:: python
-
- class UserId(int):
- pass
-
- def get_by_user_id(user_id: UserId):
- ...
-
-However, this approach introduces some runtime overhead. To avoid this, the typing
-module provides a helper function ``NewType`` that creates simple unique types with
-almost zero runtime overhead. Mypy will treat the statement
-``Derived = NewType('Derived', Base)`` as being roughly equivalent to the following
-definition:
-
-.. code-block:: python
-
- class Derived(Base):
- def __init__(self, _x: Base) -> None:
- ...
-
-However, at runtime, ``NewType('Derived', Base)`` will return a dummy function that
-simply returns its argument:
-
-.. code-block:: python
-
- def Derived(_x):
- return _x
-
-Mypy will require explicit casts from ``int`` where ``UserId`` is expected, while
-implicitly casting from ``UserId`` where ``int`` is expected. Examples:
-
-.. code-block:: python
-
- from typing import NewType
-
- UserId = NewType('UserId', int)
-
- def name_by_id(user_id: UserId) -> str:
- ...
-
- UserId('user') # Fails type check
-
- name_by_id(42) # Fails type check
- name_by_id(UserId(42)) # OK
-
- num = UserId(5) + 1 # type: int
-
-``NewType`` accepts exactly two arguments. The first argument must be a string literal
-containing the name of the new type and must equal the name of the variable to which the new
-type is assigned. The second argument must be a properly subclassable class, i.e.,
-not a type construct like ``Union``, etc.
-
-The function returned by ``NewType`` accepts only one argument; this is equivalent to
-supporting only one constructor accepting an instance of the base class (see above).
-Example:
-
-.. code-block:: python
-
- from typing import NewType
-
- class PacketId:
- def __init__(self, major: int, minor: int) -> None:
- self._major = major
- self._minor = minor
-
- TcpPacketId = NewType('TcpPacketId', PacketId)
-
- packet = PacketId(100, 100)
- tcp_packet = TcpPacketId(packet) # OK
-
- tcp_packet = TcpPacketId(127, 0) # Fails in type checker and at runtime
-
-Both ``isinstance`` and ``issubclass``, as well as subclassing, will fail for
-``NewType('Derived', Base)``, since function objects don't support these operations.
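-
-For example, continuing the ``UserId`` example above (these lines are shown
-only to illustrate the failures; the ``AdminId`` name is illustrative):
-
-.. code-block:: python
-
-    from typing import NewType
-
-    UserId = NewType('UserId', int)
-
-    isinstance(UserId(5), UserId)   # Fails at runtime: UserId is a plain function
-
-    class AdminId(UserId):          # Fails: a function cannot be subclassed
-        ...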
-
-.. note::
-
- Note that unlike type aliases, ``NewType`` will create an entirely new and
- unique type when used. The intended purpose of ``NewType`` is to help you
- detect cases where you accidentally mixed together the old base type and the
- new derived type.
-
- For example, the following will successfully typecheck when using type
- aliases:
-
- .. code-block:: python
-
- UserId = int
-
- def name_by_id(user_id: UserId) -> str:
- ...
-
- name_by_id(3) # ints and UserId are synonymous
-
- But a similar example using ``NewType`` will not typecheck:
-
- .. code-block:: python
-
- from typing import NewType
-
- UserId = NewType('UserId', int)
-
- def name_by_id(user_id: UserId) -> str:
- ...
-
- name_by_id(3) # int is not the same as UserId
-
-.. _named-tuples:
-
-Named tuples
-************
-
-Mypy recognizes named tuples and can type check code that defines or
-uses them. In this example, we can detect code trying to access a
-missing attribute:
-
-.. code-block:: python
-
- Point = namedtuple('Point', ['x', 'y'])
- p = Point(x=1, y=2)
- print(p.z) # Error: Point has no attribute 'z'
-
-If you use ``namedtuple`` to define your named tuple, all the items
-are assumed to have ``Any`` types. That is, mypy doesn't know anything
-about item types. You can use ``typing.NamedTuple`` to also define
-item types:
-
-.. code-block:: python
-
- from typing import NamedTuple
-
- Point = NamedTuple('Point', [('x', int),
- ('y', int)])
- p = Point(x=1, y='x') # Argument has incompatible type "str"; expected "int"
-
-Python 3.6 will have an alternative, class-based syntax for named tuples with types.
-Mypy supports it already:
-
-.. code-block:: python
-
- from typing import NamedTuple
-
- class Point(NamedTuple):
- x: int
- y: int
-
- p = Point(x=1, y='x') # Argument has incompatible type "str"; expected "int"
-
-.. note::
-
- The Python 3.6 syntax requires the ``--fast-parser`` flag. You must also have the
- `typed_ast <https://pypi.python.org/pypi/typed-ast>`_ package
- installed (version 0.6.1 or later). Use ``pip3 install -U typed_ast``.
-
-.. _type-of-class:
-
-The type of class objects
-*************************
-
-(Freely after `PEP 484
-<https://www.python.org/dev/peps/pep-0484/#the-type-of-class-objects>`_.)
-
-Sometimes you want to talk about class objects that inherit from a
-given class. This can be spelled as ``Type[C]`` where ``C`` is a
-class. In other words, when ``C`` is the name of a class, using ``C``
-to annotate an argument declares that the argument is an instance of
-``C`` (or of a subclass of ``C``), but using ``Type[C]`` as an
-argument annotation declares that the argument is a class object
-deriving from ``C`` (or ``C`` itself).
-
-For example, assume the following classes:
-
-.. code-block:: python
-
- class User:
- # Defines fields like name, email
- ...
-
- class BasicUser(User):
- def upgrade(self):
- """Upgrade to Pro"""
-
- class ProUser(User):
- def pay(self):
- """Pay bill"""
-
-Note that ``ProUser`` doesn't inherit from ``BasicUser``.
-
-Here's a function that creates an instance of one of these classes if
-you pass it the right class object:
-
-.. code-block:: python
-
- def new_user(user_class):
- user = user_class()
- # (Here we could write the user object to a database)
- return user
-
-How would we annotate this function? Without ``Type[]`` the best we
-could do would be:
-
-.. code-block:: python
-
- def new_user(user_class: type) -> User:
- # Same implementation as before
- ...
-
-This seems reasonable, except that in the following example, mypy
-doesn't see that the ``buyer`` variable has type ``ProUser``:
-
-.. code-block:: python
-
- buyer = new_user(ProUser)
- buyer.pay() # Rejected, not a method on User
-
-However, using ``Type[]`` and a type variable with an upper bound (see
-:ref:`type-variable-upper-bound`) we can do better:
-
-.. code-block:: python
-
- U = TypeVar('U', bound=User)
-
- def new_user(user_class: Type[U]) -> U:
- # Same implementation as before
- ...
-
-Now mypy will infer the correct type of the result when we call
-``new_user()`` with a specific subclass of ``User``:
-
-.. code-block:: python
-
- beginner = new_user(BasicUser) # Inferred type is BasicUser
- beginner.upgrade() # OK
-
-.. note::
-
- The value corresponding to ``Type[C]`` must be an actual class
- object that's a subtype of ``C``. Its constructor must be
- compatible with the constructor of ``C``. If ``C`` is a type
- variable, its upper bound must be a class object.
-
-For more details about ``Type[]`` see `PEP 484
-<https://www.python.org/dev/peps/pep-0484/#the-type-of-class-objects>`_.
-
-.. _text-and-anystr:
-
-Text and AnyStr
-***************
-
-Sometimes you may want to write a function which will accept only unicode
-strings. This can be challenging to do in a codebase intended to run in
-both Python 2 and Python 3 since ``str`` means something different in both
-versions and ``unicode`` is not a keyword in Python 3.
-
-To help solve this issue, use ``typing.Text`` which is aliased to
-``unicode`` in Python 2 and to ``str`` in Python 3. This allows you to
-indicate that a function should accept only unicode strings in a
-cross-compatible way:
-
-.. code-block:: python
-
- from typing import Text
-
- def unicode_only(s: Text) -> Text:
- return s + u'\u2713'
-
-In other cases, you may want to write a function that will work with any
-kind of string but will not let you mix two different string types. To do
-so use ``typing.AnyStr``:
-
-.. code-block:: python
-
- from typing import AnyStr
-
- def concat(x: AnyStr, y: AnyStr) -> AnyStr:
- return x + y
-
- concat('a', 'b') # Okay
- concat(b'a', b'b') # Okay
- concat('a', b'b') # Error: cannot mix bytes and unicode
-
-For more details, see :ref:`type-variable-value-restriction`.
-
-.. note::
-
- How ``bytes``, ``str``, and ``unicode`` are handled between Python 2 and
- Python 3 may change in future versions of mypy.
-
-.. _generators:
-
-Generators
-**********
-
-A basic generator that only yields values can be annotated as having a return
-type of either ``Iterator[YieldType]`` or ``Iterable[YieldType]``. For example:
-
-.. code-block:: python
-
- def squares(n: int) -> Iterator[int]:
- for i in range(n):
- yield i * i
-
-If you want your generator to accept values via the ``send`` method or return
-a value, you should use the
-``Generator[YieldType, SendType, ReturnType]`` generic type instead. For example:
-
-.. code-block:: python
-
- def echo_round() -> Generator[int, float, str]:
- sent = yield 0
- while sent >= 0:
- sent = yield round(sent)
- return 'Done'
-
-Note that unlike many other generics in the typing module, the ``SendType`` of
-``Generator`` behaves contravariantly, not covariantly or invariantly.
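-
-For instance, here is a minimal sketch of what contravariance means in
-practice (the ``accept_anything`` and ``drive`` names are illustrative):
-
-.. code-block:: python
-
-    from typing import Generator
-
-    def accept_anything() -> Generator[int, object, str]:
-        received = yield 0
-        while received is not None:
-            received = yield 0
-        return 'Done'
-
-    def drive(gen: Generator[int, int, str]) -> None:
-        next(gen)
-        gen.send(42)
-
-    # OK: because the SendType is contravariant, a generator that accepts any
-    # object being sent to it can be used where only ints are ever sent.
-    drive(accept_anything())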
-
-If you do not plan on receiving or returning values, then set the ``SendType``
-or ``ReturnType`` to ``None``, as appropriate. For example, we could have
-annotated the first example as the following:
-
-.. code-block:: python
-
- def squares(n: int) -> Generator[int, None, None]:
- for i in range(n):
- yield i * i
-
-.. _async-and-await:
-
-Typing async/await
-******************
-
-.. note::
-
- Currently, you must pass in the ``--fast-parser`` flag if you want to run
- mypy against code containing the ``async/await`` keywords. The fast parser
- will be enabled by default in a future version of mypy.
-
- Note that mypy will understand coroutines created using the ``@asyncio.coroutine``
- decorator both with and without the fast parser enabled.
-
-Mypy supports the ability to type coroutines that use the ``async/await``
-syntax introduced in Python 3.5. For more information regarding coroutines and
-this new syntax, see `PEP 492 <https://www.python.org/dev/peps/pep-0492/>`_.
-
-Functions defined using ``async def`` are typed just like normal functions.
-The return type annotation should be the same as the type of the value you
-expect to get back when ``await``-ing the coroutine.
-
-.. code-block:: python
-
- import asyncio
-
- async def format_string(tag: str, count: int) -> str:
- return 'T-minus {} ({})'.format(count, tag)
-
- async def countdown_1(tag: str, count: int) -> str:
- while count > 0:
- my_str = await format_string(tag, count) # has type 'str'
- print(my_str)
- await asyncio.sleep(0.1)
- count -= 1
- return "Blastoff!"
-
- loop = asyncio.get_event_loop()
- loop.run_until_complete(countdown_1("Millennium Falcon", 5))
- loop.close()
-
-The result of calling an ``async def`` function *without awaiting* will be a
-value of type ``Awaitable[T]``:
-
-.. code-block:: python
-
- my_coroutine = countdown_1("Millennium Falcon", 5)
- reveal_type(my_coroutine) # has type 'Awaitable[str]'
-
-If you want to use coroutines in older versions of Python that do not support
-the ``async def`` syntax, you can instead use the ``@asyncio.coroutine``
-decorator to convert a generator into a coroutine.
-
-Note that we set the ``YieldType`` of the generator to be ``Any`` in the
-following example. This is because the exact yield type is an implementation
-detail of the coroutine runner (e.g. the ``asyncio`` event loop) and your
-coroutine shouldn't have to know or care about what precisely that type is.
-
-.. code-block:: python
-
- from typing import Any, Generator
- import asyncio
-
- @asyncio.coroutine
- def countdown_2(tag: str, count: int) -> Generator[Any, None, str]:
- while count > 0:
- print('T-minus {} ({})'.format(count, tag))
- yield from asyncio.sleep(0.1)
- count -= 1
- return "Blastoff!"
-
- loop = asyncio.get_event_loop()
- loop.run_until_complete(countdown_2("USS Enterprise", 5))
- loop.close()
-
-As before, the result of calling a generator decorated with ``@asyncio.coroutine``
-will be a value of type ``Awaitable[T]``.
-
-.. note::
-
- At runtime, you are allowed to add the ``@asyncio.coroutine`` decorator to
- both functions and generators. This is useful when you want to mark a
- work-in-progress function as a coroutine, but have not yet added ``yield`` or
- ``yield from`` statements:
-
- .. code-block:: python
-
- import asyncio
-
- @asyncio.coroutine
- def serialize(obj: object) -> str:
- # todo: add yield/yield from to turn this into a generator
- return "placeholder"
-
- However, mypy currently does not support converting functions into
- coroutines. Support for this feature will be added in a future version, but
- for now, you can manually force the function to be a generator by doing
- something like this:
-
- .. code-block:: python
-
- from typing import Generator
- import asyncio
-
- @asyncio.coroutine
- def serialize(obj: object) -> Generator[None, None, str]:
- # todo: add yield/yield from to turn this into a generator
- if False:
- yield
- return "placeholder"
-
-You may also choose to create a subclass of ``Awaitable`` instead:
-
-.. code-block:: python
-
- from typing import Any, Awaitable, Generator
- import asyncio
-
- class MyAwaitable(Awaitable[str]):
- def __init__(self, tag: str, count: int) -> None:
- self.tag = tag
- self.count = count
-
- def __await__(self) -> Generator[Any, None, str]:
- for i in range(self.count, 0, -1):
- print('T-minus {} ({})'.format(i, self.tag))
- yield from asyncio.sleep(0.1)
- return "Blastoff!"
-
- def countdown_3(tag: str, count: int) -> Awaitable[str]:
- return MyAwaitable(tag, count)
-
- loop = asyncio.get_event_loop()
- loop.run_until_complete(countdown_3("Heart of Gold", 5))
- loop.close()
-
-To create an iterable coroutine, subclass ``AsyncIterator``:
-
-.. code-block:: python
-
- from typing import Optional, AsyncIterator
- import asyncio
-
- class arange(AsyncIterator[int]):
- def __init__(self, start: int, stop: int, step: int) -> None:
- self.start = start
- self.stop = stop
- self.step = step
- self.count = start - step
-
- def __aiter__(self) -> AsyncIterator[int]:
- return self
-
- async def __anext__(self) -> int:
- self.count += self.step
- if self.count == self.stop:
- raise StopAsyncIteration
- else:
- return self.count
-
- async def countdown_4(tag: str, n: int) -> str:
- async for i in arange(n, 0, -1):
- print('T-minus {} ({})'.format(i, tag))
- await asyncio.sleep(0.1)
- return "Blastoff!"
-
- loop = asyncio.get_event_loop()
- loop.run_until_complete(countdown_4("Serenity", 5))
- loop.close()
-
-For a more concrete example, the mypy repo has a toy webcrawler that
-demonstrates how to work with coroutines. One version
-`uses async/await <https://github.com/python/mypy/blob/master/test-data/samples/crawl2.py>`_
-and one
-`uses yield from <https://github.com/python/mypy/blob/master/test-data/samples/crawl.py>`_.
diff --git a/docs/source/python2.rst b/docs/source/python2.rst
deleted file mode 100644
index 2cb8f32..0000000
--- a/docs/source/python2.rst
+++ /dev/null
@@ -1,136 +0,0 @@
-.. _python2:
-
-Type checking Python 2 code
-===========================
-
-For code that needs to be Python 2.7 compatible, function type
-annotations are given in comments, since the function annotation
-syntax was introduced in Python 3. The comment-based syntax is
-specified in `PEP 484 <https://www.python.org/dev/peps/pep-0484>`_.
-
-Run mypy in Python 2 mode by using the ``--py2`` option::
-
- $ mypy --py2 program.py
-
-To run your program, you must have the ``typing`` module in your
-Python 2 module search path. Use ``pip install typing`` to install the
-module. This also works for Python 3 versions prior to 3.5 that don't
-include ``typing`` in the standard library.
-
-The example below illustrates the Python 2 function type annotation
-syntax. This syntax is also valid in Python 3 mode:
-
-.. code-block:: python
-
- from typing import List
-
- def hello(): # type: () -> None
- print 'hello'
-
- class Example:
- def method(self, lst, opt=0, *args, **kwargs):
- # type: (List[str], int, *str, **bool) -> int
- """Docstring comes after type comment."""
- ...
-
-It's worth going through these details carefully to avoid surprises:
-
-- You don't provide an annotation for the ``self`` / ``cls`` variable of
- methods.
-
-- Docstring always comes *after* the type comment.
-
-- For ``*args`` and ``**kwargs`` the type should be prefixed with
- ``*`` or ``**``, respectively (except when using the multi-line
- annotation syntax described below). Again, the above example
- illustrates this.
-
-- Things like ``Any`` must be imported from ``typing``, even if they
- are only used in comments.
-
-- In Python 2 mode ``str`` is implicitly promoted to ``unicode``, similar
- to how ``int`` is compatible with ``float``. This is unlike ``bytes`` and
- ``str`` in Python 3, which are incompatible. ``bytes`` in Python 2 is
- equivalent to ``str``. (This might change in the future.)
-
-.. _multi_line_annotation:
-
-Multi-line Python 2 function annotations
-----------------------------------------
-
-Mypy also supports a multi-line comment annotation syntax. You
-can provide a separate annotation for each argument using the variable
-annotation syntax. When using the single-line annotation syntax
-described above, functions with long argument lists tend to result in
-overly long type comments and it's often tricky to see which argument
-type corresponds to which argument. The alternative, multi-line
-annotation syntax makes long annotations easier to read and write.
-
-.. note::
-
- Multi-line comment annotations currently only work when using the
- ``--fast-parser`` command line option. This is not enabled by
- default because the option isn’t supported on Windows yet.
-
-Here is an example (from PEP 484):
-
-.. code-block:: python
-
- def send_email(address, # type: Union[str, List[str]]
- sender, # type: str
- cc, # type: Optional[List[str]]
- bcc, # type: Optional[List[str]]
- subject='',
- body=None # type: List[str]
- ):
- # type: (...) -> bool
- """Send an email message. Return True if successful."""
- <code>
-
-You write a separate annotation for each function argument on the same
-line as the argument. Each annotation must be on a separate line. If
-you leave out an annotation for an argument, it defaults to
-``Any``. You provide a return type annotation in the body of the
-function using the form ``# type: (...) -> rt``, where ``rt`` is the
-return type. Note that the return type annotation contains literal
-three dots.
-
-Note that when using multi-line comments, you do not need to prefix the
-types of your ``*args`` and ``**kwargs`` parameters with ``*`` or ``**``.
-For example, here is how you would annotate the first example using
-multi-line comments.
-
-.. code-block:: python
-
- from typing import List
-
- class Example:
- def method(self,
- lst, # type: List[str]
- opt=0, # type: int
- *args, # type: str
- **kwargs # type: bool
- ):
- # type: (...) -> int
- """Docstring comes after type comment."""
- ...
-
-
-Additional notes
-----------------
-
-- You should include types for arguments with default values in the
- annotation. The ``opt`` argument of ``method`` in the example at the
- beginning of this section is an example of this.
-
-- The annotation can be on the same line as the function header or on
- the following line, as shown in the sketch after this list.
-
-- The type syntax for variables is the same as for Python 3.
-
-- You don't need to use string literal escapes for forward references
- within comments.
-
-- Mypy uses a separate set of library stub files in `typeshed
- <https://github.com/python/typeshed>`_ for Python 2. Library support
- may vary between Python 2 and Python 3.
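-
-For instance, a minimal sketch showing both placements of the type comment
-(the function names are illustrative):
-
-.. code-block:: python
-
-    def add(a, b):  # type: (int, int) -> int
-        return a + b
-
-    def sub(a, b):
-        # type: (int, int) -> int
-        return a - b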
diff --git a/docs/source/python36.rst b/docs/source/python36.rst
deleted file mode 100644
index f676864..0000000
--- a/docs/source/python36.rst
+++ /dev/null
@@ -1,100 +0,0 @@
-.. _python-36:
-
-New features in Python 3.6
-==========================
-
-Python 3.6 will be `released
-<https://www.python.org/dev/peps/pep-0494>`_ in December 2016. The
-`first beta <https://www.python.org/downloads/release/python-360b1/>`_
-came out in September and adds some exciting features. Here's the
-support matrix for these in mypy (to be updated with each new mypy
-release). The intention is to support all of these by the time Python
-3.6 is released.
-
-.. note::
-
- Mypy only understands Python 3.6 syntax if you use the ``--fast-parser`` flag.
- This requires that the `typed_ast <https://pypi.python.org/pypi/typed-ast>`_ package is
- installed and has at least version 0.6.1. Use ``pip3 install -U typed_ast``.
- If running mypy on an earlier Python version, you also need to enable 3.6 support
- through ``--python-version 3.6``.
-
- Example command line (or use :ref:`config-file`):
-
- .. code-block:: text
-
- $ pip3 install -U typed_ast
- $ mypy --fast-parser --python-version 3.6 program.py
-
-Syntax for variable annotations (`PEP 526 <https://www.python.org/dev/peps/pep-0526>`_)
----------------------------------------------------------------------------------------
-
-Python 3.6 feature: variables (in global, class or local scope) can
-now have type annotations using either of the two forms:
-
-.. code-block:: python
-
- foo: Optional[int]
- bar: List[str] = []
-
-Mypy fully supports this syntax, interpreting these annotations as
-equivalent to the following:
-
-.. code-block:: python
-
- foo = None # type: Optional[int]
- bar = [] # type: List[str]
-
-.. note::
-
- See above for how to enable Python 3.6 syntax.
-
-Literal string formatting (`PEP 498 <https://www.python.org/dev/peps/pep-0498>`_)
----------------------------------------------------------------------------------
-
-Python 3.6 feature: string literals of the form
-``f"text {expression} text"`` evaluate ``expression`` using the
-current evaluation context (locals and globals).
-
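-For reference, a small sketch (the names are arbitrary):
-
-.. code-block:: python
-
-    name = 'world'
-    greeting = f"Hello, {name}!"  # evaluates to 'Hello, world!'
-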
-Mypy does not yet support this.
-
-Underscores in numeric literals (`PEP 515 <https://www.python.org/dev/peps/pep-0515>`_)
----------------------------------------------------------------------------------------
-
-Python 3.6 feature: numeric literals can contain underscores,
-e.g. ``1_000_000``.
-
-Mypy fully supports this syntax:
-
-.. code-block:: python
-
- precise_val = 1_000_000.000_000_1
- hexes: List[int] = []
- hexes.append(0x_FF_FF_FF_FF)
-
-.. note::
-
- This requires the ``--fast-parser`` flag and it requires that the
- `typed_ast <https://pypi.python.org/pypi/typed-ast>`_ package is
- installed and has at least version 0.6.2. Use ``pip3 install -U typed_ast``.
-
-Asynchronous generators (`PEP 525 <https://www.python.org/dev/peps/pep-0525>`_)
--------------------------------------------------------------------------------
-
-Python 3.6 feature: coroutines defined with ``async def`` (PEP 492)
-can now also be generators, i.e. contain ``yield`` expressions.
-
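-For reference, here is a minimal sketch of the construct in question
-(the ``countdown`` name is illustrative): an ``async def`` function
-that contains a ``yield`` expression.
-
-.. code-block:: python
-
-    async def countdown(n: int):
-        while n > 0:
-            yield n  # an async def that yields is an asynchronous generator
-            n -= 1
-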
-Mypy does not yet support this.
-
-Asynchronous comprehensions (`PEP 530 <https://www.python.org/dev/peps/pep-0530>`_)
------------------------------------------------------------------------------------
-
-Python 3.6 feature: coroutines defined with ``async def`` (PEP 492)
-can now also contain list, set and dict comprehensions that use
-``async for`` syntax.
-
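-For reference, a minimal sketch of the syntax (the ``collect`` function
-and its argument are hypothetical):
-
-.. code-block:: python
-
-    async def collect(aiterable):
-        # A list comprehension using async for inside an async def (PEP 530).
-        return [x async for x in aiterable]
-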
-Mypy does not yet support this.
-
-New named tuple syntax
-----------------------
-
-Python 3.6 supports an alternative syntax for named tuples. See :ref:`named-tuples`.
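-
-As a brief, illustrative sketch (the ``Point`` class is hypothetical),
-the alternative syntax declares the fields with variable annotations:
-
-.. code-block:: python
-
-    from typing import NamedTuple
-
-    class Point(NamedTuple):
-        x: int
-        y: int
-
-    p = Point(x=1, y=2)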
diff --git a/docs/source/revision_history.rst b/docs/source/revision_history.rst
deleted file mode 100644
index 2537c51..0000000
--- a/docs/source/revision_history.rst
+++ /dev/null
@@ -1,176 +0,0 @@
-Revision history
-================
-
-List of major changes to this document:
-
-- January 2017
- * Publish ``mypy`` version 0.470 on PyPI.
-
- * Change package name from ``mypy-lang`` to ``mypy``.
-
- * Add :ref:`integrating-mypy`.
-
- * Add :ref:`cheat-sheet-py3`.
-
- * Major update to :ref:`finding-imports`.
-
- * Add :ref:`--ignore-missing-imports <ignore-missing-imports>`.
-
- * Updates to :ref:`config-file`.
-
- * Document underscore support in numeric literals.
-
- * Document that arguments prefixed with ``__`` are positional-only.
-
- * Document that ``--hide-error-context`` is now on by default,
- and there is a new flag ``--show-error-context``.
-
- * Add ``ignore_errors`` to :ref:`per-module-flags`.
-
-- November 2016
- * Publish ``mypy-lang`` version 0.4.6 on PyPI.
-
- * Add :ref:`getting-started`.
-
- * Add :ref:`generic-methods-and-generic-self` (experimental).
-
- * Add :ref:`declaring-decorators`.
-
- * Discuss generic type aliases in :ref:`type-aliases`.
-
- * Discuss Python 3.6 named tuple syntax in :ref:`named-tuples`.
-
- * Updates to :ref:`common_issues`.
-
- * Updates to :ref:`python-36`.
-
- * Updates to :ref:`command-line`:
-
- * ``--custom-typeshed-dir``
-
- * ``--junit-xml``
-
- * ``--find-occurrences``
-
- * ``--cobertura-xml-report``
-
- * ``--warn-no-return``
-
- * Updates to :ref:`config-file`:
-
- * Sections with fnmatch patterns now use
- module name patterns (previously they were path patterns).
- * Added ``custom_typeshed_dir``, ``mypy_path`` and ``show_column_numbers``.
-
- * Mention the magic ``MYPY`` constant in :ref:`import-cycles`.
-
-- October 2016
- * Publish ``mypy-lang`` version 0.4.5 on PyPI.
-
- * Add :ref:`python-36`.
-
- * Add :ref:`config-file`.
-
- * Updates to :ref:`command-line`: ``--strict-optional-white-list``,
- ``--disallow-subclassing-any``, ``--config-file``, ``@flagfile``,
- ``--hide-error-context`` (replaces ``--suppress-error-context``),
- ``--show-column-numbers`` and ``--scripts-are-modules``.
-
- * Mention ``typing.TYPE_CHECKING`` in :ref:`import-cycles`.
-
-- August 2016
- * Publish ``mypy-lang`` version 0.4.4 on PyPI.
-
- * Add :ref:`newtypes`.
-
- * Add :ref:`async-and-await`.
-
- * Add :ref:`text-and-anystr`.
-
- * Add :ref:`version_and_platform_checks`.
-
-- July 2016
- * Publish ``mypy-lang`` version 0.4.3 on PyPI.
-
- * Add :ref:`strict_optional`.
-
- * Add :ref:`multi_line_annotation`.
-
-- June 2016
- * Publish ``mypy-lang`` version 0.4.2 on PyPI.
-
- * Add :ref:`type-of-class`.
-
- * Add :ref:`cheat-sheet-py2`.
-
- * Add :ref:`reveal-type`.
-
-- May 2016
- * Publish ``mypy-lang`` version 0.4 on PyPI.
-
- * Add :ref:`type-variable-upper-bound`.
-
- * Document :ref:`command-line`.
-
-- Feb 2016
- * Publish ``mypy-lang`` version 0.3.1 on PyPI.
-
- * Document Python 2 support.
-
-- Nov 2015
- Add :ref:`library-stubs`.
-
-- Jun 2015
- Remove ``Undefined`` and ``Dynamic``, as they are not in PEP 484.
-
-- Apr 2015
- Publish ``mypy-lang`` version 0.2.0 on PyPI.
-
-- Mar 2015
- Update documentation to reflect PEP 484:
-
- * Add :ref:`named-tuples` and :ref:`optional`.
-
- * Do not mention type application syntax (for
- example, ``List[int]()``), as it's no longer supported,
- due to PEP 484 compatibility.
-
- * Rename ``typevar`` to ``TypeVar``.
-
- * Document ``# type: ignore`` which allows
- locally ignoring spurious errors (:ref:`silencing_checker`).
-
- * No longer mention
- ``Any(x)`` as a valid cast, as it will be phased out soon.
-
- * Mention the new ``.pyi`` stub file extension. Stubs can live
- in the same directory as the rest of the program.
-
-- Jan 2015
- Mypy moves closer to PEP 484:
-
- * Add :ref:`type-aliases`.
-
- * Update discussion of overloading -- it's now only supported in stubs.
-
- * Rename ``Function[...]`` to ``Callable[...]``.
-
-- Dec 2014
- Publish mypy version 0.1.0 on PyPI.
-
-- Oct 2014
- Major restructuring.
- Split the HTML documentation into
- multiple pages.
-
-- Sep 2014
- Migrated docs to Sphinx.
-
-- Aug 2014
- Don't discuss native semantics. There is only Python
- semantics.
-
-- Jul 2013
- Rewrite to use new syntax. Shift focus to discussing
- Python semantics. Add more content, including short discussions of
- :ref:`generic-functions` and :ref:`union-types`.
diff --git a/docs/source/supported_python_features.rst b/docs/source/supported_python_features.rst
deleted file mode 100644
index ca68d4c..0000000
--- a/docs/source/supported_python_features.rst
+++ /dev/null
@@ -1,20 +0,0 @@
-Supported Python features and modules
-=====================================
-
-A list of unsupported Python features is maintained in the mypy wiki:
-
-- `Unsupported Python features <https://github.com/python/mypy/wiki/Unsupported-Python-Features>`_
-
-Runtime definition of methods and functions
-*******************************************
-
-By default, mypy will complain if you add a function to a class
-or module outside its definition -- but only if this is visible to the
-type checker. This only affects static checking, as mypy performs no
-additional type checking at runtime. You can easily work around
-this. For example, you can use dynamically typed code or values with
-``Any`` types, or you can use ``setattr`` or other introspection
-features. However, you need to be careful if you decide to do this. If
-used indiscriminately, you may have difficulty using static typing
-effectively, since the type checker cannot see functions defined at
-runtime.
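-
-As a minimal sketch of such a workaround (the ``Greeter`` class and
-``greet`` function are hypothetical), the method below is attached at
-runtime with ``setattr``; the call sites then either silence the
-resulting error locally or go through a value typed as ``Any``:
-
-.. code-block:: python
-
-    from typing import Any
-
-    class Greeter:
-        pass
-
-    def greet(self: Greeter) -> str:
-        return 'hello'
-
-    # Added outside the class definition -- invisible to the type checker.
-    setattr(Greeter, 'greet', greet)
-
-    g = Greeter()
-    # mypy cannot see the runtime-added method, so silence the error here:
-    g.greet()  # type: ignore
-
-    anything = g  # type: Any
-    anything.greet()  # not checked: attribute access through Any is allowed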
diff --git a/docs/source/type_inference_and_annotations.rst b/docs/source/type_inference_and_annotations.rst
deleted file mode 100644
index 76e9cf9..0000000
--- a/docs/source/type_inference_and_annotations.rst
+++ /dev/null
@@ -1,172 +0,0 @@
-Type inference and type annotations
-===================================
-
-Type inference
-**************
-
-The initial assignment defines a variable. If you do not explicitly
-specify the type of the variable, mypy infers the type based on the
-static type of the value expression:
-
-.. code-block:: python
-
- i = 1 # Infer type int for i
- l = [1, 2] # Infer type List[int] for l
-
-Type inference is bidirectional and takes context into account. For
-example, the following is valid:
-
-.. code-block:: python
-
- def f(l: List[object]) -> None:
- l = [1, 2] # Infer type List[object] for [1, 2]
-
-In an assignment, the type context is determined by the assignment
-target. In this case this is ``l``, which has the type
-``List[object]``. The value expression ``[1, 2]`` is type checked in
-this context and given the type ``List[object]``. In the previous
-example we introduced a new variable ``l``, and here the type context
-was empty.
-
-Note that the following is not valid, since ``List[int]`` is not
-compatible with ``List[object]``:
-
-.. code-block:: python
-
- def f(l: List[object], k: List[int]) -> None:
- l = k # Type check error: incompatible types in assignment
-
-The assignment above is disallowed because allowing it could result in
-non-``int`` values being stored in a list of ``int``:
-
-.. code-block:: python
-
- def f(l: List[object], k: List[int]) -> None:
- l = k
- l.append('x')
- print(k[-1]) # Ouch; a string in List[int]
-
-You can still run the above program; it prints ``x``. This illustrates
-the fact that static types are used during type checking, but they do
-not affect the runtime behavior of programs. You can run programs with
-type check failures, which is often very handy when performing a large
-refactoring. Thus you can always 'work around' the type system, and it
-doesn't really limit what you can do in your program.
-
-Type inference is not used in dynamically typed functions (those
-without an explicit return type) — every local variable type defaults
-to ``Any``, which is discussed later.
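-
-A brief sketch of the difference (the function names are illustrative):
-
-.. code-block:: python
-
-    def untyped(x):
-        y = x + 1      # y gets type Any; the body is not type checked
-        return y * {}  # not reported by default: the function is dynamically typed
-
-    def typed(x: int) -> int:
-        y = x + 1      # y is inferred as int
-        return y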
-
-Explicit types for variables
-****************************
-
-You can override the inferred type of a variable by using a
-special type comment after an assignment statement:
-
-.. code-block:: python
-
- x = 1 # type: Union[int, str]
-
-Without the type comment, the type of ``x`` would be just ``int``. We
-use an annotation to give it a more general type ``Union[int, str]``.
-Mypy checks that the type of the initializer is compatible with the
-declared type. The following example is not valid, since the initializer is
-a floating point number, and this is incompatible with the declared
-type:
-
-.. code-block:: python
-
- x = 1.1 # type: Union[int, str] # Error!
-
-.. note::
-
- The best way to think about this is that the type comment sets the
- type of the variable, not the type of the expression. To force the
- type of an expression you can use ``cast(<type>, <expression>)``.
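-
-For instance, a minimal illustration of ``cast`` (the variable name is
-arbitrary):
-
-.. code-block:: python
-
-    from typing import List, cast
-
-    values = cast(List[int], [])  # the expression [] is given type List[int]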
-
-Explicit types for collections
-******************************
-
-The type checker cannot always infer the type of a list or a
-dictionary. This often arises when creating an empty list or
-dictionary and assigning it to a new variable that doesn't have an explicit
-variable type. In these cases you can give the type explicitly using
-a type annotation comment:
-
-.. code-block:: python
-
- l = [] # type: List[int] # Create empty list with type List[int]
- d = {} # type: Dict[str, int] # Create empty dictionary (str -> int)
-
-Similarly, you can also give an explicit type when creating an empty set:
-
-.. code-block:: python
-
- s = set() # type: Set[int]
-
-Declaring multiple variable types at a time
-*******************************************
-
-You can declare more than one variable at a time. To work nicely with
-multiple assignment, you must give each variable a separate type:
-
-.. code-block:: python
-
- i, found = 0, False # type: int, bool
-
-You can optionally use parentheses around the types, assignment targets
-and assigned expression:
-
-.. code-block:: python
-
- i, found = 0, False # type: (int, bool) # OK
- (i, found) = 0, False # type: int, bool # OK
- i, found = (0, False) # type: int, bool # OK
- (i, found) = (0, False) # type: (int, bool) # OK
-
-Starred expressions
-*******************
-
-In most cases, mypy can infer the type of starred expressions from the
-right-hand side of an assignment, but not always:
-
-.. code-block:: python
-
- a, *bs = 1, 2, 3 # OK
- p, q, *rs = 1, 2 # Error: Type of rs cannot be inferred
-
-On the first line, the type of ``bs`` is inferred to be
-``List[int]``. However, on the second line, mypy cannot infer the type
-of ``rs``, because there is no right-hand side value for ``rs`` to
-infer the type from. In cases like these, the starred expression needs
-to be annotated with a starred type:
-
-.. code-block:: python
-
- p, q, *rs = 1, 2 # type: int, int, *List[int]
-
-Here, the type of ``rs`` is set to ``List[int]``.
-
-Types in stub files
-*******************
-
-:ref:`Stub files <library-stubs>` are written in normal Python 3
-syntax, but they generally leave out runtime logic such as variable
-initializers, function bodies, and default arguments, replacing them
-with ellipses.
-
-In this example, each ellipsis ``...`` is literally written in the
-stub file as three dots:
-
-.. code-block:: python
-
-    x = ...  # type: int
-    def afunc(code: str) -> int: ...
-    def bfunc(a: int, b: int = ...) -> int: ...
-
-.. note::
-
- The ellipsis ``...`` is also used with a different meaning in
- :ref:`callable types <callable-types>` and :ref:`tuple types
- <tuple-types>`.
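-
-For quick reference, a small sketch of those two other uses (the
-``apply`` function is hypothetical):
-
-.. code-block:: python
-
-    from typing import Callable, Tuple
-
-    def apply(f, args):
-        # type: (Callable[..., int], Tuple[int, ...]) -> int
-        # Callable[..., int] is a callable with arbitrary arguments returning int;
-        # Tuple[int, ...] is a variable-length tuple of ints.
-        return f(*args)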
diff --git a/extensions/README.md b/extensions/README.md
deleted file mode 100644
index 73b786b..0000000
--- a/extensions/README.md
+++ /dev/null
@@ -1,6 +0,0 @@
-Mypy Extensions
-===============
-
-The "mypy_extensions" module defines experimental extensions to the
-standard "typing" module that are supported by the mypy typechecker.
-
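-As a quick, illustrative example (the names below are arbitrary), the
-`TypedDict` extension can be used like this:
-
-```python
-from mypy_extensions import TypedDict
-
-# A dict type whose instances are expected to have exactly these keys.
-Movie = TypedDict('Movie', {'name': str, 'year': int})
-
-movie = {'name': 'Blade Runner', 'year': 1982}  # type: Movie
-```
-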
diff --git a/extensions/mypy_extensions.py b/extensions/mypy_extensions.py
deleted file mode 100644
index 26e568c..0000000
--- a/extensions/mypy_extensions.py
+++ /dev/null
@@ -1,97 +0,0 @@
-"""Defines experimental extensions to the standard "typing" module that are
-supported by the mypy typechecker.
-
-Example usage:
- from mypy_extensions import TypedDict
-"""
-
-# NOTE: This module must support Python 2.7 in addition to Python 3.x
-
-import sys
-# _type_check is NOT a part of public typing API, it is used here only to mimic
-# the (convenient) behavior of types provided by typing module.
-from typing import _type_check # type: ignore
-
-
-def _check_fails(cls, other):
- try:
- if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools', 'typing']:
- # Typed dicts are only for static structural subtyping.
- raise TypeError('TypedDict does not support instance and class checks')
- except (AttributeError, ValueError):
- pass
- return False
-
-
-def _dict_new(cls, *args, **kwargs):
- return dict(*args, **kwargs)
-
-
-def _typeddict_new(cls, _typename, _fields=None, **kwargs):
- if _fields is None:
- _fields = kwargs
- elif kwargs:
- raise TypeError("TypedDict takes either a dict or keyword arguments,"
- " but not both")
- return _TypedDictMeta(_typename, (), {'__annotations__': dict(_fields)})
-
-
-class _TypedDictMeta(type):
- def __new__(cls, name, bases, ns):
- # Create new typed dict class object.
- # This method is called directly when TypedDict is subclassed,
- # or via _typeddict_new when TypedDict is instantiated. This way
- # TypedDict supports all three syntaxes described in its docstring.
-        # Subclasses and instances of TypedDict return actual dictionaries
- # via _dict_new.
- ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new
- tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns)
- try:
- # Setting correct module is necessary to make typed dict classes pickleable.
- tp_dict.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__')
- except (AttributeError, ValueError):
- pass
- anns = ns.get('__annotations__', {})
- msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
- anns = {n: _type_check(tp, msg) for n, tp in anns.items()}
- for base in bases:
- anns.update(base.__dict__.get('__annotations__', {}))
- tp_dict.__annotations__ = anns
- return tp_dict
-
- __instancecheck__ = __subclasscheck__ = _check_fails
-
-
-TypedDict = _TypedDictMeta('TypedDict', (dict,), {})
-TypedDict.__module__ = __name__
-TypedDict.__doc__ = \
-    """A simple typed namespace. At runtime it is equivalent to a plain dict.
-
- TypedDict creates a dictionary type that expects all of its
- instances to have a certain set of keys, with each key
- associated with a value of a consistent type. This expectation
- is not checked at runtime but is only enforced by typecheckers.
- Usage::
-
- Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
- a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
- b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
- assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
-
- The type info could be accessed via Point2D.__annotations__. TypedDict
- supports two additional equivalent forms::
-
- Point2D = TypedDict('Point2D', x=int, y=int, label=str)
-
- class Point2D(TypedDict):
- x: int
- y: int
- label: str
-
-    The latter syntax is only supported in Python 3.6+, while the other
-    two syntax forms work for Python 2.7 and 3.2+.
- """
-
-
-# Return type that indicates a function does not return
-class NoReturn: pass
diff --git a/extensions/setup.py b/extensions/setup.py
deleted file mode 100644
index 3490840..0000000
--- a/extensions/setup.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-
-# NOTE: This package must support Python 2.7 in addition to Python 3.x
-
-from distutils.core import setup
-
-version = '0.2.0-dev'
-description = 'Experimental type system extensions for programs checked with the mypy typechecker.'
-long_description = '''
-Mypy Extensions
-===============
-
-The "mypy_extensions" module defines experimental extensions to the
-standard "typing" module that are supported by the mypy typechecker.
-'''.lstrip()
-
-classifiers = [
- 'Development Status :: 2 - Pre-Alpha',
- 'Environment :: Console',
- 'Intended Audience :: Developers',
- 'License :: OSI Approved :: MIT License',
- 'Operating System :: POSIX',
- 'Programming Language :: Python :: 2',
- 'Programming Language :: Python :: 2.7',
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.3',
- 'Programming Language :: Python :: 3.4',
- 'Programming Language :: Python :: 3.5',
- 'Topic :: Software Development',
-]
-
-setup(
- name='mypy_extensions',
- version=version,
- description=description,
- long_description=long_description,
- author='David Foster',
- author_email='david at dafoster.net',
- url='http://www.mypy-lang.org/',
- license='MIT License',
- platforms=['POSIX'],
- py_modules=['mypy_extensions'],
- classifiers=classifiers,
-)
diff --git a/lib-typing/2.7/setup.py b/lib-typing/2.7/setup.py
deleted file mode 100644
index 18c34d8..0000000
--- a/lib-typing/2.7/setup.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-
-"""setup.py for Python 2.x typing module"""
-
-import glob
-import os
-import os.path
-import sys
-
-from distutils.core import setup
-
-if sys.version_info >= (3, 0, 0):
- sys.stderr.write("ERROR: You need Python 2.x to install this module.\n")
- exit(1)
-
-version = '0.0.1.dev1'
-description = 'typing (Python 2.x)'
-long_description = '''
-typing (Python 2.x)
-===================
-
-This module is part of mypy, a static type checker for Python.
-'''.lstrip()
-
-classifiers = [
- 'Development Status :: 2 - Pre-Alpha',
- 'Environment :: Console',
- 'Intended Audience :: Developers',
- 'License :: OSI Approved :: MIT License',
- 'Operating System :: POSIX',
- 'Programming Language :: Python :: 2.7',
- 'Topic :: Software Development',
-]
-
-setup(name='typing',
- version=version,
- description=description,
- long_description=long_description,
- author='Jukka Lehtosalo',
- author_email='jukka.lehtosalo at iki.fi',
- url='http://www.mypy-lang.org/',
- license='MIT License',
- platforms=['POSIX'],
- py_modules=['typing'],
- classifiers=classifiers,
- )
diff --git a/lib-typing/2.7/test_typing.py b/lib-typing/2.7/test_typing.py
deleted file mode 100644
index 39eb7c1..0000000
--- a/lib-typing/2.7/test_typing.py
+++ /dev/null
@@ -1,1629 +0,0 @@
-from __future__ import absolute_import, unicode_literals
-
-import collections
-import pickle
-import re
-import sys
-from unittest import TestCase, main, SkipTest
-from copy import copy, deepcopy
-
-from typing import Any
-from typing import TypeVar, AnyStr
-from typing import T, KT, VT # Not in __all__.
-from typing import Union, Optional
-from typing import Tuple, List, MutableMapping
-from typing import Callable
-from typing import Generic, ClassVar
-from typing import cast
-from typing import Type
-from typing import NewType
-from typing import NamedTuple
-from typing import IO, TextIO, BinaryIO
-from typing import Pattern, Match
-import abc
-import typing
-try:
- import collections.abc as collections_abc
-except ImportError:
- import collections as collections_abc # Fallback for PY3.2.
-
-
-class BaseTestCase(TestCase):
-
- def assertIsSubclass(self, cls, class_or_tuple, msg=None):
- if not issubclass(cls, class_or_tuple):
- message = '%r is not a subclass of %r' % (cls, class_or_tuple)
- if msg is not None:
- message += ' : %s' % msg
- raise self.failureException(message)
-
- def assertNotIsSubclass(self, cls, class_or_tuple, msg=None):
- if issubclass(cls, class_or_tuple):
- message = '%r is a subclass of %r' % (cls, class_or_tuple)
- if msg is not None:
- message += ' : %s' % msg
- raise self.failureException(message)
-
- def clear_caches(self):
- for f in typing._cleanups:
- f()
-
-
-class Employee(object):
- pass
-
-
-class Manager(Employee):
- pass
-
-
-class Founder(Employee):
- pass
-
-
-class ManagingFounder(Manager, Founder):
- pass
-
-
-class AnyTests(BaseTestCase):
-
- def test_any_instance_type_error(self):
- with self.assertRaises(TypeError):
- isinstance(42, Any)
-
- def test_any_subclass_type_error(self):
- with self.assertRaises(TypeError):
- issubclass(Employee, Any)
- with self.assertRaises(TypeError):
- issubclass(Any, Employee)
-
- def test_repr(self):
- self.assertEqual(repr(Any), 'typing.Any')
-
- def test_errors(self):
- with self.assertRaises(TypeError):
- issubclass(42, Any)
- with self.assertRaises(TypeError):
- Any[int] # Any is not a generic type.
-
- def test_cannot_subclass(self):
- with self.assertRaises(TypeError):
- class A(Any):
- pass
- with self.assertRaises(TypeError):
- class A(type(Any)):
- pass
-
- def test_cannot_instantiate(self):
- with self.assertRaises(TypeError):
- Any()
- with self.assertRaises(TypeError):
- type(Any)()
-
- def test_cannot_subscript(self):
- with self.assertRaises(TypeError):
- Any[int]
-
- def test_any_is_subclass(self):
- # These expressions must simply not fail.
- typing.Match[Any]
- typing.Pattern[Any]
- typing.IO[Any]
-
-
-class TypeVarTests(BaseTestCase):
-
- def test_basic_plain(self):
- T = TypeVar('T')
- # T equals itself.
- self.assertEqual(T, T)
- # T is an instance of TypeVar
- self.assertIsInstance(T, TypeVar)
-
- def test_typevar_instance_type_error(self):
- T = TypeVar('T')
- with self.assertRaises(TypeError):
- isinstance(42, T)
-
- def test_typevar_subclass_type_error(self):
- T = TypeVar('T')
- with self.assertRaises(TypeError):
- issubclass(int, T)
- with self.assertRaises(TypeError):
- issubclass(T, int)
-
- def test_constrained_error(self):
- with self.assertRaises(TypeError):
- X = TypeVar('X', int)
- X
-
- def test_union_unique(self):
- X = TypeVar('X')
- Y = TypeVar('Y')
- self.assertNotEqual(X, Y)
- self.assertEqual(Union[X], X)
- self.assertNotEqual(Union[X], Union[X, Y])
- self.assertEqual(Union[X, X], X)
- self.assertNotEqual(Union[X, int], Union[X])
- self.assertNotEqual(Union[X, int], Union[int])
- self.assertEqual(Union[X, int].__args__, (X, int))
- self.assertEqual(Union[X, int].__parameters__, (X,))
- self.assertIs(Union[X, int].__origin__, Union)
-
- def test_union_constrained(self):
- A = TypeVar('A', str, bytes)
- self.assertNotEqual(Union[A, str], Union[A])
-
- def test_repr(self):
- self.assertEqual(repr(T), '~T')
- self.assertEqual(repr(KT), '~KT')
- self.assertEqual(repr(VT), '~VT')
- self.assertEqual(repr(AnyStr), '~AnyStr')
- T_co = TypeVar('T_co', covariant=True)
- self.assertEqual(repr(T_co), '+T_co')
- T_contra = TypeVar('T_contra', contravariant=True)
- self.assertEqual(repr(T_contra), '-T_contra')
-
- def test_no_redefinition(self):
- self.assertNotEqual(TypeVar('T'), TypeVar('T'))
- self.assertNotEqual(TypeVar('T', int, str), TypeVar('T', int, str))
-
- def test_cannot_subclass_vars(self):
- with self.assertRaises(TypeError):
- class V(TypeVar('T')):
- pass
-
- def test_cannot_subclass_var_itself(self):
- with self.assertRaises(TypeError):
- class V(TypeVar):
- pass
-
- def test_cannot_instantiate_vars(self):
- with self.assertRaises(TypeError):
- TypeVar('A')()
-
- def test_bound_errors(self):
- with self.assertRaises(TypeError):
- TypeVar('X', bound=42)
- with self.assertRaises(TypeError):
- TypeVar('X', str, float, bound=Employee)
-
-
-class UnionTests(BaseTestCase):
-
- def test_basics(self):
- u = Union[int, float]
- self.assertNotEqual(u, Union)
-
- def test_subclass_error(self):
- with self.assertRaises(TypeError):
- issubclass(int, Union)
- with self.assertRaises(TypeError):
- issubclass(Union, int)
- with self.assertRaises(TypeError):
- issubclass(int, Union[int, str])
- with self.assertRaises(TypeError):
- issubclass(Union[int, str], int)
-
- def test_union_any(self):
- u = Union[Any]
- self.assertEqual(u, Any)
- u1 = Union[int, Any]
- u2 = Union[Any, int]
- u3 = Union[Any, object]
- self.assertEqual(u1, u2)
- self.assertNotEqual(u1, Any)
- self.assertNotEqual(u2, Any)
- self.assertNotEqual(u3, Any)
-
- def test_union_object(self):
- u = Union[object]
- self.assertEqual(u, object)
- u = Union[int, object]
- self.assertEqual(u, object)
- u = Union[object, int]
- self.assertEqual(u, object)
-
- def test_unordered(self):
- u1 = Union[int, float]
- u2 = Union[float, int]
- self.assertEqual(u1, u2)
-
- def test_single_class_disappears(self):
- t = Union[Employee]
- self.assertIs(t, Employee)
-
- def test_base_class_disappears(self):
- u = Union[Employee, Manager, int]
- self.assertEqual(u, Union[int, Employee])
- u = Union[Manager, int, Employee]
- self.assertEqual(u, Union[int, Employee])
- u = Union[Employee, Manager]
- self.assertIs(u, Employee)
-
- def test_union_union(self):
- u = Union[int, float]
- v = Union[u, Employee]
- self.assertEqual(v, Union[int, float, Employee])
-
- def test_repr(self):
- self.assertEqual(repr(Union), 'typing.Union')
- u = Union[Employee, int]
- self.assertEqual(repr(u), 'typing.Union[%s.Employee, int]' % __name__)
- u = Union[int, Employee]
- self.assertEqual(repr(u), 'typing.Union[int, %s.Employee]' % __name__)
-
- def test_cannot_subclass(self):
- with self.assertRaises(TypeError):
- class C(Union):
- pass
- with self.assertRaises(TypeError):
- class C(type(Union)):
- pass
- with self.assertRaises(TypeError):
- class C(Union[int, str]):
- pass
-
- def test_cannot_instantiate(self):
- with self.assertRaises(TypeError):
- Union()
- u = Union[int, float]
- with self.assertRaises(TypeError):
- u()
- with self.assertRaises(TypeError):
- type(u)()
-
- def test_union_generalization(self):
- self.assertFalse(Union[str, typing.Iterable[int]] == str)
- self.assertFalse(Union[str, typing.Iterable[int]] == typing.Iterable[int])
- self.assertTrue(Union[str, typing.Iterable] == typing.Iterable)
-
- def test_optional(self):
- o = Optional[int]
- u = Union[int, None]
- self.assertEqual(o, u)
-
- def test_empty(self):
- with self.assertRaises(TypeError):
- Union[()]
-
- def test_union_instance_type_error(self):
- with self.assertRaises(TypeError):
- isinstance(42, Union[int, str])
-
- def test_union_str_pattern(self):
- # Shouldn't crash; see http://bugs.python.org/issue25390
- A = Union[str, Pattern]
- A
-
- def test_etree(self):
- # See https://github.com/python/typing/issues/229
- # (Only relevant for Python 2.)
- try:
- from xml.etree.cElementTree import Element
- except ImportError:
- raise SkipTest("cElementTree not found")
- Union[Element, str] # Shouldn't crash
-
- def Elem(*args):
- return Element(*args)
-
- Union[Elem, str] # Nor should this
-
-
-class TupleTests(BaseTestCase):
-
- def test_basics(self):
- with self.assertRaises(TypeError):
- issubclass(Tuple, Tuple[int, str])
- with self.assertRaises(TypeError):
- issubclass(tuple, Tuple[int, str])
-
- class TP(tuple): pass
- self.assertTrue(issubclass(tuple, Tuple))
- self.assertTrue(issubclass(TP, Tuple))
-
- def test_equality(self):
- self.assertEqual(Tuple[int], Tuple[int])
- self.assertEqual(Tuple[int, ...], Tuple[int, ...])
- self.assertNotEqual(Tuple[int], Tuple[int, int])
- self.assertNotEqual(Tuple[int], Tuple[int, ...])
-
- def test_tuple_subclass(self):
- class MyTuple(tuple):
- pass
- self.assertTrue(issubclass(MyTuple, Tuple))
-
- def test_tuple_instance_type_error(self):
- with self.assertRaises(TypeError):
- isinstance((0, 0), Tuple[int, int])
- isinstance((0, 0), Tuple)
-
- def test_repr(self):
- self.assertEqual(repr(Tuple), 'typing.Tuple')
- self.assertEqual(repr(Tuple[()]), 'typing.Tuple[()]')
- self.assertEqual(repr(Tuple[int, float]), 'typing.Tuple[int, float]')
- self.assertEqual(repr(Tuple[int, ...]), 'typing.Tuple[int, ...]')
-
- def test_errors(self):
- with self.assertRaises(TypeError):
- issubclass(42, Tuple)
- with self.assertRaises(TypeError):
- issubclass(42, Tuple[int])
-
-
-class CallableTests(BaseTestCase):
-
- def test_self_subclass(self):
- with self.assertRaises(TypeError):
- self.assertTrue(issubclass(type(lambda x: x), Callable[[int], int]))
- self.assertTrue(issubclass(type(lambda x: x), Callable))
-
- def test_eq_hash(self):
- self.assertEqual(Callable[[int], int], Callable[[int], int])
- self.assertEqual(len({Callable[[int], int], Callable[[int], int]}), 1)
- self.assertNotEqual(Callable[[int], int], Callable[[int], str])
- self.assertNotEqual(Callable[[int], int], Callable[[str], int])
- self.assertNotEqual(Callable[[int], int], Callable[[int, int], int])
- self.assertNotEqual(Callable[[int], int], Callable[[], int])
- self.assertNotEqual(Callable[[int], int], Callable)
-
- def test_cannot_instantiate(self):
- with self.assertRaises(TypeError):
- Callable()
- with self.assertRaises(TypeError):
- type(Callable)()
- c = Callable[[int], str]
- with self.assertRaises(TypeError):
- c()
- with self.assertRaises(TypeError):
- type(c)()
-
- def test_callable_wrong_forms(self):
- with self.assertRaises(TypeError):
- Callable[(), int]
- with self.assertRaises(TypeError):
- Callable[[()], int]
- with self.assertRaises(TypeError):
- Callable[[int, 1], 2]
-
- def test_callable_instance_works(self):
- def f():
- pass
- self.assertIsInstance(f, Callable)
- self.assertNotIsInstance(None, Callable)
-
- def test_callable_instance_type_error(self):
- def f():
- pass
- with self.assertRaises(TypeError):
- self.assertIsInstance(f, Callable[[], None])
- with self.assertRaises(TypeError):
- self.assertIsInstance(f, Callable[[], Any])
- with self.assertRaises(TypeError):
- self.assertNotIsInstance(None, Callable[[], None])
- with self.assertRaises(TypeError):
- self.assertNotIsInstance(None, Callable[[], Any])
-
- def test_repr(self):
- ct0 = Callable[[], bool]
- self.assertEqual(repr(ct0), 'typing.Callable[[], bool]')
- ct2 = Callable[[str, float], int]
- self.assertEqual(repr(ct2), 'typing.Callable[[str, float], int]')
- ctv = Callable[..., str]
- self.assertEqual(repr(ctv), 'typing.Callable[..., str]')
-
- def test_ellipsis_in_generic(self):
- # Shouldn't crash; see https://github.com/python/typing/issues/259
- typing.List[Callable[..., str]]
-
-
-XK = TypeVar('XK', unicode, bytes)
-XV = TypeVar('XV')
-
-
-class SimpleMapping(Generic[XK, XV]):
-
- def __getitem__(self, key):
- pass
-
- def __setitem__(self, key, value):
- pass
-
- def get(self, key, default=None):
- pass
-
-
-class MySimpleMapping(SimpleMapping[XK, XV]):
-
- def __init__(self):
- self.store = {}
-
- def __getitem__(self, key):
- return self.store[key]
-
- def __setitem__(self, key, value):
- self.store[key] = value
-
- def get(self, key, default=None):
- try:
- return self.store[key]
- except KeyError:
- return default
-
-
-class ProtocolTests(BaseTestCase):
-
- def test_supports_int(self):
- self.assertIsSubclass(int, typing.SupportsInt)
- self.assertNotIsSubclass(str, typing.SupportsInt)
-
- def test_supports_float(self):
- self.assertIsSubclass(float, typing.SupportsFloat)
- self.assertNotIsSubclass(str, typing.SupportsFloat)
-
- def test_supports_complex(self):
-
- # Note: complex itself doesn't have __complex__.
- class C(object):
- def __complex__(self):
- return 0j
-
- self.assertIsSubclass(C, typing.SupportsComplex)
- self.assertNotIsSubclass(str, typing.SupportsComplex)
-
- def test_supports_abs(self):
- self.assertIsSubclass(float, typing.SupportsAbs)
- self.assertIsSubclass(int, typing.SupportsAbs)
- self.assertNotIsSubclass(str, typing.SupportsAbs)
-
- def test_reversible(self):
- self.assertIsSubclass(list, typing.Reversible)
- self.assertNotIsSubclass(int, typing.Reversible)
-
- def test_protocol_instance_type_error(self):
- with self.assertRaises(TypeError):
- isinstance(0, typing.SupportsAbs)
- class C1(typing.SupportsInt):
- def __int__(self):
- return 42
- class C2(C1):
- pass
- c = C2()
- self.assertIsInstance(c, C1)
-
-class GenericTests(BaseTestCase):
-
- def test_basics(self):
- X = SimpleMapping[str, Any]
- self.assertEqual(X.__parameters__, ())
- with self.assertRaises(TypeError):
- X[unicode]
- with self.assertRaises(TypeError):
- X[unicode, unicode]
- Y = SimpleMapping[XK, unicode]
- self.assertEqual(Y.__parameters__, (XK,))
- Y[unicode]
- with self.assertRaises(TypeError):
- Y[unicode, unicode]
-
- def test_generic_errors(self):
- T = TypeVar('T')
- with self.assertRaises(TypeError):
- Generic[T]()
- with self.assertRaises(TypeError):
- isinstance([], List[int])
- with self.assertRaises(TypeError):
- issubclass(list, List[int])
-
- def test_init(self):
- T = TypeVar('T')
- S = TypeVar('S')
- with self.assertRaises(TypeError):
- Generic[T, T]
- with self.assertRaises(TypeError):
- Generic[T, S, T]
-
- def test_repr(self):
- self.assertEqual(repr(SimpleMapping),
- __name__ + '.' + 'SimpleMapping')
- self.assertEqual(repr(MySimpleMapping),
- __name__ + '.' + 'MySimpleMapping')
-
- def test_chain_repr(self):
- T = TypeVar('T')
- S = TypeVar('S')
-
- class C(Generic[T]):
- pass
-
- X = C[Tuple[S, T]]
- self.assertEqual(X, C[Tuple[S, T]])
- self.assertNotEqual(X, C[Tuple[T, S]])
-
- Y = X[T, int]
- self.assertEqual(Y, X[T, int])
- self.assertNotEqual(Y, X[S, int])
- self.assertNotEqual(Y, X[T, str])
-
- Z = Y[str]
- self.assertEqual(Z, Y[str])
- self.assertNotEqual(Z, Y[int])
- self.assertNotEqual(Z, Y[T])
-
- self.assertTrue(str(Z).endswith(
- '.C[typing.Tuple[str, int]]'))
-
- def test_new_repr(self):
- T = TypeVar('T')
- U = TypeVar('U', covariant=True)
- S = TypeVar('S')
-
- self.assertEqual(repr(List), 'typing.List')
- self.assertEqual(repr(List[T]), 'typing.List[~T]')
- self.assertEqual(repr(List[U]), 'typing.List[+U]')
- self.assertEqual(repr(List[S][T][int]), 'typing.List[int]')
- self.assertEqual(repr(List[int]), 'typing.List[int]')
-
- def test_new_repr_complex(self):
- T = TypeVar('T')
- TS = TypeVar('TS')
-
- self.assertEqual(repr(typing.Mapping[T, TS][TS, T]), 'typing.Mapping[~TS, ~T]')
- self.assertEqual(repr(List[Tuple[T, TS]][int, T]),
- 'typing.List[typing.Tuple[int, ~T]]')
- self.assertEqual(repr(List[Tuple[T, T]][List[int]]),
- 'typing.List[typing.Tuple[typing.List[int], typing.List[int]]]')
-
- def test_new_repr_bare(self):
- T = TypeVar('T')
- self.assertEqual(repr(Generic[T]), 'typing.Generic[~T]')
- self.assertEqual(repr(typing._Protocol[T]), 'typing.Protocol[~T]')
- class C(typing.Dict[Any, Any]): pass
- # this line should just work
- repr(C.__mro__)
-
- def test_dict(self):
- T = TypeVar('T')
-
- class B(Generic[T]):
- pass
-
- b = B()
- b.foo = 42
- self.assertEqual(b.__dict__, {'foo': 42})
-
- class C(B[int]):
- pass
-
- c = C()
- c.bar = 'abc'
- self.assertEqual(c.__dict__, {'bar': 'abc'})
-
- def test_false_subclasses(self):
- class MyMapping(MutableMapping[str, str]): pass
- self.assertNotIsInstance({}, MyMapping)
- self.assertNotIsSubclass(dict, MyMapping)
-
- def test_abc_bases(self):
- class MM(MutableMapping[str, str]):
- def __getitem__(self, k):
- return None
- def __setitem__(self, k, v):
- pass
- def __delitem__(self, k):
- pass
- def __iter__(self):
- return iter(())
- def __len__(self):
- return 0
- # this should just work
- MM().update()
- self.assertIsInstance(MM(), collections_abc.MutableMapping)
- self.assertIsInstance(MM(), MutableMapping)
- self.assertNotIsInstance(MM(), List)
- self.assertNotIsInstance({}, MM)
-
- def test_multiple_bases(self):
- class MM1(MutableMapping[str, str], collections_abc.MutableMapping):
- pass
- with self.assertRaises(TypeError):
- # consistent MRO not possible
- class MM2(collections_abc.MutableMapping, MutableMapping[str, str]):
- pass
-
- def test_orig_bases(self):
- T = TypeVar('T')
- class C(typing.Dict[str, T]): pass
- self.assertEqual(C.__orig_bases__, (typing.Dict[str, T],))
-
- def test_naive_runtime_checks(self):
- def naive_dict_check(obj, tp):
- # Check if a dictionary conforms to Dict type
- if len(tp.__parameters__) > 0:
- raise NotImplementedError
- if tp.__args__:
- KT, VT = tp.__args__
- return all(isinstance(k, KT) and isinstance(v, VT)
- for k, v in obj.items())
- self.assertTrue(naive_dict_check({'x': 1}, typing.Dict[typing.Text, int]))
- self.assertFalse(naive_dict_check({1: 'x'}, typing.Dict[typing.Text, int]))
- with self.assertRaises(NotImplementedError):
- naive_dict_check({1: 'x'}, typing.Dict[typing.Text, T])
-
- def naive_generic_check(obj, tp):
- # Check if an instance conforms to the generic class
- if not hasattr(obj, '__orig_class__'):
- raise NotImplementedError
- return obj.__orig_class__ == tp
- class Node(Generic[T]): pass
- self.assertTrue(naive_generic_check(Node[int](), Node[int]))
- self.assertFalse(naive_generic_check(Node[str](), Node[int]))
- self.assertFalse(naive_generic_check(Node[str](), List))
- with self.assertRaises(NotImplementedError):
- naive_generic_check([1,2,3], Node[int])
-
- def naive_list_base_check(obj, tp):
- # Check if list conforms to a List subclass
- return all(isinstance(x, tp.__orig_bases__[0].__args__[0])
- for x in obj)
- class C(List[int]): pass
- self.assertTrue(naive_list_base_check([1, 2, 3], C))
- self.assertFalse(naive_list_base_check(['a', 'b'], C))
-
- def test_multi_subscr_base(self):
- T = TypeVar('T')
- U = TypeVar('U')
- V = TypeVar('V')
- class C(List[T][U][V]): pass
- class D(C, List[T][U][V]): pass
- self.assertEqual(C.__parameters__, (V,))
- self.assertEqual(D.__parameters__, (V,))
- self.assertEqual(C[int].__parameters__, ())
- self.assertEqual(D[int].__parameters__, ())
- self.assertEqual(C[int].__args__, (int,))
- self.assertEqual(D[int].__args__, (int,))
- self.assertEqual(C.__bases__, (List,))
- self.assertEqual(D.__bases__, (C, List))
- self.assertEqual(C.__orig_bases__, (List[T][U][V],))
- self.assertEqual(D.__orig_bases__, (C, List[T][U][V]))
-
- def test_extended_generic_rules_eq(self):
- T = TypeVar('T')
- U = TypeVar('U')
- self.assertEqual(Tuple[T, T][int], Tuple[int, int])
- self.assertEqual(typing.Iterable[Tuple[T, T]][T], typing.Iterable[Tuple[T, T]])
- with self.assertRaises(TypeError):
- Tuple[T, int][()]
- with self.assertRaises(TypeError):
- Tuple[T, U][T, ...]
-
- self.assertEqual(Union[T, int][int], int)
- self.assertEqual(Union[T, U][int, Union[int, str]], Union[int, str])
- class Base(object): pass
- class Derived(Base): pass
- self.assertEqual(Union[T, Base][Derived], Base)
- with self.assertRaises(TypeError):
- Union[T, int][1]
-
- self.assertEqual(Callable[[T], T][KT], Callable[[KT], KT])
- self.assertEqual(Callable[..., List[T]][int], Callable[..., List[int]])
- with self.assertRaises(TypeError):
- Callable[[T], U][..., int]
- with self.assertRaises(TypeError):
- Callable[[T], U][[], int]
-
- def test_extended_generic_rules_repr(self):
- T = TypeVar('T')
- self.assertEqual(repr(Union[Tuple, Callable]).replace('typing.', ''),
- 'Union[Tuple, Callable]')
- self.assertEqual(repr(Union[Tuple, Tuple[int]]).replace('typing.', ''),
- 'Tuple')
- self.assertEqual(repr(Callable[..., Optional[T]][int]).replace('typing.', ''),
- 'Callable[..., Union[int, NoneType]]')
- self.assertEqual(repr(Callable[[], List[T]][int]).replace('typing.', ''),
- 'Callable[[], List[int]]')
-
-    def test_generic_forward_ref(self):
- LLT = List[List['CC']]
- class CC: pass
- self.assertEqual(typing._eval_type(LLT, globals(), locals()), List[List[CC]])
- T = TypeVar('T')
- AT = Tuple[T, ...]
- self.assertIs(typing._eval_type(AT, globals(), locals()), AT)
- CT = Callable[..., List[T]]
- self.assertIs(typing._eval_type(CT, globals(), locals()), CT)
-
- def test_extended_generic_rules_subclassing(self):
- class T1(Tuple[T, KT]): pass
- class T2(Tuple[T, ...]): pass
- class C1(Callable[[T], T]): pass
- class C2(Callable[..., int]):
- def __call__(self):
- return None
-
- self.assertEqual(T1.__parameters__, (T, KT))
- self.assertEqual(T1[int, str].__args__, (int, str))
- self.assertEqual(T1[int, T].__origin__, T1)
-
- self.assertEqual(T2.__parameters__, (T,))
- with self.assertRaises(TypeError):
- T1[int]
- with self.assertRaises(TypeError):
- T2[int, str]
-
- self.assertEqual(repr(C1[int]).split('.')[-1], 'C1[int]')
- self.assertEqual(C2.__parameters__, ())
- self.assertIsInstance(C2(), collections_abc.Callable)
- self.assertIsSubclass(C2, collections_abc.Callable)
- self.assertIsSubclass(C1, collections_abc.Callable)
- self.assertIsInstance(T1(), tuple)
- self.assertIsSubclass(T2, tuple)
- self.assertIsSubclass(Tuple[int, ...], typing.Sequence)
- self.assertIsSubclass(Tuple[int, ...], typing.Iterable)
-
- def test_fail_with_bare_union(self):
- with self.assertRaises(TypeError):
- List[Union]
- with self.assertRaises(TypeError):
- Tuple[Optional]
- with self.assertRaises(TypeError):
- ClassVar[ClassVar]
- with self.assertRaises(TypeError):
- List[ClassVar[int]]
-
- def test_fail_with_bare_generic(self):
- T = TypeVar('T')
- with self.assertRaises(TypeError):
- List[Generic]
- with self.assertRaises(TypeError):
- Tuple[Generic[T]]
- with self.assertRaises(TypeError):
- List[typing._Protocol]
-
- def test_type_erasure_special(self):
- T = TypeVar('T')
- # this is the only test that checks type caching
- self.clear_caches()
- class MyTup(Tuple[T, T]): pass
- self.assertIs(MyTup[int]().__class__, MyTup)
- self.assertIs(MyTup[int]().__orig_class__, MyTup[int])
- class MyCall(Callable[..., T]):
- def __call__(self): return None
- self.assertIs(MyCall[T]().__class__, MyCall)
- self.assertIs(MyCall[T]().__orig_class__, MyCall[T])
- class MyDict(typing.Dict[T, T]): pass
- self.assertIs(MyDict[int]().__class__, MyDict)
- self.assertIs(MyDict[int]().__orig_class__, MyDict[int])
- class MyDef(typing.DefaultDict[str, T]): pass
- self.assertIs(MyDef[int]().__class__, MyDef)
- self.assertIs(MyDef[int]().__orig_class__, MyDef[int])
-
- def test_all_repr_eq_any(self):
- objs = (getattr(typing, el) for el in typing.__all__)
- for obj in objs:
- self.assertNotEqual(repr(obj), '')
- self.assertEqual(obj, obj)
- if getattr(obj, '__parameters__', None) and len(obj.__parameters__) == 1:
- self.assertEqual(obj[Any].__args__, (Any,))
- if isinstance(obj, type):
- for base in obj.__mro__:
- self.assertNotEqual(repr(base), '')
- self.assertEqual(base, base)
-
- def test_pickle(self):
- global C # pickle wants to reference the class by name
- T = TypeVar('T')
-
- class B(Generic[T]):
- pass
-
- class C(B[int]):
- pass
-
- c = C()
- c.foo = 42
- c.bar = 'abc'
- for proto in range(pickle.HIGHEST_PROTOCOL + 1):
- z = pickle.dumps(c, proto)
- x = pickle.loads(z)
- self.assertEqual(x.foo, 42)
- self.assertEqual(x.bar, 'abc')
- self.assertEqual(x.__dict__, {'foo': 42, 'bar': 'abc'})
- simples = [Any, Union, Tuple, Callable, ClassVar, List, typing.Iterable]
- for s in simples:
- for proto in range(pickle.HIGHEST_PROTOCOL + 1):
- z = pickle.dumps(s, proto)
- x = pickle.loads(z)
- self.assertEqual(s, x)
-
- def test_copy_and_deepcopy(self):
- T = TypeVar('T')
- class Node(Generic[T]): pass
- things = [Any, Union[T, int], Tuple[T, int], Callable[..., T], Callable[[int], int],
- Tuple[Any, Any], Node[T], Node[int], Node[Any], typing.Iterable[T],
- typing.Iterable[Any], typing.Iterable[int], typing.Dict[int, str],
- typing.Dict[T, Any], ClassVar[int], ClassVar[List[T]], Tuple['T', 'T'],
- Union['T', int], List['T'], typing.Mapping['T', int]]
- for t in things:
- self.assertEqual(t, deepcopy(t))
- self.assertEqual(t, copy(t))
-
- def test_parameterized_slots(self):
- T = TypeVar('T')
- class C(Generic[T]):
- __slots__ = ('potato',)
-
- c = C()
- c_int = C[int]()
- self.assertEqual(C.__slots__, C[str].__slots__)
-
- c.potato = 0
- c_int.potato = 0
- with self.assertRaises(AttributeError):
- c.tomato = 0
- with self.assertRaises(AttributeError):
- c_int.tomato = 0
-
- self.assertEqual(typing._eval_type(C['C'], globals(), locals()), C[C])
- self.assertEqual(typing._eval_type(C['C'], globals(), locals()).__slots__,
- C.__slots__)
- self.assertEqual(copy(C[int]), deepcopy(C[int]))
-
- def test_parameterized_slots_dict(self):
- T = TypeVar('T')
- class D(Generic[T]):
- __slots__ = {'banana': 42}
-
- d = D()
- d_int = D[int]()
- self.assertEqual(D.__slots__, D[str].__slots__)
-
- d.banana = 'yes'
- d_int.banana = 'yes'
- with self.assertRaises(AttributeError):
- d.foobar = 'no'
- with self.assertRaises(AttributeError):
- d_int.foobar = 'no'
-
- def test_errors(self):
- with self.assertRaises(TypeError):
- B = SimpleMapping[XK, Any]
-
- class C(Generic[B]):
- pass
-
- def test_repr_2(self):
- PY32 = sys.version_info[:2] < (3, 3)
-
- class C(Generic[T]):
- pass
-
- self.assertEqual(C.__module__, __name__)
- if not PY32:
- self.assertEqual(C.__qualname__,
- 'GenericTests.test_repr_2.<locals>.C')
- self.assertEqual(repr(C).split('.')[-1], 'C')
- X = C[int]
- self.assertEqual(X.__module__, __name__)
- if not PY32:
- self.assertTrue(X.__qualname__.endswith('.<locals>.C'))
- self.assertEqual(repr(X).split('.')[-1], 'C[int]')
-
- class Y(C[int]):
- pass
-
- self.assertEqual(Y.__module__, __name__)
- if not PY32:
- self.assertEqual(Y.__qualname__,
- 'GenericTests.test_repr_2.<locals>.Y')
- self.assertEqual(repr(Y).split('.')[-1], 'Y')
-
- def test_eq_1(self):
- self.assertEqual(Generic, Generic)
- self.assertEqual(Generic[T], Generic[T])
- self.assertNotEqual(Generic[KT], Generic[VT])
-
- def test_eq_2(self):
-
- class A(Generic[T]):
- pass
-
- class B(Generic[T]):
- pass
-
- self.assertEqual(A, A)
- self.assertNotEqual(A, B)
- self.assertEqual(A[T], A[T])
- self.assertNotEqual(A[T], B[T])
-
- def test_multiple_inheritance(self):
-
- class A(Generic[T, VT]):
- pass
-
- class B(Generic[KT, T]):
- pass
-
- class C(A[T, VT], Generic[VT, T, KT], B[KT, T]):
- pass
-
- self.assertEqual(C.__parameters__, (VT, T, KT))
-
- def test_nested(self):
-
- G = Generic
-
- class Visitor(G[T]):
-
- a = None
-
- def set(self, a):
- self.a = a
-
- def get(self):
- return self.a
-
- def visit(self):
- return self.a
-
- V = Visitor[typing.List[int]]
-
- class IntListVisitor(V):
-
- def append(self, x):
- self.a.append(x)
-
- a = IntListVisitor()
- a.set([])
- a.append(1)
- a.append(42)
- self.assertEqual(a.get(), [1, 42])
-
- def test_type_erasure(self):
- T = TypeVar('T')
-
- class Node(Generic[T]):
- def __init__(self, label,
- left = None,
- right = None):
- self.label = label # type: T
- self.left = left # type: Optional[Node[T]]
- self.right = right # type: Optional[Node[T]]
-
- def foo(x):
- a = Node(x)
- b = Node[T](x)
- c = Node[Any](x)
- self.assertIs(type(a), Node)
- self.assertIs(type(b), Node)
- self.assertIs(type(c), Node)
- self.assertEqual(a.label, x)
- self.assertEqual(b.label, x)
- self.assertEqual(c.label, x)
-
- foo(42)
-
- def test_implicit_any(self):
- T = TypeVar('T')
-
- class C(Generic[T]):
- pass
-
- class D(C):
- pass
-
- self.assertEqual(D.__parameters__, ())
-
- with self.assertRaises(Exception):
- D[int]
- with self.assertRaises(Exception):
- D[Any]
- with self.assertRaises(Exception):
- D[T]
-
-class ClassVarTests(BaseTestCase):
-
- def test_basics(self):
- with self.assertRaises(TypeError):
- ClassVar[1]
- with self.assertRaises(TypeError):
- ClassVar[int, str]
- with self.assertRaises(TypeError):
- ClassVar[int][str]
-
- def test_repr(self):
- self.assertEqual(repr(ClassVar), 'typing.ClassVar')
- cv = ClassVar[int]
- self.assertEqual(repr(cv), 'typing.ClassVar[int]')
- cv = ClassVar[Employee]
- self.assertEqual(repr(cv), 'typing.ClassVar[%s.Employee]' % __name__)
-
- def test_cannot_subclass(self):
- with self.assertRaises(TypeError):
- class C(type(ClassVar)):
- pass
- with self.assertRaises(TypeError):
- class C(type(ClassVar[int])):
- pass
-
- def test_cannot_init(self):
- with self.assertRaises(TypeError):
- ClassVar()
- with self.assertRaises(TypeError):
- type(ClassVar)()
- with self.assertRaises(TypeError):
- type(ClassVar[Optional[int]])()
-
- def test_no_isinstance(self):
- with self.assertRaises(TypeError):
- isinstance(1, ClassVar[int])
- with self.assertRaises(TypeError):
- issubclass(int, ClassVar)
-
-
-class CastTests(BaseTestCase):
-
- def test_basics(self):
- self.assertEqual(cast(int, 42), 42)
- self.assertEqual(cast(float, 42), 42)
- self.assertIs(type(cast(float, 42)), int)
- self.assertEqual(cast(Any, 42), 42)
- self.assertEqual(cast(list, 42), 42)
- self.assertEqual(cast(Union[str, float], 42), 42)
- self.assertEqual(cast(AnyStr, 42), 42)
- self.assertEqual(cast(None, 42), 42)
-
- def test_errors(self):
- # Bogus calls are not expected to fail.
- cast(42, 42)
- cast('hello', 42)
-
-
-class ForwardRefTests(BaseTestCase):
-
- def test_forwardref_instance_type_error(self):
- fr = typing._ForwardRef('int')
- with self.assertRaises(TypeError):
- isinstance(42, fr)
-
- def test_syntax_error(self):
-
- with self.assertRaises(SyntaxError):
- Generic['/T']
-
-
-class OverloadTests(BaseTestCase):
-
- def test_overload_exists(self):
- from typing import overload
-
- def test_overload_fails(self):
- from typing import overload
-
- with self.assertRaises(RuntimeError):
-
- @overload
- def blah():
- pass
-
- blah()
-
- def test_overload_succeeds(self):
- from typing import overload
-
- @overload
- def blah():
- pass
-
- def blah():
- pass
-
- blah()
-
-
-class CollectionsAbcTests(BaseTestCase):
-
- def test_hashable(self):
- self.assertIsInstance(42, typing.Hashable)
- self.assertNotIsInstance([], typing.Hashable)
-
- def test_iterable(self):
- self.assertIsInstance([], typing.Iterable)
- # Due to ABC caching, the second time takes a separate code
- # path and could fail. So call this a few times.
- self.assertIsInstance([], typing.Iterable)
- self.assertIsInstance([], typing.Iterable)
- self.assertNotIsInstance(42, typing.Iterable)
- # Just in case, also test issubclass() a few times.
- self.assertIsSubclass(list, typing.Iterable)
- self.assertIsSubclass(list, typing.Iterable)
-
- def test_iterator(self):
- it = iter([])
- self.assertIsInstance(it, typing.Iterator)
- self.assertNotIsInstance(42, typing.Iterator)
-
- def test_sized(self):
- self.assertIsInstance([], typing.Sized)
- self.assertNotIsInstance(42, typing.Sized)
-
- def test_container(self):
- self.assertIsInstance([], typing.Container)
- self.assertNotIsInstance(42, typing.Container)
-
- def test_abstractset(self):
- self.assertIsInstance(set(), typing.AbstractSet)
- self.assertNotIsInstance(42, typing.AbstractSet)
-
- def test_mutableset(self):
- self.assertIsInstance(set(), typing.MutableSet)
- self.assertNotIsInstance(frozenset(), typing.MutableSet)
-
- def test_mapping(self):
- self.assertIsInstance({}, typing.Mapping)
- self.assertNotIsInstance(42, typing.Mapping)
-
- def test_mutablemapping(self):
- self.assertIsInstance({}, typing.MutableMapping)
- self.assertNotIsInstance(42, typing.MutableMapping)
-
- def test_sequence(self):
- self.assertIsInstance([], typing.Sequence)
- self.assertNotIsInstance(42, typing.Sequence)
-
- def test_mutablesequence(self):
- self.assertIsInstance([], typing.MutableSequence)
- self.assertNotIsInstance((), typing.MutableSequence)
-
- def test_bytestring(self):
- self.assertIsInstance(b'', typing.ByteString)
- self.assertIsInstance(bytearray(b''), typing.ByteString)
-
- def test_list(self):
- self.assertIsSubclass(list, typing.List)
-
- def test_set(self):
- self.assertIsSubclass(set, typing.Set)
- self.assertNotIsSubclass(frozenset, typing.Set)
-
- def test_frozenset(self):
- self.assertIsSubclass(frozenset, typing.FrozenSet)
- self.assertNotIsSubclass(set, typing.FrozenSet)
-
- def test_dict(self):
- self.assertIsSubclass(dict, typing.Dict)
-
- def test_no_list_instantiation(self):
- with self.assertRaises(TypeError):
- typing.List()
- with self.assertRaises(TypeError):
- typing.List[T]()
- with self.assertRaises(TypeError):
- typing.List[int]()
-
- def test_list_subclass(self):
-
- class MyList(typing.List[int]):
- pass
-
- a = MyList()
- self.assertIsInstance(a, MyList)
- self.assertIsInstance(a, typing.Sequence)
-
- self.assertIsSubclass(MyList, list)
- self.assertNotIsSubclass(list, MyList)
-
- def test_no_dict_instantiation(self):
- with self.assertRaises(TypeError):
- typing.Dict()
- with self.assertRaises(TypeError):
- typing.Dict[KT, VT]()
- with self.assertRaises(TypeError):
- typing.Dict[str, int]()
-
- def test_dict_subclass(self):
-
- class MyDict(typing.Dict[str, int]):
- pass
-
- d = MyDict()
- self.assertIsInstance(d, MyDict)
- self.assertIsInstance(d, typing.MutableMapping)
-
- self.assertIsSubclass(MyDict, dict)
- self.assertNotIsSubclass(dict, MyDict)
-
- def test_no_defaultdict_instantiation(self):
- with self.assertRaises(TypeError):
- typing.DefaultDict()
- with self.assertRaises(TypeError):
- typing.DefaultDict[KT, VT]()
- with self.assertRaises(TypeError):
- typing.DefaultDict[str, int]()
-
- def test_defaultdict_subclass(self):
-
- class MyDefDict(typing.DefaultDict[str, int]):
- pass
-
- dd = MyDefDict()
- self.assertIsInstance(dd, MyDefDict)
-
- self.assertIsSubclass(MyDefDict, collections.defaultdict)
- self.assertNotIsSubclass(collections.defaultdict, MyDefDict)
-
- def test_no_set_instantiation(self):
- with self.assertRaises(TypeError):
- typing.Set()
- with self.assertRaises(TypeError):
- typing.Set[T]()
- with self.assertRaises(TypeError):
- typing.Set[int]()
-
- def test_set_subclass_instantiation(self):
-
- class MySet(typing.Set[int]):
- pass
-
- d = MySet()
- self.assertIsInstance(d, MySet)
-
- def test_no_frozenset_instantiation(self):
- with self.assertRaises(TypeError):
- typing.FrozenSet()
- with self.assertRaises(TypeError):
- typing.FrozenSet[T]()
- with self.assertRaises(TypeError):
- typing.FrozenSet[int]()
-
- def test_frozenset_subclass_instantiation(self):
-
- class MyFrozenSet(typing.FrozenSet[int]):
- pass
-
- d = MyFrozenSet()
- self.assertIsInstance(d, MyFrozenSet)
-
- def test_no_tuple_instantiation(self):
- with self.assertRaises(TypeError):
- Tuple()
- with self.assertRaises(TypeError):
- Tuple[T]()
- with self.assertRaises(TypeError):
- Tuple[int]()
-
- def test_generator(self):
- def foo():
- yield 42
- g = foo()
- self.assertIsSubclass(type(g), typing.Generator)
-
- def test_no_generator_instantiation(self):
- with self.assertRaises(TypeError):
- typing.Generator()
- with self.assertRaises(TypeError):
- typing.Generator[T, T, T]()
- with self.assertRaises(TypeError):
- typing.Generator[int, int, int]()
-
- def test_subclassing(self):
-
- class MMA(typing.MutableMapping):
- pass
-
- with self.assertRaises(TypeError): # It's abstract
- MMA()
-
- class MMC(MMA):
- def __getitem__(self, k):
- return None
- def __setitem__(self, k, v):
- pass
- def __delitem__(self, k):
- pass
- def __iter__(self):
- return iter(())
- def __len__(self):
- return 0
-
- self.assertEqual(len(MMC()), 0)
- assert callable(MMC.update)
- self.assertIsInstance(MMC(), typing.Mapping)
-
- class MMB(typing.MutableMapping[KT, VT]):
- def __getitem__(self, k):
- return None
- def __setitem__(self, k, v):
- pass
- def __delitem__(self, k):
- pass
- def __iter__(self):
- return iter(())
- def __len__(self):
- return 0
-
- self.assertEqual(len(MMB()), 0)
- self.assertEqual(len(MMB[str, str]()), 0)
- self.assertEqual(len(MMB[KT, VT]()), 0)
-
- self.assertNotIsSubclass(dict, MMA)
- self.assertNotIsSubclass(dict, MMB)
-
- self.assertIsSubclass(MMA, typing.Mapping)
- self.assertIsSubclass(MMB, typing.Mapping)
- self.assertIsSubclass(MMC, typing.Mapping)
-
- self.assertIsInstance(MMB[KT, VT](), typing.Mapping)
- self.assertIsInstance(MMB[KT, VT](), collections.Mapping)
-
- self.assertIsSubclass(MMA, collections.Mapping)
- self.assertIsSubclass(MMB, collections.Mapping)
- self.assertIsSubclass(MMC, collections.Mapping)
-
- self.assertIsSubclass(MMB[str, str], typing.Mapping)
- self.assertIsSubclass(MMC, MMA)
-
- class I(typing.Iterable): pass
- self.assertNotIsSubclass(list, I)
-
- class G(typing.Generator[int, int, int]): pass
- def g(): yield 0
- self.assertIsSubclass(G, typing.Generator)
- self.assertIsSubclass(G, typing.Iterable)
- if hasattr(collections, 'Generator'):
- self.assertIsSubclass(G, collections.Generator)
- self.assertIsSubclass(G, collections.Iterable)
- self.assertNotIsSubclass(type(g), G)
-
- def test_subclassing_subclasshook(self):
-
- class Base(typing.Iterable):
- @classmethod
- def __subclasshook__(cls, other):
- if other.__name__ == 'Foo':
- return True
- else:
- return False
-
- class C(Base): pass
- class Foo: pass
- class Bar: pass
- self.assertIsSubclass(Foo, Base)
- self.assertIsSubclass(Foo, C)
- self.assertNotIsSubclass(Bar, C)
-
- def test_subclassing_register(self):
-
- class A(typing.Container): pass
- class B(A): pass
-
- class C: pass
- A.register(C)
- self.assertIsSubclass(C, A)
- self.assertNotIsSubclass(C, B)
-
- class D: pass
- B.register(D)
- self.assertIsSubclass(D, A)
- self.assertIsSubclass(D, B)
-
- class M(): pass
- collections.MutableMapping.register(M)
- self.assertIsSubclass(M, typing.Mapping)
-
- def test_collections_as_base(self):
-
- class M(collections.Mapping): pass
- self.assertIsSubclass(M, typing.Mapping)
- self.assertIsSubclass(M, typing.Iterable)
-
- class S(collections.MutableSequence): pass
- self.assertIsSubclass(S, typing.MutableSequence)
- self.assertIsSubclass(S, typing.Iterable)
-
- class I(collections.Iterable): pass
- self.assertIsSubclass(I, typing.Iterable)
-
- class A(collections.Mapping): pass
- class B: pass
- A.register(B)
- self.assertIsSubclass(B, typing.Mapping)
-
-class TypeTests(BaseTestCase):
-
- def test_type_basic(self):
-
- class User(object): pass
- class BasicUser(User): pass
- class ProUser(User): pass
-
- def new_user(user_class):
- # type: (Type[User]) -> User
- return user_class()
-
- joe = new_user(BasicUser)
-
- def test_type_typevar(self):
-
- class User(object): pass
- class BasicUser(User): pass
- class ProUser(User): pass
-
- global U
- U = TypeVar('U', bound=User)
-
- def new_user(user_class):
- # type: (Type[U]) -> U
- return user_class()
-
- joe = new_user(BasicUser)
-
- def test_type_optional(self):
- A = Optional[Type[BaseException]]
-
- def foo(a):
- # type: (A) -> Optional[BaseException]
- if a is None:
- return None
- else:
- return a()
-
- assert isinstance(foo(KeyboardInterrupt), KeyboardInterrupt)
- assert foo(None) is None
-
-
-class NewTypeTests(BaseTestCase):
-
- def test_basic(self):
- UserId = NewType('UserId', int)
- UserName = NewType('UserName', str)
- self.assertIsInstance(UserId(5), int)
- self.assertIsInstance(UserName('Joe'), type('Joe'))
- self.assertEqual(UserId(5) + 1, 6)
-
- def test_errors(self):
- UserId = NewType('UserId', int)
- UserName = NewType('UserName', str)
- with self.assertRaises(TypeError):
- issubclass(UserId, int)
- with self.assertRaises(TypeError):
- class D(UserName):
- pass
-
-
-class NamedTupleTests(BaseTestCase):
-
- def test_basics(self):
- Emp = NamedTuple('Emp', [('name', str), ('id', int)])
- self.assertIsSubclass(Emp, tuple)
- joe = Emp('Joe', 42)
- jim = Emp(name='Jim', id=1)
- self.assertIsInstance(joe, Emp)
- self.assertIsInstance(joe, tuple)
- self.assertEqual(joe.name, 'Joe')
- self.assertEqual(joe.id, 42)
- self.assertEqual(jim.name, 'Jim')
- self.assertEqual(jim.id, 1)
- self.assertEqual(Emp.__name__, 'Emp')
- self.assertEqual(Emp._fields, ('name', 'id'))
- self.assertEqual(Emp._field_types, dict(name=str, id=int))
-
- def test_pickle(self):
- global Emp # pickle wants to reference the class by name
- Emp = NamedTuple('Emp', [('name', str), ('id', int)])
- jane = Emp('jane', 37)
- for proto in range(pickle.HIGHEST_PROTOCOL + 1):
- z = pickle.dumps(jane, proto)
- jane2 = pickle.loads(z)
- self.assertEqual(jane2, jane)
-
-
-class IOTests(BaseTestCase):
-
- def test_io_submodule(self):
- from typing.io import IO, TextIO, BinaryIO, __all__, __name__
- self.assertIs(IO, typing.IO)
- self.assertIs(TextIO, typing.TextIO)
- self.assertIs(BinaryIO, typing.BinaryIO)
- self.assertEqual(set(__all__), set(['IO', 'TextIO', 'BinaryIO']))
- self.assertEqual(__name__, 'typing.io')
-
-
-class RETests(BaseTestCase):
- # Much of this is really testing _TypeAlias.
-
- def test_basics(self):
- pat = re.compile('[a-z]+', re.I)
- self.assertIsSubclass(pat.__class__, Pattern)
- self.assertIsSubclass(type(pat), Pattern)
- self.assertIsInstance(pat, Pattern)
-
- mat = pat.search('12345abcde.....')
- self.assertIsSubclass(mat.__class__, Match)
- self.assertIsSubclass(type(mat), Match)
- self.assertIsInstance(mat, Match)
-
- # these should just work
- p = Pattern[Union[str, bytes]]
- m = Match[Union[bytes, str]]
-
- def test_errors(self):
- with self.assertRaises(TypeError):
- # Doesn't fit AnyStr.
- Pattern[int]
- with self.assertRaises(TypeError):
- # Can't change type vars?
- Match[T]
- m = Match[Union[str, bytes]]
- with self.assertRaises(TypeError):
- # Too complicated?
- m[str]
- with self.assertRaises(TypeError):
- # We don't support isinstance().
- isinstance(42, Pattern[str])
-
- def test_repr(self):
- self.assertEqual(repr(Pattern), 'Pattern[~AnyStr]')
- self.assertEqual(repr(Pattern[unicode]), 'Pattern[unicode]')
- self.assertEqual(repr(Pattern[str]), 'Pattern[str]')
- self.assertEqual(repr(Match), 'Match[~AnyStr]')
- self.assertEqual(repr(Match[unicode]), 'Match[unicode]')
- self.assertEqual(repr(Match[str]), 'Match[str]')
-
- def test_re_submodule(self):
- from typing.re import Match, Pattern, __all__, __name__
- self.assertIs(Match, typing.Match)
- self.assertIs(Pattern, typing.Pattern)
- self.assertEqual(set(__all__), set(['Match', 'Pattern']))
- self.assertEqual(__name__, 'typing.re')
-
- def test_cannot_subclass(self):
- with self.assertRaises(TypeError) as ex:
-
- class A(typing.Match):
- pass
-
- self.assertEqual(str(ex.exception),
- "Cannot subclass typing._TypeAlias")
-
-
-class AllTests(BaseTestCase):
- """Tests for __all__."""
-
- def test_all(self):
- from typing import __all__ as a
- # Just spot-check the first and last of every category.
- self.assertIn('AbstractSet', a)
- self.assertIn('ValuesView', a)
- self.assertIn('cast', a)
- self.assertIn('overload', a)
- # Check that io and re are not exported.
- self.assertNotIn('io', a)
- self.assertNotIn('re', a)
- # Spot-check that stdlib modules aren't exported.
- self.assertNotIn('os', a)
- self.assertNotIn('sys', a)
- # Check that Text is defined.
- self.assertIn('Text', a)
-
- def test_respect_no_type_check(self):
- @typing.no_type_check
- class NoTpCheck(object):
- class Inn(object):
- def __init__(self, x): pass
- # type: (this is not actually a type) -> None
- self.assertTrue(NoTpCheck.__no_type_check__)
- self.assertTrue(NoTpCheck.Inn.__init__.__no_type_check__)
-
- def test_get_type_hints_dummy(self):
-
- def foo(x):
- # type: (int) -> int
- return x + 1
-
- self.assertIsNone(typing.get_type_hints(foo))
-
-
-if __name__ == '__main__':
- main()
diff --git a/lib-typing/2.7/typing.py b/lib-typing/2.7/typing.py
deleted file mode 100644
index 5627697..0000000
--- a/lib-typing/2.7/typing.py
+++ /dev/null
@@ -1,2016 +0,0 @@
-from __future__ import absolute_import, unicode_literals
-
-import abc
-from abc import abstractmethod, abstractproperty
-import collections
-import functools
-import re as stdlib_re # Avoid confusion with the re we export.
-import sys
-import types
-try:
- import collections.abc as collections_abc
-except ImportError:
- import collections as collections_abc # Python 2 has no collections.abc.
-
-
-# Please keep __all__ alphabetized within each category.
-__all__ = [
- # Super-special typing primitives.
- 'Any',
- 'Callable',
- 'ClassVar',
- 'Generic',
- 'Optional',
- 'Tuple',
- 'Type',
- 'TypeVar',
- 'Union',
-
- # ABCs (from collections.abc).
- 'AbstractSet', # collections.abc.Set.
- 'ByteString',
- 'Container',
- 'Hashable',
- 'ItemsView',
- 'Iterable',
- 'Iterator',
- 'KeysView',
- 'Mapping',
- 'MappingView',
- 'MutableMapping',
- 'MutableSequence',
- 'MutableSet',
- 'Sequence',
- 'Sized',
- 'ValuesView',
-
- # Structural checks, a.k.a. protocols.
- 'Reversible',
- 'SupportsAbs',
- 'SupportsFloat',
- 'SupportsInt',
-
- # Concrete collection types.
- 'Dict',
- 'DefaultDict',
- 'List',
- 'Set',
- 'FrozenSet',
- 'NamedTuple', # Not really a type.
- 'Generator',
-
- # One-off things.
- 'AnyStr',
- 'cast',
- 'get_type_hints',
- 'NewType',
- 'no_type_check',
- 'no_type_check_decorator',
- 'overload',
- 'Text',
- 'TYPE_CHECKING',
-]
-
-# The pseudo-submodules 're' and 'io' are part of the public
-# namespace, but excluded from __all__ because they might stomp on
-# legitimate imports of those modules.
-
-
-def _qualname(x):
- if sys.version_info[:2] >= (3, 3):
- return x.__qualname__
- else:
- # Fall back to just name.
- return x.__name__
-
-def _trim_name(nm):
- if nm.startswith('_') and nm not in ('_TypeAlias',
- '_ForwardRef', '_TypingBase', '_FinalTypingBase'):
- nm = nm[1:]
- return nm
-
-
-class TypingMeta(type):
- """Metaclass for most types defined in typing module
- (not a part of public API).
-
- This also defines a dummy constructor (all the work for most typing
- constructs is done in __new__) and a nicer repr().
- """
-
- _is_protocol = False
-
- def __new__(cls, name, bases, namespace):
- return super(TypingMeta, cls).__new__(cls, str(name), bases, namespace)
-
- @classmethod
- def assert_no_subclassing(cls, bases):
- for base in bases:
- if isinstance(base, cls):
- raise TypeError("Cannot subclass %s" %
- (', '.join(map(_type_repr, bases)) or '()'))
-
- def __init__(self, *args, **kwds):
- pass
-
- def _eval_type(self, globalns, localns):
- """Override this in subclasses to interpret forward references.
-
- For example, List['C'] is internally stored as
- List[_ForwardRef('C')], which should evaluate to List[C],
- where C is an object found in globalns or localns (searching
- localns first, of course).
- """
- return self
-
- def _get_type_vars(self, tvars):
- pass
-
- def __repr__(self):
- qname = _trim_name(_qualname(self))
- return '%s.%s' % (self.__module__, qname)
-
-
-class _TypingBase(object):
- """Internal indicator of special typing constructs."""
- __metaclass__ = TypingMeta
- __slots__ = ()
-
- def __init__(self, *args, **kwds):
- pass
-
- def __new__(cls, *args, **kwds):
- """Constructor.
-
- This only exists to give a better error message in case
- someone tries to subclass a special typing object (not a good idea).
- """
- if (len(args) == 3 and
- isinstance(args[0], str) and
- isinstance(args[1], tuple)):
- # Close enough.
- raise TypeError("Cannot subclass %r" % cls)
- return super(_TypingBase, cls).__new__(cls)
-
- # Things that are not classes also need these.
- def _eval_type(self, globalns, localns):
- return self
-
- def _get_type_vars(self, tvars):
- pass
-
- def __repr__(self):
- cls = type(self)
- qname = _trim_name(_qualname(cls))
- return '%s.%s' % (cls.__module__, qname)
-
- def __call__(self, *args, **kwds):
- raise TypeError("Cannot instantiate %r" % type(self))
-
-
-class _FinalTypingBase(_TypingBase):
- """Internal mix-in class to prevent instantiation.
-
- Prevents instantiation unless _root=True is given in class call.
- It is used to create pseudo-singleton instances Any, Union, Optional, etc.
- """
-
- __slots__ = ()
-
- def __new__(cls, *args, **kwds):
- self = super(_FinalTypingBase, cls).__new__(cls, *args, **kwds)
- if '_root' in kwds and kwds['_root'] is True:
- return self
- raise TypeError("Cannot instantiate %r" % cls)
-
- def __reduce__(self):
- return _trim_name(type(self).__name__)
-
-
-class _ForwardRef(_TypingBase):
- """Internal wrapper to hold a forward reference."""
-
- __slots__ = ('__forward_arg__', '__forward_code__',
- '__forward_evaluated__', '__forward_value__')
-
- def __init__(self, arg):
- super(_ForwardRef, self).__init__(arg)
- if not isinstance(arg, basestring):
- raise TypeError('Forward reference must be a string -- got %r' % (arg,))
- try:
- code = compile(arg, '<string>', 'eval')
- except SyntaxError:
- raise SyntaxError('Forward reference must be an expression -- got %r' %
- (arg,))
- self.__forward_arg__ = arg
- self.__forward_code__ = code
- self.__forward_evaluated__ = False
- self.__forward_value__ = None
-
- def _eval_type(self, globalns, localns):
- if not self.__forward_evaluated__ or localns is not globalns:
- if globalns is None and localns is None:
- globalns = localns = {}
- elif globalns is None:
- globalns = localns
- elif localns is None:
- localns = globalns
- self.__forward_value__ = _type_check(
- eval(self.__forward_code__, globalns, localns),
- "Forward references must evaluate to types.")
- self.__forward_evaluated__ = True
- return self.__forward_value__
-
- def __instancecheck__(self, obj):
- raise TypeError("Forward references cannot be used with isinstance().")
-
- def __subclasscheck__(self, cls):
- raise TypeError("Forward references cannot be used with issubclass().")
-
- def __repr__(self):
- return '_ForwardRef(%r)' % (self.__forward_arg__,)
-
-
-class _TypeAlias(_TypingBase):
- """Internal helper class for defining generic variants of concrete types.
-
- Note that this is not a type; let's call it a pseudo-type. It cannot
- be used in instance and subclass checks in parameterized form, i.e.
- ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning
- ``False``.
- """
-
- __slots__ = ('name', 'type_var', 'impl_type', 'type_checker')
-
-
- def __init__(self, name, type_var, impl_type, type_checker):
- """Initializer.
-
- Args:
- name: The name, e.g. 'Pattern'.
- type_var: The type parameter, e.g. AnyStr, or the
- specific type, e.g. str.
- impl_type: The implementation type.
- type_checker: Function that takes an impl_type instance
- and returns a value that should be a type_var instance.
- """
- assert isinstance(name, basestring), repr(name)
- assert isinstance(impl_type, type), repr(impl_type)
- assert not isinstance(impl_type, TypingMeta), repr(impl_type)
- assert isinstance(type_var, (type, _TypingBase)), repr(type_var)
- self.name = name
- self.type_var = type_var
- self.impl_type = impl_type
- self.type_checker = type_checker
-
- def __repr__(self):
- return "%s[%s]" % (self.name, _type_repr(self.type_var))
-
- def __getitem__(self, parameter):
- if not isinstance(self.type_var, TypeVar):
- raise TypeError("%s cannot be further parameterized." % self)
- if self.type_var.__constraints__ and isinstance(parameter, type):
- if not issubclass(parameter, self.type_var.__constraints__):
- raise TypeError("%s is not a valid substitution for %s." %
- (parameter, self.type_var))
- if isinstance(parameter, TypeVar) and parameter is not self.type_var:
- raise TypeError("%s cannot be re-parameterized." % self)
- return self.__class__(self.name, parameter,
- self.impl_type, self.type_checker)
-
- def __instancecheck__(self, obj):
- if not isinstance(self.type_var, TypeVar):
- raise TypeError("Parameterized type aliases cannot be used "
- "with isinstance().")
- return isinstance(obj, self.impl_type)
-
- def __subclasscheck__(self, cls):
- if not isinstance(self.type_var, TypeVar):
- raise TypeError("Parameterized type aliases cannot be used "
- "with issubclass().")
- return issubclass(cls, self.impl_type)
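A quick sketch (not from the file above, and assuming the module is importable as typing) of the behaviour _TypeAlias gives the exported Pattern and Match aliases:

    import re
    from typing import Match, Pattern

    pat = re.compile('[a-z]+')
    assert isinstance(pat, Pattern)             # unparameterized alias: plain impl-type check
    assert isinstance(pat.match('abc'), Match)
    try:
        isinstance(pat, Pattern[str])           # parameterized alias: raises TypeError
    except TypeError:
        pass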
-
-
-def _get_type_vars(types, tvars):
- for t in types:
- if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
- t._get_type_vars(tvars)
-
-
-def _type_vars(types):
- tvars = []
- _get_type_vars(types, tvars)
- return tuple(tvars)
-
-
-def _eval_type(t, globalns, localns):
- if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
- return t._eval_type(globalns, localns)
- return t
-
-
-def _type_check(arg, msg):
- """Check that the argument is a type, and return it (internal helper).
-
- As a special case, accept None and return type(None) instead.
- Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable.
-
- The msg argument is a human-readable error message, e.g.
-
- "Union[arg, ...]: arg should be a type."
-
- We append the repr() of the actual value (truncated to 100 chars).
- """
- if arg is None:
- return type(None)
- if isinstance(arg, basestring):
- arg = _ForwardRef(arg)
- if (isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or
- not isinstance(arg, (type, _TypingBase)) and not callable(arg)):
- raise TypeError(msg + " Got %.100r." % (arg,))
- # Bare Union etc. are not valid as type arguments
- if (type(arg).__name__ in ('_Union', '_Optional')
- and not getattr(arg, '__origin__', None)
- or isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol)):
- raise TypeError("Plain %s is not valid as type argument" % arg)
- return arg
-
-
-def _type_repr(obj):
- """Return the repr() of an object, special-casing types (internal helper).
-
- If obj is a type, we return a shorter version than the default
- type.__repr__, based on the module and qualified name, which is
- typically enough to uniquely identify a type. For everything
- else, we fall back on repr(obj).
- """
- if isinstance(obj, type) and not isinstance(obj, TypingMeta):
- if obj.__module__ == '__builtin__':
- return _qualname(obj)
- return '%s.%s' % (obj.__module__, _qualname(obj))
- if obj is Ellipsis:
- return '...'
- if isinstance(obj, types.FunctionType):
- return obj.__name__
- return repr(obj)
-
-
-class ClassVarMeta(TypingMeta):
- """Metaclass for _ClassVar"""
-
- def __new__(cls, name, bases, namespace):
- cls.assert_no_subclassing(bases)
- self = super(ClassVarMeta, cls).__new__(cls, name, bases, namespace)
- return self
-
-
-class _ClassVar(_FinalTypingBase):
- """Special type construct to mark class variables.
-
- An annotation wrapped in ClassVar indicates that a given
- attribute is intended to be used as a class variable and
- should not be set on instances of that class. Usage::
-
- class Starship:
- stats = {} # type: ClassVar[Dict[str, int]] # class variable
- damage = 10 # type: int # instance variable
-
- ClassVar accepts only types and cannot be further subscripted.
-
- Note that ClassVar is not a class itself, and should not
- be used with isinstance() or issubclass().
- """
-
- __metaclass__ = ClassVarMeta
- __slots__ = ('__type__',)
-
- def __init__(self, tp=None, _root=False):
- self.__type__ = tp
-
- def __getitem__(self, item):
- cls = type(self)
- if self.__type__ is None:
- return cls(_type_check(item,
- '{} accepts only types.'.format(cls.__name__[1:])),
- _root=True)
- raise TypeError('{} cannot be further subscripted'
- .format(cls.__name__[1:]))
-
- def _eval_type(self, globalns, localns):
- return type(self)(_eval_type(self.__type__, globalns, localns),
- _root=True)
-
- def __repr__(self):
- r = super(_ClassVar, self).__repr__()
- if self.__type__ is not None:
- r += '[{}]'.format(_type_repr(self.__type__))
- return r
-
- def __hash__(self):
- return hash((type(self).__name__, self.__type__))
-
- def __eq__(self, other):
- if not isinstance(other, _ClassVar):
- return NotImplemented
- if self.__type__ is not None:
- return self.__type__ == other.__type__
- return self is other
-
-ClassVar = _ClassVar(_root=True)
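A minimal sketch of ClassVar in Python 2 comment-annotation form, mirroring the Starship example in the docstring above (illustrative only; enforcement is left to a checker such as mypy):

    from typing import ClassVar, Dict

    class Starship(object):
        stats = {}  # type: ClassVar[Dict[str, int]]  # shared class-level state
        def __init__(self, damage):
            # type: (int) -> None
            self.damage = damage                       # per-instance state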
-
-
-class AnyMeta(TypingMeta):
- """Metaclass for Any."""
-
- def __new__(cls, name, bases, namespace):
- cls.assert_no_subclassing(bases)
- self = super(AnyMeta, cls).__new__(cls, name, bases, namespace)
- return self
-
-
-class _Any(_FinalTypingBase):
- """Special type indicating an unconstrained type.
-
- - Any is compatible with every type.
- - Any is assumed to have all methods.
- - All values are assumed to be instances of Any.
-
- Note that all the above statements are true from the point of view of
- static type checkers. At runtime, Any should not be used with instance
- or class checks.
- """
- __metaclass__ = AnyMeta
- __slots__ = ()
-
- def __instancecheck__(self, obj):
- raise TypeError("Any cannot be used with isinstance().")
-
- def __subclasscheck__(self, cls):
- raise TypeError("Any cannot be used with issubclass().")
-
-
-Any = _Any(_root=True)
-
-
-class TypeVarMeta(TypingMeta):
- def __new__(cls, name, bases, namespace):
- cls.assert_no_subclassing(bases)
- return super(TypeVarMeta, cls).__new__(cls, name, bases, namespace)
-
-
-class TypeVar(_TypingBase):
- """Type variable.
-
- Usage::
-
- T = TypeVar('T') # Can be anything
- A = TypeVar('A', str, bytes) # Must be str or bytes
-
- Type variables exist primarily for the benefit of static type
- checkers. They serve as the parameters for generic types as well
- as for generic function definitions. See class Generic for more
- information on generic types. Generic functions work as follows:
-
- def repeat(x: T, n: int) -> List[T]:
- '''Return a list containing n references to x.'''
- return [x]*n
-
- def longest(x: A, y: A) -> A:
- '''Return the longest of two strings.'''
- return x if len(x) >= len(y) else y
-
- The latter example's signature is essentially the overloading
- of (str, str) -> str and (bytes, bytes) -> bytes. Also note
- that if the arguments are instances of some subclass of str,
- the return type is still plain str.
-
- At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError.
-
- Type variables defined with covariant=True or contravariant=True
- can be used to declare covariant or contravariant generic types.
- See PEP 484 for more details. By default generic types are invariant
- in all type variables.
-
- Type variables can be introspected, e.g.:
-
- T.__name__ == 'T'
- T.__constraints__ == ()
- T.__covariant__ == False
- T.__contravariant__ == False
- A.__constraints__ == (str, bytes)
- """
-
- __metaclass__ = TypeVarMeta
- __slots__ = ('__name__', '__bound__', '__constraints__',
- '__covariant__', '__contravariant__')
-
- def __init__(self, name, *constraints, **kwargs):
- super(TypeVar, self).__init__(name, *constraints, **kwargs)
- bound = kwargs.get('bound', None)
- covariant = kwargs.get('covariant', False)
- contravariant = kwargs.get('contravariant', False)
- self.__name__ = name
- if covariant and contravariant:
- raise ValueError("Bivariant types are not supported.")
- self.__covariant__ = bool(covariant)
- self.__contravariant__ = bool(contravariant)
- if constraints and bound is not None:
- raise TypeError("Constraints cannot be combined with bound=...")
- if constraints and len(constraints) == 1:
- raise TypeError("A single constraint is not allowed")
- msg = "TypeVar(name, constraint, ...): constraints must be types."
- self.__constraints__ = tuple(_type_check(t, msg) for t in constraints)
- if bound:
- self.__bound__ = _type_check(bound, "Bound must be a type.")
- else:
- self.__bound__ = None
-
- def _get_type_vars(self, tvars):
- if self not in tvars:
- tvars.append(self)
-
- def __repr__(self):
- if self.__covariant__:
- prefix = '+'
- elif self.__contravariant__:
- prefix = '-'
- else:
- prefix = '~'
- return prefix + self.__name__
-
- def __instancecheck__(self, instance):
- raise TypeError("Type variables cannot be used with isinstance().")
-
- def __subclasscheck__(self, cls):
- raise TypeError("Type variables cannot be used with issubclass().")
-
-
-# Some unconstrained type variables. These are used by the container types.
-# (These are not for export.)
-T = TypeVar('T') # Any type.
-KT = TypeVar('KT') # Key type.
-VT = TypeVar('VT') # Value type.
-T_co = TypeVar('T_co', covariant=True) # Any type covariant containers.
-V_co = TypeVar('V_co', covariant=True) # Any type covariant containers.
-VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers.
-T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant.
-
-# A useful type variable with constraints. This represents string types.
-# (This one *is* for export!)
-AnyStr = TypeVar('AnyStr', bytes, unicode)
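The TypeVar docstring above uses Python 3 annotation syntax; a hedged sketch of the same two flavours written with Python 2 type comments (nothing here is checked at runtime):

    from typing import AnyStr, List, TypeVar

    T = TypeVar('T')  # unconstrained

    def repeat(x, n):
        # type: (T, int) -> List[T]
        return [x] * n

    def longest(x, y):
        # type: (AnyStr, AnyStr) -> AnyStr
        return x if len(x) >= len(y) else y

    repeat('a', 3)        # a checker infers List[str]
    longest('ab', u'cd')  # runs, but a checker would flag mixing bytes and unicode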
-
-
-def _replace_arg(arg, tvars, args):
- """An internal helper function: replace arg if it is a type variable
- found in tvars with corresponding substitution from args or
- with corresponding substitution sub-tree if arg is a generic type.
- """
-
- if tvars is None:
- tvars = []
- if hasattr(arg, '_subs_tree'):
- return arg._subs_tree(tvars, args)
- if isinstance(arg, TypeVar):
- for i, tvar in enumerate(tvars):
- if arg == tvar:
- return args[i]
- return arg
-
-
-def _subs_tree(cls, tvars=None, args=None):
- """An internal helper function: calculate substitution tree
- for generic cls after replacing its type parameters with
- substitutions in tvars -> args (if any).
- Repeat the same following __origin__'s.
-
- Return a list of arguments with all possible substitutions
- performed. Arguments that are generic classes themselves are represented
- as tuples (so that no new classes are created by this function).
- For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)]
- """
-
- if cls.__origin__ is None:
- return cls
- # Make a chain of origins (i.e. cls -> cls.__origin__)
- current = cls.__origin__
- orig_chain = []
- while current.__origin__ is not None:
- orig_chain.append(current)
- current = current.__origin__
- # Replace type variables in __args__ if asked ...
- tree_args = []
- for arg in cls.__args__:
- tree_args.append(_replace_arg(arg, tvars, args))
- # ... then continue replacing down the origin chain.
- for ocls in orig_chain:
- new_tree_args = []
- for i, arg in enumerate(ocls.__args__):
- new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args))
- tree_args = new_tree_args
- return tree_args
-
-
-def _remove_dups_flatten(parameters):
- """An internal helper for Union creation and substitution: flatten Union's
- among parameters, then remove duplicates and strict subclasses.
- """
-
- # Flatten out Union[Union[...], ...].
- params = []
- for p in parameters:
- if isinstance(p, _Union) and p.__origin__ is Union:
- params.extend(p.__args__)
- elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
- params.extend(p[1:])
- else:
- params.append(p)
- # Weed out strict duplicates, preserving the first of each occurrence.
- all_params = set(params)
- if len(all_params) < len(params):
- new_params = []
- for t in params:
- if t in all_params:
- new_params.append(t)
- all_params.remove(t)
- params = new_params
- assert not all_params, all_params
- # Weed out subclasses.
- # E.g. Union[int, Employee, Manager] == Union[int, Employee].
- # If object is present it will be sole survivor among proper classes.
- # Never discard type variables.
- # (In particular, Union[str, AnyStr] != AnyStr.)
- all_params = set(params)
- for t1 in params:
- if not isinstance(t1, type):
- continue
- if any(isinstance(t2, type) and issubclass(t1, t2)
- for t2 in all_params - {t1}
- if not (isinstance(t2, GenericMeta) and
- t2.__origin__ is not None)):
- all_params.remove(t1)
- return tuple(t for t in params if t in all_params)
-
-
-def _check_generic(cls, parameters):
- # Check correct count for parameters of a generic cls (internal helper).
- if not cls.__parameters__:
- raise TypeError("%s is not a generic class" % repr(cls))
- alen = len(parameters)
- elen = len(cls.__parameters__)
- if alen != elen:
- raise TypeError("Too %s parameters for %s; actual %s, expected %s" %
- ("many" if alen > elen else "few", repr(cls), alen, elen))
-
-
-_cleanups = []
-
-
-def _tp_cache(func):
- maxsize = 128
- cache = {}
- _cleanups.append(cache.clear)
-
- @functools.wraps(func)
- def inner(*args):
- key = args
- try:
- return cache[key]
- except TypeError:
- # Assume it's an unhashable argument.
- return func(*args)
- except KeyError:
- value = func(*args)
- if len(cache) >= maxsize:
- # If the cache grows too much, just start over.
- cache.clear()
- cache[key] = value
- return value
-
- return inner
-
-
-class UnionMeta(TypingMeta):
- """Metaclass for Union."""
-
- def __new__(cls, name, bases, namespace):
- cls.assert_no_subclassing(bases)
- return super(UnionMeta, cls).__new__(cls, name, bases, namespace)
-
-
-class _Union(_FinalTypingBase):
- """Union type; Union[X, Y] means either X or Y.
-
- To define a union, use e.g. Union[int, str]. Details:
-
- - The arguments must be types and there must be at least one.
-
- - None as an argument is a special case and is replaced by
- type(None).
-
- - Unions of unions are flattened, e.g.::
-
- Union[Union[int, str], float] == Union[int, str, float]
-
- - Unions of a single argument vanish, e.g.::
-
- Union[int] == int # The constructor actually returns int
-
- - Redundant arguments are skipped, e.g.::
-
- Union[int, str, int] == Union[int, str]
-
- - When comparing unions, the argument order is ignored, e.g.::
-
- Union[int, str] == Union[str, int]
-
- - When two arguments have a subclass relationship, the least
- derived argument is kept, e.g.::
-
- class Employee: pass
- class Manager(Employee): pass
- Union[int, Employee, Manager] == Union[int, Employee]
- Union[Manager, int, Employee] == Union[int, Employee]
- Union[Employee, Manager] == Employee
-
- - Similar for object::
-
- Union[int, object] == object
-
- - You cannot subclass or instantiate a union.
-
- - You can use Optional[X] as a shorthand for Union[X, None].
- """
-
- __metaclass__ = UnionMeta
- __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__')
-
- def __new__(cls, parameters=None, origin=None, *args, **kwds):
- self = super(_Union, cls).__new__(cls, parameters, origin, *args, **kwds)
- if origin is None:
- self.__parameters__ = None
- self.__args__ = None
- self.__origin__ = None
- self.__tree_hash__ = hash(frozenset(('Union',)))
- return self
- if not isinstance(parameters, tuple):
- raise TypeError("Expected parameters=<tuple>")
- if origin is Union:
- parameters = _remove_dups_flatten(parameters)
- # It's not a union if there's only one type left.
- if len(parameters) == 1:
- return parameters[0]
- self.__parameters__ = _type_vars(parameters)
- self.__args__ = parameters
- self.__origin__ = origin
- # Pre-calculate the __hash__ on instantiation.
- # This improves speed for complex substitutions.
- subs_tree = self._subs_tree()
- if isinstance(subs_tree, tuple):
- self.__tree_hash__ = hash(frozenset(subs_tree))
- else:
- self.__tree_hash__ = hash(subs_tree)
- return self
-
- def _eval_type(self, globalns, localns):
- if self.__args__ is None:
- return self
- ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__)
- ev_origin = _eval_type(self.__origin__, globalns, localns)
- if ev_args == self.__args__ and ev_origin == self.__origin__:
- # Everything is already evaluated.
- return self
- return self.__class__(ev_args, ev_origin, _root=True)
-
- def _get_type_vars(self, tvars):
- if self.__origin__ and self.__parameters__:
- _get_type_vars(self.__parameters__, tvars)
-
- def __repr__(self):
- if self.__origin__ is None:
- return super(_Union, self).__repr__()
- tree = self._subs_tree()
- if not isinstance(tree, tuple):
- return repr(tree)
- return tree[0]._tree_repr(tree)
-
- def _tree_repr(self, tree):
- arg_list = []
- for arg in tree[1:]:
- if not isinstance(arg, tuple):
- arg_list.append(_type_repr(arg))
- else:
- arg_list.append(arg[0]._tree_repr(arg))
- return super(_Union, self).__repr__() + '[%s]' % ', '.join(arg_list)
-
- @_tp_cache
- def __getitem__(self, parameters):
- if parameters == ():
- raise TypeError("Cannot take a Union of no types.")
- if not isinstance(parameters, tuple):
- parameters = (parameters,)
- if self.__origin__ is None:
- msg = "Union[arg, ...]: each arg must be a type."
- else:
- msg = "Parameters to generic types must be types."
- parameters = tuple(_type_check(p, msg) for p in parameters)
- if self is not Union:
- _check_generic(self, parameters)
- return self.__class__(parameters, origin=self, _root=True)
-
- def _subs_tree(self, tvars=None, args=None):
- if self is Union:
- return Union # Nothing to substitute
- tree_args = _subs_tree(self, tvars, args)
- tree_args = _remove_dups_flatten(tree_args)
- if len(tree_args) == 1:
- return tree_args[0] # Union of a single type is that type
- return (Union,) + tree_args
-
- def __eq__(self, other):
- if not isinstance(other, _Union):
- return self._subs_tree() == other
- return self.__tree_hash__ == other.__tree_hash__
-
- def __hash__(self):
- return self.__tree_hash__
-
- def __instancecheck__(self, obj):
- raise TypeError("Unions cannot be used with isinstance().")
-
- def __subclasscheck__(self, cls):
- raise TypeError("Unions cannot be used with issubclass().")
-
-
-Union = _Union(_root=True)
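The normalisation rules listed in the docstring are observable at runtime; a short sketch under the assumption that this module is on the path as typing:

    from typing import Optional, Union

    assert Union[int, str] == Union[str, int]               # argument order ignored
    assert Union[int, str, int] == Union[int, str]          # duplicates collapse
    assert Union[Union[int, str], float] == Union[int, str, float]
    assert Union[int] is int                                # single argument vanishes
    assert Optional[str] == Union[str, type(None)]          # Optional is shorthand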
-
-
-class OptionalMeta(TypingMeta):
- """Metaclass for Optional."""
-
- def __new__(cls, name, bases, namespace):
- cls.assert_no_subclassing(bases)
- return super(OptionalMeta, cls).__new__(cls, name, bases, namespace)
-
-
-class _Optional(_FinalTypingBase):
- """Optional type.
-
- Optional[X] is equivalent to Union[X, None].
- """
-
- __metaclass__ = OptionalMeta
- __slots__ = ()
-
- @_tp_cache
- def __getitem__(self, arg):
- arg = _type_check(arg, "Optional[t] requires a single type.")
- return Union[arg, type(None)]
-
-
-Optional = _Optional(_root=True)
-
-
-def _gorg(a):
- """Return the farthest origin of a generic class (internal helper)."""
- assert isinstance(a, GenericMeta)
- while a.__origin__ is not None:
- a = a.__origin__
- return a
-
-
-def _geqv(a, b):
- """Return whether two generic classes are equivalent (internal helper).
-
- The intention is to consider generic class X and any of its
- parameterized forms (X[T], X[int], etc.) as equivalent.
-
- However, X is not equivalent to a subclass of X.
-
- The relation is reflexive, symmetric and transitive.
- """
- assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta)
- # Reduce each to its origin.
- return _gorg(a) is _gorg(b)
-
-
-def _next_in_mro(cls):
- """Helper for Generic.__new__.
-
- Returns the class after the last occurrence of Generic or
- Generic[...] in cls.__mro__.
- """
- next_in_mro = object
- # Look for the last occurrence of Generic or Generic[...].
- for i, c in enumerate(cls.__mro__[:-1]):
- if isinstance(c, GenericMeta) and _gorg(c) is Generic:
- next_in_mro = cls.__mro__[i+1]
- return next_in_mro
-
-
-def _valid_for_check(cls):
- """An internal helper to prohibit isinstance([1], List[str]) etc."""
- if cls is Generic:
- raise TypeError("Class %r cannot be used with class "
- "or instance checks" % cls)
- if (cls.__origin__ is not None and
- sys._getframe(3).f_globals['__name__'] not in ['abc', 'functools']):
- raise TypeError("Parameterized generics cannot be used with class "
- "or instance checks")
-
-
-def _make_subclasshook(cls):
- """Construct a __subclasshook__ callable that incorporates
- the associated __extra__ class in subclass checks performed
- against cls.
- """
- if isinstance(cls.__extra__, abc.ABCMeta):
- # The logic mirrors that of ABCMeta.__subclasscheck__.
- # Registered classes need not be checked here because
- # cls and its extra share the same _abc_registry.
- def __extrahook__(cls, subclass):
- _valid_for_check(cls)
- res = cls.__extra__.__subclasshook__(subclass)
- if res is not NotImplemented:
- return res
- if cls.__extra__ in getattr(subclass, '__mro__', ()):
- return True
- for scls in cls.__extra__.__subclasses__():
- if isinstance(scls, GenericMeta):
- continue
- if issubclass(subclass, scls):
- return True
- return NotImplemented
- else:
- # For non-ABC extras we'll just call issubclass().
- def __extrahook__(cls, subclass):
- _valid_for_check(cls)
- if cls.__extra__ and issubclass(subclass, cls.__extra__):
- return True
- return NotImplemented
- return classmethod(__extrahook__)
-
-
-class GenericMeta(TypingMeta, abc.ABCMeta):
- """Metaclass for generic types."""
-
- def __new__(cls, name, bases, namespace,
- tvars=None, args=None, origin=None, extra=None, orig_bases=None):
- if tvars is not None:
- # Called from __getitem__() below.
- assert origin is not None
- assert all(isinstance(t, TypeVar) for t in tvars), tvars
- else:
- # Called from class statement.
- assert tvars is None, tvars
- assert args is None, args
- assert origin is None, origin
-
- # Get the full set of tvars from the bases.
- tvars = _type_vars(bases)
- # Look for Generic[T1, ..., Tn].
- # If found, tvars must be a subset of it.
- # If not found, tvars is it.
- # Also check for and reject plain Generic,
- # and reject multiple Generic[...].
- gvars = None
- for base in bases:
- if base is Generic:
- raise TypeError("Cannot inherit from plain Generic")
- if (isinstance(base, GenericMeta) and
- base.__origin__ is Generic):
- if gvars is not None:
- raise TypeError(
- "Cannot inherit from Generic[...] multiple types.")
- gvars = base.__parameters__
- if gvars is None:
- gvars = tvars
- else:
- tvarset = set(tvars)
- gvarset = set(gvars)
- if not tvarset <= gvarset:
- raise TypeError(
- "Some type variables (%s) "
- "are not listed in Generic[%s]" %
- (", ".join(str(t) for t in tvars if t not in gvarset),
- ", ".join(str(g) for g in gvars)))
- tvars = gvars
-
- initial_bases = bases
- if extra is None:
- extra = namespace.get('__extra__')
- if extra is not None and type(extra) is abc.ABCMeta and extra not in bases:
- bases = (extra,) + bases
- bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases)
-
- # remove bare Generic from bases if there are other generic bases
- if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
- bases = tuple(b for b in bases if b is not Generic)
- self = super(GenericMeta, cls).__new__(cls, name, bases, namespace)
-
- self.__parameters__ = tvars
- # Be prepared for GenericMeta to be subclassed by TupleMeta
- # and CallableMeta; those two allow ..., (), or [] in __args__.
- self.__args__ = tuple(Ellipsis if a is _TypingEllipsis else
- () if a is _TypingEmpty else
- a for a in args) if args else None
- self.__origin__ = origin
- self.__extra__ = extra
- # Speed hack (https://github.com/python/typing/issues/196).
- self.__next_in_mro__ = _next_in_mro(self)
- # Preserve base classes on subclassing (__bases__ are type erased now).
- if orig_bases is None:
- self.__orig_bases__ = initial_bases
-
- # This allows unparameterized generic collections to be used
- # with issubclass() and isinstance() in the same way as their
- # collections.abc counterparts (e.g., isinstance([], Iterable)).
- if ('__subclasshook__' not in namespace and extra # allow overriding
- or hasattr(self.__subclasshook__, '__name__') and
- self.__subclasshook__.__name__ == '__extrahook__'):
- self.__subclasshook__ = _make_subclasshook(self)
-
- if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2.
- self.__qualname__ = origin.__qualname__
- self.__tree_hash__ = hash(self._subs_tree()) if origin else hash((self.__name__,))
- return self
-
- def __init__(self, *args, **kwargs):
- super(GenericMeta, self).__init__(*args, **kwargs)
- if isinstance(self.__extra__, abc.ABCMeta):
- self._abc_registry = self.__extra__._abc_registry
-
- def _get_type_vars(self, tvars):
- if self.__origin__ and self.__parameters__:
- _get_type_vars(self.__parameters__, tvars)
-
- def _eval_type(self, globalns, localns):
- ev_origin = (self.__origin__._eval_type(globalns, localns)
- if self.__origin__ else None)
- ev_args = tuple(_eval_type(a, globalns, localns) for a
- in self.__args__) if self.__args__ else None
- if ev_origin == self.__origin__ and ev_args == self.__args__:
- return self
- return self.__class__(self.__name__,
- self.__bases__,
- dict(self.__dict__),
- tvars=_type_vars(ev_args) if ev_args else None,
- args=ev_args,
- origin=ev_origin,
- extra=self.__extra__,
- orig_bases=self.__orig_bases__)
-
- def __repr__(self):
- if self.__origin__ is None:
- return super(GenericMeta, self).__repr__()
- return self._tree_repr(self._subs_tree())
-
- def _tree_repr(self, tree):
- arg_list = []
- for arg in tree[1:]:
- if arg == ():
- arg_list.append('()')
- elif not isinstance(arg, tuple):
- arg_list.append(_type_repr(arg))
- else:
- arg_list.append(arg[0]._tree_repr(arg))
- return super(GenericMeta, self).__repr__() + '[%s]' % ', '.join(arg_list)
-
- def _subs_tree(self, tvars=None, args=None):
- if self.__origin__ is None:
- return self
- tree_args = _subs_tree(self, tvars, args)
- return (_gorg(self),) + tuple(tree_args)
-
- def __eq__(self, other):
- if not isinstance(other, GenericMeta):
- return NotImplemented
- if self.__origin__ is None or other.__origin__ is None:
- return self is other
- return self.__tree_hash__ == other.__tree_hash__
-
- def __hash__(self):
- return self.__tree_hash__
-
- @_tp_cache
- def __getitem__(self, params):
- if not isinstance(params, tuple):
- params = (params,)
- if not params and _gorg(self) is not Tuple:
- raise TypeError(
- "Parameter list to %s[...] cannot be empty" % _qualname(self))
- msg = "Parameters to generic types must be types."
- params = tuple(_type_check(p, msg) for p in params)
- if self is Generic:
- # Generic can only be subscripted with unique type variables.
- if not all(isinstance(p, TypeVar) for p in params):
- raise TypeError(
- "Parameters to Generic[...] must all be type variables")
- if len(set(params)) != len(params):
- raise TypeError(
- "Parameters to Generic[...] must all be unique")
- tvars = params
- args = params
- elif self in (Tuple, Callable):
- tvars = _type_vars(params)
- args = params
- elif self is _Protocol:
- # _Protocol is internal, don't check anything.
- tvars = params
- args = params
- elif self.__origin__ in (Generic, _Protocol):
- # Can't subscript Generic[...] or _Protocol[...].
- raise TypeError("Cannot subscript already-subscripted %s" %
- repr(self))
- else:
- # Subscripting a regular Generic subclass.
- _check_generic(self, params)
- tvars = _type_vars(params)
- args = params
- return self.__class__(self.__name__,
- self.__bases__,
- dict(self.__dict__),
- tvars=tvars,
- args=args,
- origin=self,
- extra=self.__extra__,
- orig_bases=self.__orig_bases__)
-
- def __instancecheck__(self, instance):
- # Since we extend ABC.__subclasscheck__ and
- # ABC.__instancecheck__ inlines the cache checking done by the
- # latter, we must extend __instancecheck__ too. For simplicity
- # we just skip the cache check -- instance checks for generic
- # classes are supposed to be rare anyways.
- if not isinstance(instance, type):
- return issubclass(instance.__class__, self)
- return False
-
- def __copy__(self):
- return self.__class__(self.__name__, self.__bases__, dict(self.__dict__),
- self.__parameters__, self.__args__, self.__origin__,
- self.__extra__, self.__orig_bases__)
-
-
-# Prevent checks for Generic to crash when defining Generic.
-Generic = None
-
-
-def _generic_new(base_cls, cls, *args, **kwds):
- # Ensure type is erased on instantiation,
- # but attempt to store it in __orig_class__
- if cls.__origin__ is None:
- return base_cls.__new__(cls)
- else:
- origin = _gorg(cls)
- obj = base_cls.__new__(origin)
- try:
- obj.__orig_class__ = cls
- except AttributeError:
- pass
- obj.__init__(*args, **kwds)
- return obj
-
-
-class Generic(object):
- """Abstract base class for generic types.
-
- A generic type is typically declared by inheriting from
- this class parameterized with one or more type variables.
- For example, a generic mapping type might be defined as::
-
- class Mapping(Generic[KT, VT]):
- def __getitem__(self, key: KT) -> VT:
- ...
- # Etc.
-
- This class can then be used as follows::
-
- def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT:
- try:
- return mapping[key]
- except KeyError:
- return default
- """
-
- __metaclass__ = GenericMeta
- __slots__ = ()
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, Generic):
- raise TypeError("Type Generic cannot be instantiated; "
- "it can be used only as a base class")
- return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
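A hedged sketch of a user-defined generic built on Generic, using Python 2 type comments; the Stack class is invented purely for illustration:

    from typing import Generic, List, TypeVar

    T = TypeVar('T')

    class Stack(Generic[T]):
        def __init__(self):
            # type: () -> None
            self._items = []  # type: List[T]

        def push(self, item):
            # type: (T) -> None
            self._items.append(item)

        def pop(self):
            # type: () -> T
            return self._items.pop()

    s = Stack()  # type: Stack[int]
    s.push(1)
    assert s.pop() == 1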
-
-
-class _TypingEmpty(object):
- """Internal placeholder for () or []. Used by TupleMeta and CallableMeta
- to allow empty list/tuple in specific places, without allowing them
- to sneak in where prohibited.
- """
-
-
-class _TypingEllipsis(object):
- """Internal placeholder for ... (ellipsis)."""
-
-
-class TupleMeta(GenericMeta):
- """Metaclass for Tuple (internal)."""
-
- @_tp_cache
- def __getitem__(self, parameters):
- if self.__origin__ is not None or not _geqv(self, Tuple):
- # Normal generic rules apply if this is not the first subscription
- # or a subscription of a subclass.
- return super(TupleMeta, self).__getitem__(parameters)
- if parameters == ():
- return super(TupleMeta, self).__getitem__((_TypingEmpty,))
- if not isinstance(parameters, tuple):
- parameters = (parameters,)
- if len(parameters) == 2 and parameters[1] is Ellipsis:
- msg = "Tuple[t, ...]: t must be a type."
- p = _type_check(parameters[0], msg)
- return super(TupleMeta, self).__getitem__((p, _TypingEllipsis))
- msg = "Tuple[t0, t1, ...]: each t must be a type."
- parameters = tuple(_type_check(p, msg) for p in parameters)
- return super(TupleMeta, self).__getitem__(parameters)
-
- def __instancecheck__(self, obj):
- if self.__args__ is None:
- return isinstance(obj, tuple)
- raise TypeError("Parameterized Tuple cannot be used "
- "with isinstance().")
-
- def __subclasscheck__(self, cls):
- if self.__args__ is None:
- return issubclass(cls, tuple)
- raise TypeError("Parameterized Tuple cannot be used "
- "with issubclass().")
-
-
-class Tuple(tuple):
- """Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
-
- Example: Tuple[T1, T2] is a tuple of two elements corresponding
- to type variables T1 and T2. Tuple[int, float, str] is a tuple
- of an int, a float and a string.
-
- To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
- """
-
- __metaclass__ = TupleMeta
- __extra__ = tuple
- __slots__ = ()
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, Tuple):
- raise TypeError("Type Tuple cannot be instantiated; "
- "use tuple() instead")
- return _generic_new(tuple, cls, *args, **kwds)
-
-
-class CallableMeta(GenericMeta):
- """ Metaclass for Callable."""
-
- def __repr__(self):
- if self.__origin__ is None:
- return super(CallableMeta, self).__repr__()
- return self._tree_repr(self._subs_tree())
-
- def _tree_repr(self, tree):
- if _gorg(self) is not Callable:
- return super(CallableMeta, self)._tree_repr(tree)
- # For actual Callable (not its subclass) we override
- # super(CallableMeta, self)._tree_repr() for nice formatting.
- arg_list = []
- for arg in tree[1:]:
- if not isinstance(arg, tuple):
- arg_list.append(_type_repr(arg))
- else:
- arg_list.append(arg[0]._tree_repr(arg))
- if arg_list[0] == '...':
- return repr(tree[0]) + '[..., %s]' % arg_list[1]
- return (repr(tree[0]) +
- '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1]))
-
- def __getitem__(self, parameters):
- """A thin wrapper around __getitem_inner__ to provide the latter
- with hashable arguments to improve speed.
- """
-
- if self.__origin__ is not None or not _geqv(self, Callable):
- return super(CallableMeta, self).__getitem__(parameters)
- if not isinstance(parameters, tuple) or len(parameters) != 2:
- raise TypeError("Callable must be used as "
- "Callable[[arg, ...], result].")
- args, result = parameters
- if args is Ellipsis:
- parameters = (Ellipsis, result)
- else:
- if not isinstance(args, list):
- raise TypeError("Callable[args, result]: args must be a list."
- " Got %.100r." % (args,))
- parameters = (tuple(args), result)
- return self.__getitem_inner__(parameters)
-
- @_tp_cache
- def __getitem_inner__(self, parameters):
- args, result = parameters
- msg = "Callable[args, result]: result must be a type."
- result = _type_check(result, msg)
- if args is Ellipsis:
- return super(CallableMeta, self).__getitem__((_TypingEllipsis, result))
- msg = "Callable[[arg, ...], result]: each arg must be a type."
- args = tuple(_type_check(arg, msg) for arg in args)
- parameters = args + (result,)
- return super(CallableMeta, self).__getitem__(parameters)
-
-
-class Callable(object):
- """Callable type; Callable[[int], str] is a function of (int) -> str.
-
- The subscription syntax must always be used with exactly two
- values: the argument list and the return type. The argument list
- must be a list of types or ellipsis; the return type must be a single type.
-
- There is no syntax to indicate optional or keyword arguments;
- such function types are rarely used as callback types.
- """
-
- __metaclass__ = CallableMeta
- __extra__ = collections_abc.Callable
- __slots__ = ()
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, Callable):
- raise TypeError("Type Callable cannot be instantiated; "
- "use a non-abstract subclass instead")
- return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
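A minimal sketch of annotating a callback with Callable via a Python 2 type comment; the subscription lives only in the comment, so nothing is enforced at runtime:

    from typing import Callable

    def apply_twice(func, value):
        # type: (Callable[[int], int], int) -> int
        return func(func(value))

    assert apply_twice(lambda x: x + 1, 0) == 2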
-
-
-def cast(typ, val):
- """Cast a value to a type.
-
- This returns the value unchanged. To the type checker this
- signals that the return value has the designated type, but at
- runtime we intentionally don't check anything (we want this
- to be as fast as possible).
- """
- return val
-
-
-def _get_defaults(func):
- """Internal helper to extract the default arguments, by name."""
- code = func.__code__
- pos_count = code.co_argcount
- arg_names = code.co_varnames
- arg_names = arg_names[:pos_count]
- defaults = func.__defaults__ or ()
- kwdefaults = func.__kwdefaults__
- res = dict(kwdefaults) if kwdefaults else {}
- pos_offset = pos_count - len(defaults)
- for name, value in zip(arg_names[pos_offset:], defaults):
- assert name not in res
- res[name] = value
- return res
-
-
-def get_type_hints(obj, globalns=None, localns=None):
- """In Python 2 this is not supported and always returns None."""
- return None
-
-
-def no_type_check(arg):
- """Decorator to indicate that annotations are not type hints.
-
- The argument must be a class or function; if it is a class, it
- applies recursively to all methods and classes defined in that class
- (but not to methods defined in its superclasses or subclasses).
-
- This mutates the function(s) or class(es) in place.
- """
- if isinstance(arg, type):
- arg_attrs = arg.__dict__.copy()
- for attr, val in arg.__dict__.items():
- if val in arg.__bases__:
- arg_attrs.pop(attr)
- for obj in arg_attrs.values():
- if isinstance(obj, types.FunctionType):
- obj.__no_type_check__ = True
- if isinstance(obj, type):
- no_type_check(obj)
- try:
- arg.__no_type_check__ = True
- except TypeError: # built-in classes
- pass
- return arg
-
-
-def no_type_check_decorator(decorator):
- """Decorator to give another decorator the @no_type_check effect.
-
- This wraps the decorator with something that wraps the decorated
- function in @no_type_check.
- """
-
- @functools.wraps(decorator)
- def wrapped_decorator(*args, **kwds):
- func = decorator(*args, **kwds)
- func = no_type_check(func)
- return func
-
- return wrapped_decorator
-
-
-def _overload_dummy(*args, **kwds):
- """Helper for @overload to raise when called."""
- raise NotImplementedError(
- "You should not call an overloaded function. "
- "A series of @overload-decorated functions "
- "outside a stub module should always be followed "
- "by an implementation that is not @overload-ed.")
-
-
-def overload(func):
- """Decorator for overloaded functions/methods.
-
- In a stub file, place two or more stub definitions for the same
- function in a row, each decorated with @overload. For example:
-
- @overload
- def utf8(value: None) -> None: ...
- @overload
- def utf8(value: bytes) -> bytes: ...
- @overload
- def utf8(value: str) -> bytes: ...
-
- In a non-stub file (i.e. a regular .py file), do the same but
- follow it with an implementation. The implementation should *not*
- be decorated with @overload. For example:
-
- @overload
- def utf8(value: None) -> None: ...
- @overload
- def utf8(value: bytes) -> bytes: ...
- @overload
- def utf8(value: str) -> bytes: ...
- def utf8(value):
- # implementation goes here
- """
- return _overload_dummy
-
-
-class _ProtocolMeta(GenericMeta):
- """Internal metaclass for _Protocol.
-
- This exists so _Protocol classes can be generic without deriving
- from Generic.
- """
-
- def __instancecheck__(self, obj):
- if _Protocol not in self.__bases__:
- return super(_ProtocolMeta, self).__instancecheck__(obj)
- raise TypeError("Protocols cannot be used with isinstance().")
-
- def __subclasscheck__(self, cls):
- if not self._is_protocol:
- # No structural checks since this isn't a protocol.
- return NotImplemented
-
- if self is _Protocol:
- # Every class is a subclass of the empty protocol.
- return True
-
- # Find all attributes defined in the protocol.
- attrs = self._get_protocol_attrs()
-
- for attr in attrs:
- if not any(attr in d.__dict__ for d in cls.__mro__):
- return False
- return True
-
- def _get_protocol_attrs(self):
- # Get all Protocol base classes.
- protocol_bases = []
- for c in self.__mro__:
- if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol':
- protocol_bases.append(c)
-
- # Get attributes included in protocol.
- attrs = set()
- for base in protocol_bases:
- for attr in base.__dict__.keys():
- # Include attributes not defined in any non-protocol bases.
- for c in self.__mro__:
- if (c is not base and attr in c.__dict__ and
- not getattr(c, '_is_protocol', False)):
- break
- else:
- if (not attr.startswith('_abc_') and
- attr not in ('__abstractmethods__', '_is_protocol',
- '__dict__', '__args__', '__slots__',
- '_get_protocol_attrs', '__next_in_mro__',
- '__parameters__', '__origin__',
- '__orig_bases__', '__extra__',
- '__tree_hash__', '__module__')):
- attrs.add(attr)
-
- return attrs
-
-
-class _Protocol(object):
- """Internal base class for protocol classes.
-
- This implements a simple-minded structural issubclass check
- (similar but more general than the one-offs in collections.abc
- such as Hashable).
- """
-
- __metaclass__ = _ProtocolMeta
- __slots__ = ()
-
- _is_protocol = True
-
-
-# Various ABCs mimicking those in collections.abc.
-# A few are simply re-exported for completeness.
-
-Hashable = collections_abc.Hashable # Not generic.
-
-
-class Iterable(Generic[T_co]):
- __slots__ = ()
- __extra__ = collections_abc.Iterable
-
-
-class Iterator(Iterable[T_co]):
- __slots__ = ()
- __extra__ = collections_abc.Iterator
-
-
-class SupportsInt(_Protocol):
- __slots__ = ()
-
- @abstractmethod
- def __int__(self):
- pass
-
-
-class SupportsFloat(_Protocol):
- __slots__ = ()
-
- @abstractmethod
- def __float__(self):
- pass
-
-
-class SupportsComplex(_Protocol):
- __slots__ = ()
-
- @abstractmethod
- def __complex__(self):
- pass
-
-
-class SupportsAbs(_Protocol[T_co]):
- __slots__ = ()
-
- @abstractmethod
- def __abs__(self):
- pass
-
-
-if hasattr(collections_abc, 'Reversible'):
- class Reversible(Iterable[T_co]):
- __slots__ = ()
- __extra__ = collections_abc.Reversible
-else:
- class Reversible(_Protocol[T_co]):
- __slots__ = ()
-
- @abstractmethod
- def __reversed__(self):
- pass
-
-
-Sized = collections_abc.Sized # Not generic.
-
-
-class Container(Generic[T_co]):
- __slots__ = ()
- __extra__ = collections_abc.Container
-
-
-# Callable was defined earlier.
-
-
-class AbstractSet(Sized, Iterable[T_co], Container[T_co]):
- __slots__ = ()
- __extra__ = collections_abc.Set
-
-
-class MutableSet(AbstractSet[T]):
- __slots__ = ()
- __extra__ = collections_abc.MutableSet
-
-
-# NOTE: It is only covariant in the value type.
-class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co]):
- __slots__ = ()
- __extra__ = collections_abc.Mapping
-
-
-class MutableMapping(Mapping[KT, VT]):
- __slots__ = ()
- __extra__ = collections_abc.MutableMapping
-
-
-if hasattr(collections_abc, 'Reversible'):
- class Sequence(Sized, Reversible[T_co], Container[T_co]):
- __slots__ = ()
- __extra__ = collections_abc.Sequence
-else:
- class Sequence(Sized, Iterable[T_co], Container[T_co]):
- __slots__ = ()
- __extra__ = collections_abc.Sequence
-
-
-class MutableSequence(Sequence[T]):
- __slots__ = ()
- __extra__ = collections_abc.MutableSequence
-
-
-class ByteString(Sequence[int]):
- pass
-
-
-ByteString.register(str)
-ByteString.register(bytearray)
-
-
-class List(list, MutableSequence[T]):
- __slots__ = ()
- __extra__ = list
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, List):
- raise TypeError("Type List cannot be instantiated; "
- "use list() instead")
- return _generic_new(list, cls, *args, **kwds)
-
-
-class Set(set, MutableSet[T]):
- __slots__ = ()
- __extra__ = set
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, Set):
- raise TypeError("Type Set cannot be instantiated; "
- "use set() instead")
- return _generic_new(set, cls, *args, **kwds)
-
-
-class FrozenSet(frozenset, AbstractSet[T_co]):
- __slots__ = ()
- __extra__ = frozenset
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, FrozenSet):
- raise TypeError("Type FrozenSet cannot be instantiated; "
- "use frozenset() instead")
- return _generic_new(frozenset, cls, *args, **kwds)
-
-
-class MappingView(Sized, Iterable[T_co]):
- __slots__ = ()
- __extra__ = collections_abc.MappingView
-
-
-class KeysView(MappingView[KT], AbstractSet[KT]):
- __slots__ = ()
- __extra__ = collections_abc.KeysView
-
-
-class ItemsView(MappingView[Tuple[KT, VT_co]],
- AbstractSet[Tuple[KT, VT_co]],
- Generic[KT, VT_co]):
- __slots__ = ()
- __extra__ = collections_abc.ItemsView
-
-
-class ValuesView(MappingView[VT_co]):
- __slots__ = ()
- __extra__ = collections_abc.ValuesView
-
-
-class Dict(dict, MutableMapping[KT, VT]):
- __slots__ = ()
- __extra__ = dict
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, Dict):
- raise TypeError("Type Dict cannot be instantiated; "
- "use dict() instead")
- return _generic_new(dict, cls, *args, **kwds)
-
-
-class DefaultDict(collections.defaultdict, MutableMapping[KT, VT]):
- __slots__ = ()
- __extra__ = collections.defaultdict
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, DefaultDict):
- raise TypeError("Type DefaultDict cannot be instantiated; "
- "use collections.defaultdict() instead")
- return _generic_new(collections.defaultdict, cls, *args, **kwds)
-
-
-# Determine what base class to use for Generator.
-if hasattr(collections_abc, 'Generator'):
- # Sufficiently recent versions of 3.5 have a Generator ABC.
- _G_base = collections_abc.Generator
-else:
- # Fall back on the exact type.
- _G_base = types.GeneratorType
-
-
-class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co]):
- __slots__ = ()
- __extra__ = _G_base
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, Generator):
- raise TypeError("Type Generator cannot be instantiated; "
- "create a subclass instead")
- return _generic_new(_G_base, cls, *args, **kwds)
-
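# [Editor's illustration; not part of the original file or patch.] A hedged
# sketch of annotating a generator in Python 2 type-comment style, assuming
# this backport is importable as `typing`; the parameters are
# Generator[YieldType, SendType, ReturnType].
from typing import Generator

def count_up_to(n):
    # type: (int) -> Generator[int, None, None]
    i = 0
    while i < n:
        yield i
        i += 1

print(list(count_up_to(3)))  # prints [0, 1, 2]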
-
-# Internal type variable used for Type[].
-CT_co = TypeVar('CT_co', covariant=True, bound=type)
-
-
-# This is not a real generic class. Don't use outside annotations.
-class Type(Generic[CT_co]):
- """A special construct usable to annotate class objects.
-
- For example, suppose we have the following classes::
-
- class User: ... # Abstract base for User classes
- class BasicUser(User): ...
- class ProUser(User): ...
- class TeamUser(User): ...
-
- And a function that takes a class argument that's a subclass of
- User and returns an instance of the corresponding class::
-
- U = TypeVar('U', bound=User)
- def new_user(user_class: Type[U]) -> U:
- user = user_class()
- # (Here we could write the user object to a database)
- return user
-
- joe = new_user(BasicUser)
-
- At this point the type checker knows that joe has type BasicUser.
- """
- __slots__ = ()
- __extra__ = type
-
-
-def NamedTuple(typename, fields):
- """Typed version of namedtuple.
-
- Usage::
-
- Employee = typing.NamedTuple('Employee', [('name', str), ('id', int)])
-
- This is equivalent to::
-
- Employee = collections.namedtuple('Employee', ['name', 'id'])
-
- The resulting class has one extra attribute: _field_types,
- giving a dict mapping field names to types. (The field names
- are in the _fields attribute, which is part of the namedtuple
- API.)
- """
- fields = [(n, t) for n, t in fields]
- cls = collections.namedtuple(typename, [n for n, t in fields])
- cls._field_types = dict(fields)
- # Set the module to the caller's module (otherwise it'd be 'typing').
- try:
- cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__')
- except (AttributeError, ValueError):
- pass
- return cls
-
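# [Editor's illustration; not part of the original file or patch.] A short
# usage sketch of the _fields / _field_types attributes described in the
# docstring above, assuming this backport is importable as `typing`.
from typing import NamedTuple

Employee = NamedTuple('Employee', [('name', str), ('id', int)])

bob = Employee(name='Bob', id=3)
print(bob.name)               # 'Bob'
print(Employee._fields)       # ('name', 'id')
print(Employee._field_types)  # maps 'name' -> str and 'id' -> int (dict order may vary)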
-
-def NewType(name, tp):
- """NewType creates simple unique types with almost zero
- runtime overhead. NewType(name, tp) is considered a subtype of tp
- by static type checkers. At runtime, NewType(name, tp) returns
- a dummy function that simply returns its argument. Usage::
-
- UserId = NewType('UserId', int)
-
- def name_by_id(user_id):
- # type: (UserId) -> str
- ...
-
- UserId('user') # Fails type check
-
- name_by_id(42) # Fails type check
- name_by_id(UserId(42)) # OK
-
- num = UserId(5) + 1 # type: int
- """
-
- def new_type(x):
- return x
-
- # Some versions of Python 2 complain if __name__ is set to a unicode string.
- new_type.__name__ = str(name)
- new_type.__supertype__ = tp
- return new_type
-
-
-# Python-version-specific alias (Python 2: unicode; Python 3: str)
-Text = unicode
-
-
-# Constant that's True when type checking, but False here.
-TYPE_CHECKING = False
-
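# [Editor's illustration; not part of the original file or patch.] The usual
# pattern TYPE_CHECKING enables: importing names that are needed only for type
# comments, without paying for the import (or creating a cycle) at runtime.
# `heavy_module` and `BigClass` are hypothetical names used only here.
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from heavy_module import BigClass  # evaluated only by static type checkers

def process(obj):
    # type: (BigClass) -> None
    pass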
-
-class IO(Generic[AnyStr]):
- """Generic base class for TextIO and BinaryIO.
-
- This is an abstract, generic version of the return of open().
-
- NOTE: This does not distinguish between the different possible
- classes (text vs. binary, read vs. write vs. read/write,
- append-only, unbuffered). The TextIO and BinaryIO subclasses
- below capture the distinction between text and binary I/O, which
- is pervasive in the interface; however, we currently do not offer a
- way to track the other distinctions in the type system.
- """
-
- __slots__ = ()
-
- @abstractproperty
- def mode(self):
- pass
-
- @abstractproperty
- def name(self):
- pass
-
- @abstractmethod
- def close(self):
- pass
-
- @abstractmethod
- def closed(self):
- pass
-
- @abstractmethod
- def fileno(self):
- pass
-
- @abstractmethod
- def flush(self):
- pass
-
- @abstractmethod
- def isatty(self):
- pass
-
- @abstractmethod
- def read(self, n = -1):
- pass
-
- @abstractmethod
- def readable(self):
- pass
-
- @abstractmethod
- def readline(self, limit = -1):
- pass
-
- @abstractmethod
- def readlines(self, hint = -1):
- pass
-
- @abstractmethod
- def seek(self, offset, whence = 0):
- pass
-
- @abstractmethod
- def seekable(self):
- pass
-
- @abstractmethod
- def tell(self):
- pass
-
- @abstractmethod
- def truncate(self, size = None):
- pass
-
- @abstractmethod
- def writable(self):
- pass
-
- @abstractmethod
- def write(self, s):
- pass
-
- @abstractmethod
- def writelines(self, lines):
- pass
-
- @abstractmethod
- def __enter__(self):
- pass
-
- @abstractmethod
- def __exit__(self, type, value, traceback):
- pass
-
-
-class BinaryIO(IO[bytes]):
- """Typed version of the return of open() in binary mode."""
-
- __slots__ = ()
-
- @abstractmethod
- def write(self, s):
- pass
-
- @abstractmethod
- def __enter__(self):
- pass
-
-
-class TextIO(IO[unicode]):
- """Typed version of the return of open() in text mode."""
-
- __slots__ = ()
-
- @abstractproperty
- def buffer(self):
- pass
-
- @abstractproperty
- def encoding(self):
- pass
-
- @abstractproperty
- def errors(self):
- pass
-
- @abstractproperty
- def line_buffering(self):
- pass
-
- @abstractproperty
- def newlines(self):
- pass
-
- @abstractmethod
- def __enter__(self):
- pass
-
-
-class io(object):
- """Wrapper namespace for IO generic classes."""
-
- __all__ = ['IO', 'TextIO', 'BinaryIO']
- IO = IO
- TextIO = TextIO
- BinaryIO = BinaryIO
-
-io.__name__ = __name__ + b'.io'
-sys.modules[io.__name__] = io
-
-
-Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')),
- lambda p: p.pattern)
-Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')),
- lambda m: m.re.pattern)
-
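# [Editor's illustration; not part of the original file or patch.] Pattern and
# Match alias the result types of re.compile() and re.match(); a sketch of
# using them in Python 2 type comments, assuming this backport is importable
# as `typing`.
import re  # the standard library re module
from typing import Match, Optional, Pattern

WORD = re.compile(r'\w+')  # type: Pattern[str]

def first_word(text):
    # type: (str) -> Optional[Match[str]]
    return WORD.match(text)

m = first_word('hello world')
print(m.group(0))  # 'hello'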
-
-class re(object):
- """Wrapper namespace for re type aliases."""
-
- __all__ = ['Pattern', 'Match']
- Pattern = Pattern
- Match = Match
-
-re.__name__ = __name__ + b'.re'
-sys.modules[re.__name__] = re
diff --git a/lib-typing/3.2/test_typing.py b/lib-typing/3.2/test_typing.py
deleted file mode 100644
index d203ce3..0000000
--- a/lib-typing/3.2/test_typing.py
+++ /dev/null
@@ -1,2063 +0,0 @@
-import contextlib
-import collections
-import pickle
-import re
-import sys
-from unittest import TestCase, main, skipUnless, SkipTest
-from copy import copy, deepcopy
-
-from typing import Any
-from typing import TypeVar, AnyStr
-from typing import T, KT, VT # Not in __all__.
-from typing import Union, Optional
-from typing import Tuple, List, MutableMapping
-from typing import Callable
-from typing import Generic, ClassVar
-from typing import cast
-from typing import get_type_hints
-from typing import no_type_check, no_type_check_decorator
-from typing import Type
-from typing import NewType
-from typing import NamedTuple
-from typing import IO, TextIO, BinaryIO
-from typing import Pattern, Match
-import abc
-import typing
-try:
- import collections.abc as collections_abc
-except ImportError:
- import collections as collections_abc # Fallback for PY3.2.
-
-
-class BaseTestCase(TestCase):
-
- def assertIsSubclass(self, cls, class_or_tuple, msg=None):
- if not issubclass(cls, class_or_tuple):
- message = '%r is not a subclass of %r' % (cls, class_or_tuple)
- if msg is not None:
- message += ' : %s' % msg
- raise self.failureException(message)
-
- def assertNotIsSubclass(self, cls, class_or_tuple, msg=None):
- if issubclass(cls, class_or_tuple):
- message = '%r is a subclass of %r' % (cls, class_or_tuple)
- if msg is not None:
- message += ' : %s' % msg
- raise self.failureException(message)
-
- def clear_caches(self):
- for f in typing._cleanups:
- f()
-
-
-class Employee:
- pass
-
-
-class Manager(Employee):
- pass
-
-
-class Founder(Employee):
- pass
-
-
-class ManagingFounder(Manager, Founder):
- pass
-
-
-class AnyTests(BaseTestCase):
-
- def test_any_instance_type_error(self):
- with self.assertRaises(TypeError):
- isinstance(42, Any)
-
- def test_any_subclass_type_error(self):
- with self.assertRaises(TypeError):
- issubclass(Employee, Any)
- with self.assertRaises(TypeError):
- issubclass(Any, Employee)
-
- def test_repr(self):
- self.assertEqual(repr(Any), 'typing.Any')
-
- def test_errors(self):
- with self.assertRaises(TypeError):
- issubclass(42, Any)
- with self.assertRaises(TypeError):
- Any[int] # Any is not a generic type.
-
- def test_cannot_subclass(self):
- with self.assertRaises(TypeError):
- class A(Any):
- pass
- with self.assertRaises(TypeError):
- class A(type(Any)):
- pass
-
- def test_cannot_instantiate(self):
- with self.assertRaises(TypeError):
- Any()
- with self.assertRaises(TypeError):
- type(Any)()
-
- def test_cannot_subscript(self):
- with self.assertRaises(TypeError):
- Any[int]
-
- def test_any_works_with_alias(self):
- # These expressions must simply not fail.
- typing.Match[Any]
- typing.Pattern[Any]
- typing.IO[Any]
-
-
-class TypeVarTests(BaseTestCase):
-
- def test_basic_plain(self):
- T = TypeVar('T')
- # T equals itself.
- self.assertEqual(T, T)
- # T is an instance of TypeVar
- self.assertIsInstance(T, TypeVar)
-
- def test_typevar_instance_type_error(self):
- T = TypeVar('T')
- with self.assertRaises(TypeError):
- isinstance(42, T)
-
- def test_typevar_subclass_type_error(self):
- T = TypeVar('T')
- with self.assertRaises(TypeError):
- issubclass(int, T)
- with self.assertRaises(TypeError):
- issubclass(T, int)
-
- def test_constrained_error(self):
- with self.assertRaises(TypeError):
- X = TypeVar('X', int)
- X
-
- def test_union_unique(self):
- X = TypeVar('X')
- Y = TypeVar('Y')
- self.assertNotEqual(X, Y)
- self.assertEqual(Union[X], X)
- self.assertNotEqual(Union[X], Union[X, Y])
- self.assertEqual(Union[X, X], X)
- self.assertNotEqual(Union[X, int], Union[X])
- self.assertNotEqual(Union[X, int], Union[int])
- self.assertEqual(Union[X, int].__args__, (X, int))
- self.assertEqual(Union[X, int].__parameters__, (X,))
- self.assertIs(Union[X, int].__origin__, Union)
-
- def test_union_constrained(self):
- A = TypeVar('A', str, bytes)
- self.assertNotEqual(Union[A, str], Union[A])
-
- def test_repr(self):
- self.assertEqual(repr(T), '~T')
- self.assertEqual(repr(KT), '~KT')
- self.assertEqual(repr(VT), '~VT')
- self.assertEqual(repr(AnyStr), '~AnyStr')
- T_co = TypeVar('T_co', covariant=True)
- self.assertEqual(repr(T_co), '+T_co')
- T_contra = TypeVar('T_contra', contravariant=True)
- self.assertEqual(repr(T_contra), '-T_contra')
-
- def test_no_redefinition(self):
- self.assertNotEqual(TypeVar('T'), TypeVar('T'))
- self.assertNotEqual(TypeVar('T', int, str), TypeVar('T', int, str))
-
- def test_cannot_subclass_vars(self):
- with self.assertRaises(TypeError):
- class V(TypeVar('T')):
- pass
-
- def test_cannot_subclass_var_itself(self):
- with self.assertRaises(TypeError):
- class V(TypeVar):
- pass
-
- def test_cannot_instantiate_vars(self):
- with self.assertRaises(TypeError):
- TypeVar('A')()
-
- def test_bound_errors(self):
- with self.assertRaises(TypeError):
- TypeVar('X', bound=42)
- with self.assertRaises(TypeError):
- TypeVar('X', str, float, bound=Employee)
-
-
-class UnionTests(BaseTestCase):
-
- def test_basics(self):
- u = Union[int, float]
- self.assertNotEqual(u, Union)
-
- def test_subclass_error(self):
- with self.assertRaises(TypeError):
- issubclass(int, Union)
- with self.assertRaises(TypeError):
- issubclass(Union, int)
- with self.assertRaises(TypeError):
- issubclass(int, Union[int, str])
- with self.assertRaises(TypeError):
- issubclass(Union[int, str], int)
-
- def test_union_any(self):
- u = Union[Any]
- self.assertEqual(u, Any)
- u1 = Union[int, Any]
- u2 = Union[Any, int]
- u3 = Union[Any, object]
- self.assertEqual(u1, u2)
- self.assertNotEqual(u1, Any)
- self.assertNotEqual(u2, Any)
- self.assertNotEqual(u3, Any)
-
- def test_union_object(self):
- u = Union[object]
- self.assertEqual(u, object)
- u = Union[int, object]
- self.assertEqual(u, object)
- u = Union[object, int]
- self.assertEqual(u, object)
-
- def test_unordered(self):
- u1 = Union[int, float]
- u2 = Union[float, int]
- self.assertEqual(u1, u2)
-
- def test_single_class_disappears(self):
- t = Union[Employee]
- self.assertIs(t, Employee)
-
- def test_base_class_disappears(self):
- u = Union[Employee, Manager, int]
- self.assertEqual(u, Union[int, Employee])
- u = Union[Manager, int, Employee]
- self.assertEqual(u, Union[int, Employee])
- u = Union[Employee, Manager]
- self.assertIs(u, Employee)
-
- def test_union_union(self):
- u = Union[int, float]
- v = Union[u, Employee]
- self.assertEqual(v, Union[int, float, Employee])
-
- def test_repr(self):
- self.assertEqual(repr(Union), 'typing.Union')
- u = Union[Employee, int]
- self.assertEqual(repr(u), 'typing.Union[%s.Employee, int]' % __name__)
- u = Union[int, Employee]
- self.assertEqual(repr(u), 'typing.Union[int, %s.Employee]' % __name__)
-
- def test_cannot_subclass(self):
- with self.assertRaises(TypeError):
- class C(Union):
- pass
- with self.assertRaises(TypeError):
- class C(type(Union)):
- pass
- with self.assertRaises(TypeError):
- class C(Union[int, str]):
- pass
-
- def test_cannot_instantiate(self):
- with self.assertRaises(TypeError):
- Union()
- with self.assertRaises(TypeError):
- type(Union)()
- u = Union[int, float]
- with self.assertRaises(TypeError):
- u()
- with self.assertRaises(TypeError):
- type(u)()
-
- def test_union_generalization(self):
- self.assertFalse(Union[str, typing.Iterable[int]] == str)
- self.assertFalse(Union[str, typing.Iterable[int]] == typing.Iterable[int])
- self.assertTrue(Union[str, typing.Iterable] == typing.Iterable)
-
- def test_optional(self):
- o = Optional[int]
- u = Union[int, None]
- self.assertEqual(o, u)
-
- def test_empty(self):
- with self.assertRaises(TypeError):
- Union[()]
-
- def test_union_instance_type_error(self):
- with self.assertRaises(TypeError):
- isinstance(42, Union[int, str])
-
- def test_union_str_pattern(self):
- # Shouldn't crash; see http://bugs.python.org/issue25390
- A = Union[str, Pattern]
- A
-
- def test_etree(self):
- # See https://github.com/python/typing/issues/229
- # (Only relevant for Python 2.)
- try:
- from xml.etree.cElementTree import Element
- except ImportError:
- raise SkipTest("cElementTree not found")
- Union[Element, str] # Shouldn't crash
-
- def Elem(*args):
- return Element(*args)
-
- Union[Elem, str] # Nor should this
-
-
-class TupleTests(BaseTestCase):
-
- def test_basics(self):
- with self.assertRaises(TypeError):
- issubclass(Tuple, Tuple[int, str])
- with self.assertRaises(TypeError):
- issubclass(tuple, Tuple[int, str])
-
- class TP(tuple): ...
- self.assertTrue(issubclass(tuple, Tuple))
- self.assertTrue(issubclass(TP, Tuple))
-
- def test_equality(self):
- self.assertEqual(Tuple[int], Tuple[int])
- self.assertEqual(Tuple[int, ...], Tuple[int, ...])
- self.assertNotEqual(Tuple[int], Tuple[int, int])
- self.assertNotEqual(Tuple[int], Tuple[int, ...])
-
- def test_tuple_subclass(self):
- class MyTuple(tuple):
- pass
- self.assertTrue(issubclass(MyTuple, Tuple))
-
- def test_tuple_instance_type_error(self):
- with self.assertRaises(TypeError):
- isinstance((0, 0), Tuple[int, int])
- self.assertIsInstance((0, 0), Tuple)
-
- def test_repr(self):
- self.assertEqual(repr(Tuple), 'typing.Tuple')
- self.assertEqual(repr(Tuple[()]), 'typing.Tuple[()]')
- self.assertEqual(repr(Tuple[int, float]), 'typing.Tuple[int, float]')
- self.assertEqual(repr(Tuple[int, ...]), 'typing.Tuple[int, ...]')
-
- def test_errors(self):
- with self.assertRaises(TypeError):
- issubclass(42, Tuple)
- with self.assertRaises(TypeError):
- issubclass(42, Tuple[int])
-
-
-class CallableTests(BaseTestCase):
-
- def test_self_subclass(self):
- with self.assertRaises(TypeError):
- self.assertTrue(issubclass(type(lambda x: x), Callable[[int], int]))
- self.assertTrue(issubclass(type(lambda x: x), Callable))
-
- def test_eq_hash(self):
- self.assertEqual(Callable[[int], int], Callable[[int], int])
- self.assertEqual(len({Callable[[int], int], Callable[[int], int]}), 1)
- self.assertNotEqual(Callable[[int], int], Callable[[int], str])
- self.assertNotEqual(Callable[[int], int], Callable[[str], int])
- self.assertNotEqual(Callable[[int], int], Callable[[int, int], int])
- self.assertNotEqual(Callable[[int], int], Callable[[], int])
- self.assertNotEqual(Callable[[int], int], Callable)
-
- def test_cannot_instantiate(self):
- with self.assertRaises(TypeError):
- Callable()
- with self.assertRaises(TypeError):
- type(Callable)()
- c = Callable[[int], str]
- with self.assertRaises(TypeError):
- c()
- with self.assertRaises(TypeError):
- type(c)()
-
- def test_callable_wrong_forms(self):
- with self.assertRaises(TypeError):
- Callable[[...], int]
- with self.assertRaises(TypeError):
- Callable[(), int]
- with self.assertRaises(TypeError):
- Callable[[()], int]
- with self.assertRaises(TypeError):
- Callable[[int, 1], 2]
-
- def test_callable_instance_works(self):
- def f():
- pass
- self.assertIsInstance(f, Callable)
- self.assertNotIsInstance(None, Callable)
-
- def test_callable_instance_type_error(self):
- def f():
- pass
- with self.assertRaises(TypeError):
- self.assertIsInstance(f, Callable[[], None])
- with self.assertRaises(TypeError):
- self.assertIsInstance(f, Callable[[], Any])
- with self.assertRaises(TypeError):
- self.assertNotIsInstance(None, Callable[[], None])
- with self.assertRaises(TypeError):
- self.assertNotIsInstance(None, Callable[[], Any])
-
- def test_repr(self):
- ct0 = Callable[[], bool]
- self.assertEqual(repr(ct0), 'typing.Callable[[], bool]')
- ct2 = Callable[[str, float], int]
- self.assertEqual(repr(ct2), 'typing.Callable[[str, float], int]')
- ctv = Callable[..., str]
- self.assertEqual(repr(ctv), 'typing.Callable[..., str]')
-
- def test_callable_with_ellipsis(self):
-
- def foo(a: Callable[..., T]):
- pass
-
- self.assertEqual(get_type_hints(foo, globals(), locals()),
- {'a': Callable[..., T]})
-
- def test_ellipsis_in_generic(self):
- # Shouldn't crash; see https://github.com/python/typing/issues/259
- typing.List[Callable[..., str]]
-
-
-XK = TypeVar('XK', str, bytes)
-XV = TypeVar('XV')
-
-
-class SimpleMapping(Generic[XK, XV]):
-
- def __getitem__(self, key: XK) -> XV:
- ...
-
- def __setitem__(self, key: XK, value: XV):
- ...
-
- def get(self, key: XK, default: XV = None) -> XV:
- ...
-
-
-class MySimpleMapping(SimpleMapping[XK, XV]):
-
- def __init__(self):
- self.store = {}
-
- def __getitem__(self, key: str):
- return self.store[key]
-
- def __setitem__(self, key: str, value):
- self.store[key] = value
-
- def get(self, key: str, default=None):
- try:
- return self.store[key]
- except KeyError:
- return default
-
-
-class ProtocolTests(BaseTestCase):
-
- def test_supports_int(self):
- self.assertIsSubclass(int, typing.SupportsInt)
- self.assertNotIsSubclass(str, typing.SupportsInt)
-
- def test_supports_float(self):
- self.assertIsSubclass(float, typing.SupportsFloat)
- self.assertNotIsSubclass(str, typing.SupportsFloat)
-
- def test_supports_complex(self):
-
- # Note: complex itself doesn't have __complex__.
- class C:
- def __complex__(self):
- return 0j
-
- self.assertIsSubclass(C, typing.SupportsComplex)
- self.assertNotIsSubclass(str, typing.SupportsComplex)
-
- def test_supports_bytes(self):
-
- # Note: bytes itself doesn't have __bytes__.
- class B:
- def __bytes__(self):
- return b''
-
- self.assertIsSubclass(B, typing.SupportsBytes)
- self.assertNotIsSubclass(str, typing.SupportsBytes)
-
- def test_supports_abs(self):
- self.assertIsSubclass(float, typing.SupportsAbs)
- self.assertIsSubclass(int, typing.SupportsAbs)
- self.assertNotIsSubclass(str, typing.SupportsAbs)
-
- def test_supports_round(self):
- issubclass(float, typing.SupportsRound)
- self.assertIsSubclass(float, typing.SupportsRound)
- self.assertIsSubclass(int, typing.SupportsRound)
- self.assertNotIsSubclass(str, typing.SupportsRound)
-
- def test_reversible(self):
- self.assertIsSubclass(list, typing.Reversible)
- self.assertNotIsSubclass(int, typing.Reversible)
-
- def test_protocol_instance_type_error(self):
- with self.assertRaises(TypeError):
- isinstance(0, typing.SupportsAbs)
- class C1(typing.SupportsInt):
- def __int__(self) -> int:
- return 42
- class C2(C1):
- pass
- c = C2()
- self.assertIsInstance(c, C1)
-
-
-class GenericTests(BaseTestCase):
-
- def test_basics(self):
- X = SimpleMapping[str, Any]
- self.assertEqual(X.__parameters__, ())
- with self.assertRaises(TypeError):
- X[str]
- with self.assertRaises(TypeError):
- X[str, str]
- Y = SimpleMapping[XK, str]
- self.assertEqual(Y.__parameters__, (XK,))
- Y[str]
- with self.assertRaises(TypeError):
- Y[str, str]
-
- def test_generic_errors(self):
- T = TypeVar('T')
- with self.assertRaises(TypeError):
- Generic[T]()
- with self.assertRaises(TypeError):
- isinstance([], List[int])
- with self.assertRaises(TypeError):
- issubclass(list, List[int])
-
- def test_init(self):
- T = TypeVar('T')
- S = TypeVar('S')
- with self.assertRaises(TypeError):
- Generic[T, T]
- with self.assertRaises(TypeError):
- Generic[T, S, T]
-
- def test_repr(self):
- self.assertEqual(repr(SimpleMapping),
- __name__ + '.' + 'SimpleMapping')
- self.assertEqual(repr(MySimpleMapping),
- __name__ + '.' + 'MySimpleMapping')
-
- def test_chain_repr(self):
- T = TypeVar('T')
- S = TypeVar('S')
-
- class C(Generic[T]):
- pass
-
- X = C[Tuple[S, T]]
- self.assertEqual(X, C[Tuple[S, T]])
- self.assertNotEqual(X, C[Tuple[T, S]])
-
- Y = X[T, int]
- self.assertEqual(Y, X[T, int])
- self.assertNotEqual(Y, X[S, int])
- self.assertNotEqual(Y, X[T, str])
-
- Z = Y[str]
- self.assertEqual(Z, Y[str])
- self.assertNotEqual(Z, Y[int])
- self.assertNotEqual(Z, Y[T])
-
- self.assertTrue(str(Z).endswith(
- '.C[typing.Tuple[str, int]]'))
-
- def test_new_repr(self):
- T = TypeVar('T')
- U = TypeVar('U', covariant=True)
- S = TypeVar('S')
-
- self.assertEqual(repr(List), 'typing.List')
- self.assertEqual(repr(List[T]), 'typing.List[~T]')
- self.assertEqual(repr(List[U]), 'typing.List[+U]')
- self.assertEqual(repr(List[S][T][int]), 'typing.List[int]')
- self.assertEqual(repr(List[int]), 'typing.List[int]')
-
- def test_new_repr_complex(self):
- T = TypeVar('T')
- TS = TypeVar('TS')
-
- self.assertEqual(repr(typing.Mapping[T, TS][TS, T]), 'typing.Mapping[~TS, ~T]')
- self.assertEqual(repr(List[Tuple[T, TS]][int, T]),
- 'typing.List[typing.Tuple[int, ~T]]')
- self.assertEqual(repr(List[Tuple[T, T]][List[int]]),
- 'typing.List[typing.Tuple[typing.List[int], typing.List[int]]]')
-
- def test_new_repr_bare(self):
- T = TypeVar('T')
- self.assertEqual(repr(Generic[T]), 'typing.Generic[~T]')
- self.assertEqual(repr(typing._Protocol[T]), 'typing.Protocol[~T]')
- class C(typing.Dict[Any, Any]): ...
- # this line should just work
- repr(C.__mro__)
-
- def test_dict(self):
- T = TypeVar('T')
-
- class B(Generic[T]):
- pass
-
- b = B()
- b.foo = 42
- self.assertEqual(b.__dict__, {'foo': 42})
-
- class C(B[int]):
- pass
-
- c = C()
- c.bar = 'abc'
- self.assertEqual(c.__dict__, {'bar': 'abc'})
-
- def test_false_subclasses(self):
- class MyMapping(MutableMapping[str, str]): pass
- self.assertNotIsInstance({}, MyMapping)
- self.assertNotIsSubclass(dict, MyMapping)
-
- def test_abc_bases(self):
- class MM(MutableMapping[str, str]):
- def __getitem__(self, k):
- return None
- def __setitem__(self, k, v):
- pass
- def __delitem__(self, k):
- pass
- def __iter__(self):
- return iter(())
- def __len__(self):
- return 0
- # this should just work
- MM().update()
- self.assertIsInstance(MM(), collections_abc.MutableMapping)
- self.assertIsInstance(MM(), MutableMapping)
- self.assertNotIsInstance(MM(), List)
- self.assertNotIsInstance({}, MM)
-
- def test_multiple_bases(self):
- class MM1(MutableMapping[str, str], collections_abc.MutableMapping):
- pass
- with self.assertRaises(TypeError):
- # consistent MRO not possible
- class MM2(collections_abc.MutableMapping, MutableMapping[str, str]):
- pass
-
- def test_orig_bases(self):
- T = TypeVar('T')
- class C(typing.Dict[str, T]): ...
- self.assertEqual(C.__orig_bases__, (typing.Dict[str, T],))
-
- def test_naive_runtime_checks(self):
- def naive_dict_check(obj, tp):
- # Check if a dictionary conforms to Dict type
- if len(tp.__parameters__) > 0:
- raise NotImplementedError
- if tp.__args__:
- KT, VT = tp.__args__
- return all(isinstance(k, KT) and isinstance(v, VT)
- for k, v in obj.items())
- self.assertTrue(naive_dict_check({'x': 1}, typing.Dict[str, int]))
- self.assertFalse(naive_dict_check({1: 'x'}, typing.Dict[str, int]))
- with self.assertRaises(NotImplementedError):
- naive_dict_check({1: 'x'}, typing.Dict[str, T])
-
- def naive_generic_check(obj, tp):
- # Check if an instance conforms to the generic class
- if not hasattr(obj, '__orig_class__'):
- raise NotImplementedError
- return obj.__orig_class__ == tp
- class Node(Generic[T]): ...
- self.assertTrue(naive_generic_check(Node[int](), Node[int]))
- self.assertFalse(naive_generic_check(Node[str](), Node[int]))
- self.assertFalse(naive_generic_check(Node[str](), List))
- with self.assertRaises(NotImplementedError):
- naive_generic_check([1,2,3], Node[int])
-
- def naive_list_base_check(obj, tp):
- # Check if list conforms to a List subclass
- return all(isinstance(x, tp.__orig_bases__[0].__args__[0])
- for x in obj)
- class C(List[int]): ...
- self.assertTrue(naive_list_base_check([1, 2, 3], C))
- self.assertFalse(naive_list_base_check(['a', 'b'], C))
-
- def test_multi_subscr_base(self):
- T = TypeVar('T')
- U = TypeVar('U')
- V = TypeVar('V')
- class C(List[T][U][V]): ...
- class D(C, List[T][U][V]): ...
- self.assertEqual(C.__parameters__, (V,))
- self.assertEqual(D.__parameters__, (V,))
- self.assertEqual(C[int].__parameters__, ())
- self.assertEqual(D[int].__parameters__, ())
- self.assertEqual(C[int].__args__, (int,))
- self.assertEqual(D[int].__args__, (int,))
- self.assertEqual(C.__bases__, (List,))
- self.assertEqual(D.__bases__, (C, List))
- self.assertEqual(C.__orig_bases__, (List[T][U][V],))
- self.assertEqual(D.__orig_bases__, (C, List[T][U][V]))
-
- def test_extended_generic_rules_eq(self):
- T = TypeVar('T')
- U = TypeVar('U')
- self.assertEqual(Tuple[T, T][int], Tuple[int, int])
- self.assertEqual(typing.Iterable[Tuple[T, T]][T], typing.Iterable[Tuple[T, T]])
- with self.assertRaises(TypeError):
- Tuple[T, int][()]
- with self.assertRaises(TypeError):
- Tuple[T, U][T, ...]
-
- self.assertEqual(Union[T, int][int], int)
- self.assertEqual(Union[T, U][int, Union[int, str]], Union[int, str])
- class Base: ...
- class Derived(Base): ...
- self.assertEqual(Union[T, Base][Derived], Base)
- with self.assertRaises(TypeError):
- Union[T, int][1]
-
- self.assertEqual(Callable[[T], T][KT], Callable[[KT], KT])
- self.assertEqual(Callable[..., List[T]][int], Callable[..., List[int]])
- with self.assertRaises(TypeError):
- Callable[[T], U][..., int]
- with self.assertRaises(TypeError):
- Callable[[T], U][[], int]
-
- def test_extended_generic_rules_repr(self):
- T = TypeVar('T')
- self.assertEqual(repr(Union[Tuple, Callable]).replace('typing.', ''),
- 'Union[Tuple, Callable]')
- self.assertEqual(repr(Union[Tuple, Tuple[int]]).replace('typing.', ''),
- 'Tuple')
- self.assertEqual(repr(Callable[..., Optional[T]][int]).replace('typing.', ''),
- 'Callable[..., Union[int, NoneType]]')
- self.assertEqual(repr(Callable[[], List[T]][int]).replace('typing.', ''),
- 'Callable[[], List[int]]')
-
- def test_generic_forward_ref(self):
- def foobar(x: List[List['CC']]): ...
- class CC: ...
- self.assertEqual(get_type_hints(foobar, globals(), locals()), {'x': List[List[CC]]})
- T = TypeVar('T')
- AT = Tuple[T, ...]
- def barfoo(x: AT): ...
- self.assertIs(get_type_hints(barfoo, globals(), locals())['x'], AT)
- CT = Callable[..., List[T]]
- def barfoo2(x: CT): ...
- self.assertIs(get_type_hints(barfoo2, globals(), locals())['x'], CT)
-
- def test_extended_generic_rules_subclassing(self):
- class T1(Tuple[T, KT]): ...
- class T2(Tuple[T, ...]): ...
- class C1(Callable[[T], T]): ...
- class C2(Callable[..., int]):
- def __call__(self):
- return None
-
- self.assertEqual(T1.__parameters__, (T, KT))
- self.assertEqual(T1[int, str].__args__, (int, str))
- self.assertEqual(T1[int, T].__origin__, T1)
-
- self.assertEqual(T2.__parameters__, (T,))
- with self.assertRaises(TypeError):
- T1[int]
- with self.assertRaises(TypeError):
- T2[int, str]
-
- self.assertEqual(repr(C1[int]).split('.')[-1], 'C1[int]')
- self.assertEqual(C2.__parameters__, ())
- self.assertIsInstance(C2(), collections_abc.Callable)
- self.assertIsSubclass(C2, collections_abc.Callable)
- self.assertIsSubclass(C1, collections_abc.Callable)
- self.assertIsInstance(T1(), tuple)
- self.assertIsSubclass(T2, tuple)
- self.assertIsSubclass(Tuple[int, ...], typing.Sequence)
- self.assertIsSubclass(Tuple[int, ...], typing.Iterable)
-
- def test_fail_with_bare_union(self):
- with self.assertRaises(TypeError):
- List[Union]
- with self.assertRaises(TypeError):
- Tuple[Optional]
- with self.assertRaises(TypeError):
- ClassVar[ClassVar]
- with self.assertRaises(TypeError):
- List[ClassVar[int]]
-
- def test_fail_with_bare_generic(self):
- T = TypeVar('T')
- with self.assertRaises(TypeError):
- List[Generic]
- with self.assertRaises(TypeError):
- Tuple[Generic[T]]
- with self.assertRaises(TypeError):
- List[typing._Protocol]
-
- def test_type_erasure_special(self):
- T = TypeVar('T')
- # this is the only test that checks type caching
- self.clear_caches()
- class MyTup(Tuple[T, T]): ...
- self.assertIs(MyTup[int]().__class__, MyTup)
- self.assertIs(MyTup[int]().__orig_class__, MyTup[int])
- class MyCall(Callable[..., T]):
- def __call__(self): return None
- self.assertIs(MyCall[T]().__class__, MyCall)
- self.assertIs(MyCall[T]().__orig_class__, MyCall[T])
- class MyDict(typing.Dict[T, T]): ...
- self.assertIs(MyDict[int]().__class__, MyDict)
- self.assertIs(MyDict[int]().__orig_class__, MyDict[int])
- class MyDef(typing.DefaultDict[str, T]): ...
- self.assertIs(MyDef[int]().__class__, MyDef)
- self.assertIs(MyDef[int]().__orig_class__, MyDef[int])
-
- def test_all_repr_eq_any(self):
- objs = (getattr(typing, el) for el in typing.__all__)
- for obj in objs:
- self.assertNotEqual(repr(obj), '')
- self.assertEqual(obj, obj)
- if getattr(obj, '__parameters__', None) and len(obj.__parameters__) == 1:
- self.assertEqual(obj[Any].__args__, (Any,))
- if isinstance(obj, type):
- for base in obj.__mro__:
- self.assertNotEqual(repr(base), '')
- self.assertEqual(base, base)
-
- def test_substitution_helper(self):
- T = TypeVar('T')
- KT = TypeVar('KT')
- VT = TypeVar('VT')
- class Map(Generic[KT, VT]):
- def meth(self, k: KT, v: VT): ...
- StrMap = Map[str, T]
- obj = StrMap[int]()
-
- new_args = typing._subs_tree(obj.__orig_class__)
- new_annots = {k: typing._replace_arg(v, type(obj).__parameters__, new_args)
- for k, v in obj.meth.__annotations__.items()}
-
- self.assertEqual(new_annots, {'k': str, 'v': int})
-
- def test_pickle(self):
- global C # pickle wants to reference the class by name
- T = TypeVar('T')
-
- class B(Generic[T]):
- pass
-
- class C(B[int]):
- pass
-
- c = C()
- c.foo = 42
- c.bar = 'abc'
- for proto in range(pickle.HIGHEST_PROTOCOL + 1):
- z = pickle.dumps(c, proto)
- x = pickle.loads(z)
- self.assertEqual(x.foo, 42)
- self.assertEqual(x.bar, 'abc')
- self.assertEqual(x.__dict__, {'foo': 42, 'bar': 'abc'})
- simples = [Any, Union, Tuple, Callable, ClassVar, List, typing.Iterable]
- for s in simples:
- for proto in range(pickle.HIGHEST_PROTOCOL + 1):
- z = pickle.dumps(s, proto)
- x = pickle.loads(z)
- self.assertEqual(s, x)
-
- def test_copy_and_deepcopy(self):
- T = TypeVar('T')
- class Node(Generic[T]): ...
- things = [Union[T, int], Tuple[T, int], Callable[..., T], Callable[[int], int],
- Tuple[Any, Any], Node[T], Node[int], Node[Any], typing.Iterable[T],
- typing.Iterable[Any], typing.Iterable[int], typing.Dict[int, str],
- typing.Dict[T, Any], ClassVar[int], ClassVar[List[T]], Tuple['T', 'T'],
- Union['T', int], List['T'], typing.Mapping['T', int]]
- for t in things + [Any]:
- self.assertEqual(t, copy(t))
- self.assertEqual(t, deepcopy(t))
-
- def test_parameterized_slots(self):
- T = TypeVar('T')
- class C(Generic[T]):
- __slots__ = ('potato',)
-
- c = C()
- c_int = C[int]()
- self.assertEqual(C.__slots__, C[str].__slots__)
-
- c.potato = 0
- c_int.potato = 0
- with self.assertRaises(AttributeError):
- c.tomato = 0
- with self.assertRaises(AttributeError):
- c_int.tomato = 0
-
- def foo(x: C['C']): ...
- self.assertEqual(get_type_hints(foo, globals(), locals())['x'], C[C])
- self.assertEqual(get_type_hints(foo, globals(), locals())['x'].__slots__,
- C.__slots__)
- self.assertEqual(copy(C[int]), deepcopy(C[int]))
-
- def test_parameterized_slots_dict(self):
- T = TypeVar('T')
- class D(Generic[T]):
- __slots__ = {'banana': 42}
-
- d = D()
- d_int = D[int]()
- self.assertEqual(D.__slots__, D[str].__slots__)
-
- d.banana = 'yes'
- d_int.banana = 'yes'
- with self.assertRaises(AttributeError):
- d.foobar = 'no'
- with self.assertRaises(AttributeError):
- d_int.foobar = 'no'
-
- def test_errors(self):
- with self.assertRaises(TypeError):
- B = SimpleMapping[XK, Any]
-
- class C(Generic[B]):
- pass
-
- def test_repr_2(self):
- PY32 = sys.version_info[:2] < (3, 3)
-
- class C(Generic[T]):
- pass
-
- self.assertEqual(C.__module__, __name__)
- if not PY32:
- self.assertEqual(C.__qualname__,
- 'GenericTests.test_repr_2.<locals>.C')
- self.assertEqual(repr(C).split('.')[-1], 'C')
- X = C[int]
- self.assertEqual(X.__module__, __name__)
- if not PY32:
- self.assertTrue(X.__qualname__.endswith('.<locals>.C'))
- self.assertEqual(repr(X).split('.')[-1], 'C[int]')
-
- class Y(C[int]):
- pass
-
- self.assertEqual(Y.__module__, __name__)
- if not PY32:
- self.assertEqual(Y.__qualname__,
- 'GenericTests.test_repr_2.<locals>.Y')
- self.assertEqual(repr(Y).split('.')[-1], 'Y')
-
- def test_eq_1(self):
- self.assertEqual(Generic, Generic)
- self.assertEqual(Generic[T], Generic[T])
- self.assertNotEqual(Generic[KT], Generic[VT])
-
- def test_eq_2(self):
-
- class A(Generic[T]):
- pass
-
- class B(Generic[T]):
- pass
-
- self.assertEqual(A, A)
- self.assertNotEqual(A, B)
- self.assertEqual(A[T], A[T])
- self.assertNotEqual(A[T], B[T])
-
- def test_multiple_inheritance(self):
-
- class A(Generic[T, VT]):
- pass
-
- class B(Generic[KT, T]):
- pass
-
- class C(A[T, VT], Generic[VT, T, KT], B[KT, T]):
- pass
-
- self.assertEqual(C.__parameters__, (VT, T, KT))
-
- def test_nested(self):
-
- G = Generic
-
- class Visitor(G[T]):
-
- a = None
-
- def set(self, a: T):
- self.a = a
-
- def get(self):
- return self.a
-
- def visit(self) -> T:
- return self.a
-
- V = Visitor[typing.List[int]]
-
- class IntListVisitor(V):
-
- def append(self, x: int):
- self.a.append(x)
-
- a = IntListVisitor()
- a.set([])
- a.append(1)
- a.append(42)
- self.assertEqual(a.get(), [1, 42])
-
- def test_type_erasure(self):
- T = TypeVar('T')
-
- class Node(Generic[T]):
- def __init__(self, label: T,
- left: 'Node[T]' = None,
- right: 'Node[T]' = None):
- self.label = label # type: T
- self.left = left # type: Optional[Node[T]]
- self.right = right # type: Optional[Node[T]]
-
- def foo(x: T):
- a = Node(x)
- b = Node[T](x)
- c = Node[Any](x)
- self.assertIs(type(a), Node)
- self.assertIs(type(b), Node)
- self.assertIs(type(c), Node)
- self.assertEqual(a.label, x)
- self.assertEqual(b.label, x)
- self.assertEqual(c.label, x)
-
- foo(42)
-
- def test_implicit_any(self):
- T = TypeVar('T')
-
- class C(Generic[T]):
- pass
-
- class D(C):
- pass
-
- self.assertEqual(D.__parameters__, ())
-
- with self.assertRaises(Exception):
- D[int]
- with self.assertRaises(Exception):
- D[Any]
- with self.assertRaises(Exception):
- D[T]
-
-class ClassVarTests(BaseTestCase):
-
- def test_basics(self):
- with self.assertRaises(TypeError):
- ClassVar[1]
- with self.assertRaises(TypeError):
- ClassVar[int, str]
- with self.assertRaises(TypeError):
- ClassVar[int][str]
-
- def test_repr(self):
- self.assertEqual(repr(ClassVar), 'typing.ClassVar')
- cv = ClassVar[int]
- self.assertEqual(repr(cv), 'typing.ClassVar[int]')
- cv = ClassVar[Employee]
- self.assertEqual(repr(cv), 'typing.ClassVar[%s.Employee]' % __name__)
-
- def test_cannot_subclass(self):
- with self.assertRaises(TypeError):
- class C(type(ClassVar)):
- pass
- with self.assertRaises(TypeError):
- class C(type(ClassVar[int])):
- pass
-
- def test_cannot_init(self):
- with self.assertRaises(TypeError):
- ClassVar()
- with self.assertRaises(TypeError):
- type(ClassVar)()
- with self.assertRaises(TypeError):
- type(ClassVar[Optional[int]])()
-
- def test_no_isinstance(self):
- with self.assertRaises(TypeError):
- isinstance(1, ClassVar[int])
- with self.assertRaises(TypeError):
- issubclass(int, ClassVar)
-
-
-class CastTests(BaseTestCase):
-
- def test_basics(self):
- self.assertEqual(cast(int, 42), 42)
- self.assertEqual(cast(float, 42), 42)
- self.assertIs(type(cast(float, 42)), int)
- self.assertEqual(cast(Any, 42), 42)
- self.assertEqual(cast(list, 42), 42)
- self.assertEqual(cast(Union[str, float], 42), 42)
- self.assertEqual(cast(AnyStr, 42), 42)
- self.assertEqual(cast(None, 42), 42)
-
- def test_errors(self):
- # Bogus calls are not expected to fail.
- cast(42, 42)
- cast('hello', 42)
-
-
-class ForwardRefTests(BaseTestCase):
-
- def test_basics(self):
-
- class Node(Generic[T]):
-
- def __init__(self, label: T):
- self.label = label
- self.left = self.right = None
-
- def add_both(self,
- left: 'Optional[Node[T]]',
- right: 'Node[T]' = None,
- stuff: int = None,
- blah=None):
- self.left = left
- self.right = right
-
- def add_left(self, node: Optional['Node[T]']):
- self.add_both(node, None)
-
- def add_right(self, node: 'Node[T]' = None):
- self.add_both(None, node)
-
- t = Node[int]
- both_hints = get_type_hints(t.add_both, globals(), locals())
- self.assertEqual(both_hints['left'], Optional[Node[T]])
- self.assertEqual(both_hints['right'], Optional[Node[T]])
- self.assertEqual(both_hints['left'], both_hints['right'])
- self.assertEqual(both_hints['stuff'], Optional[int])
- self.assertNotIn('blah', both_hints)
-
- left_hints = get_type_hints(t.add_left, globals(), locals())
- self.assertEqual(left_hints['node'], Optional[Node[T]])
-
- right_hints = get_type_hints(t.add_right, globals(), locals())
- self.assertEqual(right_hints['node'], Optional[Node[T]])
-
- def test_forwardref_instance_type_error(self):
- fr = typing._ForwardRef('int')
- with self.assertRaises(TypeError):
- isinstance(42, fr)
-
- def test_union_forward(self):
-
- def foo(a: Union['T']):
- pass
-
- self.assertEqual(get_type_hints(foo, globals(), locals()),
- {'a': Union[T]})
-
- def test_tuple_forward(self):
-
- def foo(a: Tuple['T']):
- pass
-
- self.assertEqual(get_type_hints(foo, globals(), locals()),
- {'a': Tuple[T]})
-
- def test_callable_forward(self):
-
- def foo(a: Callable[['T'], 'T']):
- pass
-
- self.assertEqual(get_type_hints(foo, globals(), locals()),
- {'a': Callable[[T], T]})
-
- def test_callable_with_ellipsis_forward(self):
-
- def foo(a: 'Callable[..., T]'):
- pass
-
- self.assertEqual(get_type_hints(foo, globals(), locals()),
- {'a': Callable[..., T]})
-
- def test_syntax_error(self):
-
- with self.assertRaises(SyntaxError):
- Generic['/T']
-
- def test_delayed_syntax_error(self):
-
- def foo(a: 'Node[T'):
- pass
-
- with self.assertRaises(SyntaxError):
- get_type_hints(foo)
-
- def test_type_error(self):
-
- def foo(a: Tuple['42']):
- pass
-
- with self.assertRaises(TypeError):
- get_type_hints(foo)
-
- def test_name_error(self):
-
- def foo(a: 'Noode[T]'):
- pass
-
- with self.assertRaises(NameError):
- get_type_hints(foo, locals())
-
- def test_no_type_check(self):
-
- @no_type_check
- def foo(a: 'whatevers') -> {}:
- pass
-
- th = get_type_hints(foo)
- self.assertEqual(th, {})
-
- def test_no_type_check_class(self):
-
- @no_type_check
- class C:
- def foo(a: 'whatevers') -> {}:
- pass
-
- cth = get_type_hints(C.foo)
- self.assertEqual(cth, {})
- ith = get_type_hints(C().foo)
- self.assertEqual(ith, {})
-
- def test_meta_no_type_check(self):
-
- @no_type_check_decorator
- def magic_decorator(deco):
- return deco
-
- self.assertEqual(magic_decorator.__name__, 'magic_decorator')
-
- @magic_decorator
- def foo(a: 'whatevers') -> {}:
- pass
-
- @magic_decorator
- class C:
- def foo(a: 'whatevers') -> {}:
- pass
-
- self.assertEqual(foo.__name__, 'foo')
- th = get_type_hints(foo)
- self.assertEqual(th, {})
- cth = get_type_hints(C.foo)
- self.assertEqual(cth, {})
- ith = get_type_hints(C().foo)
- self.assertEqual(ith, {})
-
- def test_default_globals(self):
- code = ("class C:\n"
- " def foo(self, a: 'C') -> 'D': pass\n"
- "class D:\n"
- " def bar(self, b: 'D') -> C: pass\n"
- )
- ns = {}
- exec(code, ns)
- hints = get_type_hints(ns['C'].foo)
- self.assertEqual(hints, {'a': ns['C'], 'return': ns['D']})
-
-
-class OverloadTests(BaseTestCase):
-
- def test_overload_exists(self):
- from typing import overload
-
- def test_overload_fails(self):
- from typing import overload
-
- with self.assertRaises(RuntimeError):
-
- @overload
- def blah():
- pass
-
- blah()
-
- def test_overload_succeeds(self):
- from typing import overload
-
- @overload
- def blah():
- pass
-
- def blah():
- pass
-
- blah()
-
-
-ASYNCIO = sys.version_info[:2] >= (3, 5)
-
-ASYNCIO_TESTS = """
-import asyncio
-
-T_a = TypeVar('T_a')
-
-class AwaitableWrapper(typing.Awaitable[T_a]):
-
- def __init__(self, value):
- self.value = value
-
- def __await__(self) -> typing.Iterator[T_a]:
- yield
- return self.value
-
-class AsyncIteratorWrapper(typing.AsyncIterator[T_a]):
-
- def __init__(self, value: typing.Iterable[T_a]):
- self.value = value
-
- def __aiter__(self) -> typing.AsyncIterator[T_a]:
- return self
-
- @asyncio.coroutine
- def __anext__(self) -> T_a:
- data = yield from self.value
- if data:
- return data
- else:
- raise StopAsyncIteration
-"""
-
-if ASYNCIO:
- try:
- exec(ASYNCIO_TESTS)
- except ImportError:
- ASYNCIO = False
-
-PY36 = sys.version_info[:2] >= (3, 6)
-
-PY36_TESTS = """
-from test import ann_module, ann_module2, ann_module3
-
-class A:
- y: float
-class B(A):
- x: ClassVar[Optional['B']] = None
- y: int
-class CSub(B):
- z: ClassVar['CSub'] = B()
-class G(Generic[T]):
- lst: ClassVar[List[T]] = []
-
-class CoolEmployee(NamedTuple):
- name: str
- cool: int
-"""
-
-if PY36:
- exec(PY36_TESTS)
-
-gth = get_type_hints
-
-class GetTypeHintTests(BaseTestCase):
- def test_get_type_hints_from_various_objects(self):
- # Invalid objects should fail with TypeError (not AttributeError etc.).
- with self.assertRaises(TypeError):
- gth(123)
- with self.assertRaises(TypeError):
- gth('abc')
- with self.assertRaises(TypeError):
- gth(None)
-
- @skipUnless(PY36, 'Python 3.6 required')
- def test_get_type_hints_modules(self):
- self.assertEqual(gth(ann_module), {1: 2, 'f': Tuple[int, int], 'x': int, 'y': str})
- self.assertEqual(gth(ann_module2), {})
- self.assertEqual(gth(ann_module3), {})
-
- @skipUnless(PY36, 'Python 3.6 required')
- def test_get_type_hints_classes(self):
- self.assertEqual(gth(ann_module.C, ann_module.__dict__),
- {'y': Optional[ann_module.C]})
- self.assertIsInstance(gth(ann_module.j_class), dict)
- self.assertEqual(gth(ann_module.M), {'123': 123, 'o': type})
- self.assertEqual(gth(ann_module.D),
- {'j': str, 'k': str, 'y': Optional[ann_module.C]})
- self.assertEqual(gth(ann_module.Y), {'z': int})
- self.assertEqual(gth(ann_module.h_class),
- {'y': Optional[ann_module.C]})
- self.assertEqual(gth(ann_module.S), {'x': str, 'y': str})
- self.assertEqual(gth(ann_module.foo), {'x': int})
-
- @skipUnless(PY36, 'Python 3.6 required')
- def test_respect_no_type_check(self):
- @no_type_check
- class NoTpCheck:
- class Inn:
- def __init__(self, x: 'not a type'): ...
- self.assertTrue(NoTpCheck.__no_type_check__)
- self.assertTrue(NoTpCheck.Inn.__init__.__no_type_check__)
- self.assertEqual(gth(ann_module2.NTC.meth), {})
- class ABase(Generic[T]):
- def meth(x: int): ...
- @no_type_check
- class Der(ABase): ...
- self.assertEqual(gth(ABase.meth), {'x': int})
-
- def test_get_type_hints_for_builtins(self):
- # Should not fail for built-in classes and functions.
- self.assertEqual(gth(int), {})
- self.assertEqual(gth(type), {})
- self.assertEqual(gth(dir), {})
- self.assertEqual(gth(len), {})
-
- def test_previous_behavior(self):
- def testf(x, y): ...
- testf.__annotations__['x'] = 'int'
- self.assertEqual(gth(testf), {'x': int})
-
- def test_get_type_hints_for_object_with_annotations(self):
- class A: ...
- class B: ...
- b = B()
- b.__annotations__ = {'x': 'A'}
- self.assertEqual(gth(b, locals()), {'x': A})
-
- @skipUnless(PY36, 'Python 3.6 required')
- def test_get_type_hints_ClassVar(self):
- self.assertEqual(gth(ann_module2.CV, ann_module2.__dict__),
- {'var': typing.ClassVar[ann_module2.CV]})
- self.assertEqual(gth(B, globals()),
- {'y': int, 'x': ClassVar[Optional[B]]})
- self.assertEqual(gth(CSub, globals()),
- {'z': ClassVar[CSub], 'y': int, 'x': ClassVar[Optional[B]]})
- self.assertEqual(gth(G), {'lst': ClassVar[List[T]]})
-
-
-class CollectionsAbcTests(BaseTestCase):
-
- def test_hashable(self):
- self.assertIsInstance(42, typing.Hashable)
- self.assertNotIsInstance([], typing.Hashable)
-
- def test_iterable(self):
- self.assertIsInstance([], typing.Iterable)
- # Due to ABC caching, the second time takes a separate code
- # path and could fail. So call this a few times.
- self.assertIsInstance([], typing.Iterable)
- self.assertIsInstance([], typing.Iterable)
- self.assertNotIsInstance(42, typing.Iterable)
- # Just in case, also test issubclass() a few times.
- self.assertIsSubclass(list, typing.Iterable)
- self.assertIsSubclass(list, typing.Iterable)
-
- def test_iterator(self):
- it = iter([])
- self.assertIsInstance(it, typing.Iterator)
- self.assertNotIsInstance(42, typing.Iterator)
-
- @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required')
- def test_awaitable(self):
- ns = {}
- exec(
- "async def foo() -> typing.Awaitable[int]:\n"
- " return await AwaitableWrapper(42)\n",
- globals(), ns)
- foo = ns['foo']
- g = foo()
- self.assertIsInstance(g, typing.Awaitable)
- self.assertNotIsInstance(foo, typing.Awaitable)
- g.send(None) # Run foo() till completion, to avoid warning.
-
- @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required')
- def test_coroutine(self):
- ns = {}
- exec(
- "async def foo():\n"
- " return\n",
- globals(), ns)
- foo = ns['foo']
- g = foo()
- self.assertIsInstance(g, typing.Coroutine)
- with self.assertRaises(TypeError):
- isinstance(g, typing.Coroutine[int])
- self.assertNotIsInstance(foo, typing.Coroutine)
- try:
- g.send(None)
- except StopIteration:
- pass
-
- @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required')
- def test_async_iterable(self):
- base_it = range(10) # type: Iterator[int]
- it = AsyncIteratorWrapper(base_it)
- self.assertIsInstance(it, typing.AsyncIterable)
- self.assertIsInstance(it, typing.AsyncIterable)
- self.assertNotIsInstance(42, typing.AsyncIterable)
-
- @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required')
- def test_async_iterator(self):
- base_it = range(10) # type: Iterator[int]
- it = AsyncIteratorWrapper(base_it)
- self.assertIsInstance(it, typing.AsyncIterator)
- self.assertNotIsInstance(42, typing.AsyncIterator)
-
- def test_sized(self):
- self.assertIsInstance([], typing.Sized)
- self.assertNotIsInstance(42, typing.Sized)
-
- def test_container(self):
- self.assertIsInstance([], typing.Container)
- self.assertNotIsInstance(42, typing.Container)
-
- def test_collection(self):
- if hasattr(typing, 'Collection'):
- self.assertIsInstance(tuple(), typing.Collection)
- self.assertIsInstance(frozenset(), typing.Collection)
- self.assertIsSubclass(dict, typing.Collection)
- self.assertNotIsInstance(42, typing.Collection)
-
- def test_abstractset(self):
- self.assertIsInstance(set(), typing.AbstractSet)
- self.assertNotIsInstance(42, typing.AbstractSet)
-
- def test_mutableset(self):
- self.assertIsInstance(set(), typing.MutableSet)
- self.assertNotIsInstance(frozenset(), typing.MutableSet)
-
- def test_mapping(self):
- self.assertIsInstance({}, typing.Mapping)
- self.assertNotIsInstance(42, typing.Mapping)
-
- def test_mutablemapping(self):
- self.assertIsInstance({}, typing.MutableMapping)
- self.assertNotIsInstance(42, typing.MutableMapping)
-
- def test_sequence(self):
- self.assertIsInstance([], typing.Sequence)
- self.assertNotIsInstance(42, typing.Sequence)
-
- def test_mutablesequence(self):
- self.assertIsInstance([], typing.MutableSequence)
- self.assertNotIsInstance((), typing.MutableSequence)
-
- def test_bytestring(self):
- self.assertIsInstance(b'', typing.ByteString)
- self.assertIsInstance(bytearray(b''), typing.ByteString)
-
- def test_list(self):
- self.assertIsSubclass(list, typing.List)
-
- def test_set(self):
- self.assertIsSubclass(set, typing.Set)
- self.assertNotIsSubclass(frozenset, typing.Set)
-
- def test_frozenset(self):
- self.assertIsSubclass(frozenset, typing.FrozenSet)
- self.assertNotIsSubclass(set, typing.FrozenSet)
-
- def test_dict(self):
- self.assertIsSubclass(dict, typing.Dict)
-
- def test_no_list_instantiation(self):
- with self.assertRaises(TypeError):
- typing.List()
- with self.assertRaises(TypeError):
- typing.List[T]()
- with self.assertRaises(TypeError):
- typing.List[int]()
-
- def test_list_subclass(self):
-
- class MyList(typing.List[int]):
- pass
-
- a = MyList()
- self.assertIsInstance(a, MyList)
- self.assertIsInstance(a, typing.Sequence)
-
- self.assertIsSubclass(MyList, list)
- self.assertNotIsSubclass(list, MyList)
-
- def test_no_dict_instantiation(self):
- with self.assertRaises(TypeError):
- typing.Dict()
- with self.assertRaises(TypeError):
- typing.Dict[KT, VT]()
- with self.assertRaises(TypeError):
- typing.Dict[str, int]()
-
- def test_dict_subclass(self):
-
- class MyDict(typing.Dict[str, int]):
- pass
-
- d = MyDict()
- self.assertIsInstance(d, MyDict)
- self.assertIsInstance(d, typing.MutableMapping)
-
- self.assertIsSubclass(MyDict, dict)
- self.assertNotIsSubclass(dict, MyDict)
-
- def test_no_defaultdict_instantiation(self):
- with self.assertRaises(TypeError):
- typing.DefaultDict()
- with self.assertRaises(TypeError):
- typing.DefaultDict[KT, VT]()
- with self.assertRaises(TypeError):
- typing.DefaultDict[str, int]()
-
- def test_defaultdict_subclass(self):
-
- class MyDefDict(typing.DefaultDict[str, int]):
- pass
-
- dd = MyDefDict()
- self.assertIsInstance(dd, MyDefDict)
-
- self.assertIsSubclass(MyDefDict, collections.defaultdict)
- self.assertNotIsSubclass(collections.defaultdict, MyDefDict)
-
- def test_no_set_instantiation(self):
- with self.assertRaises(TypeError):
- typing.Set()
- with self.assertRaises(TypeError):
- typing.Set[T]()
- with self.assertRaises(TypeError):
- typing.Set[int]()
-
- def test_set_subclass_instantiation(self):
-
- class MySet(typing.Set[int]):
- pass
-
- d = MySet()
- self.assertIsInstance(d, MySet)
-
- def test_no_frozenset_instantiation(self):
- with self.assertRaises(TypeError):
- typing.FrozenSet()
- with self.assertRaises(TypeError):
- typing.FrozenSet[T]()
- with self.assertRaises(TypeError):
- typing.FrozenSet[int]()
-
- def test_frozenset_subclass_instantiation(self):
-
- class MyFrozenSet(typing.FrozenSet[int]):
- pass
-
- d = MyFrozenSet()
- self.assertIsInstance(d, MyFrozenSet)
-
- def test_no_tuple_instantiation(self):
- with self.assertRaises(TypeError):
- Tuple()
- with self.assertRaises(TypeError):
- Tuple[T]()
- with self.assertRaises(TypeError):
- Tuple[int]()
-
- def test_generator(self):
- def foo():
- yield 42
- g = foo()
- self.assertIsSubclass(type(g), typing.Generator)
-
- def test_no_generator_instantiation(self):
- with self.assertRaises(TypeError):
- typing.Generator()
- with self.assertRaises(TypeError):
- typing.Generator[T, T, T]()
- with self.assertRaises(TypeError):
- typing.Generator[int, int, int]()
-
- def test_subclassing(self):
-
- class MMA(typing.MutableMapping):
- pass
-
- with self.assertRaises(TypeError): # It's abstract
- MMA()
-
- class MMC(MMA):
- def __getitem__(self, k):
- return None
- def __setitem__(self, k, v):
- pass
- def __delitem__(self, k):
- pass
- def __iter__(self):
- return iter(())
- def __len__(self):
- return 0
-
- self.assertEqual(len(MMC()), 0)
- assert callable(MMC.update)
- self.assertIsInstance(MMC(), typing.Mapping)
-
- class MMB(typing.MutableMapping[KT, VT]):
- def __getitem__(self, k):
- return None
- def __setitem__(self, k, v):
- pass
- def __delitem__(self, k):
- pass
- def __iter__(self):
- return iter(())
- def __len__(self):
- return 0
-
- self.assertEqual(len(MMB()), 0)
- self.assertEqual(len(MMB[str, str]()), 0)
- self.assertEqual(len(MMB[KT, VT]()), 0)
-
- self.assertNotIsSubclass(dict, MMA)
- self.assertNotIsSubclass(dict, MMB)
-
- self.assertIsSubclass(MMA, typing.Mapping)
- self.assertIsSubclass(MMB, typing.Mapping)
- self.assertIsSubclass(MMC, typing.Mapping)
-
- self.assertIsInstance(MMB[KT, VT](), typing.Mapping)
- self.assertIsInstance(MMB[KT, VT](), collections.Mapping)
-
- self.assertIsSubclass(MMA, collections.Mapping)
- self.assertIsSubclass(MMB, collections.Mapping)
- self.assertIsSubclass(MMC, collections.Mapping)
-
- self.assertIsSubclass(MMB[str, str], typing.Mapping)
- self.assertIsSubclass(MMC, MMA)
-
- class I(typing.Iterable): ...
- self.assertNotIsSubclass(list, I)
-
- class G(typing.Generator[int, int, int]): ...
- def g(): yield 0
- self.assertIsSubclass(G, typing.Generator)
- self.assertIsSubclass(G, typing.Iterable)
- if hasattr(collections, 'Generator'):
- self.assertIsSubclass(G, collections.Generator)
- self.assertIsSubclass(G, collections.Iterable)
- self.assertNotIsSubclass(type(g), G)
-
- def test_subclassing_subclasshook(self):
-
- class Base(typing.Iterable):
- @classmethod
- def __subclasshook__(cls, other):
- if other.__name__ == 'Foo':
- return True
- else:
- return False
-
- class C(Base): ...
- class Foo: ...
- class Bar: ...
- self.assertIsSubclass(Foo, Base)
- self.assertIsSubclass(Foo, C)
- self.assertNotIsSubclass(Bar, C)
-
- def test_subclassing_register(self):
-
- class A(typing.Container): ...
- class B(A): ...
-
- class C: ...
- A.register(C)
- self.assertIsSubclass(C, A)
- self.assertNotIsSubclass(C, B)
-
- class D: ...
- B.register(D)
- self.assertIsSubclass(D, A)
- self.assertIsSubclass(D, B)
-
- class M(): ...
- collections.MutableMapping.register(M)
- self.assertIsSubclass(M, typing.Mapping)
-
- def test_collections_as_base(self):
-
- class M(collections.Mapping): ...
- self.assertIsSubclass(M, typing.Mapping)
- self.assertIsSubclass(M, typing.Iterable)
-
- class S(collections.MutableSequence): ...
- self.assertIsSubclass(S, typing.MutableSequence)
- self.assertIsSubclass(S, typing.Iterable)
-
- class I(collections.Iterable): ...
- self.assertIsSubclass(I, typing.Iterable)
-
- class A(collections.Mapping, metaclass=abc.ABCMeta): ...
- class B: ...
- A.register(B)
- self.assertIsSubclass(B, typing.Mapping)
-
-
-class OtherABCTests(BaseTestCase):
-
- @skipUnless(hasattr(typing, 'ContextManager'),
- 'requires typing.ContextManager')
- def test_contextmanager(self):
- @contextlib.contextmanager
- def manager():
- yield 42
-
- cm = manager()
- self.assertIsInstance(cm, typing.ContextManager)
- self.assertNotIsInstance(42, typing.ContextManager)
-
-
-class TypeTests(BaseTestCase):
-
- def test_type_basic(self):
-
- class User: pass
- class BasicUser(User): pass
- class ProUser(User): pass
-
- def new_user(user_class: Type[User]) -> User:
- return user_class()
-
- joe = new_user(BasicUser)
-
- def test_type_typevar(self):
-
- class User: pass
- class BasicUser(User): pass
- class ProUser(User): pass
-
- U = TypeVar('U', bound=User)
-
- def new_user(user_class: Type[U]) -> U:
- return user_class()
-
- joe = new_user(BasicUser)
-
- def test_type_optional(self):
- A = Optional[Type[BaseException]]
-
- def foo(a: A) -> Optional[BaseException]:
- if a is None:
- return None
- else:
- return a()
-
- assert isinstance(foo(KeyboardInterrupt), KeyboardInterrupt)
- assert foo(None) is None
-
-
-class NewTypeTests(BaseTestCase):
-
- def test_basic(self):
- UserId = NewType('UserId', int)
- UserName = NewType('UserName', str)
- self.assertIsInstance(UserId(5), int)
- self.assertIsInstance(UserName('Joe'), str)
- self.assertEqual(UserId(5) + 1, 6)
-
- def test_errors(self):
- UserId = NewType('UserId', int)
- UserName = NewType('UserName', str)
- with self.assertRaises(TypeError):
- issubclass(UserId, int)
- with self.assertRaises(TypeError):
- class D(UserName):
- pass
-
-
-class NamedTupleTests(BaseTestCase):
-
- def test_basics(self):
- Emp = NamedTuple('Emp', [('name', str), ('id', int)])
- self.assertIsSubclass(Emp, tuple)
- joe = Emp('Joe', 42)
- jim = Emp(name='Jim', id=1)
- self.assertIsInstance(joe, Emp)
- self.assertIsInstance(joe, tuple)
- self.assertEqual(joe.name, 'Joe')
- self.assertEqual(joe.id, 42)
- self.assertEqual(jim.name, 'Jim')
- self.assertEqual(jim.id, 1)
- self.assertEqual(Emp.__name__, 'Emp')
- self.assertEqual(Emp._fields, ('name', 'id'))
- self.assertEqual(Emp._field_types, dict(name=str, id=int))
-
- @skipUnless(PY36, 'Python 3.6 required')
- def test_annotation_usage(self):
- tim = CoolEmployee('Tim', 9000)
- self.assertIsInstance(tim, CoolEmployee)
- self.assertIsInstance(tim, tuple)
- self.assertEqual(tim.name, 'Tim')
- self.assertEqual(tim.cool, 9000)
- self.assertEqual(CoolEmployee.__name__, 'CoolEmployee')
- self.assertEqual(CoolEmployee._fields, ('name', 'cool'))
- self.assertEqual(CoolEmployee._field_types, dict(name=str, cool=int))
-
- @skipUnless(PY36, 'Python 3.6 required')
- def test_namedtuple_keyword_usage(self):
- LocalEmployee = NamedTuple("LocalEmployee", name=str, age=int)
- nick = LocalEmployee('Nick', 25)
- self.assertIsInstance(nick, tuple)
- self.assertEqual(nick.name, 'Nick')
- self.assertEqual(LocalEmployee.__name__, 'LocalEmployee')
- self.assertEqual(LocalEmployee._fields, ('name', 'age'))
- self.assertEqual(LocalEmployee._field_types, dict(name=str, age=int))
- with self.assertRaises(TypeError):
- NamedTuple('Name', [('x', int)], y=str)
- with self.assertRaises(TypeError):
- NamedTuple('Name', x=1, y='a')
-
- def test_pickle(self):
- global Emp # pickle wants to reference the class by name
- Emp = NamedTuple('Emp', [('name', str), ('id', int)])
- jane = Emp('jane', 37)
- for proto in range(pickle.HIGHEST_PROTOCOL + 1):
- z = pickle.dumps(jane, proto)
- jane2 = pickle.loads(z)
- self.assertEqual(jane2, jane)
-
-
-class IOTests(BaseTestCase):
-
- def test_io(self):
-
- def stuff(a: IO) -> AnyStr:
- return a.readline()
-
- a = stuff.__annotations__['a']
- self.assertEqual(a.__parameters__, (AnyStr,))
-
- def test_textio(self):
-
- def stuff(a: TextIO) -> str:
- return a.readline()
-
- a = stuff.__annotations__['a']
- self.assertEqual(a.__parameters__, ())
-
- def test_binaryio(self):
-
- def stuff(a: BinaryIO) -> bytes:
- return a.readline()
-
- a = stuff.__annotations__['a']
- self.assertEqual(a.__parameters__, ())
-
- def test_io_submodule(self):
- from typing.io import IO, TextIO, BinaryIO, __all__, __name__
- self.assertIs(IO, typing.IO)
- self.assertIs(TextIO, typing.TextIO)
- self.assertIs(BinaryIO, typing.BinaryIO)
- self.assertEqual(set(__all__), set(['IO', 'TextIO', 'BinaryIO']))
- self.assertEqual(__name__, 'typing.io')
-
-
-class RETests(BaseTestCase):
- # Much of this is really testing _TypeAlias.
-
- def test_basics(self):
- pat = re.compile('[a-z]+', re.I)
- self.assertIsSubclass(pat.__class__, Pattern)
- self.assertIsSubclass(type(pat), Pattern)
- self.assertIsInstance(pat, Pattern)
-
- mat = pat.search('12345abcde.....')
- self.assertIsSubclass(mat.__class__, Match)
- self.assertIsSubclass(type(mat), Match)
- self.assertIsInstance(mat, Match)
-
- # these should just work
- p = Pattern[Union[str, bytes]]
- m = Match[Union[bytes, str]]
-
- def test_errors(self):
- with self.assertRaises(TypeError):
- # Doesn't fit AnyStr.
- Pattern[int]
- with self.assertRaises(TypeError):
- # Can't change type vars?
- Match[T]
- m = Match[Union[str, bytes]]
- with self.assertRaises(TypeError):
- # Too complicated?
- m[str]
- with self.assertRaises(TypeError):
- # We don't support isinstance().
- isinstance(42, Pattern[str])
-
- def test_repr(self):
- self.assertEqual(repr(Pattern), 'Pattern[~AnyStr]')
- self.assertEqual(repr(Pattern[str]), 'Pattern[str]')
- self.assertEqual(repr(Pattern[bytes]), 'Pattern[bytes]')
- self.assertEqual(repr(Match), 'Match[~AnyStr]')
- self.assertEqual(repr(Match[str]), 'Match[str]')
- self.assertEqual(repr(Match[bytes]), 'Match[bytes]')
-
- def test_re_submodule(self):
- from typing.re import Match, Pattern, __all__, __name__
- self.assertIs(Match, typing.Match)
- self.assertIs(Pattern, typing.Pattern)
- self.assertEqual(set(__all__), set(['Match', 'Pattern']))
- self.assertEqual(__name__, 'typing.re')
-
- def test_cannot_subclass(self):
- with self.assertRaises(TypeError) as ex:
-
- class A(typing.Match):
- pass
-
- self.assertEqual(str(ex.exception),
- "Cannot subclass typing._TypeAlias")
-
-
-class AllTests(BaseTestCase):
- """Tests for __all__."""
-
- def test_all(self):
- from typing import __all__ as a
- # Just spot-check the first and last of every category.
- self.assertIn('AbstractSet', a)
- self.assertIn('ValuesView', a)
- self.assertIn('cast', a)
- self.assertIn('overload', a)
- if hasattr(contextlib, 'AbstractContextManager'):
- self.assertIn('ContextManager', a)
- # Check that io and re are not exported.
- self.assertNotIn('io', a)
- self.assertNotIn('re', a)
- # Spot-check that stdlib modules aren't exported.
- self.assertNotIn('os', a)
- self.assertNotIn('sys', a)
- # Check that Text is defined.
- self.assertIn('Text', a)
-
-
-if __name__ == '__main__':
- main()
diff --git a/lib-typing/3.2/typing.py b/lib-typing/3.2/typing.py
deleted file mode 100644
index 34845b7..0000000
--- a/lib-typing/3.2/typing.py
+++ /dev/null
@@ -1,2160 +0,0 @@
-import abc
-from abc import abstractmethod, abstractproperty
-import collections
-import contextlib
-import functools
-import re as stdlib_re # Avoid confusion with the re we export.
-import sys
-import types
-try:
- import collections.abc as collections_abc
-except ImportError:
- import collections as collections_abc # Fallback for PY3.2.
-
-
-# Please keep __all__ alphabetized within each category.
-__all__ = [
- # Super-special typing primitives.
- 'Any',
- 'Callable',
- 'ClassVar',
- 'Generic',
- 'Optional',
- 'Tuple',
- 'Type',
- 'TypeVar',
- 'Union',
-
- # ABCs (from collections.abc).
- 'AbstractSet', # collections.abc.Set.
- 'ByteString',
- 'Container',
- 'Hashable',
- 'ItemsView',
- 'Iterable',
- 'Iterator',
- 'KeysView',
- 'Mapping',
- 'MappingView',
- 'MutableMapping',
- 'MutableSequence',
- 'MutableSet',
- 'Sequence',
- 'Sized',
- 'ValuesView',
- # The following are added depending on presence
- # of their non-generic counterparts in stdlib:
- # Awaitable,
- # AsyncIterator,
- # AsyncIterable,
- # Coroutine,
- # Collection,
- # ContextManager
-
- # Structural checks, a.k.a. protocols.
- 'Reversible',
- 'SupportsAbs',
- 'SupportsFloat',
- 'SupportsInt',
- 'SupportsRound',
-
- # Concrete collection types.
- 'Dict',
- 'DefaultDict',
- 'List',
- 'Set',
- 'FrozenSet',
- 'NamedTuple', # Not really a type.
- 'Generator',
-
- # One-off things.
- 'AnyStr',
- 'cast',
- 'get_type_hints',
- 'NewType',
- 'no_type_check',
- 'no_type_check_decorator',
- 'overload',
- 'Text',
- 'TYPE_CHECKING',
-]
-
-# The pseudo-submodules 're' and 'io' are part of the public
-# namespace, but excluded from __all__ because they might stomp on
-# legitimate imports of those modules.
-
-
-def _qualname(x):
- if sys.version_info[:2] >= (3, 3):
- return x.__qualname__
- else:
- # Fall back to just name.
- return x.__name__
-
-
-def _trim_name(nm):
- if nm.startswith('_') and nm not in ('_TypeAlias',
- '_ForwardRef', '_TypingBase', '_FinalTypingBase'):
- nm = nm[1:]
- return nm
-
-
-class TypingMeta(type):
- """Metaclass for most types defined in typing module
- (not a part of public API).
-
- This overrides __new__() to require an extra keyword parameter
- '_root', which serves as a guard against naive subclassing of the
- typing classes. Any legitimate class defined using a metaclass
- derived from TypingMeta must pass _root=True.
-
- This also defines a dummy constructor (all the work for most typing
- constructs is done in __new__) and a nicer repr().
- """
-
- _is_protocol = False
-
- def __new__(cls, name, bases, namespace, *, _root=False):
- if not _root:
- raise TypeError("Cannot subclass %s" %
- (', '.join(map(_type_repr, bases)) or '()'))
- return super().__new__(cls, name, bases, namespace)
-
- def __init__(self, *args, **kwds):
- pass
-
- def _eval_type(self, globalns, localns):
- """Override this in subclasses to interpret forward references.
-
- For example, List['C'] is internally stored as
- List[_ForwardRef('C')], which should evaluate to List[C],
- where C is an object found in globalns or localns (searching
- localns first, of course).
- """
- return self
-
- def _get_type_vars(self, tvars):
- pass
-
- def __repr__(self):
- qname = _trim_name(_qualname(self))
- return '%s.%s' % (self.__module__, qname)
-
-
-class _TypingBase(metaclass=TypingMeta, _root=True):
- """Internal indicator of special typing constructs."""
-
- __slots__ = ()
-
- def __init__(self, *args, **kwds):
- pass
-
- def __new__(cls, *args, **kwds):
- """Constructor.
-
- This only exists to give a better error message in case
- someone tries to subclass a special typing object (not a good idea).
- """
- if (len(args) == 3 and
- isinstance(args[0], str) and
- isinstance(args[1], tuple)):
- # Close enough.
- raise TypeError("Cannot subclass %r" % cls)
- return super().__new__(cls)
-
- # Things that are not classes also need these.
- def _eval_type(self, globalns, localns):
- return self
-
- def _get_type_vars(self, tvars):
- pass
-
- def __repr__(self):
- cls = type(self)
- qname = _trim_name(_qualname(cls))
- return '%s.%s' % (cls.__module__, qname)
-
- def __call__(self, *args, **kwds):
- raise TypeError("Cannot instantiate %r" % type(self))
-
-
-class _FinalTypingBase(_TypingBase, _root=True):
- """Internal mix-in class to prevent instantiation.
-
- Prevents instantiation unless _root=True is given in class call.
- It is used to create pseudo-singleton instances Any, Union, Optional, etc.
- """
-
- __slots__ = ()
-
- def __new__(cls, *args, _root=False, **kwds):
- self = super().__new__(cls, *args, **kwds)
- if _root is True:
- return self
- raise TypeError("Cannot instantiate %r" % cls)
-
- def __reduce__(self):
- return _trim_name(type(self).__name__)
-
-
-class _ForwardRef(_TypingBase, _root=True):
- """Internal wrapper to hold a forward reference."""
-
- __slots__ = ('__forward_arg__', '__forward_code__',
- '__forward_evaluated__', '__forward_value__')
-
- def __init__(self, arg):
- super().__init__(arg)
- if not isinstance(arg, str):
- raise TypeError('Forward reference must be a string -- got %r' % (arg,))
- try:
- code = compile(arg, '<string>', 'eval')
- except SyntaxError:
- raise SyntaxError('Forward reference must be an expression -- got %r' %
- (arg,))
- self.__forward_arg__ = arg
- self.__forward_code__ = code
- self.__forward_evaluated__ = False
- self.__forward_value__ = None
-
- def _eval_type(self, globalns, localns):
- if not self.__forward_evaluated__ or localns is not globalns:
- if globalns is None and localns is None:
- globalns = localns = {}
- elif globalns is None:
- globalns = localns
- elif localns is None:
- localns = globalns
- self.__forward_value__ = _type_check(
- eval(self.__forward_code__, globalns, localns),
- "Forward references must evaluate to types.")
- self.__forward_evaluated__ = True
- return self.__forward_value__
-
- def __eq__(self, other):
- if not isinstance(other, _ForwardRef):
- return NotImplemented
- return (self.__forward_arg__ == other.__forward_arg__ and
- self.__forward_value__ == other.__forward_value__)
-
- def __hash__(self):
- return hash((self.__forward_arg__, self.__forward_value__))
-
- def __instancecheck__(self, obj):
- raise TypeError("Forward references cannot be used with isinstance().")
-
- def __subclasscheck__(self, cls):
- raise TypeError("Forward references cannot be used with issubclass().")
-
- def __repr__(self):
- return '_ForwardRef(%r)' % (self.__forward_arg__,)
-
-
-class _TypeAlias(_TypingBase, _root=True):
- """Internal helper class for defining generic variants of concrete types.
-
- Note that this is not a type; let's call it a pseudo-type. It cannot
- be used in instance and subclass checks in parameterized form, i.e.
- ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning
- ``False``.
- """
-
- __slots__ = ('name', 'type_var', 'impl_type', 'type_checker')
-
- def __init__(self, name, type_var, impl_type, type_checker):
- """Initializer.
-
- Args:
- name: The name, e.g. 'Pattern'.
- type_var: The type parameter, e.g. AnyStr, or the
- specific type, e.g. str.
- impl_type: The implementation type.
- type_checker: Function that takes an impl_type instance
- and returns a value that should be a type_var instance.
- """
- assert isinstance(name, str), repr(name)
- assert isinstance(impl_type, type), repr(impl_type)
- assert not isinstance(impl_type, TypingMeta), repr(impl_type)
- assert isinstance(type_var, (type, _TypingBase)), repr(type_var)
- self.name = name
- self.type_var = type_var
- self.impl_type = impl_type
- self.type_checker = type_checker
-
- def __repr__(self):
- return "%s[%s]" % (self.name, _type_repr(self.type_var))
-
- def __getitem__(self, parameter):
- if not isinstance(self.type_var, TypeVar):
- raise TypeError("%s cannot be further parameterized." % self)
- if self.type_var.__constraints__ and isinstance(parameter, type):
- if not issubclass(parameter, self.type_var.__constraints__):
- raise TypeError("%s is not a valid substitution for %s." %
- (parameter, self.type_var))
- if isinstance(parameter, TypeVar) and parameter is not self.type_var:
- raise TypeError("%s cannot be re-parameterized." % self)
- return self.__class__(self.name, parameter,
- self.impl_type, self.type_checker)
-
- def __eq__(self, other):
- if not isinstance(other, _TypeAlias):
- return NotImplemented
- return self.name == other.name and self.type_var == other.type_var
-
- def __hash__(self):
- return hash((self.name, self.type_var))
-
- def __instancecheck__(self, obj):
- if not isinstance(self.type_var, TypeVar):
- raise TypeError("Parameterized type aliases cannot be used "
- "with isinstance().")
- return isinstance(obj, self.impl_type)
-
- def __subclasscheck__(self, cls):
- if not isinstance(self.type_var, TypeVar):
- raise TypeError("Parameterized type aliases cannot be used "
- "with issubclass().")
- return issubclass(cls, self.impl_type)
-
-
-def _get_type_vars(types, tvars):
- for t in types:
- if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
- t._get_type_vars(tvars)
-
-
-def _type_vars(types):
- tvars = []
- _get_type_vars(types, tvars)
- return tuple(tvars)
-
-
-def _eval_type(t, globalns, localns):
- if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
- return t._eval_type(globalns, localns)
- return t
-
-
-def _type_check(arg, msg):
- """Check that the argument is a type, and return it (internal helper).
-
- As a special case, accept None and return type(None) instead.
- Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable.
-
- The msg argument is a human-readable error message, e.g.
-
- "Union[arg, ...]: arg should be a type."
-
- We append the repr() of the actual value (truncated to 100 chars).
- """
- if arg is None:
- return type(None)
- if isinstance(arg, str):
- arg = _ForwardRef(arg)
- if (isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or
- not isinstance(arg, (type, _TypingBase)) and not callable(arg)):
- raise TypeError(msg + " Got %.100r." % (arg,))
- # Bare Union etc. are not valid as type arguments
- if (type(arg).__name__ in ('_Union', '_Optional')
- and not getattr(arg, '__origin__', None)
- or isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol)):
- raise TypeError("Plain %s is not valid as type argument" % arg)
- return arg
-
-
-def _type_repr(obj):
- """Return the repr() of an object, special-casing types (internal helper).
-
- If obj is a type, we return a shorter version than the default
- type.__repr__, based on the module and qualified name, which is
- typically enough to uniquely identify a type. For everything
- else, we fall back on repr(obj).
- """
- if isinstance(obj, type) and not isinstance(obj, TypingMeta):
- if obj.__module__ == 'builtins':
- return _qualname(obj)
- return '%s.%s' % (obj.__module__, _qualname(obj))
- if obj is ...:
- return '...'
- if isinstance(obj, types.FunctionType):
- return obj.__name__
- return repr(obj)
-
-
-class _Any(_FinalTypingBase, _root=True):
- """Special type indicating an unconstrained type.
-
- - Any is compatible with every type.
- - Any is assumed to have all methods.
- - All values are assumed to be instances of Any.
-
- Note that all the above statements are true from the point of view of
- static type checkers. At runtime, Any should not be used with instance
- or class checks.
- """
-
- __slots__ = ()
-
- def __instancecheck__(self, obj):
- raise TypeError("Any cannot be used with isinstance().")
-
- def __subclasscheck__(self, cls):
- raise TypeError("Any cannot be used with issubclass().")
-
-
-Any = _Any(_root=True)
-
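# A minimal runtime sketch of the Any defined above (illustration only; assumes
# this module's names are in scope). Any is fine as an annotation, but the
# __instancecheck__/__subclasscheck__ overrides reject runtime checks.
def describe(x: Any) -> str:
    return repr(x)
try:
    isinstance(42, Any)
except TypeError:
    pass  # raised by _Any.__instancecheck__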
-
-class TypeVar(_TypingBase, _root=True):
- """Type variable.
-
- Usage::
-
- T = TypeVar('T') # Can be anything
- A = TypeVar('A', str, bytes) # Must be str or bytes
-
- Type variables exist primarily for the benefit of static type
- checkers. They serve as the parameters for generic types as well
- as for generic function definitions. See class Generic for more
- information on generic types. Generic functions work as follows:
-
- def repeat(x: T, n: int) -> List[T]:
- '''Return a list containing n references to x.'''
- return [x]*n
-
- def longest(x: A, y: A) -> A:
- '''Return the longest of two strings.'''
- return x if len(x) >= len(y) else y
-
- The latter example's signature is essentially the overloading
- of (str, str) -> str and (bytes, bytes) -> bytes. Also note
- that if the arguments are instances of some subclass of str,
- the return type is still plain str.
-
- At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError.
-
- Type variables defined with covariant=True or contravariant=True
- can be used to declare covariant or contravariant generic types.
- See PEP 484 for more details. By default generic types are invariant
- in all type variables.
-
- Type variables can be introspected, e.g.:
-
- T.__name__ == 'T'
- T.__constraints__ == ()
- T.__covariant__ == False
- T.__contravariant__ == False
- A.__constraints__ == (str, bytes)
- """
-
- __slots__ = ('__name__', '__bound__', '__constraints__',
- '__covariant__', '__contravariant__')
-
- def __init__(self, name, *constraints, bound=None,
- covariant=False, contravariant=False):
- super().__init__(name, *constraints, bound=bound,
- covariant=covariant, contravariant=contravariant)
- self.__name__ = name
- if covariant and contravariant:
- raise ValueError("Bivariant types are not supported.")
- self.__covariant__ = bool(covariant)
- self.__contravariant__ = bool(contravariant)
- if constraints and bound is not None:
- raise TypeError("Constraints cannot be combined with bound=...")
- if constraints and len(constraints) == 1:
- raise TypeError("A single constraint is not allowed")
- msg = "TypeVar(name, constraint, ...): constraints must be types."
- self.__constraints__ = tuple(_type_check(t, msg) for t in constraints)
- if bound:
- self.__bound__ = _type_check(bound, "Bound must be a type.")
- else:
- self.__bound__ = None
-
- def _get_type_vars(self, tvars):
- if self not in tvars:
- tvars.append(self)
-
- def __repr__(self):
- if self.__covariant__:
- prefix = '+'
- elif self.__contravariant__:
- prefix = '-'
- else:
- prefix = '~'
- return prefix + self.__name__
-
- def __instancecheck__(self, instance):
- raise TypeError("Type variables cannot be used with isinstance().")
-
- def __subclasscheck__(self, cls):
- raise TypeError("Type variables cannot be used with issubclass().")
-
-
-# Some unconstrained type variables. These are used by the container types.
-# (These are not for export.)
-T = TypeVar('T') # Any type.
-KT = TypeVar('KT') # Key type.
-VT = TypeVar('VT') # Value type.
-T_co = TypeVar('T_co', covariant=True) # Any type covariant containers.
-V_co = TypeVar('V_co', covariant=True) # Any type covariant containers.
-VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers.
-T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant.
-
-# A useful type variable with constraints. This represents string types.
-# (This one *is* for export!)
-AnyStr = TypeVar('AnyStr', bytes, str)
-
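# A small introspection sketch for the TypeVar machinery above (illustration
# only; T_demo and Co_demo are hypothetical names, not part of this module).
T_demo = TypeVar('T_demo')
Co_demo = TypeVar('Co_demo', covariant=True)
assert repr(T_demo) == '~T_demo'      # invariant -> '~' prefix
assert repr(Co_demo) == '+Co_demo'    # covariant -> '+' prefix
assert AnyStr.__constraints__ == (bytes, str)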
-
-def _replace_arg(arg, tvars, args):
- """An internal helper function: replace arg if it is a type variable
- found in tvars with corresponding substitution from args or
- with corresponding substitution sub-tree if arg is a generic type.
- """
-
- if tvars is None:
- tvars = []
- if hasattr(arg, '_subs_tree'):
- return arg._subs_tree(tvars, args)
- if isinstance(arg, TypeVar):
- for i, tvar in enumerate(tvars):
- if arg == tvar:
- return args[i]
- return arg
-
-
-def _subs_tree(cls, tvars=None, args=None):
- """An internal helper function: calculate substitution tree
- for generic cls after replacing its type parameters with
- substitutions in tvars -> args (if any).
- Repeat the same procedure while following the chain of __origin__'s.
-
- Return a list of arguments with all possible substitutions
- performed. Arguments that are generic classes themselves are represented
- as tuples (so that no new classes are created by this function).
- For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)]
- """
-
- if cls.__origin__ is None:
- return cls
- # Make a chain of origins (i.e. cls -> cls.__origin__)
- current = cls.__origin__
- orig_chain = []
- while current.__origin__ is not None:
- orig_chain.append(current)
- current = current.__origin__
- # Replace type variables in __args__ if asked ...
- tree_args = []
- for arg in cls.__args__:
- tree_args.append(_replace_arg(arg, tvars, args))
- # ... then continue replacing down the origin chain.
- for ocls in orig_chain:
- new_tree_args = []
- for i, arg in enumerate(ocls.__args__):
- new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args))
- tree_args = new_tree_args
- return tree_args
-
-
-def _remove_dups_flatten(parameters):
- """An internal helper for Union creation and substitution: flatten Union's
- among parameters, then remove duplicates and strict subclasses.
- """
-
- # Flatten out Union[Union[...], ...].
- params = []
- for p in parameters:
- if isinstance(p, _Union) and p.__origin__ is Union:
- params.extend(p.__args__)
- elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
- params.extend(p[1:])
- else:
- params.append(p)
- # Weed out strict duplicates, preserving the first of each occurrence.
- all_params = set(params)
- if len(all_params) < len(params):
- new_params = []
- for t in params:
- if t in all_params:
- new_params.append(t)
- all_params.remove(t)
- params = new_params
- assert not all_params, all_params
- # Weed out subclasses.
- # E.g. Union[int, Employee, Manager] == Union[int, Employee].
- # If object is present it will be sole survivor among proper classes.
- # Never discard type variables.
- # (In particular, Union[str, AnyStr] != AnyStr.)
- all_params = set(params)
- for t1 in params:
- if not isinstance(t1, type):
- continue
- if any(isinstance(t2, type) and issubclass(t1, t2)
- for t2 in all_params - {t1}
- if not (isinstance(t2, GenericMeta) and
- t2.__origin__ is not None)):
- all_params.remove(t1)
- return tuple(t for t in params if t in all_params)
-
-
-def _check_generic(cls, parameters):
- # Check correct count for parameters of a generic cls (internal helper).
- if not cls.__parameters__:
- raise TypeError("%s is not a generic class" % repr(cls))
- alen = len(parameters)
- elen = len(cls.__parameters__)
- if alen != elen:
- raise TypeError("Too %s parameters for %s; actual %s, expected %s" %
- ("many" if alen > elen else "few", repr(cls), alen, elen))
-
-
-_cleanups = []
-
-
-def _tp_cache(func):
- """Internal wrapper caching __getitem__ of generic types with a fallback to
- original function for non-hashable arguments.
- """
-
- cached = functools.lru_cache()(func)
- _cleanups.append(cached.cache_clear)
- @functools.wraps(func)
- def inner(*args, **kwds):
- try:
- return cached(*args, **kwds)
- except TypeError:
- pass # All real errors (not unhashable args) are raised below.
- return func(*args, **kwds)
- return inner
-
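# Effect of the cache above, sketched (illustration only; List and Union are
# defined later in this module): subscribing with the same hashable arguments
# returns the same cached object.
assert List[int] is List[int]
assert Union[int, str] is Union[int, str]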
-
-class _Union(_FinalTypingBase, _root=True):
- """Union type; Union[X, Y] means either X or Y.
-
- To define a union, use e.g. Union[int, str]. Details:
-
- - The arguments must be types and there must be at least one.
-
- - None as an argument is a special case and is replaced by
- type(None).
-
- - Unions of unions are flattened, e.g.::
-
- Union[Union[int, str], float] == Union[int, str, float]
-
- - Unions of a single argument vanish, e.g.::
-
- Union[int] == int # The constructor actually returns int
-
- - Redundant arguments are skipped, e.g.::
-
- Union[int, str, int] == Union[int, str]
-
- - When comparing unions, the argument order is ignored, e.g.::
-
- Union[int, str] == Union[str, int]
-
- - When two arguments have a subclass relationship, the least
- derived argument is kept, e.g.::
-
- class Employee: pass
- class Manager(Employee): pass
- Union[int, Employee, Manager] == Union[int, Employee]
- Union[Manager, int, Employee] == Union[int, Employee]
- Union[Employee, Manager] == Employee
-
- - Similar for object::
-
- Union[int, object] == object
-
- - You cannot subclass or instantiate a union.
-
- - You can use Optional[X] as a shorthand for Union[X, None].
- """
-
- __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__')
-
- def __new__(cls, parameters=None, origin=None, *args, _root=False):
- self = super().__new__(cls, parameters, origin, *args, _root=_root)
- if origin is None:
- self.__parameters__ = None
- self.__args__ = None
- self.__origin__ = None
- self.__tree_hash__ = hash(frozenset(('Union',)))
- return self
- if not isinstance(parameters, tuple):
- raise TypeError("Expected parameters=<tuple>")
- if origin is Union:
- parameters = _remove_dups_flatten(parameters)
- # It's not a union if there's only one type left.
- if len(parameters) == 1:
- return parameters[0]
- self.__parameters__ = _type_vars(parameters)
- self.__args__ = parameters
- self.__origin__ = origin
- # Pre-calculate the __hash__ on instantiation.
- # This improves speed for complex substitutions.
- subs_tree = self._subs_tree()
- if isinstance(subs_tree, tuple):
- self.__tree_hash__ = hash(frozenset(subs_tree))
- else:
- self.__tree_hash__ = hash(subs_tree)
- return self
-
- def _eval_type(self, globalns, localns):
- if self.__args__ is None:
- return self
- ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__)
- ev_origin = _eval_type(self.__origin__, globalns, localns)
- if ev_args == self.__args__ and ev_origin == self.__origin__:
- # Everything is already evaluated.
- return self
- return self.__class__(ev_args, ev_origin, _root=True)
-
- def _get_type_vars(self, tvars):
- if self.__origin__ and self.__parameters__:
- _get_type_vars(self.__parameters__, tvars)
-
- def __repr__(self):
- if self.__origin__ is None:
- return super().__repr__()
- tree = self._subs_tree()
- if not isinstance(tree, tuple):
- return repr(tree)
- return tree[0]._tree_repr(tree)
-
- def _tree_repr(self, tree):
- arg_list = []
- for arg in tree[1:]:
- if not isinstance(arg, tuple):
- arg_list.append(_type_repr(arg))
- else:
- arg_list.append(arg[0]._tree_repr(arg))
- return super().__repr__() + '[%s]' % ', '.join(arg_list)
-
- @_tp_cache
- def __getitem__(self, parameters):
- if parameters == ():
- raise TypeError("Cannot take a Union of no types.")
- if not isinstance(parameters, tuple):
- parameters = (parameters,)
- if self.__origin__ is None:
- msg = "Union[arg, ...]: each arg must be a type."
- else:
- msg = "Parameters to generic types must be types."
- parameters = tuple(_type_check(p, msg) for p in parameters)
- if self is not Union:
- _check_generic(self, parameters)
- return self.__class__(parameters, origin=self, _root=True)
-
- def _subs_tree(self, tvars=None, args=None):
- if self is Union:
- return Union # Nothing to substitute
- tree_args = _subs_tree(self, tvars, args)
- tree_args = _remove_dups_flatten(tree_args)
- if len(tree_args) == 1:
- return tree_args[0] # Union of a single type is that type
- return (Union,) + tree_args
-
- def __eq__(self, other):
- if not isinstance(other, _Union):
- return self._subs_tree() == other
- return self.__tree_hash__ == other.__tree_hash__
-
- def __hash__(self):
- return self.__tree_hash__
-
- def __instancecheck__(self, obj):
- raise TypeError("Unions cannot be used with isinstance().")
-
- def __subclasscheck__(self, cls):
- raise TypeError("Unions cannot be used with issubclass().")
-
-
-Union = _Union(_root=True)
-
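# A brief sketch of the Union rules documented above (illustration only):
assert Union[int] is int                                        # single arg collapses
assert Union[int, str, int] == Union[int, str]                  # duplicates removed
assert Union[int, str] == Union[str, int]                       # order ignored
assert Union[Union[int, str], float] == Union[int, str, float]  # nested unions flatten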
-
-class _Optional(_FinalTypingBase, _root=True):
- """Optional type.
-
- Optional[X] is equivalent to Union[X, None].
- """
-
- __slots__ = ()
-
- @_tp_cache
- def __getitem__(self, arg):
- arg = _type_check(arg, "Optional[t] requires a single type.")
- return Union[arg, type(None)]
-
-
-Optional = _Optional(_root=True)
-
-
-def _gorg(a):
- """Return the farthest origin of a generic class (internal helper)."""
- assert isinstance(a, GenericMeta)
- while a.__origin__ is not None:
- a = a.__origin__
- return a
-
-
-def _geqv(a, b):
- """Return whether two generic classes are equivalent (internal helper).
-
- The intention is to consider generic class X and any of its
- parameterized forms (X[T], X[int], etc.) as equivalent.
-
- However, X is not equivalent to a subclass of X.
-
- The relation is reflexive, symmetric and transitive.
- """
- assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta)
- # Reduce each to its origin.
- return _gorg(a) is _gorg(b)
-
-
-def _next_in_mro(cls):
- """Helper for Generic.__new__.
-
- Returns the class after the last occurrence of Generic or
- Generic[...] in cls.__mro__.
- """
- next_in_mro = object
- # Look for the last occurrence of Generic or Generic[...].
- for i, c in enumerate(cls.__mro__[:-1]):
- if isinstance(c, GenericMeta) and _gorg(c) is Generic:
- next_in_mro = cls.__mro__[i+1]
- return next_in_mro
-
-
-def _valid_for_check(cls):
- """An internal helper to prohibit isinstance([1], List[str]) etc."""
- if cls is Generic:
- raise TypeError("Class %r cannot be used with class "
- "or instance checks" % cls)
- if (cls.__origin__ is not None and
- sys._getframe(3).f_globals['__name__'] not in ['abc', 'functools']):
- raise TypeError("Parameterized generics cannot be used with class "
- "or instance checks")
-
-
-def _make_subclasshook(cls):
- """Construct a __subclasshook__ callable that incorporates
- the associated __extra__ class in subclass checks performed
- against cls.
- """
- if isinstance(cls.__extra__, abc.ABCMeta):
- # The logic mirrors that of ABCMeta.__subclasscheck__.
- # Registered classes need not be checked here because
- # cls and its extra share the same _abc_registry.
- def __extrahook__(subclass):
- _valid_for_check(cls)
- res = cls.__extra__.__subclasshook__(subclass)
- if res is not NotImplemented:
- return res
- if cls.__extra__ in subclass.__mro__:
- return True
- for scls in cls.__extra__.__subclasses__():
- if isinstance(scls, GenericMeta):
- continue
- if issubclass(subclass, scls):
- return True
- return NotImplemented
- else:
- # For non-ABC extras we'll just call issubclass().
- def __extrahook__(subclass):
- _valid_for_check(cls)
- if cls.__extra__ and issubclass(subclass, cls.__extra__):
- return True
- return NotImplemented
- return __extrahook__
-
-
-def _no_slots_copy(dct):
- """Internal helper: copy class __dict__ and clean slots class variables.
- (They will be re-created if necessary by normal class machinery.)
- """
- dict_copy = dict(dct)
- if '__slots__' in dict_copy:
- for slot in dict_copy['__slots__']:
- dict_copy.pop(slot, None)
- return dict_copy
-
-
-class GenericMeta(TypingMeta, abc.ABCMeta):
- """Metaclass for generic types."""
-
- def __new__(cls, name, bases, namespace,
- tvars=None, args=None, origin=None, extra=None, orig_bases=None):
- if tvars is not None:
- # Called from __getitem__() below.
- assert origin is not None
- assert all(isinstance(t, TypeVar) for t in tvars), tvars
- else:
- # Called from class statement.
- assert tvars is None, tvars
- assert args is None, args
- assert origin is None, origin
-
- # Get the full set of tvars from the bases.
- tvars = _type_vars(bases)
- # Look for Generic[T1, ..., Tn].
- # If found, tvars must be a subset of it.
- # If not found, tvars is it.
- # Also check for and reject plain Generic,
- # and reject multiple Generic[...].
- gvars = None
- for base in bases:
- if base is Generic:
- raise TypeError("Cannot inherit from plain Generic")
- if (isinstance(base, GenericMeta) and
- base.__origin__ is Generic):
- if gvars is not None:
- raise TypeError(
- "Cannot inherit from Generic[...] multiple types.")
- gvars = base.__parameters__
- if gvars is None:
- gvars = tvars
- else:
- tvarset = set(tvars)
- gvarset = set(gvars)
- if not tvarset <= gvarset:
- raise TypeError(
- "Some type variables (%s) "
- "are not listed in Generic[%s]" %
- (", ".join(str(t) for t in tvars if t not in gvarset),
- ", ".join(str(g) for g in gvars)))
- tvars = gvars
-
- initial_bases = bases
- if extra is not None and type(extra) is abc.ABCMeta and extra not in bases:
- bases = (extra,) + bases
- bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases)
-
- # remove bare Generic from bases if there are other generic bases
- if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
- bases = tuple(b for b in bases if b is not Generic)
- self = super().__new__(cls, name, bases, namespace, _root=True)
-
- self.__parameters__ = tvars
- # Be prepared that GenericMeta will be subclassed by TupleMeta
- # and CallableMeta; those two allow ..., (), or [] in __args__.
- self.__args__ = tuple(... if a is _TypingEllipsis else
- () if a is _TypingEmpty else
- a for a in args) if args else None
- self.__origin__ = origin
- self.__extra__ = extra
- # Speed hack (https://github.com/python/typing/issues/196).
- self.__next_in_mro__ = _next_in_mro(self)
- # Preserve base classes on subclassing (__bases__ are type erased now).
- if orig_bases is None:
- self.__orig_bases__ = initial_bases
-
- # This allows unparameterized generic collections to be used
- # with issubclass() and isinstance() in the same way as their
- # collections.abc counterparts (e.g., isinstance([], Iterable)).
- if ('__subclasshook__' not in namespace and extra # allow overriding
- or hasattr(self.__subclasshook__, '__name__') and
- self.__subclasshook__.__name__ == '__extrahook__'):
- self.__subclasshook__ = _make_subclasshook(self)
- if isinstance(extra, abc.ABCMeta):
- self._abc_registry = extra._abc_registry
-
- if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2.
- self.__qualname__ = origin.__qualname__
- self.__tree_hash__ = hash(self._subs_tree()) if origin else hash((self.__name__,))
- return self
-
- def _get_type_vars(self, tvars):
- if self.__origin__ and self.__parameters__:
- _get_type_vars(self.__parameters__, tvars)
-
- def _eval_type(self, globalns, localns):
- ev_origin = (self.__origin__._eval_type(globalns, localns)
- if self.__origin__ else None)
- ev_args = tuple(_eval_type(a, globalns, localns) for a
- in self.__args__) if self.__args__ else None
- if ev_origin == self.__origin__ and ev_args == self.__args__:
- return self
- return self.__class__(self.__name__,
- self.__bases__,
- _no_slots_copy(self.__dict__),
- tvars=_type_vars(ev_args) if ev_args else None,
- args=ev_args,
- origin=ev_origin,
- extra=self.__extra__,
- orig_bases=self.__orig_bases__)
-
- def __repr__(self):
- if self.__origin__ is None:
- return super().__repr__()
- return self._tree_repr(self._subs_tree())
-
- def _tree_repr(self, tree):
- arg_list = []
- for arg in tree[1:]:
- if arg == ():
- arg_list.append('()')
- elif not isinstance(arg, tuple):
- arg_list.append(_type_repr(arg))
- else:
- arg_list.append(arg[0]._tree_repr(arg))
- return super().__repr__() + '[%s]' % ', '.join(arg_list)
-
- def _subs_tree(self, tvars=None, args=None):
- if self.__origin__ is None:
- return self
- tree_args = _subs_tree(self, tvars, args)
- return (_gorg(self),) + tuple(tree_args)
-
- def __eq__(self, other):
- if not isinstance(other, GenericMeta):
- return NotImplemented
- if self.__origin__ is None or other.__origin__ is None:
- return self is other
- return self.__tree_hash__ == other.__tree_hash__
-
- def __hash__(self):
- return self.__tree_hash__
-
- @_tp_cache
- def __getitem__(self, params):
- if not isinstance(params, tuple):
- params = (params,)
- if not params and not _gorg(self) is Tuple:
- raise TypeError(
- "Parameter list to %s[...] cannot be empty" % _qualname(self))
- msg = "Parameters to generic types must be types."
- params = tuple(_type_check(p, msg) for p in params)
- if self is Generic:
- # Generic can only be subscripted with unique type variables.
- if not all(isinstance(p, TypeVar) for p in params):
- raise TypeError(
- "Parameters to Generic[...] must all be type variables")
- if len(set(params)) != len(params):
- raise TypeError(
- "Parameters to Generic[...] must all be unique")
- tvars = params
- args = params
- elif self in (Tuple, Callable):
- tvars = _type_vars(params)
- args = params
- elif self is _Protocol:
- # _Protocol is internal, don't check anything.
- tvars = params
- args = params
- elif self.__origin__ in (Generic, _Protocol):
- # Can't subscript Generic[...] or _Protocol[...].
- raise TypeError("Cannot subscript already-subscripted %s" %
- repr(self))
- else:
- # Subscripting a regular Generic subclass.
- _check_generic(self, params)
- tvars = _type_vars(params)
- args = params
- return self.__class__(self.__name__,
- self.__bases__,
- _no_slots_copy(self.__dict__),
- tvars=tvars,
- args=args,
- origin=self,
- extra=self.__extra__,
- orig_bases=self.__orig_bases__)
-
- def __instancecheck__(self, instance):
- # Since we extend ABC.__subclasscheck__ and
- # ABC.__instancecheck__ inlines the cache checking done by the
- # latter, we must extend __instancecheck__ too. For simplicity
- # we just skip the cache check -- instance checks for generic
- # classes are supposed to be rare anyways.
- return issubclass(instance.__class__, self)
-
- def __copy__(self):
- return self.__class__(self.__name__, self.__bases__,
- _no_slots_copy(self.__dict__),
- self.__parameters__, self.__args__, self.__origin__,
- self.__extra__, self.__orig_bases__)
-
-
-# Prevent checks for Generic to crash when defining Generic.
-Generic = None
-
-
-def _generic_new(base_cls, cls, *args, **kwds):
- # Ensure type is erased on instantiation,
- # but attempt to store it in __orig_class__
- if cls.__origin__ is None:
- return base_cls.__new__(cls)
- else:
- origin = _gorg(cls)
- obj = base_cls.__new__(origin)
- try:
- obj.__orig_class__ = cls
- except AttributeError:
- pass
- obj.__init__(*args, **kwds)
- return obj
-
-
-class Generic(metaclass=GenericMeta):
- """Abstract base class for generic types.
-
- A generic type is typically declared by inheriting from
- this class parameterized with one or more type variables.
- For example, a generic mapping type might be defined as::
-
- class Mapping(Generic[KT, VT]):
- def __getitem__(self, key: KT) -> VT:
- ...
- # Etc.
-
- This class can then be used as follows::
-
- def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT:
- try:
- return mapping[key]
- except KeyError:
- return default
- """
-
- __slots__ = ()
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, Generic):
- raise TypeError("Type Generic cannot be instantiated; "
- "it can be used only as a base class")
- return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
-
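# A minimal user-defined generic, sketching the machinery above (illustration
# only; Box is a hypothetical class, T is the module-level TypeVar):
class Box(Generic[T]):
    def __init__(self, content):
        self.content = content
b = Box[int](42)
assert type(b) is Box                 # type arguments are erased at runtime
assert b.__orig_class__ == Box[int]   # but recorded by _generic_new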
-
-class _TypingEmpty:
- """Internal placeholder for () or []. Used by TupleMeta and CallableMeta
- to allow empty list/tuple in specific places, without allowing them
- to sneak in where prohibited.
- """
-
-
-class _TypingEllipsis:
- """Internal placeholder for ... (ellipsis)."""
-
-
-class TupleMeta(GenericMeta):
- """Metaclass for Tuple (internal)."""
-
- @_tp_cache
- def __getitem__(self, parameters):
- if self.__origin__ is not None or not _geqv(self, Tuple):
- # Normal generic rules apply if this is not the first subscription
- # or a subscription of a subclass.
- return super().__getitem__(parameters)
- if parameters == ():
- return super().__getitem__((_TypingEmpty,))
- if not isinstance(parameters, tuple):
- parameters = (parameters,)
- if len(parameters) == 2 and parameters[1] is ...:
- msg = "Tuple[t, ...]: t must be a type."
- p = _type_check(parameters[0], msg)
- return super().__getitem__((p, _TypingEllipsis))
- msg = "Tuple[t0, t1, ...]: each t must be a type."
- parameters = tuple(_type_check(p, msg) for p in parameters)
- return super().__getitem__(parameters)
-
- def __instancecheck__(self, obj):
- if self.__args__ is None:
- return isinstance(obj, tuple)
- raise TypeError("Parameterized Tuple cannot be used "
- "with isinstance().")
-
- def __subclasscheck__(self, cls):
- if self.__args__ is None:
- return issubclass(cls, tuple)
- raise TypeError("Parameterized Tuple cannot be used "
- "with issubclass().")
-
-
-class Tuple(tuple, extra=tuple, metaclass=TupleMeta):
- """Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
-
- Example: Tuple[T1, T2] is a tuple of two elements corresponding
- to type variables T1 and T2. Tuple[int, float, str] is a tuple
- of an int, a float and a string.
-
- To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
- """
-
- __slots__ = ()
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, Tuple):
- raise TypeError("Type Tuple cannot be instantiated; "
- "use tuple() instead")
- return _generic_new(tuple, cls, *args, **kwds)
-
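# Sketch of the runtime-check rules above (illustration only):
assert isinstance((1, 'a'), Tuple)           # bare Tuple falls back to tuple
try:
    isinstance((1, 'a'), Tuple[int, str])    # parameterized form is rejected
except TypeError:
    pass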
-
-class CallableMeta(GenericMeta):
- """Metaclass for Callable (internal)."""
-
- def __repr__(self):
- if self.__origin__ is None:
- return super().__repr__()
- return self._tree_repr(self._subs_tree())
-
- def _tree_repr(self, tree):
- if _gorg(self) is not Callable:
- return super()._tree_repr(tree)
- # For actual Callable (not its subclass) we override
- # super()._tree_repr() for nice formatting.
- arg_list = []
- for arg in tree[1:]:
- if not isinstance(arg, tuple):
- arg_list.append(_type_repr(arg))
- else:
- arg_list.append(arg[0]._tree_repr(arg))
- if arg_list[0] == '...':
- return repr(tree[0]) + '[..., %s]' % arg_list[1]
- return (repr(tree[0]) +
- '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1]))
-
- def __getitem__(self, parameters):
- """A thin wrapper around __getitem_inner__ to provide the latter
- with hashable arguments to improve speed.
- """
-
- if self.__origin__ is not None or not _geqv(self, Callable):
- return super().__getitem__(parameters)
- if not isinstance(parameters, tuple) or len(parameters) != 2:
- raise TypeError("Callable must be used as "
- "Callable[[arg, ...], result].")
- args, result = parameters
- if args is Ellipsis:
- parameters = (Ellipsis, result)
- else:
- if not isinstance(args, list):
- raise TypeError("Callable[args, result]: args must be a list."
- " Got %.100r." % (args,))
- parameters = (tuple(args), result)
- return self.__getitem_inner__(parameters)
-
- @_tp_cache
- def __getitem_inner__(self, parameters):
- args, result = parameters
- msg = "Callable[args, result]: result must be a type."
- result = _type_check(result, msg)
- if args is Ellipsis:
- return super().__getitem__((_TypingEllipsis, result))
- msg = "Callable[[arg, ...], result]: each arg must be a type."
- args = tuple(_type_check(arg, msg) for arg in args)
- parameters = args + (result,)
- return super().__getitem__(parameters)
-
-
-class Callable(extra=collections_abc.Callable, metaclass=CallableMeta):
- """Callable type; Callable[[int], str] is a function of (int) -> str.
-
- The subscription syntax must always be used with exactly two
- values: the argument list and the return type. The argument list
- must be a list of types or ellipsis; the return type must be a single type.
-
- There is no syntax to indicate optional or keyword arguments;
- such function types are rarely used as callback types.
- """
-
- __slots__ = ()
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, Callable):
- raise TypeError("Type Callable cannot be instantiated; "
- "use a non-abstract subclass instead")
- return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
-
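# Sketch of the subscription rules above (illustration only):
sig1 = Callable[[int, str], bool]   # explicit argument list plus return type
sig2 = Callable[..., int]           # Ellipsis leaves the argument list open
try:
    Callable[int, str]              # args must be a list or Ellipsis
except TypeError:
    pass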
-
-class _ClassVar(_FinalTypingBase, _root=True):
- """Special type construct to mark class variables.
-
- An annotation wrapped in ClassVar indicates that a given
- attribute is intended to be used as a class variable and
- should not be set on instances of that class. Usage::
-
- class Starship:
- stats: ClassVar[Dict[str, int]] = {} # class variable
- damage: int = 10 # instance variable
-
- ClassVar accepts only types and cannot be further subscribed.
-
- Note that ClassVar is not a class itself, and should not
- be used with isinstance() or issubclass().
- """
-
- __slots__ = ('__type__',)
-
- def __init__(self, tp=None, **kwds):
- self.__type__ = tp
-
- def __getitem__(self, item):
- cls = type(self)
- if self.__type__ is None:
- return cls(_type_check(item,
- '{} accepts only single type.'.format(cls.__name__[1:])),
- _root=True)
- raise TypeError('{} cannot be further subscripted'
- .format(cls.__name__[1:]))
-
- def _eval_type(self, globalns, localns):
- new_tp = _eval_type(self.__type__, globalns, localns)
- if new_tp == self.__type__:
- return self
- return type(self)(new_tp, _root=True)
-
- def __repr__(self):
- r = super().__repr__()
- if self.__type__ is not None:
- r += '[{}]'.format(_type_repr(self.__type__))
- return r
-
- def __hash__(self):
- return hash((type(self).__name__, self.__type__))
-
- def __eq__(self, other):
- if not isinstance(other, _ClassVar):
- return NotImplemented
- if self.__type__ is not None:
- return self.__type__ == other.__type__
- return self is other
-
-
-ClassVar = _ClassVar(_root=True)
-
-
-def cast(typ, val):
- """Cast a value to a type.
-
- This returns the value unchanged. To the type checker this
- signals that the return value has the designated type, but at
- runtime we intentionally don't check anything (we want this
- to be as fast as possible).
- """
- return val
-
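# A minimal sketch of cast() as defined above (illustration only): the value
# is returned unchanged; only a static checker treats it as the target type.
items = []
assert cast(List[int], items) is items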
-
-def _get_defaults(func):
- """Internal helper to extract the default arguments, by name."""
- try:
- code = func.__code__
- except AttributeError:
- # Some built-in functions don't have __code__, __defaults__, etc.
- return {}
- pos_count = code.co_argcount
- arg_names = code.co_varnames
- arg_names = arg_names[:pos_count]
- defaults = func.__defaults__ or ()
- kwdefaults = func.__kwdefaults__
- res = dict(kwdefaults) if kwdefaults else {}
- pos_offset = pos_count - len(defaults)
- for name, value in zip(arg_names[pos_offset:], defaults):
- assert name not in res
- res[name] = value
- return res
-
-
-def get_type_hints(obj, globalns=None, localns=None):
- """Return type hints for an object.
-
- This is often the same as obj.__annotations__, but it handles
- forward references encoded as string literals, and if necessary
- adds Optional[t] if a default value equal to None is set.
-
- The argument may be a module, class, method, or function. The annotations
- are returned as a dictionary. For classes, annotations include also
- inherited members.
-
- TypeError is raised if the argument is not of a type that can contain
- annotations, and an empty dictionary is returned if no annotations are
- present.
-
- BEWARE -- the behavior of globalns and localns is counterintuitive
- (unless you are familiar with how eval() and exec() work). The
- search order is locals first, then globals.
-
- - If no dict arguments are passed, an attempt is made to use the
- globals from obj, and these are also used as the locals. If the
- object does not appear to have globals, an exception is raised.
-
- - If one dict argument is passed, it is used for both globals and
- locals.
-
- - If two dict arguments are passed, they specify globals and
- locals, respectively.
- """
-
- if getattr(obj, '__no_type_check__', None):
- return {}
- if globalns is None:
- globalns = getattr(obj, '__globals__', {})
- if localns is None:
- localns = globalns
- elif localns is None:
- localns = globalns
- # Classes require a special treatment.
- if isinstance(obj, type):
- hints = {}
- for base in reversed(obj.__mro__):
- ann = base.__dict__.get('__annotations__', {})
- for name, value in ann.items():
- if value is None:
- value = type(None)
- if isinstance(value, str):
- value = _ForwardRef(value)
- value = _eval_type(value, globalns, localns)
- hints[name] = value
- return hints
- hints = getattr(obj, '__annotations__', None)
- if hints is None:
- # Return empty annotations for something that _could_ have them.
- if (isinstance(obj, types.FunctionType) or
- isinstance(obj, types.BuiltinFunctionType) or
- isinstance(obj, types.MethodType) or
- isinstance(obj, types.ModuleType)):
- return {}
- else:
- raise TypeError('{!r} is not a module, class, method, '
- 'or function.'.format(obj))
- defaults = _get_defaults(obj)
- hints = dict(hints)
- for name, value in hints.items():
- if value is None:
- value = type(None)
- if isinstance(value, str):
- value = _ForwardRef(value)
- value = _eval_type(value, globalns, localns)
- if name in defaults and defaults[name] is None:
- value = Optional[value]
- hints[name] = value
- return hints
-
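# A small sketch of get_type_hints() as implemented above (illustration only;
# greet is a hypothetical module-level function):
def greet(name: 'str', prefix: str = None) -> str:
    return (prefix or 'Hello, ') + name
hints = get_type_hints(greet)
assert hints['name'] is str              # forward reference resolved via eval()
assert hints['prefix'] == Optional[str]  # None default turns str into Optional[str]
assert hints['return'] is str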
-
-def no_type_check(arg):
- """Decorator to indicate that annotations are not type hints.
-
- The argument must be a class or function; if it is a class, it
- applies recursively to all methods and classes defined in that class
- (but not to methods defined in its superclasses or subclasses).
-
- This mutates the function(s) or class(es) in place.
- """
- if isinstance(arg, type):
- arg_attrs = arg.__dict__.copy()
- for attr, val in arg.__dict__.items():
- if val in arg.__bases__:
- arg_attrs.pop(attr)
- for obj in arg_attrs.values():
- if isinstance(obj, types.FunctionType):
- obj.__no_type_check__ = True
- if isinstance(obj, type):
- no_type_check(obj)
- try:
- arg.__no_type_check__ = True
- except TypeError: # built-in classes
- pass
- return arg
-
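# Sketch of the decorator's effect (illustration only; legacy is a hypothetical
# function): annotations on a @no_type_check object are ignored.
@no_type_check
def legacy(x: 'this is not a type') -> None:
    pass
assert get_type_hints(legacy) == {}   # __no_type_check__ short-circuits the lookup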
-
-def no_type_check_decorator(decorator):
- """Decorator to give another decorator the @no_type_check effect.
-
- This wraps the decorator with something that wraps the decorated
- function in @no_type_check.
- """
-
- @functools.wraps(decorator)
- def wrapped_decorator(*args, **kwds):
- func = decorator(*args, **kwds)
- func = no_type_check(func)
- return func
-
- return wrapped_decorator
-
-
-def _overload_dummy(*args, **kwds):
- """Helper for @overload to raise when called."""
- raise NotImplementedError(
- "You should not call an overloaded function. "
- "A series of @overload-decorated functions "
- "outside a stub module should always be followed "
- "by an implementation that is not @overload-ed.")
-
-
-def overload(func):
- """Decorator for overloaded functions/methods.
-
- In a stub file, place two or more stub definitions for the same
- function in a row, each decorated with @overload. For example:
-
- @overload
- def utf8(value: None) -> None: ...
- @overload
- def utf8(value: bytes) -> bytes: ...
- @overload
- def utf8(value: str) -> bytes: ...
-
- In a non-stub file (i.e. a regular .py file), do the same but
- follow it with an implementation. The implementation should *not*
- be decorated with @overload. For example:
-
- @overload
- def utf8(value: None) -> None: ...
- @overload
- def utf8(value: bytes) -> bytes: ...
- @overload
- def utf8(value: str) -> bytes: ...
- def utf8(value):
- # implementation goes here
- """
- return _overload_dummy
-
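# Sketch (illustration only; utf8_stub is a hypothetical name): overload()
# returns _overload_dummy, so a stub with no real implementation after it
# raises when called.
@overload
def utf8_stub(value: str) -> bytes: ...
try:
    utf8_stub('x')
except NotImplementedError:
    pass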
-
-class _ProtocolMeta(GenericMeta):
- """Internal metaclass for _Protocol.
-
- This exists so _Protocol classes can be generic without deriving
- from Generic.
- """
-
- def __instancecheck__(self, obj):
- if _Protocol not in self.__bases__:
- return super().__instancecheck__(obj)
- raise TypeError("Protocols cannot be used with isinstance().")
-
- def __subclasscheck__(self, cls):
- if not self._is_protocol:
- # No structural checks since this isn't a protocol.
- return NotImplemented
-
- if self is _Protocol:
- # Every class is a subclass of the empty protocol.
- return True
-
- # Find all attributes defined in the protocol.
- attrs = self._get_protocol_attrs()
-
- for attr in attrs:
- if not any(attr in d.__dict__ for d in cls.__mro__):
- return False
- return True
-
- def _get_protocol_attrs(self):
- # Get all Protocol base classes.
- protocol_bases = []
- for c in self.__mro__:
- if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol':
- protocol_bases.append(c)
-
- # Get attributes included in protocol.
- attrs = set()
- for base in protocol_bases:
- for attr in base.__dict__.keys():
- # Include attributes not defined in any non-protocol bases.
- for c in self.__mro__:
- if (c is not base and attr in c.__dict__ and
- not getattr(c, '_is_protocol', False)):
- break
- else:
- if (not attr.startswith('_abc_') and
- attr != '__abstractmethods__' and
- attr != '__annotations__' and
- attr != '__weakref__' and
- attr != '_is_protocol' and
- attr != '__dict__' and
- attr != '__args__' and
- attr != '__slots__' and
- attr != '_get_protocol_attrs' and
- attr != '__next_in_mro__' and
- attr != '__parameters__' and
- attr != '__origin__' and
- attr != '__orig_bases__' and
- attr != '__extra__' and
- attr != '__tree_hash__' and
- attr != '__module__'):
- attrs.add(attr)
-
- return attrs
-
-
-class _Protocol(metaclass=_ProtocolMeta):
- """Internal base class for protocol classes.
-
- This implements a simple-minded structural issubclass check
- (similar but more general than the one-offs in collections.abc
- such as Hashable).
- """
-
- __slots__ = ()
-
- _is_protocol = True
-
-
-# Various ABCs mimicking those in collections.abc.
-# A few are simply re-exported for completeness.
-
-Hashable = collections_abc.Hashable # Not generic.
-
-
-if hasattr(collections_abc, 'Awaitable'):
- class Awaitable(Generic[T_co], extra=collections_abc.Awaitable):
- __slots__ = ()
-
- __all__.append('Awaitable')
-
-
-if hasattr(collections_abc, 'Coroutine'):
- class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co],
- extra=collections_abc.Coroutine):
- __slots__ = ()
-
- __all__.append('Coroutine')
-
-
-if hasattr(collections_abc, 'AsyncIterable'):
-
- class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable):
- __slots__ = ()
-
- class AsyncIterator(AsyncIterable[T_co],
- extra=collections_abc.AsyncIterator):
- __slots__ = ()
-
- __all__.append('AsyncIterable')
- __all__.append('AsyncIterator')
-
-
-class Iterable(Generic[T_co], extra=collections_abc.Iterable):
- __slots__ = ()
-
-
-class Iterator(Iterable[T_co], extra=collections_abc.Iterator):
- __slots__ = ()
-
-
-class SupportsInt(_Protocol):
- __slots__ = ()
-
- @abstractmethod
- def __int__(self) -> int:
- pass
-
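# Sketch of the structural check provided by _ProtocolMeta (illustration only):
# a class is a SupportsInt subclass if __int__ appears somewhere in its MRO.
assert issubclass(int, SupportsInt)
assert issubclass(float, SupportsInt)       # float defines __int__
assert not issubclass(str, SupportsInt)     # str does not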
-
-class SupportsFloat(_Protocol):
- __slots__ = ()
-
- @abstractmethod
- def __float__(self) -> float:
- pass
-
-
-class SupportsComplex(_Protocol):
- __slots__ = ()
-
- @abstractmethod
- def __complex__(self) -> complex:
- pass
-
-
-class SupportsBytes(_Protocol):
- __slots__ = ()
-
- @abstractmethod
- def __bytes__(self) -> bytes:
- pass
-
-
-class SupportsAbs(_Protocol[T_co]):
- __slots__ = ()
-
- @abstractmethod
- def __abs__(self) -> T_co:
- pass
-
-
-class SupportsRound(_Protocol[T_co]):
- __slots__ = ()
-
- @abstractmethod
- def __round__(self, ndigits: int = 0) -> T_co:
- pass
-
-
-if hasattr(collections_abc, 'Reversible'):
- class Reversible(Iterable[T_co], extra=collections_abc.Reversible):
- __slots__ = ()
-else:
- class Reversible(_Protocol[T_co]):
- __slots__ = ()
-
- @abstractmethod
- def __reversed__(self) -> 'Iterator[T_co]':
- pass
-
-
-Sized = collections_abc.Sized # Not generic.
-
-
-class Container(Generic[T_co], extra=collections_abc.Container):
- __slots__ = ()
-
-
-if hasattr(collections_abc, 'Collection'):
- class Collection(Sized, Iterable[T_co], Container[T_co],
- extra=collections_abc.Collection):
- __slots__ = ()
-
- __all__.append('Collection')
-
-
-# Callable was defined earlier.
-
-if hasattr(collections_abc, 'Collection'):
- class AbstractSet(Collection[T_co],
- extra=collections_abc.Set):
- __slots__ = ()
-else:
- class AbstractSet(Sized, Iterable[T_co], Container[T_co],
- extra=collections_abc.Set):
- __slots__ = ()
-
-
-class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet):
- __slots__ = ()
-
-
-# NOTE: It is only covariant in the value type.
-if hasattr(collections_abc, 'Collection'):
- class Mapping(Collection[KT], Generic[KT, VT_co],
- extra=collections_abc.Mapping):
- __slots__ = ()
-else:
- class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co],
- extra=collections_abc.Mapping):
- __slots__ = ()
-
-
-class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping):
- __slots__ = ()
-
-if hasattr(collections_abc, 'Reversible'):
- if hasattr(collections_abc, 'Collection'):
- class Sequence(Reversible[T_co], Collection[T_co],
- extra=collections_abc.Sequence):
- __slots__ = ()
- else:
- class Sequence(Sized, Reversible[T_co], Container[T_co],
- extra=collections_abc.Sequence):
- __slots__ = ()
-else:
- class Sequence(Sized, Iterable[T_co], Container[T_co],
- extra=collections_abc.Sequence):
- __slots__ = ()
-
-
-class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence):
- __slots__ = ()
-
-
-class ByteString(Sequence[int], extra=collections_abc.ByteString):
- __slots__ = ()
-
-
-class List(list, MutableSequence[T], extra=list):
-
- __slots__ = ()
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, List):
- raise TypeError("Type List cannot be instantiated; "
- "use list() instead")
- return _generic_new(list, cls, *args, **kwds)
-
-
-class Set(set, MutableSet[T], extra=set):
-
- __slots__ = ()
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, Set):
- raise TypeError("Type Set cannot be instantiated; "
- "use set() instead")
- return _generic_new(set, cls, *args, **kwds)
-
-
-class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset):
- __slots__ = ()
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, FrozenSet):
- raise TypeError("Type FrozenSet cannot be instantiated; "
- "use frozenset() instead")
- return _generic_new(frozenset, cls, *args, **kwds)
-
-
-class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView):
- __slots__ = ()
-
-
-class KeysView(MappingView[KT], AbstractSet[KT],
- extra=collections_abc.KeysView):
- __slots__ = ()
-
-
-class ItemsView(MappingView[Tuple[KT, VT_co]],
- AbstractSet[Tuple[KT, VT_co]],
- Generic[KT, VT_co],
- extra=collections_abc.ItemsView):
- __slots__ = ()
-
-
-class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView):
- __slots__ = ()
-
-
-if hasattr(contextlib, 'AbstractContextManager'):
- class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager):
- __slots__ = ()
- __all__.append('ContextManager')
-
-
-class Dict(dict, MutableMapping[KT, VT], extra=dict):
-
- __slots__ = ()
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, Dict):
- raise TypeError("Type Dict cannot be instantiated; "
- "use dict() instead")
- return _generic_new(dict, cls, *args, **kwds)
-
-class DefaultDict(collections.defaultdict, MutableMapping[KT, VT],
- extra=collections.defaultdict):
-
- __slots__ = ()
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, DefaultDict):
- raise TypeError("Type DefaultDict cannot be instantiated; "
- "use collections.defaultdict() instead")
- return _generic_new(collections.defaultdict, cls, *args, **kwds)
-
-# Determine what base class to use for Generator.
-if hasattr(collections_abc, 'Generator'):
- # Sufficiently recent versions of 3.5 have a Generator ABC.
- _G_base = collections_abc.Generator
-else:
- # Fall back on the exact type.
- _G_base = types.GeneratorType
-
-
-class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co],
- extra=_G_base):
- __slots__ = ()
-
- def __new__(cls, *args, **kwds):
- if _geqv(cls, Generator):
- raise TypeError("Type Generator cannot be instantiated; "
- "create a subclass instead")
- return _generic_new(_G_base, cls, *args, **kwds)
-
-
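Generator takes three type parameters, in the order yield type, send type and return type (matching T_co, T_contra and V_co above). A typical annotated generator, as a usage sketch (echo_round is a made-up example):

    from typing import Generator

    def echo_round() -> Generator[int, float, str]:
        sent = yield 0                # yields ints, receives floats via send()
        while sent >= 0:
            sent = yield round(sent)
        return 'Done'                 # the return type is str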
-# Internal type variable used for Type[].
-CT_co = TypeVar('CT_co', covariant=True, bound=type)
-
-
-# This is not a real generic class. Don't use outside annotations.
-class Type(Generic[CT_co], extra=type):
- """A special construct usable to annotate class objects.
-
- For example, suppose we have the following classes::
-
- class User: ... # Abstract base for User classes
- class BasicUser(User): ...
- class ProUser(User): ...
- class TeamUser(User): ...
-
- And a function that takes a class argument that's a subclass of
- User and returns an instance of the corresponding class::
-
- U = TypeVar('U', bound=User)
- def new_user(user_class: Type[U]) -> U:
- user = user_class()
- # (Here we could write the user object to a database)
- return user
-
- joe = new_user(BasicUser)
-
- At this point the type checker knows that joe has type BasicUser.
- """
-
- __slots__ = ()
-
-
-def _make_nmtuple(name, types):
- msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
- types = [(n, _type_check(t, msg)) for n, t in types]
- nm_tpl = collections.namedtuple(name, [n for n, t in types])
- nm_tpl._field_types = dict(types)
- try:
- nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__')
- except (AttributeError, ValueError):
- pass
- return nm_tpl
-
-
-_PY36 = sys.version_info[:2] >= (3, 6)
-
-
-class NamedTupleMeta(type):
-
- def __new__(cls, typename, bases, ns):
- if ns.get('_root', False):
- return super().__new__(cls, typename, bases, ns)
- if not _PY36:
- raise TypeError("Class syntax for NamedTuple is only supported"
- " in Python 3.6+")
- types = ns.get('__annotations__', {})
- return _make_nmtuple(typename, types.items())
-
-class NamedTuple(metaclass=NamedTupleMeta):
- """Typed version of namedtuple.
-
- Usage in Python versions >= 3.6::
-
- class Employee(NamedTuple):
- name: str
- id: int
-
- This is equivalent to::
-
- Employee = collections.namedtuple('Employee', ['name', 'id'])
-
- The resulting class has one extra attribute: _field_types,
- giving a dict mapping field names to types. (The field names
- are in the _fields attribute, which is part of the namedtuple
- API.) Alternative equivalent keyword syntax is also accepted::
-
- Employee = NamedTuple('Employee', name=str, id=int)
-
- In Python versions <= 3.5 use::
-
- Employee = NamedTuple('Employee', [('name', str), ('id', int)])
- """
- _root = True
-
- def __new__(self, typename, fields=None, **kwargs):
- if kwargs and not _PY36:
- raise TypeError("Keyword syntax for NamedTuple is only supported"
- " in Python 3.6+")
- if fields is None:
- fields = kwargs.items()
- elif kwargs:
- raise TypeError("Either list of fields or keywords"
- " can be provided to NamedTuple, not both")
- return _make_nmtuple(typename, fields)
-
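A quick usage sketch of the functional form and the extra _field_types attribute described in the docstring above (output shown in comments; dict ordering may vary on older Pythons):

    from typing import NamedTuple

    Employee = NamedTuple('Employee', [('name', str), ('id', int)])

    e = Employee(name='Ann', id=3)
    print(e)                # Employee(name='Ann', id=3)
    print(e._fields)        # ('name', 'id')
    print(e._field_types)   # {'name': <class 'str'>, 'id': <class 'int'>}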
-
-def NewType(name, tp):
- """NewType creates simple unique types with almost zero
- runtime overhead. NewType(name, tp) is considered a subtype of tp
- by static type checkers. At runtime, NewType(name, tp) returns
- a dummy function that simply returns its argument. Usage::
-
- UserId = NewType('UserId', int)
-
- def name_by_id(user_id: UserId) -> str:
- ...
-
- UserId('user') # Fails type check
-
- name_by_id(42) # Fails type check
- name_by_id(UserId(42)) # OK
-
- num = UserId(5) + 1 # type: int
- """
-
- def new_type(x):
- return x
-
- new_type.__name__ = name
- new_type.__supertype__ = tp
- return new_type
-
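At runtime the object returned by NewType() is just the identity function defined above, so the docstring example behaves like this (usage sketch):

    from typing import NewType

    UserId = NewType('UserId', int)

    uid = UserId(5)
    print(uid)                   # 5 -- a plain int at runtime
    print(type(uid) is int)      # True
    print(UserId.__supertype__)  # <class 'int'>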
-
-# Python-version-specific alias (Python 2: unicode; Python 3: str)
-Text = str
-
-
-# Constant that's True when type checking, but False here.
-TYPE_CHECKING = False
-
-
-class IO(Generic[AnyStr]):
- """Generic base class for TextIO and BinaryIO.
-
- This is an abstract, generic version of the return of open().
-
- NOTE: This does not distinguish between the different possible
- classes (text vs. binary, read vs. write vs. read/write,
- append-only, unbuffered). The TextIO and BinaryIO subclasses
- below capture the distinctions between text vs. binary, which is
- pervasive in the interface; however we currently do not offer a
- way to track the other distinctions in the type system.
- """
-
- __slots__ = ()
-
- @abstractproperty
- def mode(self) -> str:
- pass
-
- @abstractproperty
- def name(self) -> str:
- pass
-
- @abstractmethod
- def close(self) -> None:
- pass
-
- @abstractmethod
- def closed(self) -> bool:
- pass
-
- @abstractmethod
- def fileno(self) -> int:
- pass
-
- @abstractmethod
- def flush(self) -> None:
- pass
-
- @abstractmethod
- def isatty(self) -> bool:
- pass
-
- @abstractmethod
- def read(self, n: int = -1) -> AnyStr:
- pass
-
- @abstractmethod
- def readable(self) -> bool:
- pass
-
- @abstractmethod
- def readline(self, limit: int = -1) -> AnyStr:
- pass
-
- @abstractmethod
- def readlines(self, hint: int = -1) -> List[AnyStr]:
- pass
-
- @abstractmethod
- def seek(self, offset: int, whence: int = 0) -> int:
- pass
-
- @abstractmethod
- def seekable(self) -> bool:
- pass
-
- @abstractmethod
- def tell(self) -> int:
- pass
-
- @abstractmethod
- def truncate(self, size: int = None) -> int:
- pass
-
- @abstractmethod
- def writable(self) -> bool:
- pass
-
- @abstractmethod
- def write(self, s: AnyStr) -> int:
- pass
-
- @abstractmethod
- def writelines(self, lines: List[AnyStr]) -> None:
- pass
-
- @abstractmethod
- def __enter__(self) -> 'IO[AnyStr]':
- pass
-
- @abstractmethod
- def __exit__(self, type, value, traceback) -> None:
- pass
-
-
-class BinaryIO(IO[bytes]):
- """Typed version of the return of open() in binary mode."""
-
- __slots__ = ()
-
- @abstractmethod
- def write(self, s: Union[bytes, bytearray]) -> int:
- pass
-
- @abstractmethod
- def __enter__(self) -> 'BinaryIO':
- pass
-
-
-class TextIO(IO[str]):
- """Typed version of the return of open() in text mode."""
-
- __slots__ = ()
-
- @abstractproperty
- def buffer(self) -> BinaryIO:
- pass
-
- @abstractproperty
- def encoding(self) -> str:
- pass
-
- @abstractproperty
- def errors(self) -> Optional[str]:
- pass
-
- @abstractproperty
- def line_buffering(self) -> bool:
- pass
-
- @abstractproperty
- def newlines(self) -> Any:
- pass
-
- @abstractmethod
- def __enter__(self) -> 'TextIO':
- pass
-
-
-class io:
- """Wrapper namespace for IO generic classes."""
-
- __all__ = ['IO', 'TextIO', 'BinaryIO']
- IO = IO
- TextIO = TextIO
- BinaryIO = BinaryIO
-
-io.__name__ = __name__ + '.io'
-sys.modules[io.__name__] = io
-
-
-Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')),
- lambda p: p.pattern)
-Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')),
- lambda m: m.re.pattern)
-
-
-class re:
- """Wrapper namespace for re type aliases."""
-
- __all__ = ['Pattern', 'Match']
- Pattern = Pattern
- Match = Match
-
-re.__name__ = __name__ + '.re'
-sys.modules[re.__name__] = re
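Registering the two wrapper classes in sys.modules is what makes them importable as pseudo-submodules. Given the registrations above, the expected behaviour is (illustrative sketch):

    import typing.io                  # resolved straight from sys.modules
    from typing.re import Pattern

    print(typing.io.IO is typing.IO)  # True
    print(Pattern is typing.Pattern)  # True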
diff --git a/misc/actions_stubs.py b/misc/actions_stubs.py
deleted file mode 100644
index 978af71..0000000
--- a/misc/actions_stubs.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python3
-import os
-import shutil
-from typing import Tuple, Any
-try:
- import click
-except ImportError:
- print("You need the module \'click\'")
- exit(1)
-
-base_path = os.getcwd()
-
-# I don't know how to set callables with different args
-def apply_all(func: Any, directory: str, extension: str,
- to_extension: str='', exclude: Tuple[str]=('',),
- recursive: bool=True, debug: bool=False) -> None:
- excluded = [x+extension for x in exclude] if exclude else []
- for p, d, files in os.walk(os.path.join(base_path,directory)):
- for f in files:
- if "{}".format(f) in excluded:
- continue
- inner_path = os.path.join(p,f)
- if not inner_path.endswith(extension):
- continue
- if to_extension:
- new_path = "{}{}".format(inner_path[:-len(extension)],to_extension)
- func(inner_path,new_path)
- else:
- func(inner_path)
- if not recursive:
- break
-
-def confirm(resp: bool=False, **kargs) -> bool:
- kargs['rest'] = "to this {f2}/*{e2}".format(**kargs) if kargs.get('f2') else ''
- prompt = "{act} all files {rec}matching this expression {f1}/*{e1} {rest}".format(**kargs)
- prompt.format(**kargs)
- prompt = "{} [{}]|{}: ".format(prompt, 'Y' if resp else 'N', 'n' if resp else 'y')
- while True:
- ans = input(prompt).lower()
- if not ans:
- return resp
- if ans not in ['y','n']:
- print( 'Please, enter (y) or (n).')
- continue
- if ans == 'y':
- return True
- else:
- return False
-
-actions = ['cp', 'mv', 'rm']
-@click.command(context_settings=dict(help_option_names=['-h', '--help']))
-@click.option('--action', '-a', type=click.Choice(actions), required=True, help="What do I have to do :-)")
-@click.option('--dir', '-d', 'directory', default='stubs', help="Directory to start the search in!")
-@click.option('--ext', '-e', 'extension', default='.py', help="Extension the action is applied \"from\". Default .py")
-@click.option('--to', '-t', 'to_extension', default='.pyi', help="Extension the action is applied \"to\", when applicable. Default .pyi")
-@click.option('--exclude', '-x', multiple=True, default=('__init__',), help="Ignore files with these names (can be given multiple times).")
-@click.option('--not-recursive', '-n', default=True, is_flag=True, help="Set if you don't want to walk recursively.")
-def main(action: str, directory: str, extension: str, to_extension: str,
- exclude: Tuple[str], not_recursive: bool) -> None:
- """
- This script helps to copy/move/remove files based on their extension.
-
- The three actions will ask you for confirmation.
-
-    Examples (by default the script searches in the stubs directory):
-
- - Change extension of all stubs from .py to .pyi:
-
- python <script.py> -a mv
-
- - Revert the previous action.
-
- python <script.py> -a mv -e .pyi -t .py
-
- - If you want to ignore "awesome.py" files.
-
- python <script.py> -a [cp|mv|rm] -x awesome
-
- - If you want to ignore "awesome.py" and "__init__.py" files.
-
- python <script.py> -a [cp|mv|rm] -x awesome -x __init__
-
-    - If you want to remove all ".todo" files in the "todo" directory, but not recursively:
-
-       python <script.py> -a rm -e .todo -d todo -n
-
- """
- if action not in actions:
- print("Your action have to be one of this: {}".format(', '.join(actions)))
- return
-
- rec = "[Recursively] " if not_recursive else ''
- if not extension.startswith('.'):
- extension = ".{}".format(extension)
- if not to_extension.startswith('.'):
- to_extension = ".{}".format(to_extension)
- if directory.endswith('/'):
- directory = directory[:-1]
- if action == 'cp':
- if confirm(act='Copy',rec=rec, f1=directory, e1=extension, f2=directory, e2=to_extension):
- apply_all(shutil.copy, directory, extension, to_extension, exclude, not_recursive)
- elif action == 'rm':
- if confirm(act='Remove',rec=rec, f1=directory, e1=extension):
- apply_all(os.remove, directory, extension, exclude=exclude, recursive=not_recursive)
- elif action == 'mv':
- if confirm(act='Move',rec=rec, f1=directory, e1=extension, f2=directory, e2=to_extension):
- apply_all(shutil.move, directory, extension, to_extension, exclude, not_recursive)
-
-
-if __name__ == '__main__':
- main()
diff --git a/misc/analyze_cache.py b/misc/analyze_cache.py
deleted file mode 100644
index 643e2bf..0000000
--- a/misc/analyze_cache.py
+++ /dev/null
@@ -1,189 +0,0 @@
-#!/usr/bin/env python
-
-from typing import Any, Dict, Generator, Iterable, List, Optional
-from collections import Counter
-
-import os
-import os.path
-import json
-
-ROOT = ".mypy_cache/3.5"
-
-JsonDict = Dict[str, Any]
-
-class CacheData:
- def __init__(self, filename: str, data_json: JsonDict, meta_json: JsonDict,
- data_size: int, meta_size: int) -> None:
- self.filename = filename
- self.data = data_json
- self.meta = meta_json
- self.data_size = data_size
- self.meta_size = meta_size
-
- @property
- def total_size(self):
- return self.data_size + self.meta_size
-
-
-def extract_classes(chunks: Iterable[CacheData]) -> Iterable[JsonDict]:
- def extract(chunks: Iterable[JsonDict]) -> Iterable[JsonDict]:
- for chunk in chunks:
- if isinstance(chunk, dict):
- yield chunk
- yield from extract(chunk.values())
- elif isinstance(chunk, list):
- yield from extract(chunk)
- yield from extract([chunk.data for chunk in chunks])
-
-
-def load_json(data_path: str, meta_path: str) -> CacheData:
- with open(data_path, 'r') as ds:
- data_json = json.load(ds)
-
- with open(meta_path, 'r') as ms:
- meta_json = json.load(ms)
-
- data_size = os.path.getsize(data_path)
- meta_size = os.path.getsize(meta_path)
-
- return CacheData(data_path.replace(".data.json", ".*.json"),
- data_json, meta_json, data_size, meta_size)
-
-
-def get_files(root: str) -> Iterable[CacheData]:
- for (dirpath, dirnames, filenames) in os.walk(root):
- for filename in filenames:
- if filename.endswith(".data.json"):
- meta_filename = filename.replace(".data.json", ".meta.json")
- yield load_json(
- os.path.join(dirpath, filename),
- os.path.join(dirpath, meta_filename))
-
-
-def pluck(name: str, chunks: Iterable[JsonDict]) -> Iterable[JsonDict]:
- return (chunk for chunk in chunks if chunk['.class'] == name)
-
-
-def report_counter(counter: Counter, amount: Optional[int] = None) -> None:
- for name, count in counter.most_common(amount):
- print(' {: <8} {}'.format(count, name))
- print()
-
-
-def report_most_common(chunks: List[JsonDict], amount: Optional[int] = None) -> None:
- report_counter(Counter(str(chunk) for chunk in chunks), amount)
-
-
-def compress(chunk: JsonDict) -> JsonDict:
- cache = {} # type: Dict[int, JsonDict]
- counter = 0
- def helper(chunk: Any) -> Any:
- nonlocal counter
- if not isinstance(chunk, dict):
- return chunk
-
- if len(chunk) <= 2:
- return chunk
- id = hash(str(chunk))
-
- if id in cache:
- return cache[id]
- else:
- cache[id] = {'.id': counter}
- chunk['.cache_id'] = counter
- counter += 1
-
- for name in sorted(chunk.keys()):
- value = chunk[name]
- if isinstance(value, list):
- chunk[name] = [helper(child) for child in value]
- elif isinstance(value, dict):
- chunk[name] = helper(value)
-
- return chunk
- out = helper(chunk)
- return out
-
-def decompress(chunk: JsonDict) -> JsonDict:
- cache = {} # type: Dict[int, JsonDict]
- def helper(chunk: Any) -> Any:
- if not isinstance(chunk, dict):
- return chunk
- if '.id' in chunk:
- return cache[chunk['.id']]
-
- counter = None
- if '.cache_id' in chunk:
- counter = chunk['.cache_id']
- del chunk['.cache_id']
-
- for name in sorted(chunk.keys()):
- value = chunk[name]
- if isinstance(value, list):
- chunk[name] = [helper(child) for child in value]
- elif isinstance(value, dict):
- chunk[name] = helper(value)
-
- if counter is not None:
- cache[counter] = chunk
-
- return chunk
- return helper(chunk)
-
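To make the intent of compress() and decompress() concrete, here is a toy round-trip using the helpers (and the json import) above; the '.cache_id'/'.id' keys are the ones these functions write, and the sample dicts are made up:

    sub = {".class": "Var", "name": "x", "type": "builtins.int"}
    data = {"kind": "example", "a": dict(sub), "b": dict(sub)}

    packed = compress(data)
    # The first copy of `sub` is tagged with a '.cache_id'; the second,
    # structurally identical copy is replaced by a small {'.id': ...} stub.

    restored = decompress(packed)
    assert json.dumps(restored, sort_keys=True) == \
        json.dumps({"kind": "example", "a": sub, "b": sub}, sort_keys=True)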
-
-
-
-def main() -> None:
- json_chunks = list(get_files(ROOT))
- class_chunks = list(extract_classes(json_chunks))
-
- total_size = sum(chunk.total_size for chunk in json_chunks)
- print("Total cache size: {:.3f} megabytes".format(total_size / (1024 * 1024)))
- print()
-
- class_name_counter = Counter(chunk[".class"] for chunk in class_chunks)
- print("Most commonly used classes:")
- report_counter(class_name_counter)
-
- print("Most common literal chunks:")
- report_most_common(class_chunks, 15)
-
- build = None
- for chunk in json_chunks:
- if 'build.*.json' in chunk.filename:
- build = chunk
- break
- original = json.dumps(build.data, sort_keys=True)
- print("Size of build.data.json, in kilobytes: {:.3f}".format(len(original) / 1024))
-
- build.data = compress(build.data)
- compressed = json.dumps(build.data, sort_keys=True)
- print("Size of compressed build.data.json, in kilobytes: {:.3f}".format(len(compressed) / 1024))
-
- build.data = decompress(build.data)
- decompressed = json.dumps(build.data, sort_keys=True)
- print("Size of decompressed build.data.json, in kilobytes: {:.3f}".format(len(decompressed) / 1024))
-
- print("Lossless conversion back", original == decompressed)
-
-
- '''var_chunks = list(pluck("Var", class_chunks))
- report_most_common(var_chunks, 20)
- print()
-
- #for var in var_chunks:
- # if var['fullname'] == 'self' and not (isinstance(var['type'], dict) and var['type']['.class'] == 'AnyType'):
- # print(var)
- #argument_chunks = list(pluck("Argument", class_chunks))
-
- symbol_table_node_chunks = list(pluck("SymbolTableNode", class_chunks))
- report_most_common(symbol_table_node_chunks, 20)
-
- print()
- print("Most common")
- report_most_common(class_chunks, 20)
- print()'''
-
-
-if __name__ == '__main__':
- main()
diff --git a/misc/async_matrix.py b/misc/async_matrix.py
deleted file mode 100644
index e9a758a..0000000
--- a/misc/async_matrix.py
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/env python3
-"""Test various combinations of generators/coroutines.
-
-This was used to cross-check the errors in the test case
-testFullCoroutineMatrix in test-data/unit/check-async-await.test.
-"""
-
-import sys
-from types import coroutine
-from typing import Any, AsyncIterator, Awaitable, Generator, Iterator
-
-# The various things you might try to use in `await` or `yield from`.
-
-def plain_generator() -> Generator[str, None, int]:
- yield 'a'
- return 1
-
-async def plain_coroutine() -> int:
- return 1
-
-@coroutine
-def decorated_generator() -> Generator[str, None, int]:
- yield 'a'
- return 1
-
-@coroutine
-async def decorated_coroutine() -> int:
- return 1
-
-class It(Iterator[str]):
- stop = False
- def __iter__(self) -> 'It':
- return self
- def __next__(self) -> str:
- if self.stop:
- raise StopIteration('end')
- else:
- self.stop = True
- return 'a'
-
-def other_iterator() -> It:
- return It()
-
-class Aw(Awaitable[int]):
- def __await__(self) -> Generator[str, Any, int]:
- yield 'a'
- return 1
-
-def other_coroutine() -> Aw:
- return Aw()
-
-# The various contexts in which `await` or `yield from` might occur.
-
-def plain_host_generator(func) -> Generator[str, None, None]:
- yield 'a'
- x = 0
- f = func()
- try:
- x = yield from f
- finally:
- try:
- f.close()
- except AttributeError:
- pass
-
-async def plain_host_coroutine(func) -> None:
- x = 0
- x = await func()
-
-@coroutine
-def decorated_host_generator(func) -> Generator[str, None, None]:
- yield 'a'
- x = 0
- f = func()
- try:
- x = yield from f
- finally:
- try:
- f.close()
- except AttributeError:
- pass
-
-@coroutine
-async def decorated_host_coroutine(func) -> None:
- x = 0
- x = await func()
-
-# Main driver.
-
-def main():
- verbose = ('-v' in sys.argv)
- for host in [plain_host_generator, plain_host_coroutine,
- decorated_host_generator, decorated_host_coroutine]:
- print()
- print("==== Host:", host.__name__)
- for func in [plain_generator, plain_coroutine,
- decorated_generator, decorated_coroutine,
- other_iterator, other_coroutine]:
- print(" ---- Func:", func.__name__)
- try:
- f = host(func)
- for i in range(10):
- try:
- x = f.send(None)
- if verbose:
- print(" yield:", x)
- except StopIteration as e:
- if verbose:
- print(" stop:", e.value)
- break
- else:
- if verbose:
- print(" ???? still going")
- except Exception as e:
- print(" error:", repr(e))
-
-# Run main().
-
-if __name__ == '__main__':
- main()
diff --git a/misc/fix_annotate.py b/misc/fix_annotate.py
deleted file mode 100644
index 0b552bf..0000000
--- a/misc/fix_annotate.py
+++ /dev/null
@@ -1,219 +0,0 @@
-"""Fixer for lib2to3 that inserts mypy annotations into all methods.
-
-The simplest way to run this is to copy it into lib2to3's "fixes"
-subdirectory and then run "2to3 -f annotate" over your files.
-
-The fixer transforms e.g.
-
- def foo(self, bar, baz=12):
- return bar + baz
-
-into
-
- def foo(self, bar, baz=12):
- # type: (Any, int) -> Any
- return bar + baz
-
-It does not do type inference but it recognizes some basic default
-argument values such as numbers and strings (and assumes their type
-implies the argument type).
-
-It also uses some basic heuristics to decide whether to ignore the
-first argument:
-
- - always if it's named 'self'
- - if there's a @classmethod decorator
-
-Finally, it knows that __init__() is supposed to return None.
-"""
-
-from __future__ import print_function
-
-import os
-import re
-
-from lib2to3.fixer_base import BaseFix
-from lib2to3.patcomp import compile_pattern
-from lib2to3.pytree import Leaf, Node
-from lib2to3.fixer_util import token, syms, touch_import
-
-
-class FixAnnotate(BaseFix):
-
- # This fixer is compatible with the bottom matcher.
- BM_compatible = True
-
- # This fixer shouldn't run by default.
- explicit = True
-
- # The pattern to match.
- PATTERN = """
- funcdef< 'def' name=any parameters< '(' [args=any] ')' > ':' suite=any+ >
- """
-
- counter = None if not os.getenv('MAXFIXES') else int(os.getenv('MAXFIXES'))
-
- def transform(self, node, results):
- if FixAnnotate.counter is not None:
- if FixAnnotate.counter <= 0:
- return
- suite = results['suite']
- children = suite[0].children
-
- # NOTE: I've reverse-engineered the structure of the parse tree.
- # It's always a list of nodes, the first of which contains the
- # entire suite. Its children seem to be:
- #
- # [0] NEWLINE
- # [1] INDENT
- # [2...n-2] statements (the first may be a docstring)
- # [n-1] DEDENT
- #
- # Comments before the suite are part of the INDENT's prefix.
- #
- # "Compact" functions (e.g. "def foo(x, y): return max(x, y)")
- # have a different structure that isn't matched by PATTERN.
-
- ## print('-'*60)
- ## print(node)
- ## for i, ch in enumerate(children):
- ## print(i, repr(ch.prefix), repr(ch))
-
- # Check if there's already an annotation.
- for ch in children:
- if ch.prefix.lstrip().startswith('# type:'):
- return # There's already a # type: comment here; don't change anything.
-
- # Compute the annotation
- annot = self.make_annotation(node, results)
-
- # Insert '# type: {annot}' comment.
- # For reference, see lib2to3/fixes/fix_tuple_params.py in stdlib.
- if len(children) >= 2 and children[1].type == token.INDENT:
- children[1].prefix = '%s# type: %s\n%s' % (children[1].value, annot, children[1].prefix)
- children[1].changed()
- if FixAnnotate.counter is not None:
- FixAnnotate.counter -= 1
-
- # Also add 'from typing import Any' at the top.
- if 'Any' in annot:
- touch_import('typing', 'Any', node)
-
- def make_annotation(self, node, results):
- name = results['name']
- assert isinstance(name, Leaf), repr(name)
- assert name.type == token.NAME, repr(name)
- decorators = self.get_decorators(node)
- is_method = self.is_method(node)
- if name.value == '__init__' or not self.has_return_exprs(node):
- restype = 'None'
- else:
- restype = 'Any'
- args = results.get('args')
- argtypes = []
- if isinstance(args, Node):
- children = args.children
- elif isinstance(args, Leaf):
- children = [args]
- else:
- children = []
- # Interpret children according to the following grammar:
- # (('*'|'**')? NAME ['=' expr] ','?)*
- stars = inferred_type = ''
- in_default = False
- at_start = True
- for child in children:
- if isinstance(child, Leaf):
- if child.value in ('*', '**'):
- stars += child.value
- elif child.type == token.NAME and not in_default:
- if not is_method or not at_start or 'staticmethod' in decorators:
- inferred_type = 'Any'
- else:
- # Always skip the first argument if it's named 'self'.
- # Always skip the first argument of a class method.
- if child.value == 'self' or 'classmethod' in decorators:
- pass
- else:
- inferred_type = 'Any'
- elif child.value == '=':
- in_default = True
- elif in_default and child.value != ',':
- if child.type == token.NUMBER:
- if re.match(r'\d+[lL]?$', child.value):
- inferred_type = 'int'
- else:
- inferred_type = 'float' # TODO: complex?
- elif child.type == token.STRING:
- if child.value.startswith(('u', 'U')):
- inferred_type = 'unicode'
- else:
- inferred_type = 'str'
- elif child.type == token.NAME and child.value in ('True', 'False'):
- inferred_type = 'bool'
- elif child.value == ',':
- if inferred_type:
- argtypes.append(stars + inferred_type)
- # Reset
- stars = inferred_type = ''
- in_default = False
- at_start = False
- if inferred_type:
- argtypes.append(stars + inferred_type)
- return '(' + ', '.join(argtypes) + ') -> ' + restype
-
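To make the argument-scanning loop above concrete, this is the kind of comment the fixer produces for a method (an illustrative before/after trace with a made-up foo(); not captured output):

    # Before:
    def foo(self, x, y=0, *args):
        return x + y

    # 'self' is skipped, 'x' yields 'Any', 'y=0' yields 'int' and '*args'
    # yields '*Any'; the body contains a "return expr", so make_annotation()
    # returns "(Any, int, *Any) -> Any" and transform() inserts:

    # After:
    def foo(self, x, y=0, *args):
        # type: (Any, int, *Any) -> Any
        return x + y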
- # The parse tree has a different shape when there is a single
- # decorator vs. when there are multiple decorators.
- DECORATED = "decorated< (d=decorator | decorators< dd=decorator+ >) funcdef >"
- decorated = compile_pattern(DECORATED)
-
- def get_decorators(self, node):
- """Return a list of decorators found on a function definition.
-
- This is a list of strings; only simple decorators
- (e.g. @staticmethod) are returned.
-
- If the function is undecorated or only non-simple decorators
- are found, return [].
- """
- if node.parent is None:
- return []
- results = {}
- if not self.decorated.match(node.parent, results):
- return []
- decorators = results.get('dd') or [results['d']]
- decs = []
- for d in decorators:
- for child in d.children:
- if isinstance(child, Leaf) and child.type == token.NAME:
- decs.append(child.value)
- return decs
-
- def is_method(self, node):
- """Return whether the node occurs (directly) inside a class."""
- node = node.parent
- while node is not None:
- if node.type == syms.classdef:
- return True
- if node.type == syms.funcdef:
- return False
- node = node.parent
- return False
-
- RETURN_EXPR = "return_stmt< 'return' any >"
- return_expr = compile_pattern(RETURN_EXPR)
-
- def has_return_exprs(self, node):
- """Traverse the tree below node looking for 'return expr'.
-
-        Return True if at least one 'return expr' is found, False if not.
- (If both 'return' and 'return expr' are found, return True.)
- """
- results = {}
- if self.return_expr.match(node, results):
- return True
- for child in node.children:
- if child.type not in (syms.funcdef, syms.classdef):
- if self.has_return_exprs(child):
- return True
- return False
diff --git a/misc/incremental_checker.py b/misc/incremental_checker.py
deleted file mode 100755
index 515e662..0000000
--- a/misc/incremental_checker.py
+++ /dev/null
@@ -1,356 +0,0 @@
-#!/usr/bin/env python3
-"""
-This file compares the output and runtime of running normal vs incremental mode
-on the history of any arbitrary git repo as a way of performing a sanity check
-to make sure incremental mode is working correctly and efficiently.
-
-It does so by first running mypy without incremental mode on the specified range
-of commits to find the expected result, then rewinds back to the first commit and
-re-runs mypy on the commits with incremental mode enabled to make sure it returns
-the same results.
-
-This script will download and test the official mypy repo by default. Running:
-
- python3 misc/incremental_checker.py last 30
-
-is equivalent to running
-
- python3 misc/incremental_checker.py last 30 \\
- --repo_url https://github.com/python/mypy.git \\
- --file-path mypy
-
-You can choose to run this script against a specific commit id or against the
-last n commits.
-
-To run this script against the last 30 commits:
-
- python3 misc/incremental_checker.py last 30
-
-To run this script starting from the commit id 2a432b:
-
- python3 misc/incremental_checker.py commit 2a432b
-"""
-
-from typing import Any, Dict, List, Optional, Tuple
-
-from argparse import (ArgumentParser, RawDescriptionHelpFormatter,
- ArgumentDefaultsHelpFormatter, Namespace)
-import base64
-import json
-import os
-import random
-import shutil
-import subprocess
-import sys
-import textwrap
-import time
-
-
-CACHE_PATH = ".incremental_checker_cache.json"
-MYPY_REPO_URL = "https://github.com/python/mypy.git"
-MYPY_TARGET_FILE = "mypy"
-
-JsonDict = Dict[str, Any]
-
-
-def print_offset(text: str, indent_length: int = 4) -> None:
- print()
- print(textwrap.indent(text, ' ' * indent_length))
- print()
-
-
-def delete_folder(folder_path: str) -> None:
- if os.path.exists(folder_path):
- shutil.rmtree(folder_path)
-
-
-def execute(command: List[str], fail_on_error: bool = True) -> Tuple[str, str, int]:
- proc = subprocess.Popen(
- ' '.join(command),
- stderr=subprocess.PIPE,
- stdout=subprocess.PIPE,
- shell=True)
- stdout_bytes, stderr_bytes = proc.communicate() # type: Tuple[bytes, bytes]
- stdout, stderr = stdout_bytes.decode('utf-8'), stderr_bytes.decode('utf-8')
- if fail_on_error and proc.returncode != 0:
- print('EXECUTED COMMAND:', repr(command))
- print('RETURN CODE:', proc.returncode)
- print()
- print('STDOUT:')
- print_offset(stdout)
- print('STDERR:')
- print_offset(stderr)
- raise RuntimeError('Unexpected error from external tool.')
- return stdout, stderr, proc.returncode
-
-
-def ensure_environment_is_ready(mypy_path: str, temp_repo_path: str, mypy_cache_path: str) -> None:
- os.chdir(mypy_path)
- delete_folder(temp_repo_path)
- delete_folder(mypy_cache_path)
-
-
-def initialize_repo(repo_url: str, temp_repo_path: str, branch: str) -> None:
- print("Cloning repo {0} to {1}".format(repo_url, temp_repo_path))
- execute(["git", "clone", repo_url, temp_repo_path])
- if branch is not None:
- print("Checking out branch {}".format(branch))
- execute(["git", "-C", temp_repo_path, "checkout", branch])
-
-
-def get_commits(repo_folder_path: str, commit_range: str) -> List[Tuple[str, str]]:
- raw_data, _stderr, _errcode = execute([
- "git", "-C", repo_folder_path, "log", "--reverse", "--oneline", commit_range])
- output = []
- for line in raw_data.strip().split('\n'):
- commit_id, _, message = line.partition(' ')
- output.append((commit_id, message))
- return output
-
-
-def get_commits_starting_at(repo_folder_path: str, start_commit: str) -> List[Tuple[str, str]]:
- print("Fetching commits starting at {0}".format(start_commit))
- return get_commits(repo_folder_path, '{0}^..HEAD'.format(start_commit))
-
-
-def get_nth_commit(repo_folder_path, n: int) -> Tuple[str, str]:
- print("Fetching last {} commits (or all, if there are fewer commits than n)".format(n))
- return get_commits(repo_folder_path, '-{}'.format(n))[0]
-
-
-def run_mypy(target_file_path: Optional[str],
- mypy_cache_path: str,
- mypy_script: Optional[str],
- incremental: bool = True,
- verbose: bool = False) -> Tuple[float, str]:
- """Runs mypy against `target_file_path` and returns what mypy prints to stdout as a string.
-
-    If `incremental` is set to True, this function will store and retrieve all caching data
- inside `mypy_cache_path`. If `verbose` is set to True, this function will pass the "-v -v"
- flags to mypy to make it output debugging information.
- """
- if mypy_script is None:
- command = ["python3", "-m", "mypy"]
- else:
- command = [mypy_script]
- command.extend(["--cache-dir", mypy_cache_path])
- if incremental:
- command.append("--incremental")
- if verbose:
- command.extend(["-v", "-v"])
- if target_file_path is not None:
- command.append(target_file_path)
- start = time.time()
- output, stderr, _ = execute(command, False)
- if stderr != "":
- output = stderr
- runtime = time.time() - start
- return runtime, output
-
-
-def load_cache(incremental_cache_path: str = CACHE_PATH) -> JsonDict:
- if os.path.exists(incremental_cache_path):
- with open(incremental_cache_path, 'r') as stream:
- return json.load(stream)
- else:
- return {}
-
-
-def save_cache(cache: JsonDict, incremental_cache_path: str = CACHE_PATH) -> None:
- with open(incremental_cache_path, 'w') as stream:
- json.dump(cache, stream, indent=2)
-
-
-def set_expected(commits: List[Tuple[str, str]],
- cache: JsonDict,
- temp_repo_path: str,
- target_file_path: Optional[str],
- mypy_cache_path: str,
- mypy_script: Optional[str]) -> None:
- """Populates the given `cache` with the expected results for all of the given `commits`.
-
- This function runs mypy on the `target_file_path` inside the `temp_repo_path`, and stores
- the result in the `cache`.
-
- If `cache` already contains results for a particular commit, this function will
- skip evaluating that commit and move on to the next."""
- for commit_id, message in commits:
- if commit_id in cache:
- print('Skipping commit (already cached): {0}: "{1}"'.format(commit_id, message))
- else:
- print('Caching expected output for commit {0}: "{1}"'.format(commit_id, message))
- execute(["git", "-C", temp_repo_path, "checkout", commit_id])
- runtime, output = run_mypy(target_file_path, mypy_cache_path, mypy_script,
- incremental=False)
- cache[commit_id] = {'runtime': runtime, 'output': output}
- if output == "":
- print(" Clean output ({:.3f} sec)".format(runtime))
- else:
- print(" Output ({:.3f} sec)".format(runtime))
- print_offset(output, 8)
- print()
-
-
-def test_incremental(commits: List[Tuple[str, str]],
- cache: JsonDict,
- temp_repo_path: str,
- target_file_path: Optional[str],
- mypy_cache_path: str,
- mypy_script: Optional[str]) -> None:
- """Runs incremental mode on all `commits` to verify the output matches the expected output.
-
- This function runs mypy on the `target_file_path` inside the `temp_repo_path`. The
- expected output must be stored inside of the given `cache`.
- """
- print("Note: first commit is evaluated twice to warm up cache")
- commits = [commits[0]] + commits
- for commit_id, message in commits:
- print('Now testing commit {0}: "{1}"'.format(commit_id, message))
- execute(["git", "-C", temp_repo_path, "checkout", commit_id])
- runtime, output = run_mypy(target_file_path, mypy_cache_path, mypy_script,
- incremental=True)
- expected_runtime = cache[commit_id]['runtime'] # type: float
- expected_output = cache[commit_id]['output'] # type: str
- if output != expected_output:
- print(" Output does not match expected result!")
- print(" Expected output ({:.3f} sec):".format(expected_runtime))
- print_offset(expected_output, 8)
- print(" Actual output: ({:.3f} sec):".format(runtime))
- print_offset(output, 8)
- else:
- print(" Output matches expected result!")
- print(" Incremental: {:.3f} sec".format(runtime))
- print(" Original: {:.3f} sec".format(expected_runtime))
-
-
-def cleanup(temp_repo_path: str, mypy_cache_path: str) -> None:
- delete_folder(temp_repo_path)
- delete_folder(mypy_cache_path)
-
-
-def test_repo(target_repo_url: str, temp_repo_path: str,
- target_file_path: Optional[str],
- mypy_path: str, incremental_cache_path: str, mypy_cache_path: str,
- range_type: str, range_start: str, branch: str,
- params: Optional[Namespace] = None) -> None:
- """Tests incremental mode against the repo specified in `target_repo_url`.
-
- This algorithm runs in five main stages:
-
- 1. Clones `target_repo_url` into the `temp_repo_path` folder locally,
- checking out the specified `branch` if applicable.
-    2. Examines the repo's history to get the list of all commits to
-       test incremental mode on.
- 3. Runs mypy WITHOUT incremental mode against the `target_file_path` (which is
- assumed to be located inside the `temp_repo_path`), testing each commit
- discovered in stage two.
- - If the results of running mypy WITHOUT incremental mode on a
- particular commit are already cached inside the `incremental_cache_path`,
- skip that commit to save time.
- - Cache the results after finishing.
- 4. Rewind back to the first commit, and run mypy WITH incremental mode
- against the `target_file_path` commit-by-commit, and compare to the expected
- results found in stage 3.
- 5. Delete all unnecessary temp files.
- """
-    # Stage 1: Clone repo and get ready to begin testing
- ensure_environment_is_ready(mypy_path, temp_repo_path, mypy_cache_path)
- initialize_repo(target_repo_url, temp_repo_path, branch)
-
- # Stage 2: Get all commits we want to test
- if range_type == "last":
- start_commit = get_nth_commit(temp_repo_path, int(range_start))[0]
- elif range_type == "commit":
- start_commit = range_start
- else:
- raise RuntimeError("Invalid option: {}".format(range_type))
- commits = get_commits_starting_at(temp_repo_path, start_commit)
- if params is not None and params.sample:
- seed = params.seed or base64.urlsafe_b64encode(os.urandom(15)).decode('ascii')
- random.seed(seed)
- commits = random.sample(commits, params.sample)
- print("Sampled down to %d commits using random seed %s" % (len(commits), seed))
-
- # Stage 3: Find and cache expected results for each commit (without incremental mode)
- cache = load_cache(incremental_cache_path)
- set_expected(commits, cache, temp_repo_path, target_file_path, mypy_cache_path,
- mypy_script=params.mypy_script)
- save_cache(cache, incremental_cache_path)
-
- # Stage 4: Rewind and re-run mypy (with incremental mode enabled)
- test_incremental(commits, cache, temp_repo_path, target_file_path, mypy_cache_path,
- mypy_script=params.mypy_script)
-
- # Stage 5: Remove temp files
- cleanup(temp_repo_path, mypy_cache_path)
-
-
-def main() -> None:
- help_factory = (lambda prog: RawDescriptionHelpFormatter(prog=prog, max_help_position=32))
- parser = ArgumentParser(
- prog='incremental_checker',
- description=__doc__,
- formatter_class=help_factory)
-
- parser.add_argument("range_type", metavar="START_TYPE", choices=["last", "commit"],
- help="must be one of 'last' or 'commit'")
- parser.add_argument("range_start", metavar="COMMIT_ID_OR_NUMBER",
- help="the commit id to start from, or the number of "
- "commits to move back (see above)")
- parser.add_argument("-r", "--repo_url", default=MYPY_REPO_URL, metavar="URL",
- help="the repo to clone and run tests on")
- parser.add_argument("-f", "--file-path", default=MYPY_TARGET_FILE, metavar="FILE",
- help="the name of the file or directory to typecheck")
- parser.add_argument("--cache-path", default=CACHE_PATH, metavar="DIR",
- help="sets a custom location to store cache data")
- parser.add_argument("--branch", default=None, metavar="NAME",
- help="check out and test a custom branch"
- "uses the default if not specified")
- parser.add_argument("--sample", type=int, help="use a random sample of size SAMPLE")
- parser.add_argument("--seed", type=str, help="random seed")
- parser.add_argument("--mypy-script", type=str, help="alternate mypy script to run")
-
- if len(sys.argv[1:]) == 0:
- parser.print_help()
- parser.exit()
-
- params = parser.parse_args(sys.argv[1:])
-
- # Make all paths absolute so we avoid having to worry about being in the right folder
-
- # The path to this specific script (incremental_checker.py).
- script_path = os.path.abspath(sys.argv[0])
-
- # The path to the mypy repo.
- mypy_path = os.path.abspath(os.path.dirname(os.path.dirname(script_path)))
-
- # The folder the cloned repo will reside in.
- temp_repo_path = os.path.abspath(os.path.join(mypy_path, "tmp_repo"))
-
- # The particular file or package to typecheck inside the repo.
- if params.file_path:
- target_file_path = os.path.abspath(os.path.join(temp_repo_path, params.file_path))
- else:
- # Allow `-f ''` to clear target_file_path.
- target_file_path = None
-
- # The path to where the incremental checker cache data is stored.
- incremental_cache_path = os.path.abspath(params.cache_path)
-
- # The path to store the mypy incremental mode cache data
- mypy_cache_path = os.path.abspath(os.path.join(mypy_path, "misc", ".mypy_cache"))
-
- print("Assuming mypy is located at {0}".format(mypy_path))
- print("Temp repo will be cloned at {0}".format(temp_repo_path))
- print("Testing file/dir located at {0}".format(target_file_path))
- print("Using cache data located at {0}".format(incremental_cache_path))
- print()
-
- test_repo(params.repo_url, temp_repo_path, target_file_path,
- mypy_path, incremental_cache_path, mypy_cache_path,
- params.range_type, params.range_start, params.branch,
- params)
-
-
-if __name__ == '__main__':
- main()
diff --git a/misc/macs.el b/misc/macs.el
deleted file mode 100644
index 67d80aa..0000000
--- a/misc/macs.el
+++ /dev/null
@@ -1,22 +0,0 @@
-; Example Emacs integration; shows type of expression in region.
-
-(defun mypy-show-region ()
- "Show type of variable at point."
- (interactive)
- (let ((here (region-beginning))
- (there (region-end))
- (filename (buffer-file-name)))
- (let ((hereline (line-number-at-pos here))
- (herecol (save-excursion (goto-char here) (current-column)))
- (thereline (line-number-at-pos there))
- (therecol (save-excursion (goto-char there) (current-column))))
- (shell-command
- (format "cd ~/src/mypy; python3 ./scripts/find_type.py %s %s %s %s %s python3 -m mypy -i mypy"
- filename hereline herecol thereline therecol)
- )
- )
- )
- )
-
-; I like to bind this to ^X-t.
-(global-set-key "\C-xt" 'mypy-show-region)
diff --git a/misc/perf_checker.py b/misc/perf_checker.py
deleted file mode 100644
index e55f8cc..0000000
--- a/misc/perf_checker.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env python3
-
-from typing import Callable, List, Tuple
-
-import os
-import shutil
-import statistics
-import subprocess
-import textwrap
-import time
-
-
-class Command:
- def __init__(self, setup: Callable[[], None], command: Callable[[], None]) -> None:
- self.setup = setup
- self.command = command
-
-
-def print_offset(text: str, indent_length: int = 4) -> None:
- print()
- print(textwrap.indent(text, ' ' * indent_length))
- print()
-
-
-def delete_folder(folder_path: str) -> None:
- if os.path.exists(folder_path):
- shutil.rmtree(folder_path)
-
-
-def execute(command: List[str]) -> None:
- proc = subprocess.Popen(
- ' '.join(command),
- stderr=subprocess.PIPE,
- stdout=subprocess.PIPE,
- shell=True)
- stdout_bytes, stderr_bytes = proc.communicate() # type: Tuple[bytes, bytes]
- stdout, stderr = stdout_bytes.decode('utf-8'), stderr_bytes.decode('utf-8')
- if proc.returncode != 0:
- print('EXECUTED COMMAND:', repr(command))
- print('RETURN CODE:', proc.returncode)
- print()
- print('STDOUT:')
- print_offset(stdout)
- print('STDERR:')
- print_offset(stderr)
- raise RuntimeError('Unexpected error from external tool.')
-
-
-def trial(num_trials: int, command: Command) -> List[float]:
- trials = []
- for i in range(num_trials):
- command.setup()
- start = time.time()
- command.command()
- delta = time.time() - start
- trials.append(delta)
- return trials
-
-
-def report(name: str, times: List[float]) -> None:
- print("{}:".format(name))
- print(" Times: {}".format(times))
- print(" Mean: {}".format(statistics.mean(times)))
- print(" Stdev: {}".format(statistics.stdev(times)))
- print()
-
-
-def main() -> None:
- trials = 3
-
- print("Testing baseline")
- baseline = trial(trials, Command(
- lambda: None,
- lambda: execute(["python3", "-m", "mypy", "mypy"])))
- report("Baseline", baseline)
-
- print("Testing cold cache")
- cold_cache = trial(trials, Command(
- lambda: delete_folder(".mypy_cache"),
- lambda: execute(["python3", "-m", "mypy", "-i", "mypy"])))
- report("Cold cache", cold_cache)
-
- print("Testing warm cache")
- execute(["python3", "-m", "mypy", "-i", "mypy"])
- warm_cache = trial(trials, Command(
- lambda: None,
- lambda: execute(["python3", "-m", "mypy", "-i", "mypy"])))
- report("Warm cache", warm_cache)
-
-
-if __name__ == '__main__':
- main()
-
diff --git a/misc/remove-eol-whitespace.sh b/misc/remove-eol-whitespace.sh
deleted file mode 100644
index 3da6b9d..0000000
--- a/misc/remove-eol-whitespace.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/sh
-
-# Remove trailing whitespace from all non-binary files in a git repo.
-
-# From https://gist.github.com/dpaluy/3690668; originally from here:
-# http://unix.stackexchange.com/questions/36233/how-to-skip-file-in-sed-if-it-contains-regex/36240#36240
-
-git grep -I --name-only -z -e '' | xargs -0 sed -i -e 's/[ \t]\+\(\r\?\)$/\1/'
diff --git a/misc/test_case_to_actual.py b/misc/test_case_to_actual.py
deleted file mode 100644
index 9a91bb1..0000000
--- a/misc/test_case_to_actual.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from typing import Iterator, List
-import sys
-import os
-import os.path
-
-
-class Chunk:
- def __init__(self, header_type: str, args: str) -> None:
- self.header_type = header_type
- self.args = args
- self.lines = [] # type: List[str]
-
-
-def is_header(line: str) -> bool:
- return line.startswith('[') and line.endswith(']')
-
-
-def normalize(lines: Iterator[str]) -> Iterator[str]:
- return (line.rstrip() for line in lines)
-
-
-def produce_chunks(lines: Iterator[str]) -> Iterator[Chunk]:
- current_chunk = None # type: Chunk
- for line in normalize(lines):
- if is_header(line):
- if current_chunk is not None:
- yield current_chunk
- parts = line[1:-1].split(' ', 1)
- args = parts[1] if len(parts) > 1 else ''
- current_chunk = Chunk(parts[0], args)
- else:
- current_chunk.lines.append(line)
- if current_chunk is not None:
- yield current_chunk
-
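A quick sketch of the test-file format this script consumes, run against the helpers above (the sample string is made up):

    import io

    sample = "[case testExample]\nx = 1\n[file other.py]\ny = 2\n"
    chunks = list(produce_chunks(iter(io.StringIO(sample))))
    # chunks[0]: header_type='case', args='testExample', lines=['x = 1']
    # chunks[1]: header_type='file', args='other.py',    lines=['y = 2']
    # write_tree(root, iter(chunks)) would then create
    #   <root>/testExample/main.py  containing "x = 1"
    #   <root>/testExample/other.py containing "y = 2"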
-
-def write_out(filename: str, lines: List[str]) -> None:
- os.makedirs(os.path.dirname(filename), exist_ok=True)
- with open(filename, 'w') as stream:
- stream.write('\n'.join(lines))
-
-
-def write_tree(root: str, chunks: Iterator[Chunk]) -> None:
- init = next(chunks)
- assert init.header_type == 'case'
-
- root = os.path.join(root, init.args)
- write_out(os.path.join(root, 'main.py'), init.lines)
-
- for chunk in chunks:
- if chunk.header_type == 'file' and chunk.args.endswith('.py'):
- write_out(os.path.join(root, chunk.args), chunk.lines)
-
-
-def help() -> None:
- print("Usage: python misc/test_case_to_actual.py test_file.txt root_path")
-
-
-def main() -> None:
- if len(sys.argv) != 3:
- help()
- return
-
- test_file_path, root_path = sys.argv[1], sys.argv[2]
- with open(test_file_path, 'r') as stream:
- chunks = produce_chunks(iter(stream))
- write_tree(root_path, chunks)
-
-
-if __name__ == '__main__':
- main()
diff --git a/misc/touch_checker.py b/misc/touch_checker.py
deleted file mode 100644
index c44afe4..0000000
--- a/misc/touch_checker.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/usr/bin/env python3
-
-from typing import Callable, List, Tuple, Optional
-
-import sys
-import glob
-import os
-import shutil
-import statistics
-import subprocess
-import textwrap
-import time
-
-
-def print_offset(text: str, indent_length: int = 4) -> None:
- print()
- print(textwrap.indent(text, ' ' * indent_length))
- print()
-
-
-def delete_folder(folder_path: str) -> None:
- if os.path.exists(folder_path):
- shutil.rmtree(folder_path)
-
-
-def execute(command: List[str]) -> None:
- proc = subprocess.Popen(
- ' '.join(command),
- stderr=subprocess.PIPE,
- stdout=subprocess.PIPE,
- shell=True)
- stdout_bytes, stderr_bytes = proc.communicate() # type: Tuple[bytes, bytes]
- stdout, stderr = stdout_bytes.decode('utf-8'), stderr_bytes.decode('utf-8')
- if proc.returncode != 0:
- print('EXECUTED COMMAND:', repr(command))
- print('RETURN CODE:', proc.returncode)
- print()
- print('STDOUT:')
- print_offset(stdout)
- print('STDERR:')
- print_offset(stderr)
- print()
-
-
-Command = Callable[[], None]
-
-
-def test(setup: Command, command: Command, teardown: Command) -> float:
- setup()
- start = time.time()
- command()
- end = time.time() - start
- teardown()
- return end
-
-
-def make_touch_wrappers(filename: str) -> Tuple[Command, Command]:
- def setup() -> None:
- execute(["touch", filename])
- def teardown() -> None:
- pass
- return setup, teardown
-
-
-def make_change_wrappers(filename: str) -> Tuple[Command, Command]:
- copy = None # type: Optional[str]
-
- def setup() -> None:
- nonlocal copy
- with open(filename, 'r') as stream:
- copy = stream.read()
- with open(filename, 'a') as stream:
- stream.write('\n\nfoo = 3')
-
- def teardown() -> None:
- assert copy is not None
- with open(filename, 'w') as stream:
- stream.write(copy)
-
- # Re-run to reset cache
- execute(["python3", "-m", "mypy", "-i", "mypy"]),
-
- return setup, teardown
-
-def main() -> None:
- if len(sys.argv) != 2 or sys.argv[1] not in {'touch', 'change'}:
- print("First argument should be 'touch' or 'change'")
- return
-
- if sys.argv[1] == 'touch':
- make_wrappers = make_touch_wrappers
- verb = "Touching"
- elif sys.argv[1] == 'change':
- make_wrappers = make_change_wrappers
- verb = "Changing"
- else:
- raise AssertionError()
-
- print("Setting up...")
-
- baseline = test(
- lambda: None,
- lambda: execute(["python3", "-m", "mypy", "mypy"]),
- lambda: None)
- print("Baseline: {}".format(baseline))
-
- cold = test(
- lambda: delete_folder(".mypy_cache"),
- lambda: execute(["python3", "-m", "mypy", "-i", "mypy"]),
- lambda: None)
- print("Cold cache: {}".format(cold))
-
- warm = test(
- lambda: None,
- lambda: execute(["python3", "-m", "mypy", "-i", "mypy"]),
- lambda: None)
- print("Warm cache: {}".format(warm))
-
- print()
-
- deltas = []
- for filename in glob.iglob("mypy/**/*.py", recursive=True):
- print("{} {}".format(verb, filename))
-
- setup, teardown = make_wrappers(filename)
- delta = test(
- setup,
- lambda: execute(["python3", "-m", "mypy", "-i", "mypy"]),
- teardown)
- print(" Time: {}".format(delta))
- deltas.append(delta)
- print()
-
- print("Initial:")
- print(" Baseline: {}".format(baseline))
- print(" Cold cache: {}".format(cold))
- print(" Warm cache: {}".format(warm))
- print()
- print("Aggregate:")
- print(" Times: {}".format(deltas))
- print(" Mean: {}".format(statistics.mean(deltas)))
- print(" Median: {}".format(statistics.median(deltas)))
- print(" Stdev: {}".format(statistics.stdev(deltas)))
- print(" Min: {}".format(min(deltas)))
- print(" Max: {}".format(max(deltas)))
- print(" Total: {}".format(sum(deltas)))
- print()
-
-if __name__ == '__main__':
- main()
-
diff --git a/misc/variadics.py b/misc/variadics.py
deleted file mode 100644
index 9200288..0000000
--- a/misc/variadics.py
+++ /dev/null
@@ -1,54 +0,0 @@
-"""Example of code generation approach to variadics.
-
-See https://github.com/python/typing/issues/193#issuecomment-236383893
-"""
-
-LIMIT = 5
-BOUND = 'object'
-
-def prelude(limit: int, bound: str) -> None:
- print('from typing import Callable, Iterable, Iterator, Tuple, TypeVar, overload')
- print('Ts = TypeVar(\'Ts\', bound={bound})'.format(bound=bound))
- print('R = TypeVar(\'R\')')
- for i in range(LIMIT):
- print('T{i} = TypeVar(\'T{i}\', bound={bound})'.format(i=i+1, bound=bound))
-
-def expand_template(template: str,
- arg_template: str = 'arg{i}: {Ts}',
- lower: int = 0,
- limit: int = LIMIT) -> None:
- print()
- for i in range(lower, limit):
- tvs = ', '.join('T{i}'.format(i=j+1) for j in range(i))
- args = ', '.join(arg_template.format(i=j+1, Ts='T{}'.format(j+1))
- for j in range(i))
- print('@overload')
- s = template.format(Ts=tvs, argsTs=args)
- s = s.replace('Tuple[]', 'Tuple[()]')
- print(s)
- args_l = [arg_template.format(i=j+1, Ts='Ts') for j in range(limit)]
- args_l.append('*' + (arg_template.format(i='s', Ts='Ts')))
- args = ', '.join(args_l)
- s = template.format(Ts='Ts, ...', argsTs=args)
- s = s.replace('Callable[[Ts, ...]', 'Callable[...')
- print('@overload')
- print(s)
-
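For reference, the zip() template used in main() below expands to roughly the following overload stubs (illustrative, derived by reading expand_template() above):

    @overload
    def zip() -> Tuple[()]: ...
    @overload
    def zip(arg1: T1) -> Tuple[T1]: ...
    @overload
    def zip(arg1: T1, arg2: T2) -> Tuple[T1, T2]: ...
    # ... and so on up to T5, then the catch-all:
    @overload
    def zip(arg1: Ts, arg2: Ts, arg3: Ts, arg4: Ts, arg5: Ts, *args: Ts) -> Tuple[Ts, ...]: ...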
-def main():
- prelude(LIMIT, BOUND)
-
- # map()
- expand_template('def map(func: Callable[[{Ts}], R], {argsTs}) -> R: ...',
- lower=1)
- # zip()
- expand_template('def zip({argsTs}) -> Tuple[{Ts}]: ...')
-
- # Naomi's examples
- expand_template('def my_zip({argsTs}) -> Iterator[Tuple[{Ts}]]: ...',
- 'arg{i}: Iterable[{Ts}]')
- expand_template('def make_check({argsTs}) -> Callable[[{Ts}], bool]: ...')
- expand_template('def my_map(f: Callable[[{Ts}], R], {argsTs}) -> Iterator[R]: ...',
- 'arg{i}: Iterable[{Ts}]')
-
-
-main()
diff --git a/mypy.egg-info/PKG-INFO b/mypy.egg-info/PKG-INFO
index 236181d..135e4bd 100644
--- a/mypy.egg-info/PKG-INFO
+++ b/mypy.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: mypy
-Version: 0.480.dev0
+Version: 0.511
Summary: Optional static typing for Python
Home-page: http://www.mypy-lang.org/
Author: Jukka Lehtosalo
diff --git a/mypy.egg-info/SOURCES.txt b/mypy.egg-info/SOURCES.txt
index 5694d26..5e48494 100644
--- a/mypy.egg-info/SOURCES.txt
+++ b/mypy.egg-info/SOURCES.txt
@@ -1,70 +1,6 @@
-.gitignore
-.gitmodules
-.travis.yml
-CONTRIBUTING.md
-CREDITS
-LICENSE
MANIFEST.in
-README.md
-appveyor.yml
-build-requirements.txt
-conftest.py
-mypy_self_check.ini
-mypy_strict_optional.ini
-pytest.ini
-runtests.py
setup.cfg
setup.py
-test-requirements.txt
-typeshed
-docs/Makefile
-docs/README.md
-docs/make.bat
-docs/requirements-docs.txt
-docs/source/additional_features.rst
-docs/source/basics.rst
-docs/source/builtin_types.rst
-docs/source/casts.rst
-docs/source/cheat_sheet.rst
-docs/source/cheat_sheet_py3.rst
-docs/source/class_basics.rst
-docs/source/command_line.rst
-docs/source/common_issues.rst
-docs/source/conf.py
-docs/source/config_file.rst
-docs/source/duck_type_compatibility.rst
-docs/source/dynamic_typing.rst
-docs/source/faq.rst
-docs/source/function_overloading.rst
-docs/source/generics.rst
-docs/source/getting_started.rst
-docs/source/index.rst
-docs/source/introduction.rst
-docs/source/kinds_of_types.rst
-docs/source/python2.rst
-docs/source/python36.rst
-docs/source/revision_history.rst
-docs/source/supported_python_features.rst
-docs/source/type_inference_and_annotations.rst
-extensions/README.md
-extensions/mypy_extensions.py
-extensions/setup.py
-lib-typing/2.7/setup.py
-lib-typing/2.7/test_typing.py
-lib-typing/2.7/typing.py
-lib-typing/3.2/test_typing.py
-lib-typing/3.2/typing.py
-misc/actions_stubs.py
-misc/analyze_cache.py
-misc/async_matrix.py
-misc/fix_annotate.py
-misc/incremental_checker.py
-misc/macs.el
-misc/perf_checker.py
-misc/remove-eol-whitespace.sh
-misc/test_case_to_actual.py
-misc/touch_checker.py
-misc/variadics.py
mypy/__init__.py
mypy/__main__.py
mypy/api.py
@@ -89,7 +25,6 @@ mypy/git.py
mypy/indirection.py
mypy/infer.py
mypy/join.py
-mypy/lex.py
mypy/main.py
mypy/maptype.py
mypy/meet.py
@@ -98,7 +33,6 @@ mypy/moduleinfo.py
mypy/nodes.py
mypy/options.py
mypy/parse.py
-mypy/parsetype.py
mypy/report.py
mypy/sametypes.py
mypy/semanal.py
@@ -112,6 +46,7 @@ mypy/stubutil.py
mypy/subtypes.py
mypy/traverser.py
mypy/treetransform.py
+mypy/tvar_scope.py
mypy/typeanal.py
mypy/typefixture.py
mypy/types.py
@@ -125,233 +60,31 @@ mypy.egg-info/SOURCES.txt
mypy.egg-info/dependency_links.txt
mypy.egg-info/requires.txt
mypy.egg-info/top_level.txt
-mypy/myunit/__init__.py
-mypy/myunit/__main__.py
-mypy/test/__init__.py
-mypy/test/collect.py
-mypy/test/config.py
-mypy/test/data.py
-mypy/test/helpers.py
-mypy/test/testargs.py
-mypy/test/testcheck.py
-mypy/test/testcmdline.py
-mypy/test/testextensions.py
-mypy/test/testgraph.py
-mypy/test/testinfer.py
-mypy/test/testlex.py
-mypy/test/testmoduleinfo.py
-mypy/test/testparse.py
-mypy/test/testpythoneval.py
-mypy/test/testreports.py
-mypy/test/testsemanal.py
-mypy/test/testsolve.py
-mypy/test/teststubgen.py
-mypy/test/testsubtypes.py
-mypy/test/testtransform.py
-mypy/test/testtypegen.py
-mypy/test/testtypes.py
-mypy/test/update.py
-pinfer/.gitignore
-pinfer/LICENSE
-pinfer/README
-pinfer/__init__.py
-pinfer/inspect3.py
-pinfer/p.py
-pinfer/pinfer.py
-pinfer/test_pinfer.py
-pinfer/test_pinfer3.py
-pinfer/unparse.py
-pinfer/unparse3.py
+scripts/dumpmodule.py
scripts/find_type.py
scripts/mypy
scripts/mypy.bat
scripts/stubgen
-test-data/.flake8
-test-data/samples/bottles.py
-test-data/samples/class.py
-test-data/samples/cmdline.py
-test-data/samples/crawl.py
-test-data/samples/crawl2.py
-test-data/samples/dict.py
-test-data/samples/fib.py
-test-data/samples/files.py
-test-data/samples/for.py
-test-data/samples/generators.py
-test-data/samples/greet.py
-test-data/samples/guess.py
-test-data/samples/hello.py
-test-data/samples/input.py
-test-data/samples/itertool.py
-test-data/samples/readme.txt
-test-data/samples/regexp.py
-test-data/stdlib-samples/3.2/base64.py
-test-data/stdlib-samples/3.2/fnmatch.py
-test-data/stdlib-samples/3.2/genericpath.py
-test-data/stdlib-samples/3.2/getopt.py
-test-data/stdlib-samples/3.2/glob.py
-test-data/stdlib-samples/3.2/posixpath.py
-test-data/stdlib-samples/3.2/pprint.py
-test-data/stdlib-samples/3.2/random.py
-test-data/stdlib-samples/3.2/shutil.py
-test-data/stdlib-samples/3.2/subprocess.py
-test-data/stdlib-samples/3.2/tempfile.py
-test-data/stdlib-samples/3.2/textwrap.py
-test-data/stdlib-samples/3.2/incomplete/logging/__init__.py
-test-data/stdlib-samples/3.2/incomplete/urllib/__init__.py
-test-data/stdlib-samples/3.2/incomplete/urllib/parse.py
-test-data/stdlib-samples/3.2/test/__init__.py
-test-data/stdlib-samples/3.2/test/randv2_32.pck
-test-data/stdlib-samples/3.2/test/randv2_64.pck
-test-data/stdlib-samples/3.2/test/randv3.pck
-test-data/stdlib-samples/3.2/test/support.py
-test-data/stdlib-samples/3.2/test/test_base64.py
-test-data/stdlib-samples/3.2/test/test_fnmatch.py
-test-data/stdlib-samples/3.2/test/test_genericpath.py
-test-data/stdlib-samples/3.2/test/test_getopt.py
-test-data/stdlib-samples/3.2/test/test_glob.py
-test-data/stdlib-samples/3.2/test/test_posixpath.py
-test-data/stdlib-samples/3.2/test/test_pprint.py
-test-data/stdlib-samples/3.2/test/test_random.py
-test-data/stdlib-samples/3.2/test/test_set.py
-test-data/stdlib-samples/3.2/test/test_shutil.py
-test-data/stdlib-samples/3.2/test/test_subprocess.py
-test-data/stdlib-samples/3.2/test/test_tempfile.py
-test-data/stdlib-samples/3.2/test/test_textwrap.py
-test-data/stdlib-samples/3.2/test/tf_inherit_check.py
-test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py
-test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py
-test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py
-test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py
-test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py
-test-data/unit/check-abstract.test
-test-data/unit/check-async-await.test
-test-data/unit/check-basic.test
-test-data/unit/check-bound.test
-test-data/unit/check-callable.test
-test-data/unit/check-class-namedtuple.test
-test-data/unit/check-classes.test
-test-data/unit/check-columns.test
-test-data/unit/check-dynamic-typing.test
-test-data/unit/check-expressions.test
-test-data/unit/check-fastparse.test
-test-data/unit/check-flags.test
-test-data/unit/check-functions.test
-test-data/unit/check-generic-subtyping.test
-test-data/unit/check-generics.test
-test-data/unit/check-ignore.test
-test-data/unit/check-incremental.test
-test-data/unit/check-inference-context.test
-test-data/unit/check-inference.test
-test-data/unit/check-isinstance.test
-test-data/unit/check-kwargs.test
-test-data/unit/check-lists.test
-test-data/unit/check-modules.test
-test-data/unit/check-multiple-inheritance.test
-test-data/unit/check-namedtuple.test
-test-data/unit/check-newsyntax.test
-test-data/unit/check-newtype.test
-test-data/unit/check-optional.test
-test-data/unit/check-overloading.test
-test-data/unit/check-python2.test
-test-data/unit/check-selftype.test
-test-data/unit/check-semanal-error.test
-test-data/unit/check-statements.test
-test-data/unit/check-super.test
-test-data/unit/check-tuples.test
-test-data/unit/check-type-aliases.test
-test-data/unit/check-type-checks.test
-test-data/unit/check-type-promotion.test
-test-data/unit/check-typeddict.test
-test-data/unit/check-typevar-values.test
-test-data/unit/check-underscores.test
-test-data/unit/check-unions.test
-test-data/unit/check-unreachable-code.test
-test-data/unit/check-unsupported.test
-test-data/unit/check-varargs.test
-test-data/unit/check-warnings.test
-test-data/unit/cmdline.test
-test-data/unit/parse-errors.test
-test-data/unit/parse-python2.test
-test-data/unit/parse.test
-test-data/unit/python2eval.test
-test-data/unit/pythoneval-asyncio.test
-test-data/unit/pythoneval-enum.test
-test-data/unit/pythoneval.test
-test-data/unit/semanal-abstractclasses.test
-test-data/unit/semanal-basic.test
-test-data/unit/semanal-classes.test
-test-data/unit/semanal-errors.test
-test-data/unit/semanal-expressions.test
-test-data/unit/semanal-modules.test
-test-data/unit/semanal-namedtuple.test
-test-data/unit/semanal-python2.test
-test-data/unit/semanal-statements.test
-test-data/unit/semanal-symtable.test
-test-data/unit/semanal-typealiases.test
-test-data/unit/semanal-typeddict.test
-test-data/unit/semanal-typeinfo.test
-test-data/unit/semanal-types.test
-test-data/unit/stubgen.test
-test-data/unit/typexport-basic.test
-test-data/unit/fixtures/__new__.pyi
-test-data/unit/fixtures/alias.pyi
-test-data/unit/fixtures/args.pyi
-test-data/unit/fixtures/async_await.pyi
-test-data/unit/fixtures/bool.pyi
-test-data/unit/fixtures/callable.pyi
-test-data/unit/fixtures/classmethod.pyi
-test-data/unit/fixtures/complex.pyi
-test-data/unit/fixtures/dict.pyi
-test-data/unit/fixtures/exception.pyi
-test-data/unit/fixtures/for.pyi
-test-data/unit/fixtures/function.pyi
-test-data/unit/fixtures/isinstance.pyi
-test-data/unit/fixtures/isinstancelist.pyi
-test-data/unit/fixtures/list.pyi
-test-data/unit/fixtures/module.pyi
-test-data/unit/fixtures/module_all.pyi
-test-data/unit/fixtures/module_all_python2.pyi
-test-data/unit/fixtures/ops.pyi
-test-data/unit/fixtures/primitives.pyi
-test-data/unit/fixtures/property.pyi
-test-data/unit/fixtures/python2.pyi
-test-data/unit/fixtures/set.pyi
-test-data/unit/fixtures/slice.pyi
-test-data/unit/fixtures/staticmethod.pyi
-test-data/unit/fixtures/transform.pyi
-test-data/unit/fixtures/tuple-simple.pyi
-test-data/unit/fixtures/tuple.pyi
-test-data/unit/fixtures/union.pyi
-test-data/unit/lib-stub/__builtin__.pyi
-test-data/unit/lib-stub/abc.pyi
-test-data/unit/lib-stub/builtins.pyi
-test-data/unit/lib-stub/collections.pyi
-test-data/unit/lib-stub/mypy_extensions.pyi
-test-data/unit/lib-stub/sys.pyi
-test-data/unit/lib-stub/types.pyi
-test-data/unit/lib-stub/typing.pyi
-tmp-test-dirs/.gitignore
+scripts/stubtest.py
typeshed/stdlib/2/BaseHTTPServer.pyi
typeshed/stdlib/2/ConfigParser.pyi
typeshed/stdlib/2/Cookie.pyi
typeshed/stdlib/2/HTMLParser.pyi
typeshed/stdlib/2/Queue.pyi
+typeshed/stdlib/2/SimpleHTTPServer.pyi
typeshed/stdlib/2/SocketServer.pyi
typeshed/stdlib/2/StringIO.pyi
typeshed/stdlib/2/UserDict.pyi
typeshed/stdlib/2/UserList.pyi
typeshed/stdlib/2/UserString.pyi
typeshed/stdlib/2/__builtin__.pyi
-typeshed/stdlib/2/__future__.pyi
typeshed/stdlib/2/_ast.pyi
-typeshed/stdlib/2/_codecs.pyi
typeshed/stdlib/2/_collections.pyi
typeshed/stdlib/2/_functools.pyi
typeshed/stdlib/2/_hotshot.pyi
typeshed/stdlib/2/_io.pyi
typeshed/stdlib/2/_json.pyi
typeshed/stdlib/2/_md5.pyi
-typeshed/stdlib/2/_random.pyi
typeshed/stdlib/2/_sha.pyi
typeshed/stdlib/2/_sha256.pyi
typeshed/stdlib/2/_sha512.pyi
@@ -366,18 +99,14 @@ typeshed/stdlib/2/abc.pyi
typeshed/stdlib/2/array.pyi
typeshed/stdlib/2/ast.pyi
typeshed/stdlib/2/atexit.pyi
-typeshed/stdlib/2/base64.pyi
-typeshed/stdlib/2/binascii.pyi
typeshed/stdlib/2/builtins.pyi
typeshed/stdlib/2/cPickle.pyi
typeshed/stdlib/2/cStringIO.pyi
typeshed/stdlib/2/calendar.pyi
-typeshed/stdlib/2/codecs.pyi
typeshed/stdlib/2/collections.pyi
typeshed/stdlib/2/commands.pyi
typeshed/stdlib/2/compileall.pyi
typeshed/stdlib/2/cookielib.pyi
-typeshed/stdlib/2/copy.pyi
typeshed/stdlib/2/csv.pyi
typeshed/stdlib/2/datetime.pyi
typeshed/stdlib/2/decimal.pyi
@@ -408,58 +137,56 @@ typeshed/stdlib/2/io.pyi
typeshed/stdlib/2/itertools.pyi
typeshed/stdlib/2/json.pyi
typeshed/stdlib/2/linecache.pyi
+typeshed/stdlib/2/macpath.pyi
typeshed/stdlib/2/markupbase.pyi
typeshed/stdlib/2/md5.pyi
typeshed/stdlib/2/mimetools.pyi
-typeshed/stdlib/2/optparse.pyi
+typeshed/stdlib/2/ntpath.pyi
+typeshed/stdlib/2/nturl2path.pyi
+typeshed/stdlib/2/os2emxpath.pyi
typeshed/stdlib/2/pdb.pyi
typeshed/stdlib/2/pickle.pyi
typeshed/stdlib/2/pipes.pyi
typeshed/stdlib/2/platform.pyi
typeshed/stdlib/2/posix.pyi
typeshed/stdlib/2/posixpath.pyi
-typeshed/stdlib/2/pprint.pyi
typeshed/stdlib/2/pwd.pyi
-typeshed/stdlib/2/quopri.pyi
+typeshed/stdlib/2/pydoc.pyi
typeshed/stdlib/2/random.pyi
typeshed/stdlib/2/re.pyi
+typeshed/stdlib/2/repr.pyi
typeshed/stdlib/2/resource.pyi
typeshed/stdlib/2/rfc822.pyi
typeshed/stdlib/2/robotparser.pyi
typeshed/stdlib/2/runpy.pyi
typeshed/stdlib/2/select.pyi
+typeshed/stdlib/2/sets.pyi
typeshed/stdlib/2/sha.pyi
typeshed/stdlib/2/shelve.pyi
typeshed/stdlib/2/shlex.pyi
typeshed/stdlib/2/shutil.pyi
typeshed/stdlib/2/signal.pyi
typeshed/stdlib/2/smtplib.pyi
-typeshed/stdlib/2/socket.pyi
-typeshed/stdlib/2/spwd.pyi
typeshed/stdlib/2/ssl.pyi
typeshed/stdlib/2/stat.pyi
typeshed/stdlib/2/string.pyi
typeshed/stdlib/2/strop.pyi
-typeshed/stdlib/2/struct.pyi
typeshed/stdlib/2/subprocess.pyi
+typeshed/stdlib/2/symbol.pyi
typeshed/stdlib/2/sys.pyi
typeshed/stdlib/2/tempfile.pyi
typeshed/stdlib/2/textwrap.pyi
typeshed/stdlib/2/thread.pyi
typeshed/stdlib/2/time.pyi
-typeshed/stdlib/2/token.pyi
typeshed/stdlib/2/tokenize.pyi
typeshed/stdlib/2/types.pyi
typeshed/stdlib/2/typing.pyi
-typeshed/stdlib/2/unicodedata.pyi
typeshed/stdlib/2/unittest.pyi
typeshed/stdlib/2/urllib.pyi
typeshed/stdlib/2/urllib2.pyi
typeshed/stdlib/2/urlparse.pyi
-typeshed/stdlib/2/uuid.pyi
typeshed/stdlib/2/weakref.pyi
-typeshed/stdlib/2/xxsubtype.pyi
-typeshed/stdlib/2/zlib.pyi
+typeshed/stdlib/2/xmlrpclib.pyi
typeshed/stdlib/2/distutils/__init__.pyi
typeshed/stdlib/2/distutils/emxccompiler.pyi
typeshed/stdlib/2/email/MIMEText.pyi
@@ -483,20 +210,33 @@ typeshed/stdlib/2/sqlite3/dbapi2.pyi
typeshed/stdlib/2/wsgiref/__init__.pyi
typeshed/stdlib/2/wsgiref/types.pyi
typeshed/stdlib/2/wsgiref/validate.pyi
+typeshed/stdlib/2and3/__future__.pyi
typeshed/stdlib/2and3/_bisect.pyi
+typeshed/stdlib/2and3/_codecs.pyi
typeshed/stdlib/2and3/_heapq.pyi
+typeshed/stdlib/2and3/_random.pyi
typeshed/stdlib/2and3/argparse.pyi
typeshed/stdlib/2and3/asynchat.pyi
typeshed/stdlib/2and3/asyncore.pyi
+typeshed/stdlib/2and3/base64.pyi
+typeshed/stdlib/2and3/binascii.pyi
+typeshed/stdlib/2and3/binhex.pyi
typeshed/stdlib/2and3/bisect.pyi
typeshed/stdlib/2and3/bz2.pyi
typeshed/stdlib/2and3/cProfile.pyi
+typeshed/stdlib/2and3/cgi.pyi
typeshed/stdlib/2and3/cmath.pyi
+typeshed/stdlib/2and3/cmd.pyi
typeshed/stdlib/2and3/code.pyi
+typeshed/stdlib/2and3/codecs.pyi
typeshed/stdlib/2and3/colorsys.pyi
typeshed/stdlib/2and3/contextlib.pyi
+typeshed/stdlib/2and3/copy.pyi
+typeshed/stdlib/2and3/dis.pyi
typeshed/stdlib/2and3/errno.pyi
+typeshed/stdlib/2and3/filecmp.pyi
typeshed/stdlib/2and3/fractions.pyi
+typeshed/stdlib/2and3/ftplib.pyi
typeshed/stdlib/2and3/hmac.pyi
typeshed/stdlib/2and3/keyword.pyi
typeshed/stdlib/2and3/locale.pyi
@@ -505,23 +245,54 @@ typeshed/stdlib/2and3/math.pyi
typeshed/stdlib/2and3/mimetypes.pyi
typeshed/stdlib/2and3/mmap.pyi
typeshed/stdlib/2and3/numbers.pyi
+typeshed/stdlib/2and3/opcode.pyi
typeshed/stdlib/2and3/operator.pyi
+typeshed/stdlib/2and3/optparse.pyi
+typeshed/stdlib/2and3/pickletools.pyi
typeshed/stdlib/2and3/pkgutil.pyi
typeshed/stdlib/2and3/plistlib.pyi
+typeshed/stdlib/2and3/poplib.pyi
+typeshed/stdlib/2and3/pprint.pyi
typeshed/stdlib/2and3/profile.pyi
typeshed/stdlib/2and3/pstats.pyi
+typeshed/stdlib/2and3/pty.pyi
+typeshed/stdlib/2and3/py_compile.pyi
+typeshed/stdlib/2and3/pyclbr.pyi
+typeshed/stdlib/2and3/quopri.pyi
typeshed/stdlib/2and3/readline.pyi
typeshed/stdlib/2and3/rlcompleter.pyi
+typeshed/stdlib/2and3/sched.pyi
typeshed/stdlib/2and3/site.pyi
+typeshed/stdlib/2and3/smtpd.pyi
+typeshed/stdlib/2and3/sndhdr.pyi
+typeshed/stdlib/2and3/socket.pyi
+typeshed/stdlib/2and3/spwd.pyi
+typeshed/stdlib/2and3/stringprep.pyi
+typeshed/stdlib/2and3/struct.pyi
+typeshed/stdlib/2and3/sunau.pyi
+typeshed/stdlib/2and3/symtable.pyi
+typeshed/stdlib/2and3/sysconfig.pyi
typeshed/stdlib/2and3/syslog.pyi
+typeshed/stdlib/2and3/tabnanny.pyi
typeshed/stdlib/2and3/tarfile.pyi
+typeshed/stdlib/2and3/telnetlib.pyi
typeshed/stdlib/2and3/termios.pyi
typeshed/stdlib/2and3/threading.pyi
+typeshed/stdlib/2and3/timeit.pyi
+typeshed/stdlib/2and3/token.pyi
+typeshed/stdlib/2and3/trace.pyi
typeshed/stdlib/2and3/traceback.pyi
+typeshed/stdlib/2and3/tty.pyi
+typeshed/stdlib/2and3/unicodedata.pyi
+typeshed/stdlib/2and3/uu.pyi
+typeshed/stdlib/2and3/uuid.pyi
typeshed/stdlib/2and3/warnings.pyi
+typeshed/stdlib/2and3/wave.pyi
typeshed/stdlib/2and3/webbrowser.pyi
+typeshed/stdlib/2and3/xdrlib.pyi
typeshed/stdlib/2and3/zipfile.pyi
typeshed/stdlib/2and3/zipimport.pyi
+typeshed/stdlib/2and3/zlib.pyi
typeshed/stdlib/2and3/distutils/__init__.pyi
typeshed/stdlib/2and3/distutils/archive_util.pyi
typeshed/stdlib/2and3/distutils/bcppcompiler.pyi
@@ -568,6 +339,17 @@ typeshed/stdlib/2and3/distutils/command/install_lib.pyi
typeshed/stdlib/2and3/distutils/command/install_scripts.pyi
typeshed/stdlib/2and3/distutils/command/register.pyi
typeshed/stdlib/2and3/distutils/command/sdist.pyi
+typeshed/stdlib/2and3/lib2to3/__init__.pyi
+typeshed/stdlib/2and3/lib2to3/pygram.pyi
+typeshed/stdlib/2and3/lib2to3/pytree.pyi
+typeshed/stdlib/2and3/lib2to3/pgen2/__init__.pyi
+typeshed/stdlib/2and3/lib2to3/pgen2/driver.pyi
+typeshed/stdlib/2and3/lib2to3/pgen2/grammar.pyi
+typeshed/stdlib/2and3/lib2to3/pgen2/literals.pyi
+typeshed/stdlib/2and3/lib2to3/pgen2/parse.pyi
+typeshed/stdlib/2and3/lib2to3/pgen2/pgen.pyi
+typeshed/stdlib/2and3/lib2to3/pgen2/token.pyi
+typeshed/stdlib/2and3/lib2to3/pgen2/tokenize.pyi
typeshed/stdlib/2and3/logging/__init__.pyi
typeshed/stdlib/2and3/logging/config.pyi
typeshed/stdlib/2and3/logging/handlers.pyi
@@ -581,18 +363,16 @@ typeshed/stdlib/2and3/xml/sax/__init__.pyi
typeshed/stdlib/2and3/xml/sax/handler.pyi
typeshed/stdlib/2and3/xml/sax/saxutils.pyi
typeshed/stdlib/2and3/xml/sax/xmlreader.pyi
-typeshed/stdlib/3/__future__.pyi
typeshed/stdlib/3/_ast.pyi
-typeshed/stdlib/3/_codecs.pyi
typeshed/stdlib/3/_compression.pyi
typeshed/stdlib/3/_curses.pyi
typeshed/stdlib/3/_dummy_thread.pyi
+typeshed/stdlib/3/_imp.pyi
typeshed/stdlib/3/_importlib_modulespec.pyi
typeshed/stdlib/3/_json.pyi
typeshed/stdlib/3/_markupbase.pyi
typeshed/stdlib/3/_operator.pyi
typeshed/stdlib/3/_posixsubprocess.pyi
-typeshed/stdlib/3/_random.pyi
typeshed/stdlib/3/_subprocess.pyi
typeshed/stdlib/3/_thread.pyi
typeshed/stdlib/3/_warnings.pyi
@@ -600,19 +380,14 @@ typeshed/stdlib/3/abc.pyi
typeshed/stdlib/3/array.pyi
typeshed/stdlib/3/ast.pyi
typeshed/stdlib/3/atexit.pyi
-typeshed/stdlib/3/base64.pyi
-typeshed/stdlib/3/binascii.pyi
typeshed/stdlib/3/builtins.pyi
typeshed/stdlib/3/calendar.pyi
-typeshed/stdlib/3/cgi.pyi
-typeshed/stdlib/3/codecs.pyi
+typeshed/stdlib/3/compileall.pyi
typeshed/stdlib/3/configparser.pyi
-typeshed/stdlib/3/copy.pyi
typeshed/stdlib/3/csv.pyi
typeshed/stdlib/3/datetime.pyi
typeshed/stdlib/3/decimal.pyi
typeshed/stdlib/3/difflib.pyi
-typeshed/stdlib/3/dis.pyi
typeshed/stdlib/3/doctest.pyi
typeshed/stdlib/3/fcntl.pyi
typeshed/stdlib/3/fileinput.pyi
@@ -631,22 +406,23 @@ typeshed/stdlib/3/imp.pyi
typeshed/stdlib/3/inspect.pyi
typeshed/stdlib/3/io.pyi
typeshed/stdlib/3/itertools.pyi
-typeshed/stdlib/3/json.pyi
typeshed/stdlib/3/linecache.pyi
+typeshed/stdlib/3/macpath.pyi
typeshed/stdlib/3/msvcrt.pyi
-typeshed/stdlib/3/opcode.pyi
+typeshed/stdlib/3/nntplib.pyi
+typeshed/stdlib/3/ntpath.pyi
+typeshed/stdlib/3/nturl2path.pyi
typeshed/stdlib/3/pdb.pyi
typeshed/stdlib/3/pickle.pyi
typeshed/stdlib/3/pipes.pyi
typeshed/stdlib/3/platform.pyi
typeshed/stdlib/3/posix.pyi
typeshed/stdlib/3/posixpath.pyi
-typeshed/stdlib/3/pprint.pyi
typeshed/stdlib/3/pwd.pyi
-typeshed/stdlib/3/pyclbr.pyi
typeshed/stdlib/3/queue.pyi
typeshed/stdlib/3/random.pyi
typeshed/stdlib/3/re.pyi
+typeshed/stdlib/3/reprlib.pyi
typeshed/stdlib/3/resource.pyi
typeshed/stdlib/3/runpy.pyi
typeshed/stdlib/3/select.pyi
@@ -655,32 +431,28 @@ typeshed/stdlib/3/shlex.pyi
typeshed/stdlib/3/shutil.pyi
typeshed/stdlib/3/signal.pyi
typeshed/stdlib/3/smtplib.pyi
-typeshed/stdlib/3/socket.pyi
typeshed/stdlib/3/socketserver.pyi
typeshed/stdlib/3/ssl.pyi
typeshed/stdlib/3/stat.pyi
typeshed/stdlib/3/string.pyi
-typeshed/stdlib/3/struct.pyi
typeshed/stdlib/3/subprocess.pyi
+typeshed/stdlib/3/symbol.pyi
typeshed/stdlib/3/sys.pyi
-typeshed/stdlib/3/sysconfig.pyi
typeshed/stdlib/3/tempfile.pyi
typeshed/stdlib/3/textwrap.pyi
typeshed/stdlib/3/time.pyi
-typeshed/stdlib/3/token.pyi
typeshed/stdlib/3/tokenize.pyi
typeshed/stdlib/3/types.pyi
typeshed/stdlib/3/typing.pyi
-typeshed/stdlib/3/unicodedata.pyi
-typeshed/stdlib/3/uuid.pyi
typeshed/stdlib/3/weakref.pyi
-typeshed/stdlib/3/zlib.pyi
typeshed/stdlib/3.3/ipaddress.pyi
typeshed/stdlib/3.4/_stat.pyi
typeshed/stdlib/3.4/_tracemalloc.pyi
typeshed/stdlib/3.4/enum.pyi
typeshed/stdlib/3.4/pathlib.pyi
typeshed/stdlib/3.4/selectors.pyi
+typeshed/stdlib/3.4/statistics.pyi
+typeshed/stdlib/3.4/tracemalloc.pyi
typeshed/stdlib/3.4/asyncio/__init__.pyi
typeshed/stdlib/3.4/asyncio/coroutines.pyi
typeshed/stdlib/3.4/asyncio/events.pyi
@@ -692,6 +464,7 @@ typeshed/stdlib/3.4/asyncio/streams.pyi
typeshed/stdlib/3.4/asyncio/subprocess.pyi
typeshed/stdlib/3.4/asyncio/tasks.pyi
typeshed/stdlib/3.4/asyncio/transports.pyi
+typeshed/stdlib/3.5/zipapp.pyi
typeshed/stdlib/3.6/secrets.pyi
typeshed/stdlib/3/collections/__init__.pyi
typeshed/stdlib/3/collections/abc.pyi
@@ -738,6 +511,9 @@ typeshed/stdlib/3/importlib/__init__.pyi
typeshed/stdlib/3/importlib/abc.pyi
typeshed/stdlib/3/importlib/machinery.pyi
typeshed/stdlib/3/importlib/util.pyi
+typeshed/stdlib/3/json/__init__.pyi
+typeshed/stdlib/3/json/decoder.pyi
+typeshed/stdlib/3/json/encoder.pyi
typeshed/stdlib/3/multiprocessing/__init__.pyi
typeshed/stdlib/3/multiprocessing/managers.pyi
typeshed/stdlib/3/multiprocessing/pool.pyi
@@ -760,6 +536,7 @@ typeshed/stdlib/3/urllib/robotparser.pyi
typeshed/stdlib/3/wsgiref/__init__.pyi
typeshed/stdlib/3/wsgiref/types.pyi
typeshed/stdlib/3/wsgiref/validate.pyi
+typeshed/tests/mypy_selftest.py
typeshed/tests/mypy_test.py
typeshed/tests/pytype_test.py
typeshed/third_party/2/croniter.pyi
@@ -811,41 +588,6 @@ typeshed/third_party/2/redis/client.pyi
typeshed/third_party/2/redis/connection.pyi
typeshed/third_party/2/redis/exceptions.pyi
typeshed/third_party/2/redis/utils.pyi
-typeshed/third_party/2/requests/__init__.pyi
-typeshed/third_party/2/requests/adapters.pyi
-typeshed/third_party/2/requests/api.pyi
-typeshed/third_party/2/requests/auth.pyi
-typeshed/third_party/2/requests/compat.pyi
-typeshed/third_party/2/requests/cookies.pyi
-typeshed/third_party/2/requests/exceptions.pyi
-typeshed/third_party/2/requests/hooks.pyi
-typeshed/third_party/2/requests/models.pyi
-typeshed/third_party/2/requests/sessions.pyi
-typeshed/third_party/2/requests/status_codes.pyi
-typeshed/third_party/2/requests/structures.pyi
-typeshed/third_party/2/requests/utils.pyi
-typeshed/third_party/2/requests/packages/__init__.pyi
-typeshed/third_party/2/requests/packages/urllib3/__init__.pyi
-typeshed/third_party/2/requests/packages/urllib3/_collections.pyi
-typeshed/third_party/2/requests/packages/urllib3/connection.pyi
-typeshed/third_party/2/requests/packages/urllib3/connectionpool.pyi
-typeshed/third_party/2/requests/packages/urllib3/exceptions.pyi
-typeshed/third_party/2/requests/packages/urllib3/fields.pyi
-typeshed/third_party/2/requests/packages/urllib3/filepost.pyi
-typeshed/third_party/2/requests/packages/urllib3/poolmanager.pyi
-typeshed/third_party/2/requests/packages/urllib3/request.pyi
-typeshed/third_party/2/requests/packages/urllib3/response.pyi
-typeshed/third_party/2/requests/packages/urllib3/contrib/__init__.pyi
-typeshed/third_party/2/requests/packages/urllib3/packages/__init__.pyi
-typeshed/third_party/2/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi
-typeshed/third_party/2/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi
-typeshed/third_party/2/requests/packages/urllib3/util/__init__.pyi
-typeshed/third_party/2/requests/packages/urllib3/util/connection.pyi
-typeshed/third_party/2/requests/packages/urllib3/util/request.pyi
-typeshed/third_party/2/requests/packages/urllib3/util/response.pyi
-typeshed/third_party/2/requests/packages/urllib3/util/retry.pyi
-typeshed/third_party/2/requests/packages/urllib3/util/timeout.pyi
-typeshed/third_party/2/requests/packages/urllib3/util/url.pyi
typeshed/third_party/2/routes/__init__.pyi
typeshed/third_party/2/routes/mapper.pyi
typeshed/third_party/2/routes/util.pyi
@@ -860,12 +602,6 @@ typeshed/third_party/2/simplejson/encoder.pyi
typeshed/third_party/2/simplejson/scanner.pyi
typeshed/third_party/2/six/__init__.pyi
typeshed/third_party/2/six/moves/__init__.pyi
-typeshed/third_party/2/six/moves/cPickle.pyi
-typeshed/third_party/2/six/moves/urllib_error.pyi
-typeshed/third_party/2/six/moves/urllib_parse.pyi
-typeshed/third_party/2/six/moves/urllib_request.pyi
-typeshed/third_party/2/six/moves/urllib_response.pyi
-typeshed/third_party/2/six/moves/urllib_robotparser.pyi
typeshed/third_party/2/six/moves/urllib/__init__.pyi
typeshed/third_party/2/six/moves/urllib/error.pyi
typeshed/third_party/2/six/moves/urllib/parse.pyi
@@ -932,22 +668,6 @@ typeshed/third_party/2/werkzeug/debug/__init__.pyi
typeshed/third_party/2/werkzeug/debug/console.pyi
typeshed/third_party/2/werkzeug/debug/repr.pyi
typeshed/third_party/2/werkzeug/debug/tbtools.pyi
-typeshed/third_party/2/yaml/__init__.pyi
-typeshed/third_party/2/yaml/composer.pyi
-typeshed/third_party/2/yaml/constructor.pyi
-typeshed/third_party/2/yaml/dumper.pyi
-typeshed/third_party/2/yaml/emitter.pyi
-typeshed/third_party/2/yaml/error.pyi
-typeshed/third_party/2/yaml/events.pyi
-typeshed/third_party/2/yaml/loader.pyi
-typeshed/third_party/2/yaml/nodes.pyi
-typeshed/third_party/2/yaml/parser.pyi
-typeshed/third_party/2/yaml/reader.pyi
-typeshed/third_party/2/yaml/representer.pyi
-typeshed/third_party/2/yaml/resolver.pyi
-typeshed/third_party/2/yaml/scanner.pyi
-typeshed/third_party/2/yaml/serializer.pyi
-typeshed/third_party/2/yaml/tokens.pyi
typeshed/third_party/2and3/backports_abc.pyi
typeshed/third_party/2and3/certifi.pyi
typeshed/third_party/2and3/mypy_extensions.pyi
@@ -1021,6 +741,9 @@ typeshed/third_party/2and3/boto/plugin.pyi
typeshed/third_party/2and3/boto/regioninfo.pyi
typeshed/third_party/2and3/boto/ec2/__init__.pyi
typeshed/third_party/2and3/boto/elb/__init__.pyi
+typeshed/third_party/2and3/boto/kms/__init__.pyi
+typeshed/third_party/2and3/boto/kms/exceptions.pyi
+typeshed/third_party/2and3/boto/kms/layer1.pyi
typeshed/third_party/2and3/boto/s3/__init__.pyi
typeshed/third_party/2and3/boto/s3/acl.pyi
typeshed/third_party/2and3/boto/s3/bucket.pyi
@@ -1084,45 +807,58 @@ typeshed/third_party/2and3/pymysql/constants/SERVER_STATUS.pyi
typeshed/third_party/2and3/pymysql/constants/__init__.pyi
typeshed/third_party/2and3/pytz/__init__.pyi
typeshed/third_party/2and3/pytz/lazy.pyi
-typeshed/third_party/2and3/sqlalchemy/__init__.pyi
-typeshed/third_party/2and3/sqlalchemy/exc.pyi
-typeshed/third_party/2and3/sqlalchemy/inspection.pyi
-typeshed/third_party/2and3/sqlalchemy/log.pyi
-typeshed/third_party/2and3/sqlalchemy/pool.pyi
-typeshed/third_party/2and3/sqlalchemy/schema.pyi
-typeshed/third_party/2and3/sqlalchemy/types.pyi
-typeshed/third_party/2and3/sqlalchemy/databases/__init__.pyi
-typeshed/third_party/2and3/sqlalchemy/databases/mysql.pyi
-typeshed/third_party/2and3/sqlalchemy/dialects/__init__.pyi
-typeshed/third_party/2and3/sqlalchemy/dialects/mysql/__init__.pyi
-typeshed/third_party/2and3/sqlalchemy/dialects/mysql/base.pyi
-typeshed/third_party/2and3/sqlalchemy/engine/__init__.pyi
-typeshed/third_party/2and3/sqlalchemy/engine/base.pyi
-typeshed/third_party/2and3/sqlalchemy/engine/strategies.pyi
-typeshed/third_party/2and3/sqlalchemy/engine/url.pyi
-typeshed/third_party/2and3/sqlalchemy/orm/__init__.pyi
-typeshed/third_party/2and3/sqlalchemy/orm/session.pyi
-typeshed/third_party/2and3/sqlalchemy/orm/util.pyi
-typeshed/third_party/2and3/sqlalchemy/sql/__init__.pyi
-typeshed/third_party/2and3/sqlalchemy/sql/annotation.pyi
-typeshed/third_party/2and3/sqlalchemy/sql/base.pyi
-typeshed/third_party/2and3/sqlalchemy/sql/ddl.pyi
-typeshed/third_party/2and3/sqlalchemy/sql/dml.pyi
-typeshed/third_party/2and3/sqlalchemy/sql/elements.pyi
-typeshed/third_party/2and3/sqlalchemy/sql/expression.pyi
-typeshed/third_party/2and3/sqlalchemy/sql/functions.pyi
-typeshed/third_party/2and3/sqlalchemy/sql/naming.pyi
-typeshed/third_party/2and3/sqlalchemy/sql/operators.pyi
-typeshed/third_party/2and3/sqlalchemy/sql/schema.pyi
-typeshed/third_party/2and3/sqlalchemy/sql/selectable.pyi
-typeshed/third_party/2and3/sqlalchemy/sql/sqltypes.pyi
-typeshed/third_party/2and3/sqlalchemy/sql/type_api.pyi
-typeshed/third_party/2and3/sqlalchemy/sql/visitors.pyi
-typeshed/third_party/2and3/sqlalchemy/util/__init__.pyi
-typeshed/third_party/2and3/sqlalchemy/util/_collections.pyi
-typeshed/third_party/2and3/sqlalchemy/util/compat.pyi
-typeshed/third_party/2and3/sqlalchemy/util/deprecations.pyi
-typeshed/third_party/2and3/sqlalchemy/util/langhelpers.pyi
+typeshed/third_party/2and3/requests/__init__.pyi
+typeshed/third_party/2and3/requests/adapters.pyi
+typeshed/third_party/2and3/requests/api.pyi
+typeshed/third_party/2and3/requests/auth.pyi
+typeshed/third_party/2and3/requests/compat.pyi
+typeshed/third_party/2and3/requests/cookies.pyi
+typeshed/third_party/2and3/requests/exceptions.pyi
+typeshed/third_party/2and3/requests/hooks.pyi
+typeshed/third_party/2and3/requests/models.pyi
+typeshed/third_party/2and3/requests/sessions.pyi
+typeshed/third_party/2and3/requests/status_codes.pyi
+typeshed/third_party/2and3/requests/structures.pyi
+typeshed/third_party/2and3/requests/utils.pyi
+typeshed/third_party/2and3/requests/packages/__init__.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/__init__.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/_collections.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/connection.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/connectionpool.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/exceptions.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/fields.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/filepost.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/poolmanager.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/request.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/response.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/contrib/__init__.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/packages/__init__.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/util/__init__.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/util/connection.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/util/request.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/util/response.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/util/retry.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/util/ssl_.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/util/timeout.pyi
+typeshed/third_party/2and3/requests/packages/urllib3/util/url.pyi
+typeshed/third_party/2and3/yaml/__init__.pyi
+typeshed/third_party/2and3/yaml/composer.pyi
+typeshed/third_party/2and3/yaml/constructor.pyi
+typeshed/third_party/2and3/yaml/dumper.pyi
+typeshed/third_party/2and3/yaml/emitter.pyi
+typeshed/third_party/2and3/yaml/error.pyi
+typeshed/third_party/2and3/yaml/events.pyi
+typeshed/third_party/2and3/yaml/loader.pyi
+typeshed/third_party/2and3/yaml/nodes.pyi
+typeshed/third_party/2and3/yaml/parser.pyi
+typeshed/third_party/2and3/yaml/reader.pyi
+typeshed/third_party/2and3/yaml/representer.pyi
+typeshed/third_party/2and3/yaml/resolver.pyi
+typeshed/third_party/2and3/yaml/scanner.pyi
+typeshed/third_party/2and3/yaml/serializer.pyi
+typeshed/third_party/2and3/yaml/tokens.pyi
typeshed/third_party/3/enum.pyi
typeshed/third_party/3/itsdangerous.pyi
typeshed/third_party/3/pkg_resources.pyi
@@ -1153,50 +889,8 @@ typeshed/third_party/3/docutils/parsers/rst/states.pyi
typeshed/third_party/3/lxml/__init__.pyi
typeshed/third_party/3/lxml/etree.pyi
typeshed/third_party/3/lxml/objectify.pyi
-typeshed/third_party/3/requests/__init__.pyi
-typeshed/third_party/3/requests/adapters.pyi
-typeshed/third_party/3/requests/api.pyi
-typeshed/third_party/3/requests/auth.pyi
-typeshed/third_party/3/requests/compat.pyi
-typeshed/third_party/3/requests/cookies.pyi
-typeshed/third_party/3/requests/exceptions.pyi
-typeshed/third_party/3/requests/hooks.pyi
-typeshed/third_party/3/requests/models.pyi
-typeshed/third_party/3/requests/sessions.pyi
-typeshed/third_party/3/requests/status_codes.pyi
-typeshed/third_party/3/requests/structures.pyi
-typeshed/third_party/3/requests/utils.pyi
-typeshed/third_party/3/requests/packages/__init__.pyi
-typeshed/third_party/3/requests/packages/urllib3/__init__.pyi
-typeshed/third_party/3/requests/packages/urllib3/_collections.pyi
-typeshed/third_party/3/requests/packages/urllib3/connection.pyi
-typeshed/third_party/3/requests/packages/urllib3/connectionpool.pyi
-typeshed/third_party/3/requests/packages/urllib3/exceptions.pyi
-typeshed/third_party/3/requests/packages/urllib3/fields.pyi
-typeshed/third_party/3/requests/packages/urllib3/filepost.pyi
-typeshed/third_party/3/requests/packages/urllib3/poolmanager.pyi
-typeshed/third_party/3/requests/packages/urllib3/request.pyi
-typeshed/third_party/3/requests/packages/urllib3/response.pyi
-typeshed/third_party/3/requests/packages/urllib3/contrib/__init__.pyi
-typeshed/third_party/3/requests/packages/urllib3/packages/__init__.pyi
-typeshed/third_party/3/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi
-typeshed/third_party/3/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi
-typeshed/third_party/3/requests/packages/urllib3/util/__init__.pyi
-typeshed/third_party/3/requests/packages/urllib3/util/connection.pyi
-typeshed/third_party/3/requests/packages/urllib3/util/request.pyi
-typeshed/third_party/3/requests/packages/urllib3/util/response.pyi
-typeshed/third_party/3/requests/packages/urllib3/util/retry.pyi
-typeshed/third_party/3/requests/packages/urllib3/util/ssl_.pyi
-typeshed/third_party/3/requests/packages/urllib3/util/timeout.pyi
-typeshed/third_party/3/requests/packages/urllib3/util/url.pyi
typeshed/third_party/3/six/__init__.pyi
typeshed/third_party/3/six/moves/__init__.pyi
-typeshed/third_party/3/six/moves/cPickle.pyi
-typeshed/third_party/3/six/moves/urllib_error.pyi
-typeshed/third_party/3/six/moves/urllib_parse.pyi
-typeshed/third_party/3/six/moves/urllib_request.pyi
-typeshed/third_party/3/six/moves/urllib_response.pyi
-typeshed/third_party/3/six/moves/urllib_robotparser.pyi
typeshed/third_party/3/six/moves/urllib/__init__.pyi
typeshed/third_party/3/six/moves/urllib/error.pyi
typeshed/third_party/3/six/moves/urllib/parse.pyi
@@ -1205,7 +899,7 @@ typeshed/third_party/3/six/moves/urllib/response.pyi
typeshed/third_party/3/six/moves/urllib/robotparser.pyi
typeshed/third_party/3/typed_ast/__init__.pyi
typeshed/third_party/3/typed_ast/ast27.pyi
-typeshed/third_party/3/typed_ast/ast35.pyi
+typeshed/third_party/3/typed_ast/ast3.pyi
typeshed/third_party/3/typed_ast/conversions.pyi
typeshed/third_party/3/werkzeug/__init__.pyi
typeshed/third_party/3/werkzeug/_compat.pyi
diff --git a/mypy.egg-info/requires.txt b/mypy.egg-info/requires.txt
index 34d5806..8b20c5b 100644
--- a/mypy.egg-info/requires.txt
+++ b/mypy.egg-info/requires.txt
@@ -1 +1 @@
-typed-ast >= 0.6.3
+typed-ast >= 1.0.3, < 1.1.0
diff --git a/mypy/api.py b/mypy/api.py
index f6d0d5a..b8bb86f 100644
--- a/mypy/api.py
+++ b/mypy/api.py
@@ -41,9 +41,7 @@ from typing import List, Tuple
from mypy.main import main
-def run(params: List[str]) -> Tuple[str, str, int]:
- sys.argv = [''] + params
-
+def run(args: List[str]) -> Tuple[str, str, int]:
old_stdout = sys.stdout
new_stdout = StringIO()
sys.stdout = new_stdout
@@ -53,12 +51,12 @@ def run(params: List[str]) -> Tuple[str, str, int]:
sys.stderr = new_stderr
try:
- main(None)
+ main(None, args=args)
exit_status = 0
except SystemExit as system_exit:
exit_status = system_exit.code
-
- sys.stdout = old_stdout
- sys.stderr = old_stderr
+ finally:
+ sys.stdout = old_stdout
+ sys.stderr = old_stderr
return new_stdout.getvalue(), new_stderr.getvalue(), exit_status
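The reworked mypy.api.run() no longer mutates sys.argv and always restores the standard streams; it captures mypy's report output and exit status. A minimal usage sketch, assuming a file example.py to check:

    from mypy import api

    # Run the type checker on a file and capture its report streams.
    stdout, stderr, exit_status = api.run(['example.py'])
    if exit_status != 0:
        print('type errors found:')
        print(stdout)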
diff --git a/mypy/applytype.py b/mypy/applytype.py
index d976700..6d2f3a9 100644
--- a/mypy/applytype.py
+++ b/mypy/applytype.py
@@ -3,7 +3,7 @@ from typing import List, Dict
import mypy.subtypes
from mypy.sametypes import is_same_type
from mypy.expandtype import expand_type
-from mypy.types import Type, TypeVarId, TypeVarType, CallableType, AnyType
+from mypy.types import Type, TypeVarId, TypeVarType, CallableType, AnyType, PartialType
from mypy.messages import MessageBuilder
from mypy.nodes import Context
@@ -34,14 +34,15 @@ def apply_generic_arguments(callable: CallableType, types: List[Type],
for v1 in type.values):
continue
for value in values:
- if mypy.subtypes.is_subtype(type, value):
+ if isinstance(type, PartialType) or mypy.subtypes.is_subtype(type, value):
types[i] = value
break
else:
msg.incompatible_typevar_value(callable, i + 1, type, context)
upper_bound = callable.variables[i].upper_bound
- if type and not mypy.subtypes.satisfies_upper_bound(type, upper_bound):
+ if (type and not isinstance(type, PartialType) and
+ not mypy.subtypes.is_subtype(type, upper_bound)):
msg.incompatible_typevar_value(callable, i + 1, type, context)
# Create a map from type variable id to target type.
diff --git a/mypy/binder.py b/mypy/binder.py
index 23be259..3ae2952 100644
--- a/mypy/binder.py
+++ b/mypy/binder.py
@@ -1,13 +1,19 @@
from typing import (Dict, List, Set, Iterator, Union)
from contextlib import contextmanager
-from mypy.types import Type, AnyType, PartialType
+from mypy.types import Type, AnyType, PartialType, UnionType, NoneTyp
from mypy.nodes import (Key, Node, Expression, Var, RefExpr, SymbolTableNode)
from mypy.subtypes import is_subtype
from mypy.join import join_simple
from mypy.sametypes import is_same_type
+from mypy.nodes import IndexExpr, MemberExpr, NameExpr
+
+
+BindableTypes = (IndexExpr, MemberExpr, NameExpr)
+BindableExpression = Union[IndexExpr, MemberExpr, NameExpr]
+
class Frame(Dict[Key, Type]):
"""A Frame represents a specific point in the execution of a program.
@@ -92,7 +98,7 @@ class ConditionalTypeBinder:
self.options_on_return.append([])
return f
- def _push(self, key: Key, type: Type, index: int=-1) -> None:
+ def _put(self, key: Key, type: Type, index: int=-1) -> None:
self.frames[index][key] = type
def _get(self, key: Key, index: int=-1) -> Type:
@@ -103,19 +109,22 @@ class ConditionalTypeBinder:
return self.frames[i][key]
return None
- def push(self, node: Node, typ: Type) -> None:
- if not node.literal:
+ def put(self, expr: Expression, typ: Type) -> None:
+ if not isinstance(expr, BindableTypes):
return
- key = node.literal_hash
+ if not expr.literal:
+ return
+ key = expr.literal_hash
if key not in self.declarations:
- self.declarations[key] = self.get_declaration(node)
+ assert isinstance(expr, BindableTypes)
+ self.declarations[key] = get_declaration(expr)
self._add_dependencies(key)
- self._push(key, typ)
+ self._put(key, typ)
def unreachable(self) -> None:
self.frames[-1].unreachable = True
- def get(self, expr: Union[Expression, Var]) -> Type:
+ def get(self, expr: Expression) -> Type:
return self._get(expr.literal_hash)
def is_unreachable(self) -> bool:
@@ -163,7 +172,7 @@ class ConditionalTypeBinder:
for other in resulting_values[1:]:
type = join_simple(self.declarations[key], type, other)
if not is_same_type(type, current_value):
- self._push(key, type)
+ self._put(key, type)
changed = True
self.frames[-1].unreachable = not frames
@@ -189,19 +198,12 @@ class ConditionalTypeBinder:
return result
- def get_declaration(self, expr: Node) -> Type:
- if isinstance(expr, RefExpr) and isinstance(expr.node, Var):
- type = expr.node.type
- if isinstance(type, PartialType):
- return None
- return type
- else:
- return None
-
def assign_type(self, expr: Expression,
type: Type,
declared_type: Type,
restrict_any: bool = False) -> None:
+ if not isinstance(expr, BindableTypes):
+ return None
if not expr.literal:
return
self.invalidate_dependencies(expr)
@@ -225,17 +227,21 @@ class ConditionalTypeBinder:
if (isinstance(self.most_recent_enclosing_type(expr, type), AnyType)
and not restrict_any):
pass
- elif isinstance(type, AnyType):
- self.push(expr, declared_type)
+ elif (isinstance(type, AnyType)
+ and not (isinstance(declared_type, UnionType)
+ and any(isinstance(item, AnyType) for item in declared_type.items))):
+ # Assigning an Any value doesn't affect the type to avoid false negatives, unless
+ # there is an Any item in a declared union type.
+ self.put(expr, declared_type)
else:
- self.push(expr, type)
+ self.put(expr, type)
for i in self.try_frames:
# XXX This should probably not copy the entire frame, but
# just copy this variable into a single stored frame.
self.allow_jump(i)
- def invalidate_dependencies(self, expr: Expression) -> None:
+ def invalidate_dependencies(self, expr: BindableExpression) -> None:
"""Invalidate knowledge of types that include expr, but not expr itself.
For example, when expr is foo.bar, invalidate foo.bar.baz.
@@ -246,11 +252,11 @@ class ConditionalTypeBinder:
for dep in self.dependencies.get(expr.literal_hash, set()):
self._cleanse_key(dep)
- def most_recent_enclosing_type(self, expr: Expression, type: Type) -> Type:
+ def most_recent_enclosing_type(self, expr: BindableExpression, type: Type) -> Type:
if isinstance(type, AnyType):
- return self.get_declaration(expr)
+ return get_declaration(expr)
key = expr.literal_hash
- enclosers = ([self.get_declaration(expr)] +
+ enclosers = ([get_declaration(expr)] +
[f[key] for f in self.frames
if key in f and is_subtype(type, f[key])])
return enclosers[-1]
@@ -334,3 +340,11 @@ class ConditionalTypeBinder:
assert len(self.frames) == 1
yield self.push_frame()
self.pop_frame(True, 0)
+
+
+def get_declaration(expr: BindableExpression) -> Type:
+ if isinstance(expr, RefExpr) and isinstance(expr.node, Var):
+ type = expr.node.type
+ if not isinstance(type, PartialType):
+ return type
+ return None
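The new Any-assignment rule in assign_type() can be read off a small, hypothetical snippet: assigning an Any value keeps the declared type in the binder, unless the declared type is a union that itself contains Any.

    from typing import Any, Union

    def f(untyped: Any, u: Union[int, Any], n: int) -> None:
        n = untyped   # binder keeps n's declared type (int), avoiding false negatives
        u = untyped   # declared union contains Any, so the binder now tracks u as Any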
diff --git a/mypy/build.py b/mypy/build.py
index 0f866c8..f803929 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -22,6 +22,10 @@ from os.path import dirname, basename
from typing import (AbstractSet, Dict, Iterable, Iterator, List,
NamedTuple, Optional, Set, Tuple, Union)
+# Can't use TYPE_CHECKING because it's not in the Python 3.5.1 stdlib
+MYPY = False
+if MYPY:
+ from typing import Deque
from mypy.nodes import (MypyFile, Node, ImportBase, Import, ImportFrom, ImportAll)
from mypy.semanal import FirstPass, SemanticAnalyzer, ThirdPass
@@ -61,8 +65,9 @@ class BuildResult:
errors: List of error messages.
"""
- def __init__(self, manager: 'BuildManager') -> None:
+ def __init__(self, manager: 'BuildManager', graph: Graph) -> None:
self.manager = manager
+ self.graph = graph
self.files = manager.modules
self.types = manager.all_types
self.errors = manager.errors.messages()
@@ -180,8 +185,8 @@ def build(sources: List[BuildSource],
)
try:
- dispatch(sources, manager)
- return BuildResult(manager)
+ graph = dispatch(sources, manager)
+ return BuildResult(manager, graph)
finally:
manager.log("Build finished in %.3f seconds with %d modules, %d types, and %d errors" %
(time.time() - manager.start_time,
@@ -361,7 +366,7 @@ class BuildManager:
version_id: str) -> None:
self.start_time = time.time()
self.data_dir = data_dir
- self.errors = Errors(options.hide_error_context, options.show_column_numbers)
+ self.errors = Errors(options.show_error_context, options.show_column_numbers)
self.errors.set_ignore_prefix(ignore_prefix)
self.lib_path = tuple(lib_path)
self.source_set = source_set
@@ -470,7 +475,7 @@ class BuildManager:
return tree
def module_not_found(self, path: str, line: int, id: str) -> None:
- self.errors.set_file(path)
+ self.errors.set_file(path, id)
stub_msg = "(Stub files are from https://github.com/python/typeshed)"
if ((self.options.python_version[0] == 2 and moduleinfo.is_py2_std_lib_module(id)) or
(self.options.python_version[0] >= 3 and moduleinfo.is_py3_std_lib_module(id))):
@@ -486,9 +491,12 @@ class BuildManager:
'or using the "--ignore-missing-imports" flag would help)',
severity='note', only_once=True)
- def report_file(self, file: MypyFile, type_map: Dict[Expression, Type]) -> None:
+ def report_file(self,
+ file: MypyFile,
+ type_map: Dict[Expression, Type],
+ options: Options) -> None:
if self.source_set.is_source(file):
- self.reports.file(file, type_map)
+ self.reports.file(file, type_map, options)
def log(self, *message: str) -> None:
if self.options.verbosity >= 1:
@@ -784,6 +792,9 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
# Ignore cache if (relevant) options aren't the same.
cached_options = m.options
current_options = manager.options.clone_for_module(id).select_options_affecting_cache()
+ if manager.options.quick_and_dirty:
+ # In quick_and_dirty mode allow non-quick_and_dirty cache files.
+ cached_options['quick_and_dirty'] = True
if cached_options != current_options:
manager.trace('Metadata abandoned for {}: options differ'.format(id))
return None
@@ -828,6 +839,10 @@ def write_cache(id: str, path: str, tree: MypyFile,
old_interface_hash: str, manager: BuildManager) -> str:
"""Write cache files for a module.
+ Note that mypy's behavior is still correct when any given

+ write_cache() call is replaced with a no-op, so error handling
+ code that bails without writing anything is okay.
+
Args:
id: module ID
path: module path
@@ -849,8 +864,6 @@ def write_cache(id: str, path: str, tree: MypyFile,
# Make sure directory for cache files exists
parent = os.path.dirname(data_json)
- if not os.path.isdir(parent):
- os.makedirs(parent)
assert os.path.dirname(meta_json) == parent
# Construct temp file names
@@ -866,6 +879,22 @@ def write_cache(id: str, path: str, tree: MypyFile,
data_str = json.dumps(data, sort_keys=True)
interface_hash = compute_hash(data_str)
+ # Obtain and set up metadata
+ try:
+ os.makedirs(parent, exist_ok=True)
+ st = manager.get_stat(path)
+ except OSError as err:
+ manager.log("Cannot get stat for {}: {}".format(path, err))
+ # Remove apparently-invalid cache files.
+ # (This is purely an optimization.)
+ for filename in [data_json, meta_json]:
+ try:
+ os.remove(filename)
+ except OSError:
+ pass
+ # Still return the interface hash we computed.
+ return interface_hash
+
# Write data cache file, if applicable
if old_interface_hash == interface_hash:
# If the interface is unchanged, the cached data is guaranteed
@@ -873,15 +902,27 @@ def write_cache(id: str, path: str, tree: MypyFile,
data_mtime = os.path.getmtime(data_json)
manager.trace("Interface for {} is unchanged".format(id))
else:
- with open(data_json_tmp, 'w') as f:
- f.write(data_str)
- f.write('\n')
- data_mtime = os.path.getmtime(data_json_tmp)
- os.replace(data_json_tmp, data_json)
manager.trace("Interface for {} has changed".format(id))
+ try:
+ with open(data_json_tmp, 'w') as f:
+ f.write(data_str)
+ f.write('\n')
+ os.replace(data_json_tmp, data_json)
+ data_mtime = os.path.getmtime(data_json)
+ except os.error as err:
+ # Most likely the error is the replace() call
+ # (see https://github.com/python/mypy/issues/3215).
+ manager.log("Error writing data JSON file {}".format(data_json_tmp))
+ # Let's continue without writing the meta file. Analysis:
+ # If the replace failed, we've changed nothing except left
+ # behind an extraneous temporary file; if the replace
+ # worked but the getmtime() call failed, the meta file
+ # will be considered invalid on the next run because the
+ # data_mtime field won't match the data file's mtime.
+ # Both have the effect of slowing down the next run a
+ # little bit due to an out-of-date cache file.
+ return interface_hash
- # Obtain and set up metadata
- st = manager.get_stat(path) # TODO: Handle errors
mtime = st.st_mtime
size = st.st_size
options = manager.options.clone_for_module(id)
@@ -900,12 +941,18 @@ def write_cache(id: str, path: str, tree: MypyFile,
}
# Write meta cache file
- with open(meta_json_tmp, 'w') as f:
- if manager.options.debug_cache:
- json.dump(meta, f, indent=2, sort_keys=True)
- else:
- json.dump(meta, f)
- os.replace(meta_json_tmp, meta_json)
+ try:
+ with open(meta_json_tmp, 'w') as f:
+ if manager.options.debug_cache:
+ json.dump(meta, f, indent=2, sort_keys=True)
+ else:
+ json.dump(meta, f)
+ os.replace(meta_json_tmp, meta_json)
+ except os.error as err:
+ # Most likely the error is the replace() call
+ # (see https://github.com/python/mypy/issues/3215).
+ # The next run will simply find the cache entry out of date.
+ manager.log("Error writing meta JSON file {}".format(meta_json_tmp))
return interface_hash
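The error handling added above wraps the usual write-to-temp-then-rename pattern. Stripped of mypy specifics, the pattern is (a generic sketch, not the mypy implementation):

    import json
    import os

    def write_json_atomically(path: str, obj: object) -> None:
        # Write to a temporary file first, then rename it into place so readers
        # never observe a half-written cache file.
        tmp = path + '.tmp'
        with open(tmp, 'w') as f:
            json.dump(obj, f)
        try:
            os.replace(tmp, path)  # the rename can fail, e.g. on Windows (issue 3215)
        except OSError:
            # Failing to write a cache file is harmless: the next run simply
            # recomputes the data, exactly as the comments above describe.
            pass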
@@ -1220,7 +1267,7 @@ class State:
# so we'd need to cache the decision.
manager = self.manager
manager.errors.set_import_context([])
- manager.errors.set_file(ancestor_for.xpath)
+ manager.errors.set_file(ancestor_for.xpath, ancestor_for.id)
manager.errors.report(-1, -1, "Ancestor package '%s' ignored" % (id,),
severity='note', only_once=True)
manager.errors.report(-1, -1,
@@ -1232,7 +1279,7 @@ class State:
manager = self.manager
save_import_context = manager.errors.import_context()
manager.errors.set_import_context(self.caller_state.import_context)
- manager.errors.set_file(self.caller_state.xpath)
+ manager.errors.set_file(self.caller_state.xpath, self.caller_state.id)
line = self.caller_line
manager.errors.report(line, 0,
"Import of '%s' ignored" % (id,),
@@ -1243,6 +1290,13 @@ class State:
manager.errors.set_import_context(save_import_context)
def add_ancestors(self) -> None:
+ if self.path is not None:
+ _, name = os.path.split(self.path)
+ base, _ = os.path.splitext(name)
+ if '.' in base:
+ # This is just a weird filename, don't add anything
+ self.ancestors = []
+ return
# All parent packages are new ancestors.
ancestors = []
parent = self.id
@@ -1308,10 +1362,12 @@ class State:
self.manager.modules[self.id] = self.tree
def fix_cross_refs(self) -> None:
- fixup_module_pass_one(self.tree, self.manager.modules)
+ fixup_module_pass_one(self.tree, self.manager.modules,
+ self.manager.options.quick_and_dirty)
def calculate_mros(self) -> None:
- fixup_module_pass_two(self.tree, self.manager.modules)
+ fixup_module_pass_two(self.tree, self.manager.modules,
+ self.manager.options.quick_and_dirty)
def fix_suppressed_dependencies(self, graph: Graph) -> None:
"""Corrects whether dependencies are considered stale in silent mode.
@@ -1382,7 +1438,8 @@ class State:
# this before processing imports, since this may mark some
# import statements as unreachable.
first = FirstPass(manager.semantic_analyzer)
- first.visit_file(self.tree, self.xpath, self.id, self.options)
+ with self.wrap_context():
+ first.visit_file(self.tree, self.xpath, self.id, self.options)
# Initialize module symbol table, which was populated by the
# semantic analyzer.
@@ -1409,7 +1466,7 @@ class State:
continue
if id == '':
# Must be from a relative import.
- manager.errors.set_file(self.xpath)
+ manager.errors.set_file(self.xpath, self.id)
manager.errors.report(line, 0,
"No parent module -- cannot perform relative import",
blocker=True)
@@ -1472,7 +1529,7 @@ class State:
if self.options.dump_inference_stats:
dump_type_stats(self.tree, self.xpath, inferred=True,
typemap=self.type_checker.type_map)
- manager.report_file(self.tree, self.type_checker.type_map)
+ manager.report_file(self.tree, self.type_checker.type_map, self.options)
def _patch_indirect_dependencies(self,
module_refs: Set[str],
@@ -1503,35 +1560,43 @@ class State:
return valid_refs
def write_cache(self) -> None:
- if self.path and self.options.incremental and not self.manager.errors.is_errors():
- dep_prios = [self.priorities.get(dep, PRI_HIGH) for dep in self.dependencies]
- new_interface_hash = write_cache(
- self.id, self.path, self.tree,
- list(self.dependencies), list(self.suppressed), list(self.child_modules),
- dep_prios, self.interface_hash,
- self.manager)
- if new_interface_hash == self.interface_hash:
- self.manager.log("Cached module {} has same interface".format(self.id))
- else:
- self.manager.log("Cached module {} has changed interface".format(self.id))
- self.mark_interface_stale()
- self.interface_hash = new_interface_hash
+ if not self.path or self.options.cache_dir == os.devnull:
+ return
+ if self.manager.options.quick_and_dirty:
+ is_errors = self.manager.errors.is_errors_for_file(self.path)
+ else:
+ is_errors = self.manager.errors.is_errors()
+ if is_errors:
+ return
+ dep_prios = [self.priorities.get(dep, PRI_HIGH) for dep in self.dependencies]
+ new_interface_hash = write_cache(
+ self.id, self.path, self.tree,
+ list(self.dependencies), list(self.suppressed), list(self.child_modules),
+ dep_prios, self.interface_hash,
+ self.manager)
+ if new_interface_hash == self.interface_hash:
+ self.manager.log("Cached module {} has same interface".format(self.id))
+ else:
+ self.manager.log("Cached module {} has changed interface".format(self.id))
+ self.mark_interface_stale()
+ self.interface_hash = new_interface_hash
-def dispatch(sources: List[BuildSource], manager: BuildManager) -> None:
+def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph:
manager.log("Mypy version %s" % __version__)
graph = load_graph(sources, manager)
if not graph:
print("Nothing to do?!")
- return
+ return graph
manager.log("Loaded graph with %d nodes" % len(graph))
if manager.options.dump_graph:
dump_graph(graph)
- return
+ return graph
process_graph(graph, manager)
if manager.options.warn_unused_ignores:
# TODO: This could also be a per-module option.
manager.errors.generate_unused_ignore_notes()
+ return graph
class NodeInfo:
@@ -1596,7 +1661,7 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
# The deque is used to implement breadth-first traversal.
# TODO: Consider whether to go depth-first instead. This may
# affect the order in which we process files within import cycles.
- new = collections.deque() # type: collections.deque[State]
+ new = collections.deque() # type: Deque[State]
entry_points = set() # type: Set[str]
# Seed the graph with the initial root sources.
for bs in sources:
@@ -1606,7 +1671,7 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
except ModuleNotFound:
continue
if st.id in graph:
- manager.errors.set_file(st.xpath)
+ manager.errors.set_file(st.xpath, st.id)
manager.errors.report(-1, -1, "Duplicate module named '%s'" % st.id)
manager.errors.raise_error()
graph[st.id] = st
@@ -1620,7 +1685,9 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
# so we ignore any suppressed module not explicitly re-included
# from the command line.
ignored = dep in st.suppressed and dep not in entry_points
- if dep not in graph and not ignored:
+ if ignored:
+ manager.missing_modules.add(dep)
+ elif dep not in graph:
try:
if dep in st.ancestors:
# TODO: Why not 'if dep not in st.dependencies' ?
@@ -1692,7 +1759,8 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
deps.update(graph[id].dependencies)
deps -= ascc
stale_deps = {id for id in deps if not graph[id].is_interface_fresh()}
- fresh = fresh and not stale_deps
+ if not manager.options.quick_and_dirty:
+ fresh = fresh and not stale_deps
undeps = set()
if fresh:
# Check if any dependencies that were suppressed according
@@ -1707,7 +1775,7 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
# All cache files are fresh. Check that no dependency's
# cache file is newer than any scc node's cache file.
oldest_in_scc = min(graph[id].meta.data_mtime for id in scc)
- viable = {id for id in deps if not graph[id].is_interface_fresh()}
+ viable = {id for id in stale_deps if graph[id].meta is not None}
newest_in_deps = 0 if not viable else max(graph[dep].meta.data_mtime for dep in viable)
if manager.options.verbosity >= 3: # Dump all mtimes for extreme debugging.
all_ids = sorted(ascc | viable, key=lambda id: graph[id].meta.data_mtime)
@@ -1725,7 +1793,9 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
manager.trace(" %5s %.0f %s" % (key, graph[id].meta.data_mtime, id))
# If equal, give the benefit of the doubt, due to 1-sec time granularity
# (on some platforms).
- if oldest_in_scc < newest_in_deps:
+ if manager.options.quick_and_dirty and stale_deps:
+ fresh_msg = "fresh(ish)"
+ elif oldest_in_scc < newest_in_deps:
fresh = False
fresh_msg = "out of date by %.0f seconds" % (newest_in_deps - oldest_in_scc)
else:
@@ -1743,7 +1813,7 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
scc_str = " ".join(scc)
if fresh:
- manager.log("Queuing fresh SCC (%s)" % scc_str)
+ manager.log("Queuing %s SCC (%s)" % (fresh_msg, scc_str))
fresh_scc_queue.append(scc)
else:
if len(fresh_scc_queue) > 0:
@@ -1765,7 +1835,7 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
manager.log("Processing SCC singleton (%s) as %s" % (scc_str, fresh_msg))
else:
manager.log("Processing SCC of size %d (%s) as %s" % (size, scc_str, fresh_msg))
- process_stale_scc(graph, scc)
+ process_stale_scc(graph, scc, manager)
sccs_left = len(fresh_scc_queue)
if sccs_left:
@@ -1832,26 +1902,46 @@ def process_fresh_scc(graph: Graph, scc: List[str]) -> None:
graph[id].calculate_mros()
-def process_stale_scc(graph: Graph, scc: List[str]) -> None:
- """Process the modules in one SCC from source code."""
- for id in scc:
+def process_stale_scc(graph: Graph, scc: List[str], manager: BuildManager) -> None:
+ """Process the modules in one SCC from source code.
+
+ Exception: If quick_and_dirty is set, use the cache for fresh modules.
+ """
+ if manager.options.quick_and_dirty:
+ fresh = [id for id in scc if graph[id].is_fresh()]
+ fresh_set = set(fresh) # To avoid running into O(N**2)
+ stale = [id for id in scc if id not in fresh_set]
+ if fresh:
+ manager.log(" Fresh ids: %s" % (", ".join(fresh)))
+ if stale:
+ manager.log(" Stale ids: %s" % (", ".join(stale)))
+ else:
+ fresh = []
+ stale = scc
+ for id in fresh:
+ graph[id].load_tree()
+ for id in stale:
# We may already have parsed the module, or not.
# If the former, parse_file() is a no-op.
graph[id].parse_file()
graph[id].fix_suppressed_dependencies(graph)
- for id in scc:
+ for id in fresh:
+ graph[id].fix_cross_refs()
+ for id in stale:
graph[id].semantic_analysis()
- for id in scc:
+ for id in stale:
graph[id].semantic_analysis_pass_three()
- for id in scc:
+ for id in fresh:
+ graph[id].calculate_mros()
+ for id in stale:
graph[id].type_check_first_pass()
more = True
while more:
more = False
- for id in scc:
+ for id in stale:
if graph[id].type_check_second_pass():
more = True
- for id in scc:
+ for id in stale:
graph[id].finish_passes()
graph[id].write_cache()
graph[id].mark_as_rechecked()
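The quick_and_dirty path above reuses cached trees for the fresh modules of an SCC and re-runs the full pipeline only on the stale ones, interleaving the two lists in a fixed order. A minimal standalone sketch of that ordering, with a hypothetical FakeState standing in for mypy's real per-module State:

    from typing import Dict, List

    class FakeState:
        """Hypothetical stand-in for a per-module build state (illustration only)."""
        def __init__(self, fresh: bool) -> None:
            self._fresh = fresh
            self.log = []  # type: List[str]

        def is_fresh(self) -> bool:
            return self._fresh

    def process_scc(graph: Dict[str, FakeState], scc: List[str], quick_and_dirty: bool) -> None:
        # Mirror the ordering above: fresh modules reuse their cached trees,
        # stale modules go through parse/analyze/type-check from source.
        if quick_and_dirty:
            fresh = [id for id in scc if graph[id].is_fresh()]
            fresh_set = set(fresh)
            stale = [id for id in scc if id not in fresh_set]
        else:
            fresh, stale = [], list(scc)
        for id in fresh:
            graph[id].log.append('load_tree')
        for id in stale:
            graph[id].log.append('parse_file')
        for id in fresh:
            graph[id].log.append('fix_cross_refs')
        for id in stale:
            graph[id].log.append('semantic_analysis')
        for id in fresh:
            graph[id].log.append('calculate_mros')
        for id in stale:
            graph[id].log.append('type_check')

    graph = {'a': FakeState(fresh=True), 'b': FakeState(fresh=False)}
    process_scc(graph, ['a', 'b'], quick_and_dirty=True)
    assert graph['a'].log == ['load_tree', 'fix_cross_refs', 'calculate_mros']
    assert graph['b'].log == ['parse_file', 'semantic_analysis', 'type_check']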
diff --git a/mypy/checker.py b/mypy/checker.py
index 77c8c4b..c65f956 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -3,6 +3,7 @@
import itertools
import fnmatch
from contextlib import contextmanager
+import sys
from typing import (
Dict, Set, List, cast, Tuple, TypeVar, Union, Optional, NamedTuple, Iterator
@@ -17,22 +18,22 @@ from mypy.nodes import (
WhileStmt, OperatorAssignmentStmt, WithStmt, AssertStmt,
RaiseStmt, TryStmt, ForStmt, DelStmt, CallExpr, IntExpr, StrExpr,
BytesExpr, UnicodeExpr, FloatExpr, OpExpr, UnaryExpr, CastExpr, RevealTypeExpr, SuperExpr,
- TypeApplication, DictExpr, SliceExpr, FuncExpr, TempNode, SymbolTableNode,
+ TypeApplication, DictExpr, SliceExpr, LambdaExpr, TempNode, SymbolTableNode,
Context, ListComprehension, ConditionalExpr, GeneratorExpr,
Decorator, SetExpr, TypeVarExpr, NewTypeExpr, PrintStmt,
LITERAL_TYPE, BreakStmt, PassStmt, ContinueStmt, ComparisonExpr, StarExpr,
YieldFromExpr, NamedTupleExpr, TypedDictExpr, SetComprehension,
DictionaryComprehension, ComplexExpr, EllipsisExpr, TypeAliasExpr,
- RefExpr, YieldExpr, BackquoteExpr, ImportFrom, ImportAll, ImportBase,
- AwaitExpr, PromoteExpr, Node,
+ RefExpr, YieldExpr, BackquoteExpr, Import, ImportFrom, ImportAll, ImportBase,
+ AwaitExpr, PromoteExpr, Node, EnumCallExpr,
ARG_POS, MDEF,
CONTRAVARIANT, COVARIANT)
from mypy import nodes
from mypy.types import (
- Type, AnyType, CallableType, Void, FunctionLike, Overloaded, TupleType, TypedDictType,
- Instance, NoneTyp, ErrorType, strip_type, TypeType,
+ Type, AnyType, CallableType, FunctionLike, Overloaded, TupleType, TypedDictType,
+ Instance, NoneTyp, strip_type, TypeType,
UnionType, TypeVarId, TypeVarType, PartialType, DeletedType, UninhabitedType, TypeVarDef,
- true_only, false_only, function_type
+ true_only, false_only, function_type, is_named_instance, union_items
)
from mypy.sametypes import is_same_type, is_same_types
from mypy.messages import MessageBuilder
@@ -41,7 +42,8 @@ from mypy.checkmember import map_type_from_supertype, bind_self, erase_to_bound
from mypy import messages
from mypy.subtypes import (
is_subtype, is_equivalent, is_proper_subtype, is_more_precise,
- restrict_subtype_away, is_subtype_ignoring_tvars
+ restrict_subtype_away, is_subtype_ignoring_tvars, is_callable_subtype,
+ unify_generic_callable,
)
from mypy.maptype import map_instance_to_supertype
from mypy.typevars import fill_typevars, has_no_typevars
@@ -51,8 +53,8 @@ from mypy.expandtype import expand_type, expand_type_by_instance
from mypy.visitor import NodeVisitor
from mypy.join import join_types
from mypy.treetransform import TransformVisitor
-from mypy.meet import meet_simple, is_overlapping_types
-from mypy.binder import ConditionalTypeBinder
+from mypy.binder import ConditionalTypeBinder, get_declaration
+from mypy.meet import is_overlapping_types
from mypy.options import Options
from mypy import experiments
@@ -63,17 +65,20 @@ T = TypeVar('T')
LAST_PASS = 1 # Pass numbers start at 0
-# A node which is postponed to be type checked during the next pass.
+# A node which is postponed to be processed during the next pass.
+# This is used for both batch mode and fine-grained incremental mode.
DeferredNode = NamedTuple(
'DeferredNode',
[
- ('node', FuncItem),
+ # In batch mode only FuncDef and LambdaExpr are supported
+ ('node', Union[FuncDef, LambdaExpr, MypyFile]),
('context_type_name', Optional[str]), # Name of the surrounding class (for error messages)
- ('active_class', Optional[Type]), # And its type (for selftype handline)
+ ('active_typeinfo', Optional[TypeInfo]), # And its TypeInfo (for semantic analysis
+ # self type handling)
])
-class TypeChecker(NodeVisitor[Type]):
+class TypeChecker(NodeVisitor[None]):
"""Mypy type checker.
Type check mypy source files that have been semantically analyzed.
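DeferredNode above is widened so that a deferred item can be a whole module top level (for fine-grained incremental mode) as well as a function or lambda, and the enclosing class now travels as a TypeInfo rather than a Type. A small sketch of the same shape, using hypothetical placeholder classes rather than the real ones from mypy.nodes:

    from typing import NamedTuple, Optional, Union

    class FuncDef: ...      # placeholders for the mypy.nodes classes
    class LambdaExpr: ...
    class MypyFile: ...
    class TypeInfo: ...

    DeferredNode = NamedTuple('DeferredNode', [
        ('node', Union[FuncDef, LambdaExpr, MypyFile]),
        ('context_type_name', Optional[str]),     # enclosing class name, for errors
        ('active_typeinfo', Optional[TypeInfo]),  # enclosing class, for self types
    ])

    deferred = DeferredNode(FuncDef(), 'C', TypeInfo())
    assert deferred.context_type_name == 'C'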
@@ -98,8 +103,6 @@ class TypeChecker(NodeVisitor[Type]):
scope = None # type: Scope
# Stack of function return types
return_types = None # type: List[Type]
- # Type context for type inference
- type_context = None # type: List[Type]
# Flags; true for dynamically typed functions
dynamic_funcs = None # type: List[bool]
# Stack of collections of variables with partial types
@@ -141,7 +144,6 @@ class TypeChecker(NodeVisitor[Type]):
self.binder = ConditionalTypeBinder()
self.globals = tree.names
self.return_types = []
- self.type_context = []
self.dynamic_funcs = []
self.partial_types = []
self.deferred_nodes = []
@@ -168,19 +170,18 @@ class TypeChecker(NodeVisitor[Type]):
Deferred functions will be processed by check_second_pass().
"""
- self.errors.set_file(self.path)
- self.enter_partial_types()
-
- with self.binder.top_frame_context():
- for d in self.tree.defs:
- self.accept(d)
-
- self.leave_partial_types()
+ self.errors.set_file(self.path, self.tree.fullname())
+ with self.enter_partial_types():
+ with self.binder.top_frame_context():
+ for d in self.tree.defs:
+ self.accept(d)
assert not self.current_node_deferred
all_ = self.globals.get('__all__')
if all_ is not None and all_.type is not None:
+ all_node = all_.node
+ assert all_node is not None
seq_str = self.named_generic_type('typing.Sequence',
[self.named_type('builtins.str')])
if self.options.python_version[0] < 3:
@@ -189,42 +190,59 @@ class TypeChecker(NodeVisitor[Type]):
if not is_subtype(all_.type, seq_str):
str_seq_s, all_s = self.msg.format_distinctly(seq_str, all_.type)
self.fail(messages.ALL_MUST_BE_SEQ_STR.format(str_seq_s, all_s),
- all_.node)
+ all_node)
- def check_second_pass(self) -> bool:
+ def check_second_pass(self, todo: List[DeferredNode] = None) -> bool:
"""Run second or following pass of type checking.
This goes through deferred nodes, returning True if there were any.
"""
- if not self.deferred_nodes:
+ if not todo and not self.deferred_nodes:
return False
- self.errors.set_file(self.path)
+ self.errors.set_file(self.path, self.tree.fullname())
self.pass_num += 1
- todo = self.deferred_nodes
+ if not todo:
+ todo = self.deferred_nodes
+ else:
+ assert not self.deferred_nodes
self.deferred_nodes = []
- done = set() # type: Set[FuncItem]
- for node, type_name, active_class in todo:
+ done = set() # type: Set[Union[FuncDef, LambdaExpr, MypyFile]]
+ for node, type_name, active_typeinfo in todo:
if node in done:
continue
# This is useful for debugging:
# print("XXX in pass %d, class %s, function %s" %
# (self.pass_num, type_name, node.fullname() or node.name()))
done.add(node)
- if type_name:
- self.errors.push_type(type_name)
-
- if active_class:
- with self.scope.push_class(active_class):
- self.accept(node)
- else:
- self.accept(node)
- if type_name:
- self.errors.pop_type()
+ with self.errors.enter_type(type_name) if type_name else nothing():
+ with self.scope.push_class(active_typeinfo) if active_typeinfo else nothing():
+ self.check_partial(node)
return True
+ def check_partial(self, node: Union[FuncDef, LambdaExpr, MypyFile]) -> None:
+ if isinstance(node, MypyFile):
+ self.check_top_level(node)
+ elif isinstance(node, LambdaExpr):
+ self.expr_checker.accept(node)
+ else:
+ self.accept(node)
+
+ def check_top_level(self, node: MypyFile) -> None:
+ """Check only the top-level of a module, skipping function definitions."""
+ with self.enter_partial_types():
+ with self.binder.top_frame_context():
+ for d in node.defs:
+ # TODO: Type check class bodies.
+ if not isinstance(d, (FuncDef, ClassDef)):
+ d.accept(self)
+
+ assert not self.current_node_deferred
+ # TODO: Handle __all__
+
def handle_cannot_determine_type(self, name: str, context: Context) -> None:
node = self.scope.top_function()
- if self.pass_num < LAST_PASS and node is not None:
+ if (self.pass_num < LAST_PASS and node is not None
+ and isinstance(node, (FuncDef, LambdaExpr))):
# Don't report an error yet. Just defer.
if self.errors.type_name:
type_name = self.errors.type_name[-1]
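With the widened DeferredNode, check_second_pass can be handed an explicit todo list and dispatches through check_partial: lambdas go to the expression checker, module nodes to check_top_level (which skips function and class bodies), and everything else through the normal accept path. A rough sketch of that top-level filter, with hypothetical stand-in node classes:

    from typing import List, Union

    class FuncDef:                      # hypothetical stand-ins for mypy.nodes
        def __init__(self, name: str) -> None:
            self.name = name

    class ClassDef:
        def __init__(self, name: str) -> None:
            self.name = name

    class AssignmentStmt:
        def __init__(self, target: str) -> None:
            self.target = target

    Def = Union[FuncDef, ClassDef, AssignmentStmt]

    def top_level_targets(defs: List[Def]) -> List[str]:
        # Same filter as check_top_level: only statements that are not
        # function or class definitions are visited on this pass.
        return [d.target for d in defs if not isinstance(d, (FuncDef, ClassDef))]

    module = [AssignmentStmt('x'), FuncDef('f'), ClassDef('C'), AssignmentStmt('y')]
    assert top_level_targets(module) == ['x', 'y']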
@@ -240,22 +258,12 @@ class TypeChecker(NodeVisitor[Type]):
else:
self.msg.cannot_determine_type(name, context)
- def accept(self, node: Union[Expression, Statement, FuncItem],
- type_context: Type = None) -> Type:
+ def accept(self, stmt: Statement) -> None:
"""Type check a node in the given type context."""
- self.type_context.append(type_context)
try:
- typ = node.accept(self)
+ stmt.accept(self)
except Exception as err:
- report_internal_error(err, self.errors.file, node.line, self.errors, self.options)
- self.type_context.pop()
- if typ is not None:
- assert isinstance(node, Expression)
- self.store_type(node, typ)
- if not self.in_checked_function():
- return AnyType()
- else:
- return typ
+ report_internal_error(err, self.errors.file, stmt.line, self.errors, self.options)
def accept_loop(self, body: Statement, else_body: Statement = None, *,
exit_condition: Expression = None) -> None:
@@ -282,17 +290,27 @@ class TypeChecker(NodeVisitor[Type]):
# Definitions
#
- def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> Type:
+ def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
num_abstract = 0
+ if not defn.items:
+ # In this case we have already complained about none of these being
+ # valid overloads.
+ return None
+ if len(defn.items) == 1:
+ self.fail('Single overload definition, multiple required', defn)
+
if defn.is_property:
# HACK: Infer the type of the property.
- self.visit_decorator(defn.items[0])
+ self.visit_decorator(cast(Decorator, defn.items[0]))
for fdef in defn.items:
+ assert isinstance(fdef, Decorator)
self.check_func_item(fdef.func, name=fdef.func.name())
if fdef.func.is_abstract:
num_abstract += 1
if num_abstract not in (0, len(defn.items)):
self.fail(messages.INCONSISTENT_ABSTRACT_OVERLOAD, defn)
+ if defn.impl:
+ defn.impl.accept(self)
if defn.info:
self.check_method_override(defn)
self.check_inplace_operator_method(defn)
@@ -300,14 +318,45 @@ class TypeChecker(NodeVisitor[Type]):
return None
def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None:
+ # At this point we should have set the impl already, and all remaining
+ # items are decorators
for i, item in enumerate(defn.items):
+ assert isinstance(item, Decorator)
+ sig1 = self.function_type(item.func)
for j, item2 in enumerate(defn.items[i + 1:]):
# TODO overloads involving decorators
- sig1 = self.function_type(item.func)
+ assert isinstance(item2, Decorator)
sig2 = self.function_type(item2.func)
if is_unsafe_overlapping_signatures(sig1, sig2):
self.msg.overloaded_signatures_overlap(i + 1, i + j + 2,
item.func)
+ if defn.impl:
+ if isinstance(defn.impl, FuncDef):
+ impl_type = defn.impl.type
+ elif isinstance(defn.impl, Decorator):
+ impl_type = defn.impl.var.type
+ else:
+ assert False, "Impl isn't the right type"
+ # This can happen if we've got an overload with a different
+ # decorator too -- we gave up on the types.
+ if impl_type is None or isinstance(impl_type, AnyType) or sig1 is None:
+ return
+
+ assert isinstance(impl_type, CallableType)
+ assert isinstance(sig1, CallableType)
+ if not is_callable_subtype(impl_type, sig1, ignore_return=True):
+ self.msg.overloaded_signatures_arg_specific(i + 1, defn.impl)
+ impl_type_subst = impl_type
+ if impl_type.variables:
+ unified = unify_generic_callable(impl_type, sig1, ignore_return=False)
+ if unified is None:
+ self.fail("Type variable mismatch between " +
+ "overload signature {} and implementation".format(i + 1),
+ defn.impl)
+ return
+ impl_type_subst = unified
+ if not is_subtype(sig1.ret_type, impl_type_subst.ret_type):
+ self.msg.overloaded_signatures_ret_specific(i + 1, defn.impl)
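The added checks compare the overload implementation against each variant: its arguments must accept at least what the variant accepts (is_callable_subtype with ignore_return=True), and each variant's return type must be a subtype of the, possibly unified, implementation return type. The kind of definition being validated looks roughly like this:

    from typing import Union, overload

    @overload
    def parse(x: int) -> int: ...
    @overload
    def parse(x: str) -> str: ...
    def parse(x: Union[int, str]) -> Union[int, str]:
        # The implementation must be able to take every overload's arguments,
        # and each overload's return type must fit the implementation's.
        return x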
# Here's the scoop about generators and coroutines.
#
@@ -323,7 +372,7 @@ class TypeChecker(NodeVisitor[Type]):
#
# A classic generator must define a return type that's either
# `Generator[ty, tc, tr]`, Iterator[ty], or Iterable[ty] (or
- # object or Any). If tc/tr are not given, both are Void.
+ # object or Any). If tc/tr are not given, both are None.
#
# A coroutine must define a return type corresponding to tr; the
# other two are unconstrained. The "external" return type (seen
@@ -335,12 +384,19 @@ class TypeChecker(NodeVisitor[Type]):
# for functions decorated with `@types.coroutine` or
# `@asyncio.coroutine`. Its single parameter corresponds to tr.
#
+ # PEP 525 adds a new type, the asynchronous generator, which was
+ # first released in Python 3.6. Async generators are `async def`
+ # functions that can also `yield` values. They can be parameterized
+ # with two types, ty and tc, because they cannot return a value.
+ #
# There are several useful methods, each taking a type t and a
# flag c indicating whether it's for a generator or coroutine:
#
# - is_generator_return_type(t, c) returns whether t is a Generator,
# Iterator, Iterable (if not c), or Awaitable (if c), or
# AwaitableGenerator (regardless of c).
+ # - is_async_generator_return_type(t) returns whether t is an
+ # AsyncGenerator.
# - get_generator_yield_type(t, c) returns ty.
# - get_generator_receive_type(t, c) returns tc.
# - get_generator_return_type(t, c) returns tr.
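The comment block above fixes the vocabulary: ty is the yielded type, tc the type accepted by send(), tr the return type, and PEP 525 async generators carry only ty and tc because they cannot return a value. A small, self-contained illustration of where those parameters sit in ordinary annotations (nothing mypy-specific; get_args is Python 3.8+ typing introspection):

    from typing import AsyncGenerator, Generator, get_args

    def counter(limit: int) -> Generator[int, str, bool]:
        # ty = int (yielded), tc = str (sent in), tr = bool (returned)
        for i in range(limit):
            reply = yield i
            if reply == 'stop':
                return False
        return True

    async def ticks(limit: int) -> AsyncGenerator[int, None]:
        # PEP 525: ty = int, tc = None; no tr, async generators cannot return.
        for i in range(limit):
            yield i

    assert get_args(counter.__annotations__['return']) == (int, str, bool)
    assert get_args(ticks.__annotations__['return']) == (int, type(None))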
@@ -362,11 +418,24 @@ class TypeChecker(NodeVisitor[Type]):
return True
return isinstance(typ, Instance) and typ.type.fullname() == 'typing.AwaitableGenerator'
+ def is_async_generator_return_type(self, typ: Type) -> bool:
+ """Is `typ` a valid type for an async generator?
+
+ True if `typ` is a supertype of AsyncGenerator.
+ """
+ try:
+ agt = self.named_generic_type('typing.AsyncGenerator', [AnyType(), AnyType()])
+ except KeyError:
+ # we're running on a version of typing that doesn't have AsyncGenerator yet
+ return False
+ return is_subtype(agt, typ)
+
def get_generator_yield_type(self, return_type: Type, is_coroutine: bool) -> Type:
"""Given the declared return type of a generator (t), return the type it yields (ty)."""
if isinstance(return_type, AnyType):
return AnyType()
- elif not self.is_generator_return_type(return_type, is_coroutine):
+ elif (not self.is_generator_return_type(return_type, is_coroutine)
+ and not self.is_async_generator_return_type(return_type)):
# If the function doesn't have a proper Generator (or
# Awaitable) return type, anything is permissible.
return AnyType()
@@ -377,14 +446,9 @@ class TypeChecker(NodeVisitor[Type]):
# Awaitable: ty is Any.
return AnyType()
elif return_type.args:
- # AwaitableGenerator, Generator, Iterator, or Iterable; ty is args[0].
+ # AwaitableGenerator, Generator, AsyncGenerator, Iterator, or Iterable; ty is args[0].
ret_type = return_type.args[0]
# TODO not best fix, better have dedicated yield token
- if isinstance(ret_type, NoneTyp):
- if experiments.STRICT_OPTIONAL:
- return NoneTyp(is_ret_type=True)
- else:
- return Void()
return ret_type
else:
# If the function's declared supertype of Generator has no type
@@ -397,7 +461,8 @@ class TypeChecker(NodeVisitor[Type]):
"""Given a declared generator return type (t), return the type its yield receives (tc)."""
if isinstance(return_type, AnyType):
return AnyType()
- elif not self.is_generator_return_type(return_type, is_coroutine):
+ elif (not self.is_generator_return_type(return_type, is_coroutine)
+ and not self.is_async_generator_return_type(return_type)):
# If the function doesn't have a proper Generator (or
# Awaitable) return type, anything is permissible.
return AnyType()
@@ -411,13 +476,12 @@ class TypeChecker(NodeVisitor[Type]):
and len(return_type.args) >= 3):
# Generator: tc is args[1].
return return_type.args[1]
+ elif return_type.type.fullname() == 'typing.AsyncGenerator' and len(return_type.args) >= 2:
+ return return_type.args[1]
else:
# `return_type` is a supertype of Generator, so callers won't be able to send it
# values. IOW, tc is None.
- if experiments.STRICT_OPTIONAL:
- return NoneTyp(is_ret_type=True)
- else:
- return Void()
+ return NoneTyp()
def get_generator_return_type(self, return_type: Type, is_coroutine: bool) -> Type:
"""Given the declared return type of a generator (t), return the type it returns (tr)."""
@@ -441,7 +505,7 @@ class TypeChecker(NodeVisitor[Type]):
# Supertype of Generator (Iterator, Iterable, object): tr is any.
return AnyType()
- def visit_func_def(self, defn: FuncDef) -> Type:
+ def visit_func_def(self, defn: FuncDef) -> None:
"""Type check a function definition."""
self.check_func_item(defn, name=defn.name())
if defn.info:
@@ -462,7 +526,7 @@ class TypeChecker(NodeVisitor[Type]):
# XXX This can be None, as happens in
# test_testcheck_TypeCheckSuite.testRedefinedFunctionInTryWithElse
self.msg.note("Internal mypy error checking function redefinition.", defn)
- return None
+ return
if isinstance(orig_type, PartialType):
if orig_type.type is None:
# Ah this is a partial type. Give it the type of the function.
@@ -480,7 +544,6 @@ class TypeChecker(NodeVisitor[Type]):
messages.INCOMPATIBLE_REDEFINITION,
'redefinition with type',
'original type')
- return None
def check_func_item(self, defn: FuncItem,
type_override: CallableType = None,
@@ -497,23 +560,15 @@ class TypeChecker(NodeVisitor[Type]):
self.dynamic_funcs.append(defn.is_dynamic() and not type_override)
- if fdef:
- self.errors.push_function(fdef.name())
-
- self.enter_partial_types()
-
- typ = self.function_type(defn)
- if type_override:
- typ = type_override
- if isinstance(typ, CallableType):
- self.check_func_def(defn, typ, name)
- else:
- raise RuntimeError('Not supported')
-
- self.leave_partial_types()
-
- if fdef:
- self.errors.pop_function()
+ with self.errors.enter_function(fdef.name()) if fdef else nothing():
+ with self.enter_partial_types():
+ typ = self.function_type(defn)
+ if type_override:
+ typ = type_override
+ if isinstance(typ, CallableType):
+ self.check_func_def(defn, typ, name)
+ else:
+ raise RuntimeError('Not supported')
self.dynamic_funcs.pop()
self.current_node_deferred = False
@@ -538,10 +593,10 @@ class TypeChecker(NodeVisitor[Type]):
if fdef:
# Check if __init__ has an invalid, non-None return type.
if (fdef.info and fdef.name() in ('__init__', '__init_subclass__') and
- not isinstance(typ.ret_type, (Void, NoneTyp)) and
+ not isinstance(typ.ret_type, NoneTyp) and
not self.dynamic_funcs[-1]):
self.fail(messages.MUST_HAVE_NONE_RETURN_TYPE.format(fdef.name()),
- item.type)
+ item)
show_untyped = not self.is_typeshed_stub or self.options.warn_incomplete_stub
if self.options.disallow_untyped_defs and show_untyped:
@@ -570,14 +625,18 @@ class TypeChecker(NodeVisitor[Type]):
# Check that Generator functions have the appropriate return type.
if defn.is_generator:
- if not self.is_generator_return_type(typ.ret_type, defn.is_coroutine):
- self.fail(messages.INVALID_RETURN_TYPE_FOR_GENERATOR, typ)
+ if defn.is_async_generator:
+ if not self.is_async_generator_return_type(typ.ret_type):
+ self.fail(messages.INVALID_RETURN_TYPE_FOR_ASYNC_GENERATOR, typ)
+ else:
+ if not self.is_generator_return_type(typ.ret_type, defn.is_coroutine):
+ self.fail(messages.INVALID_RETURN_TYPE_FOR_GENERATOR, typ)
# Python 2 generators aren't allowed to return values.
if (self.options.python_version[0] == 2 and
isinstance(typ.ret_type, Instance) and
typ.ret_type.type.fullname() == 'typing.Generator'):
- if not isinstance(typ.ret_type.args[2], (Void, NoneTyp, AnyType)):
+ if not isinstance(typ.ret_type.args[2], (NoneTyp, AnyType)):
self.fail(messages.INVALID_GENERATOR_RETURN_ITEM_TYPE, typ)
# Fix the type if decorated with `@types.coroutine` or `@asyncio.coroutine`.
@@ -601,21 +660,41 @@ class TypeChecker(NodeVisitor[Type]):
for i in range(len(typ.arg_types)):
arg_type = typ.arg_types[i]
- ref_type = self.scope.active_class()
+ ref_type = self.scope.active_self_type() # type: Optional[Type]
if (isinstance(defn, FuncDef) and ref_type is not None and i == 0
and not defn.is_static
and typ.arg_kinds[0] not in [nodes.ARG_STAR, nodes.ARG_STAR2]):
- if defn.is_class or defn.name() in ('__new__', '__init_subclass__'):
+ isclass = defn.is_class or defn.name() in ('__new__', '__init_subclass__')
+ if isclass:
ref_type = mypy.types.TypeType(ref_type)
erased = erase_to_bound(arg_type)
if not is_subtype_ignoring_tvars(ref_type, erased):
- self.fail("The erased type of self '{}' "
- "is not a supertype of its class '{}'"
- .format(erased, ref_type), defn)
+ note = None
+ if typ.arg_names[i] in ['self', 'cls']:
+ if (self.options.python_version[0] < 3
+ and is_same_type(erased, arg_type) and not isclass):
+ msg = ("Invalid type for self, or extra argument type "
+ "in function annotation")
+ note = '(Hint: typically annotations omit the type for self)'
+ else:
+ msg = ("The erased type of self '{}' "
+ "is not a supertype of its class '{}'"
+ ).format(erased, ref_type)
+ else:
+ msg = ("Self argument missing for a non-static method "
+ "(or an invalid type for self)")
+ self.fail(msg, defn)
+ if note:
+ self.note(note, defn)
+ if defn.is_class and isinstance(arg_type, CallableType):
+ arg_type.is_classmethod_class = True
elif isinstance(arg_type, TypeVarType):
# Refuse covariant parameter type variables
- # TODO: check recuresively for inner type variables
- if arg_type.variance == COVARIANT:
+ # TODO: check recursively for inner type variables
+ if (
+ arg_type.variance == COVARIANT and
+ defn.name() not in ('__init__', '__new__')
+ ):
self.fail(messages.FUNCTION_PARAMETER_CANNOT_BE_COVARIANT, arg_type)
if typ.arg_kinds[i] == nodes.ARG_STAR:
# builtins.tuple[T] is typing.Tuple[T, ...]
@@ -639,16 +718,24 @@ class TypeChecker(NodeVisitor[Type]):
self.accept(item.body)
unreachable = self.binder.is_unreachable()
- if (self.options.warn_no_return and not unreachable
- and not isinstance(self.return_types[-1], (Void, NoneTyp, AnyType))
- and (defn.is_coroutine or not defn.is_generator)):
- # Control flow fell off the end of a function that was
- # declared to return a non-None type.
- # Allow functions that are entirely pass/Ellipsis.
- if self.is_trivial_body(defn.body):
- pass
+ if (self.options.warn_no_return and not unreachable):
+ if (defn.is_generator or
+ is_named_instance(self.return_types[-1], 'typing.AwaitableGenerator')):
+ return_type = self.get_generator_return_type(self.return_types[-1],
+ defn.is_coroutine)
else:
- self.msg.note(messages.MISSING_RETURN_STATEMENT, defn)
+ return_type = self.return_types[-1]
+
+ if (not isinstance(return_type, (NoneTyp, AnyType))
+ and not self.is_trivial_body(defn.body)):
+ # Control flow fell off the end of a function that was
+ # declared to return a non-None type and is not
+ # entirely pass/Ellipsis.
+ if isinstance(return_type, UninhabitedType):
+ # This is a NoReturn function
+ self.msg.note(messages.INVALID_IMPLICIT_RETURN, defn)
+ else:
+ self.msg.fail(messages.MISSING_RETURN_STATEMENT, defn)
self.return_types.pop()
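The warn_no_return block now looks through generator return types before deciding whether control may fall off the end, reports a missing return as an error rather than a note, and special-cases NoReturn (UninhabitedType) functions with an implicit-return note. A hedged example of the two shapes involved (illustrative only; the exact wording comes from mypy.messages):

    from typing import NoReturn

    def always_raises() -> NoReturn:
        # An implicit return here would get the NoReturn-specific note.
        raise RuntimeError('boom')

    def sometimes_returns(flag: bool) -> int:
        if flag:
            return 1
        # Falling off the end of a function declared to return int is now
        # reported as a missing return statement.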
@@ -755,44 +842,45 @@ class TypeChecker(NodeVisitor[Type]):
# of x in __radd__ would not be A, the methods could be
# non-overlapping.
- if isinstance(forward_type, CallableType):
- # TODO check argument kinds
- if len(forward_type.arg_types) < 1:
- # Not a valid operator method -- can't succeed anyway.
- return
+ for forward_item in union_items(forward_type):
+ if isinstance(forward_item, CallableType):
+ # TODO check argument kinds
+ if len(forward_item.arg_types) < 1:
+ # Not a valid operator method -- can't succeed anyway.
+ return
- # Construct normalized function signatures corresponding to the
- # operator methods. The first argument is the left operand and the
- # second operand is the right argument -- we switch the order of
- # the arguments of the reverse method.
- forward_tweaked = CallableType(
- [forward_base, forward_type.arg_types[0]],
- [nodes.ARG_POS] * 2,
- [None] * 2,
- forward_type.ret_type,
- forward_type.fallback,
- name=forward_type.name)
- reverse_args = reverse_type.arg_types
- reverse_tweaked = CallableType(
- [reverse_args[1], reverse_args[0]],
- [nodes.ARG_POS] * 2,
- [None] * 2,
- reverse_type.ret_type,
- fallback=self.named_type('builtins.function'),
- name=reverse_type.name)
-
- if is_unsafe_overlapping_signatures(forward_tweaked,
- reverse_tweaked):
- self.msg.operator_method_signatures_overlap(
- reverse_class.name(), reverse_name,
- forward_base.type.name(), forward_name, context)
- elif isinstance(forward_type, Overloaded):
- for item in forward_type.items():
- self.check_overlapping_op_methods(
- reverse_type, reverse_name, reverse_class,
- item, forward_name, forward_base, context)
- elif not isinstance(forward_type, AnyType):
- self.msg.forward_operator_not_callable(forward_name, context)
+ # Construct normalized function signatures corresponding to the
+ # operator methods. The first argument is the left operand and the
+ # second operand is the right argument -- we switch the order of
+ # the arguments of the reverse method.
+ forward_tweaked = CallableType(
+ [forward_base, forward_item.arg_types[0]],
+ [nodes.ARG_POS] * 2,
+ [None] * 2,
+ forward_item.ret_type,
+ forward_item.fallback,
+ name=forward_item.name)
+ reverse_args = reverse_type.arg_types
+ reverse_tweaked = CallableType(
+ [reverse_args[1], reverse_args[0]],
+ [nodes.ARG_POS] * 2,
+ [None] * 2,
+ reverse_type.ret_type,
+ fallback=self.named_type('builtins.function'),
+ name=reverse_type.name)
+
+ if is_unsafe_overlapping_signatures(forward_tweaked,
+ reverse_tweaked):
+ self.msg.operator_method_signatures_overlap(
+ reverse_class.name(), reverse_name,
+ forward_base.type.name(), forward_name, context)
+ elif isinstance(forward_item, Overloaded):
+ for item in forward_item.items():
+ self.check_overlapping_op_methods(
+ reverse_type, reverse_name, reverse_class,
+ item, forward_name, forward_base, context)
+ elif not isinstance(forward_item, AnyType):
+ self.msg.forward_operator_not_callable(forward_name, context)
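check_overlapping_op_methods now iterates over union_items(forward_type), so a forward operator whose type is a union of callables is checked member by member instead of being skipped. A rough, standalone sketch of the flattening step (simple placeholder classes, not mypy's real types module):

    from typing import List

    class CallableSig:
        def __init__(self, name: str) -> None:
            self.name = name

    class UnionSig:
        def __init__(self, items: List[object]) -> None:
            self.items = items

    def union_items(typ: object) -> List[object]:
        # A union contributes each of its members (recursively); any other
        # type is treated as a single item, matching the loop above.
        if isinstance(typ, UnionSig):
            return [member for item in typ.items for member in union_items(item)]
        return [typ]

    forward = UnionSig([CallableSig('__add__/int'), CallableSig('__add__/str')])
    assert [c.name for c in union_items(forward)] == ['__add__/int', '__add__/str']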
def check_inplace_operator_method(self, defn: FuncBase) -> None:
"""Check an inplace operator method such as __iadd__.
@@ -862,8 +950,9 @@ class TypeChecker(NodeVisitor[Type]):
"""Check if method definition is compatible with a base class."""
if base:
name = defn.name()
- if name not in ('__init__', '__new__'):
- # Check method override (__init__ and __new__ are special).
+ if name not in ('__init__', '__new__', '__init_subclass__'):
+ # Check method override
+ # (__init__, __new__, __init_subclass__ are special).
self.check_method_override_for_base_with_name(defn, name, base)
if name in nodes.inplace_operator_methods:
# Figure out the name of the corresponding operator method.
@@ -882,7 +971,7 @@ class TypeChecker(NodeVisitor[Type]):
# The name of the method is defined in the base class.
# Construct the type of the overriding method.
- typ = bind_self(self.function_type(defn), self.scope.active_class())
+ typ = bind_self(self.function_type(defn), self.scope.active_self_type())
# Map the overridden method type to subtype context so that
# it can be checked for compatibility.
original_type = base_attr.type
@@ -895,7 +984,7 @@ class TypeChecker(NodeVisitor[Type]):
assert False, str(base_attr.node)
if isinstance(original_type, FunctionLike):
original = map_type_from_supertype(
- bind_self(original_type, self.scope.active_class()),
+ bind_self(original_type, self.scope.active_self_type()),
defn.info, base)
# Check that the types are compatible.
# TODO overloaded signatures
@@ -905,6 +994,8 @@ class TypeChecker(NodeVisitor[Type]):
name,
base.name(),
defn)
+ elif isinstance(original_type, AnyType):
+ pass
else:
self.msg.signature_incompatible_with_supertype(
defn.name(), name, base.name(), defn)
@@ -932,6 +1023,13 @@ class TypeChecker(NodeVisitor[Type]):
# this could be unsafe with reverse operator methods.
fail = True
+ if isinstance(original, CallableType) and isinstance(override, CallableType):
+ if (isinstance(original.definition, FuncItem) and
+ isinstance(override.definition, FuncItem)):
+ if ((original.definition.is_static or original.definition.is_class) and
+ not (override.definition.is_static or override.definition.is_class)):
+ fail = True
+
if fail:
emitted_msg = False
if (isinstance(override, CallableType) and
@@ -971,23 +1069,19 @@ class TypeChecker(NodeVisitor[Type]):
self.msg.signature_incompatible_with_supertype(
name, name_in_super, supertype, node)
- def visit_class_def(self, defn: ClassDef) -> Type:
+ def visit_class_def(self, defn: ClassDef) -> None:
"""Type check a class definition."""
typ = defn.info
- self.errors.push_type(defn.name)
- self.enter_partial_types()
- old_binder = self.binder
- self.binder = ConditionalTypeBinder()
- with self.binder.top_frame_context():
- with self.scope.push_class(fill_typevars(defn.info)):
- self.accept(defn.defs)
- self.binder = old_binder
- if not defn.has_incompatible_baseclass:
- # Otherwise we've already found errors; more errors are not useful
- self.check_multiple_inheritance(typ)
- self.leave_partial_types()
- self.errors.pop_type()
- return None
+ with self.errors.enter_type(defn.name), self.enter_partial_types():
+ old_binder = self.binder
+ self.binder = ConditionalTypeBinder()
+ with self.binder.top_frame_context():
+ with self.scope.push_class(defn.info):
+ self.accept(defn.defs)
+ self.binder = old_binder
+ if not defn.has_incompatible_baseclass:
+ # Otherwise we've already found errors; more errors are not useful
+ self.check_multiple_inheritance(typ)
def check_multiple_inheritance(self, typ: TypeInfo) -> None:
"""Check for multiple inheritance related errors."""
@@ -1043,13 +1137,14 @@ class TypeChecker(NodeVisitor[Type]):
self.msg.base_class_definitions_incompatible(name, base1, base2,
ctx)
- def visit_import_from(self, node: ImportFrom) -> Type:
+ def visit_import_from(self, node: ImportFrom) -> None:
self.check_import(node)
- return None
- def visit_import_all(self, node: ImportAll) -> Type:
+ def visit_import_all(self, node: ImportAll) -> None:
self.check_import(node)
- return None
+
+ def visit_import(self, s: Import) -> None:
+ pass
def check_import(self, node: ImportBase) -> None:
for assign in node.assignments:
@@ -1068,17 +1163,16 @@ class TypeChecker(NodeVisitor[Type]):
# Statements
#
- def visit_block(self, b: Block) -> Type:
+ def visit_block(self, b: Block) -> None:
if b.is_unreachable:
self.binder.unreachable()
- return None
+ return
for s in b.body:
if self.binder.is_unreachable():
break
self.accept(s)
- return None
- def visit_assignment_stmt(self, s: AssignmentStmt) -> Type:
+ def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
"""Type check an assignment statement.
Handle all kinds of assignment statements (simple, indexed, multiple).
@@ -1089,11 +1183,10 @@ class TypeChecker(NodeVisitor[Type]):
# Chained assignment (e.g. x = y = ...).
# Make sure that rvalue type will not be reinferred.
if s.rvalue not in self.type_map:
- self.accept(s.rvalue)
+ self.expr_checker.accept(s.rvalue)
rvalue = self.temp_node(self.type_map[s.rvalue], s)
for lv in s.lvalues[:-1]:
self.check_assignment(lv, rvalue, s.type is None)
- return None
def check_assignment(self, lvalue: Lvalue, rvalue: Expression, infer_lvalue_type: bool = True,
new_syntax: bool = False) -> None:
@@ -1112,7 +1205,7 @@ class TypeChecker(NodeVisitor[Type]):
if lvalue_type:
if isinstance(lvalue_type, PartialType) and lvalue_type.type is None:
# Try to infer a proper type for a variable with a partial None type.
- rvalue_type = self.accept(rvalue)
+ rvalue_type = self.expr_checker.accept(rvalue)
if isinstance(rvalue_type, NoneTyp):
# This doesn't actually provide any additional information -- multiple
# None initializers preserve the partial None type.
@@ -1123,11 +1216,8 @@ class TypeChecker(NodeVisitor[Type]):
partial_types = self.find_partial_types(var)
if partial_types is not None:
if not self.current_node_deferred:
- if experiments.STRICT_OPTIONAL:
- var.type = UnionType.make_simplified_union(
- [rvalue_type, NoneTyp()])
- else:
- var.type = rvalue_type
+ var.type = UnionType.make_simplified_union(
+ [rvalue_type, NoneTyp()])
else:
var.type = None
del partial_types[var]
@@ -1145,25 +1235,32 @@ class TypeChecker(NodeVisitor[Type]):
rvalue_type = lvalue_type
elif (isinstance(lvalue, MemberExpr) and
lvalue.kind is None): # Ignore member access to modules
- instance_type = self.accept(lvalue.expr)
+ instance_type = self.expr_checker.accept(lvalue.expr)
rvalue_type, infer_lvalue_type = self.check_member_assignment(
instance_type, lvalue_type, rvalue, lvalue)
else:
rvalue_type = self.check_simple_assignment(lvalue_type, rvalue, lvalue)
+ # Special case: only non-abstract classes can be assigned to variables
+ # with explicit type Type[A].
+ if (isinstance(rvalue_type, CallableType) and rvalue_type.is_type_obj() and
+ rvalue_type.type_object().is_abstract and
+ isinstance(lvalue_type, TypeType) and
+ isinstance(lvalue_type.item, Instance) and
+ lvalue_type.item.type.is_abstract):
+ self.fail("Can only assign non-abstract classes"
+ " to a variable of type '{}'".format(lvalue_type), rvalue)
+ return
if rvalue_type and infer_lvalue_type:
- self.binder.assign_type(lvalue,
- rvalue_type,
- lvalue_type,
- False)
+ self.binder.assign_type(lvalue, rvalue_type, lvalue_type, False)
elif index_lvalue:
self.check_indexed_assignment(index_lvalue, rvalue, lvalue)
if inferred:
- self.infer_variable_type(inferred, lvalue, self.accept(rvalue),
+ self.infer_variable_type(inferred, lvalue, self.expr_checker.accept(rvalue),
rvalue)
- def check_compatibility_all_supers(self, lvalue: NameExpr, lvalue_type: Type,
+ def check_compatibility_all_supers(self, lvalue: NameExpr, lvalue_type: Optional[Type],
rvalue: Expression) -> bool:
lvalue_node = lvalue.node
@@ -1173,6 +1270,15 @@ class TypeChecker(NodeVisitor[Type]):
len(lvalue_node.info.bases) > 0):
for base in lvalue_node.info.mro[1:]:
+ tnode = base.names.get(lvalue_node.name())
+ if tnode is not None:
+ if not self.check_compatibility_classvar_super(lvalue_node,
+ base,
+ tnode.node):
+ # Show only one error per variable
+ break
+
+ for base in lvalue_node.info.mro[1:]:
# Only check __slots__ against the 'object'
# If a base class defines a Tuple of 3 elements, a child of
# this class should not be allowed to define it as a Tuple of
@@ -1197,8 +1303,9 @@ class TypeChecker(NodeVisitor[Type]):
break
return False
- def check_compatibility_super(self, lvalue: NameExpr, lvalue_type: Type, rvalue: Expression,
- base: TypeInfo, base_type: Type, base_node: Node) -> bool:
+ def check_compatibility_super(self, lvalue: NameExpr, lvalue_type: Optional[Type],
+ rvalue: Expression, base: TypeInfo, base_type: Type,
+ base_node: Node) -> bool:
lvalue_node = lvalue.node
assert isinstance(lvalue_node, Var)
@@ -1211,7 +1318,7 @@ class TypeChecker(NodeVisitor[Type]):
compare_type = lvalue_type
compare_node = lvalue.node
else:
- compare_type = self.accept(rvalue, base_type)
+ compare_type = self.expr_checker.accept(rvalue, base_type)
if isinstance(rvalue, NameExpr):
compare_node = rvalue.node
if isinstance(compare_node, Decorator):
@@ -1236,8 +1343,8 @@ class TypeChecker(NodeVisitor[Type]):
# Class-level function objects and classmethods become bound
# methods: the former to the instance, the latter to the
# class
- base_type = bind_self(base_type, self.scope.active_class())
- compare_type = bind_self(compare_type, self.scope.active_class())
+ base_type = bind_self(base_type, self.scope.active_self_type())
+ compare_type = bind_self(compare_type, self.scope.active_self_type())
# If we are a static method, ensure to also tell the
# lvalue it now contains a static method
@@ -1266,7 +1373,8 @@ class TypeChecker(NodeVisitor[Type]):
if base_type:
if not has_no_typevars(base_type):
- instance = cast(Instance, self.scope.active_class())
+ # TODO: Handle TupleType, don't cast
+ instance = cast(Instance, self.scope.active_self_type())
itype = map_instance_to_supertype(instance, base)
base_type = expand_type_by_instance(base_type, itype)
@@ -1280,6 +1388,22 @@ class TypeChecker(NodeVisitor[Type]):
return None, None
+ def check_compatibility_classvar_super(self, node: Var,
+ base: TypeInfo, base_node: Node) -> bool:
+ if not isinstance(base_node, Var):
+ return True
+ if node.is_classvar and not base_node.is_classvar:
+ self.fail('Cannot override instance variable '
+ '(previously declared on base class "%s") '
+ 'with class variable' % base.name(), node)
+ return False
+ elif not node.is_classvar and base_node.is_classvar:
+ self.fail('Cannot override class variable '
+ '(previously declared on base class "%s") '
+ 'with instance variable' % base.name(), node)
+ return False
+ return True
+
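check_compatibility_classvar_super rejects redeclaring an inherited instance variable as a ClassVar and vice versa, walking the MRO and reporting at most one error per name. The two situations it flags look like this (the error texts are the ones added above):

    from typing import ClassVar

    class Base:
        instance_attr = 0     # plain instance variable
        class_attr = 0        # type: ClassVar[int]

    class Derived(Base):
        instance_attr = 1     # type: ClassVar[int]
        # error: Cannot override instance variable (previously declared on
        # base class "Base") with class variable
        class_attr = 1        # now a plain instance variable
        # error: Cannot override class variable (previously declared on
        # base class "Base") with instance variable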
def check_assignment_to_multiple_lvalues(self, lvalues: List[Lvalue], rvalue: Expression,
context: Context,
infer_lvalue_type: bool = True) -> None:
@@ -1336,7 +1460,7 @@ class TypeChecker(NodeVisitor[Type]):
"""Check the assignment of one rvalue to a number of lvalues."""
# Infer the type of an ordinary rvalue expression.
- rvalue_type = self.accept(rvalue) # TODO maybe elsewhere; redundant
+ rvalue_type = self.expr_checker.accept(rvalue) # TODO maybe elsewhere; redundant
undefined_rvalue = False
if isinstance(rvalue_type, AnyType):
@@ -1346,10 +1470,10 @@ class TypeChecker(NodeVisitor[Type]):
self.check_assignment(lv, self.temp_node(AnyType(), context), infer_lvalue_type)
elif isinstance(rvalue_type, TupleType):
self.check_multi_assignment_from_tuple(lvalues, rvalue, rvalue_type,
- context, undefined_rvalue, infer_lvalue_type)
+ context, undefined_rvalue, infer_lvalue_type)
else:
self.check_multi_assignment_from_iterable(lvalues, rvalue_type,
- context, infer_lvalue_type)
+ context, infer_lvalue_type)
def check_multi_assignment_from_tuple(self, lvalues: List[Lvalue], rvalue: Expression,
rvalue_type: TupleType, context: Context,
@@ -1366,7 +1490,7 @@ class TypeChecker(NodeVisitor[Type]):
if not undefined_rvalue:
# Infer rvalue again, now in the correct type context.
lvalue_type = self.lvalue_type_for_inference(lvalues, rvalue_type)
- rvalue_type = cast(TupleType, self.accept(rvalue, lvalue_type))
+ rvalue_type = cast(TupleType, self.expr_checker.accept(rvalue, lvalue_type))
left_rv_types, star_rv_types, right_rv_types = self.split_around_star(
rvalue_type.items, star_index, len(lvalues))
@@ -1433,6 +1557,8 @@ class TypeChecker(NodeVisitor[Type]):
return (left, star, right)
def type_is_iterable(self, type: Type) -> bool:
+ if isinstance(type, CallableType) and type.is_type_obj():
+ type = type.fallback
return (is_subtype(type, self.named_generic_type('typing.Iterable',
[AnyType()])) and
isinstance(type, Instance))
@@ -1452,10 +1578,12 @@ class TypeChecker(NodeVisitor[Type]):
else:
self.msg.type_not_iterable(rvalue_type, context)
- def check_lvalue(self, lvalue: Lvalue) -> Tuple[Type, IndexExpr, Var]:
- lvalue_type = None # type: Type
- index_lvalue = None # type: IndexExpr
- inferred = None # type: Var
+ def check_lvalue(self, lvalue: Lvalue) -> Tuple[Optional[Type],
+ Optional[IndexExpr],
+ Optional[Var]]:
+ lvalue_type = None # type: Optional[Type]
+ index_lvalue = None # type: Optional[IndexExpr]
+ inferred = None # type: Optional[Var]
if self.is_definition(lvalue):
if isinstance(lvalue, NameExpr):
@@ -1463,7 +1591,7 @@ class TypeChecker(NodeVisitor[Type]):
assert isinstance(inferred, Var)
else:
assert isinstance(lvalue, MemberExpr)
- self.accept(lvalue.expr)
+ self.expr_checker.accept(lvalue.expr)
inferred = lvalue.def_var
elif isinstance(lvalue, IndexExpr):
index_lvalue = lvalue
@@ -1478,7 +1606,7 @@ class TypeChecker(NodeVisitor[Type]):
types = [self.check_lvalue(sub_expr)[0] for sub_expr in lvalue.items]
lvalue_type = TupleType(types, self.named_type('builtins.tuple'))
else:
- lvalue_type = self.accept(lvalue)
+ lvalue_type = self.expr_checker.accept(lvalue)
return lvalue_type, index_lvalue, inferred
@@ -1501,10 +1629,7 @@ class TypeChecker(NodeVisitor[Type]):
def infer_variable_type(self, name: Var, lvalue: Lvalue,
init_type: Type, context: Context) -> None:
"""Infer the type of initialized variables from initializer type."""
- if self.is_unusable_type(init_type):
- self.check_usable_type(init_type, context)
- self.set_inference_error_fallback_type(name, lvalue, init_type, context)
- elif isinstance(init_type, DeletedType):
+ if isinstance(init_type, DeletedType):
self.msg.deleted_as_rvalue(init_type, context)
elif not is_valid_inferred_type(init_type):
# We cannot use the type of the initialization expression for full type
@@ -1527,7 +1652,7 @@ class TypeChecker(NodeVisitor[Type]):
partial_type = PartialType(None, name, [init_type])
elif isinstance(init_type, Instance):
fullname = init_type.type.fullname()
- if (isinstance(lvalue, NameExpr) and
+ if (isinstance(lvalue, (NameExpr, MemberExpr)) and
(fullname == 'builtins.list' or
fullname == 'builtins.set' or
fullname == 'builtins.dict') and
@@ -1567,14 +1692,6 @@ class TypeChecker(NodeVisitor[Type]):
if context.get_line() in self.errors.ignored_lines[self.errors.file]:
self.set_inferred_type(var, lvalue, AnyType())
- def narrow_type_from_binder(self, expr: Expression, known_type: Type) -> Type:
- if expr.literal >= LITERAL_TYPE:
- restriction = self.binder.get(expr)
- if restriction:
- ans = meet_simple(known_type, restriction)
- return ans
- return known_type
-
def check_simple_assignment(self, lvalue_type: Type, rvalue: Expression,
context: Context,
msg: str = messages.INCOMPATIBLE_TYPES_IN_ASSIGNMENT,
@@ -1584,7 +1701,7 @@ class TypeChecker(NodeVisitor[Type]):
# '...' is always a valid initializer in a stub.
return AnyType()
else:
- rvalue_type = self.accept(rvalue, lvalue_type)
+ rvalue_type = self.expr_checker.accept(rvalue, lvalue_type)
if isinstance(rvalue_type, DeletedType):
self.msg.deleted_as_rvalue(rvalue_type, context)
if isinstance(lvalue_type, DeletedType):
@@ -1656,7 +1773,7 @@ class TypeChecker(NodeVisitor[Type]):
The lvalue argument is the base[index] expression.
"""
self.try_infer_partial_type_from_indexed_assignment(lvalue, rvalue)
- basetype = self.accept(lvalue.base)
+ basetype = self.expr_checker.accept(lvalue.base)
if isinstance(basetype, TypedDictType):
item_type = self.expr_checker.visit_typeddict_index_expr(basetype, lvalue.index)
method_type = CallableType(
@@ -1689,8 +1806,8 @@ class TypeChecker(NodeVisitor[Type]):
typename = type_type.fullname()
if typename == 'builtins.dict':
# TODO: Don't infer things twice.
- key_type = self.accept(lvalue.index)
- value_type = self.accept(rvalue)
+ key_type = self.expr_checker.accept(lvalue.index)
+ value_type = self.expr_checker.accept(rvalue)
full_key_type = UnionType.make_simplified_union(
[key_type, var.type.inner_types[0]])
full_value_type = UnionType.make_simplified_union(
@@ -1702,15 +1819,13 @@ class TypeChecker(NodeVisitor[Type]):
[full_key_type, full_value_type])
del partial_types[var]
- def visit_expression_stmt(self, s: ExpressionStmt) -> Type:
- self.accept(s.expr)
- return None
+ def visit_expression_stmt(self, s: ExpressionStmt) -> None:
+ self.expr_checker.accept(s.expr, allow_none_return=True)
- def visit_return_stmt(self, s: ReturnStmt) -> Type:
+ def visit_return_stmt(self, s: ReturnStmt) -> None:
"""Type check a return statement."""
self.check_return_stmt(s)
self.binder.unreachable()
- return None
def check_return_stmt(self, s: ReturnStmt) -> None:
defn = self.scope.top_function()
@@ -1721,17 +1836,44 @@ class TypeChecker(NodeVisitor[Type]):
else:
return_type = self.return_types[-1]
+ if isinstance(return_type, UninhabitedType):
+ self.fail(messages.NO_RETURN_EXPECTED, s)
+ return
+
if s.expr:
+ is_lambda = isinstance(self.scope.top_function(), LambdaExpr)
+ declared_none_return = isinstance(return_type, NoneTyp)
+ declared_any_return = isinstance(return_type, AnyType)
+
+ # This controls whether or not we allow a function call that
+ # returns None as the expression of this return statement.
+ # E.g. `return f()` for some `f` that returns None. We allow
+ # this only if we're in a lambda or in a function that returns
+ # `None` or `Any`.
+ allow_none_func_call = is_lambda or declared_none_return or declared_any_return
+
# Return with a value.
- typ = self.accept(s.expr, return_type)
+ typ = self.expr_checker.accept(s.expr,
+ return_type,
+ allow_none_return=allow_none_func_call)
+
+ if defn.is_async_generator:
+ self.fail("'return' with value in async generator is not allowed", s)
+ return
# Returning a value of type Any is always fine.
if isinstance(typ, AnyType):
+ # (Unless you asked to be warned in that case, and the
+ # function is not declared to return Any)
+ if not isinstance(return_type, AnyType) and self.options.warn_return_any:
+ self.warn(messages.RETURN_ANY.format(return_type), s)
return
- if self.is_unusable_type(return_type):
- # Lambdas are allowed to have a unusable returns.
- # Functions returning a value of type None are allowed to have a Void return.
- if isinstance(self.scope.top_function(), FuncExpr) or isinstance(typ, NoneTyp):
+ # Disallow return expressions in functions declared to return
+ # None, subject to two exceptions below.
+ if declared_none_return:
+ # Lambdas are allowed to have None returns.
+ # Functions returning a value of type None are allowed to have a None return.
+ if is_lambda or isinstance(typ, NoneTyp):
return
self.fail(messages.NO_RETURN_VALUE_EXPECTED, s)
else:
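check_return_stmt now lets "return f()" where f returns None pass only inside a lambda or a function itself declared to return None or Any (allow_none_func_call), rejects "return" with a value inside async generators, and can warn when a function not declared to return Any returns an Any value (warn_return_any). A small illustration of the None-call rule:

    def log(msg: str) -> None:
        print(msg)

    def fine() -> None:
        # Accepted: the enclosing function is declared to return None.
        return log('done')

    def suspicious() -> int:
        # Rejected: log() returns None, but an int return value is declared.
        return log('oops')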
@@ -1749,20 +1891,19 @@ class TypeChecker(NodeVisitor[Type]):
isinstance(return_type, AnyType)):
return
- if isinstance(return_type, (Void, NoneTyp, AnyType)):
+ if isinstance(return_type, (NoneTyp, AnyType)):
return
if self.in_checked_function():
self.fail(messages.RETURN_VALUE_EXPECTED, s)
- def visit_if_stmt(self, s: IfStmt) -> Type:
+ def visit_if_stmt(self, s: IfStmt) -> None:
"""Type check an if statement."""
# This frame records the knowledge from previous if/elif clauses not being taken.
# Fall-through to the original frame is handled explicitly in each block.
with self.binder.frame_context(can_skip=False, fall_through=0):
for e, b in zip(s.expr, s.body):
- t = self.accept(e)
- self.check_usable_type(t, e)
+ t = self.expr_checker.accept(e)
if isinstance(t, DeletedType):
self.msg.deleted_as_rvalue(t, s)
@@ -1785,20 +1926,18 @@ class TypeChecker(NodeVisitor[Type]):
with self.binder.frame_context(can_skip=False, fall_through=2):
if s.else_body:
self.accept(s.else_body)
- return None
- def visit_while_stmt(self, s: WhileStmt) -> Type:
+ def visit_while_stmt(self, s: WhileStmt) -> None:
"""Type check a while statement."""
if_stmt = IfStmt([s.expr], [s.body], None)
if_stmt.set_line(s.get_line(), s.get_column())
self.accept_loop(if_stmt, s.else_body,
exit_condition=s.expr)
- return None
def visit_operator_assignment_stmt(self,
- s: OperatorAssignmentStmt) -> Type:
+ s: OperatorAssignmentStmt) -> None:
"""Type check an operator assignment statement, e.g. x += 1."""
- lvalue_type = self.accept(s.lvalue)
+ lvalue_type = self.expr_checker.accept(s.lvalue)
inplace, method = infer_operator_assignment_method(lvalue_type, s.op)
rvalue_type, method_type = self.expr_checker.check_op(
method, lvalue_type, s.rvalue, s)
@@ -1808,36 +1947,31 @@ class TypeChecker(NodeVisitor[Type]):
else:
if not is_subtype(rvalue_type, lvalue_type):
self.msg.incompatible_operator_assignment(s.op, s)
- return None
- def visit_assert_stmt(self, s: AssertStmt) -> Type:
- self.accept(s.expr)
+ def visit_assert_stmt(self, s: AssertStmt) -> None:
+ self.expr_checker.accept(s.expr)
if s.msg is not None:
- self.accept(s.msg)
+ self.expr_checker.accept(s.msg)
- if self.options.fast_parser:
- if isinstance(s.expr, TupleExpr) and len(s.expr.items) > 0:
- self.warn(messages.MALFORMED_ASSERT, s)
+ if isinstance(s.expr, TupleExpr) and len(s.expr.items) > 0:
+ self.warn(messages.MALFORMED_ASSERT, s)
# If this is asserting some isinstance check, bind that type in the following code
true_map, _ = self.find_isinstance_check(s.expr)
-
self.push_type_map(true_map)
- return None
- def visit_raise_stmt(self, s: RaiseStmt) -> Type:
+ def visit_raise_stmt(self, s: RaiseStmt) -> None:
"""Type check a raise statement."""
if s.expr:
self.type_check_raise(s.expr, s)
if s.from_expr:
self.type_check_raise(s.from_expr, s, True)
self.binder.unreachable()
- return None
def type_check_raise(self, e: Expression, s: RaiseStmt,
optional: bool = False) -> None:
- typ = self.accept(e)
+ typ = self.expr_checker.accept(e)
if isinstance(typ, FunctionLike):
if typ.is_type_obj():
# Cases like "raise/from ExceptionClass".
@@ -1861,7 +1995,7 @@ class TypeChecker(NodeVisitor[Type]):
expected_type = UnionType([expected_type, NoneTyp()])
self.check_subtype(typ, expected_type, s, messages.INVALID_EXCEPTION)
- def visit_try_stmt(self, s: TryStmt) -> Type:
+ def visit_try_stmt(self, s: TryStmt) -> None:
"""Type check a try statement."""
# Our enclosing frame will get the result if the try/except falls through.
# This one gets all possible states after the try block exited abnormally
@@ -1893,8 +2027,6 @@ class TypeChecker(NodeVisitor[Type]):
# that follows the try statement.)
self.accept(s.finally_body)
- return None
-
def visit_try_without_finally(self, s: TryStmt, try_frame: bool) -> None:
"""Type check a try statement, ignoring the finally block.
@@ -1915,7 +2047,7 @@ class TypeChecker(NodeVisitor[Type]):
for i in range(len(s.handlers)):
with self.binder.frame_context(can_skip=True, fall_through=4):
if s.types[i]:
- t = self.visit_except_handler_test(s.types[i])
+ t = self.check_except_handler_test(s.types[i])
if s.vars[i]:
# To support local variables, we make this a definition line,
# causing assignment to set the variable's type.
@@ -1948,12 +2080,12 @@ class TypeChecker(NodeVisitor[Type]):
if s.else_body:
self.accept(s.else_body)
- def visit_except_handler_test(self, n: Expression) -> Type:
+ def check_except_handler_test(self, n: Expression) -> Type:
"""Type check an exception handler test clause."""
- typ = self.accept(n)
+ typ = self.expr_checker.accept(n)
all_types = [] # type: List[Type]
- test_types = typ.items if isinstance(typ, TupleType) else [typ]
+ test_types = self.get_types_from_except_handler(typ, n)
for ttype in test_types:
if isinstance(ttype, AnyType):
@@ -1980,7 +2112,23 @@ class TypeChecker(NodeVisitor[Type]):
return UnionType.make_simplified_union(all_types)
- def visit_for_stmt(self, s: ForStmt) -> Type:
+ def get_types_from_except_handler(self, typ: Type, n: Expression) -> List[Type]:
+ """Helper for check_except_handler_test to retrieve handler types."""
+ if isinstance(typ, TupleType):
+ return typ.items
+ elif isinstance(typ, UnionType):
+ return [
+ union_typ
+ for item in typ.items
+ for union_typ in self.get_types_from_except_handler(item, n)
+ ]
+ elif isinstance(typ, Instance) and is_named_instance(typ, 'builtins.tuple'):
+ # variadic tuple
+ return [typ.args[0]]
+ else:
+ return [typ]
+
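get_types_from_except_handler flattens the expression given to an except clause: tuples contribute their items, unions are flattened recursively, and a variadic builtins.tuple contributes its element type. The same idea expressed over ordinary typing objects (Python 3.8+ get_origin/get_args, not mypy's internal Type representation):

    from typing import List, Tuple, Union, get_args, get_origin

    def handler_types(typ: object) -> List[object]:
        # Tuples and unions are flattened recursively; Tuple[E, ...]
        # contributes E; anything else is taken as a single handler type.
        origin = get_origin(typ)
        if origin is tuple:
            args = get_args(typ)
            if len(args) == 2 and args[1] is Ellipsis:
                return [args[0]]
            return [t for a in args for t in handler_types(a)]
        if origin is Union:
            return [t for a in get_args(typ) for t in handler_types(a)]
        return [typ]

    assert handler_types(Tuple[ValueError, ...]) == [ValueError]
    assert handler_types(Union[Tuple[KeyError, OSError], ValueError]) == \
        [KeyError, OSError, ValueError]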
+ def visit_for_stmt(self, s: ForStmt) -> None:
"""Type check a for statement."""
if s.is_async:
item_type = self.analyze_async_iterable_item_type(s.expr)
@@ -1988,20 +2136,17 @@ class TypeChecker(NodeVisitor[Type]):
item_type = self.analyze_iterable_item_type(s.expr)
self.analyze_index_variables(s.index, item_type, s.index_type is None, s)
self.accept_loop(s.body, s.else_body)
- return None
def analyze_async_iterable_item_type(self, expr: Expression) -> Type:
"""Analyse async iterable expression and return iterator item type."""
- iterable = self.accept(expr)
-
- self.check_usable_type(iterable, expr)
+ echk = self.expr_checker
+ iterable = echk.accept(expr)
self.check_subtype(iterable,
self.named_generic_type('typing.AsyncIterable',
[AnyType()]),
expr, messages.ASYNC_ITERABLE_EXPECTED)
- echk = self.expr_checker
method = echk.analyze_external_member_access('__aiter__', iterable, expr)
iterator = echk.check_call(method, [], [], expr)[0]
method = echk.analyze_external_member_access('__anext__', iterator, expr)
@@ -2011,19 +2156,13 @@ class TypeChecker(NodeVisitor[Type]):
def analyze_iterable_item_type(self, expr: Expression) -> Type:
"""Analyse iterable expression and return iterator item type."""
- iterable = self.accept(expr)
+ echk = self.expr_checker
+ iterable = echk.accept(expr)
- self.check_usable_type(iterable, expr)
if isinstance(iterable, TupleType):
- if experiments.STRICT_OPTIONAL:
- joined = UninhabitedType() # type: Type
- else:
- joined = NoneTyp()
+ joined = UninhabitedType() # type: Type
for item in iterable.items:
joined = join_types(joined, item)
- if isinstance(joined, ErrorType):
- self.fail(messages.CANNOT_INFER_ITEM_TYPE, expr)
- return AnyType()
return joined
else:
# Non-tuple iterable.
@@ -2032,7 +2171,6 @@ class TypeChecker(NodeVisitor[Type]):
[AnyType()]),
expr, messages.ITERABLE_EXPECTED)
- echk = self.expr_checker
method = echk.analyze_external_member_access('__iter__', iterable,
expr)
iterator = echk.check_call(method, [], [], expr)[0]
@@ -2049,50 +2187,37 @@ class TypeChecker(NodeVisitor[Type]):
"""Type check or infer for loop or list comprehension index vars."""
self.check_assignment(index, self.temp_node(item_type, context), infer_lvalue_type)
- def visit_del_stmt(self, s: DelStmt) -> Type:
+ def visit_del_stmt(self, s: DelStmt) -> None:
if isinstance(s.expr, IndexExpr):
e = s.expr
m = MemberExpr(e.base, '__delitem__')
m.line = s.line
c = CallExpr(m, [e.index], [nodes.ARG_POS], [None])
c.line = s.line
- c.accept(self)
- return None
+ self.expr_checker.accept(c, allow_none_return=True)
else:
- def flatten(t: Expression) -> List[Expression]:
- """Flatten a nested sequence of tuples/lists into one list of nodes."""
- if isinstance(t, TupleExpr) or isinstance(t, ListExpr):
- return [b for a in t.items for b in flatten(a)]
- else:
- return [t]
-
- s.expr.accept(self)
+ s.expr.accept(self.expr_checker)
for elt in flatten(s.expr):
if isinstance(elt, NameExpr):
- self.binder.assign_type(elt,
- DeletedType(source=elt.name),
- self.binder.get_declaration(elt),
- False)
- return None
+ self.binder.assign_type(elt, DeletedType(source=elt.name),
+ get_declaration(elt), False)
- def visit_decorator(self, e: Decorator) -> Type:
+ def visit_decorator(self, e: Decorator) -> None:
for d in e.decorators:
if isinstance(d, RefExpr):
if d.fullname == 'typing.no_type_check':
e.var.type = AnyType()
e.var.is_ready = True
- return None
+ return
e.func.accept(self)
sig = self.function_type(e.func) # type: Type
# Process decorators from the inside out.
- for i in range(len(e.decorators)):
- n = len(e.decorators) - 1 - i
- d = e.decorators[n]
- if isinstance(d, NameExpr) and d.fullname == 'typing.overload':
+ for d in reversed(e.decorators):
+ if refers_to_fullname(d, 'typing.overload'):
self.fail('Single overload definition, multiple required', e)
continue
- dec = self.accept(d)
+ dec = self.expr_checker.accept(d)
temp = self.temp_node(sig)
sig, t2 = self.expr_checker.check_call(dec, [temp],
[nodes.ARG_POS], e)
@@ -2102,7 +2227,6 @@ class TypeChecker(NodeVisitor[Type]):
e.var.is_ready = True
if e.func.is_property:
self.check_incompatible_property_override(e)
- return None
def check_incompatible_property_override(self, e: Decorator) -> None:
if not e.var.is_settable_property and e.func.info is not None:
@@ -2113,22 +2237,22 @@ class TypeChecker(NodeVisitor[Type]):
continue
if (isinstance(base_attr.node, OverloadedFuncDef) and
base_attr.node.is_property and
- base_attr.node.items[0].var.is_settable_property):
+ cast(Decorator,
+ base_attr.node.items[0]).var.is_settable_property):
self.fail(messages.READ_ONLY_PROPERTY_OVERRIDES_READ_WRITE, e)
- def visit_with_stmt(self, s: WithStmt) -> Type:
+ def visit_with_stmt(self, s: WithStmt) -> None:
for expr, target in zip(s.expr, s.target):
if s.is_async:
self.check_async_with_item(expr, target, s.target_type is None)
else:
self.check_with_item(expr, target, s.target_type is None)
self.accept(s.body)
- return None
def check_async_with_item(self, expr: Expression, target: Expression,
infer_lvalue_type: bool) -> None:
echk = self.expr_checker
- ctx = self.accept(expr)
+ ctx = echk.accept(expr)
enter = echk.analyze_external_member_access('__aenter__', ctx, expr)
obj = echk.check_call(enter, [], [], expr)[0]
obj = echk.check_awaitable_expr(
@@ -2144,7 +2268,7 @@ class TypeChecker(NodeVisitor[Type]):
def check_with_item(self, expr: Expression, target: Expression,
infer_lvalue_type: bool) -> None:
echk = self.expr_checker
- ctx = self.accept(expr)
+ ctx = echk.accept(expr)
enter = echk.analyze_external_member_access('__enter__', ctx, expr)
obj = echk.check_call(enter, [], [], expr)[0]
if target:
@@ -2153,152 +2277,23 @@ class TypeChecker(NodeVisitor[Type]):
arg = self.temp_node(AnyType(), expr)
echk.check_call(exit, [arg] * 3, [nodes.ARG_POS] * 3, expr)
- def visit_print_stmt(self, s: PrintStmt) -> Type:
+ def visit_print_stmt(self, s: PrintStmt) -> None:
for arg in s.args:
- self.accept(arg)
+ self.expr_checker.accept(arg)
if s.target:
- target_type = self.accept(s.target)
+ target_type = self.expr_checker.accept(s.target)
if not isinstance(target_type, NoneTyp):
# TODO: Also verify the type of 'write'.
self.expr_checker.analyze_external_member_access('write', target_type, s.target)
- return None
- def visit_break_stmt(self, s: BreakStmt) -> Type:
+ def visit_break_stmt(self, s: BreakStmt) -> None:
self.binder.handle_break()
- return None
- def visit_continue_stmt(self, s: ContinueStmt) -> Type:
+ def visit_continue_stmt(self, s: ContinueStmt) -> None:
self.binder.handle_continue()
return None
#
- # Expressions
- #
-
- def visit_name_expr(self, e: NameExpr) -> Type:
- return self.expr_checker.visit_name_expr(e)
-
- def visit_call_expr(self, e: CallExpr) -> Type:
- return self.expr_checker.visit_call_expr(e)
-
- def visit_yield_from_expr(self, e: YieldFromExpr) -> Type:
- return self.expr_checker.visit_yield_from_expr(e)
-
- def visit_member_expr(self, e: MemberExpr) -> Type:
- return self.expr_checker.visit_member_expr(e)
-
- def visit_int_expr(self, e: IntExpr) -> Type:
- return self.expr_checker.visit_int_expr(e)
-
- def visit_str_expr(self, e: StrExpr) -> Type:
- return self.expr_checker.visit_str_expr(e)
-
- def visit_bytes_expr(self, e: BytesExpr) -> Type:
- return self.expr_checker.visit_bytes_expr(e)
-
- def visit_unicode_expr(self, e: UnicodeExpr) -> Type:
- return self.expr_checker.visit_unicode_expr(e)
-
- def visit_float_expr(self, e: FloatExpr) -> Type:
- return self.expr_checker.visit_float_expr(e)
-
- def visit_complex_expr(self, e: ComplexExpr) -> Type:
- return self.expr_checker.visit_complex_expr(e)
-
- def visit_ellipsis(self, e: EllipsisExpr) -> Type:
- return self.expr_checker.visit_ellipsis(e)
-
- def visit_op_expr(self, e: OpExpr) -> Type:
- return self.expr_checker.visit_op_expr(e)
-
- def visit_comparison_expr(self, e: ComparisonExpr) -> Type:
- return self.expr_checker.visit_comparison_expr(e)
-
- def visit_unary_expr(self, e: UnaryExpr) -> Type:
- return self.expr_checker.visit_unary_expr(e)
-
- def visit_index_expr(self, e: IndexExpr) -> Type:
- return self.expr_checker.visit_index_expr(e)
-
- def visit_cast_expr(self, e: CastExpr) -> Type:
- return self.expr_checker.visit_cast_expr(e)
-
- def visit_reveal_type_expr(self, e: RevealTypeExpr) -> Type:
- return self.expr_checker.visit_reveal_type_expr(e)
-
- def visit_super_expr(self, e: SuperExpr) -> Type:
- return self.expr_checker.visit_super_expr(e)
-
- def visit_type_application(self, e: TypeApplication) -> Type:
- return self.expr_checker.visit_type_application(e)
-
- def visit_type_alias_expr(self, e: TypeAliasExpr) -> Type:
- return self.expr_checker.visit_type_alias_expr(e)
-
- def visit_type_var_expr(self, e: TypeVarExpr) -> Type:
- return self.expr_checker.visit_type_var_expr(e)
-
- def visit_newtype_expr(self, e: NewTypeExpr) -> Type:
- return self.expr_checker.visit_newtype_expr(e)
-
- def visit_namedtuple_expr(self, e: NamedTupleExpr) -> Type:
- return self.expr_checker.visit_namedtuple_expr(e)
-
- def visit_typeddict_expr(self, e: TypedDictExpr) -> Type:
- return self.expr_checker.visit_typeddict_expr(e)
-
- def visit_list_expr(self, e: ListExpr) -> Type:
- return self.expr_checker.visit_list_expr(e)
-
- def visit_set_expr(self, e: SetExpr) -> Type:
- return self.expr_checker.visit_set_expr(e)
-
- def visit_tuple_expr(self, e: TupleExpr) -> Type:
- return self.expr_checker.visit_tuple_expr(e)
-
- def visit_dict_expr(self, e: DictExpr) -> Type:
- return self.expr_checker.visit_dict_expr(e)
-
- def visit_slice_expr(self, e: SliceExpr) -> Type:
- return self.expr_checker.visit_slice_expr(e)
-
- def visit_func_expr(self, e: FuncExpr) -> Type:
- return self.expr_checker.visit_func_expr(e)
-
- def visit_list_comprehension(self, e: ListComprehension) -> Type:
- return self.expr_checker.visit_list_comprehension(e)
-
- def visit_set_comprehension(self, e: SetComprehension) -> Type:
- return self.expr_checker.visit_set_comprehension(e)
-
- def visit_generator_expr(self, e: GeneratorExpr) -> Type:
- return self.expr_checker.visit_generator_expr(e)
-
- def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> Type:
- return self.expr_checker.visit_dictionary_comprehension(e)
-
- def visit_temp_node(self, e: TempNode) -> Type:
- return self.expr_checker.visit_temp_node(e)
-
- def visit_conditional_expr(self, e: ConditionalExpr) -> Type:
- return self.expr_checker.visit_conditional_expr(e)
-
- def visit_backquote_expr(self, e: BackquoteExpr) -> Type:
- return self.expr_checker.visit_backquote_expr(e)
-
- def visit_yield_expr(self, e: YieldExpr) -> Type:
- return self.expr_checker.visit_yield_expr(e)
-
- def visit_await_expr(self, e: AwaitExpr) -> Type:
- return self.expr_checker.visit_await_expr(e)
-
- def visit__promote_expr(self, e: PromoteExpr) -> Type:
- return self.expr_checker.visit__promote_expr(e)
-
- def visit_star_expr(self, e: StarExpr) -> Type:
- return self.expr_checker.visit_star_expr(e)
-
- #
# Helpers
#
@@ -2311,21 +2306,18 @@ class TypeChecker(NodeVisitor[Type]):
if is_subtype(subtype, supertype):
return True
else:
- if self.is_unusable_type(subtype):
- self.msg.does_not_return_value(subtype, context)
- else:
- if self.should_suppress_optional_error([subtype]):
- return False
- extra_info = [] # type: List[str]
- if subtype_label is not None or supertype_label is not None:
- subtype_str, supertype_str = self.msg.format_distinctly(subtype, supertype)
- if subtype_label is not None:
- extra_info.append(subtype_label + ' ' + subtype_str)
- if supertype_label is not None:
- extra_info.append(supertype_label + ' ' + supertype_str)
- if extra_info:
- msg += ' (' + ', '.join(extra_info) + ')'
- self.fail(msg, context)
+ if self.should_suppress_optional_error([subtype]):
+ return False
+ extra_info = [] # type: List[str]
+ if subtype_label is not None or supertype_label is not None:
+ subtype_str, supertype_str = self.msg.format_distinctly(subtype, supertype)
+ if subtype_label is not None:
+ extra_info.append(subtype_label + ' ' + subtype_str)
+ if supertype_label is not None:
+ extra_info.append(supertype_label + ' ' + supertype_str)
+ if extra_info:
+ msg += ' (' + ', '.join(extra_info) + ')'
+ self.fail(msg, context)
return False
def contains_none(self, t: Type) -> bool:
@@ -2347,7 +2339,9 @@ class TypeChecker(NodeVisitor[Type]):
"""
# Assume that the name refers to a type.
sym = self.lookup_qualified(name)
- return Instance(cast(TypeInfo, sym.node), [])
+ node = sym.node
+ assert isinstance(node, TypeInfo)
+ return Instance(node, [AnyType()] * len(node.defn.type_vars))
def named_generic_type(self, name: str, args: List[Type]) -> Instance:
"""Return an instance with the given name and type arguments.
@@ -2355,25 +2349,21 @@ class TypeChecker(NodeVisitor[Type]):
Assume that the number of arguments is correct. Assume that
the name refers to a compatible generic type.
"""
- return Instance(self.lookup_typeinfo(name), args)
+ info = self.lookup_typeinfo(name)
+ # TODO: assert len(args) == len(info.defn.type_vars)
+ return Instance(info, args)
def lookup_typeinfo(self, fullname: str) -> TypeInfo:
# Assume that the name refers to a class.
sym = self.lookup_qualified(fullname)
- return cast(TypeInfo, sym.node)
+ node = sym.node
+ assert isinstance(node, TypeInfo)
+ return node
def type_type(self) -> Instance:
"""Return instance type 'type'."""
return self.named_type('builtins.type')
- def object_type(self) -> Instance:
- """Return instance type 'object'."""
- return self.named_type('builtins.object')
-
- def bool_type(self) -> Instance:
- """Return instance type 'bool'."""
- return self.named_type('builtins.bool')
-
def str_type(self) -> Instance:
"""Return instance type 'str'."""
return self.named_type('builtins.str')
@@ -2427,21 +2417,20 @@ class TypeChecker(NodeVisitor[Type]):
msg = "Failed qualified lookup: '{}' (fullname = '{}')."
raise KeyError(msg.format(last, name))
- def enter_partial_types(self) -> None:
- """Push a new scope for collecting partial types."""
- self.partial_types.append({})
-
- def leave_partial_types(self) -> None:
- """Pop partial type scope.
+ @contextmanager
+ def enter_partial_types(self) -> Iterator[None]:
+ """Enter a new scope for collecting partial types.
Also report errors for variables which still have partial
types, i.e. we couldn't infer a complete type.
"""
+ self.partial_types.append({})
+ yield
+
partial_types = self.partial_types.pop()
if not self.current_node_deferred:
for var, context in partial_types.items():
- if (experiments.STRICT_OPTIONAL and
- isinstance(var.type, PartialType) and var.type.type is None):
+ if isinstance(var.type, PartialType) and var.type.type is None:
# None partial type: assume variable is intended to have type None
var.type = NoneTyp()
else:
@@ -2454,18 +2443,6 @@ class TypeChecker(NodeVisitor[Type]):
return partial_types
return None
- def is_unusable_type(self, typ: Type) -> bool:
- """Is this type an unusable type?
-
- The two unusable types are Void and NoneTyp(is_ret_type=True).
- """
- return isinstance(typ, Void) or (isinstance(typ, NoneTyp) and typ.is_ret_type)
-
- def check_usable_type(self, typ: Type, context: Context) -> None:
- """Generate an error if the type is not a usable type."""
- if self.is_unusable_type(typ):
- self.msg.does_not_return_value(typ, context)
-
def temp_node(self, t: Type, context: Context = None) -> TempNode:
"""Create a temporary node with the given, fixed type."""
temp = TempNode(t)
@@ -2481,6 +2458,10 @@ class TypeChecker(NodeVisitor[Type]):
"""Produce a warning message."""
self.msg.warn(msg, context)
+ def note(self, msg: str, context: Context) -> None:
+ """Produce a note."""
+ self.msg.note(msg, context)
+
def iterable_item_type(self, instance: Instance) -> Type:
iterable = map_instance_to_supertype(
instance,
@@ -2500,7 +2481,7 @@ class TypeChecker(NodeVisitor[Type]):
self.binder.unreachable()
else:
for expr, type in type_map.items():
- self.binder.push(expr, type)
+ self.binder.put(expr, type)
# Data structure returned by find_isinstance_check representing
# information learned from the truth or falsehood of a condition. The
@@ -2519,10 +2500,19 @@ class TypeChecker(NodeVisitor[Type]):
TypeMap = Optional[Dict[Expression, Type]]
+# An object that represents either a precise type or a type with an upper bound;
+# it is important for correct type inference with isinstance.
+TypeRange = NamedTuple(
+ 'TypeRange',
+ [
+ ('item', Type),
+ ('is_upper_bound', bool), # False => precise type
+ ])
+
def conditional_type_map(expr: Expression,
current_type: Optional[Type],
- proposed_type: Optional[Type],
+ proposed_type_ranges: Optional[List[TypeRange]],
) -> Tuple[TypeMap, TypeMap]:
"""Takes in an expression, the current type of the expression, and a
proposed type of that expression.
@@ -2530,17 +2520,26 @@ def conditional_type_map(expr: Expression,
Returns a 2-tuple: The first element is a map from the expression to
the proposed type, if the expression can be the proposed type. The
second element is a map from the expression to the type it would hold
- if it was not the proposed type, if any."""
- if proposed_type:
+ if it was not the proposed type, if any. None means bot, {} means top"""
+ if proposed_type_ranges:
+ if len(proposed_type_ranges) == 1:
+ proposed_type = proposed_type_ranges[0].item # Union with a single type breaks tests
+ else:
+ proposed_type = UnionType([type_range.item for type_range in proposed_type_ranges])
if current_type:
- if is_proper_subtype(current_type, proposed_type):
- # Expression is always of type proposed_type
+ if (not any(type_range.is_upper_bound for type_range in proposed_type_ranges)
+ and is_proper_subtype(current_type, proposed_type)):
+ # Expression is always of one of the types in proposed_type_ranges
return {}, None
elif not is_overlapping_types(current_type, proposed_type):
- # Expression is never of type proposed_type
+ # Expression is never of any type in proposed_type_ranges
return None, {}
else:
- remaining_type = restrict_subtype_away(current_type, proposed_type)
+ # we can only restrict when the type is precise, not bounded
+ proposed_precise_type = UnionType([type_range.item
+ for type_range in proposed_type_ranges
+ if not type_range.is_upper_bound])
+ remaining_type = restrict_subtype_away(current_type, proposed_precise_type)
return {expr: proposed_type}, {expr: remaining_type}
else:
return {expr: proposed_type}, {}
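
The is_upper_bound flag introduced above matters when the second argument of isinstance() is an
expression typed as Type[A] rather than a literal class. A minimal sketch of the intended effect,
assuming the new code behaves as its comments describe; the names A, check and obj are illustrative
only, and reveal_type() is understood by mypy, not by Python at runtime:

from typing import Type

class A: ...

def check(obj: object, cls: Type[A]) -> None:
    if isinstance(obj, cls):
        # cls is only an upper bound (it may be any subclass of A), so the
        # checker narrows obj to A rather than to a precise class.
        reveal_type(obj)  # revealed type: A
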
@@ -2688,6 +2687,21 @@ def or_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap:
return result
+def convert_to_typetype(type_map: TypeMap) -> TypeMap:
+ converted_type_map = {} # type: TypeMap
+ if type_map is None:
+ return None
+ for expr, typ in type_map.items():
+ if isinstance(typ, UnionType):
+ converted_type_map[expr] = UnionType([TypeType(t) for t in typ.items])
+ elif isinstance(typ, Instance):
+ converted_type_map[expr] = TypeType(typ)
+ else:
+ # unknown type; error was likely reported earlier
+ return {}
+ return converted_type_map
+
+
def find_isinstance_check(node: Expression,
type_map: Dict[Expression, Type],
) -> Tuple[TypeMap, TypeMap]:
@@ -2708,29 +2722,56 @@ def find_isinstance_check(node: Expression,
return None, {}
elif isinstance(node, CallExpr):
if refers_to_fullname(node.callee, 'builtins.isinstance'):
+ if len(node.args) != 2: # the error will be reported later
+ return {}, {}
expr = node.args[0]
if expr.literal == LITERAL_TYPE:
vartype = type_map[expr]
type = get_isinstance_type(node.args[1], type_map)
return conditional_type_map(expr, vartype, type)
+ elif refers_to_fullname(node.callee, 'builtins.issubclass'):
+ expr = node.args[0]
+ if expr.literal == LITERAL_TYPE:
+ vartype = type_map[expr]
+ type = get_isinstance_type(node.args[1], type_map)
+ if isinstance(vartype, UnionType):
+ union_list = []
+ for t in vartype.items:
+ if isinstance(t, TypeType):
+ union_list.append(t.item)
+ else:
+ # this is an error that should be reported earlier
+ # if we reach here, we refuse to do any type inference
+ return {}, {}
+ vartype = UnionType(union_list)
+ elif isinstance(vartype, TypeType):
+ vartype = vartype.item
+ else:
+ # any other object whose type we don't know precisely
+ # for example, Any or Instance of type type
+ return {}, {} # unknown type
+ yes_map, no_map = conditional_type_map(expr, vartype, type)
+ yes_map, no_map = map(convert_to_typetype, (yes_map, no_map))
+ return yes_map, no_map
elif refers_to_fullname(node.callee, 'builtins.callable'):
expr = node.args[0]
if expr.literal == LITERAL_TYPE:
vartype = type_map[expr]
return conditional_callable_type_map(expr, vartype)
- elif (isinstance(node, ComparisonExpr) and experiments.STRICT_OPTIONAL):
+ elif isinstance(node, ComparisonExpr) and experiments.STRICT_OPTIONAL:
# Check for `x is None` and `x is not None`.
is_not = node.operators == ['is not']
if any(is_literal_none(n) for n in node.operands) and (is_not or node.operators == ['is']):
- if_vars = {} # type: Dict[Expression, Type]
- else_vars = {} # type: Dict[Expression, Type]
+ if_vars = {} # type: TypeMap
+ else_vars = {} # type: TypeMap
for expr in node.operands:
if expr.literal == LITERAL_TYPE and not is_literal_none(expr) and expr in type_map:
# This should only be true at most once: there should be
# two elements in node.operands, and at least one of them
# should represent a None.
vartype = type_map[expr]
- if_vars, else_vars = conditional_type_map(expr, vartype, NoneTyp())
+ none_typ = [TypeRange(NoneTyp(), is_upper_bound=False)]
+ if_vars, else_vars = conditional_type_map(expr, vartype, none_typ)
break
if is_not:
@@ -2784,31 +2825,47 @@ def find_isinstance_check(node: Expression,
return {}, {}
-def get_isinstance_type(expr: Expression, type_map: Dict[Expression, Type]) -> Type:
- type = type_map[expr]
-
- if isinstance(type, TupleType):
- all_types = type.items
+def flatten(t: Expression) -> List[Expression]:
+ """Flatten a nested sequence of tuples/lists into one list of nodes."""
+ if isinstance(t, TupleExpr) or isinstance(t, ListExpr):
+ return [b for a in t.items for b in flatten(a)]
else:
- all_types = [type]
+ return [t]
- types = [] # type: List[Type]
- for type in all_types:
- if isinstance(type, FunctionLike):
- if type.is_type_obj():
- # Type variables may be present -- erase them, which is the best
- # we can do (outside disallowing them here).
- type = erase_typevars(type.items()[0].ret_type)
-
- types.append(type)
-
- if len(types) == 0:
- return None
- elif len(types) == 1:
- return types[0]
+def flatten_types(t: Type) -> List[Type]:
+ """Flatten a nested sequence of tuples into one list of nodes."""
+ if isinstance(t, TupleType):
+ return [b for a in t.items for b in flatten_types(a)]
else:
- return UnionType(types)
+ return [t]
+
+
+def get_isinstance_type(expr: Expression,
+ type_map: Dict[Expression, Type]) -> Optional[List[TypeRange]]:
+ all_types = flatten_types(type_map[expr])
+ types = [] # type: List[TypeRange]
+ for typ in all_types:
+ if isinstance(typ, FunctionLike) and typ.is_type_obj():
+ # Type variables may be present -- erase them, which is the best
+ # we can do (outside disallowing them here).
+ typ = erase_typevars(typ.items()[0].ret_type)
+ types.append(TypeRange(typ, is_upper_bound=False))
+ elif isinstance(typ, TypeType):
+ # Type[A] means "any type that is a subtype of A" rather than "precisely type A"
+ # we indicate this by setting is_upper_bound flag
+ types.append(TypeRange(typ.item, is_upper_bound=True))
+ elif isinstance(typ, Instance) and typ.type.fullname() == 'builtins.type':
+ object_type = Instance(typ.type.mro[-1], [])
+ types.append(TypeRange(object_type, is_upper_bound=True))
+ else: # we didn't see an actual type, but rather a variable whose value is unknown to us
+ return None
+ if not types:
+ # this can happen if someone has empty tuple as 2nd argument to isinstance
+ # strictly speaking, we should return UninhabitedType but for simplicity we will simply
+ # refuse to do any type inference for now
+ return None
+ return types
def expand_func(defn: FuncItem, map: Dict[TypeVarId, Type]) -> FuncItem:
@@ -2950,17 +3007,17 @@ def is_more_precise_signature(t: CallableType, s: CallableType) -> bool:
return is_more_precise(t.ret_type, s.ret_type)
-def infer_operator_assignment_method(type: Type, operator: str) -> Tuple[bool, str]:
+def infer_operator_assignment_method(typ: Type, operator: str) -> Tuple[bool, str]:
"""Determine if operator assignment on given value type is in-place, and the method name.
For example, if operator is '+', return (True, '__iadd__') or (False, '__add__')
depending on which method is supported by the type.
"""
method = nodes.op_methods[operator]
- if isinstance(type, Instance):
+ if isinstance(typ, Instance):
if operator in nodes.ops_with_inplace_method:
inplace_method = '__i' + method[2:]
- if type.type.has_readable_member(inplace_method):
+ if typ.type.has_readable_member(inplace_method):
return True, inplace_method
return False, method
@@ -2989,9 +3046,6 @@ def is_valid_inferred_type_component(typ: Type) -> bool:
In strict Optional mode this excludes bare None types, as otherwise every
type containing None would be invalid.
"""
- if not experiments.STRICT_OPTIONAL:
- if is_same_type(typ, NoneTyp()):
- return False
if is_same_type(typ, UninhabitedType()):
return False
elif isinstance(typ, Instance):
@@ -3019,7 +3073,7 @@ def is_node_static(node: Node) -> Optional[bool]:
class Scope:
# We keep two stacks combined, to maintain the relative order
- stack = None # type: List[Union[Type, FuncItem, MypyFile]]
+ stack = None # type: List[Union[TypeInfo, FuncItem, MypyFile]]
def __init__(self, module: MypyFile) -> None:
self.stack = [module]
@@ -3030,11 +3084,17 @@ class Scope:
return e
return None
- def active_class(self) -> Optional[Type]:
- if isinstance(self.stack[-1], Type):
+ def active_class(self) -> Optional[TypeInfo]:
+ if isinstance(self.stack[-1], TypeInfo):
return self.stack[-1]
return None
+ def active_self_type(self) -> Optional[Union[Instance, TupleType]]:
+ info = self.active_class()
+ if info:
+ return fill_typevars(info)
+ return None
+
@contextmanager
def push_function(self, item: FuncItem) -> Iterator[None]:
self.stack.append(item)
@@ -3042,7 +3102,12 @@ class Scope:
self.stack.pop()
@contextmanager
- def push_class(self, t: Type) -> Iterator[None]:
- self.stack.append(t)
+ def push_class(self, info: TypeInfo) -> Iterator[None]:
+ self.stack.append(info)
yield
self.stack.pop()
+
+
+@contextmanager
+def nothing() -> Iterator[None]:
+ yield
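
The checker.py changes above also teach find_isinstance_check() about issubclass(), converting the
narrowed item types back into Type[...] objects via convert_to_typetype(). A minimal sketch of the
user-visible effect, assuming issubclass() narrowing mirrors the existing isinstance() narrowing;
the classes B and C and the function handle() are illustrative, and reveal_type() is understood by
mypy only:

from typing import Type, Union

class B: ...
class C: ...

def handle(cls: Type[Union[B, C]]) -> None:
    if issubclass(cls, B):
        reveal_type(cls)  # narrowed to Type[B]
    else:
        reveal_type(cls)  # narrowed to Type[C]
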
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 79454d0..b19a0a1 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -1,11 +1,12 @@
"""Expression type checker. This file is conceptually part of TypeChecker."""
from collections import OrderedDict
-from typing import cast, Dict, Set, List, Iterable, Tuple, Callable, Union, Optional
+from typing import cast, Dict, Set, List, Tuple, Callable, Union, Optional
+from mypy.errors import report_internal_error
from mypy.types import (
- Type, AnyType, CallableType, Overloaded, NoneTyp, Void, TypeVarDef,
- TupleType, TypedDictType, Instance, TypeVarId, TypeVarType, ErasedType, UnionType,
+ Type, AnyType, CallableType, Overloaded, NoneTyp, TypeVarDef,
+ TupleType, TypedDictType, Instance, TypeVarType, ErasedType, UnionType,
PartialType, DeletedType, UnboundType, UninhabitedType, TypeType,
true_only, false_only, is_named_instance, function_type, callable_type, FunctionLike,
get_typ_args, set_typ_args,
@@ -14,13 +15,13 @@ from mypy.nodes import (
NameExpr, RefExpr, Var, FuncDef, OverloadedFuncDef, TypeInfo, CallExpr,
MemberExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr, FloatExpr,
OpExpr, UnaryExpr, IndexExpr, CastExpr, RevealTypeExpr, TypeApplication, ListExpr,
- TupleExpr, DictExpr, FuncExpr, SuperExpr, SliceExpr, Context, Expression,
+ TupleExpr, DictExpr, LambdaExpr, SuperExpr, SliceExpr, Context, Expression,
ListComprehension, GeneratorExpr, SetExpr, MypyFile, Decorator,
ConditionalExpr, ComparisonExpr, TempNode, SetComprehension,
DictionaryComprehension, ComplexExpr, EllipsisExpr, StarExpr, AwaitExpr, YieldExpr,
YieldFromExpr, TypedDictExpr, PromoteExpr, NewTypeExpr, NamedTupleExpr, TypeVarExpr,
- TypeAliasExpr, BackquoteExpr, ARG_POS, ARG_NAMED, ARG_STAR, ARG_STAR2, MODULE_REF,
- UNBOUND_TVAR, BOUND_TVAR,
+ TypeAliasExpr, BackquoteExpr, EnumCallExpr,
+ ARG_POS, ARG_NAMED, ARG_STAR, ARG_STAR2, MODULE_REF, TVAR, LITERAL_TYPE,
)
from mypy import nodes
import mypy.checker
@@ -31,6 +32,7 @@ from mypy.messages import MessageBuilder
from mypy import messages
from mypy.infer import infer_type_arguments, infer_function_type_arguments
from mypy import join
+from mypy.meet import narrow_declared_type
from mypy.maptype import map_instance_to_supertype
from mypy.subtypes import is_subtype, is_equivalent
from mypy import applytype
@@ -38,7 +40,7 @@ from mypy import erasetype
from mypy.checkmember import analyze_member_access, type_object_type, bind_self
from mypy.constraints import get_actual_type
from mypy.checkstrformat import StringFormatterChecker
-from mypy.expandtype import expand_type, expand_type_by_instance, freshen_function_type_vars
+from mypy.expandtype import expand_type_by_instance, freshen_function_type_vars
from mypy.util import split_module_names
from mypy.typevars import fill_typevars
from mypy.visitor import ExpressionVisitor
@@ -97,6 +99,8 @@ class ExpressionChecker(ExpressionVisitor[Type]):
chk = None # type: mypy.checker.TypeChecker
# This is shared with TypeChecker, but stored also here for convenience.
msg = None # type: MessageBuilder
+ # Type context for type inference
+ type_context = None # type: List[Optional[Type]]
strfrm_checker = None # type: StringFormatterChecker
@@ -106,6 +110,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
"""Construct an expression type checker."""
self.chk = chk
self.msg = msg
+ self.type_context = [None]
self.strfrm_checker = StringFormatterChecker(self, self.chk, self.msg)
def visit_name_expr(self, e: NameExpr) -> Type:
@@ -115,10 +120,10 @@ class ExpressionChecker(ExpressionVisitor[Type]):
"""
self.chk.module_refs.update(extract_refexpr_names(e))
result = self.analyze_ref_expr(e)
- return self.chk.narrow_type_from_binder(e, result)
+ return self.narrow_type_from_binder(e, result)
def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type:
- result = None # type: Type
+ result = None # type: Optional[Type]
node = e.node
if isinstance(node, Var):
# Variable reference.
@@ -145,38 +150,47 @@ class ExpressionChecker(ExpressionVisitor[Type]):
result = type_object_type(node, self.named_type)
elif isinstance(node, MypyFile):
# Reference to a module object.
- result = self.named_type('builtins.module')
+ result = self.named_type('types.ModuleType')
elif isinstance(node, Decorator):
result = self.analyze_var_ref(node.var, e)
else:
# Unknown reference; use any type implicitly to avoid
# generating extra type errors.
result = AnyType()
+ assert result is not None
return result
def analyze_var_ref(self, var: Var, context: Context) -> Type:
- if not var.type:
+ if var.type:
+ return var.type
+ else:
if not var.is_ready and self.chk.in_checked_function():
self.chk.handle_cannot_determine_type(var.name(), context)
# Implicit 'Any' type.
return AnyType()
- else:
- # Look up local type of variable with type (inferred or explicit).
- val = self.chk.binder.get(var)
- if val is None:
- return var.type
- else:
- return val
- def visit_call_expr(self, e: CallExpr) -> Type:
+ def visit_call_expr(self, e: CallExpr, allow_none_return: bool = False) -> Type:
"""Type check a call expression."""
if e.analyzed:
# It's really a special form that only looks like a call.
- return self.accept(e.analyzed, self.chk.type_context[-1])
+ return self.accept(e.analyzed, self.type_context[-1])
if isinstance(e.callee, NameExpr) and isinstance(e.callee.node, TypeInfo) and \
e.callee.node.typeddict_type is not None:
return self.check_typeddict_call(e.callee.node.typeddict_type,
e.arg_kinds, e.arg_names, e.args, e)
+ if isinstance(e.callee, NameExpr) and e.callee.name in ('isinstance', 'issubclass'):
+ for typ in mypy.checker.flatten(e.args[1]):
+ if isinstance(typ, NameExpr):
+ try:
+ node = self.chk.lookup_qualified(typ.name)
+ except KeyError:
+ # Undefined names should already be reported in semantic analysis.
+ node = None
+ if (isinstance(typ, IndexExpr)
+ and isinstance(typ.analyzed, (TypeApplication, TypeAliasExpr))
+ # node.kind == TYPE_ALIAS only for aliases like It = Iterable[int].
+ or isinstance(typ, NameExpr) and node and node.kind == nodes.TYPE_ALIAS):
+ self.msg.type_arguments_not_allowed(e)
self.try_infer_partial_type(e)
callee_type = self.accept(e.callee)
if (self.chk.options.disallow_untyped_calls and
@@ -184,7 +198,13 @@ class ExpressionChecker(ExpressionVisitor[Type]):
isinstance(callee_type, CallableType)
and callee_type.implicit):
return self.msg.untyped_function_call(callee_type, e)
- return self.check_call_expr_with_callee_type(callee_type, e)
+ ret_type = self.check_call_expr_with_callee_type(callee_type, e)
+ if isinstance(ret_type, UninhabitedType):
+ self.chk.binder.unreachable()
+ if not allow_none_return and isinstance(ret_type, NoneTyp):
+ self.chk.msg.does_not_return_value(callee_type, e)
+ return AnyType(implicit=True)
+ return ret_type
def check_typeddict_call(self, callee: TypedDictType,
arg_kinds: List[int],
@@ -248,12 +268,12 @@ class ExpressionChecker(ExpressionVisitor[Type]):
for (item_name, item_expected_type) in callee.items.items():
item_value = kwargs[item_name]
- item_actual_type = self.chk.check_simple_assignment(
+ self.chk.check_simple_assignment(
lvalue_type=item_expected_type, rvalue=item_value, context=item_value,
msg=messages.INCOMPATIBLE_TYPES,
lvalue_name='TypedDict item "{}"'.format(item_name),
rvalue_name='expression')
- items[item_name] = item_actual_type
+ items[item_name] = item_expected_type
mapping_value_type = join.join_type_list(list(items.values()))
fallback = self.chk.named_generic_type('typing.Mapping',
@@ -343,7 +363,15 @@ class ExpressionChecker(ExpressionVisitor[Type]):
"""
arg_messages = arg_messages or self.msg
if isinstance(callee, CallableType):
- if callee.is_concrete_type_obj() and callee.type_object().is_abstract:
+ if (isinstance(callable_node, RefExpr)
+ and callable_node.fullname in ('enum.Enum', 'enum.IntEnum',
+ 'enum.Flag', 'enum.IntFlag')):
+ # An Enum() call that failed SemanticAnalyzer.check_enum_call().
+ return callee.ret_type, callee
+
+ if (callee.is_type_obj() and callee.type_object().is_abstract
+ # Exceptions for Type[...] and classmethod first argument
+ and not callee.from_type_type and not callee.is_classmethod_class):
type = callee.type_object()
self.msg.cannot_instantiate_abstract_class(
callee.type_object().name(), type.abstract_attributes,
@@ -429,7 +457,10 @@ class ExpressionChecker(ExpressionVisitor[Type]):
if isinstance(item, AnyType):
return AnyType()
if isinstance(item, Instance):
- return type_object_type(item.type, self.named_type)
+ res = type_object_type(item.type, self.named_type)
+ if isinstance(res, CallableType):
+ res = res.copy_modified(from_type_type=True)
+ return res
if isinstance(item, UnionType):
return UnionType([self.analyze_type_type_callee(item, context)
for item in item.items], item.line)
@@ -438,7 +469,8 @@ class ExpressionChecker(ExpressionVisitor[Type]):
# i.e. its constructor (a poor approximation for reality,
# but better than AnyType...), but replace the return type
# with typevar.
- callee = self.analyze_type_type_callee(item.upper_bound, context)
+ callee = self.analyze_type_type_callee(item.upper_bound,
+ context) # type: Optional[Type]
if isinstance(callee, CallableType):
if callee.is_generic():
callee = None
@@ -519,7 +551,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
of callable, and if the context is set[int], return callable modified
by substituting 't' with 'int'.
"""
- ctx = self.chk.type_context[-1]
+ ctx = self.type_context[-1]
if not ctx:
return callable
# The return type may have references to type metavariables that
@@ -549,9 +581,6 @@ class ExpressionChecker(ExpressionVisitor[Type]):
for arg in args:
if isinstance(arg, UninhabitedType) or has_erased_component(arg):
new_args.append(None)
- elif not experiments.STRICT_OPTIONAL and isinstance(arg, NoneTyp):
- # Don't substitute None types in non-strict-Optional mode.
- new_args.append(None)
else:
new_args.append(arg)
return self.apply_generic_arguments(callable, new_args, error_context)
@@ -828,10 +857,16 @@ class ExpressionChecker(ExpressionVisitor[Type]):
callee_type: Type, n: int, m: int, callee: CallableType,
context: Context, messages: MessageBuilder) -> None:
"""Check the type of a single argument in a call."""
- if self.chk.is_unusable_type(caller_type):
- messages.does_not_return_value(caller_type, context)
- elif isinstance(caller_type, DeletedType):
+ if isinstance(caller_type, DeletedType):
messages.deleted_as_rvalue(caller_type, context)
+ # Only non-abstract class can be given where Type[...] is expected...
+ elif (isinstance(caller_type, CallableType) and isinstance(callee_type, TypeType) and
+ caller_type.is_type_obj() and caller_type.type_object().is_abstract and
+ isinstance(callee_type.item, Instance) and callee_type.item.type.is_abstract and
+ # ...except for classmethod first argument
+ not caller_type.is_classmethod_class):
+ messages.fail("Only non-abstract class can be given where '{}' is expected"
+ .format(callee_type), context)
elif not is_subtype(caller_type, callee_type):
if self.chk.should_suppress_optional_error([caller_type, callee_type]):
return
@@ -975,7 +1010,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
"""Visit member expression (of form e.id)."""
self.chk.module_refs.update(extract_refexpr_names(e))
result = self.analyze_ordinary_member_access(e, False)
- return self.chk.narrow_type_from_binder(e, result)
+ return self.narrow_type_from_binder(e, result)
def analyze_ordinary_member_access(self, e: MemberExpr,
is_lvalue: bool) -> Type:
@@ -1158,22 +1193,22 @@ class ExpressionChecker(ExpressionVisitor[Type]):
[left_type],
[nodes.ARG_POS],
[None],
- self.chk.bool_type(),
+ self.bool_type(),
self.named_type('builtins.function'))
- sub_result = self.chk.bool_type()
+ sub_result = self.bool_type()
if not is_subtype(left_type, itertype):
self.msg.unsupported_operand_types('in', left_type, right_type, e)
else:
self.msg.add_errors(local_errors)
if operator == 'not in':
- sub_result = self.chk.bool_type()
+ sub_result = self.bool_type()
elif operator in nodes.op_methods:
method = self.get_operator_method(operator)
sub_result, method_type = self.check_op(method, left_type, right, e,
allow_reverse=True)
elif operator == 'is' or operator == 'is not':
- sub_result = self.chk.bool_type()
+ sub_result = self.bool_type()
method_type = None
else:
raise RuntimeError('Unknown comparison operator {}'.format(operator))
@@ -1184,8 +1219,6 @@ class ExpressionChecker(ExpressionVisitor[Type]):
if result is None:
result = sub_result
else:
- # TODO: check on void needed?
- self.check_usable_type(sub_result, e)
result = join.join_types(result, sub_result)
return result
@@ -1320,7 +1353,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
# the left operand. We also use the left operand type to guide the type
# inference of the right operand so that expressions such as
# '[1] or []' are inferred correctly.
- ctx = self.chk.type_context[-1]
+ ctx = self.type_context[-1]
left_type = self.accept(e.left, ctx)
assert e.op in ('and', 'or') # Checked by visit_op_expr
@@ -1336,9 +1369,6 @@ class ExpressionChecker(ExpressionVisitor[Type]):
right_type = self.analyze_cond_branch(right_map, e.right, left_type)
- self.check_usable_type(left_type, context)
- self.check_usable_type(right_type, context)
-
if right_map is None:
# The boolean expression is statically known to be the left value
assert left_map is not None # find_isinstance_check guarantees this
@@ -1366,7 +1396,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
if is_subtype(right_type, self.named_type('builtins.int')):
# Special case: [...] * <int value>. Use the type context of the
# OpExpr, since the multiplication does not affect the type.
- left_type = self.accept(e.left, context=self.chk.type_context[-1])
+ left_type = self.accept(e.left, type_context=self.type_context[-1])
else:
left_type = self.accept(e.left)
result, method_type = self.check_op('__mul__', left_type, e.right, e)
@@ -1378,8 +1408,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
operand_type = self.accept(e.expr)
op = e.op
if op == 'not':
- self.check_usable_type(operand_type, e)
- result = self.chk.bool_type() # type: Type
+ result = self.bool_type() # type: Type
elif op == '-':
method_type = self.analyze_external_member_access('__neg__',
operand_type, e)
@@ -1404,7 +1433,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
It may also represent type application.
"""
result = self.visit_index_expr_helper(e)
- return self.chk.narrow_type_from_binder(e, result)
+ return self.narrow_type_from_binder(e, result)
def visit_index_expr_helper(self, e: IndexExpr) -> Type:
if e.analyzed:
@@ -1439,6 +1468,9 @@ class ExpressionChecker(ExpressionVisitor[Type]):
return AnyType()
elif isinstance(left_type, TypedDictType):
return self.visit_typeddict_index_expr(left_type, e.index)
+ elif (isinstance(left_type, CallableType)
+ and left_type.is_type_obj() and left_type.type_object().is_enum):
+ return self.visit_enum_index_expr(left_type.type_object(), e.index, e)
else:
result, method_type = self.check_op('__getitem__', left_type, e.index, e)
e.method_type = method_type
@@ -1497,25 +1529,27 @@ class ExpressionChecker(ExpressionVisitor[Type]):
return AnyType()
return item_type
+ def visit_enum_index_expr(self, enum_type: TypeInfo, index: Expression,
+ context: Context) -> Type:
+ string_type = self.named_type('builtins.str') # type: Type
+ if self.chk.options.python_version[0] < 3:
+ string_type = UnionType.make_union([string_type,
+ self.named_type('builtins.unicode')])
+ self.chk.check_subtype(self.accept(index), string_type, context,
+ "Enum index should be a string", "actual index type")
+ return Instance(enum_type, [])
+
def visit_cast_expr(self, expr: CastExpr) -> Type:
"""Type check a cast expression."""
- source_type = self.accept(expr.expr, context=AnyType())
+ source_type = self.accept(expr.expr, type_context=AnyType(), allow_none_return=True)
target_type = expr.type
if self.chk.options.warn_redundant_casts and is_same_type(source_type, target_type):
self.msg.redundant_cast(target_type, expr)
- if not self.is_valid_cast(source_type, target_type):
- self.msg.invalid_cast(target_type, source_type, expr)
return target_type
- def is_valid_cast(self, source_type: Type, target_type: Type) -> bool:
- """Is a cast from source_type to target_type meaningful?"""
- return (isinstance(target_type, AnyType) or
- (not isinstance(source_type, Void) and
- not isinstance(target_type, Void)))
-
def visit_reveal_type_expr(self, expr: RevealTypeExpr) -> Type:
"""Type check a reveal_type expression."""
- revealed_type = self.accept(expr.expr, context=self.chk.type_context[-1])
+ revealed_type = self.accept(expr.expr, type_context=self.type_context[-1])
if not self.chk.current_node_deferred:
self.msg.reveal_type(revealed_type, expr)
return revealed_type
@@ -1590,7 +1624,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
sym = self.chk.lookup_qualified(arg.name)
except KeyError:
pass
- if sym and (sym.kind == UNBOUND_TVAR or sym.kind == BOUND_TVAR):
+ if sym and (sym.kind == TVAR):
new_args[i] = AnyType()
else:
new_args[i] = self.replace_tvars_any(arg)
@@ -1609,7 +1643,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
# Used for list and set expressions, as well as for tuples
# containing star expressions that don't refer to a
# Tuple. (Note: "lst" stands for list-set-tuple. :-)
- tvdef = TypeVarDef('T', -1, [], self.chk.object_type())
+ tvdef = TypeVarDef('T', -1, [], self.object_type())
tv = TypeVarType(tvdef)
constructor = CallableType(
[tv],
@@ -1629,7 +1663,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
def visit_tuple_expr(self, e: TupleExpr) -> Type:
"""Type check a tuple expression."""
# Try to determine type context for type inference.
- type_context = self.chk.type_context[-1]
+ type_context = self.type_context[-1]
type_context_items = None
if isinstance(type_context, UnionType):
tuples_in_context = [t for t in type_context.items
@@ -1667,7 +1701,6 @@ class ExpressionChecker(ExpressionVisitor[Type]):
# context? Counterargument: Why would anyone write
# (1, *(2, 3)) instead of (1, 2, 3) except in a test?
tt = self.accept(item.expr)
- self.check_usable_type(tt, e)
if isinstance(tt, TupleType):
items.extend(tt.items)
j += len(tt.items)
@@ -1681,7 +1714,6 @@ class ExpressionChecker(ExpressionVisitor[Type]):
else:
tt = self.accept(item, type_context_items[j])
j += 1
- self.check_usable_type(tt, e)
items.append(tt)
fallback_item = join.join_type_list(items)
return TupleType(items, self.chk.named_generic_type('builtins.tuple', [fallback_item]))
@@ -1691,6 +1723,18 @@ class ExpressionChecker(ExpressionVisitor[Type]):
Translate it into a call to dict(), with provisions for **expr.
"""
+ # if the dict literal doesn't match TypedDict, check_typeddict_call_with_dict reports
+ # an error, but returns the TypedDict type that matches the literal it found
+ # that would cause a second error when that TypedDict type is returned upstream
+ # to avoid the second error, we always return TypedDict type that was requested
+ if isinstance(self.type_context[-1], TypedDictType):
+ self.check_typeddict_call_with_dict(
+ callee=self.type_context[-1],
+ kwargs=e,
+ context=e
+ )
+ return self.type_context[-1].copy_modified()
+
# Collect function arguments, watching out for **expr.
args = [] # type: List[Expression] # Regular "key: value"
stargs = [] # type: List[Expression] # For "**expr"
@@ -1700,8 +1744,8 @@ class ExpressionChecker(ExpressionVisitor[Type]):
else:
args.append(TupleExpr([key, value]))
# Define type variables (used in constructors below).
- ktdef = TypeVarDef('KT', -1, [], self.chk.object_type())
- vtdef = TypeVarDef('VT', -2, [], self.chk.object_type())
+ ktdef = TypeVarDef('KT', -1, [], self.object_type())
+ vtdef = TypeVarDef('VT', -2, [], self.object_type())
kt = TypeVarType(ktdef)
vt = TypeVarType(vtdef)
# Call dict(*args), unless it's empty and stargs is not.
@@ -1715,7 +1759,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
[None],
self.chk.named_generic_type('builtins.dict', [kt, vt]),
self.named_type('builtins.function'),
- name='<list>',
+ name='<dict>',
variables=[ktdef, vtdef])
rv = self.check_call(constructor, args, [nodes.ARG_POS] * len(args), e)[0]
else:
@@ -1740,21 +1784,19 @@ class ExpressionChecker(ExpressionVisitor[Type]):
self.check_call(method, [arg], [nodes.ARG_POS], arg)
return rv
- def visit_func_expr(self, e: FuncExpr) -> Type:
+ def visit_lambda_expr(self, e: LambdaExpr) -> Type:
"""Type check lambda expression."""
- inferred_type = self.infer_lambda_type_using_context(e)
+ inferred_type, type_override = self.infer_lambda_type_using_context(e)
if not inferred_type:
# No useful type context.
- ret_type = self.accept(e.expr())
- if isinstance(ret_type, NoneTyp):
- ret_type = Void()
+ ret_type = self.accept(e.expr(), allow_none_return=True)
fallback = self.named_type('builtins.function')
return callable_type(e, fallback, ret_type)
else:
# Type context available.
- self.chk.check_func_item(e, type_override=inferred_type)
+ self.chk.check_func_item(e, type_override=type_override)
if e.expr() not in self.chk.type_map:
- self.accept(e.expr())
+ self.accept(e.expr(), allow_none_return=True)
ret_type = self.chk.type_map[e.expr()]
if isinstance(ret_type, NoneTyp):
# For "lambda ...: None", just use type from the context.
@@ -1763,13 +1805,15 @@ class ExpressionChecker(ExpressionVisitor[Type]):
return inferred_type
return replace_callable_return_type(inferred_type, ret_type)
- def infer_lambda_type_using_context(self, e: FuncExpr) -> Optional[CallableType]:
+ def infer_lambda_type_using_context(self, e: LambdaExpr) -> Tuple[Optional[CallableType],
+ Optional[CallableType]]:
"""Try to infer lambda expression type using context.
Return None if could not infer type.
+ The second item in the return type is the type_override parameter for check_func_item.
"""
# TODO also accept 'Any' context
- ctx = self.chk.type_context[-1]
+ ctx = self.type_context[-1]
if isinstance(ctx, UnionType):
callables = [t for t in ctx.items if isinstance(t, CallableType)]
@@ -1777,7 +1821,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
ctx = callables[0]
if not ctx or not isinstance(ctx, CallableType):
- return None
+ return None, None
# The context may have function type variables in it. We replace them
# since these are the type variables we are ultimately trying to infer;
@@ -1799,13 +1843,13 @@ class ExpressionChecker(ExpressionVisitor[Type]):
if ARG_STAR in arg_kinds or ARG_STAR2 in arg_kinds:
# TODO treat this case appropriately
- return None
+ return callable_ctx, None
if callable_ctx.arg_kinds != arg_kinds:
# Incompatible context; cannot use it to infer types.
self.chk.fail(messages.CANNOT_INFER_LAMBDA_TYPE, e)
- return None
+ return None, None
- return callable_ctx
+ return callable_ctx, callable_ctx
def visit_super_expr(self, e: SuperExpr) -> Type:
"""Type check a super expression (non-lvalue)."""
@@ -1828,6 +1872,9 @@ class ExpressionChecker(ExpressionVisitor[Type]):
return AnyType()
if not self.chk.in_checked_function():
return AnyType()
+ if self.chk.scope.active_class() is not None:
+ self.chk.fail('super() outside of a method is not supported', e)
+ return AnyType()
args = self.chk.scope.top_function().arguments
# An empty args with super() is an error; we need something in declared_self
if not args:
@@ -1862,8 +1909,13 @@ class ExpressionChecker(ExpressionVisitor[Type]):
e.generator, 'builtins.set', '<set-comprehension>')
def visit_generator_expr(self, e: GeneratorExpr) -> Type:
- return self.check_generator_or_comprehension(e, 'typing.Iterator',
- '<generator>')
+ # If any of the comprehensions use async for, the expression will return an async generator
+ # object
+ if any(e.is_async):
+ typ = 'typing.AsyncIterator'
+ else:
+ typ = 'typing.Iterator'
+ return self.check_generator_or_comprehension(e, typ, '<generator>')
def check_generator_or_comprehension(self, gen: GeneratorExpr,
type_name: str,
@@ -1874,7 +1926,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
# Infer the type of the list comprehension by using a synthetic generic
# callable type.
- tvdef = TypeVarDef('T', -1, [], self.chk.object_type())
+ tvdef = TypeVarDef('T', -1, [], self.object_type())
tv = TypeVarType(tvdef)
constructor = CallableType(
[tv],
@@ -1894,8 +1946,8 @@ class ExpressionChecker(ExpressionVisitor[Type]):
# Infer the type of the list comprehension by using a synthetic generic
# callable type.
- ktdef = TypeVarDef('KT', -1, [], self.chk.object_type())
- vtdef = TypeVarDef('VT', -2, [], self.chk.object_type())
+ ktdef = TypeVarDef('KT', -1, [], self.object_type())
+ vtdef = TypeVarDef('VT', -2, [], self.object_type())
kt = TypeVarType(ktdef)
vt = TypeVarType(vtdef)
constructor = CallableType(
@@ -1915,9 +1967,12 @@ class ExpressionChecker(ExpressionVisitor[Type]):
Note: This adds the type information derived from the condlists to the current binder.
"""
- for index, sequence, conditions in zip(e.indices, e.sequences,
- e.condlists):
- sequence_type = self.chk.analyze_iterable_item_type(sequence)
+ for index, sequence, conditions, is_async in zip(e.indices, e.sequences,
+ e.condlists, e.is_async):
+ if is_async:
+ sequence_type = self.chk.analyze_async_iterable_item_type(sequence)
+ else:
+ sequence_type = self.chk.analyze_iterable_item_type(sequence)
self.chk.analyze_index_variables(index, sequence_type, True, e)
for condition in conditions:
self.accept(condition)
@@ -1927,17 +1982,16 @@ class ExpressionChecker(ExpressionVisitor[Type]):
if true_map:
for var, type in true_map.items():
- self.chk.binder.push(var, type)
+ self.chk.binder.put(var, type)
def visit_conditional_expr(self, e: ConditionalExpr) -> Type:
cond_type = self.accept(e.cond)
- self.check_usable_type(cond_type, e)
if self.chk.options.strict_boolean:
is_bool = (isinstance(cond_type, Instance)
and cond_type.type.fullname() == 'builtins.bool')
if not (is_bool or isinstance(cond_type, AnyType)):
self.chk.fail(messages.NON_BOOLEAN_IN_CONDITIONAL, e)
- ctx = self.chk.type_context[-1]
+ ctx = self.type_context[-1]
# Gain type information from isinstance if it is there
# but only for the current expression
@@ -1972,10 +2026,10 @@ class ExpressionChecker(ExpressionVisitor[Type]):
if map is None:
# We still need to type check node, in case we want to
# process it for isinstance checks later
- self.accept(node, context=context)
+ self.accept(node, type_context=context)
return UninhabitedType()
self.chk.push_type_map(map)
- return self.accept(node, context=context)
+ return self.accept(node, type_context=context)
def visit_backquote_expr(self, e: BackquoteExpr) -> Type:
self.accept(e.expr)
@@ -1985,13 +2039,33 @@ class ExpressionChecker(ExpressionVisitor[Type]):
# Helpers
#
- def accept(self, node: Expression, context: Type = None) -> Type:
- """Type check a node. Alias for TypeChecker.accept."""
- return self.chk.accept(node, context)
-
- def check_usable_type(self, typ: Type, context: Context) -> None:
- """Generate an error if type is Void."""
- self.chk.check_usable_type(typ, context)
+ def accept(self,
+ node: Expression,
+ type_context: Type = None,
+ allow_none_return: bool = False
+ ) -> Type:
+ """Type check a node in the given type context. If allow_none_return
+ is True and this expression is a call, allow it to return None. This
+ applies only to this expression and not any subexpressions.
+ """
+ self.type_context.append(type_context)
+ try:
+ if allow_none_return and isinstance(node, CallExpr):
+ typ = self.visit_call_expr(node, allow_none_return=True)
+ elif allow_none_return and isinstance(node, YieldFromExpr):
+ typ = self.visit_yield_from_expr(node, allow_none_return=True)
+ else:
+ typ = node.accept(self)
+ except Exception as err:
+ report_internal_error(err, self.chk.errors.file,
+ node.line, self.chk.errors, self.chk.options)
+ self.type_context.pop()
+ assert typ is not None
+ self.chk.store_type(node, typ)
+ if not self.chk.in_checked_function():
+ return AnyType()
+ else:
+ return typ
def named_type(self, name: str) -> Instance:
"""Return an instance type with type given by the name and no type
@@ -2029,6 +2103,8 @@ class ExpressionChecker(ExpressionVisitor[Type]):
# TODO TupleType => also consider tuple attributes
if isinstance(typ, Instance):
return typ.type.has_readable_member(member)
+ if isinstance(typ, CallableType) and typ.is_type_obj():
+ return typ.fallback.type.has_readable_member(member)
elif isinstance(typ, AnyType):
return True
elif isinstance(typ, UnionType):
@@ -2051,7 +2127,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
return_type = self.chk.return_types[-1]
expected_item_type = self.chk.get_generator_yield_type(return_type, False)
if e.expr is None:
- if (not isinstance(expected_item_type, (Void, NoneTyp, AnyType))
+ if (not isinstance(expected_item_type, (NoneTyp, AnyType))
and self.chk.in_checked_function()):
self.chk.fail(messages.YIELD_VALUE_EXPECTED, e)
else:
@@ -2062,7 +2138,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
return self.chk.get_generator_receive_type(return_type, False)
def visit_await_expr(self, e: AwaitExpr) -> Type:
- expected_type = self.chk.type_context[-1]
+ expected_type = self.type_context[-1]
if expected_type is not None:
expected_type = self.chk.named_generic_type('typing.Awaitable', [expected_type])
actual_type = self.accept(e.expr, expected_type)
@@ -2083,7 +2159,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
generator = self.check_call(method, [], [], ctx)[0]
return self.chk.get_generator_return_type(generator, False)
- def visit_yield_from_expr(self, e: YieldFromExpr) -> Type:
+ def visit_yield_from_expr(self, e: YieldFromExpr, allow_none_return: bool = False) -> Type:
# NOTE: Whether `yield from` accepts an `async def` decorated
# with `@types.coroutine` (or `@asyncio.coroutine`) depends on
# whether the generator containing the `yield from` is itself
@@ -2097,8 +2173,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
# by __iter__.
if isinstance(subexpr_type, AnyType):
iter_type = AnyType()
- elif (isinstance(subexpr_type, Instance) and
- is_subtype(subexpr_type, self.chk.named_type('typing.Iterable'))):
+ elif self.chk.type_is_iterable(subexpr_type):
if is_async_def(subexpr_type) and not has_coroutine_decorator(return_type):
self.chk.msg.yield_from_invalid_operand_type(subexpr_type, e)
iter_method_type = self.analyze_external_member_access(
@@ -2130,17 +2205,18 @@ class ExpressionChecker(ExpressionVisitor[Type]):
# Determine the type of the entire yield from expression.
if (isinstance(iter_type, Instance) and
iter_type.type.fullname() == 'typing.Generator'):
- return self.chk.get_generator_return_type(iter_type, False)
+ expr_type = self.chk.get_generator_return_type(iter_type, False)
else:
# Non-Generators don't return anything from `yield from` expressions.
# However special-case Any (which might be produced by an error).
if isinstance(actual_item_type, AnyType):
- return AnyType()
+ expr_type = AnyType()
else:
- if experiments.STRICT_OPTIONAL:
- return NoneTyp(is_ret_type=True)
- else:
- return Void()
+ expr_type = NoneTyp()
+
+ if not allow_none_return and isinstance(expr_type, NoneTyp):
+ self.chk.msg.does_not_return_value(None, e)
+ return expr_type
def visit_temp_node(self, e: TempNode) -> Type:
return e.type
@@ -2156,6 +2232,22 @@ class ExpressionChecker(ExpressionVisitor[Type]):
# TODO: Perhaps return a type object type?
return AnyType()
+ def visit_enum_call_expr(self, e: EnumCallExpr) -> Type:
+ for name, value in zip(e.items, e.values):
+ if value is not None:
+ typ = self.accept(value)
+ if not isinstance(typ, AnyType):
+ var = e.info.names[name].node
+ if isinstance(var, Var):
+ # Inline TypeCheker.set_inferred_type(),
+ # without the lvalue. (This doesn't really do
+ # much, since the value attribute is defined
+ # to have type Any in the typeshed stub.)
+ var.type = typ
+ var.is_inferred = True
+ # TODO: Perhaps return a type object type?
+ return AnyType()
+
def visit_typeddict_expr(self, e: TypedDictExpr) -> Type:
# TODO: Perhaps return a type object type?
return AnyType()
@@ -2166,6 +2258,22 @@ class ExpressionChecker(ExpressionVisitor[Type]):
def visit_star_expr(self, e: StarExpr) -> StarType:
return StarType(self.accept(e.expr))
+ def object_type(self) -> Instance:
+ """Return instance type 'object'."""
+ return self.named_type('builtins.object')
+
+ def bool_type(self) -> Instance:
+ """Return instance type 'bool'."""
+ return self.named_type('builtins.bool')
+
+ def narrow_type_from_binder(self, expr: Expression, known_type: Type) -> Type:
+ if expr.literal >= LITERAL_TYPE:
+ restriction = self.chk.binder.get(expr)
+ if restriction:
+ ans = narrow_declared_type(known_type, restriction)
+ return ans
+ return known_type
+
def has_coroutine_decorator(t: Type) -> bool:
"""Whether t came from a function decorated with `@coroutine`."""
@@ -2281,7 +2389,7 @@ def replace_callable_return_type(c: CallableType, new_ret_type: Type) -> Callabl
return c.copy_modified(ret_type=new_ret_type)
-class ArgInferSecondPassQuery(types.TypeQuery):
+class ArgInferSecondPassQuery(types.TypeQuery[bool]):
"""Query whether an argument type should be inferred in the second pass.
The result is True if the type has a type variable in a callable return
@@ -2289,16 +2397,16 @@ class ArgInferSecondPassQuery(types.TypeQuery):
a type variable.
"""
def __init__(self) -> None:
- super().__init__(False, types.ANY_TYPE_STRATEGY)
+ super().__init__(any)
def visit_callable_type(self, t: CallableType) -> bool:
return self.query_types(t.arg_types) or t.accept(HasTypeVarQuery())
-class HasTypeVarQuery(types.TypeQuery):
+class HasTypeVarQuery(types.TypeQuery[bool]):
"""Visitor for querying whether a type has a type variable component."""
def __init__(self) -> None:
- super().__init__(False, types.ANY_TYPE_STRATEGY)
+ super().__init__(any)
def visit_type_var(self, t: TypeVarType) -> bool:
return True
@@ -2308,10 +2416,10 @@ def has_erased_component(t: Type) -> bool:
return t is not None and t.accept(HasErasedComponentsQuery())
-class HasErasedComponentsQuery(types.TypeQuery):
+class HasErasedComponentsQuery(types.TypeQuery[bool]):
"""Visitor for querying whether a type has an erased component."""
def __init__(self) -> None:
- super().__init__(False, types.ANY_TYPE_STRATEGY)
+ super().__init__(any)
def visit_erased_type(self, t: ErasedType) -> bool:
return True
@@ -2341,9 +2449,12 @@ def overload_arg_similarity(actual: Type, formal: Type) -> int:
(isinstance(actual, Instance) and actual.type.fallback_to_any)):
# These could match anything at runtime.
return 2
- if isinstance(formal, CallableType) and isinstance(actual, (CallableType, Overloaded)):
- # TODO: do more sophisticated callable matching
- return 2
+ if isinstance(formal, CallableType):
+ if isinstance(actual, (CallableType, Overloaded)):
+ # TODO: do more sophisticated callable matching
+ return 2
+ if isinstance(actual, TypeType):
+ return 2 if is_subtype(actual, formal) else 0
if isinstance(actual, NoneTyp):
if not experiments.STRICT_OPTIONAL:
# NoneTyp matches anything if we're not doing strict Optional checking
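
Two smaller checkexpr.py changes above are easy to miss: visit_enum_index_expr() type checks
Color['RED']-style lookups on Enum classes, and visit_call_expr() now flags calls whose None result
is used as a value (the allow_none_return plumbing). A hedged sketch of both, with illustrative
names; the first error message is quoted from the hunk above, the second is paraphrased:

from enum import Enum

class Color(Enum):
    RED = 1
    BLUE = 2

c = Color['RED']   # accepted: the index is a str, the expression has type Color
c = Color[1]       # error: Enum index should be a string

def log(msg: str) -> None:
    print(msg)

x = log('hi')      # rejected: log() does not return a value
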
diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index 456ad6b..ed3239a 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -5,7 +5,7 @@ from typing import cast, Callable, List, Optional, TypeVar
from mypy.types import (
Type, Instance, AnyType, TupleType, TypedDictType, CallableType, FunctionLike, TypeVarDef,
Overloaded, TypeVarType, UnionType, PartialType,
- DeletedType, NoneTyp, TypeType, function_type
+ DeletedType, NoneTyp, TypeType, function_type, get_type_vars,
)
from mypy.nodes import (
TypeInfo, FuncBase, Var, FuncDef, SymbolNode, Context, MypyFile, TypeVarExpr,
@@ -75,7 +75,8 @@ def analyze_member_access(name: str,
if method:
if method.is_property:
assert isinstance(method, OverloadedFuncDef)
- return analyze_var(name, method.items[0].var, typ, info, node, is_lvalue, msg,
+ first_item = cast(Decorator, method.items[0])
+ return analyze_var(name, first_item.var, typ, info, node, is_lvalue, msg,
original_type, not_ready_callback)
if is_lvalue:
msg.cant_assign_to_method(node)
@@ -88,7 +89,9 @@ def analyze_member_access(name: str,
else:
signature = bind_self(signature, original_type)
typ = map_instance_to_supertype(typ, method.info)
- return expand_type_by_instance(signature, typ)
+ member_type = expand_type_by_instance(signature, typ)
+ freeze_type_vars(member_type)
+ return member_type
else:
# Not a method.
return analyze_member_var_access(name, typ, info, node,
@@ -134,7 +137,7 @@ def analyze_member_access(name: str,
if not is_operator:
# When Python sees an operator (eg `3 == 4`), it automatically translates that
# into something like `int.__eq__(3, 4)` instead of `(3).__eq__(4)` as an
- # optimation.
+ # optimization.
#
# While it normally it doesn't matter which of the two versions are used, it
# does cause inconsistencies when working with classes. For example, translating
@@ -173,6 +176,13 @@ def analyze_member_access(name: str,
item = None
if isinstance(typ.item, Instance):
item = typ.item
+ elif isinstance(typ.item, AnyType):
+ fallback = builtin_type('builtins.type')
+ ignore_messages = msg.copy()
+ ignore_messages.disable_errors()
+ return analyze_member_access(name, fallback, node, is_lvalue, is_super,
+ is_operator, builtin_type, not_ready_callback,
+ ignore_messages, original_type=original_type, chk=chk)
elif isinstance(typ.item, TypeVarType):
if isinstance(typ.item.upper_bound, Instance):
item = typ.item.upper_bound
@@ -184,6 +194,8 @@ def analyze_member_access(name: str,
if result:
return result
fallback = builtin_type('builtins.type')
+ if item is not None:
+ fallback = item.type.metaclass_type or fallback
return analyze_member_access(name, fallback, node, is_lvalue, is_super,
is_operator, builtin_type, not_ready_callback, msg,
original_type=original_type, chk=chk)
@@ -266,6 +278,8 @@ def analyze_var(name: str, var: Var, itype: Instance, info: TypeInfo, node: Cont
if is_lvalue and var.is_property and not var.is_settable_property:
# TODO allow setting attributes in subclass (although it is probably an error)
msg.read_only_property(name, info, node)
+ if is_lvalue and var.is_classvar:
+ msg.cant_assign_to_classvar(name, node)
if var.is_initialized_in_class and isinstance(t, FunctionLike) and not t.is_type_obj():
if is_lvalue:
if var.is_property:
@@ -296,6 +310,16 @@ def analyze_var(name: str, var: Var, itype: Instance, info: TypeInfo, node: Cont
return AnyType()
+def freeze_type_vars(member_type: Type) -> None:
+ if isinstance(member_type, CallableType):
+ for v in member_type.variables:
+ v.id.meta_level = 0
+ if isinstance(member_type, Overloaded):
+ for it in member_type.items():
+ for v in it.variables:
+ v.id.meta_level = 0
+
+
def handle_partial_attribute_type(typ: PartialType, is_lvalue: bool, msg: MessageBuilder,
context: Context) -> Type:
if typ.type is None:
@@ -357,7 +381,7 @@ def analyze_class_attribute_access(itype: Instance,
builtin_type: Callable[[str], Instance],
not_ready_callback: Callable[[str, Context], None],
msg: MessageBuilder,
- original_type: Type) -> Type:
+ original_type: Type) -> Optional[Type]:
"""original_type is the type of E in the expression E.var"""
node = itype.type.get(name)
if not node:
@@ -379,7 +403,11 @@ def analyze_class_attribute_access(itype: Instance,
t = node.type
if t:
if isinstance(t, PartialType):
- return handle_partial_attribute_type(t, is_lvalue, msg, node.node)
+ symnode = node.node
+ assert symnode is not None
+ return handle_partial_attribute_type(t, is_lvalue, msg, symnode)
+ if not is_method and (isinstance(t, TypeVarType) or get_type_vars(t)):
+ msg.fail(messages.GENERIC_INSTANCE_VAR_CLASS_ACCESS, context)
is_classmethod = is_decorated and cast(Decorator, node.node).func.is_class
return add_class_tvars(t, itype, is_classmethod, builtin_type, original_type)
elif isinstance(node.node, Var):
@@ -387,14 +415,16 @@ def analyze_class_attribute_access(itype: Instance,
return AnyType()
if isinstance(node.node, TypeVarExpr):
- return TypeVarType(node.tvar_def, node.tvar_def.line, node.tvar_def.column)
+ msg.fail('Type variable "{}.{}" cannot be used as an expression'.format(
+ itype.type.name(), name), context)
+ return AnyType()
if isinstance(node.node, TypeInfo):
return type_object_type(node.node, builtin_type)
if isinstance(node.node, MypyFile):
# Reference to a module object.
- return builtin_type('builtins.module')
+ return builtin_type('types.ModuleType')
if is_decorated:
# TODO: Return type of decorated function. This is a quick hack to work around #998.
@@ -423,7 +453,7 @@ def add_class_tvars(t: Type, itype: Instance, is_classmethod: bool,
info = itype.type # type: TypeInfo
if isinstance(t, CallableType):
# TODO: Should we propagate type variable values?
- tvars = [TypeVarDef(n, i + 1, None, builtin_type('builtins.object'), tv.variance)
+ tvars = [TypeVarDef(n, i + 1, [], builtin_type('builtins.object'), tv.variance)
for (i, n), tv in zip(enumerate(info.type_vars), info.defn.type_vars)]
if is_classmethod:
t = bind_self(t, original_type)
@@ -450,7 +480,7 @@ def type_object_type(info: TypeInfo, builtin_type: Callable[[str], Instance]) ->
# Must be an invalid class definition.
return AnyType()
else:
- fallback = builtin_type('builtins.type')
+ fallback = info.metaclass_type or builtin_type('builtins.type')
if init_method.info.fullname() == 'builtins.object':
# No non-default __init__ -> look at __new__ instead.
new_method = info.get_method('__new__')
@@ -487,12 +517,10 @@ def type_object_type_from_function(init_or_new: FuncBase, info: TypeInfo,
# We need to first map B's __init__ to the type (List[T]) -> None.
signature = cast(FunctionLike,
map_type_from_supertype(signature, info, init_or_new.info))
-
+ special_sig = None # type: Optional[str]
if init_or_new.info.fullname() == 'builtins.dict':
# Special signature!
special_sig = 'dict'
- else:
- special_sig = None
if isinstance(signature, CallableType):
return class_callable(signature, info, fallback, special_sig)
@@ -516,7 +544,6 @@ def class_callable(init_type: CallableType, info: TypeInfo, type_type: Instance,
ret_type=fill_typevars(info), fallback=type_type, name=None, variables=variables,
special_sig=special_sig)
c = callable_type.with_name('"{}"'.format(info.name()))
- c.is_classmethod_class = True
return c
@@ -603,6 +630,7 @@ def bind_self(method: F, original_type: Type = None) -> F:
self_param_type, original_type)[0]
def expand(target: Type) -> Type:
+ assert typearg is not None
return expand_type(target, {func.variables[0].id: typearg})
arg_types = [expand(x) for x in func.arg_types[1:]]
@@ -612,11 +640,14 @@ def bind_self(method: F, original_type: Type = None) -> F:
arg_types = func.arg_types[1:]
ret_type = func.ret_type
variables = func.variables
+ if isinstance(original_type, CallableType) and original_type.is_type_obj():
+ original_type = TypeType(original_type.ret_type)
res = func.copy_modified(arg_types=arg_types,
arg_kinds=func.arg_kinds[1:],
arg_names=func.arg_names[1:],
variables=variables,
- ret_type=ret_type)
+ ret_type=ret_type,
+ bound_args=[original_type])
return cast(F, res)
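Two of the hunks above switch the fallback for class objects from plain builtins.type to the class's metaclass (item.type.metaclass_type / info.metaclass_type). A user-level sketch, with illustrative names, of what that enables:

from typing import Type

class Registry(type):
    def table_name(cls) -> str:
        return cls.__name__.lower()

class Model(metaclass=Registry):
    pass

def describe(model_cls: Type[Model]) -> str:
    # With the metaclass used as the fallback, attributes defined on the
    # metaclass are found when accessed through Type[Model] (or Model itself).
    return model_cls.table_name()

print(describe(Model))  # prints "model"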
diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py
index ddf69c2..3dde6d7 100644
--- a/mypy/checkstrformat.py
+++ b/mypy/checkstrformat.py
@@ -2,13 +2,13 @@
import re
-from typing import cast, List, Tuple, Dict, Callable, Union
+from typing import cast, List, Tuple, Dict, Callable, Union, Optional
from mypy.types import (
Type, AnyType, TupleType, Instance, UnionType
)
from mypy.nodes import (
- StrExpr, BytesExpr, UnicodeExpr, TupleExpr, DictExpr, Context, Expression
+ StrExpr, BytesExpr, UnicodeExpr, TupleExpr, DictExpr, Context, Expression, StarExpr
)
if False:
# break import cycle only needed for mypy
@@ -17,6 +17,9 @@ if False:
from mypy import messages
from mypy.messages import MessageBuilder
+FormatStringExpr = Union[StrExpr, BytesExpr, UnicodeExpr]
+Checkers = Tuple[Callable[[Expression], None], Callable[[Type], None]]
+
class ConversionSpecifier:
def __init__(self, key: str, flags: str, width: str, precision: str, type: str) -> None:
@@ -57,27 +60,31 @@ class StringFormatterChecker:
# TODO: In Python 3, the bytes formatting has a more restricted set of options
# compared to string formatting.
- # TODO: Bytes formatting in Python 3 is only supported in 3.5 and up.
def check_str_interpolation(self,
- str: Union[StrExpr, BytesExpr, UnicodeExpr],
+ expr: FormatStringExpr,
replacements: Expression) -> Type:
"""Check the types of the 'replacements' in a string interpolation
expression: str % replacements
"""
- specifiers = self.parse_conversion_specifiers(str.value)
- has_mapping_keys = self.analyze_conversion_specifiers(specifiers, str)
+ specifiers = self.parse_conversion_specifiers(expr.value)
+ has_mapping_keys = self.analyze_conversion_specifiers(specifiers, expr)
+ if isinstance(expr, BytesExpr) and (3, 0) <= self.chk.options.python_version < (3, 5):
+ self.msg.fail('Bytes formatting is only supported in Python 3.5 and later',
+ replacements)
+ return AnyType()
+
if has_mapping_keys is None:
pass # Error was reported
elif has_mapping_keys:
- self.check_mapping_str_interpolation(specifiers, replacements)
+ self.check_mapping_str_interpolation(specifiers, replacements, expr)
else:
- self.check_simple_str_interpolation(specifiers, replacements)
+ self.check_simple_str_interpolation(specifiers, replacements, expr)
- if isinstance(str, BytesExpr):
+ if isinstance(expr, BytesExpr):
return self.named_type('builtins.bytes')
- elif isinstance(str, UnicodeExpr):
+ elif isinstance(expr, UnicodeExpr):
return self.named_type('builtins.unicode')
- elif isinstance(str, StrExpr):
+ elif isinstance(expr, StrExpr):
return self.named_type('builtins.str')
else:
assert False
@@ -99,7 +106,7 @@ class StringFormatterChecker:
return specifiers
def analyze_conversion_specifiers(self, specifiers: List[ConversionSpecifier],
- context: Context) -> bool:
+ context: Context) -> Optional[bool]:
has_star = any(specifier.has_star() for specifier in specifiers)
has_key = any(specifier.has_key() for specifier in specifiers)
all_have_keys = all(
@@ -115,8 +122,8 @@ class StringFormatterChecker:
return has_key
def check_simple_str_interpolation(self, specifiers: List[ConversionSpecifier],
- replacements: Expression) -> None:
- checkers = self.build_replacement_checkers(specifiers, replacements)
+ replacements: Expression, expr: FormatStringExpr) -> None:
+ checkers = self.build_replacement_checkers(specifiers, replacements, expr)
if checkers is None:
return
@@ -140,7 +147,8 @@ class StringFormatterChecker:
check_type(rhs_type.items[0])
else:
check_node(replacements)
- elif isinstance(replacements, TupleExpr):
+ elif (isinstance(replacements, TupleExpr)
+ and not any(isinstance(item, StarExpr) for item in replacements.items)):
for checks, rep_node in zip(checkers, replacements.items):
check_node, check_type = checks
check_node(rep_node)
@@ -150,7 +158,8 @@ class StringFormatterChecker:
check_type(rep_type)
def check_mapping_str_interpolation(self, specifiers: List[ConversionSpecifier],
- replacements: Expression) -> None:
+ replacements: Expression,
+ expr: FormatStringExpr) -> None:
if (isinstance(replacements, DictExpr) and
all(isinstance(k, (StrExpr, BytesExpr))
for k, v in replacements.items)):
@@ -167,7 +176,7 @@ class StringFormatterChecker:
self.msg.key_not_in_mapping(specifier.key, replacements)
return
rep_type = mapping[specifier.key]
- expected_type = self.conversion_type(specifier.type, replacements)
+ expected_type = self.conversion_type(specifier.type, replacements, expr)
if expected_type is None:
return
self.chk.check_subtype(rep_type, expected_type, replacements,
@@ -183,49 +192,47 @@ class StringFormatterChecker:
'expression has type', 'expected type for mapping is')
def build_replacement_checkers(self, specifiers: List[ConversionSpecifier],
- context: Context) -> List[Tuple[Callable[[Expression], None],
- Callable[[Type], None]]]:
- checkers = [] # type: List[Tuple[Callable[[Expression], None], Callable[[Type], None]]]
+ context: Context, expr: FormatStringExpr
+ ) -> Optional[List[Checkers]]:
+ checkers = [] # type: List[Checkers]
for specifier in specifiers:
- checker = self.replacement_checkers(specifier, context)
+ checker = self.replacement_checkers(specifier, context, expr)
if checker is None:
return None
checkers.extend(checker)
return checkers
- def replacement_checkers(self, specifier: ConversionSpecifier,
- context: Context) -> List[Tuple[Callable[[Expression], None],
- Callable[[Type], None]]]:
+ def replacement_checkers(self, specifier: ConversionSpecifier, context: Context,
+ expr: FormatStringExpr) -> Optional[List[Checkers]]:
"""Returns a list of tuples of two functions that check whether a replacement is
of the right type for the specifier. The first function takes a node and checks
its type in the right type context. The second function just checks a type.
"""
- checkers = [] # type: List[Tuple[Callable[[Expression], None], Callable[[Type], None]]]
+ checkers = [] # type: List[Checkers]
if specifier.width == '*':
checkers.append(self.checkers_for_star(context))
if specifier.precision == '*':
checkers.append(self.checkers_for_star(context))
if specifier.type == 'c':
- c = self.checkers_for_c_type(specifier.type, context)
+ c = self.checkers_for_c_type(specifier.type, context, expr)
if c is None:
return None
checkers.append(c)
elif specifier.type != '%':
- c = self.checkers_for_regular_type(specifier.type, context)
+ c = self.checkers_for_regular_type(specifier.type, context, expr)
if c is None:
return None
checkers.append(c)
return checkers
- def checkers_for_star(self, context: Context) -> Tuple[Callable[[Expression], None],
- Callable[[Type], None]]:
+ def checkers_for_star(self, context: Context) -> Checkers:
"""Returns a tuple of check functions that check whether, respectively,
a node or a type is compatible with a star in a conversion specifier
"""
expected = self.named_type('builtins.int')
- def check_type(type: Type = None) -> None:
+ def check_type(type: Type) -> None:
expected = self.named_type('builtins.int')
self.chk.check_subtype(type, expected, context, '* wants int')
@@ -236,16 +243,17 @@ class StringFormatterChecker:
return check_expr, check_type
def checkers_for_regular_type(self, type: str,
- context: Context) -> Tuple[Callable[[Expression], None],
- Callable[[Type], None]]:
+ context: Context,
+ expr: FormatStringExpr) -> Optional[Checkers]:
"""Returns a tuple of check functions that check whether, respectively,
a node or a type is compatible with 'type'. Return None in case of an error.
"""
- expected_type = self.conversion_type(type, context)
+ expected_type = self.conversion_type(type, context, expr)
if expected_type is None:
return None
- def check_type(type: Type = None) -> None:
+ def check_type(type: Type) -> None:
+ assert expected_type is not None
self.chk.check_subtype(type, expected_type, context,
messages.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION,
'expression has type', 'placeholder has type')
@@ -257,16 +265,17 @@ class StringFormatterChecker:
return check_expr, check_type
def checkers_for_c_type(self, type: str,
- context: Context) -> Tuple[Callable[[Expression], None],
- Callable[[Type], None]]:
+ context: Context,
+ expr: FormatStringExpr) -> Optional[Checkers]:
"""Returns a tuple of check functions that check whether, respectively,
a node or a type is compatible with 'type' that is a character type
"""
- expected_type = self.conversion_type(type, context)
+ expected_type = self.conversion_type(type, context, expr)
if expected_type is None:
return None
- def check_type(type: Type = None) -> None:
+ def check_type(type: Type) -> None:
+ assert expected_type is not None
self.chk.check_subtype(type, expected_type, context,
messages.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION,
'expression has type', 'placeholder has type')
@@ -280,14 +289,28 @@ class StringFormatterChecker:
return check_expr, check_type
- def conversion_type(self, p: str, context: Context) -> Type:
+ def conversion_type(self, p: str, context: Context, expr: FormatStringExpr) -> Optional[Type]:
"""Return the type that is accepted for a string interpolation
conversion specifier type.
Note that both Python's float (e.g. %f) and integer (e.g. %d)
specifier types accept both float and integers.
"""
- if p in ['s', 'r']:
+ if p == 'b':
+ if self.chk.options.python_version < (3, 5):
+ self.msg.fail("Format character 'b' is only supported in Python 3.5 and later",
+ context)
+ return None
+ if not isinstance(expr, BytesExpr):
+ self.msg.fail("Format character 'b' is only supported on bytes patterns", context)
+ return None
+ return self.named_type('builtins.bytes')
+ elif p == 'a':
+ if self.chk.options.python_version < (3, 0):
+ self.msg.fail("Format character 'a' is only supported in Python 3", context)
+ return None
+ return AnyType()
+ elif p in ['s', 'r']:
return AnyType()
elif p in ['d', 'i', 'o', 'u', 'x', 'X',
'e', 'E', 'f', 'F', 'g', 'G']:
@@ -313,4 +336,4 @@ class StringFormatterChecker:
def accept(self, expr: Expression, context: Type = None) -> Type:
"""Type check a node. Alias for TypeChecker.accept."""
- return self.chk.accept(expr, context)
+ return self.chk.expr_checker.accept(expr, context)
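The added checks give printf-style formatting on bytes and the '%a' conversion explicit Python-version rules. A small user-level illustration of what is accepted and rejected, assuming a target of Python 3.5 or later:

data = b"status: %b" % (b"ok",)    # accepted: 'b' on a bytes pattern, Python 3.5+
text = "repr: %a" % (object(),)    # accepted: 'a' requires Python 3
# "oops: %b" % (b"ok",)            # rejected: 'b' is only supported on bytes patterns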
diff --git a/mypy/constraints.py b/mypy/constraints.py
index 533a436..d65a418 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -4,14 +4,14 @@ from typing import Iterable, List, Optional
from mypy import experiments
from mypy.types import (
- CallableType, Type, TypeVisitor, UnboundType, AnyType, Void, NoneTyp, TypeVarType,
+ CallableType, Type, TypeVisitor, UnboundType, AnyType, NoneTyp, TypeVarType,
Instance, TupleType, TypedDictType, UnionType, Overloaded, ErasedType, PartialType,
- DeletedType, UninhabitedType, TypeType, TypeVarId, TypeQuery, ALL_TYPES_STRATEGY,
- is_named_instance
+ DeletedType, UninhabitedType, TypeType, TypeVarId, TypeQuery, is_named_instance
)
from mypy.maptype import map_instance_to_supertype
from mypy import nodes
import mypy.subtypes
+from mypy.sametypes import is_same_type
from mypy.erasetype import erase_typevars
@@ -25,7 +25,7 @@ class Constraint:
It can be either T <: type or T :> type (T is a type variable).
"""
- type_var = None # Type variable id
+ type_var = None # type: TypeVarId
op = 0 # SUBTYPE_OF or SUPERTYPE_OF
target = None # type: Type
@@ -53,10 +53,11 @@ def infer_constraints_for_callable(
for i, actuals in enumerate(formal_to_actual):
for actual in actuals:
- if arg_types[actual] is None:
+ actual_arg_type = arg_types[actual]
+ if actual_arg_type is None:
continue
- actual_type = get_actual_type(arg_types[actual], arg_kinds[actual],
+ actual_type = get_actual_type(actual_arg_type, arg_kinds[actual],
tuple_counter)
c = infer_constraints(callee.arg_types[i], actual_type,
SUPERTYPE_OF)
@@ -203,15 +204,33 @@ def any_constraints(options: List[Optional[List[Constraint]]], eager: bool) -> L
valid_options = [option for option in options if option is not None]
if len(valid_options) == 1:
return valid_options[0]
- # Otherwise, there are either no valid options or multiple valid options.
- # Give up and deduce nothing.
+ elif (len(valid_options) > 1 and
+ all(is_same_constraints(valid_options[0], c)
+ for c in valid_options[1:])):
+ # Multiple sets of constraints that are all the same. Just pick any one of them.
+ # TODO: More generally, if a given (variable, direction) pair appears in
+ # every option, combine the bounds with meet/join.
+ return valid_options[0]
+
+ # Otherwise, there are either no valid options or multiple, inconsistent valid
+ # options. Give up and deduce nothing.
return []
- # TODO: In the latter case, it could happen that every valid
- # option requires the same constraint on the same variable. Then
- # we could include that that constraint in the result. Or more
- # generally, if a given (variable, direction) pair appears in
- # every option, combine the bounds with meet/join.
+
+def is_same_constraints(x: List[Constraint], y: List[Constraint]) -> bool:
+ for c1 in x:
+ if not any(is_same_constraint(c1, c2) for c2 in y):
+ return False
+ for c1 in y:
+ if not any(is_same_constraint(c1, c2) for c2 in x):
+ return False
+ return True
+
+
+def is_same_constraint(c1: Constraint, c2: Constraint) -> bool:
+ return (c1.type_var == c2.type_var
+ and c1.op == c2.op
+ and is_same_type(c1.target, c2.target))
def simplify_away_incomplete_types(types: List[Type]) -> List[Type]:
@@ -231,9 +250,9 @@ def is_complete_type(typ: Type) -> bool:
return typ.accept(CompleteTypeVisitor())
-class CompleteTypeVisitor(TypeQuery):
+class CompleteTypeVisitor(TypeQuery[bool]):
def __init__(self) -> None:
- super().__init__(default=True, strategy=ALL_TYPES_STRATEGY)
+ super().__init__(all)
def visit_none_type(self, t: NoneTyp) -> bool:
return experiments.STRICT_OPTIONAL
@@ -262,9 +281,6 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
def visit_any(self, template: AnyType) -> List[Constraint]:
return []
- def visit_void(self, template: Void) -> List[Constraint]:
- return []
-
def visit_none_type(self, template: NoneTyp) -> List[Constraint]:
return []
@@ -294,6 +310,8 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
def visit_instance(self, template: Instance) -> List[Constraint]:
actual = self.actual
res = [] # type: List[Constraint]
+ if isinstance(actual, CallableType) and actual.fallback is not None:
+ actual = actual.fallback
if isinstance(actual, Instance):
instance = actual
if (self.direction == SUBTYPE_OF and
@@ -360,6 +378,8 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
return res
elif isinstance(self.actual, Overloaded):
return self.infer_against_overloaded(self.actual, template)
+ elif isinstance(self.actual, TypeType):
+ return infer_constraints(template.ret_type, self.actual.item, self.direction)
else:
return []
@@ -421,9 +441,9 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
return res
def visit_type_type(self, template: TypeType) -> List[Constraint]:
- if isinstance(self.actual, CallableType) and self.actual.is_type_obj():
+ if isinstance(self.actual, CallableType):
return infer_constraints(template.item, self.actual.ret_type, self.direction)
- elif isinstance(self.actual, Overloaded) and self.actual.is_type_obj():
+ elif isinstance(self.actual, Overloaded):
return infer_constraints(template.item, self.actual.items()[0].ret_type,
self.direction)
elif isinstance(self.actual, TypeType):
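With ConstraintBuilderVisitor now looking through callable fallbacks and handling TypeType actuals in visit_callable_type/visit_type_type, a class object passed where a generic callable is expected contributes constraints instead of inference giving up. A hedged, user-level sketch with illustrative names:

from typing import Callable, Type, TypeVar

T = TypeVar('T')

def make(factory: Callable[[], T]) -> T:
    return factory()

class Widget:
    pass

def build(cls: Type[Widget]) -> Widget:
    # Constraints are inferred from Type[Widget] against Callable[[], T],
    # so T is solved as Widget.
    return make(cls)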
diff --git a/mypy/defaults.py b/mypy/defaults.py
index d9b6741..b5398f9 100644
--- a/mypy/defaults.py
+++ b/mypy/defaults.py
@@ -1,4 +1,4 @@
PYTHON2_VERSION = (2, 7)
-PYTHON3_VERSION = (3, 5)
+PYTHON3_VERSION = (3, 6)
CACHE_DIR = '.mypy_cache'
CONFIG_FILE = 'mypy.ini'
diff --git a/mypy/erasetype.py b/mypy/erasetype.py
index 47a0d72..49035ac 100644
--- a/mypy/erasetype.py
+++ b/mypy/erasetype.py
@@ -1,7 +1,7 @@
from typing import Optional, Container, Callable
from mypy.types import (
- Type, TypeVisitor, UnboundType, ErrorType, AnyType, Void, NoneTyp, TypeVarId,
+ Type, TypeVisitor, UnboundType, AnyType, NoneTyp, TypeVarId,
Instance, TypeVarType, CallableType, TupleType, TypedDictType, UnionType, Overloaded,
ErasedType, PartialType, DeletedType, TypeTranslator, TypeList, UninhabitedType, TypeType
)
@@ -26,21 +26,13 @@ def erase_type(typ: Type) -> Type:
class EraseTypeVisitor(TypeVisitor[Type]):
- def visit_unbound_type(self, t: UnboundType) -> Type:
- assert False, 'Not supported'
-
- def visit_error_type(self, t: ErrorType) -> Type:
- return t
- def visit_type_list(self, t: TypeList) -> Type:
+ def visit_unbound_type(self, t: UnboundType) -> Type:
assert False, 'Not supported'
def visit_any(self, t: AnyType) -> Type:
return t
- def visit_void(self, t: Void) -> Type:
- return t
-
def visit_none_type(self, t: NoneTyp) -> Type:
return t
@@ -66,10 +58,7 @@ class EraseTypeVisitor(TypeVisitor[Type]):
def visit_callable_type(self, t: CallableType) -> Type:
# We must preserve the fallback type for overload resolution to work.
- if experiments.STRICT_OPTIONAL:
- ret_type = NoneTyp(is_ret_type=True) # type: Type
- else:
- ret_type = Void()
+ ret_type = NoneTyp() # type: Type
return CallableType([], [], [], ret_type, t.fallback)
def visit_overloaded(self, t: Overloaded) -> Type:
@@ -82,7 +71,8 @@ class EraseTypeVisitor(TypeVisitor[Type]):
return t.fallback.accept(self)
def visit_union_type(self, t: UnionType) -> Type:
- return AnyType() # XXX: return underlying type if only one?
+ erased_items = [erase_type(item) for item in t.items]
+ return UnionType.make_simplified_union(erased_items)
def visit_type_type(self, t: TypeType) -> Type:
return TypeType(t.item.accept(self), line=t.line)
diff --git a/mypy/errors.py b/mypy/errors.py
index acbd3d1..6648784 100644
--- a/mypy/errors.py
+++ b/mypy/errors.py
@@ -2,10 +2,12 @@ import os.path
import sys
import traceback
from collections import OrderedDict, defaultdict
+from contextlib import contextmanager
-from typing import Tuple, List, TypeVar, Set, Dict
+from typing import Tuple, List, TypeVar, Set, Dict, Iterator, Optional
from mypy.options import Options
+from mypy.version import __version__ as mypy_version
T = TypeVar('T')
@@ -21,11 +23,14 @@ class ErrorInfo:
# The source file that was the source of this error.
file = ''
+ # The fully-qualified id of the source module for this error.
+ module = None # type: Optional[str]
+
# The name of the type in which this error is located at.
- type = '' # Unqualified, may be None
+ type = '' # type: Optional[str] # Unqualified, may be None
# The name of the function or member in which this error is located at.
- function_or_member = '' # Unqualified, may be None
+ function_or_member = '' # type: Optional[str] # Unqualified, may be None
# The line number related to this error within file.
line = 0 # -1 if unknown
@@ -45,12 +50,26 @@ class ErrorInfo:
# Only report this particular messages once per program.
only_once = False
- def __init__(self, import_ctx: List[Tuple[str, int]], file: str, typ: str,
- function_or_member: str, line: int, column: int, severity: str,
- message: str, blocker: bool, only_once: bool,
- origin: Tuple[str, int] = None) -> None:
+ # Fine-grained incremental target where this was reported
+ target = None # type: Optional[str]
+
+ def __init__(self,
+ import_ctx: List[Tuple[str, int]],
+ file: str,
+ module: Optional[str],
+ typ: Optional[str],
+ function_or_member: Optional[str],
+ line: int,
+ column: int,
+ severity: str,
+ message: str,
+ blocker: bool,
+ only_once: bool,
+ origin: Tuple[str, int] = None,
+ target: str = None) -> None:
self.import_ctx = import_ctx
self.file = file
+ self.module = module
self.type = typ
self.function_or_member = function_or_member
self.line = line
@@ -60,6 +79,7 @@ class ErrorInfo:
self.blocker = blocker
self.only_once = only_once
self.origin = origin or (file, line)
+ self.target = target
class Errors:
@@ -75,6 +95,9 @@ class Errors:
# Current error context: nested import context/stack, as a list of (path, line) pairs.
import_ctx = None # type: List[Tuple[str, int]]
+ # Set of files with errors.
+ error_files = None # type: Set[str]
+
# Path name prefix that is removed from all paths, if set.
ignore_prefix = None # type: str
@@ -82,10 +105,10 @@ class Errors:
file = None # type: str
# Stack of short names of currents types (or None).
- type_name = None # type: List[str]
+ type_name = None # type: List[Optional[str]]
# Stack of short names of current functions or members (or None).
- function_or_member = None # type: List[str]
+ function_or_member = None # type: List[Optional[str]]
# Ignore errors on these lines of each file.
ignored_lines = None # type: Dict[str, Set[int]]
@@ -99,31 +122,45 @@ class Errors:
# Collection of reported only_once messages.
only_once_messages = None # type: Set[str]
- # Set to False to show "In function "foo":" messages.
- hide_error_context = True # type: bool
+ # Set to True to show "In function "foo":" messages.
+ show_error_context = False # type: bool
- # Set to True to show column numbers in error messages
+ # Set to True to show column numbers in error messages.
show_column_numbers = False # type: bool
- def __init__(self, hide_error_context: bool = True,
+ # Stack of active fine-grained incremental checking targets within
+ # a module. The first item is always the current module id.
+ # (See mypy.server.update for more about targets.)
+ target = None # type: List[str]
+
+ def __init__(self, show_error_context: bool = False,
show_column_numbers: bool = False) -> None:
+ self.show_error_context = show_error_context
+ self.show_column_numbers = show_column_numbers
+ self.initialize()
+
+ def initialize(self) -> None:
self.error_info = []
self.import_ctx = []
+ self.error_files = set()
self.type_name = [None]
self.function_or_member = [None]
self.ignored_lines = OrderedDict()
self.used_ignored_lines = defaultdict(set)
self.ignored_files = set()
self.only_once_messages = set()
- self.hide_error_context = hide_error_context
- self.show_column_numbers = show_column_numbers
+ self.target = []
+
+ def reset(self) -> None:
+ self.initialize()
def copy(self) -> 'Errors':
- new = Errors(self.hide_error_context, self.show_column_numbers)
+ new = Errors(self.show_error_context, self.show_column_numbers)
new.file = self.file
new.import_ctx = self.import_ctx[:]
new.type_name = self.type_name[:]
new.function_or_member = self.function_or_member[:]
+ new.target = self.target[:]
return new
def set_ignore_prefix(self, prefix: str) -> None:
@@ -138,8 +175,8 @@ class Errors:
file = os.path.normpath(file)
return remove_path_prefix(file, self.ignore_prefix)
- def set_file(self, file: str, ignored_lines: Set[int] = None) -> None:
- """Set the path of the current file."""
+ def set_file(self, file: str, module: Optional[str], ignored_lines: Set[int] = None) -> None:
+ """Set the path and module id of the current file."""
# The path will be simplified later, in render_messages. That way
# * 'file' is always a key that uniquely identifies a source file
# that mypy read (simplified paths might not be unique); and
@@ -147,9 +184,11 @@ class Errors:
# reporting errors for files other than the one currently being
# processed.
self.file = file
+ if module:
+ self.target = [module]
def set_file_ignored_lines(self, file: str,
- ignored_lines: Set[int] = None,
+ ignored_lines: Set[int],
ignore_all: bool = False) -> None:
self.ignored_lines[file] = ignored_lines
if ignore_all:
@@ -157,17 +196,52 @@ class Errors:
def push_function(self, name: str) -> None:
"""Set the current function or member short name (it can be None)."""
+ self.push_target_component(name)
self.function_or_member.append(name)
def pop_function(self) -> None:
self.function_or_member.pop()
+ self.pop_target_component()
+
+ @contextmanager
+ def enter_function(self, name: str) -> Iterator[None]:
+ self.push_function(name)
+ yield
+ self.pop_function()
def push_type(self, name: str) -> None:
"""Set the short name of the current type (it can be None)."""
+ self.push_target_component(name)
self.type_name.append(name)
def pop_type(self) -> None:
self.type_name.pop()
+ self.pop_target_component()
+
+ def push_target_component(self, name: str) -> None:
+ if self.target and not self.function_or_member[-1]:
+ self.target.append('{}.{}'.format(self.target[-1], name))
+
+ def pop_target_component(self) -> None:
+ if self.target and not self.function_or_member[-1]:
+ self.target.pop()
+
+ def current_target(self) -> Optional[str]:
+ if self.target:
+ return self.target[-1]
+ return None
+
+ def current_module(self) -> Optional[str]:
+ if self.target:
+ return self.target[0]
+ return None
+
+ @contextmanager
+ def enter_type(self, name: str) -> Iterator[None]:
+ """Set the short name of the current type (it can be None)."""
+ self.push_type(name)
+ yield
+ self.pop_type()
def import_context(self) -> List[Tuple[str, int]]:
"""Return a copy of the import context."""
@@ -191,15 +265,16 @@ class Errors:
only_once: if True, only report this exact message once per build
origin_line: if non-None, override current context as origin
"""
- type = self.type_name[-1]
+ type = self.type_name[-1] # type: Optional[str]
if len(self.function_or_member) > 2:
type = None # Omit type context if nested function
if file is None:
file = self.file
- info = ErrorInfo(self.import_context(), file, type,
+ info = ErrorInfo(self.import_context(), file, self.current_module(), type,
self.function_or_member[-1], line, column, severity, message,
blocker, only_once,
- origin=(self.file, origin_line) if origin_line else None)
+ origin=(self.file, origin_line) if origin_line else None,
+ target=self.current_target())
self.add_error_info(info)
def add_error_info(self, info: ErrorInfo) -> None:
@@ -216,15 +291,16 @@ class Errors:
return
self.only_once_messages.add(info.message)
self.error_info.append(info)
+ self.error_files.add(file)
def generate_unused_ignore_notes(self) -> None:
for file, ignored_lines in self.ignored_lines.items():
if not self.is_typeshed_file(file):
for line in ignored_lines - self.used_ignored_lines[file]:
# Don't use report since add_error_info will ignore the error!
- info = ErrorInfo(self.import_context(), file, None, None,
- line, -1, 'note', "unused 'type: ignore' comment",
- False, False)
+ info = ErrorInfo(self.import_context(), file, self.current_module(), None,
+ None, line, -1, 'note', "unused 'type: ignore' comment",
+ False, False)
self.error_info.append(info)
def is_typeshed_file(self, file: str) -> bool:
@@ -243,6 +319,10 @@ class Errors:
"""Are the any errors that are blockers?"""
return any(err for err in self.error_info if err.blocker)
+ def is_errors_for_file(self, file: str) -> bool:
+ """Are there any errors for the given file?"""
+ return file in self.error_files
+
def raise_error(self) -> None:
"""Raise a CompileError with the generated messages.
@@ -274,7 +354,15 @@ class Errors:
a.append(s)
return a
- def render_messages(self, errors: List[ErrorInfo]) -> List[Tuple[str, int, int,
+ def targets(self) -> Set[str]:
+ """Return a set of all targets that contain errors."""
+ # TODO: Make sure that either target is always defined or that not being defined
+ # is okay for fine-grained incremental checking.
+ return set(info.target
+ for info in self.error_info
+ if info.target)
+
+ def render_messages(self, errors: List[ErrorInfo]) -> List[Tuple[Optional[str], int, int,
str, str]]:
"""Translate the messages into a sequence of tuples.
@@ -283,16 +371,16 @@ class Errors:
item may be None. If the line item is negative, the line
number is not defined for the tuple.
"""
- result = [] # type: List[Tuple[str, int, int, str, str]]
+ result = [] # type: List[Tuple[Optional[str], int, int, str, str]]
# (path, line, column, severity, message)
prev_import_context = [] # type: List[Tuple[str, int]]
- prev_function_or_member = None # type: str
- prev_type = None # type: str
+ prev_function_or_member = None # type: Optional[str]
+ prev_type = None # type: Optional[str]
for e in errors:
# Report module import context, if different from previous message.
- if self.hide_error_context:
+ if not self.show_error_context:
pass
elif e.import_ctx != prev_import_context:
last = len(e.import_ctx) - 1
@@ -315,7 +403,7 @@ class Errors:
file = self.simplify_path(e.file)
# Report context within a source file.
- if self.hide_error_context:
+ if not self.show_error_context:
pass
elif (e.function_or_member != prev_function_or_member or
e.type != prev_type):
@@ -372,10 +460,10 @@ class Errors:
result.extend(a)
return result
- def remove_duplicates(self, errors: List[Tuple[str, int, int, str, str]]
- ) -> List[Tuple[str, int, int, str, str]]:
+ def remove_duplicates(self, errors: List[Tuple[Optional[str], int, int, str, str]]
+ ) -> List[Tuple[Optional[str], int, int, str, str]]:
"""Remove duplicates from a sorted error list."""
- res = [] # type: List[Tuple[str, int, int, str, str]]
+ res = [] # type: List[Tuple[Optional[str], int, int, str, str]]
i = 0
while i < len(errors):
dup = False
@@ -449,6 +537,7 @@ def report_internal_error(err: Exception, file: str, line: int,
# Print "INTERNAL ERROR" message.
print('{}: error: INTERNAL ERROR --'.format(prefix),
'please report a bug at https://github.com/python/mypy/issues',
+ 'version: {}'.format(mypy_version),
file=sys.stderr)
# If requested, drop into pdb. This overrides show_tb.
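Errors now records a module id and a stack of fine-grained targets, and exposes enter_function/enter_type context managers. A minimal sketch of driving the new API directly (a hypothetical call site; the real ones are in the checker and semantic analyzer):

from mypy.errors import Errors

errors = Errors(show_error_context=True)
errors.set_file('example.py', 'example')       # path plus module id
with errors.enter_type('C'), errors.enter_function('method'):
    errors.report(10, 0, 'demonstration error')
    print(errors.current_target())             # example.C.method
print(errors.is_errors_for_file('example.py')) # True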
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index e90a89f..1830119 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -1,8 +1,8 @@
from typing import Dict, Iterable, List, TypeVar, Mapping, cast
from mypy.types import (
- Type, Instance, CallableType, TypeVisitor, UnboundType, ErrorType, AnyType,
- Void, NoneTyp, TypeVarType, Overloaded, TupleType, TypedDictType, UnionType,
+ Type, Instance, CallableType, TypeVisitor, UnboundType, AnyType,
+ NoneTyp, TypeVarType, Overloaded, TupleType, TypedDictType, UnionType,
ErasedType, TypeList, PartialType, DeletedType, UninhabitedType, TypeType, TypeVarId,
FunctionLike, TypeVarDef
)
@@ -63,18 +63,9 @@ class ExpandTypeVisitor(TypeVisitor[Type]):
def visit_unbound_type(self, t: UnboundType) -> Type:
return t
- def visit_error_type(self, t: ErrorType) -> Type:
- return t
-
- def visit_type_list(self, t: TypeList) -> Type:
- assert False, 'Not supported'
-
def visit_any(self, t: AnyType) -> Type:
return t
- def visit_void(self, t: Void) -> Type:
- return t
-
def visit_none_type(self, t: NoneTyp) -> Type:
return t
diff --git a/mypy/exprtotype.py b/mypy/exprtotype.py
index abc091a..db46cfd 100644
--- a/mypy/exprtotype.py
+++ b/mypy/exprtotype.py
@@ -2,22 +2,38 @@
from mypy.nodes import (
Expression, NameExpr, MemberExpr, IndexExpr, TupleExpr,
- ListExpr, StrExpr, BytesExpr, UnicodeExpr, EllipsisExpr
+ ListExpr, StrExpr, BytesExpr, UnicodeExpr, EllipsisExpr, CallExpr,
+ ARG_POS, ARG_NAMED, get_member_expr_fullname
+)
+from mypy.fastparse import parse_type_comment
+from mypy.types import (
+ Type, UnboundType, TypeList, EllipsisType, AnyType, Optional, CallableArgument,
)
-from mypy.parsetype import parse_str_as_type, TypeParseError
-from mypy.types import Type, UnboundType, TypeList, EllipsisType
class TypeTranslationError(Exception):
"""Exception raised when an expression is not valid as a type."""
-def expr_to_unanalyzed_type(expr: Expression) -> Type:
+def _extract_argument_name(expr: Expression) -> Optional[str]:
+ if isinstance(expr, NameExpr) and expr.name == 'None':
+ return None
+ elif isinstance(expr, StrExpr):
+ return expr.value
+ elif isinstance(expr, UnicodeExpr):
+ return expr.value
+ else:
+ raise TypeTranslationError()
+
+
+def expr_to_unanalyzed_type(expr: Expression, _parent: Optional[Expression] = None) -> Type:
"""Translate an expression to the corresponding type.
The result is not semantically analyzed. It can be UnboundType or TypeList.
Raise TypeTranslationError if the expression cannot represent a type.
"""
+ # The `parent` parameter is used in recursive calls to provide context for
+ # understanding whether a CallableArgument is ok.
if isinstance(expr, NameExpr):
name = expr.name
return UnboundType(name, line=expr.line, column=expr.column)
@@ -28,7 +44,7 @@ def expr_to_unanalyzed_type(expr: Expression) -> Type:
else:
raise TypeTranslationError()
elif isinstance(expr, IndexExpr):
- base = expr_to_unanalyzed_type(expr.base)
+ base = expr_to_unanalyzed_type(expr.base, expr)
if isinstance(base, UnboundType):
if base.args:
raise TypeTranslationError()
@@ -36,38 +52,67 @@ def expr_to_unanalyzed_type(expr: Expression) -> Type:
args = expr.index.items
else:
args = [expr.index]
- base.args = [expr_to_unanalyzed_type(arg) for arg in args]
+ base.args = [expr_to_unanalyzed_type(arg, expr) for arg in args]
if not base.args:
base.empty_tuple_index = True
return base
else:
raise TypeTranslationError()
+ elif isinstance(expr, CallExpr) and isinstance(_parent, ListExpr):
+ c = expr.callee
+ names = []
+ # Go through the dotted member expr chain to get the full arg
+ # constructor name to look up
+ while True:
+ if isinstance(c, NameExpr):
+ names.append(c.name)
+ break
+ elif isinstance(c, MemberExpr):
+ names.append(c.name)
+ c = c.expr
+ else:
+ raise TypeTranslationError()
+ arg_const = '.'.join(reversed(names))
+
+ # Go through the constructor args to get its name and type.
+ name = None
+ default_type = AnyType(implicit=True)
+ typ = default_type # type: Type
+ for i, arg in enumerate(expr.args):
+ if expr.arg_names[i] is not None:
+ if expr.arg_names[i] == "name":
+ if name is not None:
+ # Two names
+ raise TypeTranslationError()
+ name = _extract_argument_name(arg)
+ continue
+ elif expr.arg_names[i] == "type":
+ if typ is not default_type:
+ # Two types
+ raise TypeTranslationError()
+ typ = expr_to_unanalyzed_type(arg, expr)
+ continue
+ else:
+ raise TypeTranslationError()
+ elif i == 0:
+ typ = expr_to_unanalyzed_type(arg, expr)
+ elif i == 1:
+ name = _extract_argument_name(arg)
+ else:
+ raise TypeTranslationError()
+ return CallableArgument(typ, name, arg_const, expr.line, expr.column)
elif isinstance(expr, ListExpr):
- return TypeList([expr_to_unanalyzed_type(t) for t in expr.items],
+ return TypeList([expr_to_unanalyzed_type(t, expr) for t in expr.items],
line=expr.line, column=expr.column)
elif isinstance(expr, (StrExpr, BytesExpr, UnicodeExpr)):
# Parse string literal type.
try:
- result = parse_str_as_type(expr.value, expr.line)
- except TypeParseError:
+ result = parse_type_comment(expr.value, expr.line, None)
+ assert result is not None
+ except SyntaxError:
raise TypeTranslationError()
return result
elif isinstance(expr, EllipsisExpr):
return EllipsisType(expr.line)
else:
raise TypeTranslationError()
-
-
-def get_member_expr_fullname(expr: MemberExpr) -> str:
- """Return the qualified name representation of a member expression.
-
- Return a string of form foo.bar, foo.bar.baz, or similar, or None if the
- argument cannot be represented in this form.
- """
- if isinstance(expr.expr, NameExpr):
- initial = expr.expr.name
- elif isinstance(expr.expr, MemberExpr):
- initial = get_member_expr_fullname(expr.expr)
- else:
- return None
- return '{}.{}'.format(initial, expr.name)
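The CallExpr branch added to expr_to_unanalyzed_type is what translates argument constructors such as Arg(...) inside a Callable[...] argument list into CallableArgument nodes. A user-level sketch of the syntax it covers, assuming the mypy_extensions package is installed:

from typing import Callable
from mypy_extensions import Arg, DefaultArg

# A callable type with a named argument and an optional argument:
Handler = Callable[[Arg(int, 'code'), DefaultArg(str, 'reason')], None]

def notify(code: int, reason: str = '') -> None:
    pass

handler: Handler = notify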
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index 1d8fe4b..fe31e4a 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -2,9 +2,12 @@ from functools import wraps
import sys
from typing import Tuple, Union, TypeVar, Callable, Sequence, Optional, Any, cast, List, Set
-from mypy.sharedparse import special_function_elide_names, argument_elide_name
+from mypy.sharedparse import (
+ special_function_elide_names, argument_elide_name,
+)
from mypy.nodes import (
- MypyFile, Node, ImportBase, Import, ImportAll, ImportFrom, FuncDef, OverloadedFuncDef,
+ MypyFile, Node, ImportBase, Import, ImportAll, ImportFrom, FuncDef,
+ OverloadedFuncDef, OverloadPart,
ClassDef, Decorator, Block, Var, OperatorAssignmentStmt,
ExpressionStmt, AssignmentStmt, ReturnStmt, RaiseStmt, AssertStmt,
DelStmt, BreakStmt, ContinueStmt, PassStmt, GlobalDecl,
@@ -12,14 +15,16 @@ from mypy.nodes import (
TupleExpr, GeneratorExpr, ListComprehension, ListExpr, ConditionalExpr,
DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr,
FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr, SliceExpr, OpExpr,
- UnaryExpr, FuncExpr, ComparisonExpr,
+ UnaryExpr, LambdaExpr, ComparisonExpr,
StarExpr, YieldFromExpr, NonlocalDecl, DictionaryComprehension,
SetComprehension, ComplexExpr, EllipsisExpr, YieldExpr, Argument,
AwaitExpr, TempNode, Expression, Statement,
- ARG_POS, ARG_OPT, ARG_STAR, ARG_NAMED, ARG_NAMED_OPT, ARG_STAR2
+ ARG_POS, ARG_OPT, ARG_STAR, ARG_NAMED, ARG_NAMED_OPT, ARG_STAR2,
+ check_arg_names,
)
from mypy.types import (
Type, CallableType, AnyType, UnboundType, TupleType, TypeList, EllipsisType,
+ CallableArgument,
)
from mypy import defaults
from mypy import experiments
@@ -27,24 +32,31 @@ from mypy import messages
from mypy.errors import Errors
try:
- from typed_ast import ast35
+ from typed_ast import ast3
except ImportError:
if sys.version_info.minor > 2:
- print('You must install the typed_ast package before you can run mypy'
- ' with `--fast-parser`.\n'
- 'You can do this with `python3 -m pip install typed-ast`.',
- file=sys.stderr)
+ try:
+ from typed_ast import ast35 # type: ignore
+ except ImportError:
+ print('The typed_ast package is not installed.\n'
+ 'You can install it with `python3 -m pip install typed-ast`.',
+ file=sys.stderr)
+ else:
+ print('You need a more recent version of the typed_ast package.\n'
+ 'You can update to the latest version with '
+ '`python3 -m pip install -U typed-ast`.',
+ file=sys.stderr)
else:
- print('The typed_ast package required by --fast-parser is only compatible with'
- ' Python 3.3 and greater.')
+ print('Mypy requires the typed_ast package, which is only compatible with\n'
+ 'Python 3.3 and greater.', file=sys.stderr)
sys.exit(1)
-T = TypeVar('T', bound=Union[ast35.expr, ast35.stmt])
+T = TypeVar('T', bound=Union[ast3.expr, ast3.stmt])
U = TypeVar('U', bound=Node)
V = TypeVar('V')
TYPE_COMMENT_SYNTAX_ERROR = 'syntax error in type comment'
-TYPE_COMMENT_AST_ERROR = 'invalid type comment'
+TYPE_COMMENT_AST_ERROR = 'invalid type comment or annotation'
def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
@@ -61,11 +73,12 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
if errors is None:
errors = Errors()
raise_on_error = True
- errors.set_file('<input>' if fnam is None else fnam)
+ errors.set_file('<input>' if fnam is None else fnam, None)
is_stub_file = bool(fnam) and fnam.endswith('.pyi')
try:
assert pyversion[0] >= 3 or is_stub_file
- ast = ast35.parse(source, fnam, 'exec')
+ feature_version = pyversion[1] if not is_stub_file else defaults.PYTHON3_VERSION[1]
+ ast = ast3.parse(source, fnam, 'exec', feature_version=feature_version)
tree = ASTConverter(pyversion=pyversion,
is_stub=is_stub_file,
@@ -84,14 +97,17 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
return tree
-def parse_type_comment(type_comment: str, line: int, errors: Errors) -> Optional[Type]:
+def parse_type_comment(type_comment: str, line: int, errors: Optional[Errors]) -> Optional[Type]:
try:
- typ = ast35.parse(type_comment, '<type_comment>', 'eval')
+ typ = ast3.parse(type_comment, '<type_comment>', 'eval')
except SyntaxError as e:
- errors.report(line, e.offset, TYPE_COMMENT_SYNTAX_ERROR)
- return None
+ if errors is not None:
+ errors.report(line, e.offset, TYPE_COMMENT_SYNTAX_ERROR)
+ return None
+ else:
+ raise
else:
- assert isinstance(typ, ast35.Expression)
+ assert isinstance(typ, ast3.Expression)
return TypeConverter(errors, line=line).visit(typ.body)
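parse() and parse_type_comment() now target typed_ast's ast3 module and pass a feature_version derived from the configured Python version. A rough sketch of the underlying typed_ast call, not part of the diff and assuming the typed-ast package is installed:

from typed_ast import ast3

source = "x = 1  # type: int\n"
tree = ast3.parse(source, '<example>', 'exec', feature_version=6)
assign = tree.body[0]
print(type(assign).__name__, assign.type_comment)  # Assign int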
@@ -111,16 +127,16 @@ def find(f: Callable[[V], bool], seq: Sequence[V]) -> V:
return None
-def is_no_type_check_decorator(expr: ast35.expr) -> bool:
- if isinstance(expr, ast35.Name):
+def is_no_type_check_decorator(expr: ast3.expr) -> bool:
+ if isinstance(expr, ast3.Name):
return expr.id == 'no_type_check'
- elif isinstance(expr, ast35.Attribute):
- if isinstance(expr.value, ast35.Name):
+ elif isinstance(expr, ast3.Attribute):
+ if isinstance(expr.value, ast3.Name):
return expr.value.id == 'typing' and expr.attr == 'no_type_check'
return False
-class ASTConverter(ast35.NodeTransformer):
+class ASTConverter(ast3.NodeTransformer): # type: ignore # typeshed PR #931
def __init__(self,
pyversion: Tuple[int, int],
is_stub: bool,
@@ -137,13 +153,13 @@ class ASTConverter(ast35.NodeTransformer):
def fail(self, msg: str, line: int, column: int) -> None:
self.errors.report(line, column, msg)
- def generic_visit(self, node: ast35.AST) -> None:
+ def generic_visit(self, node: ast3.AST) -> None:
raise RuntimeError('AST node not implemented: ' + str(type(node)))
def visit_NoneType(self, n: Any) -> Optional[Node]:
return None
- def translate_expr_list(self, l: Sequence[ast35.AST]) -> List[Expression]:
+ def translate_expr_list(self, l: Sequence[ast3.AST]) -> List[Expression]:
res = [] # type: List[Expression]
for e in l:
exp = self.visit(e)
@@ -151,7 +167,7 @@ class ASTConverter(ast35.NodeTransformer):
res.append(exp)
return res
- def translate_stmt_list(self, l: Sequence[ast35.AST]) -> List[Statement]:
+ def translate_stmt_list(self, l: Sequence[ast3.AST]) -> List[Statement]:
res = [] # type: List[Statement]
for e in l:
stmt = self.visit(e)
@@ -160,22 +176,22 @@ class ASTConverter(ast35.NodeTransformer):
return res
op_map = {
- ast35.Add: '+',
- ast35.Sub: '-',
- ast35.Mult: '*',
- ast35.MatMult: '@',
- ast35.Div: '/',
- ast35.Mod: '%',
- ast35.Pow: '**',
- ast35.LShift: '<<',
- ast35.RShift: '>>',
- ast35.BitOr: '|',
- ast35.BitXor: '^',
- ast35.BitAnd: '&',
- ast35.FloorDiv: '//'
+ ast3.Add: '+',
+ ast3.Sub: '-',
+ ast3.Mult: '*',
+ ast3.MatMult: '@',
+ ast3.Div: '/',
+ ast3.Mod: '%',
+ ast3.Pow: '**',
+ ast3.LShift: '<<',
+ ast3.RShift: '>>',
+ ast3.BitOr: '|',
+ ast3.BitXor: '^',
+ ast3.BitAnd: '&',
+ ast3.FloorDiv: '//'
}
- def from_operator(self, op: ast35.operator) -> str:
+ def from_operator(self, op: ast3.operator) -> str:
op_name = ASTConverter.op_map.get(type(op))
if op_name is None:
raise RuntimeError('Unknown operator ' + str(type(op)))
@@ -183,26 +199,26 @@ class ASTConverter(ast35.NodeTransformer):
return op_name
comp_op_map = {
- ast35.Gt: '>',
- ast35.Lt: '<',
- ast35.Eq: '==',
- ast35.GtE: '>=',
- ast35.LtE: '<=',
- ast35.NotEq: '!=',
- ast35.Is: 'is',
- ast35.IsNot: 'is not',
- ast35.In: 'in',
- ast35.NotIn: 'not in'
+ ast3.Gt: '>',
+ ast3.Lt: '<',
+ ast3.Eq: '==',
+ ast3.GtE: '>=',
+ ast3.LtE: '<=',
+ ast3.NotEq: '!=',
+ ast3.Is: 'is',
+ ast3.IsNot: 'is not',
+ ast3.In: 'in',
+ ast3.NotIn: 'not in'
}
- def from_comp_operator(self, op: ast35.cmpop) -> str:
+ def from_comp_operator(self, op: ast3.cmpop) -> str:
op_name = ASTConverter.comp_op_map.get(type(op))
if op_name is None:
raise RuntimeError('Unknown comparison operator ' + str(type(op)))
else:
return op_name
- def as_block(self, stmts: List[ast35.stmt], lineno: int) -> Block:
+ def as_block(self, stmts: List[ast3.stmt], lineno: int) -> Block:
b = None
if stmts:
b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts)))
@@ -211,11 +227,12 @@ class ASTConverter(ast35.NodeTransformer):
def fix_function_overloads(self, stmts: List[Statement]) -> List[Statement]:
ret = [] # type: List[Statement]
- current_overload = []
+ current_overload = [] # type: List[OverloadPart]
current_overload_name = None
- # mypy doesn't actually check that the decorator is literally @overload
for stmt in stmts:
- if isinstance(stmt, Decorator) and stmt.name() == current_overload_name:
+ if (current_overload_name is not None
+ and isinstance(stmt, (Decorator, FuncDef))
+ and stmt.name() == current_overload_name):
current_overload.append(stmt)
else:
if len(current_overload) == 1:
@@ -253,7 +270,7 @@ class ASTConverter(ast35.NodeTransformer):
return 'builtins'
return id
- def visit_Module(self, mod: ast35.Module) -> MypyFile:
+ def visit_Module(self, mod: ast3.Module) -> MypyFile:
body = self.fix_function_overloads(self.translate_stmt_list(mod.body))
return MypyFile(body,
@@ -268,16 +285,16 @@ class ASTConverter(ast35.NodeTransformer):
# arguments = (arg* args, arg? vararg, arg* kwonlyargs, expr* kw_defaults,
# arg? kwarg, expr* defaults)
@with_line
- def visit_FunctionDef(self, n: ast35.FunctionDef) -> Union[FuncDef, Decorator]:
+ def visit_FunctionDef(self, n: ast3.FunctionDef) -> Union[FuncDef, Decorator]:
return self.do_func_def(n)
# AsyncFunctionDef(identifier name, arguments args,
# stmt* body, expr* decorator_list, expr? returns, string? type_comment)
@with_line
- def visit_AsyncFunctionDef(self, n: ast35.AsyncFunctionDef) -> Union[FuncDef, Decorator]:
+ def visit_AsyncFunctionDef(self, n: ast3.AsyncFunctionDef) -> Union[FuncDef, Decorator]:
return self.do_func_def(n, is_coroutine=True)
- def do_func_def(self, n: Union[ast35.FunctionDef, ast35.AsyncFunctionDef],
+ def do_func_def(self, n: Union[ast3.FunctionDef, ast3.AsyncFunctionDef],
is_coroutine: bool = False) -> Union[FuncDef, Decorator]:
"""Helper shared between visit_FunctionDef and visit_AsyncFunctionDef."""
no_type_check = bool(n.decorator_list and
@@ -296,11 +313,11 @@ class ASTConverter(ast35.NodeTransformer):
return_type = None
elif n.type_comment is not None:
try:
- func_type_ast = ast35.parse(n.type_comment, '<func_type>', 'func_type')
- assert isinstance(func_type_ast, ast35.FunctionType)
+ func_type_ast = ast3.parse(n.type_comment, '<func_type>', 'func_type')
+ assert isinstance(func_type_ast, ast3.FunctionType)
# for ellipsis arg
if (len(func_type_ast.argtypes) == 1 and
- isinstance(func_type_ast.argtypes[0], ast35.Ellipsis)):
+ isinstance(func_type_ast.argtypes[0], ast3.Ellipsis)):
if n.returns:
# PEP 484 disallows both type annotations and type comments
self.fail(messages.DUPLICATE_TYPE_SIGNATURES, n.lineno, n.col_offset)
@@ -331,9 +348,6 @@ class ASTConverter(ast35.NodeTransformer):
for arg, arg_type in zip(args, arg_types):
self.set_type_optional(arg_type, arg.initializer)
- if isinstance(return_type, UnboundType):
- return_type.is_ret_type = True
-
func_type = None
if any(arg_types) or return_type:
if len(arg_types) != 1 and any(isinstance(t, EllipsisType) for t in arg_types):
@@ -384,19 +398,25 @@ class ASTConverter(ast35.NodeTransformer):
type.optional = optional
def transform_args(self,
- args: ast35.arguments,
+ args: ast3.arguments,
line: int,
no_type_check: bool = False,
) -> List[Argument]:
- def make_argument(arg: ast35.arg, default: Optional[ast35.expr], kind: int) -> Argument:
+ def make_argument(arg: ast3.arg, default: Optional[ast3.expr], kind: int) -> Argument:
if no_type_check:
arg_type = None
else:
- arg_type = TypeConverter(self.errors, line=line).visit(arg.annotation)
+ if arg.annotation is not None and arg.type_comment is not None:
+ self.fail(messages.DUPLICATE_TYPE_SIGNATURES, arg.lineno, arg.col_offset)
+ arg_type = None
+ if arg.annotation is not None:
+ arg_type = TypeConverter(self.errors, line=line).visit(arg.annotation)
+ elif arg.type_comment is not None:
+ arg_type = parse_type_comment(arg.type_comment, arg.lineno, self.errors)
return Argument(Var(arg.arg), arg_type, self.visit(default), kind)
new_args = []
- names = [] # type: List[ast35.arg]
+ names = [] # type: List[ast3.arg]
num_no_defaults = len(args.args) - len(args.defaults)
# positional arguments without defaults
for a in args.args[:num_no_defaults]:
@@ -426,24 +446,12 @@ class ASTConverter(ast35.NodeTransformer):
new_args.append(make_argument(args.kwarg, None, ARG_STAR2))
names.append(args.kwarg)
- seen_names = set() # type: Set[str]
- for name in names:
- if name.arg in seen_names:
- self.fail("duplicate argument '{}' in function definition".format(name.arg),
- name.lineno, name.col_offset)
- break
- seen_names.add(name.arg)
+ def fail_arg(msg: str, arg: ast3.arg) -> None:
+ self.fail(msg, arg.lineno, arg.col_offset)
- return new_args
+ check_arg_names([name.arg for name in names], names, fail_arg)
- def stringify_name(self, n: ast35.AST) -> str:
- if isinstance(n, ast35.Name):
- return n.id
- elif isinstance(n, ast35.Attribute):
- sv = self.stringify_name(n.value)
- if sv is not None:
- return "{}.{}".format(sv, n.attr)
- return None # Can't do it.
+ return new_args
# ClassDef(identifier name,
# expr* bases,
@@ -451,12 +459,12 @@ class ASTConverter(ast35.NodeTransformer):
# stmt* body,
# expr* decorator_list)
@with_line
- def visit_ClassDef(self, n: ast35.ClassDef) -> ClassDef:
+ def visit_ClassDef(self, n: ast3.ClassDef) -> ClassDef:
self.class_nesting += 1
metaclass_arg = find(lambda x: x.arg == 'metaclass', n.keywords)
metaclass = None
if metaclass_arg:
- metaclass = self.stringify_name(metaclass_arg.value)
+ metaclass = stringify_name(metaclass_arg.value)
if metaclass is None:
metaclass = '<error>' # To be reported later
@@ -471,12 +479,12 @@ class ASTConverter(ast35.NodeTransformer):
# Return(expr? value)
@with_line
- def visit_Return(self, n: ast35.Return) -> ReturnStmt:
+ def visit_Return(self, n: ast3.Return) -> ReturnStmt:
return ReturnStmt(self.visit(n.value))
# Delete(expr* targets)
@with_line
- def visit_Delete(self, n: ast35.Delete) -> DelStmt:
+ def visit_Delete(self, n: ast3.Delete) -> DelStmt:
if len(n.targets) > 1:
tup = TupleExpr(self.translate_expr_list(n.targets))
tup.set_line(n.lineno)
@@ -486,40 +494,36 @@ class ASTConverter(ast35.NodeTransformer):
# Assign(expr* targets, expr? value, string? type_comment, expr? annotation)
@with_line
- def visit_Assign(self, n: ast35.Assign) -> AssignmentStmt:
- typ = None
- if hasattr(n, 'annotation') and n.annotation is not None: # type: ignore
- new_syntax = True
- else:
- new_syntax = False
- if new_syntax and self.pyversion < (3, 6):
- self.fail('Variable annotation syntax is only supported in Python 3.6, '
- 'use type comment instead', n.lineno, n.col_offset)
- # typed_ast prevents having both type_comment and annotation.
+ def visit_Assign(self, n: ast3.Assign) -> AssignmentStmt:
+ lvalues = self.translate_expr_list(n.targets)
+ rvalue = self.visit(n.value)
if n.type_comment is not None:
typ = parse_type_comment(n.type_comment, n.lineno, self.errors)
- elif new_syntax:
- typ = TypeConverter(self.errors, line=n.lineno).visit(n.annotation) # type: ignore
- typ.column = n.annotation.col_offset
+ else:
+ typ = None
+ return AssignmentStmt(lvalues, rvalue, type=typ, new_syntax=False)
+
+ # AnnAssign(expr target, expr annotation, expr? value, int simple)
+ @with_line
+ def visit_AnnAssign(self, n: ast3.AnnAssign) -> AssignmentStmt:
if n.value is None: # always allow 'x: int'
rvalue = TempNode(AnyType()) # type: Expression
else:
rvalue = self.visit(n.value)
- lvalues = self.translate_expr_list(n.targets)
- return AssignmentStmt(lvalues,
- rvalue,
- type=typ, new_syntax=new_syntax)
+ typ = TypeConverter(self.errors, line=n.lineno).visit(n.annotation)
+ typ.column = n.annotation.col_offset
+ return AssignmentStmt([self.visit(n.target)], rvalue, type=typ, new_syntax=True)
# AugAssign(expr target, operator op, expr value)
@with_line
- def visit_AugAssign(self, n: ast35.AugAssign) -> OperatorAssignmentStmt:
+ def visit_AugAssign(self, n: ast3.AugAssign) -> OperatorAssignmentStmt:
return OperatorAssignmentStmt(self.from_operator(n.op),
self.visit(n.target),
self.visit(n.value))
# For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)
@with_line
- def visit_For(self, n: ast35.For) -> ForStmt:
+ def visit_For(self, n: ast3.For) -> ForStmt:
if n.type_comment is not None:
target_type = parse_type_comment(n.type_comment, n.lineno, self.errors)
else:
@@ -530,33 +534,38 @@ class ASTConverter(ast35.NodeTransformer):
self.as_block(n.orelse, n.lineno),
target_type)
- # AsyncFor(expr target, expr iter, stmt* body, stmt* orelse)
+ # AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)
@with_line
- def visit_AsyncFor(self, n: ast35.AsyncFor) -> ForStmt:
+ def visit_AsyncFor(self, n: ast3.AsyncFor) -> ForStmt:
+ if n.type_comment is not None:
+ target_type = parse_type_comment(n.type_comment, n.lineno, self.errors)
+ else:
+ target_type = None
r = ForStmt(self.visit(n.target),
self.visit(n.iter),
self.as_block(n.body, n.lineno),
- self.as_block(n.orelse, n.lineno))
+ self.as_block(n.orelse, n.lineno),
+ target_type)
r.is_async = True
return r
# While(expr test, stmt* body, stmt* orelse)
@with_line
- def visit_While(self, n: ast35.While) -> WhileStmt:
+ def visit_While(self, n: ast3.While) -> WhileStmt:
return WhileStmt(self.visit(n.test),
self.as_block(n.body, n.lineno),
self.as_block(n.orelse, n.lineno))
# If(expr test, stmt* body, stmt* orelse)
@with_line
- def visit_If(self, n: ast35.If) -> IfStmt:
+ def visit_If(self, n: ast3.If) -> IfStmt:
return IfStmt([self.visit(n.test)],
[self.as_block(n.body, n.lineno)],
self.as_block(n.orelse, n.lineno))
# With(withitem* items, stmt* body, string? type_comment)
@with_line
- def visit_With(self, n: ast35.With) -> WithStmt:
+ def visit_With(self, n: ast3.With) -> WithStmt:
if n.type_comment is not None:
target_type = parse_type_comment(n.type_comment, n.lineno, self.errors)
else:
@@ -566,23 +575,28 @@ class ASTConverter(ast35.NodeTransformer):
self.as_block(n.body, n.lineno),
target_type)
- # AsyncWith(withitem* items, stmt* body)
+ # AsyncWith(withitem* items, stmt* body, string? type_comment)
@with_line
- def visit_AsyncWith(self, n: ast35.AsyncWith) -> WithStmt:
+ def visit_AsyncWith(self, n: ast3.AsyncWith) -> WithStmt:
+ if n.type_comment is not None:
+ target_type = parse_type_comment(n.type_comment, n.lineno, self.errors)
+ else:
+ target_type = None
r = WithStmt([self.visit(i.context_expr) for i in n.items],
[self.visit(i.optional_vars) for i in n.items],
- self.as_block(n.body, n.lineno))
+ self.as_block(n.body, n.lineno),
+ target_type)
r.is_async = True
return r
# Raise(expr? exc, expr? cause)
@with_line
- def visit_Raise(self, n: ast35.Raise) -> RaiseStmt:
+ def visit_Raise(self, n: ast3.Raise) -> RaiseStmt:
return RaiseStmt(self.visit(n.exc), self.visit(n.cause))
# Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)
@with_line
- def visit_Try(self, n: ast35.Try) -> TryStmt:
+ def visit_Try(self, n: ast3.Try) -> TryStmt:
vs = [NameExpr(h.name) if h.name is not None else None for h in n.handlers]
types = [self.visit(h.type) for h in n.handlers]
handlers = [self.as_block(h.body, h.lineno) for h in n.handlers]
@@ -596,12 +610,12 @@ class ASTConverter(ast35.NodeTransformer):
# Assert(expr test, expr? msg)
@with_line
- def visit_Assert(self, n: ast35.Assert) -> AssertStmt:
+ def visit_Assert(self, n: ast3.Assert) -> AssertStmt:
return AssertStmt(self.visit(n.test), self.visit(n.msg))
# Import(alias* names)
@with_line
- def visit_Import(self, n: ast35.Import) -> Import:
+ def visit_Import(self, n: ast3.Import) -> Import:
names = [] # type: List[Tuple[str, str]]
for alias in n.names:
name = self.translate_module_id(alias.name)
@@ -618,7 +632,7 @@ class ASTConverter(ast35.NodeTransformer):
# ImportFrom(identifier? module, alias* names, int? level)
@with_line
- def visit_ImportFrom(self, n: ast35.ImportFrom) -> ImportBase:
+ def visit_ImportFrom(self, n: ast3.ImportFrom) -> ImportBase:
i = None # type: ImportBase
if len(n.names) == 1 and n.names[0].name == '*':
i = ImportAll(n.module, n.level)
@@ -631,45 +645,45 @@ class ASTConverter(ast35.NodeTransformer):
# Global(identifier* names)
@with_line
- def visit_Global(self, n: ast35.Global) -> GlobalDecl:
+ def visit_Global(self, n: ast3.Global) -> GlobalDecl:
return GlobalDecl(n.names)
# Nonlocal(identifier* names)
@with_line
- def visit_Nonlocal(self, n: ast35.Nonlocal) -> NonlocalDecl:
+ def visit_Nonlocal(self, n: ast3.Nonlocal) -> NonlocalDecl:
return NonlocalDecl(n.names)
# Expr(expr value)
@with_line
- def visit_Expr(self, n: ast35.Expr) -> ExpressionStmt:
+ def visit_Expr(self, n: ast3.Expr) -> ExpressionStmt:
value = self.visit(n.value)
return ExpressionStmt(value)
# Pass
@with_line
- def visit_Pass(self, n: ast35.Pass) -> PassStmt:
+ def visit_Pass(self, n: ast3.Pass) -> PassStmt:
return PassStmt()
# Break
@with_line
- def visit_Break(self, n: ast35.Break) -> BreakStmt:
+ def visit_Break(self, n: ast3.Break) -> BreakStmt:
return BreakStmt()
# Continue
@with_line
- def visit_Continue(self, n: ast35.Continue) -> ContinueStmt:
+ def visit_Continue(self, n: ast3.Continue) -> ContinueStmt:
return ContinueStmt()
# --- expr ---
# BoolOp(boolop op, expr* values)
@with_line
- def visit_BoolOp(self, n: ast35.BoolOp) -> OpExpr:
+ def visit_BoolOp(self, n: ast3.BoolOp) -> OpExpr:
# mypy translates (1 and 2 and 3) as (1 and (2 and 3))
assert len(n.values) >= 2
op = None
- if isinstance(n.op, ast35.And):
+ if isinstance(n.op, ast3.And):
op = 'and'
- elif isinstance(n.op, ast35.Or):
+ elif isinstance(n.op, ast3.Or):
op = 'or'
else:
raise RuntimeError('unknown BoolOp ' + str(type(n)))
@@ -685,7 +699,7 @@ class ASTConverter(ast35.NodeTransformer):
# BinOp(expr left, operator op, expr right)
@with_line
- def visit_BinOp(self, n: ast35.BinOp) -> OpExpr:
+ def visit_BinOp(self, n: ast3.BinOp) -> OpExpr:
op = self.from_operator(n.op)
if op is None:
@@ -695,15 +709,15 @@ class ASTConverter(ast35.NodeTransformer):
# UnaryOp(unaryop op, expr operand)
@with_line
- def visit_UnaryOp(self, n: ast35.UnaryOp) -> UnaryExpr:
+ def visit_UnaryOp(self, n: ast3.UnaryOp) -> UnaryExpr:
op = None
- if isinstance(n.op, ast35.Invert):
+ if isinstance(n.op, ast3.Invert):
op = '~'
- elif isinstance(n.op, ast35.Not):
+ elif isinstance(n.op, ast3.Not):
op = 'not'
- elif isinstance(n.op, ast35.UAdd):
+ elif isinstance(n.op, ast3.UAdd):
op = '+'
- elif isinstance(n.op, ast35.USub):
+ elif isinstance(n.op, ast3.USub):
op = '-'
if op is None:
@@ -713,84 +727,88 @@ class ASTConverter(ast35.NodeTransformer):
# Lambda(arguments args, expr body)
@with_line
- def visit_Lambda(self, n: ast35.Lambda) -> FuncExpr:
- body = ast35.Return(n.body)
+ def visit_Lambda(self, n: ast3.Lambda) -> LambdaExpr:
+ body = ast3.Return(n.body)
body.lineno = n.lineno
body.col_offset = n.col_offset
- return FuncExpr(self.transform_args(n.args, n.lineno),
+ return LambdaExpr(self.transform_args(n.args, n.lineno),
self.as_block([body], n.lineno))
# IfExp(expr test, expr body, expr orelse)
@with_line
- def visit_IfExp(self, n: ast35.IfExp) -> ConditionalExpr:
+ def visit_IfExp(self, n: ast3.IfExp) -> ConditionalExpr:
return ConditionalExpr(self.visit(n.test),
self.visit(n.body),
self.visit(n.orelse))
# Dict(expr* keys, expr* values)
@with_line
- def visit_Dict(self, n: ast35.Dict) -> DictExpr:
+ def visit_Dict(self, n: ast3.Dict) -> DictExpr:
return DictExpr(list(zip(self.translate_expr_list(n.keys),
self.translate_expr_list(n.values))))
# Set(expr* elts)
@with_line
- def visit_Set(self, n: ast35.Set) -> SetExpr:
+ def visit_Set(self, n: ast3.Set) -> SetExpr:
return SetExpr(self.translate_expr_list(n.elts))
# ListComp(expr elt, comprehension* generators)
@with_line
- def visit_ListComp(self, n: ast35.ListComp) -> ListComprehension:
- return ListComprehension(self.visit_GeneratorExp(cast(ast35.GeneratorExp, n)))
+ def visit_ListComp(self, n: ast3.ListComp) -> ListComprehension:
+ return ListComprehension(self.visit_GeneratorExp(cast(ast3.GeneratorExp, n)))
# SetComp(expr elt, comprehension* generators)
@with_line
- def visit_SetComp(self, n: ast35.SetComp) -> SetComprehension:
- return SetComprehension(self.visit_GeneratorExp(cast(ast35.GeneratorExp, n)))
+ def visit_SetComp(self, n: ast3.SetComp) -> SetComprehension:
+ return SetComprehension(self.visit_GeneratorExp(cast(ast3.GeneratorExp, n)))
# DictComp(expr key, expr value, comprehension* generators)
@with_line
- def visit_DictComp(self, n: ast35.DictComp) -> DictionaryComprehension:
+ def visit_DictComp(self, n: ast3.DictComp) -> DictionaryComprehension:
targets = [self.visit(c.target) for c in n.generators]
iters = [self.visit(c.iter) for c in n.generators]
ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators]
+ is_async = [bool(c.is_async) for c in n.generators]
return DictionaryComprehension(self.visit(n.key),
self.visit(n.value),
targets,
iters,
- ifs_list)
+ ifs_list,
+ is_async)
# GeneratorExp(expr elt, comprehension* generators)
@with_line
- def visit_GeneratorExp(self, n: ast35.GeneratorExp) -> GeneratorExpr:
+ def visit_GeneratorExp(self, n: ast3.GeneratorExp) -> GeneratorExpr:
targets = [self.visit(c.target) for c in n.generators]
iters = [self.visit(c.iter) for c in n.generators]
ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators]
+ is_async = [bool(c.is_async) for c in n.generators]
return GeneratorExpr(self.visit(n.elt),
targets,
iters,
- ifs_list)
+ ifs_list,
+ is_async)
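Both comprehension visitors (and their Python 2 counterparts, which pass all-False lists) now carry a per-clause is_async flag, since a Python 3.6 comprehension may mix `async for` and plain `for` clauses. The stdlib ast module exposes the same field:

    import ast

    src = "async def f(aiter, ys):\n    return [x async for a in aiter for x in ys]"
    comp = ast.parse(src).body[0].body[0].value
    print([bool(c.is_async) for c in comp.generators])  # [True, False]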
# Await(expr value)
@with_line
- def visit_Await(self, n: ast35.Await) -> AwaitExpr:
+ def visit_Await(self, n: ast3.Await) -> AwaitExpr:
v = self.visit(n.value)
return AwaitExpr(v)
# Yield(expr? value)
@with_line
- def visit_Yield(self, n: ast35.Yield) -> YieldExpr:
+ def visit_Yield(self, n: ast3.Yield) -> YieldExpr:
return YieldExpr(self.visit(n.value))
# YieldFrom(expr value)
@with_line
- def visit_YieldFrom(self, n: ast35.YieldFrom) -> YieldFromExpr:
+ def visit_YieldFrom(self, n: ast3.YieldFrom) -> YieldFromExpr:
return YieldFromExpr(self.visit(n.value))
# Compare(expr left, cmpop* ops, expr* comparators)
@with_line
- def visit_Compare(self, n: ast35.Compare) -> ComparisonExpr:
+ def visit_Compare(self, n: ast3.Compare) -> ComparisonExpr:
operators = [self.from_comp_operator(o) for o in n.ops]
operands = self.translate_expr_list([n.left] + n.comparators)
return ComparisonExpr(operators, operands)
@@ -798,14 +816,14 @@ class ASTConverter(ast35.NodeTransformer):
# Call(expr func, expr* args, keyword* keywords)
# keyword = (identifier? arg, expr value)
@with_line
- def visit_Call(self, n: ast35.Call) -> CallExpr:
- def is_star2arg(k: ast35.keyword) -> bool:
+ def visit_Call(self, n: ast3.Call) -> CallExpr:
+ def is_star2arg(k: ast3.keyword) -> bool:
return k.arg is None
arg_types = self.translate_expr_list(
- [a.value if isinstance(a, ast35.Starred) else a for a in n.args] +
+ [a.value if isinstance(a, ast3.Starred) else a for a in n.args] +
[k.value for k in n.keywords])
- arg_kinds = ([ARG_STAR if isinstance(a, ast35.Starred) else ARG_POS for a in n.args] +
+ arg_kinds = ([ARG_STAR if isinstance(a, ast3.Starred) else ARG_POS for a in n.args] +
[ARG_STAR2 if is_star2arg(k) else ARG_NAMED for k in n.keywords])
return CallExpr(self.visit(n.func),
arg_types,
@@ -814,10 +832,7 @@ class ASTConverter(ast35.NodeTransformer):
# Num(object n) -- a number as a PyObject.
@with_line
- def visit_Num(self, n: ast35.Num) -> Union[IntExpr, FloatExpr, ComplexExpr]:
- if getattr(n, 'contains_underscores', None) and self.pyversion < (3, 6):
- self.fail('Underscores in numeric literals are only supported in Python 3.6',
- n.lineno, n.col_offset)
+ def visit_Num(self, n: ast3.Num) -> Union[IntExpr, FloatExpr, ComplexExpr]:
if isinstance(n.n, int):
return IntExpr(n.n)
elif isinstance(n.n, float):
@@ -829,7 +844,7 @@ class ASTConverter(ast35.NodeTransformer):
# Str(string s)
@with_line
- def visit_Str(self, n: ast35.Str) -> Union[UnicodeExpr, StrExpr]:
+ def visit_Str(self, n: ast3.Str) -> Union[UnicodeExpr, StrExpr]:
if self.pyversion[0] >= 3 or self.is_stub:
# Hack: assume all string literals in Python 2 stubs are normal
# strs (i.e. not unicode). All stubs are parsed with the Python 3
@@ -841,9 +856,31 @@ class ASTConverter(ast35.NodeTransformer):
else:
return UnicodeExpr(n.s)
+ # Only available with typed_ast >= 0.6.2
+ if hasattr(ast3, 'JoinedStr'):
+ # JoinedStr(expr* values)
+ @with_line
+ def visit_JoinedStr(self, n: ast3.JoinedStr) -> Expression:
+ arg_count = len(n.values)
+ format_string = StrExpr('{}' * arg_count)
+ format_string.set_line(n.lineno, n.col_offset)
+ format_method = MemberExpr(format_string, 'format')
+ format_method.set_line(format_string)
+ format_args = self.translate_expr_list(n.values)
+ format_arg_kinds = [ARG_POS] * arg_count
+ result_expression = CallExpr(format_method,
+ format_args,
+ format_arg_kinds)
+ return result_expression
+
+ # FormattedValue(expr value)
+ @with_line
+ def visit_FormattedValue(self, n: ast3.FormattedValue) -> Expression:
+ return self.visit(n.value)
+
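For typed_ast >= 0.6.2 the converter now understands f-strings: a JoinedStr is lowered to a '{}'-per-piece template on which .format() is called, with every piece (literal fragments and formatted values alike) passed positionally, while FormattedValue simply forwards its wrapped expression, so conversions and format specs are not modeled. Roughly, for a simple case:

    # f"x={x}!" parses (under the pre-3.8 grammar typed_ast mirrors) as
    # JoinedStr(values=[Str('x='), FormattedValue(x), Str('!')]), which the
    # converter turns into the moral equivalent of:
    x = 42
    lowered = '{}{}{}'.format('x=', x, '!')
    assert lowered == f"x={x}!"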
# Bytes(bytes s)
@with_line
- def visit_Bytes(self, n: ast35.Bytes) -> Union[BytesExpr, StrExpr]:
+ def visit_Bytes(self, n: ast3.Bytes) -> Union[BytesExpr, StrExpr]:
# The following line is a bit hacky, but is the best way to maintain
# compatibility with how mypy currently parses the contents of bytes literals.
contents = str(n.s)[2:-1]
@@ -854,19 +891,19 @@ class ASTConverter(ast35.NodeTransformer):
return StrExpr(contents)
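The "hacky" slice relies on str() of a bytes object producing its repr, "b'...'": dropping the two-character prefix and the closing quote leaves the backslash-escaped contents, which is what gets stored in the literal node. For example:

    contents = str(b'abc\n')[2:-1]
    print(contents)   # abc\n  -- five characters; the newline stays as a backslash escape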
# NameConstant(singleton value)
- def visit_NameConstant(self, n: ast35.NameConstant) -> NameExpr:
+ def visit_NameConstant(self, n: ast3.NameConstant) -> NameExpr:
return NameExpr(str(n.value))
# Ellipsis
@with_line
- def visit_Ellipsis(self, n: ast35.Ellipsis) -> EllipsisExpr:
+ def visit_Ellipsis(self, n: ast3.Ellipsis) -> EllipsisExpr:
return EllipsisExpr()
# Attribute(expr value, identifier attr, expr_context ctx)
@with_line
- def visit_Attribute(self, n: ast35.Attribute) -> Union[MemberExpr, SuperExpr]:
- if (isinstance(n.value, ast35.Call) and
- isinstance(n.value.func, ast35.Name) and
+ def visit_Attribute(self, n: ast3.Attribute) -> Union[MemberExpr, SuperExpr]:
+ if (isinstance(n.value, ast3.Call) and
+ isinstance(n.value.func, ast3.Name) and
n.value.func.id == 'super'):
return SuperExpr(n.attr)
@@ -874,50 +911,65 @@ class ASTConverter(ast35.NodeTransformer):
# Subscript(expr value, slice slice, expr_context ctx)
@with_line
- def visit_Subscript(self, n: ast35.Subscript) -> IndexExpr:
+ def visit_Subscript(self, n: ast3.Subscript) -> IndexExpr:
return IndexExpr(self.visit(n.value), self.visit(n.slice))
# Starred(expr value, expr_context ctx)
@with_line
- def visit_Starred(self, n: ast35.Starred) -> StarExpr:
+ def visit_Starred(self, n: ast3.Starred) -> StarExpr:
return StarExpr(self.visit(n.value))
# Name(identifier id, expr_context ctx)
@with_line
- def visit_Name(self, n: ast35.Name) -> NameExpr:
+ def visit_Name(self, n: ast3.Name) -> NameExpr:
return NameExpr(n.id)
# List(expr* elts, expr_context ctx)
@with_line
- def visit_List(self, n: ast35.List) -> ListExpr:
+ def visit_List(self, n: ast3.List) -> ListExpr:
return ListExpr([self.visit(e) for e in n.elts])
# Tuple(expr* elts, expr_context ctx)
@with_line
- def visit_Tuple(self, n: ast35.Tuple) -> TupleExpr:
+ def visit_Tuple(self, n: ast3.Tuple) -> TupleExpr:
return TupleExpr([self.visit(e) for e in n.elts])
# --- slice ---
# Slice(expr? lower, expr? upper, expr? step)
- def visit_Slice(self, n: ast35.Slice) -> SliceExpr:
+ def visit_Slice(self, n: ast3.Slice) -> SliceExpr:
return SliceExpr(self.visit(n.lower),
self.visit(n.upper),
self.visit(n.step))
# ExtSlice(slice* dims)
- def visit_ExtSlice(self, n: ast35.ExtSlice) -> TupleExpr:
+ def visit_ExtSlice(self, n: ast3.ExtSlice) -> TupleExpr:
return TupleExpr(self.translate_expr_list(n.dims))
# Index(expr value)
- def visit_Index(self, n: ast35.Index) -> Node:
+ def visit_Index(self, n: ast3.Index) -> Node:
return self.visit(n.value)
-class TypeConverter(ast35.NodeTransformer):
+class TypeConverter(ast3.NodeTransformer): # type: ignore # typeshed PR #931
def __init__(self, errors: Errors, line: int = -1) -> None:
self.errors = errors
self.line = line
+ self.node_stack = [] # type: List[ast3.AST]
+
+ def visit(self, node: ast3.AST) -> Type:
+ """Modified visit -- keep track of the stack of nodes"""
+ self.node_stack.append(node)
+ try:
+ return super().visit(node)
+ finally:
+ self.node_stack.pop()
+
+ def parent(self) -> ast3.AST:
+ """Return the AST node above the one we are processing"""
+ if len(self.node_stack) < 2:
+ return None
+ return self.node_stack[-2]
def fail(self, msg: str, line: int, column: int) -> None:
self.errors.report(line, column, msg)
@@ -925,66 +977,127 @@ class TypeConverter(ast35.NodeTransformer):
def visit_raw_str(self, s: str) -> Type:
# An escape hatch that allows the AST walker in fastparse2 to
# directly hook into the Python 3.5 type converter in some cases
- # without needing to create an intermediary `ast35.Str` object.
+ # without needing to create an intermediary `ast3.Str` object.
return parse_type_comment(s.strip(), self.line, self.errors) or AnyType()
- def generic_visit(self, node: ast35.AST) -> Type: # type: ignore
+ def generic_visit(self, node: ast3.AST) -> Type: # type: ignore
self.fail(TYPE_COMMENT_AST_ERROR, self.line, getattr(node, 'col_offset', -1))
return AnyType()
def visit_NoneType(self, n: Any) -> Type:
return None
- def translate_expr_list(self, l: Sequence[ast35.AST]) -> List[Type]:
+ def translate_expr_list(self, l: Sequence[ast3.AST]) -> List[Type]:
return [self.visit(e) for e in l]
- def visit_Name(self, n: ast35.Name) -> Type:
+ def visit_Call(self, e: ast3.Call) -> Type:
+ # Parse the arg constructor
+ if not isinstance(self.parent(), ast3.List):
+ return self.generic_visit(e)
+ f = e.func
+ constructor = stringify_name(f)
+ if not constructor:
+ self.fail("Expected arg constructor name", e.lineno, e.col_offset)
+ name = None # type: Optional[str]
+ default_type = AnyType(implicit=True)
+ typ = default_type # type: Type
+ for i, arg in enumerate(e.args):
+ if i == 0:
+ typ = self.visit(arg)
+ elif i == 1:
+ name = self._extract_argument_name(arg)
+ else:
+ self.fail("Too many arguments for argument constructor",
+ f.lineno, f.col_offset)
+ for k in e.keywords:
+ value = k.value
+ if k.arg == "name":
+ if name is not None:
+ self.fail('"{}" gets multiple values for keyword argument "name"'.format(
+ constructor), f.lineno, f.col_offset)
+ name = self._extract_argument_name(value)
+ elif k.arg == "type":
+ if typ is not default_type:
+ self.fail('"{}" gets multiple values for keyword argument "type"'.format(
+ constructor), f.lineno, f.col_offset)
+ typ = self.visit(value)
+ else:
+ self.fail(
+ 'Unexpected argument "{}" for argument constructor'.format(k.arg),
+ value.lineno, value.col_offset)
+ return CallableArgument(typ, name, constructor, e.lineno, e.col_offset)
+
+ def translate_argument_list(self, l: Sequence[ast3.AST]) -> TypeList:
+ return TypeList([self.visit(e) for e in l], line=self.line)
+
+ def _extract_argument_name(self, n: ast3.expr) -> str:
+ if isinstance(n, ast3.Str):
+ return n.s.strip()
+ elif isinstance(n, ast3.NameConstant) and str(n.value) == 'None':
+ return None
+ self.fail('Expected string literal for argument name, got {}'.format(
+ type(n).__name__), self.line, 0)
+ return None
+
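TypeConverter.visit_Call only fires when the call sits directly inside a list, i.e. in practice inside the argument list of a Callable type written in a type comment, and it parses per-argument constructors in the style of mypy_extensions' Arg/DefaultArg into CallableArgument, taking the type positionally or as type=... and the name positionally or as name=.... A rough standalone illustration of the shape it accepts, using the stdlib ast module (the constructor names here are only examples):

    import ast

    expr = ast.parse("Callable[[Arg(int, 'x'), DefaultArg(str, 'y')], None]", mode='eval').body
    sl = expr.slice
    if not isinstance(sl, ast.Tuple):      # Python < 3.9 wraps the slice in an Index node
        sl = sl.value
    arg_list, _ret = sl.elts
    for call in arg_list.elts:
        constructor = call.func.id         # 'Arg', 'DefaultArg', ...
        typ = call.args[0].id              # first positional argument: the type
        name = ast.literal_eval(call.args[1]) if len(call.args) > 1 else None
        print(constructor, typ, name)
    # Arg int x
    # DefaultArg str y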
+ def visit_Name(self, n: ast3.Name) -> Type:
return UnboundType(n.id, line=self.line)
- def visit_NameConstant(self, n: ast35.NameConstant) -> Type:
+ def visit_NameConstant(self, n: ast3.NameConstant) -> Type:
return UnboundType(str(n.value))
# Str(string s)
- def visit_Str(self, n: ast35.Str) -> Type:
+ def visit_Str(self, n: ast3.Str) -> Type:
return parse_type_comment(n.s.strip(), self.line, self.errors) or AnyType()
# Subscript(expr value, slice slice, expr_context ctx)
- def visit_Subscript(self, n: ast35.Subscript) -> Type:
- if not isinstance(n.slice, ast35.Index):
+ def visit_Subscript(self, n: ast3.Subscript) -> Type:
+ if not isinstance(n.slice, ast3.Index):
self.fail(TYPE_COMMENT_SYNTAX_ERROR, self.line, getattr(n, 'col_offset', -1))
return AnyType()
- value = self.visit(n.value)
-
- assert isinstance(value, UnboundType)
- assert not value.args
-
empty_tuple_index = False
- if isinstance(n.slice.value, ast35.Tuple):
+ if isinstance(n.slice.value, ast3.Tuple):
params = self.translate_expr_list(n.slice.value.elts)
if len(n.slice.value.elts) == 0:
empty_tuple_index = True
else:
params = [self.visit(n.slice.value)]
- return UnboundType(value.name, params, line=self.line, empty_tuple_index=empty_tuple_index)
+ value = self.visit(n.value)
+ if isinstance(value, UnboundType) and not value.args:
+ return UnboundType(value.name, params, line=self.line,
+ empty_tuple_index=empty_tuple_index)
+ else:
+ self.fail(TYPE_COMMENT_AST_ERROR, self.line, getattr(n, 'col_offset', -1))
+ return AnyType()
- def visit_Tuple(self, n: ast35.Tuple) -> Type:
+ def visit_Tuple(self, n: ast3.Tuple) -> Type:
return TupleType(self.translate_expr_list(n.elts), None, implicit=True, line=self.line)
# Attribute(expr value, identifier attr, expr_context ctx)
- def visit_Attribute(self, n: ast35.Attribute) -> Type:
+ def visit_Attribute(self, n: ast3.Attribute) -> Type:
before_dot = self.visit(n.value)
- assert isinstance(before_dot, UnboundType)
- assert not before_dot.args
-
- return UnboundType("{}.{}".format(before_dot.name, n.attr), line=self.line)
+ if isinstance(before_dot, UnboundType) and not before_dot.args:
+ return UnboundType("{}.{}".format(before_dot.name, n.attr), line=self.line)
+ else:
+ self.fail(TYPE_COMMENT_AST_ERROR, self.line, getattr(n, 'col_offset', -1))
+ return AnyType()
# Ellipsis
- def visit_Ellipsis(self, n: ast35.Ellipsis) -> Type:
+ def visit_Ellipsis(self, n: ast3.Ellipsis) -> Type:
return EllipsisType(line=self.line)
# List(expr* elts, expr_context ctx)
- def visit_List(self, n: ast35.List) -> Type:
- return TypeList(self.translate_expr_list(n.elts), line=self.line)
+ def visit_List(self, n: ast3.List) -> Type:
+ return self.translate_argument_list(n.elts)
+
+
+def stringify_name(n: ast3.AST) -> Optional[str]:
+ if isinstance(n, ast3.Name):
+ return n.id
+ elif isinstance(n, ast3.Attribute):
+ sv = stringify_name(n.value)
+ if sv is not None:
+ return "{}.{}".format(sv, n.attr)
+ return None # Can't do it.
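stringify_name is now a module-level helper (it no longer needs self) used both for metaclass= keywords in visit_ClassDef and for the new argument-constructor parsing in TypeConverter.visit_Call; it flattens a Name/Attribute chain back into a dotted string. The same logic, mirrored against the stdlib ast module:

    import ast
    from typing import Optional

    def stringify_name(n: ast.AST) -> Optional[str]:
        if isinstance(n, ast.Name):
            return n.id
        elif isinstance(n, ast.Attribute):
            sv = stringify_name(n.value)
            if sv is not None:
                return "{}.{}".format(sv, n.attr)
        return None  # anything else (calls, subscripts, ...) is not a plain dotted name

    print(stringify_name(ast.parse("abc.ABCMeta", mode="eval").body))  # abc.ABCMeta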
diff --git a/mypy/fastparse2.py b/mypy/fastparse2.py
index 2f693cb..b7d5e9d 100644
--- a/mypy/fastparse2.py
+++ b/mypy/fastparse2.py
@@ -18,7 +18,9 @@ from functools import wraps
import sys
from typing import Tuple, Union, TypeVar, Callable, Sequence, Optional, Any, cast, List, Set
-from mypy.sharedparse import special_function_elide_names, argument_elide_name
+from mypy.sharedparse import (
+ special_function_elide_names, argument_elide_name,
+)
from mypy.nodes import (
MypyFile, Node, ImportBase, Import, ImportAll, ImportFrom, FuncDef, OverloadedFuncDef,
ClassDef, Decorator, Block, Var, OperatorAssignmentStmt,
@@ -28,10 +30,10 @@ from mypy.nodes import (
TupleExpr, GeneratorExpr, ListComprehension, ListExpr, ConditionalExpr,
DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr,
FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr, SliceExpr, OpExpr,
- UnaryExpr, FuncExpr, ComparisonExpr, DictionaryComprehension,
+ UnaryExpr, LambdaExpr, ComparisonExpr, DictionaryComprehension,
SetComprehension, ComplexExpr, EllipsisExpr, YieldExpr, Argument,
Expression, Statement, BackquoteExpr, PrintStmt, ExecStmt,
- ARG_POS, ARG_OPT, ARG_STAR, ARG_NAMED, ARG_STAR2
+ ARG_POS, ARG_OPT, ARG_STAR, ARG_NAMED, ARG_STAR2, OverloadPart, check_arg_names,
)
from mypy.types import (
Type, CallableType, AnyType, UnboundType, EllipsisType
@@ -44,16 +46,23 @@ from mypy.fastparse import TypeConverter, parse_type_comment
try:
from typed_ast import ast27
- from typed_ast import ast35
+ from typed_ast import ast3
except ImportError:
if sys.version_info.minor > 2:
- print('You must install the typed_ast package before you can run mypy'
- ' with `--fast-parser`.\n'
- 'You can do this with `python3 -m pip install typed-ast`.',
- file=sys.stderr)
+ try:
+ from typed_ast import ast35 # type: ignore
+ except ImportError:
+ print('The typed_ast package is not installed.\n'
+ 'You can install it with `python3 -m pip install typed-ast`.',
+ file=sys.stderr)
+ else:
+ print('You need a more recent version of the typed_ast package.\n'
+ 'You can update to the latest version with '
+ '`python3 -m pip install -U typed-ast`.',
+ file=sys.stderr)
else:
- print('The typed_ast package required by --fast-parser is only compatible with'
- ' Python 3.3 and greater.')
+ print('Mypy requires the typed_ast package, which is only compatible with\n'
+ 'Python 3.3 and greater.', file=sys.stderr)
sys.exit(1)
T = TypeVar('T', bound=Union[ast27.expr, ast27.stmt])
@@ -78,7 +87,7 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
if errors is None:
errors = Errors()
raise_on_error = True
- errors.set_file('<input>' if fnam is None else fnam)
+ errors.set_file('<input>' if fnam is None else fnam, None)
is_stub_file = bool(fnam) and fnam.endswith('.pyi')
try:
assert pyversion[0] < 3 and not is_stub_file
@@ -218,11 +227,12 @@ class ASTConverter(ast27.NodeTransformer):
def fix_function_overloads(self, stmts: List[Statement]) -> List[Statement]:
ret = [] # type: List[Statement]
- current_overload = []
+ current_overload = [] # type: List[OverloadPart]
current_overload_name = None
- # mypy doesn't actually check that the decorator is literally @overload
for stmt in stmts:
- if isinstance(stmt, Decorator) and stmt.name() == current_overload_name:
+ if (current_overload_name is not None
+ and isinstance(stmt, (Decorator, FuncDef))
+ and stmt.name() == current_overload_name):
current_overload.append(stmt)
else:
if len(current_overload) == 1:
@@ -291,11 +301,11 @@ class ASTConverter(ast27.NodeTransformer):
return_type = None
elif n.type_comment is not None and len(n.type_comment) > 0:
try:
- func_type_ast = ast35.parse(n.type_comment, '<func_type>', 'func_type')
- assert isinstance(func_type_ast, ast35.FunctionType)
+ func_type_ast = ast3.parse(n.type_comment, '<func_type>', 'func_type')
+ assert isinstance(func_type_ast, ast3.FunctionType)
# for ellipsis arg
if (len(func_type_ast.argtypes) == 1 and
- isinstance(func_type_ast.argtypes[0], ast35.Ellipsis)):
+ isinstance(func_type_ast.argtypes[0], ast3.Ellipsis)):
arg_types = [a.type_annotation if a.type_annotation is not None else AnyType()
for a in args]
else:
@@ -320,9 +330,6 @@ class ASTConverter(ast27.NodeTransformer):
for arg, arg_type in zip(args, arg_types):
self.set_type_optional(arg_type, arg.initializer)
- if isinstance(return_type, UnboundType):
- return_type.is_ret_type = True
-
func_type = None
if any(arg_types) or return_type:
if len(arg_types) != 1 and any(isinstance(t, EllipsisType) for t in arg_types):
@@ -432,12 +439,10 @@ class ASTConverter(ast27.NodeTransformer):
new_args.append(Argument(Var(n.kwarg), typ, None, ARG_STAR2))
names.append(n.kwarg)
- seen_names = set() # type: Set[str]
- for name in names:
- if name in seen_names:
- self.fail("duplicate argument '{}' in function definition".format(name), line, 0)
- break
- seen_names.add(name)
+ # We don't have any context object to give, but we have closed over the line number
+ def fail_arg(msg: str, arg: None) -> None:
+ self.fail(msg, line, 0)
+ check_arg_names(names, [None] * len(names), fail_arg)
return new_args, decompose_stmts
@@ -719,7 +724,7 @@ class ASTConverter(ast27.NodeTransformer):
# Lambda(arguments args, expr body)
@with_line
- def visit_Lambda(self, n: ast27.Lambda) -> FuncExpr:
+ def visit_Lambda(self, n: ast27.Lambda) -> LambdaExpr:
args, decompose_stmts = self.transform_args(n.args, n.lineno)
n_body = ast27.Return(n.body)
@@ -729,7 +734,7 @@ class ASTConverter(ast27.NodeTransformer):
if decompose_stmts:
body.body = decompose_stmts + body.body
- return FuncExpr(args, body)
+ return LambdaExpr(args, body)
# IfExp(expr test, expr body, expr orelse)
@with_line
@@ -769,7 +774,8 @@ class ASTConverter(ast27.NodeTransformer):
self.visit(n.value),
targets,
iters,
- ifs_list)
+ ifs_list,
+ [False for _ in n.generators])
# GeneratorExp(expr elt, comprehension* generators)
@with_line
@@ -780,7 +786,8 @@ class ASTConverter(ast27.NodeTransformer):
return GeneratorExpr(self.visit(n.elt),
targets,
iters,
- ifs_list)
+ ifs_list,
+ [False for _ in n.generators])
# Yield(expr? value)
@with_line
diff --git a/mypy/fixup.py b/mypy/fixup.py
index 8375b9f..5854797 100644
--- a/mypy/fixup.py
+++ b/mypy/fixup.py
@@ -11,17 +11,19 @@ from mypy.nodes import (
from mypy.types import (
CallableType, EllipsisType, Instance, Overloaded, TupleType, TypedDictType,
TypeList, TypeVarType, UnboundType, UnionType, TypeVisitor,
- TypeType
+ TypeType, NOT_READY
)
from mypy.visitor import NodeVisitor
-def fixup_module_pass_one(tree: MypyFile, modules: Dict[str, MypyFile]) -> None:
- node_fixer = NodeFixer(modules)
+def fixup_module_pass_one(tree: MypyFile, modules: Dict[str, MypyFile],
+ quick_and_dirty: bool) -> None:
+ node_fixer = NodeFixer(modules, quick_and_dirty)
node_fixer.visit_symbol_table(tree.names)
-def fixup_module_pass_two(tree: MypyFile, modules: Dict[str, MypyFile]) -> None:
+def fixup_module_pass_two(tree: MypyFile, modules: Dict[str, MypyFile],
+ quick_and_dirty: bool) -> None:
compute_all_mros(tree.names, modules)
@@ -38,11 +40,10 @@ def compute_all_mros(symtab: SymbolTable, modules: Dict[str, MypyFile]) -> None:
class NodeFixer(NodeVisitor[None]):
current_info = None # type: Optional[TypeInfo]
- def __init__(self, modules: Dict[str, MypyFile], type_fixer: 'TypeFixer' = None) -> None:
+ def __init__(self, modules: Dict[str, MypyFile], quick_and_dirty: bool) -> None:
self.modules = modules
- if type_fixer is None:
- type_fixer = TypeFixer(self.modules)
- self.type_fixer = type_fixer
+ self.quick_and_dirty = quick_and_dirty
+ self.type_fixer = TypeFixer(self.modules, quick_and_dirty)
# NOTE: This method isn't (yet) part of the NodeVisitor API.
def visit_type_info(self, info: TypeInfo) -> None:
@@ -63,6 +64,12 @@ class NodeFixer(NodeVisitor[None]):
info._promote.accept(self.type_fixer)
if info.tuple_type:
info.tuple_type.accept(self.type_fixer)
+ if info.typeddict_type:
+ info.typeddict_type.accept(self.type_fixer)
+ if info.declared_metaclass:
+ info.declared_metaclass.accept(self.type_fixer)
+ if info.metaclass_type:
+ info.metaclass_type.accept(self.type_fixer)
finally:
self.current_info = save_info
@@ -76,10 +83,13 @@ class NodeFixer(NodeVisitor[None]):
if cross_ref in self.modules:
value.node = self.modules[cross_ref]
else:
- stnode = lookup_qualified_stnode(self.modules, cross_ref)
- assert stnode is not None, "Could not find cross-ref %s" % (cross_ref,)
- value.node = stnode.node
- value.type_override = stnode.type_override
+ stnode = lookup_qualified_stnode(self.modules, cross_ref,
+ self.quick_and_dirty)
+ if stnode is not None:
+ value.node = stnode.node
+ value.type_override = stnode.type_override
+ elif not self.quick_and_dirty:
+ assert stnode is not None, "Could not find cross-ref %s" % (cross_ref,)
else:
if isinstance(value.node, TypeInfo):
# TypeInfo has no accept(). TODO: Add it?
@@ -102,6 +112,8 @@ class NodeFixer(NodeVisitor[None]):
o.type.accept(self.type_fixer)
for item in o.items:
item.accept(self)
+ if o.impl:
+ o.impl.accept(self)
def visit_decorator(self, d: Decorator) -> None:
if self.current_info is not None:
@@ -132,8 +144,9 @@ class NodeFixer(NodeVisitor[None]):
class TypeFixer(TypeVisitor[None]):
- def __init__(self, modules: Dict[str, MypyFile]) -> None:
+ def __init__(self, modules: Dict[str, MypyFile], quick_and_dirty: bool) -> None:
self.modules = modules
+ self.quick_and_dirty = quick_and_dirty
def visit_instance(self, inst: Instance) -> None:
# TODO: Combine Instances that are exactly the same?
@@ -141,13 +154,13 @@ class TypeFixer(TypeVisitor[None]):
if type_ref is None:
return # We've already been here.
del inst.type_ref
- node = lookup_qualified(self.modules, type_ref)
+ node = lookup_qualified(self.modules, type_ref, self.quick_and_dirty)
if isinstance(node, TypeInfo):
inst.type = node
# TODO: Is this needed or redundant?
# Also fix up the bases, just in case.
for base in inst.type.bases:
- if base.type is None:
+ if base.type is NOT_READY:
base.accept(self)
for a in inst.args:
a.accept(self)
@@ -170,9 +183,6 @@ class TypeFixer(TypeVisitor[None]):
val.accept(self)
v.upper_bound.accept(self)
- def visit_ellipsis_type(self, e: EllipsisType) -> None:
- pass # Nothing to descend into.
-
def visit_overloaded(self, t: Overloaded) -> None:
for ct in t.items():
ct.accept(self)
@@ -203,10 +213,6 @@ class TypeFixer(TypeVisitor[None]):
if tdt.fallback is not None:
tdt.fallback.accept(self)
- def visit_type_list(self, tl: TypeList) -> None:
- for t in tl.items:
- t.accept(self)
-
def visit_type_var(self, tvt: TypeVarType) -> None:
if tvt.values:
for vt in tvt.values:
@@ -230,19 +236,24 @@ class TypeFixer(TypeVisitor[None]):
t.item.accept(self)
-def lookup_qualified(modules: Dict[str, MypyFile], name: str) -> SymbolNode:
- stnode = lookup_qualified_stnode(modules, name)
+def lookup_qualified(modules: Dict[str, MypyFile], name: str,
+ quick_and_dirty: bool) -> Optional[SymbolNode]:
+ stnode = lookup_qualified_stnode(modules, name, quick_and_dirty)
if stnode is None:
return None
else:
return stnode.node
-def lookup_qualified_stnode(modules: Dict[str, MypyFile], name: str) -> SymbolTableNode:
+def lookup_qualified_stnode(modules: Dict[str, MypyFile], name: str,
+ quick_and_dirty: bool) -> Optional[SymbolTableNode]:
head = name
rest = []
while True:
- assert '.' in head, "Cannot find %s" % (name,)
+ if '.' not in head:
+ if not quick_and_dirty:
+ assert '.' in head, "Cannot find %s" % (name,)
+ return None
head, tail = head.rsplit('.', 1)
rest.append(tail)
mod = modules.get(head)
@@ -250,9 +261,15 @@ def lookup_qualified_stnode(modules: Dict[str, MypyFile], name: str) -> SymbolTa
break
names = mod.names
while True:
- assert rest, "Cannot find %s" % (name,)
+ if not rest:
+ if not quick_and_dirty:
+ assert rest, "Cannot find %s" % (name,)
+ return None
key = rest.pop()
- assert key in names, "Cannot find %s for %s" % (key, name)
+ if key not in names:
+ return None
+ elif not quick_and_dirty:
+ assert key in names, "Cannot find %s for %s" % (key, name)
stnode = names[key]
if not rest:
return stnode
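In quick_and_dirty (incremental) mode the cross-reference lookups now tolerate missing names by returning None instead of tripping an assertion, and the fix-up for that entry is simply skipped. A simplified sketch of the dotted-name resolution over plain nested dicts (standing in for the real MypyFile.names / SymbolTable objects; the descent step is simplified):

    from typing import Dict, Optional

    def lookup_qualified(modules: Dict[str, dict], name: str,
                         quick_and_dirty: bool) -> Optional[object]:
        # Peel components off the right of 'pkg.mod.attr' until the remaining
        # prefix names a known module, then walk the peeled parts through that
        # module's name table.
        head, rest = name, []
        while '.' in head:
            head, tail = head.rsplit('.', 1)
            rest.append(tail)
            if head in modules:
                names = modules[head]
                while rest:
                    key = rest.pop()
                    if key not in names:
                        return None                 # tolerated instead of asserting
                    if not rest:
                        return names[key]
                    names = names[key]              # descend into a nested table (simplified)
        if quick_and_dirty:
            return None                             # dangling cross-reference
        raise AssertionError("Cannot find %s" % name)

    mods = {'pkg.mod': {'Cls': 'TypeInfo for pkg.mod.Cls'}}
    print(lookup_qualified(mods, 'pkg.mod.Cls', quick_and_dirty=True))    # TypeInfo for pkg.mod.Cls
    print(lookup_qualified(mods, 'pkg.other.Cls', quick_and_dirty=True))  # None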
diff --git a/mypy/indirection.py b/mypy/indirection.py
index b36d999..2e69c5e 100644
--- a/mypy/indirection.py
+++ b/mypy/indirection.py
@@ -2,7 +2,7 @@ from typing import Dict, Iterable, List, Optional, Set
from abc import abstractmethod
from mypy.visitor import NodeVisitor
-from mypy.types import TypeVisitor
+from mypy.types import SyntheticTypeVisitor
from mypy.nodes import MODULE_REF
import mypy.nodes as nodes
import mypy.types as types
@@ -19,7 +19,7 @@ def extract_module_names(type_name: Optional[str]) -> List[str]:
return []
-class TypeIndirectionVisitor(TypeVisitor[Set[str]]):
+class TypeIndirectionVisitor(SyntheticTypeVisitor[Set[str]]):
"""Returns all module references within a particular type."""
def __init__(self) -> None:
@@ -45,15 +45,12 @@ class TypeIndirectionVisitor(TypeVisitor[Set[str]]):
def visit_type_list(self, t: types.TypeList) -> Set[str]:
return self._visit(*t.items)
- def visit_error_type(self, t: types.ErrorType) -> Set[str]:
- return set()
+ def visit_callable_argument(self, t: types.CallableArgument) -> Set[str]:
+ return self._visit(t.typ)
def visit_any(self, t: types.AnyType) -> Set[str]:
return set()
- def visit_void(self, t: types.Void) -> Set[str]:
- return set()
-
def visit_none_type(self, t: types.NoneTyp) -> Set[str]:
return set()
diff --git a/mypy/infer.py b/mypy/infer.py
index 0047fe4..6820a2c 100644
--- a/mypy/infer.py
+++ b/mypy/infer.py
@@ -12,7 +12,7 @@ def infer_function_type_arguments(callee_type: CallableType,
arg_types: List[Optional[Type]],
arg_kinds: List[int],
formal_to_actual: List[List[int]],
- strict: bool = True) -> List[Type]:
+ strict: bool = True) -> List[Optional[Type]]:
"""Infer the type arguments of a generic function.
Return an array of lower bound types for the type variables -1 (at
@@ -36,7 +36,7 @@ def infer_function_type_arguments(callee_type: CallableType,
def infer_type_arguments(type_var_ids: List[TypeVarId],
- template: Type, actual: Type) -> List[Type]:
+ template: Type, actual: Type) -> List[Optional[Type]]:
# Like infer_function_type_arguments, but only match a single type
# against a generic type.
constraints = infer_constraints(template, actual, SUBTYPE_OF)
diff --git a/mypy/join.py b/mypy/join.py
index d14b83d..586d281 100644
--- a/mypy/join.py
+++ b/mypy/join.py
@@ -1,16 +1,16 @@
"""Calculation of the least upper bound types (joins)."""
from collections import OrderedDict
-from typing import cast, List
+from typing import cast, List, Optional
from mypy.types import (
- Type, AnyType, NoneTyp, Void, TypeVisitor, Instance, UnboundType,
- ErrorType, TypeVarType, CallableType, TupleType, TypedDictType, ErasedType, TypeList,
+ Type, AnyType, NoneTyp, TypeVisitor, Instance, UnboundType,
+ TypeVarType, CallableType, TupleType, TypedDictType, ErasedType, TypeList,
UnionType, FunctionLike, Overloaded, PartialType, DeletedType,
UninhabitedType, TypeType, true_or_false
)
from mypy.maptype import map_instance_to_supertype
-from mypy.subtypes import is_subtype, is_equivalent, is_subtype_ignoring_tvars
+from mypy.subtypes import is_subtype, is_equivalent, is_subtype_ignoring_tvars, is_proper_subtype
from mypy import experiments
@@ -29,10 +29,10 @@ def join_simple(declaration: Type, s: Type, t: Type) -> Type:
if isinstance(s, ErasedType):
return t
- if is_subtype(s, t):
+ if is_proper_subtype(s, t):
return t
- if is_subtype(t, s):
+ if is_proper_subtype(t, s):
return s
if isinstance(declaration, UnionType):
@@ -63,8 +63,6 @@ def join_types(s: Type, t: Type) -> Type:
"""Return the least upper bound of s and t.
For example, the join of 'int' and 'object' is 'object'.
-
- If the join does not exist, return an ErrorType instance.
"""
if (s.can_be_true, s.can_be_false) != (t.can_be_true, t.can_be_false):
# if types are restricted in different ways, use the more general versions
@@ -101,10 +99,7 @@ class TypeJoinVisitor(TypeVisitor[Type]):
self.s = s
def visit_unbound_type(self, t: UnboundType) -> Type:
- if isinstance(self.s, Void) or isinstance(self.s, ErrorType):
- return ErrorType()
- else:
- return AnyType()
+ return AnyType()
def visit_union_type(self, t: UnionType) -> Type:
if is_subtype(self.s, t):
@@ -112,48 +107,25 @@ class TypeJoinVisitor(TypeVisitor[Type]):
else:
return UnionType.make_simplified_union([self.s, t])
- def visit_error_type(self, t: ErrorType) -> Type:
- return t
-
- def visit_type_list(self, t: TypeList) -> Type:
- assert False, 'Not supported'
-
def visit_any(self, t: AnyType) -> Type:
return t
- def visit_void(self, t: Void) -> Type:
- if isinstance(self.s, Void):
- return t
- else:
- return ErrorType()
-
def visit_none_type(self, t: NoneTyp) -> Type:
if experiments.STRICT_OPTIONAL:
if isinstance(self.s, (NoneTyp, UninhabitedType)):
return t
elif isinstance(self.s, UnboundType):
return AnyType()
- elif isinstance(self.s, Void) or isinstance(self.s, ErrorType):
- return ErrorType()
else:
return UnionType.make_simplified_union([self.s, t])
else:
- if not isinstance(self.s, Void):
- return self.s
- else:
- return self.default(self.s)
+ return self.s
def visit_uninhabited_type(self, t: UninhabitedType) -> Type:
- if not isinstance(self.s, Void):
- return self.s
- else:
- return self.default(self.s)
+ return self.s
def visit_deleted_type(self, t: DeletedType) -> Type:
- if not isinstance(self.s, Void):
- return self.s
- else:
- return self.default(self.s)
+ return self.s
def visit_erased_type(self, t: ErasedType) -> Type:
return self.s
@@ -177,9 +149,14 @@ class TypeJoinVisitor(TypeVisitor[Type]):
return self.default(self.s)
def visit_callable_type(self, t: CallableType) -> Type:
- # TODO: Consider subtyping instead of just similarity.
if isinstance(self.s, CallableType) and is_similar_callables(t, self.s):
- return combine_similar_callables(t, self.s)
+ if is_equivalent(t, self.s):
+ return combine_similar_callables(t, self.s)
+ result = join_similar_callables(t, self.s)
+ if any(isinstance(tp, (NoneTyp, UninhabitedType)) for tp in result.arg_types):
+ # We don't want to return an unusable Callable, so attempt a fallback instead.
+ return join_types(t.fallback, self.s)
+ return result
elif isinstance(self.s, Overloaded):
# Switch the order of arguments so that we'll get to visit_overloaded.
return join_types(t, self.s)
@@ -214,7 +191,7 @@ class TypeJoinVisitor(TypeVisitor[Type]):
# join(Ov([int, Any] -> Any, [str, Any] -> Any), [Any, int] -> Any) ==
# Ov([Any, int] -> Any, [Any, int] -> Any)
#
- # TODO: Use callable subtyping instead of just similarity.
+ # TODO: Consider more cases of callable subtyping.
result = [] # type: List[CallableType]
s = self.s
if isinstance(s, FunctionLike):
@@ -222,7 +199,10 @@ class TypeJoinVisitor(TypeVisitor[Type]):
for t_item in t.items():
for s_item in s.items():
if is_similar_callables(t_item, s_item):
- result.append(combine_similar_callables(t_item, s_item))
+ if is_equivalent(t_item, s_item):
+ result.append(combine_similar_callables(t_item, s_item))
+ elif is_subtype(t_item, s_item):
+ result.append(s_item)
if result:
# TODO: Simplify redundancies from the result.
if len(result) == 1:
@@ -279,8 +259,6 @@ class TypeJoinVisitor(TypeVisitor[Type]):
return object_from_instance(typ)
elif isinstance(typ, UnboundType):
return AnyType()
- elif isinstance(typ, Void) or isinstance(typ, ErrorType):
- return ErrorType()
elif isinstance(typ, TupleType):
return self.default(typ.fallback)
elif isinstance(typ, TypedDictType):
@@ -295,8 +273,6 @@ class TypeJoinVisitor(TypeVisitor[Type]):
def join_instances(t: Instance, s: Instance) -> Type:
"""Calculate the join of two instance types.
-
- Return ErrorType if the result is ambiguous.
"""
if t.type == s.type:
# Simplest case: join two types with the same base type (but
@@ -329,7 +305,7 @@ def join_instances_via_supertype(t: Instance, s: Instance) -> Type:
# Compute the "best" supertype of t when joined with s.
# The definition of "best" may evolve; for now it is the one with
# the longest MRO. Ties are broken by using the earlier base.
- best = None # type: Type
+ best = None # type: Optional[Type]
for base in t.type.bases:
mapped = map_instance_to_supertype(t, base.type)
res = join_instances(mapped, s)
@@ -352,12 +328,30 @@ def is_better(t: Type, s: Type) -> bool:
def is_similar_callables(t: CallableType, s: CallableType) -> bool:
- """Return True if t and s are equivalent and have identical numbers of
+ """Return True if t and s have identical numbers of
arguments, default arguments and varargs.
"""
- return (len(t.arg_types) == len(s.arg_types) and t.min_args == s.min_args
- and t.is_var_arg == s.is_var_arg and is_equivalent(t, s))
+ return (len(t.arg_types) == len(s.arg_types) and t.min_args == s.min_args and
+ t.is_var_arg == s.is_var_arg)
+
+
+def join_similar_callables(t: CallableType, s: CallableType) -> CallableType:
+ from mypy.meet import meet_types
+ arg_types = [] # type: List[Type]
+ for i in range(len(t.arg_types)):
+ arg_types.append(meet_types(t.arg_types[i], s.arg_types[i]))
+ # The TODO in combine_similar_callables also applies here (names and kinds)
+ # The fallback type can be either 'function' or 'type'. The result should have 'type' as
+ # fallback only if both operands have it as 'type'.
+ if t.fallback.type.fullname() != 'builtins.type':
+ fallback = t.fallback
+ else:
+ fallback = s.fallback
+ return t.copy_modified(arg_types=arg_types,
+ ret_type=join_types(t.ret_type, s.ret_type),
+ fallback=fallback,
+ name=None)
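join_similar_callables applies the usual function-type variance rules: argument types are met (the joined callable may only accept what both operands accept) and return types are joined, with the 'function' fallback preferred unless both operands use 'type'. As a hypothetical typing-level illustration (not mypy API):

    from typing import Callable

    def f(x: int) -> int: ...
    def g(x: object) -> str: ...

    # A supertype of both callables accepts only what both accept (meet of the
    # parameter types, here int) and promises only what both promise (join of
    # the return types, here object):
    h: Callable[[int], object]
    h = f
    h = g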
def combine_similar_callables(t: CallableType, s: CallableType) -> CallableType:
@@ -387,13 +381,8 @@ def object_from_instance(instance: Instance) -> Instance:
def join_type_list(types: List[Type]) -> Type:
if not types:
# This is a little arbitrary but reasonable. Any empty tuple should be compatible
- # with all variable length tuples, and this makes it possible. A better approach
- # would be to use a special bottom type, which we do when strict Optional
- # checking is enabled.
- if experiments.STRICT_OPTIONAL:
- return UninhabitedType()
- else:
- return NoneTyp()
+ # with all variable length tuples, and this makes it possible.
+ return UninhabitedType()
joined = types[0]
for t in types[1:]:
joined = join_types(joined, t)
diff --git a/mypy/lex.py b/mypy/lex.py
deleted file mode 100644
index 2ef7662..0000000
--- a/mypy/lex.py
+++ /dev/null
@@ -1,904 +0,0 @@
-"""Lexical analyzer for mypy.
-
-Translate a string that represents a file or a compilation unit to a list of
-tokens.
-
-This module can be run as a script (lex.py FILE).
-"""
-
-import re
-
-from mypy.util import short_type, find_python_encoding
-from mypy import defaults
-from typing import List, Callable, Dict, Any, Match, Pattern, Set, Union, Tuple
-
-
-class Token:
- """Base class for all tokens."""
-
- def __init__(self, string: str, pre: str = '') -> None:
- """Initialize a token.
-
- Arguments:
- string: Token string in program text
- pre: Space, comments etc. before token
- """
-
- self.string = string
- self.pre = pre
- self.line = 0
- self.column = 0
-
- def __repr__(self) -> str:
- """The representation is of form 'Keyword( if)'."""
- t = short_type(self)
- return t + '(' + self.fix(self.pre) + self.fix(self.string) + ')'
-
- def rep(self) -> str:
- return self.pre + self.string
-
- def fix(self, s: str) -> str:
- """Replace common non-printable chars with escape sequences.
-
- Do not use repr() since we don't want to duplicate backslashes.
- """
- return s.replace('\n', '\\n').replace('\t', '\\t').replace('\r', '\\r')
-
-
-# Token classes
-
-
-class Break(Token):
- """Statement break (line break or semicolon)"""
-
-
-class Indent(Token):
- """Increase block indent level."""
-
-
-class Dedent(Token):
- """Decrease block indent level."""
-
-
-class Eof(Token):
- """End of file"""
-
-
-class Keyword(Token):
- """Reserved word (other than keyword operators; they use Op).
-
- Examples: if, class, while, def.
- """
-
-
-class Name(Token):
- """An alphanumeric identifier"""
-
-
-class IntLit(Token):
- """Integer literal"""
-
-
-class StrLit(Token):
- """String literal"""
-
- def parsed(self) -> str:
- """Return the parsed contents of the literal."""
- return _parse_str_literal(self.string)
-
-
-class BytesLit(Token):
- """Bytes literal"""
-
- def parsed(self) -> str:
- """Return the parsed contents of the literal."""
- return _parse_str_literal(self.string)
-
-
-class UnicodeLit(Token):
- """Unicode literal (Python 2.x)"""
-
- def parsed(self) -> str:
- """Return the parsed contents of the literal."""
- return _parse_str_literal(self.string)
-
-
-class FloatLit(Token):
- """Float literal"""
-
-
-class ComplexLit(Token):
- """Complex literal"""
-
-
-class Punct(Token):
- """Punctuator (e.g. comma, '(' or '=')"""
-
-
-class Colon(Token):
- pass
-
-
-class EllipsisToken(Token):
- pass
-
-
-class Op(Token):
- """Operator (e.g. '+' or 'in')"""
-
-
-class Bom(Token):
- """Byte order mark (at the start of a file)"""
-
-
-class LexError(Token):
- """Lexer error token"""
-
- def __init__(self, string: str, type: int, message: str = None) -> None:
- """Initialize token.
-
- The type argument is one of the error types below.
- """
- super().__init__(string)
- self.type = type
- self.message = message
-
- def __str__(self) -> str:
- if self.message:
- return 'LexError(%s)' % self.message
- else:
- return super().__str__()
-
-
-# Lexer error types
-NUMERIC_LITERAL_ERROR = 0
-UNTERMINATED_STRING_LITERAL = 1
-INVALID_CHARACTER = 2
-DECODE_ERROR = 3
-INVALID_BACKSLASH = 4
-INVALID_DEDENT = 5
-
-
-def lex(string: Union[str, bytes], first_line: int = 1,
- pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
- is_stub_file: bool = False) -> Tuple[List[Token], Set[int]]:
- """Analyze string, and return an array of token objects and the lines to ignore.
-
- The last token is always Eof. The intention is to ignore any
- semantic and type check errors on the ignored lines.
- """
- l = Lexer(pyversion, is_stub_file=is_stub_file)
- l.lex(string, first_line)
- return l.tok, l.ignored_lines
-
-
-# Reserved words (not including operators)
-keywords_common = set([
- 'as', 'assert', 'break', 'class', 'continue', 'def', 'del', 'elif',
- 'else', 'except', 'finally', 'from', 'for', 'global', 'if', 'import',
- 'lambda', 'pass', 'raise', 'return', 'try', 'while', 'with',
- 'yield'])
-
-# Reserved words specific for Python version 2
-# TODO (jukka): 'print' should be here, but it breaks the parsing of Python 2
-# builtins, since they also define the function 'print'.
-keywords2 = set([]) # type: Set[str]
-
-# Reserved words specific for Python version 3
-keywords3 = set(['nonlocal'])
-
-# Alphabetical operators (reserved words)
-alpha_operators = set(['in', 'is', 'not', 'and', 'or'])
-
-# String literal prefixes
-str_prefixes = set(['r', 'b', 'br', 'rb', 'u', 'ur', 'R', 'B', 'U'])
-
-# List of regular expressions that match non-alphabetical operators
-operators = [re.compile('[-+*/<>.%&|^~]'),
- re.compile('==|!=|<=|>=|\\*\\*|@|//|<<|>>|<>')]
-
-# List of regular expressions that match punctuator tokens
-punctuators = [re.compile('[=,()@`]|(->)'),
- re.compile('\\['),
- re.compile(']'),
- re.compile('([-+*/%@&|^]|\\*\\*|//|<<|>>)=')]
-
-
-# Map single-character string escape sequences to corresponding characters.
-escape_map = {'a': '\x07',
- 'b': '\x08',
- 'f': '\x0c',
- 'n': '\x0a',
- 'r': '\x0d',
- 't': '\x09',
- 'v': '\x0b',
- '"': '"',
- "'": "'"}
-
-
-# Matches the optional prefix of a string literal, e.g. the 'r' in r"foo".
-str_prefix_re = re.compile('[rRbBuU]*')
-
-# Matches an escape sequence in a string, e.g. \n or \x4F.
-escape_re = re.compile(
- "\\\\([abfnrtv'\"]|x[0-9a-fA-F]{2}|u[0-9a-fA-F]{4}|[0-7]{1,3})")
-
-
-def _parse_str_literal(string: str) -> str:
- """Translate escape sequences in str literal to the corresponding chars.
-
- For example, \t is translated to the tab character (ascii 9).
-
- Return the translated contents of the literal. Also handle raw and
- triple-quoted string literals.
- """
-
- prefix = str_prefix_re.match(string).group(0).lower()
- s = string[len(prefix):]
- if s.startswith("'''") or s.startswith('"""'):
- return s[3:-3]
- elif 'r' in prefix or 'R' in prefix:
- return s[1:-1].replace('\\' + s[0], s[0])
- else:
- return escape_re.sub(lambda m: escape_repl(m, prefix), s[1:-1])
-
-
-def escape_repl(m: Match[str], prefix: str) -> str:
- """Translate a string escape sequence, e.g. \t -> the tab character.
-
- Assume that the Match object is from escape_re.
- """
-
- seq = m.group(1)
- if len(seq) == 1 and seq in escape_map:
- # Single-character escape sequence, e.g. \n.
- return escape_map[seq]
- elif seq.startswith('x'):
- # Hexadecimal sequence \xNN.
- return chr(int(seq[1:], 16))
- elif seq.startswith('u'):
- # Unicode sequence \uNNNN.
- if 'b' not in prefix and 'B' not in prefix:
- return chr(int(seq[1:], 16))
- else:
- return '\\' + seq
- else:
- # Octal sequence.
- ord = int(seq, 8)
- if 'b' in prefix and 'B' in prefix:
- # Make sure code is no larger than 255 for bytes literals.
- ord = ord % 256
- return chr(ord)
-
-
-class Lexer:
- """Lexical analyzer."""
-
- i = 0 # Current string index (into s)
- s = '' # The string being analyzed
- line = 0 # Current line number
- column = 0 # Current column number
- pre_whitespace = '' # Whitespace and comments before the next token
- enc = '' # Encoding
-
- # Generated tokens
- tok = None # type: List[Token]
-
- # Table from byte character value to lexer method. E.g. entry at ord('0')
- # contains the method lex_number().
- map = None # type: Dict[str, Callable[[], None]]
-
- # Indent levels of currently open blocks, in spaces.
- indents = None # type: List[int]
-
- # Open ('s, ['s and {'s without matching closing bracket; used for ignoring
- # newlines within parentheses/brackets.
- open_brackets = None # type: List[str]
-
- pyversion = defaults.PYTHON3_VERSION
-
- # Ignore errors on these lines (defined using '# type: ignore').
- ignored_lines = None # type: Set[int]
-
- def __init__(self, pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
- is_stub_file: bool = False) -> None:
- self.map = {}
- self.tok = []
- self.indents = [0]
- self.open_brackets = []
- self.pyversion = pyversion
- self.is_stub_file = is_stub_file
- self.ignored_lines = set()
- # Fill in the map from valid character codes to relevant lexer methods.
- extra_misc = '' if pyversion[0] >= 3 else '`'
- for seq, method in [('ABCDEFGHIJKLMNOPQRSTUVWXYZ', self.lex_name),
- ('abcdefghijklmnopqrstuvwxyz_', self.lex_name),
- ('0123456789', self.lex_number),
- ('.', self.lex_number_or_dot),
- (' ' + '\t' + '\x0c', self.lex_space),
- ('"', self.lex_str_double),
- ("'", self.lex_str_single),
- ('\r' + '\n', self.lex_break),
- (';', self.lex_semicolon),
- (':', self.lex_colon),
- ('#', self.lex_comment),
- ('\\', self.lex_backslash),
- ('([{', self.lex_open_bracket),
- (')]}', self.lex_close_bracket),
- ('-+*/<>%&|^~=!,@' + extra_misc, self.lex_misc)]:
- for c in seq:
- self.map[c] = method
- if pyversion[0] == 2:
- self.keywords = keywords_common | keywords2
- # Decimal/hex/octal/binary literal or integer complex literal
- self.number_exp1 = re.compile('(0[xXoObB][0-9a-fA-F]+|[0-9]+)[lL]?')
-
- if pyversion[0] == 3:
- self.keywords = keywords_common | keywords3
- self.number_exp1 = re.compile('0[xXoObB][0-9a-fA-F]+|[0-9]+')
-
- def lex(self, text: Union[str, bytes], first_line: int) -> None:
- """Lexically analyze a string, storing the tokens at the tok list."""
- self.i = 0
- self.line = first_line
- self.column = 0
-
- if isinstance(text, bytes):
- if text.startswith(b'\xef\xbb\xbf'):
- self.enc = 'utf8'
- bom = True
- else:
- self.enc, enc_line = find_python_encoding(text, self.pyversion)
- bom = False
- try:
- decoded_text = text.decode(self.enc)
- except UnicodeDecodeError as err:
- self.report_unicode_decode_error(err, text)
- return
- except LookupError:
- self.report_unknown_encoding(enc_line)
- return
- text = decoded_text
- if bom:
- self.add_token(Bom(text[0]))
- self.s = text
-
- # Parse initial indent; otherwise first-line indent would not generate
- # an error.
- self.lex_indent()
-
- # Use some local variables as a simple optimization.
- map = self.map
- default = self.unknown_character
-
- # Lex the file. Repeatedly call the lexer method for the current char.
- while self.i < len(text):
- # Get the character code of the next character to lex.
- c = text[self.i]
- # Dispatch to the relevant lexer method. This will consume some
- # characters in the text, add a token to self.tok and increment
- # self.i.
- map.get(c, default)()
-
- # Append a break if there is no statement/block terminator at the end
- # of input.
- if len(self.tok) > 0 and (not isinstance(self.tok[-1], Break) and
- not isinstance(self.tok[-1], Dedent)):
- self.add_token(Break(''))
-
- # Attach any dangling comments/whitespace to a final Break token.
- if self.tok and isinstance(self.tok[-1], Break):
- self.tok[-1].string += self.pre_whitespace
- self.pre_whitespace = ''
-
- # Close remaining open blocks with Dedent tokens.
- self.lex_indent()
-
- self.add_token(Eof(''))
-
- def report_unicode_decode_error(self, exc: UnicodeDecodeError, text: bytes) -> None:
- lines = text.splitlines()
- for line in lines:
- try:
- line.decode(self.enc)
- except UnicodeDecodeError as new_exc:
- exc = new_exc
- break
- self.line += 1
- else:
- self.line = 1
- self.add_token(
- LexError('', DECODE_ERROR,
- "%r codec can't decode byte %d in column %d" % (
- self.enc, line[exc.start], exc.start + 1)))
- self.add_token(Break(''))
- self.add_token(Eof(''))
-
- def report_unknown_encoding(self, encoding_line: int) -> None:
- self.line = encoding_line
- self.add_token(
- LexError('', DECODE_ERROR,
- "Unknown encoding %r" % self.enc))
- self.add_token(Break(''))
- self.add_token(Eof(''))
-
- def lex_number_or_dot(self) -> None:
- """Analyse a token starting with a dot.
-
- It can be the member access operator, a float literal such as '.123',
- or an ellipsis (for Python 3 and for all stub files).
- """
- if self.is_at_number():
- self.lex_number()
- elif self.is_at_ellipsis():
- # '...' is valid in Python 2 as a token but its use is limited to indexing.
- # Example: Tuple[int, ...] is valid in Python 2.
- self.lex_ellipsis()
- else:
- self.lex_misc()
-
- number_exp = re.compile(r'[0-9]|\.[0-9]')
-
- def is_at_number(self) -> bool:
- """Is the current location at a numeric literal?"""
- return self.match(self.number_exp) != ''
-
- ellipsis_exp = re.compile(r'\.\.\.')
-
- def is_at_ellipsis(self) -> bool:
- """Is the current location at a ellipsis '...'"""
- return self.match(self.ellipsis_exp) != ''
-
- # Regexps used by lex_number
-
- # NOTE: number_exp1 depends on Python version and is defined in __init__.
-
- # Float literal, e.g. '1.23' or '12e+34' or '1.2j'
- number_exp2 = re.compile(
- r'[0-9]*\.[0-9]*([eE][-+]?[0-9]+)?|[0-9]+[eE][-+]?[0-9]+')
-
- # Complex literal, e.g. '3j' or '1.5e2J'
- number_complex = re.compile(
- r'([0-9]*\.[0-9]*([eE][-+]?[0-9]+)?|[0-9]+([eE][-+]?[0-9]+)?)[jJ]')
-
- # These characters must not appear after a number literal.
- name_char_exp = re.compile('[a-zA-Z0-9_]')
- octal_int = re.compile('0+[1-9]')
-
- def lex_number(self) -> None:
- """Analyse an int or float literal.
-
- Assume that the current location points to one of them.
- """
- s1 = self.match(self.number_exp1)
- s2 = self.match(self.number_exp2)
- sc = self.match(self.number_complex)
-
- maxlen = max(len(s1), len(s2), len(sc))
- if self.name_char_exp.match(
- self.s[self.i + maxlen:self.i + maxlen + 1]) is not None:
- # Error: alphanumeric character after number literal.
- s3 = self.match(re.compile('[0-9][0-9a-zA-Z_]*'))
- maxlen = max(maxlen, len(s3))
- self.add_token(LexError(' ' * maxlen, NUMERIC_LITERAL_ERROR))
- elif len(s1) == maxlen:
- # Integer literal.
- if self.pyversion[0] >= 3 and self.octal_int.match(s1):
- # Python 2 style octal literal such as 0377 not supported in Python 3.
- self.add_token(LexError(s1, NUMERIC_LITERAL_ERROR))
- else:
- self.add_token(IntLit(s1))
- elif len(s2) == maxlen:
- # Float literal.
- self.add_token(FloatLit(s2))
- else:
- # Complex literal.
- self.add_token(ComplexLit(sc))
-
- def lex_ellipsis(self) -> None:
- self.add_token(EllipsisToken('...'))
-
- name_exp = re.compile('[a-zA-Z_][a-zA-Z0-9_]*')
-
- def lex_name(self) -> None:
- """Analyse a name.
-
- A name can be an identifier, a keyword or an alphabetical operator.
- Also deal with prefixed string literals such as r'...'.
- """
- s = self.match(self.name_exp)
- if s in self.keywords:
- self.add_token(Keyword(s))
- elif s in alpha_operators:
- self.add_token(Op(s))
- elif s in str_prefixes and self.match(re.compile('[a-zA-Z]+[\'"]')) != '':
- self.lex_prefixed_str(s)
- else:
- self.add_token(Name(s))
-
- # Regexps representing components of string literals
-
- # Initial part of a single-quoted literal, e.g. b'foo' or b'foo\\\n
- str_exp_single = re.compile(
- r"[a-zA-Z]*'([^'\\\r\n]|\\[^\r\n])*('|\\(\n|\r\n?))")
- # Non-initial part of a multiline single-quoted literal, e.g. foo'
- str_exp_single_multi = re.compile(
- r"([^'\\\r\n]|\\[^\r\n])*('|\\(\n|\r\n?))")
- # Initial part of a single-quoted raw literal, e.g. r'foo' or r'foo\\\n
- str_exp_raw_single = re.compile(
- r"[a-zA-Z]*'([^'\r\n\\]|\\'|\\[^\n\r])*('|\\(\n|\r\n?))")
- # Non-initial part of a raw multiline single-quoted literal, e.g. foo'
- str_exp_raw_single_multi = re.compile(
- r"([^'\r\n]|'')*('|\\(\n|\r\n?))")
-
- # Start of a ''' literal, e.g. b'''
- str_exp_single3 = re.compile("[a-z]*'''")
- # End of a ''' literal, e.g. foo'''
- str_exp_single3end = re.compile(r"([^\n\r\\]|\\[^\n\r])*?'''")
-
- # The following are similar to above (but use double quotes).
-
- str_exp_double = re.compile(
- r'[a-z]*"([^"\\\r\n]|\\[^\r\n])*("|\\(\n|\r\n?))')
- str_exp_double_multi = re.compile(
- r'([^"\\\r\n]|\\[^\r\n])*("|\\(\n|\r\n?))')
- str_exp_raw_double = re.compile(
- r'[a-z]*"([^"\r\n\\]|\\"|\\[^\n\r])*("|\\(\n|\r\n?))')
- str_exp_raw_double_multi = re.compile(
- r'([^"\r\n]|"")*("|\\(\n|\r\n?))')
-
- str_exp_double3 = re.compile('[a-z]*"""')
- str_exp_double3end = re.compile(r'([^\n\r\\]|\\[^\n\r])*?"""')
-
- def lex_str_single(self) -> None:
- """Analyse single-quoted string literal"""
- self.lex_str(self.str_exp_single, self.str_exp_single_multi,
- self.str_exp_single3, self.str_exp_single3end)
-
- def lex_str_double(self) -> None:
- """Analyse double-quoted string literal"""
- self.lex_str(self.str_exp_double, self.str_exp_double_multi,
- self.str_exp_double3, self.str_exp_double3end)
-
- def lex_prefixed_str(self, prefix: str) -> None:
- """Analyse a string literal with a prefix, such as r'...'."""
- s = self.match(re.compile('[a-zA-Z]+[\'"]'))
- if s.endswith("'"):
- re1 = self.str_exp_single
- re2 = self.str_exp_single_multi
- if 'r' in prefix or 'R' in prefix:
- re1 = self.str_exp_raw_single
- re2 = self.str_exp_raw_single_multi
- self.lex_str(re1, re2, self.str_exp_single3,
- self.str_exp_single3end, prefix)
- else:
- re1 = self.str_exp_double
- re2 = self.str_exp_double_multi
- if 'r' in prefix or 'R' in prefix:
- re1 = self.str_exp_raw_double
- re2 = self.str_exp_raw_double_multi
- self.lex_str(re1, re2, self.str_exp_double3,
- self.str_exp_double3end, prefix)
-
- def lex_str(self, regex: Pattern[str], re2: Pattern[str],
- re3: Pattern[str], re3end: Pattern[str],
- prefix: str = '') -> None:
- """Analyse a string literal described by regexps.
-
- Assume that the current location is at the beginning of the
- literal. The arguments re3 and re3end describe the
- corresponding triple-quoted literals.
- """
- s3 = self.match(re3)
- if s3 != '':
- # Triple-quoted string literal.
- self.lex_triple_quoted_str(re3end, prefix)
- else:
- # Single or double quoted string literal.
- s = self.match(regex)
- if s != '':
- if s.endswith('\n') or s.endswith('\r'):
- self.lex_multiline_string_literal(re2, s)
- else:
- if 'b' in prefix or 'B' in prefix:
- self.add_token(BytesLit(s))
- elif 'u' in prefix or 'U' in prefix:
- self.add_token(UnicodeLit(s))
- else:
- self.add_token(StrLit(s))
- else:
- # Unterminated string literal.
- s = self.match(re.compile('[^\\n\\r]*'))
- self.add_token(LexError(s, UNTERMINATED_STRING_LITERAL))
-
- def lex_triple_quoted_str(self, re3end: Pattern[str], prefix: str) -> None:
- line = self.line
- ss = self.s[self.i:self.i + len(prefix) + 3]
- self.i += len(prefix) + 3
- self.column += len(prefix) + 3
- while True:
- m = re3end.match(self.s, self.i)
- if m is not None:
- break
- m = re.match('[^\\n\\r]*(\\n|\\r\\n?)', self.s[self.i:])
- if m is None:
- self.add_special_token(
- LexError(ss, UNTERMINATED_STRING_LITERAL), line, 0)
- return
- s = m.group(0)
- ss += s
- self.line += 1
- self.i += len(s)
- self.column += len(s)
- lit = None # type: Token
- if 'b' in prefix or 'B' in prefix:
- lit = BytesLit(ss + m.group(0))
- elif 'u' in prefix or 'U' in prefix:
- lit = UnicodeLit(ss + m.group(0))
- else:
- lit = StrLit(ss + m.group(0))
- self.add_special_token(lit, line, len(m.group(0)))
-
- def lex_multiline_string_literal(self, re_end: Pattern[str],
- prefix: str) -> None:
- """Analyze multiline single/double-quoted string literal.
-
- Use explicit \ for line continuation.
- """
- line = self.line
- self.i += len(prefix)
- self.column += len(prefix)
- ss = prefix
- while True:
- m = self.match(re_end)
- if m == '':
- self.add_special_token(
- LexError(ss, UNTERMINATED_STRING_LITERAL), line, 0)
- return
- ss += m
- self.line += 1
- self.i += len(m)
- self.column += len(m)
- if not m.endswith('\n') and not m.endswith('\r'): break
- self.add_special_token(StrLit(ss), line, 0) # TODO bytes
-
- comment_exp = re.compile(r'#[^\n\r]*')
-
- def lex_comment(self) -> None:
- """Analyze a comment."""
- s = self.match(self.comment_exp)
- self.add_pre_whitespace(s)
-
- backslash_exp = re.compile(r'\\(\n|\r\n?)')
-
- def lex_backslash(self) -> None:
- s = self.match(self.backslash_exp)
- if s != '':
- self.add_pre_whitespace(s)
- self.line += 1
- else:
- self.add_token(LexError('\\', INVALID_BACKSLASH))
-
- space_exp = re.compile(r'[ \t\x0c]*')
- indent_exp = re.compile(r'[ \t]*[#\n\r]?')
-
- def lex_space(self) -> None:
- """Analyze a run of whitespace characters (within a line, not indents).
-
- Only store them in self.pre_whitespace.
- """
- s = self.match(self.space_exp)
- self.add_pre_whitespace(s)
-
- comment_or_newline = '#' + '\n' + '\r' # type: str
-
- def lex_indent(self) -> None:
- """Analyze whitespace chars at the beginning of a line (indents)."""
- s = self.match(self.indent_exp)
- while True:
- s = self.match(self.indent_exp)
- if s == '' or s[-1] not in self.comment_or_newline:
- break
- # Empty line (whitespace only or comment only).
- self.add_pre_whitespace(s[:-1])
- if s[-1] == '#':
- self.lex_comment()
- else:
- self.lex_break()
- indent = self.calc_indent(s)
- if indent == self.indents[-1]:
- # No change in indent: just whitespace.
- self.add_pre_whitespace(s)
- elif indent > self.indents[-1]:
- # An increased indent (new block).
- self.indents.append(indent)
- self.add_token(Indent(s))
- else:
- # Decreased indent (end of one or more blocks).
- pre = self.pre_whitespace
- self.pre_whitespace = ''
- while indent < self.indents[-1]:
- self.add_token(Dedent(''))
- self.indents.pop()
- self.pre_whitespace = pre
- self.add_pre_whitespace(s)
- if indent != self.indents[-1]:
- # Error: indent level does not match a previous indent level.
- self.add_token(LexError('', INVALID_DEDENT))
-
- def calc_indent(self, s: str) -> int:
- indent = 0
- for ch in s:
- if ch == ' ':
- indent += 1
- else:
- # Tab: 8 spaces (rounded to a multiple of 8).
- indent += 8 - indent % 8
- return indent
-
- break_exp = re.compile(r'\r\n|\r|\n|;')
-
- def lex_break(self) -> None:
- """Analyse a line break."""
- s = self.match(self.break_exp)
- last_tok = self.tok[-1] if self.tok else None
- if isinstance(last_tok, Break):
- was_semicolon = last_tok.string == ';'
- last_tok.string += self.pre_whitespace + s
- self.i += len(s)
- self.line += 1
- self.column = 0
- self.pre_whitespace = ''
- if was_semicolon:
- self.lex_indent()
- elif self.ignore_break():
- self.add_pre_whitespace(s)
- self.line += 1
- self.column = 0
- else:
- self.add_token(Break(s))
- self.line += 1
- self.column = 0
- self.lex_indent()
-
- def lex_semicolon(self) -> None:
- self.add_token(Break(';'))
-
- def lex_colon(self) -> None:
- self.add_token(Colon(':'))
-
- open_bracket_exp = re.compile('[[({]')
-
- def lex_open_bracket(self) -> None:
- s = self.match(self.open_bracket_exp)
- self.open_brackets.append(s)
- self.add_token(Punct(s))
-
- close_bracket_exp = re.compile('[])}]')
-
- open_bracket = {')': '(', ']': '[', '}': '{'}
-
- def lex_close_bracket(self) -> None:
- s = self.match(self.close_bracket_exp)
- if (self.open_brackets != []
- and self.open_bracket[s] == self.open_brackets[-1]):
- self.open_brackets.pop()
- self.add_token(Punct(s))
-
- def lex_misc(self) -> None:
- """Analyze a non-alphabetical operator or a punctuator."""
- s = ''
- t = None # type: Any
- for re_list, type in [(operators, Op), (punctuators, Punct)]:
- for regexp in re_list:
- s2 = self.match(regexp)
- if len(s2) > len(s):
- t = type
- s = s2
- if s == '':
- # Could not match any token; report an invalid character. This is
- # reached at least if the current character is '!' not followed by
- # '='.
- self.add_token(LexError(self.s[self.i], INVALID_CHARACTER))
- else:
- if s == '<>':
- if self.pyversion[0] == 2:
- s = '!='
- else:
- self.add_token(Op('<'))
- s = '>'
- self.add_token(t(s))
-
- def unknown_character(self) -> None:
- """Report an unknown character as a lexical analysis error."""
- self.add_token(LexError(self.s[self.i], INVALID_CHARACTER))
-
- # Utility methods
-
- def match(self, pattern: Pattern[str]) -> str:
- """Try to match a regular expression at current location.
-
- If the argument regexp is matched at the current location,
- return the matched string; otherwise return the empty string.
- """
- m = pattern.match(self.s, self.i)
- if m is not None:
- return m.group(0)
- else:
- return ''
-
- def add_pre_whitespace(self, s: str) -> None:
- """Record whitespace and comments before the next token.
-
- The accumulated whitespace/comments will be stored in the next token
- and then it will be cleared.
-
- This is needed for pretty-printing the original source code while
- preserving comments, indentation, whitespace etc.
- """
- self.pre_whitespace += s
- self.i += len(s)
- self.column += len(s)
-
- type_ignore_exp = re.compile(r'[ \t]*#[ \t]*type:[ \t]*ignore\b')
-
- def add_token(self, tok: Token) -> None:
- """Store a token.
-
- Update its line number and record preceding whitespace
- characters and comments.
- """
- if (tok.string == '' and not isinstance(tok, Eof)
- and not isinstance(tok, Break)
- and not isinstance(tok, LexError)
- and not isinstance(tok, Dedent)):
- raise ValueError('Empty token')
- tok.pre = self.pre_whitespace
- if self.type_ignore_exp.match(tok.pre):
- delta = 0
- if '\n' in tok.pre or '\r' in tok.pre:
- delta += 1
- self.ignored_lines.add(self.line - delta)
- tok.line = self.line
- tok.column = self.column
- self.tok.append(tok)
- self.i += len(tok.string)
- self.column += len(tok.string)
- self.pre_whitespace = ''
-
- def add_special_token(self, tok: Token, line: int, skip: int) -> None:
- """Like add_token, but caller sets the number of chars to skip."""
- if (tok.string == '' and not isinstance(tok, Eof)
- and not isinstance(tok, Break)
- and not isinstance(tok, LexError)
- and not isinstance(tok, Dedent)):
- raise ValueError('Empty token')
- tok.pre = self.pre_whitespace
- tok.line = line
- self.tok.append(tok)
- self.i += skip
- self.column += skip
- self.pre_whitespace = ''
-
- def ignore_break(self) -> bool:
- """If the next token is a break, can we ignore it?"""
- if len(self.open_brackets) > 0 or len(self.tok) == 0:
- # Ignore break after open ( [ or { or at the beginning of file.
- return True
- else:
- # Ignore break after another break or dedent.
- t = self.tok[-1]
- return isinstance(t, Break) or isinstance(t, Dedent)
-
-
-if __name__ == '__main__':
- # Lexically analyze a file and dump the tokens to stdout.
- import sys
- if len(sys.argv) != 2:
- print('Usage: lex.py FILE', file=sys.stderr)
- sys.exit(2)
- fnam = sys.argv[1]
- with open(fnam, 'rb') as f:
- s = f.read()
- for t in lex(s):
- print(t)
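The removed lexer tracked block structure by converting leading whitespace into an indent width: each space adds one column and a tab advances to the next multiple of 8, which is also CPython's default tab size. A minimal standalone sketch of that rule, assuming the input contains only spaces and tabs (calc_indent below mirrors the deleted Lexer.calc_indent but is not part of mypy's current API):

def calc_indent(s: str) -> int:
    # A space adds one column; a tab rounds the column up to the next
    # multiple of 8, the same rule as the deleted Lexer.calc_indent.
    indent = 0
    for ch in s:
        if ch == ' ':
            indent += 1
        else:  # assume the only other character is a tab
            indent += 8 - indent % 8
    return indent

assert calc_indent('    ') == 4
assert calc_indent('\t') == 8
assert calc_indent('  \t') == 8    # a tab after two spaces still lands on column 8
assert calc_indent('\t  ') == 10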
diff --git a/mypy/main.py b/mypy/main.py
index 1c1f57e..a551167 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -24,11 +24,13 @@ from mypy.version import __version__
PY_EXTENSIONS = tuple(PYTHON_EXTENSIONS)
-def main(script_path: str) -> None:
+def main(script_path: str, args: List[str] = None) -> None:
"""Main entry point to the type checker.
Args:
script_path: Path to the 'mypy' script (used for finding data files).
+ args: Custom command-line arguments. If not given, sys.argv[1:] will
+ be used.
"""
t0 = time.time()
if script_path:
@@ -36,7 +38,9 @@ def main(script_path: str) -> None:
else:
bin_dir = None
sys.setrecursionlimit(2 ** 14)
- sources, options = process_options(sys.argv[1:])
+ if args is None:
+ args = sys.argv[1:]
+ sources, options = process_options(args)
serious = False
try:
res = type_check_only(sources, bin_dir, options)
@@ -50,8 +54,11 @@ def main(script_path: str) -> None:
util.write_junit_xml(t1 - t0, serious, a, options.junit_xml)
if a:
f = sys.stderr if serious else sys.stdout
- for m in a:
- f.write(m + '\n')
+ try:
+ for m in a:
+ f.write(m + '\n')
+ except BrokenPipeError:
+ pass
sys.exit(1)
@@ -114,11 +121,22 @@ class SplitNamespace(argparse.Namespace):
def parse_version(v: str) -> Tuple[int, int]:
m = re.match(r'\A(\d)\.(\d+)\Z', v)
- if m:
- return int(m.group(1)), int(m.group(2))
- else:
+ if not m:
raise argparse.ArgumentTypeError(
"Invalid python version '{}' (expected format: 'x.y')".format(v))
+ major, minor = int(m.group(1)), int(m.group(2))
+ if major == 2:
+ if minor != 7:
+ raise argparse.ArgumentTypeError(
+ "Python 2.{} is not supported (must be 2.7)".format(minor))
+ elif major == 3:
+ if minor <= 2:
+ raise argparse.ArgumentTypeError(
+ "Python 3.{} is not supported (must be 3.3 or higher)".format(minor))
+ else:
+ raise argparse.ArgumentTypeError(
+ "Python major version '{}' out of range (must be 2 or 3)".format(major))
+ return major, minor
# Make the help output a little less jarring.
@@ -219,17 +237,21 @@ def process_options(args: List[str],
" --check-untyped-defs enabled")
add_invertible_flag('--warn-redundant-casts', default=False, strict_flag=True,
help="warn about casting an expression to its inferred type")
- add_invertible_flag('--warn-no-return', default=False,
- help="warn about functions that end without returning")
+ add_invertible_flag('--no-warn-no-return', dest='warn_no_return', default=True,
+ help="do not warn about functions that end without returning")
+ add_invertible_flag('--warn-return-any', default=False, strict_flag=True,
+ help="warn about returning values of type Any"
+ " from non-Any typed functions")
add_invertible_flag('--warn-unused-ignores', default=False, strict_flag=True,
help="warn about unneeded '# type: ignore' comments")
- add_invertible_flag('--show-error-context', default=True,
- dest='hide_error_context',
+ add_invertible_flag('--show-error-context', default=False,
+ dest='show_error_context',
help='Precede errors with "note:" messages explaining context')
- add_invertible_flag('--no-fast-parser', default=True, dest='fast_parser',
- help="disable the fast parser (not recommended)")
parser.add_argument('-i', '--incremental', action='store_true',
- help="enable experimental module cache")
+ help="enable module cache")
+ parser.add_argument('--quick-and-dirty', action='store_true',
+ help="use cache even if dependencies out of date "
+ "(implies --incremental)")
parser.add_argument('--cache-dir', action='store', metavar='DIR',
help="store module cache info in the given folder in incremental mode "
"(defaults to '{}')".format(defaults.CACHE_DIR))
@@ -261,8 +283,6 @@ def process_options(args: List[str],
parser.add_argument('--find-occurrences', metavar='CLASS.MEMBER',
dest='special-opts:find_occurrences',
help="print out all usages of a class member (experimental)")
- add_invertible_flag('--strict-boolean', default=False, strict_flag=True,
- help='enable strict boolean checks in conditions')
strict_help = "Strict mode. Enables the following flags: {}".format(
", ".join(strict_flag_names))
parser.add_argument('--strict', action='store_true', dest='special-opts:strict',
@@ -281,6 +301,8 @@ def process_options(args: List[str],
# --dump-graph will dump the contents of the graph of SCCs and exit.
parser.add_argument('--dump-graph', action='store_true', help=argparse.SUPPRESS)
# deprecated options
+ add_invertible_flag('--strict-boolean', default=False,
+ help=argparse.SUPPRESS)
parser.add_argument('-f', '--dirty-stubs', action='store_true',
dest='special-opts:dirty_stubs',
help=argparse.SUPPRESS)
@@ -293,6 +315,11 @@ def process_options(args: List[str],
parser.add_argument('--almost-silent', action='store_true',
dest='special-opts:almost_silent',
help=argparse.SUPPRESS)
+ parser.add_argument('--fast-parser', action='store_true', dest='special-opts:fast_parser',
+ help=argparse.SUPPRESS)
+ parser.add_argument('--no-fast-parser', action='store_true',
+ dest='special-opts:no_fast_parser',
+ help=argparse.SUPPRESS)
report_group = parser.add_argument_group(
title='report generation',
@@ -321,16 +348,13 @@ def process_options(args: List[str],
# filename for the config file and know if the user requested all strict options.
dummy = argparse.Namespace()
parser.parse_args(args, dummy)
- config_file = defaults.CONFIG_FILE
- if dummy.config_file:
- config_file = dummy.config_file
- if not os.path.exists(config_file):
- parser.error("Cannot file config file '%s'" % config_file)
+ config_file = dummy.config_file
+ if config_file is not None and not os.path.exists(config_file):
+ parser.error("Cannot file config file '%s'" % config_file)
# Parse config file first, so command line can override.
options = Options()
- if config_file and os.path.exists(config_file):
- parse_config_file(options, config_file)
+ parse_config_file(options, config_file)
# Set strict flags before parsing (if strict mode enabled), so other command
# line options can override.
@@ -351,6 +375,9 @@ def process_options(args: List[str],
)
# Process deprecated options
+ if options.strict_boolean:
+ print("Warning: --strict-boolean is deprecated; "
+ "see https://github.com/python/mypy/issues/3195", file=sys.stderr)
if special_opts.almost_silent:
print("Warning: --almost-silent has been replaced by "
"--follow-imports=errors", file=sys.stderr)
@@ -366,6 +393,11 @@ def process_options(args: List[str],
print("Warning: -f/--dirty-stubs is deprecated and no longer necessary. Mypy no longer "
"checks the git status of stubs.",
file=sys.stderr)
+ if special_opts.fast_parser:
+ print("Warning: --fast-parser is now the default (and only) parser.")
+ if special_opts.no_fast_parser:
+ print("Warning: --no-fast-parser no longer has any effect. The fast parser "
+ "is now mypy's default and only parser.")
# Check for invalid argument combinations.
if require_targets:
@@ -398,6 +430,10 @@ def process_options(args: List[str],
report_dir = val
options.report_dirs[report_type] = report_dir
+ # Let quick_and_dirty imply incremental.
+ if options.quick_and_dirty:
+ options.incremental = True
+
# Set target.
if special_opts.modules:
options.build_type = BuildType.MODULE
@@ -486,7 +522,6 @@ def crawl_up(arg: str) -> Tuple[str, str]:
"""
dir, mod = os.path.split(arg)
mod = strip_py(mod) or mod
- assert '.' not in mod
while dir and get_init_file(dir):
dir, base = os.path.split(dir)
if not base:
@@ -529,8 +564,7 @@ def get_init_file(dir: str) -> Optional[str]:
# exists to specify types for values initialized to None or container
# types.
config_types = {
- # TODO: Check validity of python version
- 'python_version': lambda s: tuple(map(int, s.split('.'))),
+ 'python_version': parse_version,
'strict_optional_whitelist': lambda s: s.split(),
'custom_typing_module': str,
'custom_typeshed_dir': str,
@@ -541,23 +575,44 @@ config_types = {
'almost_silent': bool,
}
+SHARED_CONFIG_FILES = ('setup.cfg',)
-def parse_config_file(options: Options, filename: str) -> None:
+
+def parse_config_file(options: Options, filename: Optional[str]) -> None:
"""Parse a config file into an Options object.
Errors are written to stderr but are not fatal.
+
+ If filename is None, fall back to default config file and then
+ to setup.cfg.
"""
+ config_files = None # type: Tuple[str, ...]
+ if filename is not None:
+ config_files = (filename,)
+ else:
+ config_files = (defaults.CONFIG_FILE,) + SHARED_CONFIG_FILES
+
parser = configparser.RawConfigParser()
- try:
- parser.read(filename)
- except configparser.Error as err:
- print("%s: %s" % (filename, err), file=sys.stderr)
+
+ for config_file in config_files:
+ if not os.path.exists(config_file):
+ continue
+ try:
+ parser.read(config_file)
+ except configparser.Error as err:
+ print("%s: %s" % (config_file, err), file=sys.stderr)
+ else:
+ file_read = config_file
+ break
+ else:
return
+
if 'mypy' not in parser:
- print("%s: No [mypy] section in config file" % filename, file=sys.stderr)
+ if filename or file_read not in SHARED_CONFIG_FILES:
+ print("%s: No [mypy] section in config file" % file_read, file=sys.stderr)
else:
section = parser['mypy']
- prefix = '%s: [%s]' % (filename, 'mypy')
+ prefix = '%s: [%s]' % (file_read, 'mypy')
updates, report_dirs = parse_section(prefix, options, section)
for k, v in updates.items():
setattr(options, k, v)
@@ -565,7 +620,7 @@ def parse_config_file(options: Options, filename: str) -> None:
for name, section in parser.items():
if name.startswith('mypy-'):
- prefix = '%s: [%s]' % (filename, name)
+ prefix = '%s: [%s]' % (file_read, name)
updates, report_dirs = parse_section(prefix, options, section)
if report_dirs:
print("%s: Per-module sections should not specify reports (%s)" %
@@ -618,7 +673,11 @@ def parse_section(prefix: str, template: Options,
if ct is bool:
v = section.getboolean(key) # type: ignore # Until better stub
elif callable(ct):
- v = ct(section.get(key))
+ try:
+ v = ct(section.get(key))
+ except argparse.ArgumentTypeError as err:
+ print("%s: %s: %s" % (prefix, key, err), file=sys.stderr)
+ continue
else:
print("%s: Don't know what type %s should have" % (prefix, key), file=sys.stderr)
continue
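With the config-file changes above, an explicitly named --config-file must exist, while the implicit search now falls back from the default config file to a shared setup.cfg. A small sketch of that lookup order, assuming defaults.CONFIG_FILE is 'mypy.ini' in this release (the helper names below are illustrative, not mypy API):

import os
from typing import Optional, Tuple

DEFAULT_CONFIG_FILE = 'mypy.ini'          # stands in for defaults.CONFIG_FILE
SHARED_CONFIG_FILES = ('setup.cfg',)

def candidate_config_files(filename: Optional[str]) -> Tuple[str, ...]:
    # An explicit --config-file wins; otherwise try mypy.ini, then setup.cfg,
    # mirroring the fallback order in the patched parse_config_file.
    if filename is not None:
        return (filename,)
    return (DEFAULT_CONFIG_FILE,) + SHARED_CONFIG_FILES

def pick_config_file(filename: Optional[str]) -> Optional[str]:
    # Return the first candidate that exists on disk, or None if none do.
    for path in candidate_config_files(filename):
        if os.path.exists(path):
            return path
    return None

As in the patch, a setup.cfg found this way is silently ignored when it has no [mypy] section, whereas a missing section in an explicitly named file still produces a warning.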
diff --git a/mypy/maptype.py b/mypy/maptype.py
index ff76035..cdffd74 100644
--- a/mypy/maptype.py
+++ b/mypy/maptype.py
@@ -57,12 +57,13 @@ def class_derivation_paths(typ: TypeInfo,
result = [] # type: List[List[TypeInfo]]
for base in typ.bases:
- if base.type == supertype:
- result.append([base.type])
+ btype = base.type
+ if btype == supertype:
+ result.append([btype])
else:
# Try constructing a longer path via the base class.
- for path in class_derivation_paths(base.type, supertype):
- result.append([base.type] + path)
+ for path in class_derivation_paths(btype, supertype):
+ result.append([btype] + path)
return result
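class_derivation_paths walks the base-class graph and returns every chain of bases leading from a class to a given supertype. A self-contained sketch of the same recursion over a plain name-to-bases mapping instead of TypeInfo objects (the toy graph is invented for illustration):

from typing import Dict, List

def derivation_paths(graph: Dict[str, List[str]], typ: str, supertype: str) -> List[List[str]]:
    # Each result lists the bases visited on one route from typ to supertype,
    # the same shape as class_derivation_paths above.
    result = []  # type: List[List[str]]
    for base in graph.get(typ, []):
        if base == supertype:
            result.append([base])
        else:
            for path in derivation_paths(graph, base, supertype):
                result.append([base] + path)
    return result

graph = {'C': ['A', 'B'], 'A': ['Base'], 'B': ['Base']}
print(derivation_paths(graph, 'C', 'Base'))   # [['A', 'Base'], ['B', 'Base']]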
diff --git a/mypy/meet.py b/mypy/meet.py
index 7aa479c..e32997f 100644
--- a/mypy/meet.py
+++ b/mypy/meet.py
@@ -1,9 +1,9 @@
from collections import OrderedDict
-from typing import List, Optional
+from typing import List, Optional, cast, Tuple
from mypy.join import is_similar_callables, combine_similar_callables, join_type_list
from mypy.types import (
- Type, AnyType, TypeVisitor, UnboundType, Void, ErrorType, NoneTyp, TypeVarType,
+ Type, AnyType, TypeVisitor, UnboundType, NoneTyp, TypeVarType,
Instance, CallableType, TupleType, TypedDictType, ErasedType, TypeList, UnionType, PartialType,
DeletedType, UninhabitedType, TypeType
)
@@ -25,21 +25,26 @@ def meet_types(s: Type, t: Type) -> Type:
return t.accept(TypeMeetVisitor(s))
-def meet_simple(s: Type, t: Type, default_right: bool = True) -> Type:
- if s == t:
- return s
- if isinstance(s, UnionType):
- return UnionType.make_simplified_union([meet_types(x, t) for x in s.items])
- elif not is_overlapping_types(s, t, use_promotions=True):
+def narrow_declared_type(declared: Type, narrowed: Type) -> Type:
+ """Return the declared type narrowed down to another type."""
+ if declared == narrowed:
+ return declared
+ if isinstance(declared, UnionType):
+ return UnionType.make_simplified_union([narrow_declared_type(x, narrowed)
+ for x in declared.items])
+ elif not is_overlapping_types(declared, narrowed, use_promotions=True):
if experiments.STRICT_OPTIONAL:
return UninhabitedType()
else:
return NoneTyp()
- else:
- if default_right:
- return t
- else:
- return s
+ elif isinstance(narrowed, UnionType):
+ return UnionType.make_simplified_union([narrow_declared_type(declared, x)
+ for x in narrowed.items])
+ elif isinstance(narrowed, AnyType):
+ return narrowed
+ elif isinstance(declared, (Instance, TupleType)):
+ return meet_types(declared, narrowed)
+ return narrowed
def is_overlapping_types(t: Type, s: Type, use_promotions: bool = False) -> bool:
@@ -69,6 +74,10 @@ def is_overlapping_types(t: Type, s: Type, use_promotions: bool = False) -> bool
TODO: Don't consider callables always overlapping.
TODO: Don't consider type variables with values always overlapping.
"""
+ # Any overlaps with everything
+ if isinstance(t, AnyType) or isinstance(s, AnyType):
+ return True
+
# Since we are effectively working with the erased types, we only
# need to handle occurrences of TypeVarType at the top level.
if isinstance(t, TypeVarType):
@@ -100,11 +109,12 @@ def is_overlapping_types(t: Type, s: Type, use_promotions: bool = False) -> bool
elif isinstance(t, TypeType) or isinstance(s, TypeType):
# If exactly only one of t or s is a TypeType, check if one of them
# is an `object` or a `type` and otherwise assume no overlap.
+ one = t if isinstance(t, TypeType) else s
other = s if isinstance(t, TypeType) else t
if isinstance(other, Instance):
return other.type.fullname() in {'builtins.object', 'builtins.type'}
else:
- return False
+ return isinstance(other, CallableType) and is_subtype(other, one)
if experiments.STRICT_OPTIONAL:
if isinstance(t, NoneTyp) != isinstance(s, NoneTyp):
# NoneTyp does not overlap with other non-Union types under strict Optional checking
@@ -119,9 +129,7 @@ class TypeMeetVisitor(TypeVisitor[Type]):
self.s = s
def visit_unbound_type(self, t: UnboundType) -> Type:
- if isinstance(self.s, Void) or isinstance(self.s, ErrorType):
- return ErrorType()
- elif isinstance(self.s, NoneTyp):
+ if isinstance(self.s, NoneTyp):
if experiments.STRICT_OPTIONAL:
return AnyType()
else:
@@ -131,12 +139,6 @@ class TypeMeetVisitor(TypeVisitor[Type]):
else:
return AnyType()
- def visit_error_type(self, t: ErrorType) -> Type:
- return t
-
- def visit_type_list(self, t: TypeList) -> Type:
- assert False, 'Not supported'
-
def visit_any(self, t: AnyType) -> Type:
return self.s
@@ -151,12 +153,6 @@ class TypeMeetVisitor(TypeVisitor[Type]):
for x in t.items]
return UnionType.make_simplified_union(meets)
- def visit_void(self, t: Void) -> Type:
- if isinstance(self.s, Void):
- return t
- else:
- return ErrorType()
-
def visit_none_type(self, t: NoneTyp) -> Type:
if experiments.STRICT_OPTIONAL:
if isinstance(self.s, NoneTyp) or (isinstance(self.s, Instance) and
@@ -165,30 +161,21 @@ class TypeMeetVisitor(TypeVisitor[Type]):
else:
return UninhabitedType()
else:
- if not isinstance(self.s, Void) and not isinstance(self.s, ErrorType):
- return t
- else:
- return ErrorType()
+ return t
def visit_uninhabited_type(self, t: UninhabitedType) -> Type:
- if not isinstance(self.s, Void) and not isinstance(self.s, ErrorType):
- return t
- else:
- return ErrorType()
+ return t
def visit_deleted_type(self, t: DeletedType) -> Type:
- if not isinstance(self.s, Void) and not isinstance(self.s, ErrorType):
- if isinstance(self.s, NoneTyp):
- if experiments.STRICT_OPTIONAL:
- return t
- else:
- return self.s
- elif isinstance(self.s, UninhabitedType):
- return self.s
- else:
+ if isinstance(self.s, NoneTyp):
+ if experiments.STRICT_OPTIONAL:
return t
+ else:
+ return self.s
+ elif isinstance(self.s, UninhabitedType):
+ return self.s
else:
- return ErrorType()
+ return t
def visit_erased_type(self, t: ErasedType) -> Type:
return self.s
@@ -235,7 +222,13 @@ class TypeMeetVisitor(TypeVisitor[Type]):
def visit_callable_type(self, t: CallableType) -> Type:
if isinstance(self.s, CallableType) and is_similar_callables(t, self.s):
- return combine_similar_callables(t, self.s)
+ if is_equivalent(t, self.s):
+ return combine_similar_callables(t, self.s)
+ result = meet_similar_callables(t, self.s)
+ if isinstance(result.ret_type, UninhabitedType):
+ # Return a plain None or <uninhabited> instead of a weird function.
+ return self.default(self.s)
+ return result
else:
return self.default(self.s)
@@ -250,6 +243,10 @@ class TypeMeetVisitor(TypeVisitor[Type]):
elif (isinstance(self.s, Instance) and
self.s.type.fullname() == 'builtins.tuple' and self.s.args):
return t.copy_modified(items=[meet_types(it, self.s.args[0]) for it in t.items])
+ elif (isinstance(self.s, Instance) and t.fallback.type == self.s.type):
+ # Uh oh, a broken named tuple type (https://github.com/python/mypy/issues/3016).
+ # Do something reasonable until that bug is fixed.
+ return t
else:
return self.default(self.s)
@@ -258,10 +255,15 @@ class TypeMeetVisitor(TypeVisitor[Type]):
for (_, l, r) in self.s.zip(t):
if not is_equivalent(l, r):
return self.default(self.s)
- items = OrderedDict([
- (item_name, s_item_type or t_item_type)
- for (item_name, s_item_type, t_item_type) in self.s.zipall(t)
- ])
+ item_list = [] # type: List[Tuple[str, Type]]
+ for (item_name, s_item_type, t_item_type) in self.s.zipall(t):
+ if s_item_type is not None:
+ item_list.append((item_name, s_item_type))
+ else:
+ # at least one of s_item_type and t_item_type is not None
+ assert t_item_type is not None
+ item_list.append((item_name, t_item_type))
+ items = OrderedDict(item_list)
mapping_value_type = join_type_list(list(items.values()))
fallback = self.s.create_anonymous_fallback(value_type=mapping_value_type)
return TypedDictType(items, fallback)
@@ -289,10 +291,26 @@ class TypeMeetVisitor(TypeVisitor[Type]):
def default(self, typ: Type) -> Type:
if isinstance(typ, UnboundType):
return AnyType()
- elif isinstance(typ, Void) or isinstance(typ, ErrorType):
- return ErrorType()
else:
if experiments.STRICT_OPTIONAL:
return UninhabitedType()
else:
return NoneTyp()
+
+
+def meet_similar_callables(t: CallableType, s: CallableType) -> CallableType:
+ from mypy.join import join_types
+ arg_types = [] # type: List[Type]
+ for i in range(len(t.arg_types)):
+ arg_types.append(join_types(t.arg_types[i], s.arg_types[i]))
+ # The TODO in combine_similar_callables also applies here (names and kinds)
+ # The fallback type can be either 'function' or 'type'. The result should have 'function' as
+ # fallback only if both operands have it as 'function'.
+ if t.fallback.type.fullname() != 'builtins.function':
+ fallback = t.fallback
+ else:
+ fallback = s.fallback
+ return t.copy_modified(arg_types=arg_types,
+ ret_type=meet_types(t.ret_type, s.ret_type),
+ fallback=fallback,
+ name=None)
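narrow_declared_type replaces meet_simple: it keeps the declared type when nothing changes, recurses into unions on either side, and produces an uninhabited type when the two types cannot overlap. A toy model of that behaviour in which a type is just a set of concrete runtime classes (this deliberately ignores Any, promotions and subtyping, which the real function handles above):

from typing import FrozenSet

def narrow_declared(declared: FrozenSet[type], narrowed: FrozenSet[type]) -> FrozenSet[type]:
    # Unions are modelled as larger sets, so narrowing is just the overlap;
    # an empty result plays the role of UninhabitedType.
    if declared == narrowed:
        return declared
    return declared & narrowed

union_int_str = frozenset({int, str})                       # Union[int, str]
print(narrow_declared(union_int_str, frozenset({int})))     # only int survives
print(narrow_declared(union_int_str, frozenset({bytes})))   # empty set -> uninhabited

The callable case is subtler: meet_similar_callables joins the argument types but meets the return types, reflecting that a function acceptable to both sides must accept the wider arguments and return the narrower result.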
diff --git a/mypy/messages.py b/mypy/messages.py
index 3d38333..f6cadf0 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -11,7 +11,8 @@ from typing import cast, List, Dict, Any, Sequence, Iterable, Tuple
from mypy.errors import Errors
from mypy.types import (
Type, CallableType, Instance, TypeVarType, TupleType, TypedDictType,
- UnionType, Void, NoneTyp, AnyType, Overloaded, FunctionLike, DeletedType, TypeType
+ UnionType, NoneTyp, AnyType, Overloaded, FunctionLike, DeletedType, TypeType,
+ UninhabitedType
)
from mypy.nodes import (
TypeInfo, Context, MypyFile, op_methods, FuncDef, reverse_type_aliases,
@@ -24,12 +25,18 @@ from mypy.nodes import (
NO_RETURN_VALUE_EXPECTED = 'No return value expected'
MISSING_RETURN_STATEMENT = 'Missing return statement'
+INVALID_IMPLICIT_RETURN = 'Implicit return in function which does not return'
INCOMPATIBLE_RETURN_VALUE_TYPE = 'Incompatible return value type'
+RETURN_ANY = 'Returning Any from function with declared return type "{}"'
RETURN_VALUE_EXPECTED = 'Return value expected'
+NO_RETURN_EXPECTED = 'Return statement in function which does not return'
INVALID_EXCEPTION = 'Exception must be derived from BaseException'
INVALID_EXCEPTION_TYPE = 'Exception type must be derived from BaseException'
INVALID_RETURN_TYPE_FOR_GENERATOR = \
'The return type of a generator function should be "Generator" or one of its supertypes'
+INVALID_RETURN_TYPE_FOR_ASYNC_GENERATOR = \
+ 'The return type of an async generator function should be "AsyncGenerator" or one of its ' \
+ 'supertypes'
INVALID_GENERATOR_RETURN_ITEM_TYPE = \
'The return type of a generator function must be None in its third type parameter in Python 2'
YIELD_VALUE_EXPECTED = 'Yield value expected'
@@ -79,13 +86,14 @@ TYPEDDICT_ITEM_NAME_MUST_BE_STRING_LITERAL = \
MALFORMED_ASSERT = 'Assertion is always true, perhaps remove parentheses?'
NON_BOOLEAN_IN_CONDITIONAL = 'Condition must be a boolean'
DUPLICATE_TYPE_SIGNATURES = 'Function has duplicate type signatures'
+GENERIC_INSTANCE_VAR_CLASS_ACCESS = 'Access to generic instance variables via class is ambiguous'
ARG_CONSTRUCTOR_NAMES = {
ARG_POS: "Arg",
ARG_OPT: "DefaultArg",
ARG_NAMED: "NamedArg",
ARG_NAMED_OPT: "DefaultNamedArg",
- ARG_STAR: "StarArg",
+ ARG_STAR: "VarArg",
ARG_STAR2: "KwArg",
}
@@ -206,15 +214,15 @@ class MessageBuilder:
verbosity = max(verbosity - 1, 0))))
else:
constructor = ARG_CONSTRUCTOR_NAMES[arg_kind]
- if arg_kind in (ARG_STAR, ARG_STAR2):
+ if arg_kind in (ARG_STAR, ARG_STAR2) or arg_name is None:
arg_strings.append("{}({})".format(
constructor,
strip_quotes(self.format(arg_type))))
else:
- arg_strings.append("{}('{}', {})".format(
+ arg_strings.append("{}({}, {})".format(
constructor,
- arg_name,
- strip_quotes(self.format(arg_type))))
+ strip_quotes(self.format(arg_type)),
+ repr(arg_name)))
return 'Callable[[{}], {}]'.format(", ".join(arg_strings), return_type)
else:
@@ -241,6 +249,10 @@ class MessageBuilder:
if isinstance(typ, Instance):
itype = typ
# Get the short name of the type.
+ if itype.type.fullname() in ('types.ModuleType',
+ '_importlib_modulespec.ModuleType'):
+ # Make some common error messages simpler and tidier.
+ return 'Module'
if verbosity >= 2:
base_str = itype.type.fullname()
else:
@@ -311,14 +323,17 @@ class MessageBuilder:
return s
else:
return 'union type ({} items)'.format(len(items))
- elif isinstance(typ, Void):
- return 'None'
elif isinstance(typ, NoneTyp):
return 'None'
elif isinstance(typ, AnyType):
return '"Any"'
elif isinstance(typ, DeletedType):
return '<deleted>'
+ elif isinstance(typ, UninhabitedType):
+ if typ.is_noreturn:
+ return 'NoReturn'
+ else:
+ return '<nothing>'
elif isinstance(typ, TypeType):
return 'Type[{}]'.format(
strip_quotes(self.format_simple(typ.item, verbosity)))
@@ -361,8 +376,6 @@ class MessageBuilder:
if (isinstance(typ, Instance) and
typ.type.has_readable_member(member)):
self.fail('Member "{}" is not assignable'.format(member), context)
- elif self.check_unusable_type(typ, context):
- pass
elif member == '__contains__':
self.fail('Unsupported right operand type for in ({})'.format(
self.format(typ)), context)
@@ -384,8 +397,13 @@ class MessageBuilder:
self.format(typ)), context)
elif member == '__getitem__':
# Indexed get.
- self.fail('Value of type {} is not indexable'.format(
- self.format(typ)), context)
+ # TODO: Fix this consistently in self.format
+ if isinstance(typ, CallableType) and typ.is_type_obj():
+ self.fail('The type {} is not generic and not indexable'.format(
+ self.format(typ)), context)
+ else:
+ self.fail('Value of type {} is not indexable'.format(
+ self.format(typ)), context)
elif member == '__setitem__':
# Indexed set.
self.fail('Unsupported target for indexed assignment', context)
@@ -422,9 +440,6 @@ class MessageBuilder:
Types can be Type objects or strings.
"""
- if (self.check_unusable_type(left_type, context) or
- self.check_unusable_type(right_type, context)):
- return
left_str = ''
if isinstance(left_type, str):
left_str = left_type
@@ -446,13 +461,12 @@ class MessageBuilder:
def unsupported_left_operand(self, op: str, typ: Type,
context: Context) -> None:
- if not self.check_unusable_type(typ, context):
- if self.disable_type_names:
- msg = 'Unsupported left operand type for {} (some union)'.format(op)
- else:
- msg = 'Unsupported left operand type for {} ({})'.format(
- op, self.format(typ))
- self.fail(msg, context)
+ if self.disable_type_names:
+ msg = 'Unsupported left operand type for {} (some union)'.format(op)
+ else:
+ msg = 'Unsupported left operand type for {} ({})'.format(
+ op, self.format(typ))
+ self.fail(msg, context)
def not_callable(self, typ: Type, context: Context) -> Type:
self.fail('{} not callable'.format(self.format(typ)), context)
@@ -475,7 +489,10 @@ class MessageBuilder:
target = ''
if callee.name:
name = callee.name
- base = extract_type(name)
+ if callee.bound_args and callee.bound_args[0] is not None:
+ base = self.format(callee.bound_args[0])
+ else:
+ base = extract_type(name)
for op, method in op_methods.items():
for variant in method, '__r' + method[2:]:
@@ -512,7 +529,13 @@ class MessageBuilder:
name = callee.name[1:-1]
n -= 1
msg = '{} item {} has incompatible type {}'.format(
- name[0].upper() + name[1:], n, self.format_simple(arg_type))
+ name.title(), n, self.format_simple(arg_type))
+ elif callee.name == '<dict>':
+ name = callee.name[1:-1]
+ n -= 1
+ key_type, value_type = cast(TupleType, arg_type).items
+ msg = '{} entry {} has incompatible type {}: {}'.format(
+ name.title(), n, self.format_simple(key_type), self.format_simple(value_type))
elif callee.name == '<list-comprehension>':
msg = 'List comprehension has incompatible type List[{}]'.format(
strip_quotes(self.format(arg_type)))
@@ -556,7 +579,7 @@ class MessageBuilder:
msg = 'Missing positional argument'
else:
msg = 'Missing positional arguments'
- if callee.name and diff:
+ if callee.name and diff and all(d is not None for d in diff):
msg += ' "{}" in call to {}'.format('", "'.join(diff), callee.name)
else:
msg = 'Too few arguments'
@@ -603,15 +626,11 @@ class MessageBuilder:
format(capitalize(callable_name(callee)),
callee.arg_names[index]), context)
- def does_not_return_value(self, unusable_type: Type, context: Context) -> None:
- """Report an error about use of an unusable type.
-
- If the type is a Void type and has a source in it, report it in the error message.
- This allows giving messages such as 'Foo does not return a value'.
- """
- if isinstance(unusable_type, Void) and unusable_type.source is not None:
+ def does_not_return_value(self, callee_type: Type, context: Context) -> None:
+ """Report an error about use of an unusable type."""
+ if isinstance(callee_type, FunctionLike) and callee_type.get_name() is not None:
self.fail('{} does not return a value'.format(
- capitalize((cast(Void, unusable_type)).source)), context)
+ capitalize(callee_type.get_name())), context)
else:
self.fail('Function does not return a value', context)
@@ -643,12 +662,6 @@ class MessageBuilder:
else:
self.fail('No overload variant matches argument types {}'.format(arg_types), context)
- def invalid_cast(self, target_type: Type, source_type: Type,
- context: Context) -> None:
- if not self.check_unusable_type(source_type, context):
- self.fail('Cannot cast from {} to {}'.format(
- self.format(source_type), self.format(target_type)), context)
-
def wrong_number_values_to_unpack(self, provided: int, expected: int,
context: Context) -> None:
if provided < expected:
@@ -732,18 +745,6 @@ class MessageBuilder:
def undefined_in_superclass(self, member: str, context: Context) -> None:
self.fail('"{}" undefined in superclass'.format(member), context)
- def check_unusable_type(self, typ: Type, context: Context) -> bool:
- """If type is a type which is not meant to be used (like Void or
- NoneTyp(is_ret_type=True)), report an error such as '.. does not
- return a value' and return True. Otherwise, return False.
- """
- if (isinstance(typ, Void) or
- (isinstance(typ, NoneTyp) and typ.is_ret_type)):
- self.does_not_return_value(typ, context)
- return True
- else:
- return False
-
def too_few_string_formatting_arguments(self, context: Context) -> None:
self.fail('Not enough arguments for format string', context)
@@ -800,6 +801,9 @@ class MessageBuilder:
def cant_assign_to_method(self, context: Context) -> None:
self.fail(CANNOT_ASSIGN_TO_METHOD, context)
+ def cant_assign_to_classvar(self, name: str, context: Context) -> None:
+ self.fail('Cannot assign to class variable "%s" via instance' % name, context)
+
def read_only_property(self, name: str, type: TypeInfo,
context: Context) -> None:
self.fail('Property "{}" defined in "{}" is read-only'.format(
@@ -815,6 +819,14 @@ class MessageBuilder:
self.fail('Overloaded function signatures {} and {} overlap with '
'incompatible return types'.format(index1, index2), context)
+ def overloaded_signatures_arg_specific(self, index1: int, context: Context) -> None:
+ self.fail('Overloaded function implementation does not accept all possible arguments '
+ 'of signature {}'.format(index1), context)
+
+ def overloaded_signatures_ret_specific(self, index1: int, context: Context) -> None:
+ self.fail('Overloaded function implementation cannot produce return type '
+ 'of signature {}'.format(index1), context)
+
def operator_method_signatures_overlap(
self, reverse_class: str, reverse_method: str, forward_class: str,
forward_method: str, context: Context) -> None:
@@ -873,6 +885,9 @@ class MessageBuilder:
self.fail('\'{}\' is not a valid item name; expected one of {}'.format(
item_name, format_item_name_list(typ.items.keys())), context)
+ def type_arguments_not_allowed(self, context: Context) -> None:
+ self.fail('Parameterized generics cannot be used with class or instance checks', context)
+
def capitalize(s: str) -> str:
"""Capitalize the first character of a string."""
diff --git a/mypy/moduleinfo.py b/mypy/moduleinfo.py
index 6deca6f..ec6a8fd 100644
--- a/mypy/moduleinfo.py
+++ b/mypy/moduleinfo.py
@@ -225,6 +225,9 @@ third_party_modules = {
'PyQt4',
'PyQt5',
'pylons',
+
+ # for use in tests
+ '__dummy_third_party1',
}
# Modules and packages common to Python 2.7 and 3.x.
@@ -422,6 +425,9 @@ common_std_lib_modules = {
'xml.sax.xmlreader',
'zipfile',
'zlib',
+ # fake names to use in tests
+ '__dummy_stdlib1',
+ '__dummy_stdlib2',
}
# Python 2 standard library modules.
diff --git a/mypy/myunit/__init__.py b/mypy/myunit/__init__.py
deleted file mode 100644
index 26b9a45..0000000
--- a/mypy/myunit/__init__.py
+++ /dev/null
@@ -1,380 +0,0 @@
-import importlib
-import os
-import sys
-import re
-import tempfile
-import time
-import traceback
-
-from typing import List, Tuple, Any, Callable, Union, cast
-from types import TracebackType
-
-
-# TODO remove global state
-is_verbose = False
-is_quiet = False
-patterns = [] # type: List[str]
-times = [] # type: List[Tuple[float, str]]
-
-
-class AssertionFailure(Exception):
- """Exception used to signal failed test cases."""
- def __init__(self, s: str = None) -> None:
- if s:
- super().__init__(s)
- else:
- super().__init__()
-
-
-class SkipTestCaseException(Exception):
- """Exception used to signal skipped test cases."""
- pass
-
-
-def assert_true(b: bool, msg: str = None) -> None:
- if not b:
- raise AssertionFailure(msg)
-
-
-def assert_false(b: bool, msg: str = None) -> None:
- if b:
- raise AssertionFailure(msg)
-
-
-def good_repr(obj: object) -> str:
- if isinstance(obj, str):
- if obj.count('\n') > 1:
- bits = ["'''\\"]
- for line in obj.split('\n'):
- # force repr to use ' not ", then cut it off
- bits.append(repr('"' + line)[2:-1])
- bits[-1] += "'''"
- return '\n'.join(bits)
- return repr(obj)
-
-
-def assert_equal(a: object, b: object, fmt: str = '{} != {}') -> None:
- if a != b:
- raise AssertionFailure(fmt.format(good_repr(a), good_repr(b)))
-
-
-def assert_not_equal(a: object, b: object, fmt: str = '{} == {}') -> None:
- if a == b:
- raise AssertionFailure(fmt.format(good_repr(a), good_repr(b)))
-
-
-def assert_raises(typ: type, *rest: Any) -> None:
- """Usage: assert_raises(exception class[, message], function[, args])
-
- Call function with the given arguments and expect an exception of the given
- type.
-
- TODO use overloads for better type checking
- """
- # Parse arguments.
- msg = None # type: str
- if isinstance(rest[0], str) or rest[0] is None:
- msg = rest[0]
- rest = rest[1:]
- f = rest[0]
- args = [] # type: List[Any]
- if len(rest) > 1:
- args = rest[1]
- assert len(rest) <= 2
-
- # Perform call and verify the exception.
- try:
- f(*args)
- except BaseException as e:
- if isinstance(e, KeyboardInterrupt):
- raise
- assert_type(typ, e)
- if msg:
- assert_equal(e.args[0], msg, 'Invalid message {}, expected {}')
- else:
- raise AssertionFailure('No exception raised')
-
-
-def assert_type(typ: type, value: object) -> None:
- if type(value) != typ:
- raise AssertionFailure('Invalid type {}, expected {}'.format(
- typename(type(value)), typename(typ)))
-
-
-def fail() -> None:
- raise AssertionFailure()
-
-
-class TestCase:
- def __init__(self, name: str, suite: 'Suite' = None,
- func: Callable[[], None] = None) -> None:
- self.func = func
- self.name = name
- self.suite = suite
- self.old_cwd = None # type: str
- self.tmpdir = None # type: tempfile.TemporaryDirectory
-
- def run(self) -> None:
- if self.func:
- self.func()
-
- def set_up(self) -> None:
- self.old_cwd = os.getcwd()
- self.tmpdir = tempfile.TemporaryDirectory(prefix='mypy-test-',
- dir=os.path.abspath('tmp-test-dirs'))
- os.chdir(self.tmpdir.name)
- os.mkdir('tmp')
- if self.suite:
- self.suite.set_up()
-
- def tear_down(self) -> None:
- if self.suite:
- self.suite.tear_down()
- os.chdir(self.old_cwd)
- self.tmpdir.cleanup()
- self.old_cwd = None
- self.tmpdir = None
-
-
-class Suite:
- def __init__(self) -> None:
- self.prefix = typename(type(self)) + '.'
- # Each test case is either a TestCase object or (str, function).
- self._test_cases = [] # type: List[Any]
- self.init()
-
- def set_up(self) -> None:
- pass
-
- def tear_down(self) -> None:
- pass
-
- def init(self) -> None:
- for m in dir(self):
- if m.startswith('test'):
- t = getattr(self, m)
- if isinstance(t, Suite):
- self.add_test((m + '.', t))
- else:
- self.add_test(TestCase(m, self, getattr(self, m)))
-
- def add_test(self, test: Union[TestCase,
- Tuple[str, Callable[[], None]],
- Tuple[str, 'Suite']]) -> None:
- self._test_cases.append(test)
-
- def cases(self) -> List[Any]:
- return self._test_cases[:]
-
- def skip(self) -> None:
- raise SkipTestCaseException()
-
-
-def add_suites_from_module(suites: List[Suite], mod_name: str) -> None:
- mod = importlib.import_module(mod_name)
- got_suite = False
- for suite in mod.__dict__.values():
- if isinstance(suite, type) and issubclass(suite, Suite) and suite is not Suite:
- got_suite = True
- suites.append(cast(Callable[[], Suite], suite)())
- if not got_suite:
- # Sanity check in case e.g. it uses unittest instead of myunit.
- # The codecs tests do since they need to be python2-compatible.
- sys.exit('Test module %s had no test!' % mod_name)
-
-
-class ListSuite(Suite):
- def __init__(self, suites: List[Suite]) -> None:
- for suite in suites:
- mod_name = type(suite).__module__.replace('.', '_')
- mod_name = mod_name.replace('mypy_', '')
- mod_name = mod_name.replace('test_', '')
- mod_name = mod_name.strip('_').replace('__', '_')
- type_name = type(suite).__name__
- name = 'test_%s_%s' % (mod_name, type_name)
- setattr(self, name, suite)
- super().__init__()
-
-
-def main(args: List[str] = None) -> None:
- global patterns, is_verbose, is_quiet
- if not args:
- args = sys.argv[1:]
- is_verbose = False
- is_quiet = False
- suites = [] # type: List[Suite]
- patterns = []
- i = 0
- while i < len(args):
- a = args[i]
- if a == '-v':
- is_verbose = True
- elif a == '-q':
- is_quiet = True
- elif a == '-m':
- i += 1
- if i == len(args):
- sys.exit('-m requires an argument')
- add_suites_from_module(suites, args[i])
- elif not a.startswith('-'):
- patterns.append(a)
- else:
- sys.exit('Usage: python -m mypy.myunit [-v] [-q]'
- + ' -m mypy.test.module [-m mypy.test.module ...] [filter ...]')
- i += 1
- if len(patterns) == 0:
- patterns.append('*')
- if not suites:
- sys.exit('At least one -m argument is required')
-
- t = ListSuite(suites)
- num_total, num_fail, num_skip = run_test_recursive(t, 0, 0, 0, '', 0)
-
- skip_msg = ''
- if num_skip > 0:
- skip_msg = ', {} skipped'.format(num_skip)
-
- if num_fail == 0:
- if not is_quiet:
- print('%d test cases run%s, all passed.' % (num_total, skip_msg))
- print('*** OK ***')
- else:
- sys.stderr.write('%d/%d test cases failed%s.\n' % (num_fail,
- num_total,
- skip_msg))
- sys.stderr.write('*** FAILURE ***\n')
- sys.exit(1)
-
-
-def run_test_recursive(test: Any, num_total: int, num_fail: int, num_skip: int,
- prefix: str, depth: int) -> Tuple[int, int, int]:
- """The first argument may be TestCase, Suite or (str, Suite)."""
- if isinstance(test, TestCase):
- name = prefix + test.name
- for pattern in patterns:
- if match_pattern(name, pattern):
- match = True
- break
- else:
- match = False
- if match:
- is_fail, is_skip = run_single_test(name, test)
- if is_fail: num_fail += 1
- if is_skip: num_skip += 1
- num_total += 1
- else:
- suite = None # type: Suite
- suite_prefix = ''
- if isinstance(test, list) or isinstance(test, tuple):
- suite = test[1]
- suite_prefix = test[0]
- else:
- suite = test
- suite_prefix = test.prefix
-
- for stest in suite.cases():
- new_prefix = prefix
- if depth > 0:
- new_prefix = prefix + suite_prefix
- num_total, num_fail, num_skip = run_test_recursive(
- stest, num_total, num_fail, num_skip, new_prefix, depth + 1)
- return num_total, num_fail, num_skip
-
-
-def run_single_test(name: str, test: Any) -> Tuple[bool, bool]:
- if is_verbose:
- sys.stderr.write(name)
- sys.stderr.flush()
-
- time0 = time.time()
- test.set_up() # FIX: check exceptions
- exc_traceback = None # type: Any
- try:
- test.run()
- except BaseException as e:
- if isinstance(e, KeyboardInterrupt):
- raise
- exc_type, exc_value, exc_traceback = sys.exc_info()
- test.tear_down() # FIX: check exceptions
- times.append((time.time() - time0, name))
-
- if exc_traceback:
- if isinstance(exc_value, SkipTestCaseException):
- if is_verbose:
- sys.stderr.write(' (skipped)\n')
- return False, True
- else:
- handle_failure(name, exc_type, exc_value, exc_traceback)
- return True, False
- elif is_verbose:
- sys.stderr.write('\n')
-
- return False, False
-
-
-def handle_failure(name: str,
- exc_type: type,
- exc_value: BaseException,
- exc_traceback: TracebackType,
- ) -> None:
- # Report failed test case.
- if is_verbose:
- sys.stderr.write('\n\n')
- msg = ''
- if exc_value.args and exc_value.args[0]:
- msg = ': ' + str(exc_value)
- else:
- msg = ''
- if not isinstance(exc_value, SystemExit):
- # We assume that before doing exit() (which raises SystemExit) we've printed
- # enough context about what happened so that a stack trace is not useful.
- # In particular, uncaught exceptions during semantic analysis or type checking
- # call exit() and they already print out a stack trace.
- sys.stderr.write('Traceback (most recent call last):\n')
- tb = traceback.format_tb(exc_traceback)
- tb = clean_traceback(tb)
- for s in tb:
- sys.stderr.write(s)
- else:
- sys.stderr.write('\n')
- exception = typename(exc_type)
- sys.stderr.write('{}{}\n\n'.format(exception, msg))
- sys.stderr.write('{} failed\n\n'.format(name))
-
-
-def typename(t: type) -> str:
- if '.' in str(t):
- return str(t).split('.')[-1].rstrip("'>")
- else:
- return str(t)[8:-2]
-
-
-def match_pattern(s: str, p: str) -> bool:
- if len(p) == 0:
- return len(s) == 0
- elif p[0] == '*':
- if len(p) == 1:
- return True
- else:
- for i in range(len(s) + 1):
- if match_pattern(s[i:], p[1:]):
- return True
- return False
- elif len(s) == 0:
- return False
- else:
- return s[0] == p[0] and match_pattern(s[1:], p[1:])
-
-
-def clean_traceback(tb: List[str]) -> List[str]:
- # Remove clutter from the traceback.
- start = 0
- for i, s in enumerate(tb):
- if '\n test.run()\n' in s or '\n self.func()\n' in s:
- start = i + 1
- tb = tb[start:]
- for f in ['assert_equal', 'assert_not_equal', 'assert_type',
- 'assert_raises', 'assert_true']:
- if tb != [] and ', in {}\n'.format(f) in tb[-1]:
- tb = tb[:-1]
- return tb
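The deleted runner selected tests with a tiny recursive matcher: '*' matches any substring and every other character is literal. A standalone version of the same idea (fnmatch.fnmatchcase from the standard library would also cover this, but additionally interprets '?' and '[...]'):

def match_pattern(s: str, p: str) -> bool:
    # '*' matches any (possibly empty) run of characters; other characters are literal.
    if not p:
        return not s
    if p[0] == '*':
        if len(p) == 1:
            return True
        return any(match_pattern(s[i:], p[1:]) for i in range(len(s) + 1))
    return bool(s) and s[0] == p[0] and match_pattern(s[1:], p[1:])

assert match_pattern('check.TypeCheckSuite.testFoo', '*testFoo')
assert not match_pattern('testFooBar', 'testFoo')
assert match_pattern('anything', '*')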
diff --git a/mypy/myunit/__main__.py b/mypy/myunit/__main__.py
deleted file mode 100644
index 78ef01f..0000000
--- a/mypy/myunit/__main__.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# This is a separate module from mypy.myunit so it doesn't exist twice.
-"""Myunit test runner command line tool.
-
-Usually used as a slave by runtests.py, but can be used directly.
-"""
-
-from mypy.myunit import main
-
-# In Python 3.3, mypy.__path__ contains a relative path to the mypy module
-# (whereas in later Python versions it contains an absolute path). Because the
-# test runner changes directories, this breaks non-toplevel mypy imports. We
-# fix that problem by fixing up the path to be absolute here.
-import os.path
-import mypy
-# User-defined packages always have __path__ attributes, but mypy doesn't know that.
-mypy.__path__ = [os.path.abspath(p) for p in mypy.__path__] # type: ignore
-
-main()
diff --git a/mypy/nodes.py b/mypy/nodes.py
index c9485fe..05901d0 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -4,13 +4,12 @@ import os
from abc import abstractmethod
from typing import (
- Any, TypeVar, List, Tuple, cast, Set, Dict, Union, Optional
+ Any, TypeVar, List, Tuple, cast, Set, Dict, Union, Optional, Callable,
)
-from mypy.lex import Token
import mypy.strconv
-from mypy.visitor import NodeVisitor, ExpressionVisitor
-from mypy.util import dump_tagged, short_type
+from mypy.visitor import NodeVisitor, StatementVisitor, ExpressionVisitor
+from mypy.util import short_type
class Context:
@@ -40,12 +39,12 @@ LDEF = 0 # type: int
GDEF = 1 # type: int
MDEF = 2 # type: int
MODULE_REF = 3 # type: int
-# Type variable declared using TypeVar(...) has kind UNBOUND_TVAR. It's not
-# valid as a type. A type variable is valid as a type (kind BOUND_TVAR) within
+# Type variable declared using TypeVar(...) has kind TVAR. It's not
+# valid as a type unless bound in a TypeVarScope. That happens within:
# (1) a generic class that uses the type variable as a type argument or
# (2) a generic function that refers to the type variable in its signature.
-UNBOUND_TVAR = 4 # type: int
-BOUND_TVAR = 5 # type: int
+TVAR = 4 # type: int
+
TYPE_ALIAS = 6 # type: int
# Placeholder for a name imported via 'from ... import'. Second phase of
# semantic analysis will replace this with the actual imported reference. This is
@@ -66,8 +65,7 @@ node_kinds = {
GDEF: 'Gdef',
MDEF: 'Mdef',
MODULE_REF: 'ModuleRef',
- UNBOUND_TVAR: 'UnboundTvar',
- BOUND_TVAR: 'Tvar',
+ TVAR: 'Tvar',
TYPE_ALIAS: 'TypeAlias',
UNBOUND_IMPORTED: 'UnboundImported',
}
@@ -84,11 +82,28 @@ type_aliases = {
'typing.List': '__builtins__.list',
'typing.Dict': '__builtins__.dict',
'typing.Set': '__builtins__.set',
+ 'typing.FrozenSet': '__builtins__.frozenset',
}
reverse_type_aliases = dict((name.replace('__builtins__', 'builtins'), alias)
for alias, name in type_aliases.items()) # type: Dict[str, str]
+collections_type_aliases = {
+ 'typing.ChainMap': '__mypy_collections__.ChainMap',
+ 'typing.Counter': '__mypy_collections__.Counter',
+ 'typing.DefaultDict': '__mypy_collections__.defaultdict',
+ 'typing.Deque': '__mypy_collections__.deque',
+}
+
+reverse_collection_aliases = dict((name.replace('__mypy_collections__', 'collections'), alias)
+ for alias, name in
+ collections_type_aliases.items()) # type: Dict[str, str]
+
+nongen_builtins = {'builtins.tuple': 'typing.Tuple',
+ 'builtins.enumerate': ''}
+nongen_builtins.update(reverse_type_aliases)
+nongen_builtins.update(reverse_collection_aliases)
+
# See [Note Literals and literal_hash] below
Key = tuple
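The tables above are plain dictionaries: type_aliases maps typing names onto their builtin
implementations, the two reverse_* maps invert that mapping, and nongen_builtins keys the
runtime names that cannot be used as generic types directly, pairing each with the alias to
suggest. A minimal standalone sketch of the same dict-reversal pattern (illustrative names
only, not mypy's code):

    # Forward map: typing name -> runtime implementation (a made-up subset).
    forward = {
        'typing.List': 'builtins.list',
        'typing.Deque': 'collections.deque',
    }
    # Reverse map: runtime name -> preferred typing alias.
    reverse = {impl: alias for alias, impl in forward.items()}

    assert reverse['builtins.list'] == 'typing.List'
    # A checker can now suggest 'typing.List' when it sees 'builtins.list'
    # being subscripted with type arguments.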
@@ -100,21 +115,16 @@ class Node(Context):
line = -1
column = -1
- # TODO: Move to Expression
- # See [Note Literals and literal_hash] below
- literal = LITERAL_NO
- literal_hash = None # type: Key
-
def __str__(self) -> str:
ans = self.accept(mypy.strconv.StrConv())
if ans is None:
return repr(self)
return ans
- def set_line(self, target: Union[Token, 'Node', int], column: int = None) -> None:
- """If target is a node or token, pull line (and column) information
+ def set_line(self, target: Union['Node', int], column: int = None) -> None:
+ """If target is a node, pull line (and column) information
into this node. If column is specified, this will override any column
- information coming from a node/token.
+ information coming from a node.
"""
if isinstance(target, int):
self.line = target
@@ -139,10 +149,15 @@ class Node(Context):
class Statement(Node):
"""A statement node."""
+ def accept(self, visitor: StatementVisitor[T]) -> T:
+ raise RuntimeError('Not implemented')
class Expression(Node):
"""An expression node."""
+ literal = LITERAL_NO
+ literal_hash = None # type: Key
+
def accept(self, visitor: ExpressionVisitor[T]) -> T:
raise RuntimeError('Not implemented')
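Almost every hunk below swaps NodeVisitor for the narrower StatementVisitor or
ExpressionVisitor in an accept() method; all of them implement the same double-dispatch
visitor scheme, where a node forwards itself to the matching visit_* method so new traversals
can be added without touching the node classes. A self-contained sketch of that scheme with
hypothetical class names (not mypy's actual visitors):

    from typing import Generic, TypeVar

    T = TypeVar('T')

    class MiniStatementVisitor(Generic[T]):
        def visit_pass_stmt(self, stmt: 'MiniPassStmt') -> T:
            raise NotImplementedError

    class MiniPassStmt:
        def accept(self, visitor: MiniStatementVisitor[T]) -> T:
            # Double dispatch: the node selects the visitor method.
            return visitor.visit_pass_stmt(self)

    class Describe(MiniStatementVisitor[str]):
        def visit_pass_stmt(self, stmt: MiniPassStmt) -> str:
            return 'pass statement'

    print(MiniPassStmt().accept(Describe()))  # -> pass statement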
@@ -208,11 +223,9 @@ class SymbolNode(Node):
@classmethod
def deserialize(cls, data: JsonDict) -> 'SymbolNode':
classname = data['.class']
- glo = globals()
- if classname in glo:
- cl = glo[classname]
- if issubclass(cl, cls) and 'deserialize' in cl.__dict__:
- return cl.deserialize(data)
+ method = deserialize_map.get(classname)
+ if method is not None:
+ return method(data)
raise NotImplementedError('unexpected .class {}'.format(classname))
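SymbolNode.deserialize now dispatches through deserialize_map, a registry of concrete
SymbolNode subclasses keyed by class name and built once near the end of this file, instead of
scanning globals() on every call. A rough sketch of that registry-dispatch idea with made-up
class names (assumptions: one base class and a '.class' tag in the serialized data, as above):

    from typing import Any, Callable, Dict

    class Base:
        @classmethod
        def deserialize(cls, data: Dict[str, Any]) -> 'Base':
            # Look the concrete class up by its serialized tag.
            return DESERIALIZE_MAP[data['.class']](data)

    class Leaf(Base):
        @classmethod
        def deserialize(cls, data: Dict[str, Any]) -> 'Leaf':
            return Leaf()

    # Built once, after all subclasses have been defined.
    DESERIALIZE_MAP = {
        sub.__name__: sub.deserialize for sub in Base.__subclasses__()
    }  # type: Dict[str, Callable[[Dict[str, Any]], Base]]

    assert isinstance(Base.deserialize({'.class': 'Leaf'}), Leaf)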
@@ -313,7 +326,7 @@ class Import(ImportBase):
super().__init__()
self.ids = ids
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_import(self)
@@ -330,7 +343,7 @@ class ImportFrom(ImportBase):
self.names = names
self.relative = relative
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_import_from(self)
@@ -344,7 +357,7 @@ class ImportAll(ImportBase):
self.id = id
self.relative = relative
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_import_all(self)
@@ -353,7 +366,9 @@ class FuncBase(Node):
# Type signature. This is usually CallableType or Overloaded, but it can be something else for
# decorated functions.
- type = None # type: mypy.types.Type
+ type = None # type: Optional[mypy.types.Type]
+ # Original, not semantically analyzed type (used for reprocessing)
+ unanalyzed_type = None # type: Optional[mypy.types.Type]
# If method, reference to TypeInfo
info = None # type: TypeInfo
is_property = False
@@ -366,23 +381,32 @@ class FuncBase(Node):
return self._fullname
+OverloadPart = Union['FuncDef', 'Decorator']
+
+
class OverloadedFuncDef(FuncBase, SymbolNode, Statement):
- """A logical node representing all the variants of an overloaded function.
+ """A logical node representing all the variants of a multi-declaration function.
+
+ A multi-declaration function is often an @overload, but can also be a
+ @property with a setter and/or a deleter.
This node has no explicit representation in the source program.
Overloaded variants must be consecutive in the source file.
"""
- items = None # type: List[Decorator]
+ items = None # type: List[OverloadPart]
+ impl = None # type: Optional[OverloadPart]
- def __init__(self, items: List['Decorator']) -> None:
+ def __init__(self, items: List['OverloadPart']) -> None:
+ assert len(items) > 0
self.items = items
+ self.impl = None
self.set_line(items[0].line)
def name(self) -> str:
- return self.items[0].func.name()
+ return self.items[0].name()
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_overloaded_func_def(self)
def serialize(self) -> JsonDict:
@@ -391,14 +415,19 @@ class OverloadedFuncDef(FuncBase, SymbolNode, Statement):
'type': None if self.type is None else self.type.serialize(),
'fullname': self._fullname,
'is_property': self.is_property,
+ 'impl': None if self.impl is None else self.impl.serialize()
}
@classmethod
def deserialize(cls, data: JsonDict) -> 'OverloadedFuncDef':
assert data['.class'] == 'OverloadedFuncDef'
- res = OverloadedFuncDef([Decorator.deserialize(d) for d in data['items']])
+ res = OverloadedFuncDef([
+ cast(OverloadPart, SymbolNode.deserialize(d))
+ for d in data['items']])
+ if data.get('impl') is not None:
+ res.impl = cast(OverloadPart, SymbolNode.deserialize(data['impl']))
if data.get('type') is not None:
- res.type = mypy.types.Type.deserialize(data['type'])
+ res.type = mypy.types.deserialize_type(data['type'])
res._fullname = data['fullname']
res.is_property = data['is_property']
# NOTE: res.info will be set in the fixup phase.
@@ -411,7 +440,7 @@ class Argument(Node):
variable = None # type: Var
type_annotation = None # type: Optional[mypy.types.Type]
initializer = None # type: Optional[Expression]
- kind = None # type: int
+ kind = None # type: int # must be an ARG_* constant
initialization_statement = None # type: Optional[AssignmentStmt]
def __init__(self, variable: 'Var', type_annotation: 'Optional[mypy.types.Type]',
@@ -439,7 +468,7 @@ class Argument(Node):
assign = AssignmentStmt([lvalue], rvalue)
return assign
- def set_line(self, target: Union[Token, Node, int], column: int = None) -> None:
+ def set_line(self, target: Union[Node, int], column: int = None) -> None:
super().set_line(target, column)
if self.initializer:
@@ -465,6 +494,7 @@ class FuncItem(FuncBase):
is_overload = False
is_generator = False # Contains a yield statement?
is_coroutine = False # Defined using 'async def' syntax?
+ is_async_generator = False # Is an async def generator?
is_awaitable_coroutine = False # Decorated with '@{typing,asyncio}.coroutine'?
is_static = False # Uses @staticmethod?
is_class = False # Uses @classmethod?
@@ -472,8 +502,8 @@ class FuncItem(FuncBase):
expanded = None # type: List[FuncItem]
FLAGS = [
- 'is_overload', 'is_generator', 'is_coroutine', 'is_awaitable_coroutine',
- 'is_static', 'is_class',
+ 'is_overload', 'is_generator', 'is_coroutine', 'is_async_generator',
+ 'is_awaitable_coroutine', 'is_static', 'is_class',
]
def __init__(self, arguments: List[Argument], body: 'Block',
@@ -484,6 +514,7 @@ class FuncItem(FuncBase):
self.max_pos = self.arg_kinds.count(ARG_POS) + self.arg_kinds.count(ARG_OPT)
self.body = body
self.type = typ
+ self.unanalyzed_type = typ
self.expanded = []
self.min_args = 0
@@ -494,7 +525,7 @@ class FuncItem(FuncBase):
def max_fixed_argc(self) -> int:
return self.max_pos
- def set_line(self, target: Union[Token, Node, int], column: int = None) -> None:
+ def set_line(self, target: Union[Node, int], column: int = None) -> None:
super().set_line(target, column)
for arg in self.arguments:
arg.set_line(self.line, self.column)
@@ -530,7 +561,7 @@ class FuncDef(FuncItem, SymbolNode, Statement):
def name(self) -> str:
return self._name
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_func_def(self)
def serialize(self) -> JsonDict:
@@ -558,16 +589,18 @@ class FuncDef(FuncItem, SymbolNode, Statement):
[],
body,
(None if data['type'] is None
- else mypy.types.FunctionLike.deserialize(data['type'])))
+ else cast(mypy.types.FunctionLike,
+ mypy.types.deserialize_type(data['type']))))
ret._fullname = data['fullname']
set_flags(ret, data['flags'])
# NOTE: ret.info is set in the fixup phase.
ret.arg_names = data['arg_names']
ret.arg_kinds = data['arg_kinds']
# Mark these as 'None' so that future uses will trigger an error
- ret.arguments = None
- ret.max_pos = None
- ret.min_args = None
+ _dummy = None # type: Any
+ ret.arguments = _dummy
+ ret.max_pos = _dummy
+ ret.min_args = _dummy
return ret
@@ -595,7 +628,7 @@ class Decorator(SymbolNode, Statement):
def fullname(self) -> str:
return self.func.fullname()
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_decorator(self)
def serialize(self) -> JsonDict:
@@ -624,7 +657,7 @@ class Var(SymbolNode):
_name = None # type: str # Name without module prefix
_fullname = None # type: str # Name with module prefix
info = None # type: TypeInfo # Defining class (for member variables)
- type = None # type: mypy.types.Type # Declared or inferred type, or None
+ type = None # type: Optional[mypy.types.Type] # Declared or inferred type, or None
# Is this the first argument to an ordinary method (usually "self")?
is_self = False
is_ready = False # If inferred, is the inferred type available?
@@ -635,13 +668,15 @@ class Var(SymbolNode):
is_classmethod = False
is_property = False
is_settable_property = False
+ is_classvar = False
# Set to true when this variable refers to a module we were unable to
# parse for some reason (eg a silenced module)
is_suppressed_import = False
FLAGS = [
'is_self', 'is_ready', 'is_initialized_in_class', 'is_staticmethod',
- 'is_classmethod', 'is_property', 'is_settable_property', 'is_suppressed_import'
+ 'is_classmethod', 'is_property', 'is_settable_property', 'is_suppressed_import',
+ 'is_classvar'
]
def __init__(self, name: str, type: 'mypy.types.Type' = None) -> None:
@@ -659,7 +694,7 @@ class Var(SymbolNode):
def fullname(self) -> str:
return self._fullname
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_var(self)
def serialize(self) -> JsonDict:
@@ -677,7 +712,7 @@ class Var(SymbolNode):
def deserialize(cls, data: JsonDict) -> 'Var':
assert data['.class'] == 'Var'
name = data['name']
- type = None if data['type'] is None else mypy.types.Type.deserialize(data['type'])
+ type = None if data['type'] is None else mypy.types.deserialize_type(data['type'])
v = Var(name, type)
v._fullname = data['fullname']
set_flags(v, data['flags'])
@@ -694,7 +729,7 @@ class ClassDef(Statement):
# Base class expressions (not semantically analyzed -- can be arbitrary expressions)
base_type_exprs = None # type: List[Expression]
info = None # type: TypeInfo # Related TypeInfo
- metaclass = ''
+ metaclass = '' # type: Optional[str]
decorators = None # type: List[Expression]
has_incompatible_baseclass = False
@@ -711,7 +746,7 @@ class ClassDef(Statement):
self.metaclass = metaclass
self.decorators = []
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_class_def(self)
def is_generic(self) -> bool:
@@ -746,7 +781,7 @@ class GlobalDecl(Statement):
def __init__(self, names: List[str]) -> None:
self.names = names
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_global_decl(self)
@@ -758,7 +793,7 @@ class NonlocalDecl(Statement):
def __init__(self, names: List[str]) -> None:
self.names = names
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_nonlocal_decl(self)
@@ -772,7 +807,7 @@ class Block(Statement):
def __init__(self, body: List[Statement]) -> None:
self.body = body
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_block(self)
@@ -786,7 +821,7 @@ class ExpressionStmt(Statement):
def __init__(self, expr: Expression) -> None:
self.expr = expr
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_expression_stmt(self)
@@ -802,7 +837,9 @@ class AssignmentStmt(Statement):
lvalues = None # type: List[Lvalue]
rvalue = None # type: Expression
# Declared type in a comment, may be None.
- type = None # type: mypy.types.Type
+ type = None # type: Optional[mypy.types.Type]
+ # Original, not semantically analyzed type in annotation (used for reprocessing)
+ unanalyzed_type = None # type: Optional[mypy.types.Type]
# This indicates usage of PEP 526 type annotation syntax in assignment.
new_syntax = False # type: bool
@@ -811,9 +848,10 @@ class AssignmentStmt(Statement):
self.lvalues = lvalues
self.rvalue = rvalue
self.type = type
+ self.unanalyzed_type = type
self.new_syntax = new_syntax
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_assignment_stmt(self)
@@ -829,21 +867,21 @@ class OperatorAssignmentStmt(Statement):
self.lvalue = lvalue
self.rvalue = rvalue
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_operator_assignment_stmt(self)
class WhileStmt(Statement):
expr = None # type: Expression
body = None # type: Block
- else_body = None # type: Block
+ else_body = None # type: Optional[Block]
- def __init__(self, expr: Expression, body: Block, else_body: Block) -> None:
+ def __init__(self, expr: Expression, body: Block, else_body: Optional[Block]) -> None:
self.expr = expr
self.body = body
self.else_body = else_body
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_while_stmt(self)
@@ -851,22 +889,22 @@ class ForStmt(Statement):
# Index variables
index = None # type: Lvalue
# Type given by type comments for index, can be None
- index_type = None # type: mypy.types.Type
+ index_type = None # type: Optional[mypy.types.Type]
# Expression to iterate
expr = None # type: Expression
body = None # type: Block
- else_body = None # type: Block
+ else_body = None # type: Optional[Block]
is_async = False # True if `async for ...` (PEP 492, Python 3.5)
def __init__(self, index: Lvalue, expr: Expression, body: Block,
- else_body: Block, index_type: 'mypy.types.Type' = None) -> None:
+ else_body: Optional[Block], index_type: 'mypy.types.Type' = None) -> None:
self.index = index
self.index_type = index_type
self.expr = expr
self.body = body
self.else_body = else_body
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_for_stmt(self)
@@ -876,7 +914,7 @@ class ReturnStmt(Statement):
def __init__(self, expr: Optional[Expression]) -> None:
self.expr = expr
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_return_stmt(self)
@@ -888,7 +926,7 @@ class AssertStmt(Statement):
self.expr = expr
self.msg = msg
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_assert_stmt(self)
@@ -898,63 +936,66 @@ class DelStmt(Statement):
def __init__(self, expr: Lvalue) -> None:
self.expr = expr
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_del_stmt(self)
class BreakStmt(Statement):
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_break_stmt(self)
class ContinueStmt(Statement):
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_continue_stmt(self)
class PassStmt(Statement):
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_pass_stmt(self)
class IfStmt(Statement):
expr = None # type: List[Expression]
body = None # type: List[Block]
- else_body = None # type: Block
+ else_body = None # type: Optional[Block]
def __init__(self, expr: List[Expression], body: List[Block],
- else_body: Block) -> None:
+ else_body: Optional[Block]) -> None:
self.expr = expr
self.body = body
self.else_body = else_body
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_if_stmt(self)
class RaiseStmt(Statement):
- expr = None # type: Expression
- from_expr = None # type: Expression
+ # Plain 'raise' is a valid statement.
+ expr = None # type: Optional[Expression]
+ from_expr = None # type: Optional[Expression]
- def __init__(self, expr: Expression, from_expr: Expression = None) -> None:
+ def __init__(self, expr: Optional[Expression], from_expr: Optional[Expression]) -> None:
self.expr = expr
self.from_expr = from_expr
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_raise_stmt(self)
class TryStmt(Statement):
body = None # type: Block # Try body
- types = None # type: List[Expression] # Except type expressions
- vars = None # type: List[NameExpr] # Except variable names
+ # Plain 'except:' also possible
+ types = None # type: List[Optional[Expression]] # Except type expressions
+ vars = None # type: List[Optional[NameExpr]] # Except variable names
handlers = None # type: List[Block] # Except bodies
- else_body = None # type: Block
- finally_body = None # type: Block
+ else_body = None # type: Optional[Block]
+ finally_body = None # type: Optional[Block]
- def __init__(self, body: Block, vars: List['NameExpr'], types: List[Expression],
- handlers: List[Block], else_body: Block,
- finally_body: Block) -> None:
+ def __init__(self, body: Block, vars: List[Optional['NameExpr']],
+ types: List[Optional[Expression]],
+ handlers: List[Block], else_body: Optional[Block],
+ finally_body: Optional[Block]) -> None:
self.body = body
self.vars = vars
self.types = types
@@ -962,26 +1003,26 @@ class TryStmt(Statement):
self.else_body = else_body
self.finally_body = finally_body
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_try_stmt(self)
class WithStmt(Statement):
expr = None # type: List[Expression]
- target = None # type: List[Lvalue]
+ target = None # type: List[Optional[Lvalue]]
# Type given by type comments for target, can be None
- target_type = None # type: mypy.types.Type
+ target_type = None # type: Optional[mypy.types.Type]
body = None # type: Block
is_async = False # True if `async with ...` (PEP 492, Python 3.5)
- def __init__(self, expr: List[Expression], target: List[Lvalue],
+ def __init__(self, expr: List[Expression], target: List[Optional[Lvalue]],
body: Block, target_type: 'mypy.types.Type' = None) -> None:
self.expr = expr
self.target = target
self.target_type = target_type
self.body = body
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_with_stmt(self)
@@ -998,7 +1039,7 @@ class PrintStmt(Statement):
self.newline = newline
self.target = target
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_print_stmt(self)
@@ -1016,7 +1057,7 @@ class ExecStmt(Statement):
self.variables1 = variables1
self.variables2 = variables2
- def accept(self, visitor: NodeVisitor[T]) -> T:
+ def accept(self, visitor: StatementVisitor[T]) -> T:
return visitor.visit_exec_stmt(self)
@@ -1146,7 +1187,7 @@ class RefExpr(Expression):
"""Abstract base class for name-like constructs"""
kind = None # type: int # LDEF/GDEF/MDEF/... (None if not available)
- node = None # type: SymbolNode # Var, FuncDef or TypeInfo that describes this
+ node = None # type: Optional[SymbolNode] # Var, FuncDef or TypeInfo that describes this
fullname = None # type: str # Fully qualified name (or name if not global)
# Does this define a new name with inferred type?
@@ -1242,13 +1283,13 @@ class CallExpr(Expression):
args = None # type: List[Expression]
arg_kinds = None # type: List[int] # ARG_ constants
# Each name can be None if not a keyword argument.
- arg_names = None # type: List[str]
+ arg_names = None # type: List[Optional[str]]
# If not None, the node that represents the meaning of the CallExpr. For
# cast(...) this is a CastExpr.
analyzed = None # type: Optional[Expression]
def __init__(self, callee: Expression, args: List[Expression], arg_kinds: List[int],
- arg_names: List[str] = None, analyzed: Expression = None) -> None:
+ arg_names: List[Optional[str]] = None, analyzed: Expression = None) -> None:
if not arg_names:
arg_names = [None] * len(args)
@@ -1294,7 +1335,7 @@ class IndexExpr(Expression):
method_type = None # type: mypy.types.Type
# If not None, this is actually semantically a type application
# Class[type, ...] or a type alias initializer.
- analyzed = None # type: Union[TypeApplication, TypeAliasExpr]
+ analyzed = None # type: Union[TypeApplication, TypeAliasExpr, None]
def __init__(self, base: Expression, index: Expression) -> None:
self.base = base
@@ -1315,7 +1356,7 @@ class UnaryExpr(Expression):
op = ''
expr = None # type: Expression
# Inferred operator method type
- method_type = None # type: mypy.types.Type
+ method_type = None # type: Optional[mypy.types.Type]
def __init__(self, op: str, expr: Expression) -> None:
self.op = op
@@ -1397,7 +1438,7 @@ class OpExpr(Expression):
left = None # type: Expression
right = None # type: Expression
# Inferred type for the operator method type (when relevant).
- method_type = None # type: mypy.types.Type
+ method_type = None # type: Optional[mypy.types.Type]
def __init__(self, op: str, left: Expression, right: Expression) -> None:
self.op = op
@@ -1416,7 +1457,7 @@ class ComparisonExpr(Expression):
operators = None # type: List[str]
operands = None # type: List[Expression]
# Inferred type for the operator methods (when relevant; None for 'is').
- method_types = None # type: List[mypy.types.Type]
+ method_types = None # type: List[Optional[mypy.types.Type]]
def __init__(self, operators: List[str], operands: List[Expression]) -> None:
self.operators = operators
@@ -1490,7 +1531,7 @@ class SuperExpr(Expression):
return visitor.visit_super_expr(self)
-class FuncExpr(FuncItem, Expression):
+class LambdaExpr(FuncItem, Expression):
"""Lambda expression"""
def name(self) -> str:
@@ -1499,10 +1540,12 @@ class FuncExpr(FuncItem, Expression):
def expr(self) -> Expression:
"""Return the expression (the body) of the lambda."""
ret = cast(ReturnStmt, self.body.body[-1])
- return ret.expr
+ expr = ret.expr
+ assert expr is not None # lambda can't have empty body
+ return expr
def accept(self, visitor: ExpressionVisitor[T]) -> T:
- return visitor.visit_func_expr(self)
+ return visitor.visit_lambda_expr(self)
class ListExpr(Expression):
@@ -1575,14 +1618,17 @@ class GeneratorExpr(Expression):
left_expr = None # type: Expression
sequences = None # type: List[Expression]
condlists = None # type: List[List[Expression]]
+ is_async = None # type: List[bool]
indices = None # type: List[Lvalue]
def __init__(self, left_expr: Expression, indices: List[Lvalue],
- sequences: List[Expression], condlists: List[List[Expression]]) -> None:
+ sequences: List[Expression], condlists: List[List[Expression]],
+ is_async: List[bool]) -> None:
self.left_expr = left_expr
self.sequences = sequences
self.condlists = condlists
self.indices = indices
+ self.is_async = is_async
def accept(self, visitor: ExpressionVisitor[T]) -> T:
return visitor.visit_generator_expr(self)
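The new is_async list records, clause by clause, whether a comprehension's for-clause was
written as 'async for' (PEP 530, Python 3.6). For the hypothetical comprehension below the
list would be [True]:

    import asyncio

    async def agen():
        for i in range(3):
            yield i

    async def collect():
        # A single async for-clause, hence is_async == [True] for this comprehension.
        return [x * x async for x in agen()]

    loop = asyncio.get_event_loop()
    print(loop.run_until_complete(collect()))  # [0, 1, 4]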
@@ -1619,15 +1665,18 @@ class DictionaryComprehension(Expression):
value = None # type: Expression
sequences = None # type: List[Expression]
condlists = None # type: List[List[Expression]]
+ is_async = None # type: List[bool]
indices = None # type: List[Lvalue]
def __init__(self, key: Expression, value: Expression, indices: List[Lvalue],
- sequences: List[Expression], condlists: List[List[Expression]]) -> None:
+ sequences: List[Expression], condlists: List[List[Expression]],
+ is_async: List[bool]) -> None:
self.key = key
self.value = value
self.sequences = sequences
self.condlists = condlists
self.indices = indices
+ self.is_async = is_async
def accept(self, visitor: ExpressionVisitor[T]) -> T:
return visitor.visit_dictionary_comprehension(self)
@@ -1739,8 +1788,8 @@ class TypeVarExpr(SymbolNode, Expression):
assert data['.class'] == 'TypeVarExpr'
return TypeVarExpr(data['name'],
data['fullname'],
- [mypy.types.Type.deserialize(v) for v in data['values']],
- mypy.types.Type.deserialize(data['upper_bound']),
+ [mypy.types.deserialize_type(v) for v in data['values']],
+ mypy.types.deserialize_type(data['upper_bound']),
data['variance'])
@@ -1750,7 +1799,7 @@ class TypeAliasExpr(Expression):
type = None # type: mypy.types.Type
# Simple fallback type for aliases that are invalid in runtime expressions
# (for example Union, Tuple, Callable).
- fallback = None # type: mypy.types.Type
+ fallback = None # type: Optional[mypy.types.Type]
# This type alias is subscripted in a runtime expression like Alias[int](42)
# (not in a type context like type annotation or base class).
in_runtime = False # type: bool
@@ -1792,6 +1841,25 @@ class TypedDictExpr(Expression):
return visitor.visit_typeddict_expr(self)
+class EnumCallExpr(Expression):
+ """Named tuple expression Enum('name', 'val1 val2 ...')."""
+
+ # The class representation of this enumerated type
+ info = None # type: TypeInfo
+ # The item names (for debugging)
+ items = None # type: List[str]
+ values = None # type: List[Optional[Expression]]
+
+ def __init__(self, info: 'TypeInfo', items: List[str],
+ values: List[Optional[Expression]]) -> None:
+ self.info = info
+ self.items = items
+ self.values = values
+
+ def accept(self, visitor: ExpressionVisitor[T]) -> T:
+ return visitor.visit_enum_call_expr(self)
+
+
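EnumCallExpr describes the functional form of Enum creation; items holds the member names and
values the explicit member values, if any. For example (plain standard-library code, unrelated
to mypy itself):

    from enum import Enum

    Color = Enum('Color', 'RED GREEN BLUE')            # items: RED, GREEN, BLUE; no values
    Status = Enum('Status', [('OK', 0), ('FAIL', 1)])  # explicit values 0 and 1
    print(Color.RED, Status.OK.value)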
class PromoteExpr(Expression):
"""Ducktype class decorator expression _promote(...)."""
@@ -1876,6 +1944,10 @@ class TypeInfo(SymbolNode):
# Method Resolution Order: the order of looking up attributes. The first
# value always refers to this class.
mro = None # type: List[TypeInfo]
+
+ declared_metaclass = None # type: Optional[mypy.types.Instance]
+ metaclass_type = None # type: mypy.types.Instance
+
subtypes = None # type: Set[TypeInfo] # Direct subclasses encountered so far
names = None # type: SymbolTable # Names defined directly in this type
is_abstract = False # Does the class have any abstract attributes?
@@ -1902,7 +1974,7 @@ class TypeInfo(SymbolNode):
# even though it's not a subclass in Python. The non-standard
# `@_promote` decorator introduces this, and there are also
# several builtin examples, in particular `int` -> `float`.
- _promote = None # type: mypy.types.Type
+ _promote = None # type: Optional[mypy.types.Type]
# Representation of a Tuple[...] base class, if the class has any
# (e.g., for named tuples). If this is not None, the actual Type
@@ -1921,9 +1993,6 @@ class TypeInfo(SymbolNode):
# Is this a newtype type?
is_newtype = False
- # Alternative to fullname() for 'anonymous' classes.
- alt_fullname = None # type: Optional[str]
-
FLAGS = [
'is_abstract', 'is_enum', 'fallback_to_any', 'is_named_tuple',
'is_newtype'
@@ -1942,8 +2011,11 @@ class TypeInfo(SymbolNode):
self._fullname = defn.fullname
self.is_abstract = False
self.abstract_attributes = []
- if defn.type_vars:
- for vd in defn.type_vars:
+ self.add_type_vars()
+
+ def add_type_vars(self) -> None:
+ if self.defn.type_vars:
+ for vd in self.defn.type_vars:
self.type_vars.append(vd.name)
def name(self) -> str:
@@ -1957,7 +2029,7 @@ class TypeInfo(SymbolNode):
"""Is the type generic (i.e. does it have type variables)?"""
return len(self.type_vars) > 0
- def get(self, name: str) -> 'SymbolTableNode':
+ def get(self, name: str) -> Optional['SymbolTableNode']:
for cls in self.mro:
n = cls.names.get(name)
if n:
@@ -1983,7 +2055,7 @@ class TypeInfo(SymbolNode):
def has_method(self, name: str) -> bool:
return self.get_method(name) is not None
- def get_method(self, name: str) -> FuncBase:
+ def get_method(self, name: str) -> Optional[FuncBase]:
if self.mro is None: # Might be because of a previous error.
return None
for cls in self.mro:
@@ -2005,6 +2077,27 @@ class TypeInfo(SymbolNode):
self.mro = mro
self.is_enum = self._calculate_is_enum()
+ def calculate_metaclass_type(self) -> 'Optional[mypy.types.Instance]':
+ declared = self.declared_metaclass
+ if declared is not None and not declared.type.has_base('builtins.type'):
+ return declared
+ if self._fullname == 'builtins.type':
+ return mypy.types.Instance(self, [])
+ candidates = [s.declared_metaclass
+ for s in self.mro
+ if s.declared_metaclass is not None
+ and s.declared_metaclass.type is not None]
+ for c in candidates:
+ if c.type.mro is None:
+ continue
+ if all(other.type in c.type.mro for other in candidates):
+ return c
+ return None
+
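calculate_metaclass_type picks, from the metaclasses declared anywhere in the MRO, the
candidate whose own MRO contains all the others, mirroring Python's "most derived metaclass"
rule, and returns None when the declared metaclasses conflict. A toy version of that selection
over ordinary Python classes (not mypy's Instance types):

    def most_derived_metaclass(candidates):
        # The winner must have every other candidate in its MRO.
        for c in candidates:
            if all(other in c.__mro__ for other in candidates):
                return c
        return None

    class MetaA(type): pass
    class MetaB(MetaA): pass

    assert most_derived_metaclass([MetaA, MetaB]) is MetaB
    assert most_derived_metaclass([MetaB, type]) is MetaB
    assert most_derived_metaclass([MetaA, MetaB, type]) is MetaB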
+ def is_metaclass(self) -> bool:
+ return (self.has_base('builtins.type') or self.fullname() == 'abc.ABCMeta' or
+ self.fallback_to_any)
+
def _calculate_is_enum(self) -> bool:
"""
If this is "enum.Enum" itself, then yes, it's an enum.
@@ -2039,27 +2132,57 @@ class TypeInfo(SymbolNode):
This includes the most important information about the type.
"""
- base = None # type: str
+ return self.dump()
+
+ def dump(self,
+ str_conv: 'mypy.strconv.StrConv' = None,
+ type_str_conv: 'mypy.types.TypeStrVisitor' = None) -> str:
+ """Return a string dump of the contents of the TypeInfo."""
+ if not str_conv:
+ str_conv = mypy.strconv.StrConv()
+ base = '' # type: str
+
+ def type_str(typ: 'mypy.types.Type') -> str:
+ if type_str_conv:
+ return typ.accept(type_str_conv)
+ return str(typ)
+
+ head = 'TypeInfo' + str_conv.format_id(self)
if self.bases:
- base = 'Bases({})'.format(', '.join(str(base)
+ base = 'Bases({})'.format(', '.join(type_str(base)
for base in self.bases))
- return dump_tagged(['Name({})'.format(self.fullname()),
- base,
- ('Names', sorted(self.names.keys()))],
- 'TypeInfo')
+ mro = 'Mro({})'.format(', '.join(item.fullname() + str_conv.format_id(item)
+ for item in self.mro))
+ names = []
+ for name in sorted(self.names):
+ description = name + str_conv.format_id(self.names[name].node)
+ node = self.names[name].node
+ if isinstance(node, Var) and node.type:
+ description += ' ({})'.format(type_str(node.type))
+ names.append(description)
+ return mypy.strconv.dump_tagged(
+ ['Name({})'.format(self.fullname()),
+ base,
+ mro,
+ ('Names', names)],
+ head,
+ str_conv=str_conv)
def serialize(self) -> JsonDict:
# NOTE: This is where all ClassDefs originate, so there shouldn't be duplicates.
data = {'.class': 'TypeInfo',
'module_name': self.module_name,
'fullname': self.fullname(),
- 'alt_fullname': self.alt_fullname,
- 'names': self.names.serialize(self.alt_fullname or self.fullname()),
+ 'names': self.names.serialize(self.fullname()),
'defn': self.defn.serialize(),
'abstract_attributes': self.abstract_attributes,
'type_vars': self.type_vars,
'bases': [b.serialize() for b in self.bases],
'_promote': None if self._promote is None else self._promote.serialize(),
+ 'declared_metaclass': (None if self.declared_metaclass is None
+ else self.declared_metaclass.serialize()),
+ 'metaclass_type':
+ None if self.metaclass_type is None else self.metaclass_type.serialize(),
'tuple_type': None if self.tuple_type is None else self.tuple_type.serialize(),
'typeddict_type':
None if self.typeddict_type is None else self.typeddict_type.serialize(),
@@ -2074,13 +2197,17 @@ class TypeInfo(SymbolNode):
module_name = data['module_name']
ti = TypeInfo(names, defn, module_name)
ti._fullname = data['fullname']
- ti.alt_fullname = data['alt_fullname']
# TODO: Is there a reason to reconstruct ti.subtypes?
ti.abstract_attributes = data['abstract_attributes']
ti.type_vars = data['type_vars']
ti.bases = [mypy.types.Instance.deserialize(b) for b in data['bases']]
ti._promote = (None if data['_promote'] is None
- else mypy.types.Type.deserialize(data['_promote']))
+ else mypy.types.deserialize_type(data['_promote']))
+ ti.declared_metaclass = (None if data['declared_metaclass'] is None
+ else mypy.types.Instance.deserialize(data['declared_metaclass']))
+ ti.metaclass_type = (None if data['metaclass_type'] is None
+ else mypy.types.Instance.deserialize(data['metaclass_type']))
+ # NOTE: ti.mro will be set in the fixup phase.
ti.tuple_type = (None if data['tuple_type'] is None
else mypy.types.TupleType.deserialize(data['tuple_type']))
ti.typeddict_type = (None if data['typeddict_type'] is None
@@ -2089,13 +2216,29 @@ class TypeInfo(SymbolNode):
return ti
+class FakeInfo(TypeInfo):
+ # types.py defines a single instance of this class, called types.NOT_READY.
+ # This instance is used as a temporary placeholder in the process of de-serialization
+ # of 'Instance' types. The de-serialization happens in two steps: In the first step,
+ # Instance.type is set to NOT_READY. In the second step (in fixup.py) it is replaced by
+ # an actual TypeInfo. If you see the assertion error below, then most probably something
+ # went wrong during the second step and an 'Instance' that raised this error was not fixed.
+ # Note:
+ # 'None' is not used as a dummy value for two reasons:
+ # 1. Using None would require around 80-100 asserts to make 'mypy --strict-optional mypy'
+ # pass cleanly.
+ # 2. If the NOT_READY value is accidentally used somewhere, it will be obvious where the value
+ # is from, whereas a 'None' value could come from anywhere.
+ def __getattr__(self, attr: str) -> None:
+ raise AssertionError('De-serialization failure: TypeInfo not fixed')
+
+
class SymbolTableNode:
# Kind of node. Possible values:
# - LDEF: local definition (of any kind)
# - GDEF: global (module-level) definition
# - MDEF: class member definition
- # - UNBOUND_TVAR: TypeVar(...) definition, not bound
- # - TVAR: type variable in a bound scope (generic function / generic clas)
+ # - TVAR: TypeVar(...) definition
# - MODULE_REF: reference to a module
# - TYPE_ALIAS: type alias
# - UNBOUND_IMPORTED: temporary kind for imported names
@@ -2103,10 +2246,8 @@ class SymbolTableNode:
# AST node of definition (FuncDef/Var/TypeInfo/Decorator/TypeVarExpr,
# or None for a bound type variable).
node = None # type: Optional[SymbolNode]
- # Type variable definition (for bound type variables only)
- tvar_def = None # type: Optional[mypy.types.TypeVarDef]
# Module id (e.g. "foo.bar") or None
- mod_id = ''
+ mod_id = '' # type: Optional[str]
# If this not None, override the type of the 'node' attribute.
type_override = None # type: Optional[mypy.types.Type]
# If False, this name won't be imported via 'from <module> import *'.
@@ -2115,27 +2256,28 @@ class SymbolTableNode:
# For deserialized MODULE_REF nodes, the referenced module name;
# for other nodes, optionally the name of the referenced object.
cross_ref = None # type: Optional[str]
+ # Was this node created by normalize_type_alias?
+ normalized = False # type: bool
def __init__(self, kind: int, node: Optional[SymbolNode], mod_id: str = None,
typ: 'mypy.types.Type' = None,
- tvar_def: 'mypy.types.TypeVarDef' = None,
- module_public: bool = True) -> None:
+ module_public: bool = True, normalized: bool = False) -> None:
self.kind = kind
self.node = node
self.type_override = typ
self.mod_id = mod_id
- self.tvar_def = tvar_def
self.module_public = module_public
+ self.normalized = normalized
@property
- def fullname(self) -> str:
+ def fullname(self) -> Optional[str]:
if self.node is not None:
return self.node.fullname()
else:
return None
@property
- def type(self) -> 'mypy.types.Type':
+ def type(self) -> 'Optional[mypy.types.Type]':
# IDEA: Get rid of the Any type.
node = self.node # type: Any
if self.type_override is not None:
@@ -2167,8 +2309,6 @@ class SymbolTableNode:
data = {'.class': 'SymbolTableNode',
'kind': node_kinds[self.kind],
} # type: JsonDict
- if self.tvar_def:
- data['tvar_def'] = self.tvar_def.serialize()
if not self.module_public:
data['module_public'] = False
if self.kind == MODULE_REF:
@@ -2177,14 +2317,7 @@ class SymbolTableNode:
else:
if self.node is not None:
if prefix is not None:
- # Check whether this is an alias for another object.
- # If the object's canonical full name differs from
- # the full name computed from prefix and name,
- # it's an alias, and we serialize it as a cross ref.
- if isinstance(self.node, TypeInfo):
- fullname = self.node.alt_fullname or self.node.fullname()
- else:
- fullname = self.node.fullname()
+ fullname = self.node.fullname()
if (fullname is not None and '.' in fullname and
fullname != prefix + '.' + name):
data['cross_ref'] = fullname
@@ -2208,10 +2341,8 @@ class SymbolTableNode:
node = SymbolNode.deserialize(data['node'])
typ = None
if 'type_override' in data:
- typ = mypy.types.Type.deserialize(data['type_override'])
+ typ = mypy.types.deserialize_type(data['type_override'])
stnode = SymbolTableNode(kind, node, typ=typ)
- if 'tvar_def' in data:
- stnode.tvar_def = mypy.types.TypeVarDef.deserialize(data['tvar_def'])
if 'module_public' in data:
stnode.module_public = data['module_public']
return stnode
@@ -2299,3 +2430,70 @@ def get_flags(node: Node, names: List[str]) -> List[str]:
def set_flags(node: Node, flags: List[str]) -> None:
for name in flags:
setattr(node, name, True)
+
+
+def get_member_expr_fullname(expr: MemberExpr) -> Optional[str]:
+ """Return the qualified name representation of a member expression.
+
+ Return a string of form foo.bar, foo.bar.baz, or similar, or None if the
+ argument cannot be represented in this form.
+ """
+ initial = None # type: Optional[str]
+ if isinstance(expr.expr, NameExpr):
+ initial = expr.expr.name
+ elif isinstance(expr.expr, MemberExpr):
+ initial = get_member_expr_fullname(expr.expr)
+ else:
+ return None
+ return '{}.{}'.format(initial, expr.name)
+
+
+deserialize_map = {
+ key: obj.deserialize # type: ignore
+ for key, obj in globals().items()
+ if isinstance(obj, type) and issubclass(obj, SymbolNode) and obj is not SymbolNode
+}
+
+
+def check_arg_kinds(arg_kinds: List[int], nodes: List[T], fail: Callable[[str, T], None]) -> None:
+ is_var_arg = False
+ is_kw_arg = False
+ seen_named = False
+ seen_opt = False
+ for kind, node in zip(arg_kinds, nodes):
+ if kind == ARG_POS:
+ if is_var_arg or is_kw_arg or seen_named or seen_opt:
+ fail("Required positional args may not appear "
+ "after default, named or var args",
+ node)
+ break
+ elif kind == ARG_OPT:
+ if is_var_arg or is_kw_arg or seen_named:
+ fail("Positional default args may not appear after named or var args", node)
+ break
+ seen_opt = True
+ elif kind == ARG_STAR:
+ if is_var_arg or is_kw_arg or seen_named:
+ fail("Var args may not appear after named or var args", node)
+ break
+ is_var_arg = True
+ elif kind == ARG_NAMED or kind == ARG_NAMED_OPT:
+ seen_named = True
+ if is_kw_arg:
+ fail("A **kwargs argument must be the last argument", node)
+ break
+ elif kind == ARG_STAR2:
+ if is_kw_arg:
+ fail("You may only have one **kwargs argument", node)
+ break
+ is_kw_arg = True
+
+
+def check_arg_names(names: List[str], nodes: List[T], fail: Callable[[str, T], None],
+ description: str = 'function definition') -> None:
+ seen_names = set() # type: Set[str]
+ for name, node in zip(names, nodes):
+ if name is not None and name in seen_names:
+ fail("Duplicate argument '{}' in {}".format(name, description), node)
+ break
+ seen_names.add(name)
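check_arg_kinds enforces the usual ordering of parameter kinds (required positionals, then
defaults, then *args, then named/keyword-only arguments, then a single **kwargs), and
check_arg_names rejects duplicate names. The rules correspond to the headers sketched below
(illustrative only; the invalid ones are shown as comments since Python's own parser would
reject them as written):

    # Accepted ordering: required, default, *args, keyword-only, **kwargs.
    def ok(a, b=1, *args, c, d=2, **kwargs):
        pass

    # Each of these would trigger one of the messages above:
    #   def bad1(a=1, b): ...       # required positional after a default
    #   def bad2(*args, *more): ... # more than one var-arg
    #   def bad3(**kw, a): ...      # anything after **kwargs
    #   def bad4(a, a): ...         # duplicate argument name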
diff --git a/mypy/options.py b/mypy/options.py
index 77f9713..8c87642 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -2,7 +2,7 @@ import fnmatch
import pprint
import sys
-from typing import Any, Mapping, Optional, Tuple, List, Pattern
+from typing import Any, Mapping, Optional, Tuple, List, Pattern, Dict
from mypy import defaults
@@ -26,11 +26,12 @@ class Options:
"strict_optional_whitelist",
"show_none_errors",
"warn_no_return",
+ "warn_return_any",
"ignore_errors",
"strict_boolean",
}
- OPTIONS_AFFECTING_CACHE = PER_MODULE_OPTIONS | {"strict_optional"}
+ OPTIONS_AFFECTING_CACHE = PER_MODULE_OPTIONS | {"strict_optional", "quick_and_dirty"}
def __init__(self) -> None:
# -- build options --
@@ -63,7 +64,11 @@ class Options:
self.warn_redundant_casts = False
# Warn about falling off the end of a function returning non-None
- self.warn_no_return = False
+ self.warn_no_return = True
+
+ # Warn about returning objects of type Any when the function is
+ # declared with a precise type
+ self.warn_return_any = False
# Warn about unused '# type: ignore' comments
self.warn_unused_ignores = False
@@ -77,8 +82,8 @@ class Options:
# Apply strict None checking
self.strict_optional = False
- # Hide "note: In function "foo":" messages.
- self.hide_error_context = True
+ # Show "note: In function "foo":" messages.
+ self.show_error_context = False
# Files in which to allow strict-Optional related errors
# TODO: Kill this in favor of show_none_errors
@@ -96,6 +101,12 @@ class Options:
# Write junit.xml to given file
self.junit_xml = None # type: Optional[str]
+ # Caching options
+ self.incremental = False
+ self.cache_dir = defaults.CACHE_DIR
+ self.debug_cache = False
+ self.quick_and_dirty = False
+
# Per-module options (raw)
self.per_module_options = {} # type: Dict[Pattern[str], Dict[str, object]]
@@ -114,10 +125,6 @@ class Options:
self.use_builtins_fixtures = False
# -- experimental options --
- self.fast_parser = True
- self.incremental = False
- self.cache_dir = defaults.CACHE_DIR
- self.debug_cache = False
self.shadow_file = None # type: Optional[Tuple[str, str]]
self.show_column_numbers = False # type: bool
self.dump_graph = False
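All of the new settings are plain attributes on Options, so programmatic callers can flip them
directly. A minimal sketch, assuming this version of mypy is importable and using only the
attributes introduced in this hunk (wiring the object into the build entry point is outside
its scope):

    from mypy.options import Options

    opts = Options()
    opts.warn_return_any = True     # new, also available per module
    opts.warn_no_return = True      # now the default anyway
    opts.incremental = True         # caching options are no longer 'experimental'
    opts.cache_dir = '.mypy_cache'
    opts.show_error_context = True  # replaces the old hide_error_context flag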
diff --git a/mypy/parse.py b/mypy/parse.py
index cbfec39..13fd58b 100644
--- a/mypy/parse.py
+++ b/mypy/parse.py
@@ -1,82 +1,13 @@
-"""Mypy parser.
-
-Constructs a parse tree (abstract syntax tree) based on a string
-representing a source file. Performs only minimal semantic checks.
-"""
-
-import re
-
from typing import List, Tuple, Set, cast, Union, Optional
-from mypy import lex
-from mypy.lex import (
- Token, Eof, Bom, Break, Name, Colon, Dedent, IntLit, StrLit, BytesLit,
- UnicodeLit, FloatLit, Op, Indent, Keyword, Punct, LexError, ComplexLit,
- EllipsisToken
-)
-from mypy.sharedparse import special_function_elide_names, argument_elide_name
-from mypy.nodes import (
- MypyFile, Import, ImportAll, ImportFrom, FuncDef, OverloadedFuncDef,
- ClassDef, Decorator, Block, Var, OperatorAssignmentStmt, Statement,
- ExpressionStmt, AssignmentStmt, ReturnStmt, RaiseStmt, AssertStmt,
- DelStmt, BreakStmt, ContinueStmt, PassStmt, GlobalDecl,
- WhileStmt, ForStmt, IfStmt, TryStmt, WithStmt, Expression,
- TupleExpr, GeneratorExpr, ListComprehension, ListExpr, ConditionalExpr,
- DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr,
- FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr, SliceExpr, OpExpr,
- UnaryExpr, FuncExpr, PrintStmt, ImportBase, ComparisonExpr,
- StarExpr, YieldFromExpr, NonlocalDecl, DictionaryComprehension,
- SetComprehension, ComplexExpr, EllipsisExpr, YieldExpr, ExecStmt, Argument,
- BackquoteExpr
-)
-from mypy import defaults
-from mypy import nodes
-from mypy.errors import Errors, CompileError
-from mypy.types import Type, CallableType, AnyType, UnboundType
-from mypy.parsetype import (
- parse_type, parse_types, parse_signature, TypeParseError
-)
+from mypy.errors import Errors
from mypy.options import Options
-
-from mypy import experiments
-
-
-class ParseError(Exception): pass
-
-
-precedence = {
- '**': 16,
- '-u': 15, '+u': 15, '~': 15, # unary operators (-, + and ~)
- '<cast>': 14,
- '*': 13, '/': 13, '//': 13, '%': 13, '@': 13,
- '+': 12, '-': 12,
- '>>': 11, '<<': 11,
- '&': 10,
- '^': 9,
- '|': 8,
- '==': 7, '!=': 7, '<': 7, '>': 7, '<=': 7, '>=': 7, 'is': 7, 'in': 7,
- '*u': 7, # unary * for star expressions
- 'not': 6,
- 'and': 5,
- 'or': 4,
- '<if>': 3, # conditional expression
- '<for>': 2, # list comprehension
- ',': 1}
-
-
-op_assign = set([
- '+=', '-=', '*=', '/=', '//=', '%=', '**=', '@=', '|=', '&=', '^=', '>>=',
- '<<='])
-
-op_comp = set([
- '>', '<', '==', '>=', '<=', '<>', '!=', 'is', 'is', 'in', 'not'])
-
-none = Token('') # Empty token
+from mypy.nodes import MypyFile
def parse(source: Union[str, bytes],
fnam: str,
- errors: Errors,
+ errors: Optional[Errors],
options: Options) -> MypyFile:
"""Parse a source file, without doing any semantic analysis.
@@ -86,1963 +17,17 @@ def parse(source: Union[str, bytes],
The python_version (major, minor) option determines the Python syntax variant.
"""
is_stub_file = bool(fnam) and fnam.endswith('.pyi')
- if options.fast_parser:
- if options.python_version[0] >= 3 or is_stub_file:
- import mypy.fastparse
- return mypy.fastparse.parse(source,
- fnam=fnam,
- errors=errors,
- pyversion=options.python_version,
- custom_typing_module=options.custom_typing_module)
- else:
- import mypy.fastparse2
- return mypy.fastparse2.parse(source,
- fnam=fnam,
- errors=errors,
- pyversion=options.python_version,
- custom_typing_module=options.custom_typing_module)
-
- parser = Parser(fnam,
- errors,
- options.python_version,
- options.custom_typing_module,
- is_stub_file=is_stub_file)
- tree = parser.parse(source)
- tree.path = fnam
- tree.is_stub = is_stub_file
- return tree
-
-
-class Parser:
- """Mypy parser that parses a string into an AST.
-
- Parses type annotations in addition to basic Python syntax. It supports both Python 2 and 3
- (though Python 2 support is incomplete).
-
- The AST classes are defined in mypy.nodes and mypy.types.
- """
-
- tok = None # type: List[Token]
- ind = 0
- errors = None # type: Errors
- # If True, raise an exception on any parse error. Otherwise, errors are reported via 'errors'.
- raise_on_error = False
-
- # Are we currently parsing the body of a class definition?
- is_class_body = False
- # All import nodes encountered so far in this parse unit.
- imports = None # type: List[ImportBase]
- # Names imported from __future__.
- future_options = None # type: List[str]
- # Lines to ignore (using # type: ignore).
- ignored_lines = None # type: Set[int]
-
- def __init__(self, fnam: str, errors: Errors, pyversion: Tuple[int, int],
- custom_typing_module: str = None, is_stub_file: bool = False) -> None:
- self.raise_on_error = errors is None
- self.pyversion = pyversion
- self.custom_typing_module = custom_typing_module
- self.is_stub_file = is_stub_file
- if errors is not None:
- self.errors = errors
- else:
- self.errors = Errors()
- if fnam is not None:
- self.errors.set_file(fnam)
- else:
- self.errors.set_file('<input>')
-
- def parse(self, s: Union[str, bytes]) -> MypyFile:
- self.tok, self.ignored_lines = lex.lex(s, pyversion=self.pyversion,
- is_stub_file=self.is_stub_file)
- self.ind = 0
- self.imports = []
- self.future_options = []
- file = self.parse_file()
- if self.raise_on_error and self.errors.is_errors():
- self.errors.raise_error()
- return file
-
- def parse_file(self) -> MypyFile:
- """Parse a mypy source file."""
- is_bom = self.parse_bom()
- defs = self.parse_defs()
- self.expect_type(Eof)
- node = MypyFile(defs, self.imports, is_bom, self.ignored_lines)
- return node
-
- # Parse the initial part
-
- def parse_bom(self) -> bool:
- """Parse the optional byte order mark at the beginning of a file."""
- if isinstance(self.current(), Bom):
- self.expect_type(Bom)
- if isinstance(self.current(), Break):
- self.expect_break()
- return True
- else:
- return False
-
- def parse_import(self) -> Import:
- self.expect('import')
- ids = []
- while True:
- id = self.parse_qualified_name()
- translated = self.translate_module_id(id)
- as_id = None
- if self.current_str() == 'as':
- self.expect('as')
- name_tok = self.expect_type(Name)
- as_id = name_tok.string
- elif translated != id:
- as_id = id
- ids.append((translated, as_id))
- if self.current_str() != ',':
- break
- self.expect(',')
- node = Import(ids)
- self.imports.append(node)
- return node
-
- def translate_module_id(self, id: str) -> str:
- """Return the actual, internal module id for a source text id.
-
- For example, translate '__builtin__' in Python 2 to 'builtins'.
- """
- if id == self.custom_typing_module:
- return 'typing'
- elif id == '__builtin__' and self.pyversion[0] == 2:
- # HACK: __builtin__ in Python 2 is aliases to builtins. However, the implementation
- # is named __builtin__.py (there is another layer of translation elsewhere).
- return 'builtins'
- return id
-
- def parse_import_from(self) -> ImportBase:
- self.expect('from')
-
- # Build the list of beginning relative tokens.
- relative = 0
- while self.current_str() in (".", "..."):
- relative += len(self.current_str())
- self.skip()
-
- # Parse qualified name to actually import from.
- if self.current_str() == "import":
- # Empty/default values.
- name = ""
- else:
- name = self.parse_qualified_name()
-
- name = self.translate_module_id(name)
-
- # Parse import list
- self.expect('import')
- node = None # type: ImportBase
- if self.current_str() == '*':
- if name == '__future__':
- raise self.parse_error()
- # An import all from a module node:
- self.skip()
- node = ImportAll(name, relative)
- else:
- is_paren = self.current_str() == '('
- if is_paren:
- self.expect('(')
- targets = [] # type: List[Tuple[str, str]]
- while True:
- id, as_id = self.parse_import_name()
- if '%s.%s' % (name, id) == self.custom_typing_module:
- if targets or self.current_str() == ',':
- self.fail('You cannot import any other modules when you '
- 'import a custom typing module',
- self.current().line, self.current().column)
- node = Import([('typing', as_id)])
- self.skip_until_break()
- break
- targets.append((id, as_id))
- if self.current_str() != ',':
- break
- self.expect(',')
- if is_paren and self.current_str() == ')':
- break
- if is_paren:
- self.expect(')')
- if node is None:
- node = ImportFrom(name, relative, targets)
- self.imports.append(node)
- if name == '__future__':
- self.future_options.extend(target[0] for target in targets)
- return node
-
- def parse_import_name(self) -> Tuple[str, Optional[str]]:
- tok = self.expect_type(Name)
- name = tok.string
- if self.current_str() == 'as':
- self.skip()
- as_name = self.expect_type(Name)
- return name, as_name.string
- else:
- return name, None
-
- def parse_qualified_name(self) -> str:
- """Parse a name with an optional module qualifier.
-
- Return a tuple with the name as a string and a token array
- containing all the components of the name.
- """
- tok = self.expect_type(Name)
- n = tok.string
- while self.current_str() == '.':
- self.expect('.')
- tok = self.expect_type(Name)
- n += '.' + tok.string
- return n
-
- # Parsing global definitions
-
- def parse_defs(self) -> List[Statement]:
- defs = [] # type: List[Statement]
- while not self.eof():
- try:
- defn, is_simple = self.parse_statement()
- if is_simple:
- self.expect_break()
- if defn is not None:
- if not self.try_combine_overloads(defn, defs):
- defs.append(defn)
- except ParseError:
- pass
- return defs
-
- def parse_class_def(self) -> ClassDef:
- old_is_class_body = self.is_class_body
- self.is_class_body = True
-
- self.expect('class')
- metaclass = None
-
- try:
- base_types = [] # type: List[Expression]
- try:
- name_tok = self.expect_type(Name)
- name = name_tok.string
-
- self.errors.push_type(name)
-
- if self.current_str() == '(':
- self.skip()
- while True:
- if self.current_str() == ')':
- break
- if self.peek().string == '=':
- metaclass = self.parse_class_keywords()
- break
- base_types.append(self.parse_super_type())
- if self.current_str() != ',':
- break
- self.skip()
- self.expect(')')
- except ParseError:
- pass
-
- defs, _ = self.parse_block()
-
- node = ClassDef(name, defs, None, base_types, metaclass=metaclass)
- return node
- finally:
- self.errors.pop_type()
- self.is_class_body = old_is_class_body
-
- def parse_class_keywords(self) -> Optional[str]:
- """Parse the class keyword arguments, keeping the metaclass but
- ignoring all others. Returns None if the metaclass isn't found.
- """
- metaclass = None
- while True:
- key = self.expect_type(Name)
- self.expect('=')
- if key.string == 'metaclass':
- metaclass = self.parse_qualified_name()
- else:
- # skip the class value
- self.parse_expression(precedence[','])
- if self.current_str() != ',':
- break
- self.skip()
- if self.current_str() == ')':
- break
- return metaclass
-
- def parse_super_type(self) -> Expression:
- return self.parse_expression(precedence[','])
-
- def parse_decorated_function_or_class(self) -> Union[Decorator, ClassDef]:
- decorators = []
- no_type_checks = False
- while self.current_str() == '@':
- self.expect('@')
- d_exp = self.parse_expression()
- if self.is_no_type_check_decorator(d_exp):
- no_type_checks = True
- decorators.append(d_exp)
- self.expect_break()
- if self.current_str() != 'class':
- func = self.parse_function(no_type_checks)
- func.is_decorated = True
- var = Var(func.name())
- # Types of decorated functions must always be inferred.
- var.is_ready = False
- var.set_line(decorators[0].line)
- node = Decorator(func, decorators, var)
- return node
- else:
- cls = self.parse_class_def()
- cls.decorators = decorators
- return cls
-
- def is_no_type_check_decorator(self, expr: Expression) -> bool:
- if isinstance(expr, NameExpr):
- return expr.name == 'no_type_check'
- elif isinstance(expr, MemberExpr):
- if isinstance(expr.expr, NameExpr):
- return expr.expr.name == 'typing' and expr.name == 'no_type_check'
- return False
-
- def parse_function(self, no_type_checks: bool = False) -> FuncDef:
- def_tok = self.expect('def')
- is_method = self.is_class_body
- self.is_class_body = False
- try:
- (name, args, typ, is_error, extra_stmts) = self.parse_function_header(no_type_checks)
-
- arg_kinds = [arg.kind for arg in args]
- arg_names = [arg.variable.name() for arg in args]
- # for overloads of special methods, let people name their arguments
- # whatever they want, and don't let them call those functions with
- # arguments by name.
- if special_function_elide_names(name):
- arg_names = [None] * len(arg_names)
-
- body, comment_type = self.parse_block(allow_type=True)
- # Potentially insert extra assignment statements to the beginning of the
- # body, used to decompose Python 2 tuple arguments.
- body.body[:0] = extra_stmts
- if comment_type:
- # The function has a # type: ... signature.
- if typ:
- self.errors.report(
- def_tok.line, def_tok.column, 'Function has duplicate type signatures')
- sig = cast(CallableType, comment_type)
- if sig.is_ellipsis_args:
- # When we encounter an ellipsis, fill in the arg_types with
- # a bunch of AnyTypes, emulating Callable[..., T]
- arg_types = [AnyType()] * len(arg_kinds) # type: List[Type]
- typ = CallableType(
- arg_types,
- arg_kinds,
- arg_names,
- sig.ret_type,
- None,
- line=def_tok.line,
- column=def_tok.column)
- elif is_method and len(sig.arg_kinds) < len(arg_kinds):
- self.check_argument_kinds(arg_kinds,
- [nodes.ARG_POS] + sig.arg_kinds,
- def_tok.line, def_tok.column)
- # Add implicit 'self' argument to signature.
- first_arg = [AnyType()] # type: List[Type]
- typ = CallableType(
- first_arg + sig.arg_types,
- arg_kinds,
- arg_names,
- sig.ret_type,
- None,
- line=def_tok.line,
- column=def_tok.column)
- else:
- self.check_argument_kinds(arg_kinds, sig.arg_kinds,
- def_tok.line, def_tok.column)
- if len(sig.arg_types) > len(arg_kinds):
- raise ParseError('Type signature has too many arguments')
- if len(sig.arg_types) < len(arg_kinds):
- raise ParseError('Type signature has too few arguments')
- typ = CallableType(
- sig.arg_types,
- arg_kinds,
- arg_names,
- sig.ret_type,
- None,
- line=def_tok.line,
- column=def_tok.column)
-
- # If there was a serious error, we really cannot build a parse tree
- # node.
- if is_error:
- return None
-
- if typ:
- for arg, arg_type in zip(args, typ.arg_types):
- self.set_type_optional(arg_type, arg.initializer)
-
- if typ and isinstance(typ.ret_type, UnboundType):
- typ.ret_type.is_ret_type = True
-
- node = FuncDef(name, args, body, typ)
- node.set_line(def_tok)
- if typ is not None:
- typ.definition = node
- return node
- finally:
- self.errors.pop_function()
- self.is_class_body = is_method
-
- def check_argument_kinds(self, funckinds: List[int], sigkinds: List[int],
- line: int, column: int) -> None:
- """Check that arguments are consistent.
-
- This verifies that they have the same number and the kinds correspond.
-
- Arguments:
- funckinds: kinds of arguments in function definition
- sigkinds: kinds of arguments in signature (after # type:)
- """
- if len(funckinds) != len(sigkinds):
- if len(funckinds) > len(sigkinds):
- self.fail("Type signature has too few arguments", line, column)
- else:
- self.fail("Type signature has too many arguments", line, column)
- return
- for kind, token in [(nodes.ARG_STAR, '*'),
- (nodes.ARG_STAR2, '**')]:
- if ((funckinds.count(kind) != sigkinds.count(kind)) or
- (kind in funckinds and sigkinds.index(kind) != funckinds.index(kind))):
- self.fail(
- "Inconsistent use of '{}' in function "
- "signature".format(token), line, column)
-
- def parse_function_header(
- self, no_type_checks: bool=False) -> Tuple[str,
- List[Argument],
- CallableType,
- bool,
- List[AssignmentStmt]]:
- """Parse function header (a name followed by arguments)
-
- Return a 5-tuple with the following items:
- name
- arguments
- signature (annotation)
- error flag (True if error)
- extra statements needed to decompose arguments (usually empty)
-
- See parse_arg_list for an explanation of the final tuple item.
- """
- name = ''
-
- try:
- name_tok = self.expect_type(Name)
- name = name_tok.string
- include_names = not special_function_elide_names(name)
-
- self.errors.push_function(name)
-
- args, typ, extra_stmts = self.parse_args(no_type_checks, include_names)
- except ParseError:
- if not isinstance(self.current(), Break):
- self.ind -= 1 # Kludge: go back to the Break token
- # Resynchronise parsing by going back over :, if present.
- if isinstance(self.tok[self.ind - 1], Colon):
- self.ind -= 1
- return (name, [], None, True, [])
-
- return (name, args, typ, False, extra_stmts)
-
- def parse_args(self,
- no_type_checks: bool = False,
- include_names: bool = True) -> Tuple[List[Argument],
- CallableType,
- List[AssignmentStmt]]:
- """Parse a function signature (...) [-> t].
-
- See parse_arg_list for an explanation of the final tuple item.
- """
- lparen = self.expect('(')
-
- # Parse the argument list (everything within '(' and ')').
- args, extra_stmts = self.parse_arg_list(no_type_checks=no_type_checks)
-
- self.expect(')')
-
- if self.current_str() == '->':
- self.skip()
- if no_type_checks:
- self.parse_expression()
- ret_type = None
- else:
- ret_type = self.parse_type()
- else:
- ret_type = None
-
- arg_kinds = [arg.kind for arg in args]
- self.verify_argument_kinds(arg_kinds, lparen.line, lparen.column)
-
- annotation = self.build_func_annotation(
- ret_type, args, lparen.line, lparen.column, include_names=include_names)
-
- return args, annotation, extra_stmts
-
- def build_func_annotation(self,
- ret_type: Type,
- args: List[Argument],
- line: int,
- column: int,
- is_default_ret: bool = False,
- *,
- include_names: bool = True) -> CallableType:
- arg_types = [arg.type_annotation for arg in args]
- # Are there any type annotations?
- if ((ret_type and not is_default_ret)
- or arg_types != [None] * len(arg_types)):
- # Yes. Construct a type for the function signature.
- return self.construct_function_type(args, ret_type, line, column, include_names)
- else:
- return None
-
- def parse_arg_list(self, allow_signature: bool = True,
- no_type_checks: bool = False) -> Tuple[List[Argument],
- List[AssignmentStmt]]:
- """Parse function definition argument list.
-
- This includes everything between '(' and ')' (but not the
- parentheses).
-
- Return tuple (arguments,
- extra statements for decomposing arguments).
-
- The final argument is only used for Python 2 argument lists with
- tuples; they contain destructuring assignment statements used to
- decompose tuple arguments. For example, consider a header like this:
-
- . def f((x, y))
-
- The actual (sole) argument will be __tuple_arg_1 (a generated
- name), whereas the extra statement list will contain a single
- assignment statement corresponding to this assignment:
-
- x, y = __tuple_arg_1
- """
- args = [] # type: List[Argument]
- extra_stmts = []
- # This is for checking duplicate argument names.
- arg_names = [] # type: List[str]
- has_tuple_arg = False
-
- require_named = False
- bare_asterisk_before = -1
-
- if self.current_str() != ')' and self.current_str() != ':':
- while self.current_str() != ')':
- if self.current_str() == '*' and self.peek().string == ',':
- self.expect('*')
- require_named = True
- bare_asterisk_before = len(args)
- elif self.current_str() in ['*', '**']:
- if bare_asterisk_before == len(args):
- # named arguments must follow bare *
- raise self.parse_error()
-
- arg = self.parse_asterisk_arg(
- allow_signature,
- no_type_checks,
- )
- args.append(arg)
- require_named = True
- elif self.current_str() == '(':
- arg, extra_stmt, names = self.parse_tuple_arg(len(args))
- args.append(arg)
- if extra_stmt is not None:
- extra_stmts.append(extra_stmt)
- has_tuple_arg = True
- arg_names.extend(names)
- else:
- arg, require_named = self.parse_normal_arg(
- require_named,
- allow_signature,
- no_type_checks,
- )
- args.append(arg)
- arg_names.append(arg.variable.name())
-
- if self.current().string != ',':
- break
-
- self.expect(',')
-
- # Non-tuple argument dupes will be checked elsewhere. Avoid
- # generating duplicate errors.
- if has_tuple_arg:
- self.check_duplicate_argument_names(arg_names)
-
- return args, extra_stmts
-
- def check_duplicate_argument_names(self, names: List[str]) -> None:
- found = set() # type: Set[str]
- for name in names:
- if name in found:
- self.fail('Duplicate argument name "{}"'.format(name),
- self.current().line, self.current().column)
- found.add(name)
-
- def parse_asterisk_arg(self,
- allow_signature: bool,
- no_type_checks: bool) -> Argument:
- asterisk = self.skip()
- name = self.expect_type(Name)
- variable = Var(name.string)
- if asterisk.string == '*':
- kind = nodes.ARG_STAR
- else:
- kind = nodes.ARG_STAR2
-
- type = None
- if no_type_checks:
- self.parse_parameter_annotation()
- else:
- type = self.parse_arg_type(allow_signature)
-
- return Argument(variable, type, None, kind)
-
- def parse_tuple_arg(self, index: int) -> Tuple[Argument, AssignmentStmt, List[str]]:
- """Parse a single Python 2 tuple argument.
-
- Example: def f(x, (y, z)): ...
-
- The tuple argument gets transformed into an assignment in the
- function body (the second return value).
-
- Return tuple (argument, decomposing assignment, list of names defined).
-
- Special case: if the argument is just (x) then it's not a tuple;
- we indicate this by returning (argument, None, ['x']).
- However, if the argument is (x,) then it *is* a (singleton) tuple.
- """
- line = self.current().line
- column = self.current().column
- # Generate a new argument name that is very unlikely to clash with anything.
- arg_name = '__tuple_arg_{}'.format(index + 1)
- if self.pyversion[0] >= 3:
- self.fail('Tuples in argument lists only supported in Python 2 mode', line, column)
- paren_arg = self.parse_parentheses()
- self.verify_tuple_arg(paren_arg)
- if isinstance(paren_arg, NameExpr):
- # This isn't a tuple. Revert to a normal argument.
- arg_name = paren_arg.name
- decompose = None
- else:
- rvalue = NameExpr(arg_name)
- rvalue.set_line(line)
- decompose = AssignmentStmt([paren_arg], rvalue)
- decompose.set_line(line, column)
- kind = nodes.ARG_POS
- initializer = None
- if self.current_str() == '=':
- self.expect('=')
- initializer = self.parse_expression(precedence[','])
- kind = nodes.ARG_OPT
- var = Var(arg_name)
- arg_names = self.find_tuple_arg_argument_names(paren_arg)
- return Argument(var, None, initializer, kind), decompose, arg_names
-
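[Editor's note] A small illustrative sketch (not mypy code; the helper name is made up) of the rewrite described in the docstring above: a Python 2 tuple parameter becomes one generated argument plus one decomposing assignment at the top of the body:

    def decompose_tuple_arg(pattern, index):
        """Return (generated argument name, decomposing assignment source).

        `pattern` is a nested tuple of parameter names, e.g. ('x', ('y', 'z')).
        """
        arg_name = '__tuple_arg_{}'.format(index + 1)

        def render(p):
            if isinstance(p, tuple):
                return '(' + ', '.join(render(item) for item in p) + ')'
            return p

        return arg_name, '{} = {}'.format(render(pattern), arg_name)

    # def f(a, (x, (y, z))):  ->  the second argument is replaced, body gains:
    print(decompose_tuple_arg(('x', ('y', 'z')), 1))
    # ('__tuple_arg_2', '(x, (y, z)) = __tuple_arg_2')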
- def verify_tuple_arg(self, paren_arg: Expression) -> None:
- if isinstance(paren_arg, TupleExpr):
- if not paren_arg.items:
- self.fail('Empty tuple not valid as an argument', paren_arg.line, paren_arg.column)
- for item in paren_arg.items:
- self.verify_tuple_arg(item)
- elif not isinstance(paren_arg, NameExpr):
- self.fail('Invalid item in tuple argument', paren_arg.line, paren_arg.column)
-
- def find_tuple_arg_argument_names(self, node: Expression) -> List[str]:
- result = [] # type: List[str]
- if isinstance(node, TupleExpr):
- for item in node.items:
- result.extend(self.find_tuple_arg_argument_names(item))
- elif isinstance(node, NameExpr):
- result.append(node.name)
- return result
-
- def parse_normal_arg(self, require_named: bool,
- allow_signature: bool,
- no_type_checks: bool) -> Tuple[Argument, bool]:
- name = self.expect_type(Name)
- variable = Var(name.string)
-
- type = None
- if no_type_checks:
- self.parse_parameter_annotation()
- else:
- type = self.parse_arg_type(allow_signature)
-
- initializer = None # type: Expression
- if self.current_str() == '=':
- self.expect('=')
- initializer = self.parse_expression(precedence[','])
- if require_named:
- kind = nodes.ARG_NAMED_OPT
- else:
- kind = nodes.ARG_OPT
- else:
- if require_named:
- kind = nodes.ARG_NAMED
- else:
- kind = nodes.ARG_POS
-
- return Argument(variable, type, initializer, kind), require_named
-
- def set_type_optional(self, type: Type, initializer: Expression) -> None:
- if not experiments.STRICT_OPTIONAL:
- return
- # Indicate that type should be wrapped in an Optional if arg is initialized to None.
- optional = isinstance(initializer, NameExpr) and initializer.name == 'None'
- if isinstance(type, UnboundType):
- type.optional = optional
-
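[Editor's note] A short, hedged example of the behaviour set_type_optional enables when strict optional checking is turned on (experimental at this mypy version): a parameter annotated with T but defaulted to None is treated as Optional[T], so the two headers below end up equivalent for the checker:

    from typing import Optional

    def greet(name: str = None) -> str:            # treated as Optional[str]
        return 'hi' if name is None else 'hi ' + name

    def greet_explicit(name: Optional[str] = None) -> str:
        return 'hi' if name is None else 'hi ' + name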
- def parse_parameter_annotation(self) -> Expression:
- if self.current_str() == ':':
- self.skip()
- return self.parse_expression(precedence[','])
- else:
- return None
-
- def parse_arg_type(self, allow_signature: bool) -> Type:
- if self.current_str() == ':' and allow_signature:
- self.skip()
- return self.parse_type()
- else:
- return None
-
- def verify_argument_kinds(self, kinds: List[int], line: int, column: int) -> None:
- found = set() # type: Set[int]
- for i, kind in enumerate(kinds):
- if kind == nodes.ARG_POS and found & set([nodes.ARG_OPT,
- nodes.ARG_STAR,
- nodes.ARG_STAR2]):
- self.fail('Invalid argument list', line, column)
- elif kind == nodes.ARG_STAR and nodes.ARG_STAR in found:
- self.fail('Invalid argument list', line, column)
- elif kind == nodes.ARG_STAR2 and i != len(kinds) - 1:
- self.fail('Invalid argument list', line, column)
- found.add(kind)
-
- def construct_function_type(self,
- args: List[Argument],
- ret_type: Type,
- line: int,
- column: int,
- include_names: bool = True) -> CallableType:
- # Complete the type annotation by replacing omitted types with 'Any'.
- arg_types = [arg.type_annotation for arg in args]
- for i in range(len(arg_types)):
- if arg_types[i] is None:
- arg_types[i] = AnyType(implicit=True)
- if ret_type is None:
- ret_type = AnyType(implicit=True)
- arg_kinds = [arg.kind for arg in args]
- if include_names:
- arg_names = [None if argument_elide_name(arg.variable.name()) else arg.variable.name()
- for arg in args]
- else:
- arg_names = [None] * len(args)
- return CallableType(arg_types, arg_kinds, arg_names, ret_type, None, name=None,
- variables=None, line=line, column=column)
-
- # Parsing statements
-
- def parse_block(self, allow_type: bool = False) -> Tuple[Block, Type]:
- colon = self.expect(':')
- if not isinstance(self.current(), Break):
- # Block immediately after ':'.
- nodes = []
- while True:
- ind = self.ind
- stmt, is_simple = self.parse_statement()
- if not is_simple:
- self.parse_error_at(self.tok[ind])
- break
- nodes.append(stmt)
- brk = self.expect_break()
- if brk.string != ';':
- break
- node = Block(nodes)
- node.set_line(colon)
- return node, None
- else:
- # Indented block.
- brk = self.expect_break()
- type = self.parse_type_comment(brk, signature=True)
- self.expect_indent()
- stmt_list = [] # type: List[Statement]
- while (not isinstance(self.current(), Dedent) and
- not isinstance(self.current(), Eof)):
- try:
- stmt, is_simple = self.parse_statement()
- if is_simple:
- self.expect_break()
- if stmt is not None:
- if not self.try_combine_overloads(stmt, stmt_list):
- stmt_list.append(stmt)
- except ParseError:
- pass
- if isinstance(self.current(), Dedent):
- self.skip()
- node = Block(stmt_list)
- node.set_line(colon)
- return node, type
-
- def try_combine_overloads(self, s: Statement, stmt: List[Statement]) -> bool:
- if isinstance(s, Decorator) and stmt:
- fdef = s
- n = fdef.func.name()
- if isinstance(stmt[-1], Decorator) and stmt[-1].func.name() == n:
- stmt[-1] = OverloadedFuncDef([stmt[-1], fdef])
- return True
- elif isinstance(stmt[-1], OverloadedFuncDef) and stmt[-1].name() == n:
- stmt[-1].items.append(fdef)
- return True
- return False
-
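[Editor's note] For context, a hedged sketch of the kind of input try_combine_overloads merges: consecutive decorated definitions with the same name (typical in stub files) collapse into a single OverloadedFuncDef node:

    from typing import overload

    @overload
    def first(xs: str) -> str: ...
    @overload
    def first(xs: list) -> object: ...
    # The two decorated defs above are combined by the parser into one
    # overloaded definition.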
- def parse_statement(self) -> Tuple[Statement, bool]:
- stmt = None # type: Statement
- t = self.current()
- ts = self.current_str()
- is_simple = True # Is this a non-block statement?
- if ts == 'if':
- stmt = self.parse_if_stmt()
- is_simple = False
- elif ts == 'def':
- stmt = self.parse_function()
- is_simple = False
- elif ts == 'while':
- stmt = self.parse_while_stmt()
- is_simple = False
- elif ts == 'return':
- stmt = self.parse_return_stmt()
- elif ts == 'for':
- stmt = self.parse_for_stmt()
- is_simple = False
- elif ts == 'try':
- stmt = self.parse_try_stmt()
- is_simple = False
- elif ts == 'break':
- stmt = self.parse_break_stmt()
- elif ts == 'continue':
- stmt = self.parse_continue_stmt()
- elif ts == 'pass':
- stmt = self.parse_pass_stmt()
- elif ts == 'raise':
- stmt = self.parse_raise_stmt()
- elif ts == 'import':
- stmt = self.parse_import()
- elif ts == 'from':
- stmt = self.parse_import_from()
- elif ts == 'class':
- stmt = self.parse_class_def()
- is_simple = False
- elif ts == 'global':
- stmt = self.parse_global_decl()
- elif ts == 'nonlocal' and self.pyversion[0] >= 3:
- stmt = self.parse_nonlocal_decl()
- elif ts == 'assert':
- stmt = self.parse_assert_stmt()
- elif ts == 'del':
- stmt = self.parse_del_stmt()
- elif ts == 'with':
- stmt = self.parse_with_stmt()
- is_simple = False
- elif ts == '@':
- stmt = self.parse_decorated_function_or_class()
- is_simple = False
- elif ts == 'print' and (self.pyversion[0] == 2 and
- 'print_function' not in self.future_options):
- stmt = self.parse_print_stmt()
- elif ts == 'exec' and self.pyversion[0] == 2:
- stmt = self.parse_exec_stmt()
- else:
- stmt = self.parse_expression_or_assignment()
- if ts == 'async' and self.current_str() == 'def':
- self.parse_error_at(self.current(),
- reason='Use --fast-parser to parse code using "async def"')
- raise ParseError()
- if stmt is not None:
- stmt.set_line(t)
- return stmt, is_simple
-
- def parse_expression_or_assignment(self) -> Union[AssignmentStmt,
- OperatorAssignmentStmt,
- ExpressionStmt]:
- expr = self.parse_expression(star_expr_allowed=True)
- if self.current_str() == '=':
- return self.parse_assignment(expr)
- elif self.current_str() in op_assign:
- # Operator assignment statement.
- op = self.current_str()[:-1]
- self.skip()
- rvalue = self.parse_expression()
- return OperatorAssignmentStmt(op, expr, rvalue)
- else:
- # Expression statement.
- return ExpressionStmt(expr)
-
- def parse_assignment(self, lvalue: Expression) -> AssignmentStmt:
- """Parse an assignment statement.
-
- Assume that lvalue has been parsed already, and the current token is '='.
- Also parse an optional '# type:' comment.
- """
- self.expect('=')
- lvalues = [lvalue]
- expr = self.parse_expression(star_expr_allowed=True)
- while self.current_str() == '=':
- self.skip()
- lvalues.append(expr)
- expr = self.parse_expression(star_expr_allowed=True)
- cur = self.current()
- if isinstance(cur, Break):
- type = self.parse_type_comment(cur, signature=False)
- else:
- type = None
- return AssignmentStmt(lvalues, expr, type)
-
- def parse_return_stmt(self) -> ReturnStmt:
- self.expect('return')
- expr = None
- current = self.current()
- if current.string == 'yield':
- raise self.parse_error()
- if not isinstance(current, Break):
- expr = self.parse_expression()
- node = ReturnStmt(expr)
- return node
-
- def parse_raise_stmt(self) -> RaiseStmt:
- self.expect('raise')
- expr = None
- from_expr = None
- if not isinstance(self.current(), Break):
- expr = self.parse_expression()
- if self.current_str() == 'from':
- self.expect('from')
- from_expr = self.parse_expression()
- node = RaiseStmt(expr, from_expr)
- return node
-
- def parse_assert_stmt(self) -> AssertStmt:
- self.expect('assert')
- expr = self.parse_expression()
- node = AssertStmt(expr)
- return node
-
- def parse_yield_or_yield_from_expr(self) -> Union[YieldFromExpr, YieldExpr]:
- self.expect("yield")
- expr = None
- node = YieldExpr(expr) # type: Union[YieldFromExpr, YieldExpr]
- if not isinstance(self.current(), Break):
- if self.current_str() == "from":
- self.expect("from")
- expr = self.parse_expression() # parse the operand of 'yield from'
- node = YieldFromExpr(expr)
- else:
- if self.current_str() == ')':
- node = YieldExpr(None)
- else:
- expr = self.parse_expression()
- node = YieldExpr(expr)
- return node
-
- def parse_ellipsis(self) -> EllipsisExpr:
- self.expect('...')
- node = EllipsisExpr()
- return node
-
- def parse_del_stmt(self) -> DelStmt:
- self.expect('del')
- expr = self.parse_expression()
- node = DelStmt(expr)
- return node
-
- def parse_break_stmt(self) -> BreakStmt:
- self.expect('break')
- node = BreakStmt()
- return node
-
- def parse_continue_stmt(self) -> ContinueStmt:
- self.expect('continue')
- node = ContinueStmt()
- return node
-
- def parse_pass_stmt(self) -> PassStmt:
- self.expect('pass')
- node = PassStmt()
- return node
-
- def parse_global_decl(self) -> GlobalDecl:
- self.expect('global')
- names = self.parse_identifier_list()
- node = GlobalDecl(names)
- return node
-
- def parse_nonlocal_decl(self) -> NonlocalDecl:
- self.expect('nonlocal')
- names = self.parse_identifier_list()
- node = NonlocalDecl(names)
- return node
-
- def parse_identifier_list(self) -> List[str]:
- names = []
- while True:
- n = self.expect_type(Name)
- names.append(n.string)
- if self.current_str() != ',':
- break
- self.skip()
- return names
-
- def parse_while_stmt(self) -> WhileStmt:
- is_error = False
- self.expect('while')
- try:
- expr = self.parse_expression()
- except ParseError:
- is_error = True
- body, _ = self.parse_block()
- if self.current_str() == 'else':
- self.expect('else')
- else_body, _ = self.parse_block()
- else:
- else_body = None
- if not is_error:
- node = WhileStmt(expr, body, else_body)
- return node
- else:
- return None
-
- def parse_for_stmt(self) -> ForStmt:
- self.expect('for')
- index = self.parse_for_index_variables()
- self.expect('in')
- expr = self.parse_expression()
-
- body, _ = self.parse_block()
-
- if self.current_str() == 'else':
- self.expect('else')
- else_body, _ = self.parse_block()
- else:
- else_body = None
-
- node = ForStmt(index, expr, body, else_body)
- return node
-
- def parse_for_index_variables(self) -> Expression:
- # Parse index variables of a 'for' statement.
- index_items = []
- force_tuple = False
-
- while True:
- v = self.parse_expression(precedence['in'],
- star_expr_allowed=True) # Prevent parsing of for stmt 'in'
- index_items.append(v)
- if self.current_str() != ',':
- break
- self.skip()
- if self.current_str() == 'in':
- force_tuple = True
- break
-
- if len(index_items) == 1 and not force_tuple:
- index = index_items[0]
- else:
- index = TupleExpr(index_items)
- index.set_line(index_items[0])
-
- return index
-
- def parse_if_stmt(self) -> IfStmt:
- is_error = False
-
- self.expect('if')
- expr = []
- try:
- expr.append(self.parse_expression())
- except ParseError:
- is_error = True
-
- body = [self.parse_block()[0]]
-
- while self.current_str() == 'elif':
- self.expect('elif')
- try:
- expr.append(self.parse_expression())
- except ParseError:
- is_error = True
- body.append(self.parse_block()[0])
-
- if self.current_str() == 'else':
- self.expect('else')
- else_body, _ = self.parse_block()
- else:
- else_body = None
-
- if not is_error:
- node = IfStmt(expr, body, else_body)
- return node
- else:
- return None
-
- def parse_try_stmt(self) -> TryStmt:
- self.expect('try')
- body, _ = self.parse_block()
- is_error = False
- vars = [] # type: List[NameExpr]
- types = [] # type: List[Optional[Expression]]
- handlers = [] # type: List[Block]
- while self.current_str() == 'except':
- self.expect('except')
- if not isinstance(self.current(), Colon):
- try:
- t = self.current()
- expr = self.parse_expression(precedence[','])
- expr.set_line(t)
- types.append(expr)
- if self.current_str() == 'as':
- self.expect('as')
- vars.append(self.parse_name_expr())
- elif self.pyversion[0] == 2 and self.current_str() == ',':
- self.expect(',')
- vars.append(self.parse_name_expr())
- else:
- vars.append(None)
- except ParseError:
- is_error = True
- else:
- types.append(None)
- vars.append(None)
- handlers.append(self.parse_block()[0])
- if not is_error:
- if self.current_str() == 'else':
- self.skip()
- else_body, _ = self.parse_block()
- else:
- else_body = None
- if self.current_str() == 'finally':
- self.expect('finally')
- finally_body, _ = self.parse_block()
- else:
- finally_body = None
- node = TryStmt(body, vars, types, handlers, else_body,
- finally_body)
- return node
- else:
- return None
-
- def parse_with_stmt(self) -> WithStmt:
- self.expect('with')
- exprs = []
- targets = []
- while True:
- expr = self.parse_expression(precedence[','])
- if self.current_str() == 'as':
- self.expect('as')
- target = self.parse_expression(precedence[','])
- else:
- target = None
- exprs.append(expr)
- targets.append(target)
- if self.current_str() != ',':
- break
- self.expect(',')
- body, _ = self.parse_block()
- return WithStmt(exprs, targets, body)
-
- def parse_print_stmt(self) -> PrintStmt:
- self.expect('print')
- args = []
- target = None
- if self.current_str() == '>>':
- self.skip()
- target = self.parse_expression(precedence[','])
- if self.current_str() == ',':
- self.skip()
- if isinstance(self.current(), Break):
- raise self.parse_error()
- else:
- if not isinstance(self.current(), Break):
- raise self.parse_error()
- comma = False
- while not isinstance(self.current(), Break):
- args.append(self.parse_expression(precedence[',']))
- if self.current_str() == ',':
- comma = True
- self.skip()
- else:
- comma = False
- break
- return PrintStmt(args, newline=not comma, target=target)
-
- def parse_exec_stmt(self) -> ExecStmt:
- self.expect('exec')
- expr = self.parse_expression(precedence['in'])
- variables1 = None
- variables2 = None
- if self.current_str() == 'in':
- self.skip()
- variables1 = self.parse_expression(precedence[','])
- if self.current_str() == ',':
- self.skip()
- variables2 = self.parse_expression(precedence[','])
- return ExecStmt(expr, variables1, variables2)
-
- # Parsing expressions
-
- def parse_expression(self, prec: int = 0, star_expr_allowed: bool = False) -> Expression:
- """Parse a subexpression within a specific precedence context."""
- expr = None # type: Expression
- current = self.current() # Remember token for setting the line number.
-
- # Parse a "value" expression or unary operator expression and store
- # that in expr.
- s = self.current_str()
- if s == '(':
- # Parenthesised expression or cast.
- expr = self.parse_parentheses()
- elif s == '[':
- expr = self.parse_list_expr()
- elif s in ['-', '+', 'not', '~']:
- # Unary operation.
- expr = self.parse_unary_expr()
- elif s == 'lambda':
- expr = self.parse_lambda_expr()
- elif s == '{':
- expr = self.parse_dict_or_set_expr()
- elif s == '*' and star_expr_allowed:
- expr = self.parse_star_expr()
- elif s == '`' and self.pyversion[0] == 2:
- expr = self.parse_backquote_expr()
- else:
- if isinstance(current, Name):
- # Name expression.
- expr = self.parse_name_expr()
- elif isinstance(current, IntLit):
- expr = self.parse_int_expr()
- elif isinstance(current, StrLit):
- expr = self.parse_str_expr()
- elif isinstance(current, BytesLit):
- expr = self.parse_bytes_literal()
- elif isinstance(current, UnicodeLit):
- expr = self.parse_unicode_literal()
- elif isinstance(current, FloatLit):
- expr = self.parse_float_expr()
- elif isinstance(current, ComplexLit):
- expr = self.parse_complex_expr()
- elif isinstance(current, Keyword) and s == "yield":
- # A 'yield' or 'yield from' expression (e.g. on the right-hand side of an assignment)
- expr = self.parse_yield_or_yield_from_expr()
- elif isinstance(current, EllipsisToken) and (self.pyversion[0] >= 3
- or self.is_stub_file):
- expr = self.parse_ellipsis()
- else:
- # Invalid expression.
- raise self.parse_error()
-
- # Set the line of the expression node, if not specified. This
- # simplifies recording the line number as not every node type needs to
- # deal with it separately.
- if expr.line < 0:
- expr.set_line(current)
-
- # Parse operations that require a left argument (stored in expr).
- while True:
- current = self.current()
- s = self.current_str()
- if s == '(':
- # Call expression.
- expr = self.parse_call_expr(expr)
- elif s == '.':
- # Member access expression.
- expr = self.parse_member_expr(expr)
- elif s == '[':
- # Indexing expression.
- expr = self.parse_index_expr(expr)
- elif s == ',':
- # The comma operator is used to build tuples. Comma also
- # separates array items and function arguments, but in this
- # case the precedence is too low to build a tuple.
- if precedence[','] > prec:
- expr = self.parse_tuple_expr(expr)
- else:
- break
- elif s == 'for':
- if precedence['<for>'] > prec:
- # List comprehension or generator expression. Parse as
- # generator expression; it will be converted to list
- # comprehension if needed elsewhere.
- expr = self.parse_generator_expr(expr)
- else:
- break
- elif s == 'if':
- # Conditional expression.
- if precedence['<if>'] > prec:
- expr = self.parse_conditional_expr(expr)
- else:
- break
- else:
- # Binary operation or a special case.
- if isinstance(current, Op):
- op = self.current_str()
- op_prec = precedence[op]
- if op == 'not':
- # Either "not in" or an error.
- op_prec = precedence['in']
- if op_prec > prec:
- if op in op_comp:
- expr = self.parse_comparison_expr(expr, op_prec)
- else:
- expr = self.parse_bin_op_expr(expr, op_prec)
- else:
- # The operation cannot be associated with the
- # current left operand due to the precedence
- # context; let the caller handle it.
- break
- else:
- # Not an operation that accepts a left argument; let the
- # caller handle the rest.
- break
-
- # Set the line of the expression node, if not specified. This
- # simplifies recording the line number as not every node type
- # needs to deal with it separately.
- if expr.line < 0:
- expr.set_line(current)
-
- return expr
-
- def parse_parentheses(self) -> Expression:
- self.skip()
- if self.current_str() == ')':
- # Empty tuple ().
- expr = self.parse_empty_tuple_expr() # type: Expression
- else:
- # Parenthesised expression.
- expr = self.parse_expression(0, star_expr_allowed=True)
- self.expect(')')
- return expr
-
- def parse_star_expr(self) -> StarExpr:
- star = self.expect('*')
- expr = self.parse_expression(precedence['*u'])
- expr = StarExpr(expr)
- if expr.line < 0:
- expr.set_line(star)
- return expr
-
- def parse_empty_tuple_expr(self) -> TupleExpr:
- self.expect(')')
- node = TupleExpr([])
- return node
-
- def parse_list_expr(self) -> Union[ListExpr, ListComprehension]:
- """Parse list literal or list comprehension."""
- items = []
- self.expect('[')
- while self.current_str() != ']' and not self.eol():
- items.append(self.parse_expression(precedence['<for>'], star_expr_allowed=True))
- if self.current_str() != ',':
- break
- self.expect(',')
- if self.current_str() == 'for' and len(items) == 1:
- items[0] = self.parse_generator_expr(items[0])
- self.expect(']')
- if len(items) == 1 and isinstance(items[0], GeneratorExpr):
- return ListComprehension(items[0])
- else:
- expr = ListExpr(items)
- return expr
-
- def parse_generator_expr(self, left_expr: Expression) -> GeneratorExpr:
- tok = self.current()
- indices, sequences, condlists = self.parse_comp_for()
-
- gen = GeneratorExpr(left_expr, indices, sequences, condlists)
- gen.set_line(tok)
- return gen
-
- def parse_comp_for(self) -> Tuple[List[Expression], List[Expression], List[List[Expression]]]:
- indices = []
- sequences = []
- condlists = [] # type: List[List[Expression]]
- while self.current_str() == 'for':
- conds = []
- self.expect('for')
- index = self.parse_for_index_variables()
- indices.append(index)
- self.expect('in')
- if self.pyversion[0] >= 3:
- sequence = self.parse_expression(precedence['<if>'])
- else:
- sequence = self.parse_expression_list()
- sequences.append(sequence)
- while self.current_str() == 'if':
- self.skip()
- conds.append(self.parse_expression(precedence['<if>']))
- condlists.append(conds)
-
- return indices, sequences, condlists
-
- def parse_expression_list(self) -> Expression:
- prec = precedence['<if>']
- expr = self.parse_expression(prec)
- if self.current_str() != ',':
- return expr
- else:
- t = self.current()
- tuple_expr = self.parse_tuple_expr(expr, prec)
- tuple_expr.set_line(t)
- return tuple_expr
-
- def parse_conditional_expr(self, left_expr: Expression) -> ConditionalExpr:
- self.expect('if')
- cond = self.parse_expression(precedence['<if>'])
- self.expect('else')
- else_expr = self.parse_expression(precedence['<if>'])
- return ConditionalExpr(cond, left_expr, else_expr)
-
- def parse_dict_or_set_expr(self) -> Union[SetComprehension, SetExpr,
- DictionaryComprehension, DictExpr]:
- items = [] # type: List[Tuple[Expression, Expression]]
- self.expect('{')
- while self.current_str() != '}' and not self.eol():
- key = self.parse_expression(precedence['<for>'])
- if self.current_str() in [',', '}'] and items == []:
- return self.parse_set_expr(key)
- elif self.current_str() == 'for' and items == []:
- return self.parse_set_comprehension(key)
- elif self.current_str() != ':':
- raise self.parse_error()
- colon = self.expect(':')
- value = self.parse_expression(precedence['<for>'])
- if self.current_str() == 'for' and items == []:
- return self.parse_dict_comprehension(key, value, colon)
- items.append((key, value))
- if self.current_str() != ',':
- break
- self.expect(',')
- self.expect('}')
- node = DictExpr(items)
- return node
-
- def parse_set_expr(self, first: Expression) -> SetExpr:
- items = [first]
- while self.current_str() != '}' and not self.eol():
- self.expect(',')
- if self.current_str() == '}':
- break
- items.append(self.parse_expression(precedence[',']))
- self.expect('}')
- expr = SetExpr(items)
- return expr
-
- def parse_set_comprehension(self, expr: Expression) -> SetComprehension:
- gen = self.parse_generator_expr(expr)
- self.expect('}')
- set_comp = SetComprehension(gen)
- return set_comp
-
- def parse_dict_comprehension(self, key: Expression, value: Expression,
- colon: Token) -> DictionaryComprehension:
- indices, sequences, condlists = self.parse_comp_for()
- dic = DictionaryComprehension(key, value, indices, sequences, condlists)
- dic.set_line(colon)
- self.expect('}')
- return dic
-
- def parse_tuple_expr(self, expr: Expression,
- prec: int = precedence[',']) -> TupleExpr:
- items = [expr]
- while True:
- self.expect(',')
- if (self.current_str() in [')', ']', '=', ':'] or
- isinstance(self.current(), Break)):
- break
- items.append(self.parse_expression(prec, star_expr_allowed=True))
- if self.current_str() != ',': break
- node = TupleExpr(items)
- return node
-
- def parse_backquote_expr(self) -> BackquoteExpr:
- self.expect('`')
- expr = self.parse_expression()
- self.expect('`')
- return BackquoteExpr(expr)
-
- def parse_name_expr(self) -> NameExpr:
- tok = self.expect_type(Name)
- node = NameExpr(tok.string)
- node.set_line(tok)
- return node
-
- octal_int = re.compile('0+[1-9]')
-
- def parse_int_expr(self) -> IntExpr:
- tok = self.expect_type(IntLit)
- string = tok.string.rstrip('lL') # Strip trailing 'L' suffix (Python 2 long literals)
- if self.octal_int.match(string):
- value = int(string, 8)
- else:
- value = int(string, 0)
- node = IntExpr(value)
- return node
-
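[Editor's note] A tiny runnable sketch of the same literal handling (hypothetical helper name, mirroring the regex above): the Python 2 long suffix is stripped, leading-zero literals are read as octal, and everything else goes through int(s, 0):

    import re

    octal_int = re.compile('0+[1-9]')

    def parse_int_literal(s):
        s = s.rstrip('lL')                        # drop Python 2 long suffix
        return int(s, 8) if octal_int.match(s) else int(s, 0)

    assert parse_int_literal('0644') == 0o644     # Python 2 octal form
    assert parse_int_literal('0x1fL') == 31
    assert parse_int_literal('10') == 10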
- def parse_str_expr(self) -> Union[UnicodeExpr, StrExpr]:
- # XXX \uxxxx literals
- token = self.expect_type(StrLit)
- value = cast(StrLit, token).parsed()
- is_unicode = False
- while isinstance(self.current(), (StrLit, UnicodeLit)):
- token = self.skip()
- if isinstance(token, StrLit):
- value += token.parsed()
- elif isinstance(token, UnicodeLit):
- value += token.parsed()
- is_unicode = True
- if is_unicode or (self.pyversion[0] == 2 and 'unicode_literals' in self.future_options):
- return UnicodeExpr(value)
- else:
- return StrExpr(value)
-
- def parse_bytes_literal(self) -> Union[BytesExpr, StrExpr]:
- # XXX \uxxxx literals
- tok = [self.expect_type(BytesLit)]
- value = (cast(BytesLit, tok[0])).parsed()
- while isinstance(self.current(), BytesLit):
- t = cast(BytesLit, self.skip())
- value += t.parsed()
- if self.pyversion[0] >= 3:
- return BytesExpr(value)
- else:
- return StrExpr(value)
-
- def parse_unicode_literal(self) -> Union[StrExpr, UnicodeExpr]:
- # XXX \uxxxx literals
- token = self.expect_type(UnicodeLit)
- value = cast(UnicodeLit, token).parsed()
- while isinstance(self.current(), (UnicodeLit, StrLit)):
- token = cast(Union[UnicodeLit, StrLit], self.skip())
- value += token.parsed()
- if self.pyversion[0] >= 3:
- # Python 3.3 supports u'...' as an alias of '...'.
- return StrExpr(value)
- else:
- return UnicodeExpr(value)
-
- def parse_float_expr(self) -> FloatExpr:
- tok = self.expect_type(FloatLit)
- return FloatExpr(float(tok.string))
-
- def parse_complex_expr(self) -> ComplexExpr:
- tok = self.expect_type(ComplexLit)
- return ComplexExpr(complex(tok.string))
-
- def parse_call_expr(self, callee: Expression) -> CallExpr:
- self.expect('(')
- args, kinds, names = self.parse_arg_expr()
- self.expect(')')
- return CallExpr(callee, args, kinds, names)
-
- def parse_arg_expr(self) -> Tuple[List[Expression], List[int], List[str]]:
- """Parse arguments in a call expression (within '(' and ')').
-
- Return a tuple with these items:
- argument expressions
- argument kinds
- argument names (for named arguments; None for ordinary args)
- """
- args = [] # type: List[Expression]
- kinds = [] # type: List[int]
- names = [] # type: List[str]
- var_arg = False
- dict_arg = False
- named_args = False
- while self.current_str() != ')' and not self.eol() and not dict_arg:
- if isinstance(self.current(), Name) and self.peek().string == '=':
- # Named argument
- name = self.expect_type(Name)
- self.expect('=')
- kinds.append(nodes.ARG_NAMED)
- names.append(name.string)
- named_args = True
- elif (self.current_str() == '*' and not var_arg and not dict_arg):
- # *args
- var_arg = True
- self.expect('*')
- kinds.append(nodes.ARG_STAR)
- names.append(None)
- elif self.current_str() == '**':
- # **kwargs
- self.expect('**')
- dict_arg = True
- kinds.append(nodes.ARG_STAR2)
- names.append(None)
- elif not var_arg and not named_args:
- # Ordinary argument
- kinds.append(nodes.ARG_POS)
- names.append(None)
- else:
- raise self.parse_error()
- args.append(self.parse_expression(precedence[',']))
- if self.current_str() != ',':
- break
- self.expect(',')
- return args, kinds, names
-
- def parse_member_expr(self, expr: Expression) -> Union[SuperExpr, MemberExpr]:
- self.expect('.')
- name = self.expect_type(Name)
- if (isinstance(expr, CallExpr) and isinstance(expr.callee, NameExpr)
- and expr.callee.name == 'super'):
- # super() expression
- return SuperExpr(name.string)
- else:
- return MemberExpr(expr, name.string)
-
- def parse_index_expr(self, base: Expression) -> IndexExpr:
- self.expect('[')
- index = self.parse_slice_item()
- if self.current_str() == ',':
- # Extended slicing such as x[1:, :2].
- items = [index]
- while self.current_str() == ',':
- self.skip()
- if self.current_str() == ']' or isinstance(self.current(), Break):
- break
- items.append(self.parse_slice_item())
- index = TupleExpr(items)
- index.set_line(items[0])
- self.expect(']')
- node = IndexExpr(base, index)
- return node
-
- def parse_slice_item(self) -> Expression:
- if self.current_str() != ':':
- if self.current_str() == '...':
- # Ellipsis is valid here even in Python 2 (but not elsewhere).
- ellipsis = EllipsisExpr()
- token = self.skip()
- ellipsis.set_line(token)
- return ellipsis
- else:
- item = self.parse_expression(precedence[','])
- else:
- item = None
- if self.current_str() == ':':
- # Slice.
- index = item
- colon = self.expect(':')
- if self.current_str() not in (']', ':', ','):
- end_index = self.parse_expression(precedence[','])
- else:
- end_index = None
- stride = None
- if self.current_str() == ':':
- self.expect(':')
- if self.current_str() not in (']', ','):
- stride = self.parse_expression(precedence[','])
- item = SliceExpr(index, end_index, stride)
- item.set_line(colon)
- return item
-
- def parse_bin_op_expr(self, left: Expression, prec: int) -> OpExpr:
- op = self.expect_type(Op)
- op_str = op.string
- if op_str == '~':
- self.ind -= 1
- raise self.parse_error()
- right = self.parse_expression(prec)
- node = OpExpr(op_str, left, right)
- return node
-
- def parse_comparison_expr(self, left: Expression, prec: int) -> ComparisonExpr:
- operators_str = []
- operands = [left]
-
- while True:
- op = self.expect_type(Op)
- op_str = op.string
- if op_str == 'not':
- if self.current_str() == 'in':
- op_str = 'not in'
- self.skip()
- else:
- raise self.parse_error()
- elif op_str == 'is' and self.current_str() == 'not':
- op_str = 'is not'
- self.skip()
-
- operators_str.append(op_str)
- operand = self.parse_expression(prec)
- operands.append(operand)
-
- # Continue if next token is a comparison operator
- self.current()
- s = self.current_str()
- if s not in op_comp:
- break
-
- node = ComparisonExpr(operators_str, operands)
- return node
-
- def parse_unary_expr(self) -> UnaryExpr:
- op_tok = self.skip()
- op = op_tok.string
- if op == '-' or op == '+':
- prec = precedence['-u']
- else:
- prec = precedence[op]
- expr = self.parse_expression(prec)
- node = UnaryExpr(op, expr)
- return node
-
- def parse_lambda_expr(self) -> FuncExpr:
- lambda_tok = self.expect('lambda')
-
- args, extra_stmts = self.parse_arg_list(allow_signature=False)
-
- # Use 'object' as the placeholder return type; it will be inferred
- # later. We can't use 'Any' since it could make type inference results
- # less precise.
- ret_type = UnboundType('__builtins__.object')
- typ = self.build_func_annotation(ret_type, args,
- lambda_tok.line, lambda_tok.column, is_default_ret=True)
-
- colon = self.expect(':')
-
- expr = self.parse_expression(precedence[','])
-
- return_stmt = ReturnStmt(expr)
- return_stmt.set_line(lambda_tok)
- nodes = [return_stmt] # type: List[Statement]
- # Potentially insert extra assignment statements to the beginning of the
- # body, used to decompose Python 2 tuple arguments.
- nodes[:0] = extra_stmts
- body = Block(nodes)
- body.set_line(colon)
-
- return FuncExpr(args, body, typ)
-
- # Helper methods
-
- def skip(self) -> Token:
- self.ind += 1
- return self.tok[self.ind - 1]
-
- def expect(self, string: str) -> Token:
- if self.current_str() == string:
- self.ind += 1
- return self.tok[self.ind - 1]
- else:
- raise self.parse_error()
-
- def expect_indent(self) -> Token:
- if isinstance(self.current(), Indent):
- return self.expect_type(Indent)
- else:
- self.fail('Expected an indented block', self.current().line, self.current().column)
- return none
-
- def fail(self, msg: str, line: int, column: int) -> None:
- self.errors.report(line, column, msg)
-
- def expect_type(self, typ: type) -> Token:
- current = self.current()
- if isinstance(current, typ):
- self.ind += 1
- return current
- else:
- raise self.parse_error()
-
- def expect_break(self) -> Token:
- return self.expect_type(Break)
-
- def current(self) -> Token:
- return self.tok[self.ind]
-
- def current_str(self) -> str:
- return self.current().string
-
- def peek(self) -> Token:
- return self.tok[self.ind + 1]
-
- def parse_error(self) -> ParseError:
- self.parse_error_at(self.current())
- return ParseError()
-
- def parse_error_at(self, tok: Token, skip: bool = True, reason: Optional[str] = None) -> None:
- msg = ''
- if isinstance(tok, LexError):
- msg = token_repr(tok)
- msg = msg[0].upper() + msg[1:]
- elif isinstance(tok, Indent) or isinstance(tok, Dedent):
- msg = 'Inconsistent indentation'
- else:
- formatted_reason = ": {}".format(reason) if reason else ""
- msg = 'Parse error before {}{}'.format(token_repr(tok), formatted_reason)
-
- self.errors.report(tok.line, tok.column, msg)
-
- if skip:
- self.skip_until_next_line()
-
- def skip_until_break(self) -> None:
- n = 0
- while (not isinstance(self.current(), Break)
- and not isinstance(self.current(), Eof)):
- self.skip()
- n += 1
- if isinstance(self.tok[self.ind - 1], Colon) and n > 1:
- self.ind -= 1
-
- def skip_until_next_line(self) -> None:
- self.skip_until_break()
- if isinstance(self.current(), Break):
- self.skip()
-
- def eol(self) -> bool:
- return isinstance(self.current(), Break) or self.eof()
-
- def eof(self) -> bool:
- return isinstance(self.current(), Eof)
-
- # Type annotation related functionality
-
- def parse_type(self) -> Type:
- try:
- typ, self.ind = parse_type(self.tok, self.ind)
- except TypeParseError as e:
- self.parse_error_at(e.token, reason=e.message)
- raise ParseError()
- return typ
-
- annotation_prefix_re = re.compile(r'#\s*type:')
- ignore_prefix_re = re.compile(r'ignore\b')
-
- def parse_type_comment(self, token: Token, signature: bool) -> Type:
- """Parse a '# type: ...' annotation.
-
- Return None if no annotation found. If signature is True, expect
- a type signature of form (...) -> t.
- """
- whitespace_or_comments = token.rep().strip()
- if self.annotation_prefix_re.match(whitespace_or_comments):
- type_as_str = whitespace_or_comments.split(':', 1)[1].strip()
- if self.ignore_prefix_re.match(type_as_str):
- # Actually a "# type: ignore" annotation -> not a type.
- return None
- tokens = lex.lex(type_as_str, token.line)[0]
- if len(tokens) < 2:
- # Empty annotation (only Eof token)
- self.errors.report(token.line, token.column, 'Empty type annotation')
- return None
- try:
- if not signature:
- type, index = parse_types(tokens, 0)
- else:
- type, index = parse_signature(tokens)
- except TypeParseError as e:
- self.parse_error_at(e.token, skip=False, reason=e.message)
- return None
- if index < len(tokens) - 2:
- self.parse_error_at(tokens[index], skip=False)
- return None
- return type
- else:
- return None
-
-
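[Editor's note] A few assignment-level examples of what parse_type_comment recognizes (shown as a minimal sketch; the comments are inert at runtime):

    from typing import Dict, List

    xs = []              # type: List[int]       # parsed with parse_types()
    table = {}           # type: Dict[str, int]
    n = int('3')         # type: ignore          # matches ignore_prefix_re: not a type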
-def token_repr(tok: Token) -> str:
- """Return a representation of a token for use in parse error messages."""
- if isinstance(tok, Break):
- return 'end of line'
- elif isinstance(tok, Eof):
- return 'end of file'
- elif isinstance(tok, Keyword) or isinstance(tok, Name):
- return '"{}"'.format(tok.string)
- elif isinstance(tok, IntLit) or isinstance(tok, FloatLit) or isinstance(tok, ComplexLit):
- return 'numeric literal'
- elif isinstance(tok, StrLit) or isinstance(tok, UnicodeLit):
- return 'string literal'
- elif (isinstance(tok, Punct) or isinstance(tok, Op)
- or isinstance(tok, Colon)):
- return tok.string
- elif isinstance(tok, Bom):
- return 'byte order mark'
- elif isinstance(tok, Indent):
- return 'indent'
- elif isinstance(tok, Dedent):
- return 'dedent'
- elif isinstance(tok, EllipsisToken):
- return '...'
+ if options.python_version[0] >= 3 or is_stub_file:
+ import mypy.fastparse
+ return mypy.fastparse.parse(source,
+ fnam=fnam,
+ errors=errors,
+ pyversion=options.python_version,
+ custom_typing_module=options.custom_typing_module)
else:
- if isinstance(tok, LexError):
- t = tok.type
- if t == lex.NUMERIC_LITERAL_ERROR:
- return 'invalid numeric literal'
- elif t == lex.UNTERMINATED_STRING_LITERAL:
- return 'unterminated string literal'
- elif t == lex.INVALID_CHARACTER:
- msg = 'unrecognized character'
- if ord(tok.string) in range(33, 127):
- msg += ' ' + tok.string
- return msg
- elif t == lex.INVALID_DEDENT:
- return 'inconsistent indentation'
- elif t == lex.DECODE_ERROR:
- return tok.message
- raise ValueError('Unknown token {}'.format(repr(tok)))
-
-
-if __name__ == '__main__':
- # Parse a file and dump the AST (or display errors).
- import sys
-
- def usage() -> None:
- print('Usage: parse.py [--py2] [--quiet] FILE [...]', file=sys.stderr)
- sys.exit(2)
-
- args = sys.argv[1:]
- pyversion = defaults.PYTHON3_VERSION
- quiet = False
- while args and args[0].startswith('--'):
- if args[0] == '--py2':
- pyversion = defaults.PYTHON2_VERSION
- elif args[0] == '--quiet':
- quiet = True
- else:
- usage()
- args = args[1:]
- if len(args) < 1:
- usage()
- status = 0
- for fnam in args:
- with open(fnam, 'rb') as f:
- s = f.read()
- errors = Errors()
- try:
- options = Options()
- options.python_version = pyversion
- tree = parse(s, fnam, None, options=options)
- if not quiet:
- print(tree)
- except CompileError as e:
- for msg in e.messages:
- sys.stderr.write('%s\n' % msg)
- status = 1
- sys.exit(status)
+ import mypy.fastparse2
+ return mypy.fastparse2.parse(source,
+ fnam=fnam,
+ errors=errors,
+ pyversion=options.python_version,
+ custom_typing_module=options.custom_typing_module)
diff --git a/mypy/parsetype.py b/mypy/parsetype.py
deleted file mode 100644
index 1af0352..0000000
--- a/mypy/parsetype.py
+++ /dev/null
@@ -1,249 +0,0 @@
-"""Type parser"""
-
-from typing import List, Tuple, Union, Optional
-
-from mypy.types import (
- Type, UnboundType, TupleType, TypeList, CallableType, StarType,
- EllipsisType
-)
-from mypy.lex import Token, Name, StrLit, lex
-from mypy import nodes
-
-
-none = Token('') # Empty token
-
-
-class TypeParseError(Exception):
- def __init__(self, token: Token, index: int, message: Optional[str] = None) -> None:
- super().__init__()
- self.token = token
- self.index = index
- self.message = message
-
-
-def parse_type(tok: List[Token], index: int) -> Tuple[Type, int]:
- """Parse a type.
-
- Return (type, index after type).
- """
-
- p = TypeParser(tok, index)
- return p.parse_type(), p.index()
-
-
-def parse_types(tok: List[Token], index: int) -> Tuple[Type, int]:
- """Parse one or more types separated by commas (optional parentheses).
-
- Return (type, index after type).
- """
-
- p = TypeParser(tok, index)
- return p.parse_types(), p.index()
-
-
-class TypeParser:
- def __init__(self, tok: List[Token], ind: int) -> None:
- self.tok = tok
- self.ind = ind
-
- def index(self) -> int:
- return self.ind
-
- def parse_type(self) -> Type:
- """Parse a type."""
- t = self.current_token()
- if t.string == '(':
- return self.parse_parens()
- if isinstance(t, Name):
- return self.parse_named_type()
- elif t.string == '[':
- return self.parse_type_list()
- elif t.string == '*':
- return self.parse_star_type()
- elif t.string == '...':
- return self.parse_ellipsis_type()
- elif isinstance(t, StrLit):
- # Type escaped as string literal.
- typestr = t.parsed()
- line = t.line
- self.skip()
- try:
- result = parse_str_as_type(typestr, line)
- except TypeParseError as e:
- raise TypeParseError(e.token, self.ind)
- return result
- else:
- raise self.parse_error()
-
- def parse_parens(self) -> Type:
- self.expect('(')
- types = self.parse_types()
- self.expect(')')
- return types
-
- def parse_types(self) -> Type:
- """ Parse either a single type or a comma separated
- list of types as a tuple type. In the latter case, a
- trailing comma is needed when the list contains only
- a single type (and optional otherwise).
-
- int -> int
- int, -> TupleType[int]
- int, int, int -> TupleType[int, int, int]
- """
- type = self.parse_type()
- if self.current_token_str() == ',':
- items = [type]
- while self.current_token_str() == ',':
- self.skip()
- if self.current_token_str() == ')':
- break
- items.append(self.parse_type())
- type = TupleType(items, None, type.line, implicit=True)
- return type
-
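[Editor's note] The rules in the docstring above map onto assignment type comments like these (a hedged sketch; the parenthesized single-type form follows this parser's grammar):

    x, y = 1, 'a'       # type: int, str          # comma list -> 2-tuple type
    point = 1, 2, 3     # type: int, int, int     # -> TupleType[int, int, int]
    single = (1,)       # type: (int,)            # trailing comma -> 1-tuple type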
- def parse_type_list(self) -> TypeList:
- """Parse type list [t, ...]."""
- lbracket = self.expect('[')
- commas = [] # type: List[Token]
- items = [] # type: List[Type]
- while self.current_token_str() != ']':
- t = self.parse_type()
- items.append(t)
- if self.current_token_str() != ',':
- break
- commas.append(self.skip())
- self.expect(']')
- return TypeList(items, line=lbracket.line)
-
- def parse_named_type(self) -> Type:
- line = self.current_token().line
- name = ''
- components = [] # type: List[Token]
-
- components.append(self.expect_type(Name))
- name += components[-1].string
-
- while self.current_token_str() == '.':
- components.append(self.skip())
- t = self.expect_type(Name)
- components.append(t)
- name += '.' + t.string
-
- commas = [] # type: List[Token]
- args = [] # type: List[Type]
- if self.current_token_str() == '[':
- self.skip()
- while True:
- typ = self.parse_type()
- args.append(typ)
- if self.current_token_str() != ',':
- break
- commas.append(self.skip())
-
- self.expect(']')
-
- typ = UnboundType(name, args, line)
- return typ
-
- def parse_star_type(self) -> Type:
- star = self.expect('*')
- type = self.parse_type()
- return StarType(type, star.line)
-
- def parse_ellipsis_type(self) -> Type:
- ellipsis = self.expect('...')
- return EllipsisType(ellipsis.line)
-
- # Helpers
-
- def skip(self) -> Token:
- self.ind += 1
- return self.tok[self.ind - 1]
-
- def expect(self, string: str) -> Token:
- if self.tok[self.ind].string == string:
- self.ind += 1
- return self.tok[self.ind - 1]
- else:
- raise self.parse_error()
-
- def expect_type(self, typ: type) -> Token:
- if isinstance(self.current_token(), typ):
- self.ind += 1
- return self.tok[self.ind - 1]
- else:
- raise self.parse_error()
-
- def current_token(self) -> Token:
- return self.tok[self.ind]
-
- def current_token_str(self) -> str:
- return self.current_token().string
-
- def parse_error(self) -> TypeParseError:
- return TypeParseError(self.tok[self.ind], self.ind)
-
-
-def parse_str_as_type(typestr: str, line: int) -> Type:
- """Parse a type represented as a string.
-
- Raise TypeParseError on parse error.
- """
-
- typestr = typestr.strip()
- tokens = lex(typestr, line)[0]
- result, i = parse_type(tokens, 0)
- if i < len(tokens) - 2:
- raise TypeParseError(tokens[i], i)
- return result
-
-
-def parse_signature(tokens: List[Token]) -> Tuple[CallableType, int]:
- """Parse signature of form (argtype, ...) -> ...
-
- Return tuple (signature type, token index).
- """
- i = 0
- if tokens[i].string != '(':
- raise TypeParseError(tokens[i], i)
- i += 1
- arg_types = [] # type: List[Type]
- arg_kinds = [] # type: List[int]
- encountered_ellipsis = False
- while tokens[i].string != ')':
- if tokens[i].string == '*':
- arg_kinds.append(nodes.ARG_STAR)
- i += 1
- elif tokens[i].string == '**':
- arg_kinds.append(nodes.ARG_STAR2)
- i += 1
- else:
- arg_kinds.append(nodes.ARG_POS)
- arg, i = parse_type(tokens, i)
- arg_types.append(arg)
- next = tokens[i].string
-
- # Check for ellipsis. If it exists, assert it's the only arg_type.
- # Disallow '(..., int) -> None' for example.
- if isinstance(arg, EllipsisType):
- encountered_ellipsis = True
- if encountered_ellipsis and len(arg_types) != 1:
- raise TypeParseError(tokens[i], i,
- "Ellipses cannot accompany other argument types"
- " in function type signature.")
-
- if next not in ',)':
- raise TypeParseError(tokens[i], i)
- if next == ',':
- i += 1
- i += 1
- if tokens[i].string != '->':
- raise TypeParseError(tokens[i], i)
- i += 1
- ret_type, i = parse_type(tokens, i)
- return CallableType(arg_types,
- arg_kinds,
- [None] * len(arg_types),
- ret_type, None,
- is_ellipsis_args=encountered_ellipsis), i
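[Editor's note] To ground the cases handled above, here are the comment-signature shapes parse_signature accepts, written as ordinary runnable Python, with the star kinds and the ellipsis-only rule illustrated:

    from typing import List

    def scale(xs, k):
        # type: (List[float], float) -> List[float]
        return [x * k for x in xs]

    def log(fmt, *args, **extra):
        # type: (str, *object, **object) -> None
        # '*'/'**' markers map to ARG_STAR / ARG_STAR2.
        print(fmt.format(*args), extra)

    def run_later(callback):
        # type: (...) -> None
        # Ellipsis must stand alone: '(..., int) -> None' would be rejected.
        callback()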
diff --git a/mypy/report.py b/mypy/report.py
index 42c4a1c..74b44ac 100644
--- a/mypy/report.py
+++ b/mypy/report.py
@@ -16,6 +16,7 @@ import sys
from mypy.nodes import MypyFile, Expression, FuncDef
from mypy import stats
+from mypy.options import Options
from mypy.traverser import TraverserVisitor
from mypy.types import Type
from mypy.version import __version__
@@ -56,9 +57,9 @@ class Reports:
self.named_reporters[report_type] = reporter
return reporter
- def file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
+ def file(self, tree: MypyFile, type_map: Dict[Expression, Type], options: Options) -> None:
for reporter in self.reporters:
- reporter.on_file(tree, type_map)
+ reporter.on_file(tree, type_map, options)
def finish(self) -> None:
for reporter in self.reporters:
@@ -70,7 +71,7 @@ class AbstractReporter(metaclass=ABCMeta):
self.output_dir = output_dir
@abstractmethod
- def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
+ def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type], options: Options) -> None:
pass
@abstractmethod
@@ -104,7 +105,10 @@ class LineCountReporter(AbstractReporter):
stats.ensure_dir_exists(output_dir)
- def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
+ def on_file(self,
+ tree: MypyFile,
+ type_map: Dict[Expression, Type],
+ options: Options) -> None:
# Count physical lines. This assumes the file's encoding is a
# superset of ASCII (or at least uses \n in its line endings).
with open(tree.path, 'rb') as f:
@@ -115,6 +119,10 @@ class LineCountReporter(AbstractReporter):
unannotated_funcs, annotated_funcs = func_counter.counts
total_funcs = annotated_funcs + unannotated_funcs
+ # Don't count lines or functions as annotated if they have their errors ignored.
+ if options.ignore_errors:
+ annotated_funcs = 0
+
imputed_annotated_lines = (physical_lines * annotated_funcs // total_funcs
if total_funcs else physical_lines)
@@ -192,7 +200,7 @@ class LineCoverageVisitor(TraverserVisitor):
if cur_indent is None:
# Consume the line, but don't mark it as belonging to the function yet.
cur_line += 1
- elif cur_indent > start_indent:
+ elif start_indent is not None and cur_indent > start_indent:
# A non-blank line that belongs to the function.
cur_line += 1
end_line = cur_line
@@ -203,7 +211,7 @@ class LineCoverageVisitor(TraverserVisitor):
is_typed = defn.type is not None
for line in range(start_line, end_line):
old_indent, _ = self.lines_covered[line]
- assert start_indent > old_indent
+ assert start_indent is not None and start_indent > old_indent
self.lines_covered[line] = (start_indent, is_typed)
# Visit the body, in case there are nested functions
@@ -225,7 +233,10 @@ class LineCoverageReporter(AbstractReporter):
stats.ensure_dir_exists(output_dir)
- def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
+ def on_file(self,
+ tree: MypyFile,
+ type_map: Dict[Expression, Type],
+ options: Options) -> None:
with open(tree.path) as f:
tree_source = f.readlines()
@@ -254,7 +265,9 @@ class OldHtmlReporter(AbstractReporter):
variables to preserve state for the index.
"""
- def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
+ def on_file(self,
+ tree: MypyFile,
+ type_map: Dict[Expression, Type], options: Options) -> None:
stats.generate_html_report(tree, tree.path, type_map, self.output_dir)
def on_finish(self) -> None:
@@ -291,10 +304,13 @@ class MemoryXmlReporter(AbstractReporter):
self.css_html_path = os.path.join(reports.data_dir, 'xml', 'mypy-html.css')
xsd_path = os.path.join(reports.data_dir, 'xml', 'mypy.xsd')
self.schema = etree.XMLSchema(etree.parse(xsd_path))
- self.last_xml = None # type: etree._ElementTree
+ self.last_xml = None # type: Optional[etree._ElementTree]
self.files = [] # type: List[FileInfo]
- def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
+ def on_file(self,
+ tree: MypyFile,
+ type_map: Dict[Expression, Type],
+ options: Options) -> None:
self.last_xml = None
path = os.path.relpath(tree.path)
if stats.is_special_module(path):
@@ -404,7 +420,10 @@ class CoberturaXmlReporter(AbstractReporter):
self.doc = etree.ElementTree(self.root)
self.root_package = CoberturaPackage('.')
- def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
+ def on_file(self,
+ tree: MypyFile,
+ type_map: Dict[Expression, Type],
+ options: Options) -> None:
path = os.path.relpath(tree.path)
visitor = stats.StatisticsVisitor(inferred=True, typemap=type_map, all_nodes=True)
tree.accept(visitor)
@@ -497,7 +516,10 @@ class XmlReporter(AbstractXmlReporter):
that makes it fail from file:// URLs but work on http:// URLs.
"""
- def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
+ def on_file(self,
+ tree: MypyFile,
+ type_map: Dict[Expression, Type],
+ options: Options) -> None:
last_xml = self.memory_xml.last_xml
if last_xml is None:
return
@@ -510,6 +532,7 @@ class XmlReporter(AbstractXmlReporter):
def on_finish(self) -> None:
last_xml = self.memory_xml.last_xml
+ assert last_xml is not None
out_path = os.path.join(self.output_dir, 'index.xml')
out_xslt = os.path.join(self.output_dir, 'mypy-html.xslt')
out_css = os.path.join(self.output_dir, 'mypy-html.css')
@@ -535,7 +558,10 @@ class XsltHtmlReporter(AbstractXmlReporter):
self.xslt_html = etree.XSLT(etree.parse(self.memory_xml.xslt_html_path))
self.param_html = etree.XSLT.strparam('html')
- def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
+ def on_file(self,
+ tree: MypyFile,
+ type_map: Dict[Expression, Type],
+ options: Options) -> None:
last_xml = self.memory_xml.last_xml
if last_xml is None:
return
@@ -550,6 +576,7 @@ class XsltHtmlReporter(AbstractXmlReporter):
def on_finish(self) -> None:
last_xml = self.memory_xml.last_xml
+ assert last_xml is not None
out_path = os.path.join(self.output_dir, 'index.html')
out_css = os.path.join(self.output_dir, 'mypy-html.css')
transformed_html = bytes(self.xslt_html(last_xml, ext=self.param_html))
@@ -573,11 +600,15 @@ class XsltTxtReporter(AbstractXmlReporter):
self.xslt_txt = etree.XSLT(etree.parse(self.memory_xml.xslt_txt_path))
- def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type]) -> None:
+ def on_file(self,
+ tree: MypyFile,
+ type_map: Dict[Expression, Type],
+ options: Options) -> None:
pass
def on_finish(self) -> None:
last_xml = self.memory_xml.last_xml
+ assert last_xml is not None
out_path = os.path.join(self.output_dir, 'index.txt')
stats.ensure_dir_exists(os.path.dirname(out_path))
transformed_txt = bytes(self.xslt_txt(last_xml))
diff --git a/mypy/sametypes.py b/mypy/sametypes.py
index e3cc561..0531ecc 100644
--- a/mypy/sametypes.py
+++ b/mypy/sametypes.py
@@ -1,7 +1,7 @@
from typing import Sequence
from mypy.types import (
- Type, UnboundType, ErrorType, AnyType, NoneTyp, Void, TupleType, TypedDictType,
+ Type, UnboundType, AnyType, NoneTyp, TupleType, TypedDictType,
UnionType, CallableType, TypeVarType, Instance, TypeVisitor, ErasedType,
TypeList, Overloaded, PartialType, DeletedType, UninhabitedType, TypeType
)
@@ -55,18 +55,9 @@ class SameTypeVisitor(TypeVisitor[bool]):
def visit_unbound_type(self, left: UnboundType) -> bool:
return True
- def visit_error_type(self, left: ErrorType) -> bool:
- return False
-
- def visit_type_list(self, t: TypeList) -> bool:
- assert False, 'Not supported'
-
def visit_any(self, left: AnyType) -> bool:
return isinstance(self.right, AnyType)
- def visit_void(self, left: Void) -> bool:
- return isinstance(self.right, Void)
-
def visit_none_type(self, left: NoneTyp) -> bool:
return isinstance(self.right, NoneTyp)
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 71a8323..2d01ca2 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -44,8 +44,10 @@ TODO: Check if the third pass slows down type checking significantly.
"""
from collections import OrderedDict
+from contextlib import contextmanager
+
from typing import (
- List, Dict, Set, Tuple, cast, TypeVar, Union, Optional, Callable
+ List, Dict, Set, Tuple, cast, TypeVar, Union, Optional, Callable, Iterator,
)
from mypy.nodes import (
@@ -57,27 +59,34 @@ from mypy.nodes import (
ForStmt, BreakStmt, ContinueStmt, IfStmt, TryStmt, WithStmt, DelStmt, PassStmt,
GlobalDecl, SuperExpr, DictExpr, CallExpr, RefExpr, OpExpr, UnaryExpr,
SliceExpr, CastExpr, RevealTypeExpr, TypeApplication, Context, SymbolTable,
- SymbolTableNode, BOUND_TVAR, UNBOUND_TVAR, ListComprehension, GeneratorExpr,
- FuncExpr, MDEF, FuncBase, Decorator, SetExpr, TypeVarExpr, NewTypeExpr,
+ SymbolTableNode, TVAR, ListComprehension, GeneratorExpr,
+ LambdaExpr, MDEF, FuncBase, Decorator, SetExpr, TypeVarExpr, NewTypeExpr,
StrExpr, BytesExpr, PrintStmt, ConditionalExpr, PromoteExpr,
ComparisonExpr, StarExpr, ARG_POS, ARG_NAMED, ARG_NAMED_OPT, MroError, type_aliases,
YieldFromExpr, NamedTupleExpr, TypedDictExpr, NonlocalDecl, SymbolNode,
SetComprehension, DictionaryComprehension, TYPE_ALIAS, TypeAliasExpr,
YieldExpr, ExecStmt, Argument, BackquoteExpr, ImportBase, AwaitExpr,
- IntExpr, FloatExpr, UnicodeExpr, EllipsisExpr, TempNode,
- COVARIANT, CONTRAVARIANT, INVARIANT, UNBOUND_IMPORTED, LITERAL_YES,
+ IntExpr, FloatExpr, UnicodeExpr, EllipsisExpr, TempNode, EnumCallExpr,
+ COVARIANT, CONTRAVARIANT, INVARIANT, UNBOUND_IMPORTED, LITERAL_YES, ARG_OPT, nongen_builtins,
+ collections_type_aliases, get_member_expr_fullname,
)
+from mypy.tvar_scope import TypeVarScope
from mypy.typevars import has_no_typevars, fill_typevars
from mypy.visitor import NodeVisitor
from mypy.traverser import TraverserVisitor
from mypy.errors import Errors, report_internal_error
+from mypy.messages import CANNOT_ASSIGN_TO_TYPE
from mypy.types import (
NoneTyp, CallableType, Overloaded, Instance, Type, TypeVarType, AnyType,
FunctionLike, UnboundType, TypeList, TypeVarDef, TypeType,
TupleType, UnionType, StarType, EllipsisType, function_type, TypedDictType,
+ TypeQuery
)
from mypy.nodes import implicit_module_attrs
-from mypy.typeanal import TypeAnalyser, TypeAnalyserPass3, analyze_type_alias
+from mypy.typeanal import (
+ TypeAnalyser, TypeAnalyserPass3, analyze_type_alias, no_subscript_builtin_alias,
+ TypeVariableQuery, TypeVarList, remove_dups,
+)
from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError
from mypy.sametypes import is_same_type
from mypy.options import Options
@@ -173,20 +182,18 @@ class SemanticAnalyzer(NodeVisitor):
# Nested block depths of scopes
block_depth = None # type: List[int]
# TypeInfo of directly enclosing class (or None)
- type = None # type: TypeInfo
+ type = None # type: Optional[TypeInfo]
# Stack of outer classes (the second tuple item contains tvars).
type_stack = None # type: List[TypeInfo]
# Type variables that are bound by the directly enclosing class
bound_tvars = None # type: List[SymbolTableNode]
- # Stack of type variables that were bound by outer classess
- tvar_stack = None # type: List[List[SymbolTableNode]]
+ # Type variables bound by the current scope, be it class or function
+ tvar_scope = None # type: TypeVarScope
# Per-module options
options = None # type: Options
# Stack of functions being analyzed
function_stack = None # type: List[FuncItem]
- # Stack of next available function type variable ids
- next_function_tvar_id_stack = None # type: List[int]
# Status of postponing analysis of nested function bodies. By using this we
# can have mutually recursive nested functions. Values are FUNCTION_x
@@ -215,10 +222,8 @@ class SemanticAnalyzer(NodeVisitor):
self.imports = set()
self.type = None
self.type_stack = []
- self.bound_tvars = None
- self.tvar_stack = []
+ self.tvar_scope = TypeVarScope()
self.function_stack = []
- self.next_function_tvar_id_stack = [-1]
self.block_depth = [0]
self.loop_depth = 0
self.lib_path = lib_path
@@ -231,7 +236,7 @@ class SemanticAnalyzer(NodeVisitor):
def visit_file(self, file_node: MypyFile, fnam: str, options: Options) -> None:
self.options = options
- self.errors.set_file(fnam)
+ self.errors.set_file(fnam, file_node.fullname())
self.cur_mod_node = file_node
self.cur_mod_id = file_node.fullname()
self.is_stub_file = fnam.lower().endswith('.pyi')
@@ -253,6 +258,8 @@ class SemanticAnalyzer(NodeVisitor):
if self.cur_mod_id == 'builtins':
remove_imported_names_from_symtable(self.globals, 'builtins')
+ for alias_name in type_aliases:
+ self.globals.pop(alias_name.split('.')[-1], None)
if '__all__' in self.globals:
for name, g in self.globals.items():
@@ -261,13 +268,62 @@ class SemanticAnalyzer(NodeVisitor):
del self.options
+ def refresh_partial(self, node: Union[MypyFile, FuncItem]) -> None:
+ """Refresh a stale target in fine-grained incremental mode."""
+ if isinstance(node, MypyFile):
+ self.refresh_top_level(node)
+ else:
+ self.accept(node)
+
+ def refresh_top_level(self, file_node: MypyFile) -> None:
+ """Reanalyze a stale module top-level in fine-grained incremental mode."""
+ for d in file_node.defs:
+ if isinstance(d, ClassDef):
+ self.refresh_class_def(d)
+ elif not isinstance(d, FuncItem):
+ self.accept(d)
+
+ def refresh_class_def(self, defn: ClassDef) -> None:
+ with self.analyze_class_body(defn) as should_continue:
+ if should_continue:
+ for d in defn.defs.body:
+ # TODO: Make sure refreshing class bodies works.
+ if isinstance(d, ClassDef):
+ self.refresh_class_def(d)
+ elif not isinstance(d, FuncItem):
+ self.accept(d)
+
+ @contextmanager
+ def file_context(self, file_node: MypyFile, fnam: str, options: Options,
+ active_type: Optional[TypeInfo]) -> Iterator[None]:
+ # TODO: Use this above in visit_file
+ self.options = options
+ self.errors.set_file(fnam, file_node.fullname())
+ self.cur_mod_node = file_node
+ self.cur_mod_id = file_node.fullname()
+ self.is_stub_file = fnam.lower().endswith('.pyi')
+ self.globals = file_node.names
+ if active_type:
+ self.enter_class(active_type.defn)
+ # TODO: Bind class type vars
+
+ yield
+
+ if active_type:
+ self.leave_class()
+ self.type = None
+ del self.options
+
def visit_func_def(self, defn: FuncDef) -> None:
+
phase_info = self.postpone_nested_functions_stack[-1]
if phase_info != FUNCTION_SECOND_PHASE:
self.function_stack.append(defn)
# First phase of analysis for function.
self.errors.push_function(defn.name())
- self.update_function_type_variables(defn)
+ if defn.type:
+ assert isinstance(defn.type, CallableType)
+ self.update_function_type_variables(defn.type, defn)
self.errors.pop_function()
self.function_stack.pop()
@@ -283,7 +339,8 @@ class SemanticAnalyzer(NodeVisitor):
# Method definition
defn.info = self.type
if not defn.is_decorated and not defn.is_overload:
- if defn.name() in self.type.names:
+ if (defn.name() in self.type.names and
+ self.type.names[defn.name()].node != defn):
# Redefinition. Conditional redefinition is okay.
n = self.type.names[defn.name()].node
if not self.set_original_def(n, defn):
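A minimal sketch of the conditional redefinition that the check above tolerates (the class and method names are illustrative):

    import sys

    class A:
        if sys.version_info >= (3, 5):
            def greet(self) -> str:
                return 'hello'
        else:
            def greet(self) -> str:      # conditional redefinition: accepted, not reported as a duplicate
                return 'hi'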
@@ -318,11 +375,15 @@ class SemanticAnalyzer(NodeVisitor):
self.errors.push_function(defn.name())
self.analyze_function(defn)
if defn.is_coroutine and isinstance(defn.type, CallableType):
- # A coroutine defined as `async def foo(...) -> T: ...`
- # has external return type `Awaitable[T]`.
- defn.type = defn.type.copy_modified(
- ret_type = self.named_type_or_none('typing.Awaitable',
- [defn.type.ret_type]))
+ if defn.is_async_generator:
+ # Async generator types are handled elsewhere
+ pass
+ else:
+ # A coroutine defined as `async def foo(...) -> T: ...`
+ # has external return type `Awaitable[T]`.
+ defn.type = defn.type.copy_modified(
+ ret_type = self.named_type_or_none('typing.Awaitable',
+ [defn.type.ret_type]))
self.errors.pop_function()
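For orientation, a minimal sketch of the wrapping described in the comment above (as of this version of the analyzer): the declared return type of a plain coroutine is seen externally inside Awaitable, while async generators are typed elsewhere.

    from typing import Awaitable, Callable

    async def fetch() -> int:
        return 1

    # Externally the coroutine's return type is wrapped, so this assignment is consistent:
    f: Callable[[], Awaitable[int]] = fetch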
def prepare_method_signature(self, func: FuncDef) -> None:
@@ -356,89 +417,104 @@ class SemanticAnalyzer(NodeVisitor):
else:
return False
- def update_function_type_variables(self, defn: FuncDef) -> None:
+ def update_function_type_variables(self, fun_type: CallableType, defn: FuncItem) -> None:
"""Make any type variables in the signature of defn explicit.
Update the signature of defn to contain type variable definitions
if defn is generic.
"""
- if defn.type:
- assert isinstance(defn.type, CallableType)
- typevars = self.infer_type_variables(defn.type)
- # Do not define a new type variable if already defined in scope.
- typevars = [(name, tvar) for name, tvar in typevars
- if not self.is_defined_type_var(name, defn)]
- if typevars:
- next_tvar_id = self.next_function_tvar_id()
- defs = [TypeVarDef(tvar[0], next_tvar_id - i,
- tvar[1].values, tvar[1].upper_bound,
- tvar[1].variance)
- for i, tvar in enumerate(typevars)]
- defn.type.variables = defs
-
- def infer_type_variables(self,
- type: CallableType) -> List[Tuple[str, TypeVarExpr]]:
- """Return list of unique type variables referred to in a callable."""
- names = [] # type: List[str]
- tvars = [] # type: List[TypeVarExpr]
- for arg in type.arg_types + [type.ret_type]:
- for name, tvar_expr in self.find_type_variables_in_type(arg):
- if name not in names:
- names.append(name)
- tvars.append(tvar_expr)
- return list(zip(names, tvars))
-
- def find_type_variables_in_type(self, type: Type) -> List[Tuple[str, TypeVarExpr]]:
- """Return a list of all unique type variable references in type.
-
- This effectively does partial name binding, results of which are mostly thrown away.
- """
- result = [] # type: List[Tuple[str, TypeVarExpr]]
- if isinstance(type, UnboundType):
- name = type.name
- node = self.lookup_qualified(name, type)
- if node and node.kind == UNBOUND_TVAR:
- assert isinstance(node.node, TypeVarExpr)
- result.append((name, node.node))
- for arg in type.args:
- result.extend(self.find_type_variables_in_type(arg))
- elif isinstance(type, TypeList):
- for item in type.items:
- result.extend(self.find_type_variables_in_type(item))
- elif isinstance(type, UnionType):
- for item in type.items:
- result.extend(self.find_type_variables_in_type(item))
- elif isinstance(type, AnyType):
- pass
- elif isinstance(type, EllipsisType) or isinstance(type, TupleType):
- pass
- else:
- assert False, 'Unsupported type %s' % type
- return result
-
- def is_defined_type_var(self, tvar: str, context: Context) -> bool:
- return self.lookup_qualified(tvar, context).kind == BOUND_TVAR
+ with self.tvar_scope_frame(self.tvar_scope.method_frame()):
+ a = self.type_analyzer()
+ fun_type.variables = a.bind_function_type_variables(fun_type, defn)
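A minimal sketch of what binding function type variables covers: the type variable found in the signature becomes an explicit variable of the function's callable type.

    from typing import List, TypeVar

    T = TypeVar('T')

    def first(items: List[T]) -> T:     # T is recorded as an explicit type variable of first's type
        return items[0]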
def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
- t = [] # type: List[CallableType]
- for i, item in enumerate(defn.items):
- # TODO support decorated overloaded functions properly
- item.is_overload = True
- item.func.is_overload = True
- item.accept(self)
- callable = function_type(item.func, self.builtin_type('builtins.function'))
- assert isinstance(callable, CallableType)
- t.append(callable)
- if item.func.is_property and i == 0:
- # This defines a property, probably with a setter and/or deleter.
- self.analyze_property_with_multi_part_definition(defn)
- break
- if not [dec for dec in item.decorators
- if refers_to_fullname(dec, 'typing.overload')]:
- self.fail("'overload' decorator expected", item)
-
- defn.type = Overloaded(t)
- defn.type.line = defn.line
+ # OverloadedFuncDef refers to any legitimate situation where you have
+ # more than one declaration for the same function in a row. This occurs
+ # with a @property with a setter or a deleter, and for a classic
+ # @overload.
+
+ # Decide whether to analyze this as a property or an overload. If an
+ # overload, and we're outside a stub, find the impl and set it. Remove
+ # the impl from the item list; it's special.
+ types = [] # type: List[CallableType]
+ non_overload_indexes = []
+
+ # See if the first item is a property (and not an overload)
+ first_item = defn.items[0]
+ first_item.is_overload = True
+ first_item.accept(self)
+
+ if isinstance(first_item, Decorator) and first_item.func.is_property:
+ first_item.func.is_overload = True
+ self.analyze_property_with_multi_part_definition(defn)
+ typ = function_type(first_item.func, self.builtin_type('builtins.function'))
+ assert isinstance(typ, CallableType)
+ types = [typ]
+ else:
+ for i, item in enumerate(defn.items):
+ if i != 0:
+ # The first item was already visited
+ item.is_overload = True
+ item.accept(self)
+ # TODO support decorated overloaded functions properly
+ if isinstance(item, Decorator):
+ callable = function_type(item.func, self.builtin_type('builtins.function'))
+ assert isinstance(callable, CallableType)
+ if not any(refers_to_fullname(dec, 'typing.overload')
+ for dec in item.decorators):
+ if i == len(defn.items) - 1 and not self.is_stub_file:
+ # Last item outside a stub is impl
+ defn.impl = item
+ else:
+ # Oops, it wasn't an overload after all. The right error
+ # message varies based on where in the list it is, so
+ # record the index.
+ non_overload_indexes.append(i)
+ else:
+ item.func.is_overload = True
+ types.append(callable)
+ elif isinstance(item, FuncDef):
+ if i == len(defn.items) - 1 and not self.is_stub_file:
+ defn.impl = item
+ else:
+ non_overload_indexes.append(i)
+ if non_overload_indexes:
+ if types:
+ # Some of them were overloads, but not all.
+ for idx in non_overload_indexes:
+ if self.is_stub_file:
+ self.fail("An implementation for an overloaded function "
+ "is not allowed in a stub file", defn.items[idx])
+ else:
+ self.fail("The implementation for an overloaded function "
+ "must come last", defn.items[idx])
+ else:
+ for idx in non_overload_indexes[1:]:
+ self.name_already_defined(defn.name(), defn.items[idx])
+ if defn.impl:
+ self.name_already_defined(defn.name(), defn.impl)
+ # Remove the non-overloads
+ for idx in reversed(non_overload_indexes):
+ del defn.items[idx]
+ # If we found an implementation, remove it from the overloads to
+ # consider.
+ if defn.impl is not None:
+ assert defn.impl is defn.items[-1]
+ defn.items = defn.items[:-1]
+
+ elif not self.is_stub_file and not non_overload_indexes:
+ self.fail(
+ "An overloaded function outside a stub file must have an implementation",
+ defn)
+
+ if types:
+ defn.type = Overloaded(types)
+ defn.type.line = defn.line
+
+ if not defn.items:
+ # It was not any kind of overload def after all. We've visited the
+ # redefinitions already.
+ return
if self.is_class_scope():
self.type.names[defn.name()] = SymbolTableNode(MDEF, defn,
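As an illustrative sketch (names here are examples, not code from the patch), the two shapes distinguished in the hunk above are a property with extra parts and a classic overload whose last item, outside a stub, is the implementation:

    from typing import Union, overload

    class Box:
        @property
        def value(self) -> int: ...
        @value.setter
        def value(self, v: int) -> None: ...    # analyzed as a multi-part property

    @overload
    def parse(x: int) -> int: ...
    @overload
    def parse(x: str) -> str: ...
    def parse(x: Union[int, str]) -> Union[int, str]:   # outside a stub, the trailing item is the impl
        return x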
@@ -454,102 +530,82 @@ class SemanticAnalyzer(NodeVisitor):
"""
defn.is_property = True
items = defn.items
+ first_item = cast(Decorator, defn.items[0])
for item in items[1:]:
- if len(item.decorators) == 1:
+ if isinstance(item, Decorator) and len(item.decorators) == 1:
node = item.decorators[0]
if isinstance(node, MemberExpr):
if node.name == 'setter':
# The first item represents the entire property.
- defn.items[0].var.is_settable_property = True
+ first_item.var.is_settable_property = True
# Get abstractness from the original definition.
- item.func.is_abstract = items[0].func.is_abstract
+ item.func.is_abstract = first_item.func.is_abstract
+ item.func.accept(self)
else:
self.fail("Decorated property not supported", item)
- item.func.accept(self)
-
- def next_function_tvar_id(self) -> int:
- return self.next_function_tvar_id_stack[-1]
def analyze_function(self, defn: FuncItem) -> None:
is_method = self.is_class_scope()
+ with self.tvar_scope_frame(self.tvar_scope.method_frame()):
+ if defn.type:
+ self.check_classvar_in_signature(defn.type)
+ assert isinstance(defn.type, CallableType)
+ # Signature must be analyzed in the surrounding scope so that
+ # class-level imported names and type variables are in scope.
+ defn.type = self.type_analyzer().visit_callable_type(defn.type, nested=False)
+ self.check_function_signature(defn)
+ if isinstance(defn, FuncDef):
+ defn.type = set_callable_name(defn.type, defn)
+ for arg in defn.arguments:
+ if arg.initializer:
+ arg.initializer.accept(self)
+ # Bind the type variables again to visit the body.
+ if defn.type:
+ a = self.type_analyzer()
+ a.bind_function_type_variables(cast(CallableType, defn.type), defn)
+ self.function_stack.append(defn)
+ self.enter()
+ for arg in defn.arguments:
+ self.add_local(arg.variable, defn)
+ for arg in defn.arguments:
+ if arg.initialization_statement:
+ lvalue = arg.initialization_statement.lvalues[0]
+ lvalue.accept(self)
+
+ # The first argument of a non-static, non-class method is like 'self'
+ # (though the name could be different), having the enclosing class's
+ # instance type.
+ if is_method and not defn.is_static and not defn.is_class and defn.arguments:
+ defn.arguments[0].variable.is_self = True
+
+ # First analyze body of the function but ignore nested functions.
+ self.postpone_nested_functions_stack.append(FUNCTION_FIRST_PHASE_POSTPONE_SECOND)
+ self.postponed_functions_stack.append([])
+ defn.body.accept(self)
+
+ # Analyze nested functions (if any) as a second phase.
+ self.postpone_nested_functions_stack[-1] = FUNCTION_SECOND_PHASE
+ for postponed in self.postponed_functions_stack[-1]:
+ postponed.accept(self)
+ self.postpone_nested_functions_stack.pop()
+ self.postponed_functions_stack.pop()
+
+ self.leave()
+ self.function_stack.pop()
- tvarnodes = self.add_func_type_variables_to_symbol_table(defn)
- next_function_tvar_id = min([self.next_function_tvar_id()] +
- [n.tvar_def.id.raw_id - 1 for n in tvarnodes])
- self.next_function_tvar_id_stack.append(next_function_tvar_id)
-
- if defn.type:
- # Signature must be analyzed in the surrounding scope so that
- # class-level imported names and type variables are in scope.
- defn.type = self.anal_type(defn.type)
- self.check_function_signature(defn)
- if isinstance(defn, FuncDef):
- defn.type = set_callable_name(defn.type, defn)
- for arg in defn.arguments:
- if arg.initializer:
- arg.initializer.accept(self)
- self.function_stack.append(defn)
- self.enter()
- for arg in defn.arguments:
- self.add_local(arg.variable, defn)
- for arg in defn.arguments:
- if arg.initialization_statement:
- lvalue = arg.initialization_statement.lvalues[0]
- lvalue.accept(self)
-
- # The first argument of a non-static, non-class method is like 'self'
- # (though the name could be different), having the enclosing class's
- # instance type.
- if is_method and not defn.is_static and not defn.is_class and defn.arguments:
- defn.arguments[0].variable.is_self = True
-
- # First analyze body of the function but ignore nested functions.
- self.postpone_nested_functions_stack.append(FUNCTION_FIRST_PHASE_POSTPONE_SECOND)
- self.postponed_functions_stack.append([])
- defn.body.accept(self)
-
- # Analyze nested functions (if any) as a second phase.
- self.postpone_nested_functions_stack[-1] = FUNCTION_SECOND_PHASE
- for postponed in self.postponed_functions_stack[-1]:
- postponed.accept(self)
- self.postpone_nested_functions_stack.pop()
- self.postponed_functions_stack.pop()
-
- self.next_function_tvar_id_stack.pop()
- disable_typevars(tvarnodes)
-
- self.leave()
- self.function_stack.pop()
-
- def add_func_type_variables_to_symbol_table(
- self, defn: FuncItem) -> List[SymbolTableNode]:
- nodes = [] # type: List[SymbolTableNode]
- if defn.type:
- tt = defn.type
- assert isinstance(tt, CallableType)
- items = tt.variables
- names = self.type_var_names()
- for item in items:
- name = item.name
- if name in names:
- self.name_already_defined(name, defn)
- node = self.bind_type_var(name, item, defn)
- nodes.append(node)
- names.add(name)
- return nodes
-
- def type_var_names(self) -> Set[str]:
- if not self.type:
- return set()
- else:
- return set(self.type.type_vars)
-
- def bind_type_var(self, fullname: str, tvar_def: TypeVarDef,
- context: Context) -> SymbolTableNode:
- node = self.lookup_qualified(fullname, context)
- node.kind = BOUND_TVAR
- node.tvar_def = tvar_def
- return node
+ def check_classvar_in_signature(self, typ: Type) -> None:
+ t = None # type: Type
+ if isinstance(typ, Overloaded):
+ for t in typ.items():
+ self.check_classvar_in_signature(t)
+ return
+ if not isinstance(typ, CallableType):
+ return
+ for t in typ.arg_types + [typ.ret_type]:
+ if self.is_classvar(t):
+ self.fail_invalid_classvar(t)
+ # Show only one error per signature
+ break
def check_function_signature(self, fdef: FuncItem) -> None:
sig = fdef.type
@@ -563,29 +619,38 @@ class SemanticAnalyzer(NodeVisitor):
self.fail('Type signature has too many arguments', fdef, blocker=True)
def visit_class_def(self, defn: ClassDef) -> None:
- self.clean_up_bases_and_infer_type_variables(defn)
- if self.analyze_namedtuple_classdef(defn):
- return
- self.setup_class_def_analysis(defn)
-
- self.bind_class_type_vars(defn)
-
- self.analyze_base_classes(defn)
- self.analyze_metaclass(defn)
-
- for decorator in defn.decorators:
- self.analyze_class_decorator(defn, decorator)
+ with self.analyze_class_body(defn) as should_continue:
+ if should_continue:
+ # Analyze class body.
+ defn.defs.accept(self)
+
+ @contextmanager
+ def analyze_class_body(self, defn: ClassDef) -> Iterator[bool]:
+ with self.tvar_scope_frame(self.tvar_scope.class_frame()):
+ self.clean_up_bases_and_infer_type_variables(defn)
+ if self.analyze_typeddict_classdef(defn):
+ yield False
+ return
+ if self.analyze_namedtuple_classdef(defn):
+ # just analyze the class body so we catch type errors in default values
+ self.enter_class(defn)
+ yield True
+ self.leave_class()
+ else:
+ self.setup_class_def_analysis(defn)
+ self.analyze_base_classes(defn)
+ self.analyze_metaclass(defn)
- self.enter_class(defn)
+ for decorator in defn.decorators:
+ self.analyze_class_decorator(defn, decorator)
- # Analyze class body.
- defn.defs.accept(self)
+ self.enter_class(defn)
+ yield True
- self.calculate_abstract_status(defn.info)
- self.setup_type_promotion(defn)
+ self.calculate_abstract_status(defn.info)
+ self.setup_type_promotion(defn)
- self.leave_class()
- self.unbind_class_type_vars()
+ self.leave_class()
def enter_class(self, defn: ClassDef) -> None:
# Remember previous active class
@@ -602,24 +667,6 @@ class SemanticAnalyzer(NodeVisitor):
self.locals.pop()
self.type = self.type_stack.pop()
- def bind_class_type_vars(self, defn: ClassDef) -> None:
- """ Unbind type variables of previously active class and bind
- the type variables for the active class.
- """
- if self.bound_tvars:
- disable_typevars(self.bound_tvars)
- self.tvar_stack.append(self.bound_tvars)
- self.bound_tvars = self.bind_class_type_variables_in_symbol_table(defn.info)
-
- def unbind_class_type_vars(self) -> None:
- """ Unbind the active class' type vars and rebind the
- type vars of the previously active class.
- """
- disable_typevars(self.bound_tvars)
- self.bound_tvars = self.tvar_stack.pop()
- if self.bound_tvars:
- enable_typevars(self.bound_tvars)
-
def analyze_class_decorator(self, defn: ClassDef, decorator: Expression) -> None:
decorator.accept(self)
@@ -682,7 +729,7 @@ class SemanticAnalyzer(NodeVisitor):
Note that this is performed *before* semantic analysis.
"""
removed = [] # type: List[int]
- type_vars = [] # type: List[TypeVarDef]
+ declared_tvars = [] # type: TypeVarList
for i, base_expr in enumerate(defn.base_type_exprs):
try:
base = expr_to_unanalyzed_type(base_expr)
@@ -691,20 +738,33 @@ class SemanticAnalyzer(NodeVisitor):
continue
tvars = self.analyze_typevar_declaration(base)
if tvars is not None:
- if type_vars:
+ if declared_tvars:
self.fail('Duplicate Generic in bases', defn)
removed.append(i)
- for j, (name, tvar_expr) in enumerate(tvars):
- type_vars.append(TypeVarDef(name, j + 1, tvar_expr.values,
- tvar_expr.upper_bound, tvar_expr.variance))
- if type_vars:
- defn.type_vars = type_vars
+ declared_tvars.extend(tvars)
+
+ all_tvars = self.get_all_bases_tvars(defn, removed)
+ if declared_tvars:
+ if len(remove_dups(declared_tvars)) < len(declared_tvars):
+ self.fail("Duplicate type variables in Generic[...]", defn)
+ declared_tvars = remove_dups(declared_tvars)
+ if not set(all_tvars).issubset(set(declared_tvars)):
+ self.fail("If Generic[...] is present it should list all type variables", defn)
+ # In case of error, Generic tvars will go first
+ declared_tvars = remove_dups(declared_tvars + all_tvars)
+ else:
+ declared_tvars = all_tvars
+ if declared_tvars:
if defn.info:
- defn.info.type_vars = [tv.name for tv in type_vars]
+ defn.info.type_vars = [name for name, _ in declared_tvars]
for i in reversed(removed):
del defn.base_type_exprs[i]
+ tvar_defs = [] # type: List[TypeVarDef]
+ for name, tvar_expr in declared_tvars:
+ tvar_defs.append(self.tvar_scope.bind(name, tvar_expr))
+ defn.type_vars = tvar_defs
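A minimal sketch of the Generic[...] consistency checks added above; the commented class shows the case that is reported:

    from typing import Dict, Generic, TypeVar

    T = TypeVar('T')
    S = TypeVar('S')

    class Good(Dict[S, int], Generic[S, T]):   # Generic[...] lists every type variable used in the bases
        ...

    # class Bad(Dict[S, int], Generic[T]): ...
    #   would be rejected: "If Generic[...] is present it should list all type variables"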
- def analyze_typevar_declaration(self, t: Type) -> Optional[List[Tuple[str, TypeVarExpr]]]:
+ def analyze_typevar_declaration(self, t: Type) -> Optional[TypeVarList]:
if not isinstance(t, UnboundType):
return None
unbound = t
@@ -712,7 +772,7 @@ class SemanticAnalyzer(NodeVisitor):
if sym is None or sym.node is None:
return None
if sym.node.fullname() == 'typing.Generic':
- tvars = [] # type: List[Tuple[str, TypeVarExpr]]
+ tvars = [] # type: TypeVarList
for arg in unbound.args:
tvar = self.analyze_unbound_tvar(arg)
if tvar:
@@ -728,10 +788,27 @@ class SemanticAnalyzer(NodeVisitor):
return None
unbound = t
sym = self.lookup_qualified(unbound.name, unbound)
- if sym is not None and sym.kind == UNBOUND_TVAR:
+ if sym is None or sym.kind != TVAR:
+ return None
+ elif not self.tvar_scope.allow_binding(sym.fullname):
+ # It's bound by our type variable scope
+ return None
+ else:
assert isinstance(sym.node, TypeVarExpr)
return unbound.name, sym.node
- return None
+
+ def get_all_bases_tvars(self, defn: ClassDef, removed: List[int]) -> TypeVarList:
+ tvars = [] # type: TypeVarList
+ for i, base_expr in enumerate(defn.base_type_exprs):
+ if i not in removed:
+ try:
+ base = expr_to_unanalyzed_type(base_expr)
+ except TypeTranslationError:
+ # This error will be caught later.
+ continue
+ base_tvars = base.accept(TypeVariableQuery(self.lookup_qualified, self.tvar_scope))
+ tvars.extend(base_tvars)
+ return remove_dups(tvars)
def analyze_namedtuple_classdef(self, defn: ClassDef) -> bool:
# special case for NamedTuple
@@ -742,21 +819,30 @@ class SemanticAnalyzer(NodeVisitor):
node = self.lookup(defn.name, defn)
if node is not None:
node.kind = GDEF # TODO in process_namedtuple_definition also applies here
- items, types = self.check_namedtuple_classdef(defn)
- node.node = self.build_namedtuple_typeinfo(defn.name, items, types)
+ items, types, default_items = self.check_namedtuple_classdef(defn)
+ node.node = self.build_namedtuple_typeinfo(
+ defn.name, items, types, default_items)
+ # We only really need the assignments in the body to be type checked later;
+ # attempting to type check methods may lead to crashes because NamedTuples
+ # do not have a fully functional TypeInfo.
+ # TODO remove this hack and add full support for NamedTuple methods
+ defn.defs.body = [stmt for stmt in defn.defs.body
+ if isinstance(stmt, AssignmentStmt)]
return True
return False
- def check_namedtuple_classdef(self, defn: ClassDef) -> Tuple[List[str], List[Type]]:
+ def check_namedtuple_classdef(
+ self, defn: ClassDef) -> Tuple[List[str], List[Type], Dict[str, Expression]]:
NAMEDTUP_CLASS_ERROR = ('Invalid statement in NamedTuple definition; '
'expected "field_name: field_type"')
if self.options.python_version < (3, 6):
self.fail('NamedTuple class syntax is only supported in Python 3.6', defn)
- return [], []
+ return [], [], {}
if len(defn.base_type_exprs) > 1:
self.fail('NamedTuple should be a single base', defn)
items = [] # type: List[str]
types = [] # type: List[Type]
+ default_items = {} # type: Dict[str, Expression]
for stmt in defn.defs.body:
if not isinstance(stmt, AssignmentStmt):
# Still allow pass or ... (for empty namedtuples).
@@ -778,10 +864,14 @@ class SemanticAnalyzer(NodeVisitor):
.format(name), stmt)
if stmt.type is None or hasattr(stmt, 'new_syntax') and not stmt.new_syntax:
self.fail(NAMEDTUP_CLASS_ERROR, stmt)
- elif not isinstance(stmt.rvalue, TempNode):
+ elif isinstance(stmt.rvalue, TempNode):
# x: int assigns rvalue to TempNode(AnyType())
- self.fail('Right hand side values are not supported in NamedTuple', stmt)
- return items, types
+ if default_items:
+ self.fail('Non-default NamedTuple fields cannot follow default fields',
+ stmt)
+ else:
+ default_items[name] = stmt.rvalue
+ return items, types, default_items
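A small sketch of the class-syntax defaults handled above (Python 3.6.1+ at runtime); a non-default field after a default one is rejected:

    from typing import NamedTuple

    class Point(NamedTuple):
        x: int
        y: int = 0        # default recorded in default_items; __init__ makes y optional

    p = Point(1)          # y defaults to 0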
def setup_class_def_analysis(self, defn: ClassDef) -> None:
"""Prepare for the analysis of a class definition."""
@@ -792,7 +882,15 @@ class SemanticAnalyzer(NodeVisitor):
kind = MDEF
if self.is_func_scope():
kind = LDEF
- self.add_symbol(defn.name, SymbolTableNode(kind, defn.info), defn)
+ node = SymbolTableNode(kind, defn.info)
+ self.add_symbol(defn.name, node, defn)
+ if kind == LDEF:
+ # We need to preserve local classes, so let's store them
+ # in globals under mangled unique names
+ local_name = defn.info._fullname + '@' + str(defn.line)
+ defn.info._fullname = self.cur_mod_id + '.' + local_name
+ defn.fullname = defn.info._fullname
+ self.globals[local_name] = node
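A minimal sketch of the mangling described in the comment above; the exact mangled name ('Local@2' here) is illustrative:

    def make() -> None:
        class Local:      # also stored in module globals under a name like 'mod.Local@2'
            ...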
def analyze_base_classes(self, defn: ClassDef) -> None:
"""Analyze and set up base classes.
@@ -852,6 +950,8 @@ class SemanticAnalyzer(NodeVisitor):
# the MRO. Fix MRO if needed.
if info.mro and info.mro[-1].fullname() != 'builtins.object':
info.mro.append(self.object_type().type)
+ if defn.info.is_enum and defn.type_vars:
+ self.fail("Enum class cannot be generic", defn)
def expr_to_analyzed_type(self, expr: Expression) -> Type:
if isinstance(expr, CallExpr):
@@ -900,13 +1000,61 @@ class SemanticAnalyzer(NodeVisitor):
return False
def analyze_metaclass(self, defn: ClassDef) -> None:
+ error_context = defn # type: Context
+ if defn.metaclass is None and self.options.python_version[0] == 2:
+ # Look for "__metaclass__ = <metaclass>" in Python 2.
+ for body_node in defn.defs.body:
+ if isinstance(body_node, ClassDef) and body_node.name == "__metaclass__":
+ self.fail("Metaclasses defined as inner classes are not supported", body_node)
+ return
+ elif isinstance(body_node, AssignmentStmt) and len(body_node.lvalues) == 1:
+ lvalue = body_node.lvalues[0]
+ if isinstance(lvalue, NameExpr) and lvalue.name == "__metaclass__":
+ error_context = body_node.rvalue
+ if isinstance(body_node.rvalue, NameExpr):
+ name = body_node.rvalue.name
+ elif isinstance(body_node.rvalue, MemberExpr):
+ name = get_member_expr_fullname(body_node.rvalue)
+ else:
+ name = None
+ if name:
+ defn.metaclass = name
+ else:
+ self.fail(
+ "Dynamic metaclass not supported for '%s'" % defn.name,
+ body_node
+ )
+ return
if defn.metaclass:
if defn.metaclass == '<error>':
- self.fail("Dynamic metaclass not supported for '%s'" % defn.name, defn)
+ self.fail("Dynamic metaclass not supported for '%s'" % defn.name, error_context)
+ return
+ sym = self.lookup_qualified(defn.metaclass, error_context)
+ if sym is None:
+ # Probably a name error - it is already handled elsewhere
return
- sym = self.lookup_qualified(defn.metaclass, defn)
- if sym is not None and not isinstance(sym.node, TypeInfo):
+ if isinstance(sym.node, Var) and isinstance(sym.node.type, AnyType):
+ # 'Any' metaclass -- just ignore it.
+ #
+ # TODO: A better approach would be to record this information
+ # and assume that the type object supports arbitrary
+ # attributes, similar to an 'Any' base class.
+ return
+ if not isinstance(sym.node, TypeInfo) or sym.node.tuple_type is not None:
self.fail("Invalid metaclass '%s'" % defn.metaclass, defn)
+ return
+ if not sym.node.is_metaclass():
+ self.fail("Metaclasses not inheriting from 'type' are not supported", defn)
+ return
+ inst = fill_typevars(sym.node)
+ assert isinstance(inst, Instance)
+ defn.info.declared_metaclass = inst
+ defn.info.metaclass_type = defn.info.calculate_metaclass_type()
+ if defn.info.metaclass_type is None:
+ # Inconsistency may happen due to multiple base classes even in classes that
+ # do not declare explicit metaclass, but it's harder to catch at this stage
+ if defn.metaclass:
+ self.fail("Inconsistent metaclass structure for '%s'" % defn.name, defn)
def object_type(self) -> Instance:
return self.named_type('__builtins__.object')
@@ -926,23 +1074,115 @@ class SemanticAnalyzer(NodeVisitor):
def named_type(self, qualified_name: str, args: List[Type] = None) -> Instance:
sym = self.lookup_qualified(qualified_name, None)
- assert isinstance(sym.node, TypeInfo)
- return Instance(sym.node, args or [])
+ node = sym.node
+ assert isinstance(node, TypeInfo)
+ if args:
+ # TODO: assert len(args) == len(node.defn.type_vars)
+ return Instance(node, args)
+ return Instance(node, [AnyType()] * len(node.defn.type_vars))
def named_type_or_none(self, qualified_name: str, args: List[Type] = None) -> Instance:
sym = self.lookup_fully_qualified_or_none(qualified_name)
if not sym:
return None
- assert isinstance(sym.node, TypeInfo)
- return Instance(sym.node, args or [])
+ node = sym.node
+ assert isinstance(node, TypeInfo)
+ if args:
+ # TODO: assert len(args) == len(node.defn.type_vars)
+ return Instance(node, args)
+ return Instance(node, [AnyType()] * len(node.defn.type_vars))
+
+ def is_typeddict(self, expr: Expression) -> bool:
+ return (isinstance(expr, RefExpr) and isinstance(expr.node, TypeInfo) and
+ expr.node.typeddict_type is not None)
+
+ def analyze_typeddict_classdef(self, defn: ClassDef) -> bool:
+ # special case for TypedDict
+ possible = False
+ for base_expr in defn.base_type_exprs:
+ if isinstance(base_expr, RefExpr):
+ base_expr.accept(self)
+ if (base_expr.fullname == 'mypy_extensions.TypedDict' or
+ self.is_typeddict(base_expr)):
+ possible = True
+ if possible:
+ node = self.lookup(defn.name, defn)
+ if node is not None:
+ node.kind = GDEF # TODO in process_namedtuple_definition also applies here
+ if (len(defn.base_type_exprs) == 1 and
+ isinstance(defn.base_type_exprs[0], RefExpr) and
+ defn.base_type_exprs[0].fullname == 'mypy_extensions.TypedDict'):
+ # Building a new TypedDict
+ fields, types = self.check_typeddict_classdef(defn)
+ node.node = self.build_typeddict_typeinfo(defn.name, fields, types)
+ return True
+ # Extending/merging existing TypedDicts
+ if any(not isinstance(expr, RefExpr) or
+ expr.fullname != 'mypy_extensions.TypedDict' and
+ not self.is_typeddict(expr) for expr in defn.base_type_exprs):
+ self.fail("All bases of a new TypedDict must be TypedDict types", defn)
+ typeddict_bases = list(filter(self.is_typeddict, defn.base_type_exprs))
+ newfields = [] # type: List[str]
+ newtypes = [] # type: List[Type]
+ tpdict = None # type: OrderedDict[str, Type]
+ for base in typeddict_bases:
+ assert isinstance(base, RefExpr)
+ assert isinstance(base.node, TypeInfo)
+ assert isinstance(base.node.typeddict_type, TypedDictType)
+ tpdict = base.node.typeddict_type.items
+ newdict = tpdict.copy()
+ for key in tpdict:
+ if key in newfields:
+ self.fail('Cannot overwrite TypedDict field "{}" while merging'
+ .format(key), defn)
+ newdict.pop(key)
+ newfields.extend(newdict.keys())
+ newtypes.extend(newdict.values())
+ fields, types = self.check_typeddict_classdef(defn, newfields)
+ newfields.extend(fields)
+ newtypes.extend(types)
+ node.node = self.build_typeddict_typeinfo(defn.name, newfields, newtypes)
+ return True
+ return False
- def bind_class_type_variables_in_symbol_table(
- self, info: TypeInfo) -> List[SymbolTableNode]:
- nodes = [] # type: List[SymbolTableNode]
- for var, binder in zip(info.type_vars, info.defn.type_vars):
- node = self.bind_type_var(var, binder, info)
- nodes.append(node)
- return nodes
+ def check_typeddict_classdef(self, defn: ClassDef,
+ oldfields: List[str] = None) -> Tuple[List[str], List[Type]]:
+ TPDICT_CLASS_ERROR = ('Invalid statement in TypedDict definition; '
+ 'expected "field_name: field_type"')
+ if self.options.python_version < (3, 6):
+ self.fail('TypedDict class syntax is only supported in Python 3.6', defn)
+ return [], []
+ fields = [] # type: List[str]
+ types = [] # type: List[Type]
+ for stmt in defn.defs.body:
+ if not isinstance(stmt, AssignmentStmt):
+ # Still allow pass or ... (for empty TypedDicts).
+ if (not isinstance(stmt, PassStmt) and
+ not (isinstance(stmt, ExpressionStmt) and
+ isinstance(stmt.expr, EllipsisExpr))):
+ self.fail(TPDICT_CLASS_ERROR, stmt)
+ elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr):
+ # An assignment, but an invalid one.
+ self.fail(TPDICT_CLASS_ERROR, stmt)
+ else:
+ name = stmt.lvalues[0].name
+ if name in (oldfields or []):
+ self.fail('Cannot overwrite TypedDict field "{}" while extending'
+ .format(name), stmt)
+ continue
+ if name in fields:
+ self.fail('Duplicate TypedDict field "{}"'.format(name), stmt)
+ continue
+ # Append name and type in this case...
+ fields.append(name)
+ types.append(AnyType() if stmt.type is None else self.anal_type(stmt.type))
+ # ...despite possible minor failures that allow further analysis.
+ if stmt.type is None or hasattr(stmt, 'new_syntax') and not stmt.new_syntax:
+ self.fail(TPDICT_CLASS_ERROR, stmt)
+ elif not isinstance(stmt.rvalue, TempNode):
+ # x: int assigns rvalue to TempNode(AnyType())
+ self.fail('Right hand side values are not supported in TypedDict', stmt)
+ return fields, types
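For orientation, a minimal sketch of the class-based TypedDict forms handled above, assuming mypy_extensions is available and Python 3.6 class syntax:

    from mypy_extensions import TypedDict

    class Movie(TypedDict):
        title: str
        year: int

    class BluRayMovie(Movie):     # extending an existing TypedDict merges its fields
        region: int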
def visit_import(self, i: Import) -> None:
for id, as_id in i.ids:
@@ -1026,7 +1266,8 @@ class SemanticAnalyzer(NodeVisitor):
symbol = SymbolTableNode(node.kind, node.node,
self.cur_mod_id,
node.type_override,
- module_public=module_public)
+ module_public=module_public,
+ normalized=node.normalized)
self.add_symbol(imported_id, symbol, imp)
elif module and not missing:
# Missing attribute.
@@ -1062,12 +1303,19 @@ class SemanticAnalyzer(NodeVisitor):
def normalize_type_alias(self, node: SymbolTableNode,
ctx: Context) -> SymbolTableNode:
+ normalized = False
if node.fullname in type_aliases:
# Node refers to an aliased type such as typing.List; normalize.
node = self.lookup_qualified(type_aliases[node.fullname], ctx)
- if node.fullname == 'typing.DefaultDict':
+ normalized = True
+ if node.fullname in collections_type_aliases:
+ # Similar, but for types from the collections module like typing.DefaultDict
self.add_module_symbol('collections', '__mypy_collections__', False, ctx)
- node = self.lookup_qualified('__mypy_collections__.defaultdict', ctx)
+ node = self.lookup_qualified(collections_type_aliases[node.fullname], ctx)
+ normalized = True
+ if normalized:
+ node = SymbolTableNode(node.kind, node.node,
+ node.mod_id, node.type_override, normalized=True)
return node
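A small sketch of what normalization means in practice; the aliases below resolve to builtins/collections types and the resulting symbols are marked as normalized:

    from typing import DefaultDict, List

    xs: List[int] = []                 # normalized to the builtins.list type
    counts: DefaultDict[str, int]      # normalized via __mypy_collections__ to collections.defaultdict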
def correct_relative_import(self, node: Union[ImportFrom, ImportAll]) -> str:
@@ -1103,7 +1351,8 @@ class SemanticAnalyzer(NodeVisitor):
continue
self.add_symbol(name, SymbolTableNode(node.kind, node.node,
self.cur_mod_id,
- node.type_override), i)
+ node.type_override,
+ normalized=node.normalized), i)
else:
# Don't add any dummy symbols for 'from x import *' if 'x' is unknown.
pass
@@ -1135,37 +1384,42 @@ class SemanticAnalyzer(NodeVisitor):
if b:
self.visit_block(b)
- def anal_type(self, t: Type, allow_tuple_literal: bool = False,
+ def type_analyzer(self, *,
+ tvar_scope: Optional[TypeVarScope] = None,
+ allow_tuple_literal: bool = False,
+ aliasing: bool = False) -> TypeAnalyser:
+ if tvar_scope is None:
+ tvar_scope = self.tvar_scope
+ return TypeAnalyser(self.lookup_qualified,
+ self.lookup_fully_qualified,
+ tvar_scope,
+ self.fail,
+ aliasing=aliasing,
+ allow_tuple_literal=allow_tuple_literal,
+ allow_unnormalized=self.is_stub_file)
+
+ def anal_type(self, t: Type, *,
+ tvar_scope: Optional[TypeVarScope] = None,
+ allow_tuple_literal: bool = False,
aliasing: bool = False) -> Type:
if t:
- if allow_tuple_literal:
- # Types such as (t1, t2, ...) only allowed in assignment statements. They'll
- # generate errors elsewhere, and Tuple[t1, t2, ...] must be used instead.
- if isinstance(t, TupleType):
- # Unlike TypeAnalyser, also allow implicit tuple types (without Tuple[...]).
- star_count = sum(1 for item in t.items if isinstance(item, StarType))
- if star_count > 1:
- self.fail('At most one star type allowed in a tuple', t)
- return TupleType([AnyType() for _ in t.items],
- self.builtin_type('builtins.tuple'), t.line)
- items = [self.anal_type(item, True)
- for item in t.items]
- return TupleType(items, self.builtin_type('builtins.tuple'), t.line)
- a = TypeAnalyser(self.lookup_qualified,
- self.lookup_fully_qualified,
- self.fail,
- aliasing=aliasing)
+ a = self.type_analyzer(
+ tvar_scope=tvar_scope,
+ aliasing=aliasing,
+ allow_tuple_literal=allow_tuple_literal)
return t.accept(a)
+
else:
return None
def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
for lval in s.lvalues:
self.analyze_lvalue(lval, explicit_type=s.type is not None)
+ self.check_classvar(s)
s.rvalue.accept(self)
if s.type:
allow_tuple_literal = isinstance(s.lvalues[-1], (TupleExpr, ListExpr))
- s.type = self.anal_type(s.type, allow_tuple_literal)
+ s.type = self.anal_type(s.type, allow_tuple_literal=allow_tuple_literal)
else:
# For simple assignments, allow binding type aliases.
# Also set the type if the rvalue is a simple literal.
@@ -1176,7 +1430,8 @@ class SemanticAnalyzer(NodeVisitor):
res = analyze_type_alias(s.rvalue,
self.lookup_qualified,
self.lookup_fully_qualified,
- self.fail)
+ self.tvar_scope,
+ self.fail, allow_unnormalized=True)
if res and (not isinstance(res, Instance) or res.args):
# TODO: What if this gets reassigned?
name = s.lvalues[0]
@@ -1195,6 +1450,7 @@ class SemanticAnalyzer(NodeVisitor):
self.process_typevar_declaration(s)
self.process_namedtuple_definition(s)
self.process_typeddict_definition(s)
+ self.process_enum_call(s)
if (len(s.lvalues) == 1 and isinstance(s.lvalues[0], NameExpr) and
s.lvalues[0].name == '__all__' and s.lvalues[0].kind == GDEF and
@@ -1253,7 +1509,9 @@ class SemanticAnalyzer(NodeVisitor):
# TODO: We should record the fact that this is a variable
# that refers to a type, rather than making this
# just an alias for the type.
- self.globals[lvalue.name].node = node
+ sym = self.lookup_type_node(rvalue)
+ if sym:
+ self.globals[lvalue.name] = sym
def analyze_lvalue(self, lval: Lvalue, nested: bool = False,
add_global: bool = False,
@@ -1331,7 +1589,7 @@ class SemanticAnalyzer(NodeVisitor):
isinstance(lval, ListExpr)):
items = lval.items
if len(items) == 0 and isinstance(lval, TupleExpr):
- self.fail("Can't assign to ()", lval)
+ self.fail("can't assign to ()", lval)
self.analyze_tuple_or_list_lvalue(lval, add_global, explicit_type)
elif isinstance(lval, StarExpr):
if nested:
@@ -1380,8 +1638,10 @@ class SemanticAnalyzer(NodeVisitor):
return isinstance(node, Var) and node.is_self
def check_lvalue_validity(self, node: Union[Expression, SymbolNode], ctx: Context) -> None:
- if isinstance(node, (TypeInfo, TypeVarExpr)):
+ if isinstance(node, TypeVarExpr):
self.fail('Invalid assignment target', ctx)
+ elif isinstance(node, TypeInfo):
+ self.fail(CANNOT_ASSIGN_TO_TYPE, ctx)
def store_declared_types(self, lvalue: Lvalue, typ: Type) -> None:
if isinstance(typ, StarType) and not isinstance(lvalue, StarExpr):
@@ -1504,10 +1764,10 @@ class SemanticAnalyzer(NodeVisitor):
info.is_newtype = True
# Add __init__ method
- args = [Argument(Var('cls'), NoneTyp(), None, ARG_POS),
+ args = [Argument(Var('self'), NoneTyp(), None, ARG_POS),
self.make_argument('item', old_type)]
signature = CallableType(
- arg_types=[cast(Type, None), old_type],
+ arg_types=[Instance(info, []), old_type],
arg_kinds=[arg.kind for arg in args],
arg_names=['self', 'item'],
ret_type=old_type,
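A minimal sketch of what the synthesized __init__ above is used for; calling the NewType is checked against the (self, item) signature built here:

    from typing import NewType

    UserId = NewType('UserId', int)

    uid = UserId(42)      # argument checked against the synthesized (self, item: int) signature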
@@ -1545,7 +1805,7 @@ class SemanticAnalyzer(NodeVisitor):
res = self.process_typevar_parameters(call.args[1 + n_values:],
call.arg_names[1 + n_values:],
call.arg_kinds[1 + n_values:],
- bool(values),
+ n_values,
s)
if res is None:
return
@@ -1553,7 +1813,7 @@ class SemanticAnalyzer(NodeVisitor):
# Yes, it's a valid type variable definition! Add it to the symbol table.
node = self.lookup(name, s)
- node.kind = UNBOUND_TVAR
+ node.kind = TVAR
TypeVar = TypeVarExpr(name, node.fullname, values, upper_bound, variance)
TypeVar.line = call.line
call.analyzed = TypeVar
@@ -1592,8 +1852,9 @@ class SemanticAnalyzer(NodeVisitor):
def process_typevar_parameters(self, args: List[Expression],
names: List[Optional[str]],
kinds: List[int],
- has_values: bool,
+ num_values: int,
context: Context) -> Optional[Tuple[int, Type]]:
+ has_values = (num_values > 0)
covariant = False
contravariant = False
upper_bound = self.object_type() # type: Type
@@ -1643,6 +1904,9 @@ class SemanticAnalyzer(NodeVisitor):
if covariant and contravariant:
self.fail("TypeVar cannot be both covariant and contravariant", context)
return None
+ elif num_values == 1:
+ self.fail("TypeVar cannot have only a single constraint", context)
+ return None
elif covariant:
variance = COVARIANT
elif contravariant:
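A brief sketch of the new single-constraint rule; the commented line shows the form that is now rejected:

    from typing import TypeVar

    U = TypeVar('U', int, str)    # fine: a value restriction needs two or more types

    # T = TypeVar('T', int)
    #   would now be rejected: "TypeVar cannot have only a single constraint"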
@@ -1688,12 +1952,12 @@ class SemanticAnalyzer(NodeVisitor):
items, types, ok = self.parse_namedtuple_args(call, fullname)
if not ok:
# Error. Construct dummy return value.
- return self.build_namedtuple_typeinfo('namedtuple', [], [])
+ return self.build_namedtuple_typeinfo('namedtuple', [], [], {})
name = cast(StrExpr, call.args[0]).value
if name != var_name or self.is_func_scope():
# Give it a unique name derived from the line number.
name += '@' + str(call.line)
- info = self.build_namedtuple_typeinfo(name, items, types)
+ info = self.build_namedtuple_typeinfo(name, items, types, {})
# Store it as a global just in case it would remain anonymous.
# (Or in the nearest class if there is one.)
stnode = SymbolTableNode(GDEF, info, self.cur_mod_id)
@@ -1786,8 +2050,8 @@ class SemanticAnalyzer(NodeVisitor):
info.bases = [basetype_or_fallback]
return info
- def build_namedtuple_typeinfo(self, name: str, items: List[str],
- types: List[Type]) -> TypeInfo:
+ def build_namedtuple_typeinfo(self, name: str, items: List[str], types: List[Type],
+ default_items: Dict[str, Expression]) -> TypeInfo:
strtype = self.str_type()
basetuple_type = self.named_type('__builtins__.tuple', [AnyType()])
dictype = (self.named_type_or_none('builtins.dict', [strtype, AnyType()])
@@ -1795,7 +2059,12 @@ class SemanticAnalyzer(NodeVisitor):
# Actual signature should return OrderedDict[str, Union[types]]
ordereddictype = (self.named_type_or_none('builtins.dict', [strtype, AnyType()])
or self.object_type())
- fallback = self.named_type('__builtins__.tuple', types)
+ # 'builtins.tuple' has only one type parameter.
+ #
+ # TODO: The corresponding type argument in the fallback instance should be a join of
+ # all item types, but we can't do joins during this pass of semantic analysis
+ # and we are using Any as a workaround.
+ fallback = self.named_type('__builtins__.tuple', [AnyType()])
# Note: actual signature should accept an invariant version of Iterable[UnionType[types]],
# but it can't be expressed. 'new' and 'len' should be callable types.
iterable_type = self.named_type_or_none('typing.Iterable', [AnyType()])
@@ -1819,6 +2088,7 @@ class SemanticAnalyzer(NodeVisitor):
tuple_of_strings = TupleType([strtype for _ in items], basetuple_type)
add_field(Var('_fields', tuple_of_strings), is_initialized_in_class=True)
add_field(Var('_field_types', dictype), is_initialized_in_class=True)
+ add_field(Var('_field_defaults', dictype), is_initialized_in_class=True)
add_field(Var('_source', strtype), is_initialized_in_class=True)
tvd = TypeVarDef('NT', 1, [], info.tuple_type)
@@ -1856,8 +2126,14 @@ class SemanticAnalyzer(NodeVisitor):
add_method('_replace', ret=selftype,
args=[Argument(var, var.type, EllipsisExpr(), ARG_NAMED_OPT) for var in vars])
+
+ def make_init_arg(var: Var) -> Argument:
+ default = default_items.get(var.name(), None)
+ kind = ARG_POS if default is None else ARG_OPT
+ return Argument(var, var.type, default, kind)
+
add_method('__init__', ret=NoneTyp(), name=info.name(),
- args=[Argument(var, var.type, None, ARG_POS) for var in vars])
+ args=[make_init_arg(var) for var in vars])
add_method('_asdict', args=[], ret=ordereddictype)
add_method('_make', ret=selftype, is_classmethod=True,
args=[Argument(Var('iterable', iterable_type), iterable_type, None, ARG_POS),
@@ -1952,10 +2228,6 @@ class SemanticAnalyzer(NodeVisitor):
"TypedDict() expects a dictionary literal as the second argument", call)
dictexpr = args[1]
items, types, ok = self.parse_typeddict_fields_with_types(dictexpr.items, call)
- underscore = [item for item in items if item.startswith('_')]
- if underscore:
- self.fail("TypedDict() item names cannot start with an underscore: "
- + ', '.join(underscore), call)
return items, types, ok
def parse_typeddict_fields_with_types(self, dict_items: List[Tuple[Expression, Expression]],
@@ -1991,6 +2263,165 @@ class SemanticAnalyzer(NodeVisitor):
return info
+ def check_classvar(self, s: AssignmentStmt) -> None:
+ lvalue = s.lvalues[0]
+ if len(s.lvalues) != 1 or not isinstance(lvalue, RefExpr):
+ return
+ if not self.is_classvar(s.type):
+ return
+ if self.is_class_scope() and isinstance(lvalue, NameExpr):
+ node = lvalue.node
+ if isinstance(node, Var):
+ node.is_classvar = True
+ elif not isinstance(lvalue, MemberExpr) or self.is_self_member_ref(lvalue):
+ # In case of member access, report the error only when assigning to self.
+ # Other kinds of member assignments should already be reported.
+ self.fail_invalid_classvar(lvalue)
+
+ def is_classvar(self, typ: Type) -> bool:
+ if not isinstance(typ, UnboundType):
+ return False
+ sym = self.lookup_qualified(typ.name, typ)
+ if not sym or not sym.node:
+ return False
+ return sym.node.fullname() == 'typing.ClassVar'
+
+ def fail_invalid_classvar(self, context: Context) -> None:
+ self.fail('ClassVar can only be used for assignments in class body', context)
+
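A small sketch of what the ClassVar checks above accept and reject (the class and attribute names are illustrative):

    from typing import ClassVar

    class Counter:
        total: ClassVar[int] = 0              # allowed: ClassVar assignment in the class body

        def bump(self) -> None:
            self.step: ClassVar[int] = 1      # error: ClassVar can only be used for assignments in class body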
+ def process_enum_call(self, s: AssignmentStmt) -> None:
+ """Check if s defines an Enum; if yes, store the definition in symbol table."""
+ if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], NameExpr):
+ return
+ lvalue = s.lvalues[0]
+ name = lvalue.name
+ enum_call = self.check_enum_call(s.rvalue, name)
+ if enum_call is None:
+ return
+ # Yes, it's a valid Enum definition. Add it to the symbol table.
+ node = self.lookup(name, s)
+ if node:
+ node.kind = GDEF # TODO locally defined Enum
+ node.node = enum_call
+
+ def check_enum_call(self, node: Expression, var_name: str = None) -> Optional[TypeInfo]:
+ """Check if a call defines an Enum.
+
+ Example:
+
+ A = enum.Enum('A', 'foo bar')
+
+ is equivalent to:
+
+ class A(enum.Enum):
+ foo = 1
+ bar = 2
+ """
+ if not isinstance(node, CallExpr):
+ return None
+ call = node
+ callee = call.callee
+ if not isinstance(callee, RefExpr):
+ return None
+ fullname = callee.fullname
+ if fullname not in ('enum.Enum', 'enum.IntEnum', 'enum.Flag', 'enum.IntFlag'):
+ return None
+ items, values, ok = self.parse_enum_call_args(call, fullname.split('.')[-1])
+ if not ok:
+ # Error. Construct dummy return value.
+ return self.build_enum_call_typeinfo('Enum', [], fullname)
+ name = cast(StrExpr, call.args[0]).value
+ if name != var_name or self.is_func_scope():
+ # Give it a unique name derived from the line number.
+ name += '@' + str(call.line)
+ info = self.build_enum_call_typeinfo(name, items, fullname)
+ # Store it as a global just in case it would remain anonymous.
+ # (Or in the nearest class if there is one.)
+ stnode = SymbolTableNode(GDEF, info, self.cur_mod_id)
+ if self.type:
+ self.type.names[name] = stnode
+ else:
+ self.globals[name] = stnode
+ call.analyzed = EnumCallExpr(info, items, values)
+ call.analyzed.set_line(call.line, call.column)
+ return info
+
+ def build_enum_call_typeinfo(self, name: str, items: List[str], fullname: str) -> TypeInfo:
+ base = self.named_type_or_none(fullname)
+ assert base is not None
+ info = self.basic_new_typeinfo(name, base)
+ info.is_enum = True
+ for item in items:
+ var = Var(item)
+ var.info = info
+ var.is_property = True
+ info.names[item] = SymbolTableNode(MDEF, var)
+ return info
+
+ def parse_enum_call_args(self, call: CallExpr,
+ class_name: str) -> Tuple[List[str],
+ List[Optional[Expression]], bool]:
+ args = call.args
+ if len(args) < 2:
+ return self.fail_enum_call_arg("Too few arguments for %s()" % class_name, call)
+ if len(args) > 2:
+ return self.fail_enum_call_arg("Too many arguments for %s()" % class_name, call)
+ if call.arg_kinds != [ARG_POS, ARG_POS]:
+ return self.fail_enum_call_arg("Unexpected arguments to %s()" % class_name, call)
+ if not isinstance(args[0], (StrExpr, UnicodeExpr)):
+ return self.fail_enum_call_arg(
+ "%s() expects a string literal as the first argument" % class_name, call)
+ items = []
+ values = [] # type: List[Optional[Expression]]
+ if isinstance(args[1], (StrExpr, UnicodeExpr)):
+ fields = args[1].value
+ for field in fields.replace(',', ' ').split():
+ items.append(field)
+ elif isinstance(args[1], (TupleExpr, ListExpr)):
+ seq_items = args[1].items
+ if all(isinstance(seq_item, (StrExpr, UnicodeExpr)) for seq_item in seq_items):
+ items = [cast(StrExpr, seq_item).value for seq_item in seq_items]
+ elif all(isinstance(seq_item, (TupleExpr, ListExpr))
+ and len(seq_item.items) == 2
+ and isinstance(seq_item.items[0], (StrExpr, UnicodeExpr))
+ for seq_item in seq_items):
+ for seq_item in seq_items:
+ assert isinstance(seq_item, (TupleExpr, ListExpr))
+ name, value = seq_item.items
+ assert isinstance(name, (StrExpr, UnicodeExpr))
+ items.append(name.value)
+ values.append(value)
+ else:
+ return self.fail_enum_call_arg(
+ "%s() with tuple or list expects strings or (name, value) pairs" %
+ class_name,
+ call)
+ elif isinstance(args[1], DictExpr):
+ for key, value in args[1].items:
+ if not isinstance(key, (StrExpr, UnicodeExpr)):
+ return self.fail_enum_call_arg(
+ "%s() with dict literal requires string literals" % class_name, call)
+ items.append(key.value)
+ values.append(value)
+ else:
+ # TODO: Allow dict(x=1, y=2) as a substitute for {'x': 1, 'y': 2}?
+ return self.fail_enum_call_arg(
+ "%s() expects a string, tuple, list or dict literal as the second argument" %
+ class_name,
+ call)
+ if len(items) == 0:
+ return self.fail_enum_call_arg("%s() needs at least one item" % class_name, call)
+ if not values:
+ values = [None] * len(items)
+ assert len(items) == len(values)
+ return items, values, True
+
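For orientation, the second-argument forms accepted by the parser above correspond roughly to the functional Enum API:

    from enum import Enum

    Animal1 = Enum('Animal1', 'ANT BEE')                  # space- or comma-separated names
    Animal2 = Enum('Animal2', ['ANT', 'BEE'])             # sequence of names
    Animal3 = Enum('Animal3', [('ANT', 1), ('BEE', 2)])   # (name, value) pairs
    Animal4 = Enum('Animal4', {'ANT': 1, 'BEE': 2})       # dict literal of name -> value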
+ def fail_enum_call_arg(self, message: str,
+ context: Context) -> Tuple[List[str],
+ List[Optional[Expression]], bool]:
+ self.fail(message, context)
+ return [], [], False
+
def visit_decorator(self, dec: Decorator) -> None:
for d in dec.decorators:
d.accept(self)
@@ -2092,8 +2523,10 @@ class SemanticAnalyzer(NodeVisitor):
# Bind index variables and check if they define new names.
self.analyze_lvalue(s.index, explicit_type=s.index_type is not None)
if s.index_type:
+ if self.is_classvar(s.index_type):
+ self.fail_invalid_classvar(s.index)
allow_tuple_literal = isinstance(s.index, (TupleExpr, ListExpr))
- s.index_type = self.anal_type(s.index_type, allow_tuple_literal)
+ s.index_type = self.anal_type(s.index_type, allow_tuple_literal=allow_tuple_literal)
self.store_declared_types(s.index, s.index_type)
self.loop_depth += 1
@@ -2167,8 +2600,10 @@ class SemanticAnalyzer(NodeVisitor):
# Since we have a target, pop the next type from types
if types:
t = types.pop(0)
+ if self.is_classvar(t):
+ self.fail_invalid_classvar(n)
allow_tuple_literal = isinstance(n, (TupleExpr, ListExpr))
- t = self.anal_type(t, allow_tuple_literal)
+ t = self.anal_type(t, allow_tuple_literal=allow_tuple_literal)
new_types.append(t)
self.store_declared_types(n, t)
@@ -2239,7 +2674,7 @@ class SemanticAnalyzer(NodeVisitor):
def visit_name_expr(self, expr: NameExpr) -> None:
n = self.lookup(expr.name, expr)
if n:
- if n.kind == BOUND_TVAR:
+ if n.kind == TVAR and self.tvar_scope.get_binding(n):
self.fail("'{}' is a type variable and only valid in type "
"context".format(expr.name), expr)
else:
@@ -2403,8 +2838,7 @@ class SemanticAnalyzer(NodeVisitor):
# This branch handles the case foo.bar where foo is a module.
# In this case base.node is the module's MypyFile and we look up
# bar in its namespace. This must be done for all types of bar.
- file = base.node
- assert isinstance(file, (MypyFile, type(None)))
+ file = cast(Optional[MypyFile], base.node) # can't use isinstance due to issue #2999
n = file.names.get(expr.name, None) if file is not None else None
if n:
n = self.normalize_type_alias(n, expr)
@@ -2415,7 +2849,7 @@ class SemanticAnalyzer(NodeVisitor):
expr.node = n.node
else:
# We only catch some errors here; the rest will be
- # catched during type checking.
+ # caught during type checking.
#
# This way we can report a larger number of errors in
# one type checker run. If we reported errors here,
@@ -2427,14 +2861,14 @@ class SemanticAnalyzer(NodeVisitor):
self.fail("Module%s has no attribute %r (it's now called %r)" % (
mod_name, expr.name, obsolete_name_mapping[full_name]), expr)
elif isinstance(base, RefExpr) and isinstance(base.node, TypeInfo):
- # This branch handles the case C.bar where C is a class
- # and bar is a module resulting from `import bar` inside
- # class C. Here base.node is a TypeInfo, and again we
- # look up the name in its namespace. This is done only
- # when bar is a module; other things (e.g. methods)
- # are handled by other code in checkmember.
n = base.node.names.get(expr.name)
- if n is not None and n.kind == MODULE_REF:
+ if n is not None and (n.kind == MODULE_REF or isinstance(n.node, TypeInfo)):
+ # This branch handles the case C.bar where C is a class and
+ # bar is a type definition or a module resulting from
+ # `import bar` inside class C. Here base.node is a TypeInfo,
+ # and again we look up the name in its namespace.
+ # This is done only when bar is a module or a type; other
+ # things (e.g. methods) are handled by other code in checkmember.
n = self.normalize_type_alias(n, expr)
if not n:
return
@@ -2455,13 +2889,18 @@ class SemanticAnalyzer(NodeVisitor):
def visit_index_expr(self, expr: IndexExpr) -> None:
expr.base.accept(self)
- if isinstance(expr.base, RefExpr) and expr.base.kind == TYPE_ALIAS:
+ if (isinstance(expr.base, RefExpr)
+ and isinstance(expr.base.node, TypeInfo)
+ and not expr.base.node.is_generic()):
+ expr.index.accept(self)
+ elif isinstance(expr.base, RefExpr) and expr.base.kind == TYPE_ALIAS:
# Special form -- subscripting a generic type alias.
# Perform the type substitution and create a new alias.
res = analyze_type_alias(expr,
self.lookup_qualified,
self.lookup_fully_qualified,
- self.fail)
+ self.tvar_scope,
+ self.fail, allow_unnormalized=self.is_stub_file)
expr.analyzed = TypeAliasExpr(res, fallback=self.alias_fallback(res),
in_runtime=True)
elif refers_to_class_or_function(expr.base):
@@ -2482,9 +2921,23 @@ class SemanticAnalyzer(NodeVisitor):
types.append(typearg)
expr.analyzed = TypeApplication(expr.base, types)
expr.analyzed.line = expr.line
+ # list, dict, set are not directly subscriptable
+ n = self.lookup_type_node(expr.base)
+ if n and not n.normalized and n.fullname in nongen_builtins:
+ self.fail(no_subscript_builtin_alias(n.fullname, propose_alt=False), expr)
else:
expr.index.accept(self)
+ def lookup_type_node(self, expr: Expression) -> Optional[SymbolTableNode]:
+ try:
+ t = expr_to_unanalyzed_type(expr)
+ except TypeTranslationError:
+ return None
+ if isinstance(t, UnboundType):
+ n = self.lookup_qualified(t.name, expr)
+ return n
+ return None
+
def visit_slice_expr(self, expr: SliceExpr) -> None:
if expr.begin_index:
expr.begin_index.accept(self)
@@ -2552,7 +3005,7 @@ class SemanticAnalyzer(NodeVisitor):
"""
expr.sequences[0].accept(self)
- def visit_func_expr(self, expr: FuncExpr) -> None:
+ def visit_lambda_expr(self, expr: LambdaExpr) -> None:
self.analyze_function(expr)
def visit_conditional_expr(self, expr: ConditionalExpr) -> None:
@@ -2571,7 +3024,11 @@ class SemanticAnalyzer(NodeVisitor):
self.fail("'yield' outside function", expr, True, blocker=True)
else:
if self.function_stack[-1].is_coroutine:
- self.fail("'yield' in async function", expr, True, blocker=True)
+ if self.options.python_version < (3, 6):
+ self.fail("'yield' in async function", expr, True, blocker=True)
+ else:
+ self.function_stack[-1].is_generator = True
+ self.function_stack[-1].is_async_generator = True
else:
self.function_stack[-1].is_generator = True
if expr.expr:
@@ -2588,6 +3045,13 @@ class SemanticAnalyzer(NodeVisitor):
# Helpers
#
+ @contextmanager
+ def tvar_scope_frame(self, frame: TypeVarScope) -> Iterator[None]:
+ old_scope = self.tvar_scope
+ self.tvar_scope = frame
+ yield
+ self.tvar_scope = old_scope
+
def lookup(self, name: str, ctx: Context) -> SymbolTableNode:
"""Look up an unqualified name in all active namespaces."""
# 1a. Name declared using 'global x' takes precedence
@@ -2673,9 +3137,10 @@ class SemanticAnalyzer(NodeVisitor):
return n
def builtin_type(self, fully_qualified_name: str) -> Instance:
- node = self.lookup_fully_qualified(fully_qualified_name)
- assert isinstance(node.node, TypeInfo)
- return Instance(node.node, [])
+ sym = self.lookup_fully_qualified(fully_qualified_name)
+ node = sym.node
+ assert isinstance(node, TypeInfo)
+ return Instance(node, [AnyType()] * len(node.defn.type_vars))
def lookup_fully_qualified(self, name: str) -> SymbolTableNode:
"""Lookup a fully qualified name.
@@ -2859,7 +3324,7 @@ class FirstPass(NodeVisitor):
self.pyversion = options.python_version
self.platform = options.platform
sem.cur_mod_id = mod_id
- sem.errors.set_file(fnam)
+ sem.errors.set_file(fnam, mod_id)
sem.globals = SymbolTable()
sem.global_decls = [set()]
sem.nonlocal_decls = [set()]
@@ -2961,6 +3426,26 @@ class FirstPass(NodeVisitor):
func._fullname = self.sem.qualified_name(func.name())
if kind == GDEF:
self.sem.globals[func.name()] = SymbolTableNode(kind, func, self.sem.cur_mod_id)
+ if func.impl:
+ impl = func.impl
+ # Also analyze the function body (in case there are conditional imports).
+ sem = self.sem
+
+ if isinstance(impl, FuncDef):
+ sem.function_stack.append(impl)
+ sem.errors.push_function(func.name())
+ sem.enter()
+ impl.body.accept(self)
+ elif isinstance(impl, Decorator):
+ sem.function_stack.append(impl.func)
+ sem.errors.push_function(func.name())
+ sem.enter()
+ impl.func.body.accept(self)
+ else:
+ assert False, "Implementation of an overload needs to be FuncDef or Decorator"
+ sem.leave()
+ sem.errors.pop_function()
+ sem.function_stack.pop()
def visit_class_def(self, cdef: ClassDef) -> None:
kind = self.kind_by_scope()
@@ -3084,10 +3569,23 @@ class ThirdPass(TraverserVisitor):
self.errors = errors
def visit_file(self, file_node: MypyFile, fnam: str, options: Options) -> None:
- self.errors.set_file(fnam)
+ self.errors.set_file(fnam, file_node.fullname())
self.options = options
self.accept(file_node)
+ def refresh_partial(self, node: Union[MypyFile, FuncItem]) -> None:
+ """Refresh a stale target in fine-grained incremental mode."""
+ if isinstance(node, MypyFile):
+ self.refresh_top_level(node)
+ else:
+ self.accept(node)
+
+ def refresh_top_level(self, file_node: MypyFile) -> None:
+ """Reanalyze a stale module top-level in fine-grained incremental mode."""
+ for d in file_node.defs:
+ if not isinstance(d, (FuncItem, ClassDef)):
+ self.accept(d)
+
def accept(self, node: Node) -> None:
try:
node.accept(self)
@@ -3200,8 +3698,12 @@ class ThirdPass(TraverserVisitor):
def builtin_type(self, name: str, args: List[Type] = None) -> Instance:
names = self.modules['builtins']
sym = names.names[name]
- assert isinstance(sym.node, TypeInfo)
- return Instance(sym.node, args or [])
+ node = sym.node
+ assert isinstance(node, TypeInfo)
+ if args:
+ # TODO: assert len(args) == len(node.defn.type_vars)
+ return Instance(node, args)
+ return Instance(node, [AnyType()] * len(node.defn.type_vars))
def replace_implicit_first_type(sig: FunctionLike, new: Type) -> FunctionLike:
@@ -3258,18 +3760,6 @@ def find_duplicate(list: List[T]) -> T:
return None
-def disable_typevars(nodes: List[SymbolTableNode]) -> None:
- for node in nodes:
- assert node.kind in (BOUND_TVAR, UNBOUND_TVAR)
- node.kind = UNBOUND_TVAR
-
-
-def enable_typevars(nodes: List[SymbolTableNode]) -> None:
- for node in nodes:
- assert node.kind in (BOUND_TVAR, UNBOUND_TVAR)
- node.kind = BOUND_TVAR
-
-
def remove_imported_names_from_symtable(names: SymbolTable,
module: str) -> None:
"""Remove all imported names from the symbol table of a module."""
diff --git a/mypy/sharedparse.py b/mypy/sharedparse.py
index 1643aac..157bb3f 100644
--- a/mypy/sharedparse.py
+++ b/mypy/sharedparse.py
@@ -87,6 +87,7 @@ MAGIC_METHODS_ALLOWING_KWARGS = {
"__init__",
"__init_subclass__",
"__new__",
+ "__call__",
}
MAGIC_METHODS_POS_ARGS_ONLY = MAGIC_METHODS - MAGIC_METHODS_ALLOWING_KWARGS
diff --git a/mypy/solve.py b/mypy/solve.py
index 07346f0..ad6a882 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -1,9 +1,9 @@
"""Type inference constraint solving"""
-from typing import List, Dict
+from typing import List, Dict, Optional
from collections import defaultdict
-from mypy.types import Type, Void, NoneTyp, AnyType, ErrorType, UninhabitedType, TypeVarId
+from mypy.types import Type, NoneTyp, AnyType, UninhabitedType, TypeVarId
from mypy.constraints import Constraint, SUPERTYPE_OF
from mypy.join import join_types
from mypy.meet import meet_types
@@ -13,7 +13,7 @@ from mypy import experiments
def solve_constraints(vars: List[TypeVarId], constraints: List[Constraint],
- strict: bool =True) -> List[Type]:
+ strict: bool =True) -> List[Optional[Type]]:
"""Solve type constraints.
Return the best type(s) for type variables; each type can be None if the value of the variable
@@ -28,12 +28,13 @@ def solve_constraints(vars: List[TypeVarId], constraints: List[Constraint],
for con in constraints:
cmap[con.type_var].append(con)
- res = [] # type: List[Type]
+ res = [] # type: List[Optional[Type]]
# Solve each type variable separately.
for tvar in vars:
- bottom = None # type: Type
- top = None # type: Type
+ bottom = None # type: Optional[Type]
+ top = None # type: Optional[Type]
+ candidate = None # type: Optional[Type]
# Process each constraint separately, and calculate the lower and upper
# bounds based on constraints. Note that we assume that the constraint
@@ -57,12 +58,9 @@ def solve_constraints(vars: List[TypeVarId], constraints: List[Constraint],
if top:
candidate = top
else:
- # No constraints for type variable -- type 'None' is the most specific type.
+ # No constraints for type variable -- 'UninhabitedType' is the most specific type.
if strict:
- if experiments.STRICT_OPTIONAL:
- candidate = UninhabitedType()
- else:
- candidate = NoneTyp()
+ candidate = UninhabitedType()
else:
candidate = AnyType()
elif top is None:
@@ -71,9 +69,6 @@ def solve_constraints(vars: List[TypeVarId], constraints: List[Constraint],
candidate = bottom
else:
candidate = None
- if isinstance(candidate, ErrorType):
- res.append(None)
- else:
- res.append(candidate)
+ res.append(candidate)
return res
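To make the per-variable control flow in solve_constraints() above concrete, here is a deliberately simplified, standalone model (this is not mypy's API: plain ints ordered by <= stand in for types, and strings stand in for UninhabitedType/AnyType):

    from typing import List, Optional, Union

    Result = Optional[Union[int, str]]

    def solve_one(lower_bounds: List[int], upper_bounds: List[int],
                  strict: bool = True) -> Result:
        # Join of SUPERTYPE_OF constraints and meet of SUBTYPE_OF constraints.
        bottom = max(lower_bounds) if lower_bounds else None
        top = min(upper_bounds) if upper_bounds else None
        if bottom is None:
            if top is not None:
                return top
            # No constraints at all: the most specific type in strict mode.
            return 'UninhabitedType' if strict else 'AnyType'
        if top is None:
            return bottom
        # Both bounds present: prefer the lower bound if it fits under the
        # upper bound; otherwise the constraints are inconsistent (None).
        return bottom if bottom <= top else None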
diff --git a/mypy/stats.py b/mypy/stats.py
index 5d6df35..3739763 100644
--- a/mypy/stats.py
+++ b/mypy/stats.py
@@ -7,8 +7,7 @@ from typing import Any, Dict, List, cast, Tuple
from mypy.traverser import TraverserVisitor
from mypy.types import (
- Type, AnyType, Instance, FunctionLike, TupleType, Void, TypeVarType,
- TypeQuery, ANY_TYPE_STRATEGY, CallableType
+ Type, AnyType, Instance, FunctionLike, TupleType, TypeVarType, TypeQuery, CallableType
)
from mypy import nodes
from mypy.nodes import (
@@ -177,8 +176,6 @@ class StatisticsVisitor(TraverserVisitor):
self.num_generic += 1
else:
self.num_simple += 1
- elif isinstance(t, Void):
- self.num_simple += 1
elif isinstance(t, FunctionLike):
self.num_function += 1
elif isinstance(t, TupleType):
@@ -228,9 +225,9 @@ def is_imprecise(t: Type) -> bool:
return t.accept(HasAnyQuery())
-class HasAnyQuery(TypeQuery):
+class HasAnyQuery(TypeQuery[bool]):
def __init__(self) -> None:
- super().__init__(False, ANY_TYPE_STRATEGY)
+ super().__init__(any)
def visit_any(self, t: AnyType) -> bool:
return True
diff --git a/mypy/strconv.py b/mypy/strconv.py
index d7c1e48..ab0593a 100644
--- a/mypy/strconv.py
+++ b/mypy/strconv.py
@@ -3,9 +3,9 @@
import re
import os
-from typing import Any, List, Tuple, Optional, Union, Sequence
+from typing import Any, List, Tuple, Optional, Union, Sequence, Dict
-from mypy.util import dump_tagged, short_type
+from mypy.util import short_type, IdMapper
import mypy.nodes
from mypy.visitor import NodeVisitor
@@ -21,6 +21,24 @@ class StrConv(NodeVisitor[str]):
ExpressionStmt:1(
IntExpr(1)))
"""
+
+ def __init__(self, show_ids: bool = False) -> None:
+ self.show_ids = show_ids
+ self.id_mapper = None # type: Optional[IdMapper]
+ if show_ids:
+ self.id_mapper = IdMapper()
+
+ def get_id(self, o: object) -> Optional[int]:
+ if self.id_mapper:
+ return self.id_mapper.id(o)
+ return None
+
+ def format_id(self, o: object) -> str:
+ if self.id_mapper:
+ return '<{}>'.format(self.get_id(o))
+ else:
+ return ''
+
def dump(self, nodes: Sequence[object], obj: 'mypy.nodes.Context') -> str:
"""Convert a list of items to a multiline pretty-printed string.
@@ -28,7 +46,11 @@ class StrConv(NodeVisitor[str]):
number. See mypy.util.dump_tagged for a description of the nodes
argument.
"""
- return dump_tagged(nodes, short_type(obj) + ':' + str(obj.get_line()))
+ tag = short_type(obj) + ':' + str(obj.get_line())
+ if self.show_ids:
+ assert self.id_mapper is not None
+ tag += '<{}>'.format(self.get_id(obj))
+ return dump_tagged(nodes, tag, self)
def func_helper(self, o: 'mypy.nodes.FuncItem') -> List[object]:
"""Return a list in a format suitable for dump() that represents the
@@ -125,6 +147,8 @@ class StrConv(NodeVisitor[str]):
a = o.items[:] # type: Any
if o.type:
a.insert(0, o.type)
+ if o.impl:
+ a.insert(0, o.impl)
return self.dump(a, o)
def visit_class_def(self, o: 'mypy.nodes.ClassDef') -> str:
@@ -318,29 +342,35 @@ class StrConv(NodeVisitor[str]):
return self.dump([o.expr], o)
def visit_name_expr(self, o: 'mypy.nodes.NameExpr') -> str:
- return (short_type(o) + '(' + self.pretty_name(o.name, o.kind,
- o.fullname, o.is_def)
- + ')')
+ pretty = self.pretty_name(o.name, o.kind, o.fullname, o.is_def, o.node)
+ return short_type(o) + '(' + pretty + ')'
- def pretty_name(self, name: str, kind: int, fullname: str, is_def: bool) -> str:
+ def pretty_name(self, name: str, kind: int, fullname: str, is_def: bool,
+ target_node: 'mypy.nodes.Node' = None) -> str:
n = name
if is_def:
n += '*'
+ if target_node:
+ id = self.format_id(target_node)
+ else:
+ id = ''
if kind == mypy.nodes.GDEF or (fullname != name and
fullname is not None):
# Append fully qualified name for global references.
- n += ' [{}]'.format(fullname)
+ n += ' [{}{}]'.format(fullname, id)
elif kind == mypy.nodes.LDEF:
# Add tag to signify a local reference.
- n += ' [l]'
+ n += ' [l{}]'.format(id)
elif kind == mypy.nodes.MDEF:
# Add tag to signify a member reference.
- n += ' [m]'
+ n += ' [m{}]'.format(id)
+ else:
+ n += id
return n
def visit_member_expr(self, o: 'mypy.nodes.MemberExpr') -> str:
- return self.dump([o.expr, self.pretty_name(o.name, o.kind, o.fullname,
- o.is_def)], o)
+ pretty = self.pretty_name(o.name, o.kind, o.fullname, o.is_def, o.node)
+ return self.dump([o.expr, pretty], o)
def visit_yield_expr(self, o: 'mypy.nodes.YieldExpr') -> str:
return self.dump([o.expr], o)
@@ -429,6 +459,9 @@ class StrConv(NodeVisitor[str]):
o.info.name(),
o.info.tuple_type)
+ def visit_enum_call_expr(self, o: 'mypy.nodes.EnumCallExpr') -> str:
+ return 'EnumCallExpr:{}({}, {})'.format(o.line, o.info.name(), o.items)
+
def visit_typeddict_expr(self, o: 'mypy.nodes.TypedDictExpr') -> str:
return 'TypedDictExpr:{}({})'.format(o.line,
o.info.name())
@@ -440,7 +473,7 @@ class StrConv(NodeVisitor[str]):
return 'NewTypeExpr:{}({}, {})'.format(o.line, o.name,
self.dump([o.old_type], o))
- def visit_func_expr(self, o: 'mypy.nodes.FuncExpr') -> str:
+ def visit_lambda_expr(self, o: 'mypy.nodes.LambdaExpr') -> str:
a = self.func_helper(o)
return self.dump(a, o)
@@ -471,3 +504,48 @@ class StrConv(NodeVisitor[str]):
def visit_backquote_expr(self, o: 'mypy.nodes.BackquoteExpr') -> str:
return self.dump([o.expr], o)
+
+ def visit_temp_node(self, o: 'mypy.nodes.TempNode') -> str:
+ return self.dump([o.type], o)
+
+
+def dump_tagged(nodes: Sequence[object], tag: Optional[str], str_conv: 'StrConv') -> str:
+ """Convert an array into a pretty-printed multiline string representation.
+
+ The format is
+ tag(
+ item1..
+ itemN)
+ Individual items are formatted like this:
+ - arrays are flattened
+ - pairs (str, array) are converted recursively, so that str is the tag
+ - other items are converted to strings and indented
+ """
+ from mypy.types import Type, TypeStrVisitor
+
+ a = [] # type: List[str]
+ if tag:
+ a.append(tag + '(')
+ for n in nodes:
+ if isinstance(n, list):
+ if n:
+ a.append(dump_tagged(n, None, str_conv))
+ elif isinstance(n, tuple):
+ s = dump_tagged(n[1], n[0], str_conv)
+ a.append(indent(s, 2))
+ elif isinstance(n, mypy.nodes.Node):
+ a.append(indent(n.accept(str_conv), 2))
+ elif isinstance(n, Type):
+ a.append(indent(n.accept(TypeStrVisitor(str_conv.id_mapper)), 2))
+ elif n:
+ a.append(indent(str(n), 2))
+ if tag:
+ a[-1] += ')'
+ return '\n'.join(a)
+
+
+def indent(s: str, n: int) -> str:
+ """Indent all the lines in s (separated by newlines) by n spaces."""
+ s = ' ' * n + s
+ s = s.replace('\n', '\n' + ' ' * n)
+ return s
diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index 86b5f92..6e25ecd 100644
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -36,7 +36,6 @@ TODO:
"""
import glob
-import imp
import importlib
import json
import os.path
@@ -44,6 +43,7 @@ import pkgutil
import subprocess
import sys
import textwrap
+import traceback
from typing import (
Any, List, Dict, Tuple, Iterable, Iterator, Optional, NamedTuple, Set, Union, cast
@@ -73,24 +73,33 @@ Options = NamedTuple('Options', [('pyversion', Tuple[int, int]),
('modules', List[str]),
('ignore_errors', bool),
('recursive', bool),
- ('fast_parser', bool),
])
+class CantImport(Exception):
+ pass
+
+
def generate_stub_for_module(module: str, output_dir: str, quiet: bool = False,
add_header: bool = False, sigs: Dict[str, str] = {},
class_sigs: Dict[str, str] = {},
pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
- fast_parser: bool = False,
no_import: bool = False,
search_path: List[str] = [],
interpreter: str = sys.executable) -> None:
target = module.replace('.', '/')
- result = find_module_path_and_all(module=module,
- pyversion=pyversion,
- no_import=no_import,
- search_path=search_path,
- interpreter=interpreter)
+ try:
+ result = find_module_path_and_all(module=module,
+ pyversion=pyversion,
+ no_import=no_import,
+ search_path=search_path,
+ interpreter=interpreter)
+ except CantImport:
+ if not quiet:
+ traceback.print_exc()
+ print('Failed to import %s; skipping it' % module)
+ return
+
if not result:
# C module
target = os.path.join(output_dir, target + '.pyi')
@@ -109,7 +118,7 @@ def generate_stub_for_module(module: str, output_dir: str, quiet: bool = False,
target = os.path.join(output_dir, target)
generate_stub(module_path, output_dir, module_all,
target=target, add_header=add_header, module=module,
- pyversion=pyversion, fast_parser=fast_parser)
+ pyversion=pyversion)
if not quiet:
print('Created %s' % target)
@@ -129,8 +138,10 @@ def find_module_path_and_all(module: str, pyversion: Tuple[int, int],
module_path, module_all = load_python_module_info(module, interpreter)
else:
# TODO: Support custom interpreters.
- mod = importlib.import_module(module)
- imp.reload(mod)
+ try:
+ mod = importlib.import_module(module)
+ except Exception:
+ raise CantImport(module)
if is_c_module(mod):
return None
module_path = mod.__file__
@@ -174,13 +185,12 @@ def load_python_module_info(module: str, interpreter: str) -> Tuple[str, Optiona
def generate_stub(path: str, output_dir: str, _all_: Optional[List[str]] = None,
target: str = None, add_header: bool = False, module: str = None,
- pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
- fast_parser: bool = False) -> None:
+ pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION
+ ) -> None:
with open(path, 'rb') as f:
source = f.read()
options = MypyOptions()
options.python_version = pyversion
- options.fast_parser = fast_parser
try:
ast = mypy.parse.parse(source, fnam=path, errors=None, options=options)
except mypy.errors.CompileError as e:
@@ -301,11 +311,10 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
super().visit_decorator(o)
def visit_class_def(self, o: ClassDef) -> None:
+ sep = None # type: Optional[int]
if not self._indent and self._state != EMPTY:
sep = len(self._output)
self.add('\n')
- else:
- sep = None
self.add('%sclass %s' % (self._indent, o.name))
self.record_name(o.name)
base_types = self.get_base_types(o)
@@ -455,7 +464,7 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
self.add_import_line('import %s as %s\n' % (id, target_name))
self.record_name(target_name)
- def get_init(self, lvalue: str, rvalue: Expression) -> str:
+ def get_init(self, lvalue: str, rvalue: Expression) -> Optional[str]:
"""Return initializer for a variable.
Return None if we've generated one already or if the variable is internal.
@@ -501,7 +510,9 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
def is_not_in_all(self, name: str) -> bool:
if self.is_private_name(name):
return False
- return self.is_top_level() and bool(self._all_) and name not in self._all_
+ if self._all_:
+ return self.is_top_level() and name not in self._all_
+ return False
def is_private_name(self, name: str) -> bool:
return name.startswith('_') and (not name.endswith('__')
@@ -623,7 +634,6 @@ def main() -> None:
sigs=sigs,
class_sigs=class_sigs,
pyversion=options.pyversion,
- fast_parser=options.fast_parser,
no_import=options.no_import,
search_path=options.search_path,
interpreter=options.interpreter)
@@ -643,7 +653,6 @@ def parse_options() -> Options:
doc_dir = ''
search_path = [] # type: List[str]
interpreter = ''
- fast_parser = False
while args and args[0].startswith('-'):
if args[0] == '--doc-dir':
doc_dir = args[1]
@@ -658,8 +667,6 @@ def parse_options() -> Options:
args = args[1:]
elif args[0] == '--recursive':
recursive = True
- elif args[0] == '--fast-parser':
- fast_parser = True
elif args[0] == '--ignore-errors':
ignore_errors = True
elif args[0] == '--py2':
@@ -682,8 +689,7 @@ def parse_options() -> Options:
interpreter=interpreter,
modules=args,
ignore_errors=ignore_errors,
- recursive=recursive,
- fast_parser=fast_parser)
+ recursive=recursive)
def default_python2_interpreter() -> str:
@@ -711,7 +717,6 @@ def usage() -> None:
Options:
--py2 run in Python 2 mode (default: Python 3 mode)
--recursive traverse listed modules to generate inner package modules as well
- --fast-parser enable experimental fast parser
--ignore-errors ignore errors when trying to generate stubs for modules
--no-import don't import the modules, just parse and analyze them
(doesn't work with C extension modules and doesn't
diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py
index 278193f..92b78fa 100644
--- a/mypy/stubgenc.py
+++ b/mypy/stubgenc.py
@@ -111,12 +111,15 @@ def generate_c_function_stub(module: ModuleType,
self_arg = '%s, ' % self_var
else:
self_arg = ''
- if name in ('__new__', '__init__') and name not in sigs and class_name in class_sigs:
+ if (name in ('__new__', '__init__') and name not in sigs and class_name and
+ class_name in class_sigs):
sig = class_sigs[class_name]
else:
docstr = getattr(obj, '__doc__', None)
- sig = infer_sig_from_docstring(docstr, name)
- if not sig:
+ inferred = infer_sig_from_docstring(docstr, name)
+ if inferred:
+ sig = inferred
+ else:
if class_name and name not in sigs:
sig = infer_method_sig(name)
else:
@@ -165,10 +168,12 @@ def generate_c_type_stub(module: ModuleType,
continue
if attr not in done:
variables.append('%s = ... # type: Any' % attr)
- all_bases = obj.mro()[1:]
+ all_bases = obj.mro()
if all_bases[-1] is object:
# TODO: Is this always object?
del all_bases[-1]
+ # remove the class itself
+ all_bases = all_bases[1:]
# Remove base classes of other bases as redundant.
bases = [] # type: List[type]
for base in all_bases:
diff --git a/mypy/stubutil.py b/mypy/stubutil.py
index f0d8a17..93153f2 100644
--- a/mypy/stubutil.py
+++ b/mypy/stubutil.py
@@ -90,7 +90,8 @@ def is_c_module(module: ModuleType) -> bool:
return '__file__' not in module.__dict__ or module.__dict__['__file__'].endswith('.so')
-def write_header(file: IO[str], module_name: str, pyversion: Tuple[int, int] = (3, 5)) -> None:
+def write_header(file: IO[str], module_name: Optional[str] = None,
+ pyversion: Tuple[int, int] = (3, 5)) -> None:
if module_name:
if pyversion[0] >= 3:
version = '%d.%d' % (sys.version_info.major,
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index c91250e..8ca6421 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -1,12 +1,14 @@
-from typing import List, Optional, Dict, Callable
+from typing import List, Optional, Dict, Callable, cast
from mypy.types import (
- Type, AnyType, UnboundType, TypeVisitor, ErrorType, FormalArgument, Void, NoneTyp,
+ Type, AnyType, UnboundType, TypeVisitor, FormalArgument, NoneTyp,
Instance, TypeVarType, CallableType, TupleType, TypedDictType, UnionType, Overloaded,
- ErasedType, TypeList, PartialType, DeletedType, UninhabitedType, TypeType, is_named_instance
+ ErasedType, TypeList, PartialType, DeletedType, UninhabitedType, TypeType,
+ is_named_instance
)
import mypy.applytype
import mypy.constraints
+from mypy.erasetype import erase_type
# Circular import; done in the function instead.
# import mypy.solve
from mypy import messages, sametypes
@@ -15,6 +17,7 @@ from mypy.nodes import (
ARG_POS, ARG_OPT, ARG_NAMED, ARG_NAMED_OPT, ARG_STAR, ARG_STAR2,
)
from mypy.maptype import map_instance_to_supertype
+from mypy.sametypes import is_same_type
from mypy import experiments
@@ -49,12 +52,30 @@ def is_subtype(left: Type, right: Type,
or isinstance(right, ErasedType)):
return True
elif isinstance(right, UnionType) and not isinstance(left, UnionType):
- return any(is_subtype(left, item, type_parameter_checker,
- ignore_pos_arg_names=ignore_pos_arg_names)
- for item in right.items)
- else:
- return left.accept(SubtypeVisitor(right, type_parameter_checker,
- ignore_pos_arg_names=ignore_pos_arg_names))
+ # Normally, when 'left' is not itself a union, the only way
+ # 'left' can be a subtype of the union 'right' is if it is a
+ # subtype of one of the items making up the union.
+ is_subtype_of_item = any(is_subtype(left, item, type_parameter_checker,
+ ignore_pos_arg_names=ignore_pos_arg_names)
+ for item in right.items)
+ # However, if 'left' is a type variable T, T might also have
+ # an upper bound which is itself a union. This case will be
+ # handled below by the SubtypeVisitor. We have to check both
+ # possibilities, to handle both cases like T <: Union[T, U]
+ # and cases like T <: B where B is the upper bound of T and is
+ # a union. (See #2314.)
+ if not isinstance(left, TypeVarType):
+ return is_subtype_of_item
+ elif is_subtype_of_item:
+ return True
+ # otherwise, fall through
+ # Treat builtins.type the same as Type[Any]
+ elif is_named_instance(left, 'builtins.type'):
+ return is_subtype(TypeType(AnyType()), right)
+ elif is_named_instance(right, 'builtins.type'):
+ return is_subtype(left, TypeType(AnyType()))
+ return left.accept(SubtypeVisitor(right, type_parameter_checker,
+ ignore_pos_arg_names=ignore_pos_arg_names))
def is_subtype_ignoring_tvars(left: Type, right: Type) -> bool:
@@ -74,19 +95,6 @@ def is_equivalent(a: Type,
and is_subtype(b, a, type_parameter_checker, ignore_pos_arg_names=ignore_pos_arg_names))
-def satisfies_upper_bound(a: Type, upper_bound: Type) -> bool:
- """Is 'a' valid value for a type variable with the given 'upper_bound'?
-
- Same as is_subtype except that Void is considered to be a subtype of
- any upper_bound. This is needed in a case like
-
- def f(g: Callable[[], T]) -> T: ...
- def h() -> None: ...
- f(h)
- """
- return isinstance(a, Void) or is_subtype(a, upper_bound)
-
-
class SubtypeVisitor(TypeVisitor[bool]):
def __init__(self, right: Type,
@@ -102,27 +110,18 @@ class SubtypeVisitor(TypeVisitor[bool]):
def visit_unbound_type(self, left: UnboundType) -> bool:
return True
- def visit_error_type(self, left: ErrorType) -> bool:
- return False
-
- def visit_type_list(self, t: TypeList) -> bool:
- assert False, 'Not supported'
-
def visit_any(self, left: AnyType) -> bool:
return True
- def visit_void(self, left: Void) -> bool:
- return isinstance(self.right, Void)
-
def visit_none_type(self, left: NoneTyp) -> bool:
if experiments.STRICT_OPTIONAL:
return (isinstance(self.right, NoneTyp) or
is_named_instance(self.right, 'builtins.object'))
else:
- return not isinstance(self.right, Void)
+ return True
def visit_uninhabited_type(self, left: UninhabitedType) -> bool:
- return not isinstance(self.right, Void)
+ return True
def visit_erased_type(self, left: ErasedType) -> bool:
return True
@@ -137,10 +136,14 @@ class SubtypeVisitor(TypeVisitor[bool]):
if isinstance(right, TupleType) and right.fallback.type.is_enum:
return is_subtype(left, right.fallback)
if isinstance(right, Instance):
- if left.type._promote and is_subtype(
- left.type._promote, self.right, self.check_type_parameter,
- ignore_pos_arg_names=self.ignore_pos_arg_names):
- return True
+ # NOTE: left.type.mro may be None in quick mode if there
+ # was an error somewhere.
+ if left.type.mro is not None:
+ for base in left.type.mro:
+ if base._promote and is_subtype(
+ base._promote, self.right, self.check_type_parameter,
+ ignore_pos_arg_names=self.ignore_pos_arg_names):
+ return True
rname = right.type.fullname()
if not left.type.has_base(rname) and rname != 'builtins.object':
return False
@@ -151,6 +154,18 @@ class SubtypeVisitor(TypeVisitor[bool]):
return all(self.check_type_parameter(lefta, righta, tvar.variance)
for lefta, righta, tvar in
zip(t.args, right.args, right.type.defn.type_vars))
+ if isinstance(right, TypeType):
+ item = right.item
+ if isinstance(item, TupleType):
+ item = item.fallback
+ if isinstance(item, Instance):
+ return is_subtype(left, item.type.metaclass_type)
+ elif isinstance(item, AnyType):
+ # Special case: all metaclasses are subtypes of Type[Any]
+ mro = left.type.mro or []
+ return any(base.fullname() == 'builtins.type' for base in mro)
+ else:
+ return False
else:
return False
@@ -228,8 +243,7 @@ class SubtypeVisitor(TypeVisitor[bool]):
right = self.right
if isinstance(right, Instance):
return is_subtype(left.fallback, right)
- elif isinstance(right, CallableType) or is_named_instance(
- right, 'builtins.type'):
+ elif isinstance(right, CallableType):
for item in left.items():
if is_subtype(item, right, self.check_type_parameter,
ignore_pos_arg_names=self.ignore_pos_arg_names):
@@ -268,20 +282,27 @@ class SubtypeVisitor(TypeVisitor[bool]):
return is_subtype(left.item, right.item)
if isinstance(right, CallableType):
# This is unsound, we don't check the __init__ signature.
- return right.is_type_obj() and is_subtype(left.item, right.ret_type)
- if (isinstance(right, Instance) and
- right.type.fullname() in ('builtins.type', 'builtins.object')):
- # Treat builtins.type the same as Type[Any];
- # treat builtins.object the same as Any.
- return True
+ return is_subtype(left.item, right.ret_type)
+ if isinstance(right, Instance):
+ if right.type.fullname() == 'builtins.object':
+ # treat builtins.object the same as Any.
+ return True
+ item = left.item
+ return isinstance(item, Instance) and is_subtype(item, right.type.metaclass_type)
return False
def is_callable_subtype(left: CallableType, right: CallableType,
ignore_return: bool = False,
- ignore_pos_arg_names: bool = False) -> bool:
+ ignore_pos_arg_names: bool = False,
+ use_proper_subtype: bool = False) -> bool:
"""Is left a subtype of right?"""
+ if use_proper_subtype:
+ is_compat = is_proper_subtype
+ else:
+ is_compat = is_subtype
+
# If either function is implicitly typed, ignore positional arg names too
if left.implicit or right.implicit:
ignore_pos_arg_names = True
@@ -303,12 +324,14 @@ def is_callable_subtype(left: CallableType, right: CallableType,
if left.variables:
# Apply generic type variables away in left via type inference.
- left = unify_generic_callable(left, right, ignore_return=ignore_return)
- if left is None:
+ unified = unify_generic_callable(left, right, ignore_return=ignore_return)
+ if unified is None:
return False
+ else:
+ left = unified
# Check return types.
- if not ignore_return and not is_subtype(left.ret_type, right.ret_type):
+ if not ignore_return and not is_compat(left.ret_type, right.ret_type):
return False
if right.is_ellipsis_args:
@@ -365,7 +388,7 @@ def is_callable_subtype(left: CallableType, right: CallableType,
right_by_position = right.argument_by_position(j)
assert right_by_position is not None
if not are_args_compatible(left_by_position, right_by_position,
- ignore_pos_arg_names):
+ ignore_pos_arg_names, use_proper_subtype):
return False
j += 1
continue
@@ -388,7 +411,7 @@ def is_callable_subtype(left: CallableType, right: CallableType,
right_by_name = right.argument_by_name(name)
assert right_by_name is not None
if not are_args_compatible(left_by_name, right_by_name,
- ignore_pos_arg_names):
+ ignore_pos_arg_names, use_proper_subtype):
return False
continue
@@ -397,7 +420,7 @@ def is_callable_subtype(left: CallableType, right: CallableType,
if left_arg is None:
return False
- if not are_args_compatible(left_arg, right_arg, ignore_pos_arg_names):
+ if not are_args_compatible(left_arg, right_arg, ignore_pos_arg_names, use_proper_subtype):
return False
done_with_positional = False
@@ -413,11 +436,11 @@ def is_callable_subtype(left: CallableType, right: CallableType,
# Check that *args and **kwargs types match in this loop
if left_kind == ARG_STAR:
- if right_star_type is not None and not is_subtype(right_star_type, left_arg.typ):
+ if right_star_type is not None and not is_compat(right_star_type, left_arg.typ):
return False
continue
elif left_kind == ARG_STAR2:
- if right_star2_type is not None and not is_subtype(right_star2_type, left_arg.typ):
+ if right_star2_type is not None and not is_compat(right_star2_type, left_arg.typ):
return False
continue
@@ -448,7 +471,8 @@ def is_callable_subtype(left: CallableType, right: CallableType,
def are_args_compatible(
left: FormalArgument,
right: FormalArgument,
- ignore_pos_arg_names: bool) -> bool:
+ ignore_pos_arg_names: bool,
+ use_proper_subtype: bool) -> bool:
# If right has a specific name it wants this argument to be, left must
# have the same.
if right.name is not None and left.name != right.name:
@@ -459,8 +483,12 @@ def are_args_compatible(
if right.pos is not None and left.pos != right.pos:
return False
# Left must have a more general type
- if not is_subtype(right.typ, left.typ):
- return False
+ if use_proper_subtype:
+ if not is_proper_subtype(right.typ, left.typ):
+ return False
+ else:
+ if not is_subtype(right.typ, left.typ):
+ return False
# If right's argument is optional, left's must also be.
if not right.required and left.required:
return False
@@ -468,7 +496,7 @@ def are_args_compatible(
def unify_generic_callable(type: CallableType, target: CallableType,
- ignore_return: bool) -> CallableType:
+ ignore_return: bool) -> Optional[CallableType]:
"""Try to unify a generic callable type with another callable type.
Return unified CallableType if successful; otherwise, return None.
@@ -487,54 +515,195 @@ def unify_generic_callable(type: CallableType, target: CallableType,
inferred_vars = mypy.solve.solve_constraints(type_var_ids, constraints)
if None in inferred_vars:
return None
+ non_none_inferred_vars = cast(List[Type], inferred_vars)
msg = messages.temp_message_builder()
- applied = mypy.applytype.apply_generic_arguments(type, inferred_vars, msg, context=target)
+ applied = mypy.applytype.apply_generic_arguments(type, non_none_inferred_vars, msg,
+ context=target)
if msg.is_errors():
return None
return applied
def restrict_subtype_away(t: Type, s: Type) -> Type:
- """Return a supertype of (t intersect not s)
+ """Return t minus s.
+
+ If we can't determine a precise result, return a supertype of the
+ ideal result (just t is a valid result).
+
+ This is used for type inference of runtime type checks such as
+ isinstance.
- Currently just remove elements of a union type.
+ Currently this just removes elements of a union type.
"""
if isinstance(t, UnionType):
- new_items = [item for item in t.items if (not is_subtype(item, s)
- or isinstance(item, AnyType))]
+ # Since runtime type checks will ignore type arguments, erase the types.
+ erased_s = erase_type(s)
+ new_items = [item for item in t.items
+ if (not is_proper_subtype(erase_type(item), erased_s)
+ or isinstance(item, AnyType))]
return UnionType.make_union(new_items)
else:
return t
-def is_proper_subtype(t: Type, s: Type) -> bool:
- """Check if t is a proper subtype of s?
+def is_proper_subtype(left: Type, right: Type) -> bool:
+ """Is left a proper subtype of right?
For proper subtypes, there's no need to rely on compatibility due to
- Any types. Any instance type t is also a proper subtype of t.
+ Any types. Every usable type is a proper subtype of itself.
"""
- # FIX tuple types
- if isinstance(t, Instance):
- if isinstance(s, Instance):
- if not t.type.has_base(s.type.fullname()):
+ if isinstance(right, UnionType) and not isinstance(left, UnionType):
+ return any([is_proper_subtype(left, item)
+ for item in right.items])
+ return left.accept(ProperSubtypeVisitor(right))
+
+
+class ProperSubtypeVisitor(TypeVisitor[bool]):
+ def __init__(self, right: Type) -> None:
+ self.right = right
+
+ def visit_unbound_type(self, left: UnboundType) -> bool:
+ # This can be called if there is a bad type annotation. The result probably
+ # doesn't matter much but by returning True we simplify these bad types away
+ # from unions, which could filter out some bogus messages.
+ return True
+
+ def visit_any(self, left: AnyType) -> bool:
+ return isinstance(self.right, AnyType)
+
+ def visit_none_type(self, left: NoneTyp) -> bool:
+ if experiments.STRICT_OPTIONAL:
+ return (isinstance(self.right, NoneTyp) or
+ is_named_instance(self.right, 'builtins.object'))
+ return True
+
+ def visit_uninhabited_type(self, left: UninhabitedType) -> bool:
+ return True
+
+ def visit_erased_type(self, left: ErasedType) -> bool:
+ # This may be encountered during type inference. The result probably doesn't
+ # matter much.
+ return True
+
+ def visit_deleted_type(self, left: DeletedType) -> bool:
+ return True
+
+ def visit_instance(self, left: Instance) -> bool:
+ right = self.right
+ if isinstance(right, Instance):
+ for base in left.type.mro:
+ if base._promote and is_proper_subtype(base._promote, right):
+ return True
+
+ if not left.type.has_base(right.type.fullname()):
return False
- def check_argument(left: Type, right: Type, variance: int) -> bool:
+ def check_argument(leftarg: Type, rightarg: Type, variance: int) -> bool:
if variance == COVARIANT:
- return is_proper_subtype(left, right)
+ return is_proper_subtype(leftarg, rightarg)
elif variance == CONTRAVARIANT:
- return is_proper_subtype(right, left)
+ return is_proper_subtype(rightarg, leftarg)
else:
- return sametypes.is_same_type(left, right)
+ return sametypes.is_same_type(leftarg, rightarg)
# Map left type to corresponding right instances.
- t = map_instance_to_supertype(t, s.type)
+ left = map_instance_to_supertype(left, right.type)
return all(check_argument(ta, ra, tvar.variance) for ta, ra, tvar in
- zip(t.args, s.args, s.type.defn.type_vars))
+ zip(left.args, right.args, right.type.defn.type_vars))
+ return False
+
+ def visit_type_var(self, left: TypeVarType) -> bool:
+ if isinstance(self.right, TypeVarType) and left.id == self.right.id:
+ return True
+ # TODO: Value restrictions
+ return is_proper_subtype(left.upper_bound, self.right)
+
+ def visit_callable_type(self, left: CallableType) -> bool:
+ right = self.right
+ if isinstance(right, CallableType):
+ return is_callable_subtype(
+ left, right,
+ ignore_pos_arg_names=False,
+ use_proper_subtype=True)
+ elif isinstance(right, Overloaded):
+ return all(is_proper_subtype(left, item)
+ for item in right.items())
+ elif isinstance(right, Instance):
+ return is_proper_subtype(left.fallback, right)
+ elif isinstance(right, TypeType):
+ # This is unsound, we don't check the __init__ signature.
+ return left.is_type_obj() and is_proper_subtype(left.ret_type, right.item)
+ return False
+
+ def visit_tuple_type(self, left: TupleType) -> bool:
+ right = self.right
+ if isinstance(right, Instance):
+ if (is_named_instance(right, 'builtins.tuple') or
+ is_named_instance(right, 'typing.Iterable') or
+ is_named_instance(right, 'typing.Container') or
+ is_named_instance(right, 'typing.Sequence') or
+ is_named_instance(right, 'typing.Reversible')):
+ if not right.args:
+ return False
+ iter_type = right.args[0]
+ if is_named_instance(right, 'builtins.tuple') and isinstance(iter_type, AnyType):
+ # TODO: We shouldn't need this special case. This is currently needed
+ # for isinstance(x, tuple), though it's unclear why.
+ return True
+ return all(is_proper_subtype(li, iter_type) for li in left.items)
+ return is_proper_subtype(left.fallback, right)
+ elif isinstance(right, TupleType):
+ if len(left.items) != len(right.items):
+ return False
+ for l, r in zip(left.items, right.items):
+ if not is_proper_subtype(l, r):
+ return False
+ return is_proper_subtype(left.fallback, right.fallback)
+ return False
+
+ def visit_typeddict_type(self, left: TypedDictType) -> bool:
+ right = self.right
+ if isinstance(right, TypedDictType):
+ for name, typ in left.items.items():
+ if name in right.items and not is_same_type(typ, right.items[name]):
+ return False
+ for name, typ in right.items.items():
+ if name not in left.items:
+ return False
+ return True
+ return is_proper_subtype(left.fallback, right)
+
+ def visit_overloaded(self, left: Overloaded) -> bool:
+ # TODO: What's the right thing to do here?
+ return False
+
+ def visit_union_type(self, left: UnionType) -> bool:
+ return all([is_proper_subtype(item, self.right) for item in left.items])
+
+ def visit_partial_type(self, left: PartialType) -> bool:
+ # TODO: What's the right thing to do here?
+ return False
+
+ def visit_type_type(self, left: TypeType) -> bool:
+ # TODO: Handle metaclasses?
+ right = self.right
+ if isinstance(right, TypeType):
+ # This is unsound, we don't check the __init__ signature.
+ return is_proper_subtype(left.item, right.item)
+ if isinstance(right, CallableType):
+ # This is also unsound because of __init__.
+ return right.is_type_obj() and is_proper_subtype(left.item, right.ret_type)
+ if isinstance(right, Instance):
+ if right.type.fullname() == 'builtins.type':
+ # TODO: Strictly speaking, the type builtins.type is considered equivalent to
+ # Type[Any]. However, this would break the is_proper_subtype check in
+ # conditional_type_map for cases like isinstance(x, type) when the type
+ # of x is Type[int]. It's unclear how best to address this.
+ return True
+ if right.type.fullname() == 'builtins.object':
+ return True
return False
- else:
- return sametypes.is_same_type(t, s)
def is_more_precise(t: Type, s: Type) -> bool:
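The new ProperSubtypeVisitor and the erasure in restrict_subtype_away() above mostly surface through isinstance() narrowing of unions. Roughly the user-visible effect, as an illustration only (not a test case from this patch):

    from typing import List, Union

    def f(x: Union[int, List[int]]) -> None:
        if isinstance(x, list):
            x.append(1)  # x narrows to List[int] in this branch
        else:
            # restrict_subtype_away(Union[int, List[int]], list) erases the
            # type arguments and drops the list member, leaving int here.
            print(x + 1)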
diff --git a/mypy/test/__init__.py b/mypy/test/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/mypy/test/collect.py b/mypy/test/collect.py
deleted file mode 100644
index e69de29..0000000
diff --git a/mypy/test/config.py b/mypy/test/config.py
deleted file mode 100644
index 681f866..0000000
--- a/mypy/test/config.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import os
-import os.path
-
-import typing
-
-
-this_file_dir = os.path.dirname(os.path.realpath(__file__))
-PREFIX = os.path.dirname(os.path.dirname(this_file_dir))
-
-# Location of test data files such as test case descriptions.
-test_data_prefix = os.path.join(PREFIX, 'test-data', 'unit')
-
-assert os.path.isdir(test_data_prefix), \
- 'Test data prefix ({}) not set correctly'.format(test_data_prefix)
-
-# Temp directory used for the temp files created when running test cases.
-# This is *within* the tempfile.TemporaryDirectory that is chroot'ed per testcase.
-# It is also hard-coded in numerous places, so don't change it.
-test_temp_dir = 'tmp'
diff --git a/mypy/test/data.py b/mypy/test/data.py
deleted file mode 100644
index 8fa64a5..0000000
--- a/mypy/test/data.py
+++ /dev/null
@@ -1,483 +0,0 @@
-"""Utilities for processing .test files containing test case descriptions."""
-
-import os.path
-import os
-import re
-from os import remove, rmdir
-import shutil
-
-import pytest # type: ignore # no pytest in typeshed
-from typing import Callable, List, Tuple, Set, Optional, Iterator, Any
-
-from mypy.myunit import TestCase, SkipTestCaseException
-
-
-def parse_test_cases(
- path: str,
- perform: Optional[Callable[['DataDrivenTestCase'], None]],
- base_path: str = '.',
- optional_out: bool = False,
- include_path: str = None,
- native_sep: bool = False) -> List['DataDrivenTestCase']:
- """Parse a file with test case descriptions.
-
- Return an array of test cases.
-
- NB this function and DataDrivenTestCase are shared between the
- myunit and pytest codepaths -- if something looks redundant,
- that's likely the reason.
- """
-
- if not include_path:
- include_path = os.path.dirname(path)
- with open(path, encoding='utf-8') as f:
- l = f.readlines()
- for i in range(len(l)):
- l[i] = l[i].rstrip('\n')
- p = parse_test_data(l, path)
- out = [] # type: List[DataDrivenTestCase]
-
- # Process the parsed items. Each item has a header of form [id args],
- # optionally followed by lines of text.
- i = 0
- while i < len(p):
- ok = False
- i0 = i
- if p[i].id == 'case':
- i += 1
-
- files = [] # type: List[Tuple[str, str]] # path and contents
- output_files = [] # type: List[Tuple[str, str]] # path and contents for output files
- tcout = [] # type: List[str] # Regular output errors
- tcout2 = [] # type: List[str] # Output errors for incremental, second run
- stale_modules = None # type: Optional[Set[str]] # module names
- rechecked_modules = None # type: Optional[Set[str]] # module names
- while i < len(p) and p[i].id != 'case':
- if p[i].id == 'file' or p[i].id == 'outfile':
- # Record an extra file needed for the test case.
- arg = p[i].arg
- assert arg is not None
- file_entry = (os.path.join(base_path, arg), '\n'.join(p[i].data))
- if p[i].id == 'file':
- files.append(file_entry)
- elif p[i].id == 'outfile':
- output_files.append(file_entry)
- elif p[i].id in ('builtins', 'builtins_py2'):
- # Use a custom source file for the std module.
- arg = p[i].arg
- assert arg is not None
- mpath = os.path.join(os.path.dirname(path), arg)
- if p[i].id == 'builtins':
- fnam = 'builtins.pyi'
- else:
- # Python 2
- fnam = '__builtin__.pyi'
- with open(mpath) as f:
- files.append((os.path.join(base_path, fnam), f.read()))
- elif p[i].id == 'stale':
- arg = p[i].arg
- if arg is None:
- stale_modules = set()
- else:
- stale_modules = {item.strip() for item in arg.split(',')}
- elif p[i].id == 'rechecked':
- arg = p[i].arg
- if arg is None:
- rechecked_modules = set()
- else:
- rechecked_modules = {item.strip() for item in arg.split(',')}
- elif p[i].id == 'out' or p[i].id == 'out1':
- tcout = p[i].data
- if native_sep and os.path.sep == '\\':
- tcout = [fix_win_path(line) for line in tcout]
- ok = True
- elif p[i].id == 'out2':
- tcout2 = p[i].data
- if native_sep and os.path.sep == '\\':
- tcout2 = [fix_win_path(line) for line in tcout2]
- ok = True
- else:
- raise ValueError(
- 'Invalid section header {} in {} at line {}'.format(
- p[i].id, path, p[i].line))
- i += 1
-
- if rechecked_modules is None:
- # If the set of rechecked modules isn't specified, make it the same as the set of
- # modules with a stale public interface.
- rechecked_modules = stale_modules
- if (stale_modules is not None
- and rechecked_modules is not None
- and not stale_modules.issubset(rechecked_modules)):
- raise ValueError(
- 'Stale modules must be a subset of rechecked modules ({})'.format(path))
-
- if optional_out:
- ok = True
-
- if ok:
- input = expand_includes(p[i0].data, include_path)
- expand_errors(input, tcout, 'main')
- lastline = p[i].line if i < len(p) else p[i - 1].line + 9999
- tc = DataDrivenTestCase(p[i0].arg, input, tcout, tcout2, path,
- p[i0].line, lastline, perform,
- files, output_files, stale_modules,
- rechecked_modules)
- out.append(tc)
- if not ok:
- raise ValueError(
- '{}, line {}: Error in test case description'.format(
- path, p[i0].line))
-
- return out
-
-
-class DataDrivenTestCase(TestCase):
- input = None # type: List[str]
- output = None # type: List[str]
-
- file = ''
- line = 0
-
- # (file path, file content) tuples
- files = None # type: List[Tuple[str, str]]
- expected_stale_modules = None # type: Optional[Set[str]]
-
- clean_up = None # type: List[Tuple[bool, str]]
-
- def __init__(self,
- name: str,
- input: List[str],
- output: List[str],
- output2: List[str],
- file: str,
- line: int,
- lastline: int,
- perform: Callable[['DataDrivenTestCase'], None],
- files: List[Tuple[str, str]],
- output_files: List[Tuple[str, str]],
- expected_stale_modules: Optional[Set[str]],
- expected_rechecked_modules: Optional[Set[str]],
- ) -> None:
- super().__init__(name)
- self.input = input
- self.output = output
- self.output2 = output2
- self.lastline = lastline
- self.file = file
- self.line = line
- self.perform = perform
- self.files = files
- self.output_files = output_files
- self.expected_stale_modules = expected_stale_modules
- self.expected_rechecked_modules = expected_rechecked_modules
-
- def set_up(self) -> None:
- super().set_up()
- encountered_files = set()
- self.clean_up = []
- for path, content in self.files:
- dir = os.path.dirname(path)
- for d in self.add_dirs(dir):
- self.clean_up.append((True, d))
- with open(path, 'w') as f:
- f.write(content)
- self.clean_up.append((False, path))
- encountered_files.add(path)
- if path.endswith(".next"):
- # Make sure new files introduced in the second run are accounted for
- renamed_path = path[:-5]
- if renamed_path not in encountered_files:
- encountered_files.add(renamed_path)
- self.clean_up.append((False, renamed_path))
- for path, _ in self.output_files:
- # Create directories for expected output and mark them to be cleaned up at the end
- # of the test case.
- dir = os.path.dirname(path)
- for d in self.add_dirs(dir):
- self.clean_up.append((True, d))
- self.clean_up.append((False, path))
-
- def add_dirs(self, dir: str) -> List[str]:
- """Add all subdirectories required to create dir.
-
- Return an array of the created directories in the order of creation.
- """
- if dir == '' or os.path.isdir(dir):
- return []
- else:
- dirs = self.add_dirs(os.path.dirname(dir)) + [dir]
- os.mkdir(dir)
- return dirs
-
- def run(self) -> None:
- if self.name.endswith('-skip'):
- raise SkipTestCaseException()
- else:
- self.perform(self)
-
- def tear_down(self) -> None:
- # First remove files.
- for is_dir, path in reversed(self.clean_up):
- if not is_dir:
- remove(path)
- # Then remove directories.
- for is_dir, path in reversed(self.clean_up):
- if is_dir:
- pycache = os.path.join(path, '__pycache__')
- if os.path.isdir(pycache):
- shutil.rmtree(pycache)
- try:
- rmdir(path)
- except OSError as error:
- print(' ** Error removing directory %s -- contents:' % path)
- for item in os.listdir(path):
- print(' ', item)
- # Most likely, there are some files in the
- # directory. Use rmtree to nuke the directory, but
- # fail the test case anyway, since this seems like
- # a bug in a test case -- we shouldn't leave
- # garbage lying around. By nuking the directory,
- # the next test run hopefully passes.
- path = error.filename
- # Be defensive -- only call rmtree if we're sure we aren't removing anything
- # valuable.
- if path.startswith('tmp/') and os.path.isdir(path):
- shutil.rmtree(path)
- raise
- super().tear_down()
-
-
-class TestItem:
- """Parsed test caseitem.
-
- An item is of the form
- [id arg]
- .. data ..
- """
-
- id = ''
- arg = '' # type: Optional[str]
-
- # Text data, array of 8-bit strings
- data = None # type: List[str]
-
- file = ''
- line = 0 # Line number in file
-
- def __init__(self, id: str, arg: Optional[str], data: List[str], file: str,
- line: int) -> None:
- self.id = id
- self.arg = arg
- self.data = data
- self.file = file
- self.line = line
-
-
-def parse_test_data(l: List[str], fnam: str) -> List[TestItem]:
- """Parse a list of lines that represent a sequence of test items."""
-
- ret = [] # type: List[TestItem]
- data = [] # type: List[str]
-
- id = None # type: Optional[str]
- arg = None # type: Optional[str]
-
- i = 0
- i0 = 0
- while i < len(l):
- s = l[i].strip()
-
- if l[i].startswith('[') and s.endswith(']') and not s.startswith('[['):
- if id:
- data = collapse_line_continuation(data)
- data = strip_list(data)
- ret.append(TestItem(id, arg, strip_list(data), fnam, i0 + 1))
- i0 = i
- id = s[1:-1]
- arg = None
- if ' ' in id:
- arg = id[id.index(' ') + 1:]
- id = id[:id.index(' ')]
- data = []
- elif l[i].startswith('[['):
- data.append(l[i][1:])
- elif not l[i].startswith('--'):
- data.append(l[i])
- elif l[i].startswith('----'):
- data.append(l[i][2:])
- i += 1
-
- # Process the last item.
- if id:
- data = collapse_line_continuation(data)
- data = strip_list(data)
- ret.append(TestItem(id, arg, data, fnam, i0 + 1))
-
- return ret
-
-
-def strip_list(l: List[str]) -> List[str]:
- """Return a stripped copy of l.
-
- Strip whitespace at the end of all lines, and strip all empty
- lines from the end of the array.
- """
-
- r = [] # type: List[str]
- for s in l:
- # Strip spaces at end of line
- r.append(re.sub(r'\s+$', '', s))
-
- while len(r) > 0 and r[-1] == '':
- r.pop()
-
- return r
-
-
-def collapse_line_continuation(l: List[str]) -> List[str]:
- r = [] # type: List[str]
- cont = False
- for s in l:
- ss = re.sub(r'\\$', '', s)
- if cont:
- r[-1] += re.sub('^ +', '', ss)
- else:
- r.append(ss)
- cont = s.endswith('\\')
- return r
-
-
-def expand_includes(a: List[str], base_path: str) -> List[str]:
- """Expand @includes within a list of lines.
-
- Replace all lies starting with @include with the contents of the
- file name following the prefix. Look for the files in base_path.
- """
-
- res = [] # type: List[str]
- for s in a:
- if s.startswith('@include '):
- fn = s.split(' ', 1)[1].strip()
- with open(os.path.join(base_path, fn)) as f:
- res.extend(f.readlines())
- else:
- res.append(s)
- return res
-
-
-def expand_errors(input: List[str], output: List[str], fnam: str) -> None:
- """Transform comments such as '# E: message' or
- '# E:3: message' in input.
-
- The result is lines like 'fnam:line: error: message'.
- """
-
- for i in range(len(input)):
- # The first in the split things isn't a comment
- for possible_err_comment in input[i].split('#')[1:]:
- m = re.search(
- '^([ENW]):((?P<col>\d+):)? (?P<message>.*)$',
- possible_err_comment.strip())
- if m:
- if m.group(1) == 'E':
- severity = 'error'
- elif m.group(1) == 'N':
- severity = 'note'
- elif m.group(1) == 'W':
- severity = 'warning'
- col = m.group('col')
- if col is None:
- output.append(
- '{}:{}: {}: {}'.format(fnam, i + 1, severity, m.group('message')))
- else:
- output.append('{}:{}:{}: {}: {}'.format(
- fnam, i + 1, col, severity, m.group('message')))
-
-
-def fix_win_path(line: str) -> str:
- r"""Changes paths to Windows paths in error messages.
-
- E.g. foo/bar.py -> foo\bar.py.
- """
- m = re.match(r'^([\S/]+):(\d+:)?(\s+.*)', line)
- if not m:
- return line
- else:
- filename, lineno, message = m.groups()
- return '{}:{}{}'.format(filename.replace('/', '\\'),
- lineno or '', message)
-
-
-##
-#
-# pytest setup
-#
-##
-
-
-def pytest_addoption(parser: Any) -> None:
- group = parser.getgroup('mypy')
- group.addoption('--update-data', action='store_true', default=False,
- help='Update test data to reflect actual output'
- ' (supported only for certain tests)')
-
-
-def pytest_pycollect_makeitem(collector: Any, name: str, obj: Any) -> Any:
- if not isinstance(obj, type) or not issubclass(obj, DataSuite):
- return None
- return MypyDataSuite(name, parent=collector)
-
-
-class MypyDataSuite(pytest.Class): # type: ignore # inheriting from Any
- def collect(self) -> Iterator['MypyDataCase']:
- for case in self.obj.cases():
- yield MypyDataCase(case.name, self, case)
-
-
-class MypyDataCase(pytest.Item): # type: ignore # inheriting from Any
- def __init__(self, name: str, parent: MypyDataSuite, obj: DataDrivenTestCase) -> None:
- self.skip = False
- if name.endswith('-skip'):
- self.skip = True
- name = name[:-len('-skip')]
-
- super().__init__(name, parent)
- self.obj = obj
-
- def runtest(self) -> None:
- if self.skip:
- pytest.skip()
- update_data = self.config.getoption('--update-data', False)
- self.parent.obj(update_data=update_data).run_case(self.obj)
-
- def setup(self) -> None:
- self.obj.set_up()
-
- def teardown(self) -> None:
- self.obj.tear_down()
-
- def reportinfo(self) -> Tuple[str, int, str]:
- return self.obj.file, self.obj.line, self.obj.name
-
- def repr_failure(self, excinfo: Any) -> str:
- if excinfo.errisinstance(SystemExit):
- # We assume that before doing exit() (which raises SystemExit) we've printed
- # enough context about what happened so that a stack trace is not useful.
- # In particular, uncaught exceptions during semantic analysis or type checking
- # call exit() and they already print out a stack trace.
- excrepr = excinfo.exconly()
- else:
- self.parent._prunetraceback(excinfo)
- excrepr = excinfo.getrepr(style='short')
-
- return "data: {}:{}:\n{}".format(self.obj.file, self.obj.line, excrepr)
-
-
-class DataSuite:
- @classmethod
- def cases(cls) -> List[DataDrivenTestCase]:
- return []
-
- def run_case(self, testcase: DataDrivenTestCase) -> None:
- raise NotImplementedError
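
As a rough, standalone illustration of the '# E: message' comment convention that expand_errors handles above, the following sketch uses only the standard library; the expand() helper and the sample input are hypothetical, not mypy code:

    import re

    # Map the one-letter codes used in test comments to mypy severities.
    SEVERITY = {'E': 'error', 'N': 'note', 'W': 'warning'}

    def expand(lines, fnam='main'):
        out = []
        for i, line in enumerate(lines):
            for comment in line.split('#')[1:]:
                m = re.search(r'^([ENW]):((?P<col>\d+):)? (?P<message>.*)$',
                              comment.strip())
                if m:
                    col = m.group('col')
                    loc = '{}:{}'.format(fnam, i + 1) if col is None \
                        else '{}:{}:{}'.format(fnam, i + 1, col)
                    out.append('{}: {}: {}'.format(loc, SEVERITY[m.group(1)],
                                                   m.group('message')))
        return out

    print(expand(["x = 1 + ''  # E: Unsupported operand types"]))
    # ['main:1: error: Unsupported operand types']
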
diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py
deleted file mode 100644
index 32d889b..0000000
--- a/mypy/test/helpers.py
+++ /dev/null
@@ -1,285 +0,0 @@
-import sys
-import re
-import os
-
-from typing import List, Dict, Tuple
-
-from mypy import defaults
-from mypy.myunit import AssertionFailure
-from mypy.test.data import DataDrivenTestCase
-
-
-# assert_string_arrays_equal displays a special line alignment helper message if
-# the first differing line has at least this many characters.
-MIN_LINE_LENGTH_FOR_ALIGNMENT = 5
-
-
-def assert_string_arrays_equal(expected: List[str], actual: List[str],
- msg: str) -> None:
- """Assert that two string arrays are equal.
-
- Display any differences in a human-readable form.
- """
-
- actual = clean_up(actual)
-
- if actual != expected:
- num_skip_start = num_skipped_prefix_lines(expected, actual)
- num_skip_end = num_skipped_suffix_lines(expected, actual)
-
- sys.stderr.write('Expected:\n')
-
- # If we omit some lines at the beginning, indicate it by displaying a line
- # with '...'.
- if num_skip_start > 0:
- sys.stderr.write(' ...\n')
-
- # Keep track of the first different line.
- first_diff = -1
-
- # Display at most this many leading characters of identical lines.
- width = 75
-
- for i in range(num_skip_start, len(expected) - num_skip_end):
- if i >= len(actual) or expected[i] != actual[i]:
- if first_diff < 0:
- first_diff = i
- sys.stderr.write(' {:<45} (diff)'.format(expected[i]))
- else:
- e = expected[i]
- sys.stderr.write(' ' + e[:width])
- if len(e) > width:
- sys.stderr.write('...')
- sys.stderr.write('\n')
- if num_skip_end > 0:
- sys.stderr.write(' ...\n')
-
- sys.stderr.write('Actual:\n')
-
- if num_skip_start > 0:
- sys.stderr.write(' ...\n')
-
- for j in range(num_skip_start, len(actual) - num_skip_end):
- if j >= len(expected) or expected[j] != actual[j]:
- sys.stderr.write(' {:<45} (diff)'.format(actual[j]))
- else:
- a = actual[j]
- sys.stderr.write(' ' + a[:width])
- if len(a) > width:
- sys.stderr.write('...')
- sys.stderr.write('\n')
- if actual == []:
- sys.stderr.write(' (empty)\n')
- if num_skip_end > 0:
- sys.stderr.write(' ...\n')
-
- sys.stderr.write('\n')
-
- if first_diff >= 0 and first_diff < len(actual) and (
- len(expected[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT
- or len(actual[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT):
- # Display message that helps visualize the differences between two
- # long lines.
- show_align_message(expected[first_diff], actual[first_diff])
-
- raise AssertionFailure(msg)
-
-
-def update_testcase_output(testcase: DataDrivenTestCase, output: List[str]) -> None:
- testcase_path = os.path.join(testcase.old_cwd, testcase.file)
- with open(testcase_path) as f:
- data_lines = f.read().splitlines()
- test = '\n'.join(data_lines[testcase.line:testcase.lastline])
-
- mapping = {} # type: Dict[str, List[str]]
- for old, new in zip(testcase.output, output):
- PREFIX = 'error:'
- ind = old.find(PREFIX)
- if ind != -1 and old[:ind] == new[:ind]:
- old, new = old[ind + len(PREFIX):], new[ind + len(PREFIX):]
- mapping.setdefault(old, []).append(new)
-
- for old in mapping:
- if test.count(old) == len(mapping[old]):
- betweens = test.split(old)
-
- # Interleave betweens and mapping[old]
- from itertools import chain
- interleaved = [betweens[0]] + \
- list(chain.from_iterable(zip(mapping[old], betweens[1:])))
- test = ''.join(interleaved)
-
- data_lines[testcase.line:testcase.lastline] = [test]
- data = '\n'.join(data_lines)
- with open(testcase_path, 'w') as f:
- print(data, file=f)
-
-
-def show_align_message(s1: str, s2: str) -> None:
- """Align s1 and s2 so that the their first difference is highlighted.
-
- For example, if s1 is 'foobar' and s2 is 'fobar', display the
- following lines:
-
- E: foobar
- A: fobar
- ^
-
- If s1 and s2 are long, only display a fragment of the strings around the
- first difference. If s1 is very short, do nothing.
- """
-
- # Seeing what went wrong is trivial even without alignment if the expected
- # string is very short. In this case do nothing to simplify output.
- if len(s1) < 4:
- return
-
- maxw = 72 # Maximum number of characters shown
-
- sys.stderr.write('Alignment of first line difference:\n')
-
- trunc = False
- while s1[:30] == s2[:30]:
- s1 = s1[10:]
- s2 = s2[10:]
- trunc = True
-
- if trunc:
- s1 = '...' + s1
- s2 = '...' + s2
-
- max_len = max(len(s1), len(s2))
- extra = ''
- if max_len > maxw:
- extra = '...'
-
- # Write a chunk of both lines, aligned.
- sys.stderr.write(' E: {}{}\n'.format(s1[:maxw], extra))
- sys.stderr.write(' A: {}{}\n'.format(s2[:maxw], extra))
- # Write an indicator character under the different columns.
- sys.stderr.write(' ')
- for j in range(min(maxw, max(len(s1), len(s2)))):
- if s1[j:j + 1] != s2[j:j + 1]:
- sys.stderr.write('^') # Difference
- break
- else:
- sys.stderr.write(' ') # Equal
- sys.stderr.write('\n')
-
-
-def assert_string_arrays_equal_wildcards(expected: List[str],
- actual: List[str],
- msg: str) -> None:
- # Like above, but let a line with only '...' in expected match any number
- # of lines in actual.
- actual = clean_up(actual)
-
- while actual != [] and actual[-1] == '':
- actual = actual[:-1]
-
- # Expand "..." wildcards away.
- expected = match_array(expected, actual)
- assert_string_arrays_equal(expected, actual, msg)
-
-
-def clean_up(a: List[str]) -> List[str]:
- """Remove common directory prefix from all strings in a.
-
- This uses a naive string replace; it seems to work well enough. Also
- remove trailing carriage returns.
- """
- res = []
- for s in a:
- prefix = os.sep
- ss = s
- for p in prefix, prefix.replace(os.sep, '/'):
- if p != '/' and p != '//' and p != '\\' and p != '\\\\':
- ss = ss.replace(p, '')
- # Ignore spaces at end of line.
- ss = re.sub(' +$', '', ss)
- res.append(re.sub('\\r$', '', ss))
- return res
-
-
-def match_array(pattern: List[str], target: List[str]) -> List[str]:
- """Expand '...' wildcards in pattern by matching against target."""
-
- res = [] # type: List[str]
- i = 0
- j = 0
-
- while i < len(pattern):
- if pattern[i] == '...':
- # Wildcard in pattern.
- if i + 1 == len(pattern):
- # Wildcard at end of pattern; match the rest of target.
- res.extend(target[j:])
- # Finished.
- break
- else:
- # Must find the instance of the next pattern line in target.
- jj = j
- while jj < len(target):
- if target[jj] == pattern[i + 1]:
- break
- jj += 1
- if jj == len(target):
- # No match. Get out.
- res.extend(pattern[i:])
- break
- res.extend(target[j:jj])
- i += 1
- j = jj
- elif (j < len(target) and (pattern[i] == target[j]
- or (i + 1 < len(pattern)
- and j + 1 < len(target)
- and pattern[i + 1] == target[j + 1]))):
- # In sync; advance one line. The above condition keeps sync also if
- # only a single line is different, but loses it if two consecutive
- # lines fail to match.
- res.append(pattern[i])
- i += 1
- j += 1
- else:
- # Out of sync. Get out.
- res.extend(pattern[i:])
- break
- return res
-
-
-def num_skipped_prefix_lines(a1: List[str], a2: List[str]) -> int:
- num_eq = 0
- while num_eq < min(len(a1), len(a2)) and a1[num_eq] == a2[num_eq]:
- num_eq += 1
- return max(0, num_eq - 4)
-
-
-def num_skipped_suffix_lines(a1: List[str], a2: List[str]) -> int:
- num_eq = 0
- while (num_eq < min(len(a1), len(a2))
- and a1[-num_eq - 1] == a2[-num_eq - 1]):
- num_eq += 1
- return max(0, num_eq - 4)
-
-
-def testfile_pyversion(path: str) -> Tuple[int, int]:
- if path.endswith('python2.test'):
- return defaults.PYTHON2_VERSION
- else:
- return defaults.PYTHON3_VERSION
-
-
-def testcase_pyversion(path: str, testcase_name: str) -> Tuple[int, int]:
- if testcase_name.endswith('python2'):
- return defaults.PYTHON2_VERSION
- else:
- return testfile_pyversion(path)
-
-
-def normalize_error_messages(messages: List[str]) -> List[str]:
- """Translate an array of error messages to use / as path separator."""
-
- a = []
- for m in messages:
- a.append(m.replace(os.sep, '/'))
- return a
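
show_align_message above prints an E:/A: pair with a caret under the first differing column. A minimal standalone sketch of the same idea (the align() helper is hypothetical, standard library only):

    import sys

    def align(expected: str, actual: str) -> None:
        # Print both strings and a '^' under the first column where they differ.
        sys.stderr.write(' E: {}\n'.format(expected))
        sys.stderr.write(' A: {}\n'.format(actual))
        marker = ' '
        for j in range(max(len(expected), len(actual))):
            if expected[j:j + 1] != actual[j:j + 1]:
                marker = ' ' * j + '^'
                break
        sys.stderr.write('    {}\n'.format(marker))

    align('foobar', 'fobar')  # prints the pair with '^' under the third column
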
diff --git a/mypy/test/testargs.py b/mypy/test/testargs.py
deleted file mode 100644
index 4e27e37..0000000
--- a/mypy/test/testargs.py
+++ /dev/null
@@ -1,18 +0,0 @@
-"""Ensure the argparse parser and Options class are in sync.
-
-In particular, verify that the argparse defaults are the same as the Options
-defaults, and that argparse doesn't assign any new members to the Options
-object it creates.
-"""
-
-import typing
-from mypy.myunit import Suite, assert_equal
-from mypy.options import Options, BuildType
-from mypy.main import process_options
-
-
-class ArgSuite(Suite):
- def test_coherence(self) -> None:
- options = Options()
- _, parsed_options = process_options([], require_targets=False)
- assert_equal(options, parsed_options)
diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py
deleted file mode 100644
index 6f3b156..0000000
--- a/mypy/test/testcheck.py
+++ /dev/null
@@ -1,332 +0,0 @@
-"""Type checker test cases"""
-
-import os.path
-import re
-import shutil
-import sys
-import time
-import typed_ast
-import typed_ast.ast35
-
-from typing import Dict, List, Optional, Set, Tuple
-
-from mypy import build, defaults
-from mypy.main import parse_version, process_options
-from mypy.build import BuildSource, find_module_clear_caches
-from mypy.myunit import AssertionFailure
-from mypy.test.config import test_temp_dir, test_data_prefix
-from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
-from mypy.test.helpers import (
- assert_string_arrays_equal, normalize_error_messages,
- testcase_pyversion, update_testcase_output,
-)
-from mypy.errors import CompileError
-from mypy.options import Options
-
-from mypy import experiments
-
-# List of files that contain test case descriptions.
-files = [
-]
-fast_parser_files = [
- 'check-basic.test',
- 'check-callable.test',
- 'check-classes.test',
- 'check-statements.test',
- 'check-generics.test',
- 'check-dynamic-typing.test',
- 'check-inference.test',
- 'check-inference-context.test',
- 'check-kwargs.test',
- 'check-overloading.test',
- 'check-type-checks.test',
- 'check-abstract.test',
- 'check-multiple-inheritance.test',
- 'check-super.test',
- 'check-modules.test',
- 'check-typevar-values.test',
- 'check-unsupported.test',
- 'check-unreachable-code.test',
- 'check-unions.test',
- 'check-isinstance.test',
- 'check-lists.test',
- 'check-namedtuple.test',
- 'check-typeddict.test',
- 'check-type-aliases.test',
- 'check-ignore.test',
- 'check-type-promotion.test',
- 'check-semanal-error.test',
- 'check-flags.test',
- 'check-incremental.test',
- 'check-bound.test',
- 'check-optional.test',
- 'check-fastparse.test',
- 'check-warnings.test',
- 'check-async-await.test',
- 'check-newtype.test',
- 'check-class-namedtuple.test',
- 'check-selftype.test',
- 'check-python2.test',
- 'check-columns.test',
- 'check-functions.test',
- 'check-tuples.test',
- 'check-expressions.test',
- 'check-generic-subtyping.test',
- 'check-varargs.test',
-]
-
-if 'annotation' in typed_ast.ast35.Assign._fields:
- fast_parser_files.append('check-newsyntax.test')
-
-if 'contains_underscores' in typed_ast.ast35.Num._fields:
- fast_parser_files.append('check-underscores.test')
-
-files.extend(fast_parser_files)
-
-
-class TypeCheckSuite(DataSuite):
- def __init__(self, *, update_data: bool = False) -> None:
- self.update_data = update_data
-
- @classmethod
- def cases(cls) -> List[DataDrivenTestCase]:
- c = [] # type: List[DataDrivenTestCase]
- for f in files:
- c += parse_test_cases(os.path.join(test_data_prefix, f),
- None, test_temp_dir, True)
- return c
-
- def run_case(self, testcase: DataDrivenTestCase) -> None:
- incremental = 'incremental' in testcase.name.lower() or 'incremental' in testcase.file
- optional = 'optional' in testcase.file
- if incremental:
- # Incremental tests are run once with a cold cache, once with a warm cache.
- # Expect success on first run, errors from testcase.output (if any) on second run.
- # We briefly sleep to make sure file timestamps are distinct.
- self.clear_cache()
- self.run_case_once(testcase, 1)
- self.run_case_once(testcase, 2)
- elif optional:
- try:
- experiments.STRICT_OPTIONAL = True
- self.run_case_once(testcase)
- finally:
- experiments.STRICT_OPTIONAL = False
- else:
- try:
- old_strict_optional = experiments.STRICT_OPTIONAL
- self.run_case_once(testcase)
- finally:
- experiments.STRICT_OPTIONAL = old_strict_optional
-
- def clear_cache(self) -> None:
- dn = defaults.CACHE_DIR
-
- if os.path.exists(dn):
- shutil.rmtree(dn)
-
- def run_case_once(self, testcase: DataDrivenTestCase, incremental: int = 0) -> None:
- find_module_clear_caches()
- original_program_text = '\n'.join(testcase.input)
- module_data = self.parse_module(original_program_text, incremental)
-
- if incremental:
- if incremental == 1:
- # In run 1, copy program text to program file.
- for module_name, program_path, program_text in module_data:
- if module_name == '__main__':
- with open(program_path, 'w') as f:
- f.write(program_text)
- break
- elif incremental == 2:
- # In run 2, copy *.next files to * files.
- for dn, dirs, files in os.walk(os.curdir):
- for file in files:
- if file.endswith('.next'):
- full = os.path.join(dn, file)
- target = full[:-5]
- shutil.copy(full, target)
-
- # In some systems, mtime has a resolution of 1 second which can cause
- # annoying-to-debug issues when a file has the same size after a
- # change. We manually set the mtime to circumvent this.
- new_time = os.stat(target).st_mtime + 1
- os.utime(target, times=(new_time, new_time))
-
- # Parse options after moving files (in case mypy.ini is being moved).
- options = self.parse_options(original_program_text, testcase)
- options.use_builtins_fixtures = True
- options.show_traceback = True
- if 'optional' in testcase.file:
- options.strict_optional = True
- if incremental:
- options.incremental = True
- if os.path.split(testcase.file)[1] in fast_parser_files:
- options.fast_parser = True
-
- sources = []
- for module_name, program_path, program_text in module_data:
- # In incremental mode, pass None as the text so the module is re-read from disk
- sources.append(BuildSource(program_path, module_name,
- None if incremental else program_text))
- res = None
- try:
- res = build.build(sources=sources,
- options=options,
- alt_lib_path=test_temp_dir)
- a = res.errors
- except CompileError as e:
- a = e.messages
- a = normalize_error_messages(a)
-
- # Make sure error messages match
- if incremental == 0:
- msg = 'Invalid type checker output ({}, line {})'
- output = testcase.output
- elif incremental == 1:
- msg = 'Invalid type checker output in incremental, run 1 ({}, line {})'
- output = testcase.output
- elif incremental == 2:
- msg = 'Invalid type checker output in incremental, run 2 ({}, line {})'
- output = testcase.output2
- else:
- raise AssertionError()
-
- if output != a and self.update_data:
- update_testcase_output(testcase, a)
- assert_string_arrays_equal(output, a, msg.format(testcase.file, testcase.line))
-
- if incremental and res:
- if options.follow_imports == 'normal' and testcase.output is None:
- self.verify_cache(module_data, a, res.manager)
- if incremental == 2:
- self.check_module_equivalence(
- 'rechecked',
- testcase.expected_rechecked_modules,
- res.manager.rechecked_modules)
- self.check_module_equivalence(
- 'stale',
- testcase.expected_stale_modules,
- res.manager.stale_modules)
-
- def check_module_equivalence(self, name: str,
- expected: Optional[Set[str]], actual: Set[str]) -> None:
- if expected is not None:
- assert_string_arrays_equal(
- list(sorted(expected)),
- list(sorted(actual.difference({"__main__"}))),
- 'Set of {} modules does not match expected set'.format(name))
-
- def verify_cache(self, module_data: List[Tuple[str, str, str]], a: List[str],
- manager: build.BuildManager) -> None:
- # There should be valid cache metadata for each module except
- # those in error_paths; for those there should not be.
- #
- # NOTE: When A imports B and there's an error in B, the cache
- # data for B is invalidated, but the cache data for A remains.
- # However build.process_graphs() will ignore A's cache data.
- #
- # Also note that when A imports B, and there's an error in A
- # _due to a valid change in B_, the cache data for B will be
- # invalidated and updated, but the old cache data for A will
- # remain unchanged. As before, build.process_graphs() will
- # ignore A's (old) cache data.
- error_paths = self.find_error_paths(a)
- modules = self.find_module_files()
- modules.update({module_name: path for module_name, path, text in module_data})
- missing_paths = self.find_missing_cache_files(modules, manager)
- if not missing_paths.issubset(error_paths):
- raise AssertionFailure("cache data discrepancy %s != %s" %
- (missing_paths, error_paths))
-
- def find_error_paths(self, a: List[str]) -> Set[str]:
- hits = set()
- for line in a:
- m = re.match(r'([^\s:]+):\d+: error:', line)
- if m:
- p = m.group(1).replace('/', os.path.sep)
- hits.add(p)
- return hits
-
- def find_module_files(self) -> Dict[str, str]:
- modules = {}
- for dn, dirs, files in os.walk(test_temp_dir):
- dnparts = dn.split(os.sep)
- assert dnparts[0] == test_temp_dir
- del dnparts[0]
- for file in files:
- if file.endswith('.py'):
- if file == "__init__.py":
- # If the file path is `a/b/__init__.py`, exclude the file name
- # and make sure the module id is just `a.b`, not `a.b.__init__`.
- id = '.'.join(dnparts)
- else:
- base, ext = os.path.splitext(file)
- id = '.'.join(dnparts + [base])
- modules[id] = os.path.join(dn, file)
- return modules
-
- def find_missing_cache_files(self, modules: Dict[str, str],
- manager: build.BuildManager) -> Set[str]:
- missing = {}
- for id, path in modules.items():
- meta = build.find_cache_meta(id, path, manager)
- if not build.is_meta_fresh(meta, id, path, manager):
- missing[id] = path
- return set(missing.values())
-
- def parse_module(self, program_text: str, incremental: int = 0) -> List[Tuple[str, str, str]]:
- """Return the module and program names for a test case.
-
- Normally, the unit tests will parse the default ('__main__')
- module and follow all the imports listed there. You can override
- this behavior and instruct the tests to check multiple modules
- by using a comment like this in the test case input:
-
- # cmd: mypy -m foo.bar foo.baz
-
- Return a list of tuples (module name, file name, program text).
- """
- m = re.search('# cmd: mypy -m ([a-zA-Z0-9_. ]+)$', program_text, flags=re.MULTILINE)
- m2 = re.search('# cmd2: mypy -m ([a-zA-Z0-9_. ]+)$', program_text, flags=re.MULTILINE)
- if m2 is not None and incremental == 2:
- # Optionally return a different command if in the second
- # stage of incremental mode, otherwise default to reusing
- # the original cmd.
- m = m2
-
- if m:
- # The test case wants to use a non-default main
- # module. Look up the module and give it as the thing to
- # analyze.
- module_names = m.group(1)
- out = []
- for module_name in module_names.split(' '):
- path = build.find_module(module_name, [test_temp_dir])
- with open(path) as f:
- program_text = f.read()
- out.append((module_name, path, program_text))
- return out
- else:
- return [('__main__', 'main', program_text)]
-
- def parse_options(self, program_text: str, testcase: DataDrivenTestCase) -> Options:
- options = Options()
- flags = re.search('# flags: (.*)$', program_text, flags=re.MULTILINE)
-
- flag_list = None
- if flags:
- flag_list = flags.group(1).split()
- targets, options = process_options(flag_list, require_targets=False)
- if targets:
- # TODO: support specifying targets via the flags pragma
- raise RuntimeError('Specifying targets via the flags pragma is not supported.')
- else:
- options = Options()
-
- # Allow custom python version to override testcase_pyversion
- if (not flag_list or
- all(flag not in flag_list for flag in ['--python-version', '-2', '--py2'])):
- options.python_version = testcase_pyversion(testcase.file, testcase.name)
-
- return options
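
parse_options above pulls mypy flags from a '# flags: ...' pragma in the test program text. The parsing step on its own, as a small sketch (the parse_flags() name and sample program are hypothetical):

    import re

    def parse_flags(program_text: str):
        # Look for a "# flags: ..." pragma anywhere in the test program text.
        m = re.search('# flags: (.*)$', program_text, flags=re.MULTILINE)
        return m.group(1).split() if m else []

    print(parse_flags('# flags: --fast-parser --strict-optional\nx = 1\n'))
    # ['--fast-parser', '--strict-optional']
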
diff --git a/mypy/test/testcmdline.py b/mypy/test/testcmdline.py
deleted file mode 100644
index f2ddc9b..0000000
--- a/mypy/test/testcmdline.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""Test cases for the command line.
-
-To begin, we test that "mypy <directory>[/]" always recurses down the
-whole tree.
-"""
-
-import os
-import re
-import subprocess
-import sys
-
-from typing import Tuple, List, Dict, Set
-
-from mypy.myunit import Suite, SkipTestCaseException, AssertionFailure
-from mypy.test.config import test_data_prefix, test_temp_dir
-from mypy.test.data import parse_test_cases, DataDrivenTestCase
-from mypy.test.helpers import assert_string_arrays_equal
-from mypy.version import __version__, base_version
-
-# Path to Python 3 interpreter
-python3_path = sys.executable
-
-# Files containing test case descriptions.
-cmdline_files = ['cmdline.test']
-
-
-class PythonEvaluationSuite(Suite):
-
- def cases(self) -> List[DataDrivenTestCase]:
- c = [] # type: List[DataDrivenTestCase]
- for f in cmdline_files:
- c += parse_test_cases(os.path.join(test_data_prefix, f),
- test_python_evaluation,
- base_path=test_temp_dir,
- optional_out=True,
- native_sep=True)
- return c
-
-
-def test_python_evaluation(testcase: DataDrivenTestCase) -> None:
- # Write the program to a file.
- program = '_program.py'
- program_path = os.path.join(test_temp_dir, program)
- with open(program_path, 'w') as file:
- for s in testcase.input:
- file.write('{}\n'.format(s))
- args = parse_args(testcase.input[0])
- args.append('--show-traceback')
- # Type check the program.
- fixed = [python3_path,
- os.path.join(testcase.old_cwd, 'scripts', 'mypy')]
- process = subprocess.Popen(fixed + args,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- cwd=test_temp_dir)
- outb = process.stdout.read()
- # Split output into lines.
- out = [s.rstrip('\n\r') for s in str(outb, 'utf8').splitlines()]
- # Remove temp file.
- os.remove(program_path)
- # Compare actual output to expected.
- if testcase.output_files:
- for path, expected_content in testcase.output_files:
- if not os.path.exists(path):
- raise AssertionFailure(
- 'Expected file {} was not produced by test case'.format(path))
- with open(path, 'r') as output_file:
- actual_output_content = output_file.read().splitlines()
- normalized_output = normalize_file_output(actual_output_content,
- os.path.abspath(test_temp_dir))
- assert_string_arrays_equal(expected_content.splitlines(), normalized_output,
- 'Output file {} did not match its expected output'.format(
- path))
- else:
- assert_string_arrays_equal(testcase.output, out,
- 'Invalid output ({}, line {})'.format(
- testcase.file, testcase.line))
-
-
-def parse_args(line: str) -> List[str]:
- """Parse the first line of the program for the command line.
-
- This should have the form
-
- # cmd: mypy <options>
-
- For example:
-
- # cmd: mypy pkg/
- """
- m = re.match('# cmd: mypy (.*)$', line)
- if not m:
- return [] # No args; mypy will spit out an error.
- return m.group(1).split()
-
-
-def normalize_file_output(content: List[str], current_abs_path: str) -> List[str]:
- """Normalize file output for comparison."""
- timestamp_regex = re.compile(r'\d{10}')
- result = [x.replace(current_abs_path, '$PWD') for x in content]
- result = [x.replace(__version__, '$VERSION') for x in result]
- result = [x.replace(base_version, '$VERSION') for x in result]
- result = [timestamp_regex.sub('$TIMESTAMP', x) for x in result]
- return result
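
normalize_file_output above rewrites volatile parts of generated report files before comparison. A standalone sketch of that normalization (the sample report line and placeholder values are hypothetical):

    import re

    def normalize(lines, abs_path, version):
        # Replace the working directory, version string, and 10-digit timestamps
        # with stable placeholders so output files can be compared verbatim.
        timestamp_re = re.compile(r'\d{10}')
        out = [line.replace(abs_path, '$PWD').replace(version, '$VERSION')
               for line in lines]
        return [timestamp_re.sub('$TIMESTAMP', line) for line in out]

    print(normalize(['<report dir="/tmp/t" version="0.511" ts="1498322400"/>'],
                    '/tmp/t', '0.511'))
    # ['<report dir="$PWD" version="$VERSION" ts="$TIMESTAMP"/>']
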
diff --git a/mypy/test/testextensions.py b/mypy/test/testextensions.py
deleted file mode 100644
index af3916f..0000000
--- a/mypy/test/testextensions.py
+++ /dev/null
@@ -1,125 +0,0 @@
-import sys
-import pickle
-import typing
-try:
- import collections.abc as collections_abc
-except ImportError:
- import collections as collections_abc # type: ignore # PY32 and earlier
-from unittest import TestCase, main, skipUnless
-sys.path[0:0] = ['extensions']
-from mypy_extensions import TypedDict
-
-
-class BaseTestCase(TestCase):
-
- def assertIsSubclass(self, cls, class_or_tuple, msg=None):
- if not issubclass(cls, class_or_tuple):
- message = '%r is not a subclass of %r' % (cls, class_or_tuple)
- if msg is not None:
- message += ' : %s' % msg
- raise self.failureException(message)
-
- def assertNotIsSubclass(self, cls, class_or_tuple, msg=None):
- if issubclass(cls, class_or_tuple):
- message = '%r is a subclass of %r' % (cls, class_or_tuple)
- if msg is not None:
- message += ' : %s' % msg
- raise self.failureException(message)
-
-
-PY36 = sys.version_info[:2] >= (3, 6)
-
-PY36_TESTS = """
-Label = TypedDict('Label', [('label', str)])
-
-class Point2D(TypedDict):
- x: int
- y: int
-
-class LabelPoint2D(Point2D, Label): ...
-"""
-
-if PY36:
- exec(PY36_TESTS)
-
-
-class TypedDictTests(BaseTestCase):
-
- def test_basics_iterable_syntax(self):
- Emp = TypedDict('Emp', {'name': str, 'id': int})
- self.assertIsSubclass(Emp, dict)
- self.assertIsSubclass(Emp, typing.MutableMapping)
- self.assertNotIsSubclass(Emp, collections_abc.Sequence)
- jim = Emp(name='Jim', id=1)
- self.assertIs(type(jim), dict)
- self.assertEqual(jim['name'], 'Jim')
- self.assertEqual(jim['id'], 1)
- self.assertEqual(Emp.__name__, 'Emp')
- self.assertEqual(Emp.__module__, 'mypy.test.testextensions')
- self.assertEqual(Emp.__bases__, (dict,))
- self.assertEqual(Emp.__annotations__, {'name': str, 'id': int})
-
- def test_basics_keywords_syntax(self):
- Emp = TypedDict('Emp', name=str, id=int)
- self.assertIsSubclass(Emp, dict)
- self.assertIsSubclass(Emp, typing.MutableMapping)
- self.assertNotIsSubclass(Emp, collections_abc.Sequence)
- jim = Emp(name='Jim', id=1) # type: ignore # mypy doesn't support keyword syntax yet
- self.assertIs(type(jim), dict)
- self.assertEqual(jim['name'], 'Jim')
- self.assertEqual(jim['id'], 1)
- self.assertEqual(Emp.__name__, 'Emp')
- self.assertEqual(Emp.__module__, 'mypy.test.testextensions')
- self.assertEqual(Emp.__bases__, (dict,))
- self.assertEqual(Emp.__annotations__, {'name': str, 'id': int})
-
- def test_typeddict_errors(self):
- Emp = TypedDict('Emp', {'name': str, 'id': int})
- self.assertEqual(TypedDict.__module__, 'mypy_extensions')
- jim = Emp(name='Jim', id=1)
- with self.assertRaises(TypeError):
- isinstance({}, Emp)
- with self.assertRaises(TypeError):
- isinstance(jim, Emp)
- with self.assertRaises(TypeError):
- issubclass(dict, Emp)
- with self.assertRaises(TypeError):
- TypedDict('Hi', x=1)
- with self.assertRaises(TypeError):
- TypedDict('Hi', [('x', int), ('y', 1)])
- with self.assertRaises(TypeError):
- TypedDict('Hi', [('x', int)], y=int)
-
- @skipUnless(PY36, 'Python 3.6 required')
- def test_py36_class_syntax_usage(self):
- self.assertEqual(LabelPoint2D.__annotations__, {'x': int, 'y': int, 'label': str}) # noqa
- self.assertEqual(LabelPoint2D.__bases__, (dict,)) # noqa
- self.assertNotIsSubclass(LabelPoint2D, typing.Sequence) # noqa
- not_origin = Point2D(x=0, y=1) # noqa
- self.assertEqual(not_origin['x'], 0)
- self.assertEqual(not_origin['y'], 1)
- other = LabelPoint2D(x=0, y=1, label='hi') # noqa
- self.assertEqual(other['label'], 'hi')
-
- def test_pickle(self):
- global EmpD # pickle wants to reference the class by name
- EmpD = TypedDict('EmpD', name=str, id=int)
- jane = EmpD({'name': 'jane', 'id': 37})
- for proto in range(pickle.HIGHEST_PROTOCOL + 1):
- z = pickle.dumps(jane, proto)
- jane2 = pickle.loads(z)
- self.assertEqual(jane2, jane)
- self.assertEqual(jane2, {'name': 'jane', 'id': 37})
- ZZ = pickle.dumps(EmpD, proto)
- EmpDnew = pickle.loads(ZZ)
- self.assertEqual(EmpDnew({'name': 'jane', 'id': 37}), jane)
-
- def test_optional(self):
- EmpD = TypedDict('EmpD', name=str, id=int)
-
- self.assertEqual(typing.Optional[EmpD], typing.Union[None, EmpD])
- self.assertNotEqual(typing.List[EmpD], typing.Tuple[EmpD])
-
-
-if __name__ == '__main__':
- main()
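
The tests above pin down the runtime behavior of mypy_extensions.TypedDict; a minimal usage sketch, assuming the mypy_extensions package from extensions/ is importable:

    from mypy_extensions import TypedDict

    # At runtime a TypedDict class is just a dict factory with __annotations__;
    # mypy checks the per-key value types statically.
    Emp = TypedDict('Emp', {'name': str, 'id': int})
    jim = Emp(name='Jim', id=1)
    assert type(jim) is dict
    assert Emp.__annotations__ == {'name': str, 'id': int}
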
diff --git a/mypy/test/testgraph.py b/mypy/test/testgraph.py
deleted file mode 100644
index d88ca1c..0000000
--- a/mypy/test/testgraph.py
+++ /dev/null
@@ -1,69 +0,0 @@
-"""Test cases for graph processing code in build.py."""
-
-from typing import AbstractSet, Dict, List, Set
-
-from mypy.myunit import Suite, assert_equal
-from mypy.build import BuildManager, State, BuildSourceSet
-from mypy.build import topsort, strongly_connected_components, sorted_components, order_ascc
-from mypy.version import __version__
-from mypy.options import Options
-from mypy.report import Reports
-
-
-class GraphSuite(Suite):
-
- def test_topsort(self) -> None:
- a = frozenset({'A'})
- b = frozenset({'B'})
- c = frozenset({'C'})
- d = frozenset({'D'})
- data = {a: {b, c}, b: {d}, c: {d}} # type: Dict[AbstractSet[str], Set[AbstractSet[str]]]
- res = list(topsort(data))
- assert_equal(res, [{d}, {b, c}, {a}])
-
- def test_scc(self) -> None:
- vertices = {'A', 'B', 'C', 'D'}
- edges = {'A': ['B', 'C'],
- 'B': ['C'],
- 'C': ['B', 'D'],
- 'D': []} # type: Dict[str, List[str]]
- sccs = set(frozenset(x) for x in strongly_connected_components(vertices, edges))
- assert_equal(sccs,
- {frozenset({'A'}),
- frozenset({'B', 'C'}),
- frozenset({'D'})})
-
- def _make_manager(self) -> BuildManager:
- manager = BuildManager(
- data_dir='',
- lib_path=[],
- ignore_prefix='',
- source_set=BuildSourceSet([]),
- reports=Reports('', {}),
- options=Options(),
- version_id=__version__,
- )
- return manager
-
- def test_sorted_components(self) -> None:
- manager = self._make_manager()
- graph = {'a': State('a', None, 'import b, c', manager),
- 'd': State('d', None, 'pass', manager),
- 'b': State('b', None, 'import c', manager),
- 'c': State('c', None, 'import b, d', manager),
- }
- res = sorted_components(graph)
- assert_equal(res, [frozenset({'d'}), frozenset({'c', 'b'}), frozenset({'a'})])
-
- def test_order_ascc(self) -> None:
- manager = self._make_manager()
- graph = {'a': State('a', None, 'import b, c', manager),
- 'd': State('d', None, 'def f(): import a', manager),
- 'b': State('b', None, 'import c', manager),
- 'c': State('c', None, 'import b, d', manager),
- }
- res = sorted_components(graph)
- assert_equal(res, [frozenset({'a', 'd', 'c', 'b'})])
- ascc = res[0]
- scc = order_ascc(graph, ascc)
- assert_equal(scc, ['d', 'c', 'b', 'a'])
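
test_topsort above expects dependency batches {d}, then {b, c}, then {a}. A standalone Kahn-style sketch of that batching (a hypothetical re-implementation for illustration, not mypy.build.topsort):

    def topsort_batches(data):
        # data maps a node to the set of nodes it depends on.
        # Yield batches whose dependencies have all been emitted already.
        remaining = {node: set(deps) for node, deps in data.items()}
        while remaining:
            ready = {node for node, deps in remaining.items() if not deps}
            if not ready:
                raise ValueError('cycle detected')
            yield ready
            for node in ready:
                del remaining[node]
            for deps in remaining.values():
                deps -= ready

    data = {'A': {'B', 'C'}, 'B': {'D'}, 'C': {'D'}, 'D': set()}
    print(list(topsort_batches(data)))  # [{'D'}, {'B', 'C'}, {'A'}]
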
diff --git a/mypy/test/testinfer.py b/mypy/test/testinfer.py
deleted file mode 100644
index 2142456..0000000
--- a/mypy/test/testinfer.py
+++ /dev/null
@@ -1,223 +0,0 @@
-"""Test cases for type inference helper functions."""
-
-from typing import List, Optional, Tuple, Union
-
-from mypy.myunit import Suite, assert_equal, assert_true
-from mypy.checkexpr import map_actuals_to_formals
-from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2, ARG_NAMED
-from mypy.types import AnyType, TupleType, Type
-
-
-class MapActualsToFormalsSuite(Suite):
- """Test cases for checkexpr.map_actuals_to_formals."""
-
- def test_basic(self) -> None:
- self.assert_map([], [], [])
-
- def test_positional_only(self) -> None:
- self.assert_map([ARG_POS],
- [ARG_POS],
- [[0]])
- self.assert_map([ARG_POS, ARG_POS],
- [ARG_POS, ARG_POS],
- [[0], [1]])
-
- def test_optional(self) -> None:
- self.assert_map([],
- [ARG_OPT],
- [[]])
- self.assert_map([ARG_POS],
- [ARG_OPT],
- [[0]])
- self.assert_map([ARG_POS],
- [ARG_OPT, ARG_OPT],
- [[0], []])
-
- def test_callee_star(self) -> None:
- self.assert_map([],
- [ARG_STAR],
- [[]])
- self.assert_map([ARG_POS],
- [ARG_STAR],
- [[0]])
- self.assert_map([ARG_POS, ARG_POS],
- [ARG_STAR],
- [[0, 1]])
-
- def test_caller_star(self) -> None:
- self.assert_map([ARG_STAR],
- [ARG_STAR],
- [[0]])
- self.assert_map([ARG_POS, ARG_STAR],
- [ARG_STAR],
- [[0, 1]])
- self.assert_map([ARG_STAR],
- [ARG_POS, ARG_STAR],
- [[0], [0]])
- self.assert_map([ARG_STAR],
- [ARG_OPT, ARG_STAR],
- [[0], [0]])
-
- def test_too_many_caller_args(self) -> None:
- self.assert_map([ARG_POS],
- [],
- [])
- self.assert_map([ARG_STAR],
- [],
- [])
- self.assert_map([ARG_STAR],
- [ARG_POS],
- [[0]])
-
- def test_tuple_star(self) -> None:
- self.assert_vararg_map(
- [ARG_STAR],
- [ARG_POS],
- [[0]],
- self.tuple(AnyType()))
- self.assert_vararg_map(
- [ARG_STAR],
- [ARG_POS, ARG_POS],
- [[0], [0]],
- self.tuple(AnyType(), AnyType()))
- self.assert_vararg_map(
- [ARG_STAR],
- [ARG_POS, ARG_OPT, ARG_OPT],
- [[0], [0], []],
- self.tuple(AnyType(), AnyType()))
-
- def tuple(self, *args: Type) -> TupleType:
- return TupleType(list(args), None)
-
- def test_named_args(self) -> None:
- self.assert_map(
- ['x'],
- [(ARG_POS, 'x')],
- [[0]])
- self.assert_map(
- ['y', 'x'],
- [(ARG_POS, 'x'), (ARG_POS, 'y')],
- [[1], [0]])
-
- def test_some_named_args(self) -> None:
- self.assert_map(
- ['y'],
- [(ARG_OPT, 'x'), (ARG_OPT, 'y'), (ARG_OPT, 'z')],
- [[], [0], []])
-
- def test_missing_named_arg(self) -> None:
- self.assert_map(
- ['y'],
- [(ARG_OPT, 'x')],
- [[]])
-
- def test_duplicate_named_arg(self) -> None:
- self.assert_map(
- ['x', 'x'],
- [(ARG_OPT, 'x')],
- [[0, 1]])
-
- def test_varargs_and_bare_asterisk(self) -> None:
- self.assert_map(
- [ARG_STAR],
- [ARG_STAR, (ARG_NAMED, 'x')],
- [[0], []])
- self.assert_map(
- [ARG_STAR, 'x'],
- [ARG_STAR, (ARG_NAMED, 'x')],
- [[0], [1]])
-
- def test_keyword_varargs(self) -> None:
- self.assert_map(
- ['x'],
- [ARG_STAR2],
- [[0]])
- self.assert_map(
- ['x', ARG_STAR2],
- [ARG_STAR2],
- [[0, 1]])
- self.assert_map(
- ['x', ARG_STAR2],
- [(ARG_POS, 'x'), ARG_STAR2],
- [[0], [1]])
- self.assert_map(
- [ARG_POS, ARG_STAR2],
- [(ARG_POS, 'x'), ARG_STAR2],
- [[0], [1]])
-
- def test_both_kinds_of_varargs(self) -> None:
- self.assert_map(
- [ARG_STAR, ARG_STAR2],
- [(ARG_POS, 'x'), (ARG_POS, 'y')],
- [[0, 1], [0, 1]])
-
- def test_special_cases(self) -> None:
- self.assert_map([ARG_STAR],
- [ARG_STAR, ARG_STAR2],
- [[0], []])
- self.assert_map([ARG_STAR, ARG_STAR2],
- [ARG_STAR, ARG_STAR2],
- [[0], [1]])
- self.assert_map([ARG_STAR2],
- [(ARG_POS, 'x'), ARG_STAR2],
- [[0], [0]])
- self.assert_map([ARG_STAR2],
- [ARG_STAR2],
- [[0]])
-
- def assert_map(self,
- caller_kinds_: List[Union[int, str]],
- callee_kinds_: List[Union[int, Tuple[int, str]]],
- expected: List[List[int]],
- ) -> None:
- caller_kinds, caller_names = expand_caller_kinds(caller_kinds_)
- callee_kinds, callee_names = expand_callee_kinds(callee_kinds_)
- result = map_actuals_to_formals(
- caller_kinds,
- caller_names,
- callee_kinds,
- callee_names,
- lambda i: AnyType())
- assert_equal(result, expected)
-
- def assert_vararg_map(self,
- caller_kinds: List[int],
- callee_kinds: List[int],
- expected: List[List[int]],
- vararg_type: Type,
- ) -> None:
- result = map_actuals_to_formals(
- caller_kinds,
- [],
- callee_kinds,
- [],
- lambda i: vararg_type)
- assert_equal(result, expected)
-
-
-def expand_caller_kinds(kinds_or_names: List[Union[int, str]]
- ) -> Tuple[List[int], List[Optional[str]]]:
- kinds = []
- names = []
- for k in kinds_or_names:
- if isinstance(k, str):
- kinds.append(ARG_NAMED)
- names.append(k)
- else:
- kinds.append(k)
- names.append(None)
- return kinds, names
-
-
-def expand_callee_kinds(kinds_and_names: List[Union[int, Tuple[int, str]]]
- ) -> Tuple[List[int], List[Optional[str]]]:
- kinds = []
- names = []
- for v in kinds_and_names:
- if isinstance(v, tuple):
- kinds.append(v[0])
- names.append(v[1])
- else:
- kinds.append(v)
- names.append(None)
- return kinds, names
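
map_actuals_to_formals returns one list per formal parameter, holding the indices of the actual arguments that feed it. A drastically simplified standalone sketch covering only positional and keyword actuals (hypothetical; the real function also handles *args and **kwargs):

    from typing import List, Optional

    def map_simple(actual_names: List[Optional[str]],
                   formal_names: List[str]) -> List[List[int]]:
        # actual_names holds None for positional actuals and the keyword for
        # named ones; the result has one index list per formal parameter.
        result = [[] for _ in formal_names]  # type: List[List[int]]
        next_positional = 0
        for i, name in enumerate(actual_names):
            if name is None:
                result[next_positional].append(i)
                next_positional += 1
            else:
                result[formal_names.index(name)].append(i)
        return result

    # f(1, z=3) against def f(x, y=..., z=...):
    print(map_simple([None, 'z'], ['x', 'y', 'z']))  # [[0], [], [1]]
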
diff --git a/mypy/test/testlex.py b/mypy/test/testlex.py
deleted file mode 100644
index 138aef8..0000000
--- a/mypy/test/testlex.py
+++ /dev/null
@@ -1,466 +0,0 @@
-"""Lexical analyzer test cases"""
-
-from typing import List, Union
-
-from mypy.myunit import Suite, assert_equal
-from mypy.lex import lex
-
-
-class LexerSuite(Suite):
- def test_empty(self) -> None:
- self.assert_lex('', 'Eof()')
-
- def test_keywords(self) -> None:
- self.assert_lex(
- 'if else elif def return pass',
- 'Keyword(if) Keyword( else) Keyword( elif) Keyword( def) '
- 'Keyword( return) Keyword( pass) Break() Eof()')
-
- self.assert_lex(
- 'from import as class global',
- 'Keyword(from) Keyword( import) Keyword( as) Keyword( class) '
- 'Keyword( global) ...')
-
- def test_identifiers(self) -> None:
- self.assert_lex(
- 'i x FooBar FOO_BAR __x var',
- 'Name(i) Name( x) Name( FooBar) Name( FOO_BAR) Name( __x) '
- 'Name( var) Break() Eof()')
-
- self.assert_lex(
- 'any interface void',
- 'Name(any) Name( interface) Name( void) Break() Eof()')
-
- def test_int_literals(self) -> None:
- self.assert_lex(
- '0 00 1 0987654321 10002000300040005000600070008000',
- 'IntLit(0) IntLit( 00) IntLit( 1) LexError( 0987654321) '
- 'IntLit( 10002000300040005000600070008000) Break() Eof()')
-
- def test_hex_int_literals(self) -> None:
- self.assert_lex('0x0 0xabcedf0189 0xAFe 0X2',
- 'IntLit(0x0) IntLit( 0xabcedf0189) IntLit( 0xAFe) '
- 'IntLit( 0X2) ...')
-
- def test_oct_int_literals(self) -> None:
- self.assert_lex('0o0 0o127 0O1',
- 'IntLit(0o0) IntLit( 0o127) IntLit( 0O1) ...')
-
- def test_bin_int_literals(self) -> None:
- self.assert_lex('0b0 0b110 0B1',
- 'IntLit(0b0) IntLit( 0b110) IntLit( 0B1) ...')
-
- def test_float_literals(self) -> None:
- self.assert_lex('1.2 .1 1.',
- 'FloatLit(1.2) FloatLit( .1) FloatLit( 1.) ...')
-
- self.assert_lex(
- '1e2 1.2e+3 1.3e-12',
- 'FloatLit(1e2) FloatLit( 1.2e+3) FloatLit( 1.3e-12) ...')
-
- self.assert_lex('1.e2', 'FloatLit(1.e2) ...')
-
- def test_comments(self) -> None:
- self.assert_lex('# foo "" bar' + '\n' + 'x #x',
- 'Name(# foo "" bar\\nx) Break( #x) Eof()')
-
- def test_empty_lines(self) -> None:
- self.assert_lex(r'\n1', r'IntLit(\n1) ...')
- self.assert_lex(r'\n\n1', r'IntLit(\n\n1) ...')
- self.assert_lex(r'1\n\n2', r'IntLit(1) Break(\n\n) IntLit(2) ...')
-
- def test_line_breaks(self) -> None:
- self.assert_lex('1\\r2', 'IntLit(1) Break(\\r) IntLit(2) ...')
- self.assert_lex('1\\r\\n2', 'IntLit(1) Break(\\r\\n) IntLit(2) ...')
-
- def test_operators(self) -> None:
- self.assert_lex('- + < > == != <= >= .',
- 'Op(-) Op( +) Op( <) Op( >) Op( ==) Op( !=) Op( <=) '
- 'Op( >=) Op( .) ...')
-
- self.assert_lex('* / % // **',
- 'Op(*) Op( /) Op( %) Op( //) Op( **) ...')
-
- self.assert_lex('& | ^ ~ << >>',
- 'Op(&) Op( |) Op( ^) Op( ~) Op( <<) Op( >>) ...')
-
- self.assert_lex('in is and or not',
- 'Op(in) Op( is) Op( and) Op( or) Op( not) ...')
-
- def test_punctuators(self) -> None:
- self.assert_lex(': = ,', 'Colon(:) Punct( =) Punct( ,) ...')
-
- self.assert_lex(
- '+= -= *= %= //=',
- 'Punct(+=) Punct( -=) Punct( *=) Punct( %=) Punct( //=) ...')
- self.assert_lex('**=', 'Punct(**=) ...')
- self.assert_lex(
- '&= |= ^= <<= >>=',
- 'Punct(&=) Punct( |=) Punct( ^=) Punct( <<=) Punct( >>=) ...')
-
- def test_basic_indentation(self) -> None:
- self.assert_lex(
- 'y' + '\n' + ' x',
- 'Name(y) Break(\\n) Indent( ) Name(x) Break() Dedent() Eof()')
-
- self.assert_lex(
- 'y' + '\n' + ' x' + '\n' + 'z',
- 'Name(y) Break(\\n) Indent( ) Name(x) Break(\\n) Dedent() '
- 'Name(z) Break() Eof()')
-
- def test_multiple_indent_levels(self) -> None:
- self.assert_lex('y' + '\n' +
- ' x' + '\n' +
- ' y' + '\n' +
- ' z',
- 'Name(y) Break(\\n) ' +
- 'Indent( ) Name(x) Break(\\n) ' +
- 'Name( y) Break(\\n) ' +
- 'Indent( ) Name(z) Break() ' +
- 'Dedent() Dedent() Eof()')
-
- self.assert_lex('y' + '\n' +
- ' x' + '\n' +
- ' z' + '\n' +
- ' y',
- 'Name(y) Break(\\n) ' +
- 'Indent( ) Name(x) Break(\\n) ' +
- 'Indent( ) Name(z) Break(\\n) ' +
- 'Dedent() Name( y) Break() ' +
- 'Dedent() Eof()')
-
- def test_tab_indent(self) -> None:
- self.assert_lex('y' + '\n' +
- '\t' + 'x' + '\n' +
- ' y' + '\n' +
- ' ' + '\t' + 'z',
- 'Name(y) Break(\\n) ' +
- 'Indent(\\t) Name(x) Break(\\n) ' +
- 'Name( y) Break(\\n) ' +
- 'Name( \\tz) Break() ' +
- 'Dedent() Eof()')
-
- def test_comment_after_dedent(self) -> None:
- self.assert_lex('y\n'
- ' x\n'
- '# Foo\n'
- 'z',
- r'Name(y) Break(\n) Indent( ) Name(x) '
- r'Break(\n# Foo\n) '
- r'Dedent() Name(z) Break() Eof()')
-
- def test_parens(self) -> None:
- self.assert_lex('( x )', 'Punct(() Name( x) Punct( )) Break() Eof()')
- self.assert_lex(
- '( x' + '\n' + ' y )',
- 'Punct(() Name( x) Name(\\n y) Punct( )) Break() Eof()')
-
- self.assert_lex('()' + '\n' + ' y',
- 'Punct(() Punct()) Break(\\n) Indent( ) Name(y) '
- 'Break() Dedent() Eof()')
-
- # [ ... ] and { ... }.
- self.assert_lex(
- '[ x' + '\n' + ' y ]',
- 'Punct([) Name( x) Name(\\n y) Punct( ]) Break() Eof()')
- self.assert_lex(
- '{ x' + '\n' + ' y }',
- 'Punct({) Name( x) Name(\\n y) Punct( }) Break() Eof()')
-
- # Nested brackets.
- self.assert_lex(
- '({}' + '\n' + ' y)',
- 'Punct(() Punct({) Punct(}) Name(\\n y) Punct()) Break() Eof()')
-
- def test_brackets_and_line_breaks(self) -> None:
- # This used to fail.
- self.assert_lex('{}' + '\n' + '1',
- 'Punct({) Punct(}) Break(\\n) IntLit(1) Break() Eof()')
-
- def test_str_literals(self) -> None:
- self.assert_lex("'' 'foo_bar'",
- "StrLit('') StrLit( 'foo_bar') Break() Eof()")
- self.assert_lex('"" "foo_bar"',
- 'StrLit("") StrLit( "foo_bar") Break() Eof()')
-
- self.assert_lex('"\\"" 1', 'StrLit("\\"") IntLit( 1) Break() Eof()')
- self.assert_lex("'\\'' 1", "StrLit('\\'') IntLit( 1) Break() Eof()")
-
- self.assert_lex('"\\\\" 1', 'StrLit("\\\\") IntLit( 1) Break() Eof()')
- self.assert_lex("'\\\\' 1", "StrLit('\\\\') IntLit( 1) Break() Eof()")
-
- def test_triple_quoted_string_literals(self) -> None:
- # Single-line
-
- self.assert_lex("''''''", "StrLit('''''') ...")
- self.assert_lex("1 '''x''y'''1",
- "IntLit(1) StrLit( '''x''y''') IntLit(1) ...")
-
- self.assert_lex('""""""', 'StrLit("""""") ...')
- self.assert_lex('"""x""y"""', 'StrLit("""x""y""") ...')
-
- # Multiple-line
-
- self.assert_lex("'''" + '\n' + "'''", "StrLit('''\\n''') ...")
- self.assert_lex("'''x''" + '\n' + "''x'''",
- "StrLit('''x''\\n''x''') ...")
- self.assert_lex("'''''" + '\n' + "'''''",
- "StrLit('''''\\n''') StrLit('') ...")
- self.assert_lex("'''x" + '\n' + 'xyz' + '\n' + "''x'''",
- "StrLit('''x\\nxyz\\n''x''') ...")
-
- self.assert_lex('"""x' + '\n' + 'y"""', 'StrLit("""x\\ny""") ...')
-
- def test_unicode_literals(self) -> None:
- self.assert_lex("u'' u'foo'",
- "UnicodeLit(u'') UnicodeLit( u'foo') ...")
- self.assert_lex('u"" u"foo"',
- 'UnicodeLit(u"") UnicodeLit( u"foo") ...')
- self.assert_lex('ur"" ur"foo"',
- 'UnicodeLit(ur"") UnicodeLit( ur"foo") ...')
- self.assert_lex('u"""foo\n"""',
- r'UnicodeLit(u"""foo\n""") ...')
-
- def test_unicode_literal_capital_u(self) -> None:
- self.assert_lex("U'foo'", "UnicodeLit(U'foo') ...")
-
- def test_semicolons(self) -> None:
- self.assert_lex('a;b', 'Name(a) Break(;) Name(b) ...')
- self.assert_lex('a;', 'Name(a) Break(;) Eof()')
-
- self.assert_lex(';a', 'Break(;) Name(a) ...')
- self.assert_lex('a;;b', 'Name(a) Break(;) Break(;) Name(b) ...')
-
- def test_raw_string(self) -> None:
- self.assert_lex("r'' r'foo bar'",
- "StrLit(r'') StrLit( r'foo bar') ...")
- self.assert_lex('r"" r"foo bar"',
- 'StrLit(r"") StrLit( r"foo bar") ...')
-
- self.assert_lex("r'\\x\\''", "StrLit(r'\\x\\'') ...")
- self.assert_lex('r"\\x\\""', 'StrLit(r"\\x\\"") ...')
-
- self.assert_lex("r'\\\\' ''", "StrLit(r'\\\\') StrLit( '') ...")
- self.assert_lex('r"\\\\" ""', 'StrLit(r"\\\\") StrLit( "") ...')
-
- self.assert_lex("r'''" + '\n' + "x'''", "StrLit(r'''\\nx''') ...")
-
- def test_raw_string_with_capital_r(self) -> None:
- self.assert_lex("R'foo'", "StrLit(R'foo') ...")
-
- def test_escapes_in_triple_quoted_literals(self) -> None:
- self.assert_lex(r"'''\''''",
- r"StrLit('''\'''') ...")
- self.assert_lex(r'"""\""""',
- r'StrLit("""\"""") ...')
- self.assert_lex(r'"""\\"""',
- r'StrLit("""\\""") ...')
-
- def test_escapes_in_triple_quoted_raw_literals(self) -> None:
- self.assert_lex(r"r'''\''''",
- r"StrLit(r'''\'''') ...")
- self.assert_lex(r"r'''\\'''",
- r"StrLit(r'''\\''') ...")
- self.assert_lex(r'r"""\""""',
- r'StrLit(r"""\"""") ...')
-
- def test_bytes(self) -> None:
- self.assert_lex("b'\\'' b'foo bar'",
- "BytesLit(b'\\'') BytesLit( b'foo bar') ...")
- self.assert_lex('b"\\"" b"foo bar"',
- 'BytesLit(b"\\"") BytesLit( b"foo bar") ...')
-
- self.assert_lex("b'''" + '\n' + " x'''", "BytesLit(b'''\\n x''') ...")
-
- def test_bytes_with_capital_b(self) -> None:
- self.assert_lex("B'foo'", "BytesLit(B'foo') ...")
-
- def test_raw_bytes(self) -> None:
- self.assert_lex("br'x\\x\\''", "BytesLit(br'x\\x\\'') ...")
- self.assert_lex('br"x\\y\\""', 'BytesLit(br"x\\y\\"") ...')
-
- self.assert_lex('br"""' + '\n' + 'x"""', 'BytesLit(br"""\\nx""") ...')
-
- def test_raw_bytes_alternative(self) -> None:
- self.assert_lex("rb'x\\x\\''", "BytesLit(rb'x\\x\\'') ...")
-
- def test_backslash(self) -> None:
- self.assert_lex('a\\' + '\n' + ' b', 'Name(a) Name(\\\\n b) ...')
- self.assert_lex(
- 'a = \\' + '\n' + ' 1' + '\n' + '=',
- 'Name(a) Punct( =) IntLit( \\\\n 1) Break(\\n) Punct(=) ...')
-
- def test_backslash_in_string(self) -> None:
- self.assert_lex("'foo\\" + '\n' + "bar'", "StrLit('foo\\\\nbar') ...")
- self.assert_lex("'foo\\" + '\n' + ' zar\\' + '\n' + " bar'",
- "StrLit('foo\\\\n zar\\\\n bar') ...")
-
- self.assert_lex('"foo\\' + '\n' + 'bar"', 'StrLit("foo\\\\nbar") ...')
-
- def test_backslash_in_raw_string(self) -> None:
- self.assert_lex("r'a\\" + '\n' + "b\\'1",
- "StrLit(r'a\\\\nb\\') IntLit(1) ...")
- self.assert_lex("r'a\\" + '\n' + '-\\' + '\n' + "b\\'1",
- "StrLit(r'a\\\\n-\\\\nb\\') IntLit(1) ...")
- self.assert_lex('r"a\\' + '\n' + 'b\\"1',
- 'StrLit(r"a\\\\nb\\") IntLit(1) ...')
- self.assert_lex('r"a\\' + '\n' + '-\\' + '\n' + 'b\\"1',
- 'StrLit(r"a\\\\n-\\\\nb\\") IntLit(1) ...')
-
- def test_final_dedent(self) -> None:
- self.assert_lex(
- '1' + '\n' + ' 1' + '\n',
- 'IntLit(1) Break(\\n) Indent( ) IntLit(1) Break(\\n) Dedent() Eof()')
-
- def test_empty_line(self) -> None:
- self.assert_lex('1' + '\n' + ' 1' + '\n' + '\n',
- r'IntLit(1) Break(\n) Indent( ) IntLit(1) '
- r'Break(\n\n) Dedent() Eof()')
-
- def test_comments_and_indents(self) -> None:
- self.assert_lex('1' + '\n' + ' #x' + '\n' + ' y',
- r'IntLit(1) Break(\n #x\n) Indent( ) Name(y) '
- r'Break() Dedent() Eof()')
- self.assert_lex('1' + '\n' + '#x' + '\n' + ' y',
- r'IntLit(1) Break(\n#x\n) Indent( ) Name(y) '
- r'Break() Dedent() Eof()')
-
- def test_form_feed(self) -> None:
- self.assert_lex('\x0c' + '\n' + 'x', 'Name(\x0c\\nx) ...')
-
- def test_comment_after_linebreak(self) -> None:
- self.assert_lex('1\n# foo\n2',
- 'IntLit(1) Break(\\n# foo\\n) IntLit(2) ...')
- self.assert_lex('1\n# foo',
- 'IntLit(1) Break(\\n# foo) Eof()')
-
- def test_line_numbers(self) -> None:
- self.assert_line('a\\nb', [1, 1, 2, 2, 2])
-
- self.assert_line('(\\nb)', [1, 2, 2]) # Note: omit break and eof tokens
-
- self.assert_line('a\\n b', [1, 1, # a, break
- 2, 2, 2, # indent, b, break
- 2, 2]) # dedent, break
- self.assert_line('a\\n b\\nc', [1, 1, # a, break
- 2, 2, 2, # indent, b, break
- 3, 3]) # dedent, c
-
- self.assert_line('a\\rb', [1, 1, 2])
- self.assert_line('a\\r\\nb', [1, 1, 2])
-
- self.assert_line('"""x""" 1', [1, 1])
- self.assert_line('"""x\\ny""" 1', [1, 2])
- self.assert_line('"""x\\r\\ny""" 1', [1, 2])
- self.assert_line('"""x\\ry""" 1', [1, 2])
- self.assert_line('"""x\\n\\ny""" 1', [1, 3])
- self.assert_line('\\n"""x\\ny""" 1', [2, 3])
-
- self.assert_line('"x" 1', [1, 1])
- self.assert_line('"\\\\n" 1', [1, 2])
- self.assert_line('"\\\\nx\\\\n" 1', [1, 3])
-
- self.assert_line('r"x" 1', [1, 1])
- self.assert_line('r"\\\\n" 1', [1, 2])
- self.assert_line('r"\\\\nx\\\\n" 1', [1, 3])
-
- def test_backslash_line(self) -> None:
- self.assert_line('a\\\\n 1\\n=', [1, 2, 2, 3])
-
- def test_invalid_parens(self) -> None:
- self.assert_lex('([\\n )\\n1',
- 'Punct(() Punct([) Punct(\\n )) IntLit(\\n1) ...')
- self.assert_lex('])', 'Punct(]) Punct()) ...')
- self.assert_lex('(]\\n )', 'Punct(() Punct(]) Punct(\\n )) ...')
- self.assert_lex('(\\n ])', 'Punct(() Punct(\\n ]) Punct()) ...')
-
- def test_invalid_indent(self) -> None:
- self.assert_lex('x\\n y\\n z',
- 'Name(x) Break(\\n) Indent( ) Name(y) ' +
- 'Break(\\n) Dedent() LexError( ) Name(z) ...')
-
- def test_invalid_backslash(self) -> None:
- self.assert_lex('\\ \\nx', 'LexError(\\) Break( \\n) Name(x) ...')
- self.assert_lex('\\ \\nx', 'LexError(\\) Break( \\n) Name(x) ...')
-
- def test_non_terminated_string_literal(self) -> None:
- self.assert_lex("'", 'LexError(\') ...')
- self.assert_lex("'\\na", 'LexError(\') Break(\\n) Name(a) ...')
-
- self.assert_lex('"', 'LexError(") ...')
- self.assert_lex('"\\na', 'LexError(") Break(\\n) Name(a) ...')
-
- self.assert_lex("r'", 'LexError(r\') ...')
- self.assert_lex('r"', 'LexError(r") ...')
-
- self.assert_lex('"""', 'LexError(""") ...')
- self.assert_lex('"""\\n', 'LexError("""\\n) ...')
-
- self.assert_lex("'''", "LexError(''') ...")
- self.assert_lex("'''\\n", "LexError('''\\n) ...")
-
- self.assert_lex("'\\", 'LexError(\'\\) ...')
- self.assert_lex("'\\\\n", 'LexError(\'\\\\n) ...')
- self.assert_lex("r'\\", 'LexError(r\'\\) ...')
- self.assert_lex("r'\\\\n", 'LexError(r\'\\\\n) ...')
-
- def test_invalid_hex_int_literals(self) -> None:
- self.assert_lex('0x', 'LexError( ) ...')
- self.assert_lex('0xax', 'LexError( ) ...')
-
- def test_latin1_encoding(self) -> None:
- self.assert_lex(b'# coding: latin1\n"\xbb"',
- 'StrLit(# coding: latin1\\n"\xbb") Break() Eof()')
-
- def test_utf8_encoding(self) -> None:
- self.assert_lex('"\xbb"'.encode('utf8'),
- 'StrLit("\xbb") Break() Eof()')
- self.assert_lex(b'"\xbb"',
- "LexError('utf8' codec can't decode byte 187 in column 2) "
- 'Break() Eof()')
- self.assert_lex(b'\n"abcde\xbc"',
- "LexError('utf8' codec can't decode byte 188 in column 7) "
- 'Break() Eof()')
-
- def test_byte_order_mark(self) -> None:
- self.assert_lex('\ufeff"\xbb"'.encode('utf8'),
- 'Bom(\ufeff) StrLit("\xbb") Break() Eof()')
-
- def test_long_comment(self) -> None:
- prog = '# pass\n' * 1000
- self.assert_lex(prog, 'Eof(%s)' % repr(prog)[1:-1])
-
- # TODO
- # invalid escape sequences in string literals etc.
-
- def assert_lex(self, src: Union[str, bytes], lexed: str) -> None:
- if isinstance(src, str):
- src = src.replace('\\n', '\n')
- src = src.replace('\\r', '\r')
-
- if lexed.endswith(' ...'):
- lexed = lexed[:-3] + 'Break() Eof()'
-
- l = lex(src)[0]
- r = []
- for t in l:
- r.append(str(t))
- act = ' '.join(r)
- if act != lexed:
- print('Actual: ', act)
- print('Expected:', lexed)
- assert_equal(act, lexed)
-
- def assert_line(self, s: str, a: List[int]) -> None:
- s = s.replace('\\n', '\n')
- s = s.replace('\\r', '\r')
-
- tt = lex(s)[0]
- r = []
- for t in tt:
- r.append(t.line)
- if len(r) == len(a) + 2:
- a = a[:]
- a.append(a[-1])
- a.append(a[-1])
- assert_equal(r, a)
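
assert_lex above lets an expected token string end with ' ...' as shorthand for the trailing Break() and Eof() tokens; the expansion it applies, shown as a standalone sketch:

    def expand_shorthand(lexed: str) -> str:
        # A trailing ' ...' stands for the implicit end-of-line and end-of-file tokens.
        if lexed.endswith(' ...'):
            lexed = lexed[:-3] + 'Break() Eof()'
        return lexed

    print(expand_shorthand('IntLit(1) ...'))  # IntLit(1) Break() Eof()
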
diff --git a/mypy/test/testmoduleinfo.py b/mypy/test/testmoduleinfo.py
deleted file mode 100644
index 5818479..0000000
--- a/mypy/test/testmoduleinfo.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from mypy import moduleinfo
-from mypy.myunit import (
- Suite, assert_equal, assert_true, assert_false
-)
-
-
-class ModuleInfoSuite(Suite):
- def test_is_in_module_collection(self) -> None:
- assert_true(moduleinfo.is_in_module_collection({'foo'}, 'foo'))
- assert_true(moduleinfo.is_in_module_collection({'foo'}, 'foo.bar'))
- assert_false(moduleinfo.is_in_module_collection({'foo'}, 'fo'))
- assert_true(moduleinfo.is_in_module_collection({'foo.bar'}, 'foo.bar'))
- assert_true(moduleinfo.is_in_module_collection({'foo.bar'}, 'foo.bar.zar'))
- assert_false(moduleinfo.is_in_module_collection({'foo.bar'}, 'foo'))
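
The assertions above define dotted-prefix semantics: an entry covers itself and its submodules, but not lookalike prefixes. One hypothetical way to express that check, independent of mypy.moduleinfo:

    from typing import Set

    def in_collection(collection: Set[str], module: str) -> bool:
        # 'foo' covers 'foo' and 'foo.bar', but not 'fo' or 'foobar'.
        return any(module == entry or module.startswith(entry + '.')
                   for entry in collection)

    assert in_collection({'foo'}, 'foo.bar')
    assert not in_collection({'foo'}, 'fo')
    assert not in_collection({'foo.bar'}, 'foo')
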
diff --git a/mypy/test/testparse.py b/mypy/test/testparse.py
deleted file mode 100644
index d6789c0..0000000
--- a/mypy/test/testparse.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""Tests for the mypy parser."""
-
-import os.path
-
-from typing import List
-
-from mypy import defaults
-from mypy.myunit import Suite, AssertionFailure
-from mypy.test.helpers import assert_string_arrays_equal
-from mypy.test.data import parse_test_cases, DataDrivenTestCase
-from mypy.test import config
-from mypy.parse import parse
-from mypy.errors import CompileError
-from mypy.options import Options
-
-
-class ParserSuite(Suite):
- parse_files = ['parse.test',
- 'parse-python2.test']
-
- def cases(self) -> List[DataDrivenTestCase]:
- # The test case descriptions are stored in data files.
- c = [] # type: List[DataDrivenTestCase]
- for f in self.parse_files:
- c += parse_test_cases(
- os.path.join(config.test_data_prefix, f), test_parser)
- return c
-
-
-def test_parser(testcase: DataDrivenTestCase) -> None:
- """Perform a single parser test case.
-
- The argument contains the description of the test case.
- """
- options = Options()
-
- if testcase.file.endswith('python2.test'):
- options.python_version = defaults.PYTHON2_VERSION
- else:
- options.python_version = defaults.PYTHON3_VERSION
-
- try:
- n = parse(bytes('\n'.join(testcase.input), 'ascii'),
- fnam='main',
- errors=None,
- options=options)
- a = str(n).split('\n')
- except CompileError as e:
- a = e.messages
- assert_string_arrays_equal(testcase.output, a,
- 'Invalid parser output ({}, line {})'.format(
- testcase.file, testcase.line))
-
-
-# The file name shown in test case output. This is displayed in error
-# messages, and must match the file name in the test case descriptions.
-INPUT_FILE_NAME = 'file'
-
-
-class ParseErrorSuite(Suite):
- def cases(self) -> List[DataDrivenTestCase]:
- # Test case descriptions are in an external file.
- return parse_test_cases(os.path.join(config.test_data_prefix,
- 'parse-errors.test'),
- test_parse_error)
-
-
-def test_parse_error(testcase: DataDrivenTestCase) -> None:
- try:
- # Compile temporary file. The test file contains non-ASCII characters.
- parse(bytes('\n'.join(testcase.input), 'utf-8'), INPUT_FILE_NAME, None, Options())
- raise AssertionFailure('No errors reported')
- except CompileError as e:
- # Verify that there was a compile error and that the error messages
- # are equivalent.
- assert_string_arrays_equal(
- testcase.output, e.messages,
- 'Invalid compiler output ({}, line {})'.format(testcase.file,
- testcase.line))
diff --git a/mypy/test/testpythoneval.py b/mypy/test/testpythoneval.py
deleted file mode 100644
index cc598d2..0000000
--- a/mypy/test/testpythoneval.py
+++ /dev/null
@@ -1,135 +0,0 @@
-"""Test cases for running mypy programs using a Python interpreter.
-
-Each test case type checks a program then runs it using Python. The
-output (stdout) of the program is compared to expected output. Type checking
-uses full builtins and other stubs.
-
-Note: Currently Python interpreter paths are hard coded.
-
-Note: These test cases are *not* included in the main test suite, as including
- this suite would slow down the main suite too much.
-"""
-
-from contextlib import contextmanager
-import errno
-import os
-import os.path
-import re
-import subprocess
-import sys
-
-import typing
-from typing import Dict, List, Tuple
-
-from mypy.myunit import Suite, SkipTestCaseException
-from mypy.test.config import test_data_prefix, test_temp_dir
-from mypy.test.data import DataDrivenTestCase, parse_test_cases
-from mypy.test.helpers import assert_string_arrays_equal
-from mypy.util import try_find_python2_interpreter
-
-
-# Files which contain test case descriptions.
-python_eval_files = ['pythoneval.test',
- 'python2eval.test']
-
-python_34_eval_files = ['pythoneval-asyncio.test',
- 'pythoneval-enum.test']
-
-# Path to Python 3 interpreter
-python3_path = sys.executable
-program_re = re.compile(r'\b_program.py\b')
-
-
-class PythonEvaluationSuite(Suite):
- def cases(self) -> List[DataDrivenTestCase]:
- c = [] # type: List[DataDrivenTestCase]
- for f in python_eval_files:
- c += parse_test_cases(os.path.join(test_data_prefix, f),
- test_python_evaluation, test_temp_dir, True)
- if sys.version_info.major == 3 and sys.version_info.minor >= 4:
- for f in python_34_eval_files:
- c += parse_test_cases(os.path.join(test_data_prefix, f),
- test_python_evaluation, test_temp_dir, True)
- return c
-
-
-def test_python_evaluation(testcase: DataDrivenTestCase) -> None:
-    """Run mypy on a test program in a subprocess.
-
-    If type checking passes without errors, also run the program with the
-    selected Python interpreter and compare its output.
-    """
- mypy_cmdline = [
- python3_path,
- os.path.join(testcase.old_cwd, 'scripts', 'mypy'),
- '--show-traceback',
- ]
- py2 = testcase.name.lower().endswith('python2')
- if py2:
- mypy_cmdline.append('--py2')
- interpreter = try_find_python2_interpreter()
- if not interpreter:
- # Skip, can't find a Python 2 interpreter.
- raise SkipTestCaseException()
- else:
- interpreter = python3_path
-
- # Write the program to a file.
- program = '_' + testcase.name + '.py'
- mypy_cmdline.append(program)
- program_path = os.path.join(test_temp_dir, program)
- with open(program_path, 'w') as file:
- for s in testcase.input:
- file.write('{}\n'.format(s))
- # Type check the program.
- # This uses the same PYTHONPATH as the current process.
- returncode, out = run(mypy_cmdline)
- if returncode == 0:
- # Set up module path for the execution.
- # This needs the typing module but *not* the mypy module.
- vers_dir = '2.7' if py2 else '3.2'
- typing_path = os.path.join(testcase.old_cwd, 'lib-typing', vers_dir)
- assert os.path.isdir(typing_path)
- env = os.environ.copy()
- env['PYTHONPATH'] = typing_path
- returncode, interp_out = run([interpreter, program], env=env)
- out += interp_out
- # Remove temp file.
- os.remove(program_path)
- assert_string_arrays_equal(adapt_output(testcase), out,
- 'Invalid output ({}, line {})'.format(
- testcase.file, testcase.line))
-
-
-def split_lines(*streams: bytes) -> List[str]:
- """Returns a single list of string lines from the byte streams in args."""
- return [
- s.rstrip('\n\r')
- for stream in streams
- for s in str(stream, 'utf8').splitlines()
- ]
-
-
-def adapt_output(testcase: DataDrivenTestCase) -> List[str]:
- """Translates the generic _program.py into the actual filename."""
- program = '_' + testcase.name + '.py'
- return [program_re.sub(program, line) for line in testcase.output]
-
-
-def run(
- cmdline: List[str], *, env: Dict[str, str] = None, timeout: int = 30
-) -> Tuple[int, List[str]]:
- """A poor man's subprocess.run() for 3.3 and 3.4 compatibility."""
- process = subprocess.Popen(
- cmdline,
- env=env,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- cwd=test_temp_dir,
- )
- try:
- out, err = process.communicate(timeout=timeout)
- except subprocess.TimeoutExpired:
- out = err = b''
- process.kill()
- return process.returncode, split_lines(out, err)
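
The run() helper above exists only because the suite still supported Python 3.3/3.4. On 3.5+ the same behaviour (capture stdout and stderr, enforce a timeout, return merged output lines) can be sketched directly on top of subprocess.run(); this is an illustration, not part of the removed module, and the -1 timeout return code is a simplification:

    import subprocess
    from typing import Dict, List, Optional, Tuple

    def run_sketch(cmdline: List[str], *, env: Optional[Dict[str, str]] = None,
                   timeout: int = 30, cwd: Optional[str] = None) -> Tuple[int, List[str]]:
        # Run the command and capture stdout/stderr, like the removed run() helper.
        try:
            proc = subprocess.run(cmdline, env=env, cwd=cwd, timeout=timeout,
                                  stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            code, out, err = proc.returncode, proc.stdout, proc.stderr
        except subprocess.TimeoutExpired:
            code, out, err = -1, b'', b''   # simplification: report -1 instead of the real status
        # Merge both streams into a single list of lines, as split_lines() did.
        return code, [line for stream in (out, err)
                      for line in stream.decode('utf8').splitlines()]
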
diff --git a/mypy/test/testreports.py b/mypy/test/testreports.py
deleted file mode 100644
index 80e6980..0000000
--- a/mypy/test/testreports.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""Test cases for reports generated by mypy."""
-import textwrap
-
-from mypy.myunit import Suite, assert_equal
-from mypy.report import CoberturaPackage, get_line_rate
-
-import lxml.etree as etree
-
-
-class CoberturaReportSuite(Suite):
- def test_get_line_rate(self) -> None:
- assert_equal('1.0', get_line_rate(0, 0))
- assert_equal('0.3333', get_line_rate(1, 3))
-
- def test_as_xml(self) -> None:
- cobertura_package = CoberturaPackage('foobar')
- cobertura_package.covered_lines = 21
- cobertura_package.total_lines = 42
-
- child_package = CoberturaPackage('raz')
- child_package.covered_lines = 10
- child_package.total_lines = 10
- child_package.classes['class'] = etree.Element('class')
-
- cobertura_package.packages['raz'] = child_package
-
- expected_output = textwrap.dedent('''\
- <package complexity="1.0" name="foobar" branch-rate="0" line-rate="0.5000">
- <classes/>
- <packages>
- <package complexity="1.0" name="raz" branch-rate="0" line-rate="1.0000">
- <classes>
- <class/>
- </classes>
- </package>
- </packages>
- </package>
- ''').encode('ascii')
- assert_equal(expected_output,
- etree.tostring(cobertura_package.as_xml(), pretty_print=True))
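
The expectations in test_get_line_rate above pin the behaviour of get_line_rate down fairly tightly. A minimal sketch consistent with those two cases (not the mypy.report implementation itself) is:

    def get_line_rate_sketch(covered_lines: int, total_lines: int) -> str:
        # An empty package counts as fully covered; otherwise render the ratio
        # with four decimal places, matching '0.3333' for 1 of 3 lines.
        if total_lines == 0:
            return '1.0'
        return '{:.4f}'.format(covered_lines / total_lines)

    assert get_line_rate_sketch(0, 0) == '1.0'
    assert get_line_rate_sketch(1, 3) == '0.3333'
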
diff --git a/mypy/test/testsemanal.py b/mypy/test/testsemanal.py
deleted file mode 100644
index 4870fa8..0000000
--- a/mypy/test/testsemanal.py
+++ /dev/null
@@ -1,224 +0,0 @@
-"""Semantic analyzer test cases"""
-
-import os.path
-
-from typing import Dict, List
-
-from mypy import build
-from mypy.build import BuildSource
-from mypy.myunit import Suite
-from mypy.test.helpers import (
- assert_string_arrays_equal, normalize_error_messages, testfile_pyversion,
-)
-from mypy.test.data import parse_test_cases, DataDrivenTestCase
-from mypy.test.config import test_data_prefix, test_temp_dir
-from mypy.errors import CompileError
-from mypy.nodes import TypeInfo
-from mypy.options import Options
-
-
-# Semantic analyzer test cases: dump parse tree
-
-# Semantic analysis test case description files.
-semanal_files = ['semanal-basic.test',
- 'semanal-expressions.test',
- 'semanal-classes.test',
- 'semanal-types.test',
- 'semanal-typealiases.test',
- 'semanal-modules.test',
- 'semanal-statements.test',
- 'semanal-abstractclasses.test',
- 'semanal-namedtuple.test',
- 'semanal-typeddict.test',
- 'semanal-python2.test']
-
-
-def get_semanal_options() -> Options:
- options = Options()
- options.use_builtins_fixtures = True
- options.semantic_analysis_only = True
- options.show_traceback = True
- return options
-
-
-class SemAnalSuite(Suite):
- def cases(self) -> List[DataDrivenTestCase]:
- c = [] # type: List[DataDrivenTestCase]
- for f in semanal_files:
- c += parse_test_cases(os.path.join(test_data_prefix, f),
- test_semanal,
- base_path=test_temp_dir,
- optional_out=True,
- native_sep=True)
- return c
-
-
-def test_semanal(testcase: DataDrivenTestCase) -> None:
- """Perform a semantic analysis test case.
-
- The testcase argument contains a description of the test case
- (inputs and output).
- """
-
- try:
- src = '\n'.join(testcase.input)
- options = get_semanal_options()
- options.python_version = testfile_pyversion(testcase.file)
- result = build.build(sources=[BuildSource('main', None, src)],
- options=options,
- alt_lib_path=test_temp_dir)
- a = result.errors
- if a:
- raise CompileError(a)
- # Include string representations of the source files in the actual
- # output.
- for fnam in sorted(result.files.keys()):
- f = result.files[fnam]
- # Omit the builtins module and files with a special marker in the
- # path.
- # TODO the test is not reliable
- if (not f.path.endswith((os.sep + 'builtins.pyi',
- 'typing.pyi',
- 'mypy_extensions.pyi',
- 'abc.pyi',
- 'collections.pyi'))
- and not os.path.basename(f.path).startswith('_')
- and not os.path.splitext(
- os.path.basename(f.path))[0].endswith('_')):
- a += str(f).split('\n')
- except CompileError as e:
- a = e.messages
- assert_string_arrays_equal(
- testcase.output, a,
- 'Invalid semantic analyzer output ({}, line {})'.format(testcase.file,
- testcase.line))
-
-
-# Semantic analyzer error test cases
-
-# Paths to files containing test case descriptions.
-semanal_error_files = ['semanal-errors.test']
-
-
-class SemAnalErrorSuite(Suite):
- def cases(self) -> List[DataDrivenTestCase]:
- # Read test cases from test case description files.
- c = [] # type: List[DataDrivenTestCase]
- for f in semanal_error_files:
- c += parse_test_cases(os.path.join(test_data_prefix, f),
- test_semanal_error, test_temp_dir, optional_out=True)
- return c
-
-
-def test_semanal_error(testcase: DataDrivenTestCase) -> None:
- """Perform a test case."""
-
- try:
- src = '\n'.join(testcase.input)
- res = build.build(sources=[BuildSource('main', None, src)],
- options=get_semanal_options(),
- alt_lib_path=test_temp_dir)
- a = res.errors
- assert a, 'No errors reported in {}, line {}'.format(testcase.file, testcase.line)
- except CompileError as e:
- # Verify that there was a compile error and that the error messages
- # are equivalent.
- a = e.messages
- assert_string_arrays_equal(
- testcase.output, normalize_error_messages(a),
- 'Invalid compiler output ({}, line {})'.format(testcase.file, testcase.line))
-
-
-# SymbolNode table export test cases
-
-# Test case descriptions
-semanal_symtable_files = ['semanal-symtable.test']
-
-
-class SemAnalSymtableSuite(Suite):
- def cases(self) -> List[DataDrivenTestCase]:
- c = [] # type: List[DataDrivenTestCase]
- for f in semanal_symtable_files:
- c += parse_test_cases(os.path.join(test_data_prefix, f),
- self.run_test, test_temp_dir)
- return c
-
- def run_test(self, testcase: DataDrivenTestCase) -> None:
- """Perform a test case."""
- try:
- # Build test case input.
- src = '\n'.join(testcase.input)
- result = build.build(sources=[BuildSource('main', None, src)],
- options=get_semanal_options(),
- alt_lib_path=test_temp_dir)
- # The output is the symbol table converted into a string.
- a = result.errors
- if a:
- raise CompileError(a)
- for f in sorted(result.files.keys()):
- if f not in ('builtins', 'typing', 'abc'):
- a.append('{}:'.format(f))
- for s in str(result.files[f].names).split('\n'):
- a.append(' ' + s)
- except CompileError as e:
- a = e.messages
- assert_string_arrays_equal(
- testcase.output, a,
- 'Invalid semantic analyzer output ({}, line {})'.format(
- testcase.file, testcase.line))
-
-
-# Type info export test cases
-
-semanal_typeinfo_files = ['semanal-typeinfo.test']
-
-
-class SemAnalTypeInfoSuite(Suite):
- def cases(self) -> List[DataDrivenTestCase]:
-        """Return test cases read from the description files."""
- c = [] # type: List[DataDrivenTestCase]
- for f in semanal_typeinfo_files:
- c += parse_test_cases(os.path.join(test_data_prefix, f),
- self.run_test, test_temp_dir)
- return c
-
- def run_test(self, testcase: DataDrivenTestCase) -> None:
- """Perform a test case."""
- try:
- # Build test case input.
- src = '\n'.join(testcase.input)
- result = build.build(sources=[BuildSource('main', None, src)],
- options=get_semanal_options(),
- alt_lib_path=test_temp_dir)
- a = result.errors
- if a:
- raise CompileError(a)
-
- # Collect all TypeInfos in top-level modules.
- typeinfos = TypeInfoMap()
- for f in result.files.values():
- for n in f.names.values():
- if isinstance(n.node, TypeInfo):
- typeinfos[n.fullname] = n.node
-
- # The output is the symbol table converted into a string.
- a = str(typeinfos).split('\n')
- except CompileError as e:
- a = e.messages
- assert_string_arrays_equal(
- testcase.output, a,
- 'Invalid semantic analyzer output ({}, line {})'.format(
- testcase.file, testcase.line))
-
-
-class TypeInfoMap(Dict[str, TypeInfo]):
- def __str__(self) -> str:
- a = ['TypeInfoMap('] # type: List[str]
- for x, y in sorted(self.items()):
- if isinstance(x, str) and (not x.startswith('builtins.') and
- not x.startswith('typing.') and
- not x.startswith('abc.')):
- ti = ('\n' + ' ').join(str(y).split('\n'))
- a.append(' {} : {}'.format(x, ti))
- a[-1] += ')'
- return '\n'.join(a)
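
All of the suites above funnel their input through the same build.build() entry point. Outside the test harness (without the builtins fixtures and alt_lib_path) the same semantic-analysis-only pass can be driven roughly like this; this is a sketch of the call pattern used above, not a supported public API:

    from mypy import build
    from mypy.build import BuildSource
    from mypy.options import Options

    options = Options()
    options.semantic_analysis_only = True
    options.show_traceback = True
    result = build.build(sources=[BuildSource('main', None, 'x = 1\n')],
                         options=options)
    print(result.errors)            # error messages, if any
    print(result.files['main'])     # string dump of the analyzed parse tree
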
diff --git a/mypy/test/testsolve.py b/mypy/test/testsolve.py
deleted file mode 100644
index e407f75..0000000
--- a/mypy/test/testsolve.py
+++ /dev/null
@@ -1,156 +0,0 @@
-"""Test cases for the constraint solver used in type inference."""
-
-from typing import List, Union, Tuple
-
-from mypy.myunit import Suite, assert_equal
-from mypy.constraints import SUPERTYPE_OF, SUBTYPE_OF, Constraint
-from mypy.solve import solve_constraints
-from mypy.typefixture import TypeFixture
-from mypy.types import Type, TypeVarType, TypeVarId
-
-
-class SolveSuite(Suite):
- def __init__(self) -> None:
- super().__init__()
- self.fx = TypeFixture()
-
- def test_empty_input(self) -> None:
- self.assert_solve([], [], [])
-
- def test_simple_supertype_constraints(self) -> None:
- self.assert_solve([self.fx.t.id],
- [self.supc(self.fx.t, self.fx.a)],
- [(self.fx.a, self.fx.o)])
- self.assert_solve([self.fx.t.id],
- [self.supc(self.fx.t, self.fx.a),
- self.supc(self.fx.t, self.fx.b)],
- [(self.fx.a, self.fx.o)])
-
- def test_simple_subtype_constraints(self) -> None:
- self.assert_solve([self.fx.t.id],
- [self.subc(self.fx.t, self.fx.a)],
- [self.fx.a])
- self.assert_solve([self.fx.t.id],
- [self.subc(self.fx.t, self.fx.a),
- self.subc(self.fx.t, self.fx.b)],
- [self.fx.b])
-
- def test_both_kinds_of_constraints(self) -> None:
- self.assert_solve([self.fx.t.id],
- [self.supc(self.fx.t, self.fx.b),
- self.subc(self.fx.t, self.fx.a)],
- [(self.fx.b, self.fx.a)])
-
- def test_unsatisfiable_constraints(self) -> None:
- # The constraints are impossible to satisfy.
- self.assert_solve([self.fx.t.id],
- [self.supc(self.fx.t, self.fx.a),
- self.subc(self.fx.t, self.fx.b)],
- [None])
-
- def test_exactly_specified_result(self) -> None:
- self.assert_solve([self.fx.t.id],
- [self.supc(self.fx.t, self.fx.b),
- self.subc(self.fx.t, self.fx.b)],
- [(self.fx.b, self.fx.b)])
-
- def test_multiple_variables(self) -> None:
- self.assert_solve([self.fx.t.id, self.fx.s.id],
- [self.supc(self.fx.t, self.fx.b),
- self.supc(self.fx.s, self.fx.c),
- self.subc(self.fx.t, self.fx.a)],
- [(self.fx.b, self.fx.a), (self.fx.c, self.fx.o)])
-
- def test_no_constraints_for_var(self) -> None:
- self.assert_solve([self.fx.t.id],
- [],
- [self.fx.nonet])
- self.assert_solve([self.fx.t.id, self.fx.s.id],
- [],
- [self.fx.nonet, self.fx.nonet])
- self.assert_solve([self.fx.t.id, self.fx.s.id],
- [self.supc(self.fx.s, self.fx.a)],
- [self.fx.nonet, (self.fx.a, self.fx.o)])
-
- def test_void_constraints(self) -> None:
- self.assert_solve([self.fx.t.id],
- [self.supc(self.fx.t, self.fx.void)],
- [(self.fx.void, self.fx.void)])
- self.assert_solve([self.fx.t.id],
- [self.subc(self.fx.t, self.fx.void)],
- [(self.fx.void, self.fx.void)])
-
- # Both bounds void.
- self.assert_solve([self.fx.t.id],
- [self.supc(self.fx.t, self.fx.void),
- self.subc(self.fx.t, self.fx.void)],
- [(self.fx.void, self.fx.void)])
-
- # Cannot infer any type.
- self.assert_solve([self.fx.t.id],
- [self.supc(self.fx.t, self.fx.a),
- self.supc(self.fx.t, self.fx.void)],
- [None])
- self.assert_solve([self.fx.t.id],
- [self.subc(self.fx.t, self.fx.a),
- self.subc(self.fx.t, self.fx.void)],
- [None])
-
- def test_simple_constraints_with_dynamic_type(self) -> None:
- self.assert_solve([self.fx.t.id],
- [self.supc(self.fx.t, self.fx.anyt)],
- [(self.fx.anyt, self.fx.anyt)])
- self.assert_solve([self.fx.t.id],
- [self.supc(self.fx.t, self.fx.anyt),
- self.supc(self.fx.t, self.fx.anyt)],
- [(self.fx.anyt, self.fx.anyt)])
- self.assert_solve([self.fx.t.id],
- [self.supc(self.fx.t, self.fx.anyt),
- self.supc(self.fx.t, self.fx.a)],
- [(self.fx.anyt, self.fx.anyt)])
-
- self.assert_solve([self.fx.t.id],
- [self.subc(self.fx.t, self.fx.anyt)],
- [(self.fx.anyt, self.fx.anyt)])
- self.assert_solve([self.fx.t.id],
- [self.subc(self.fx.t, self.fx.anyt),
- self.subc(self.fx.t, self.fx.anyt)],
- [(self.fx.anyt, self.fx.anyt)])
- # self.assert_solve([self.fx.t.id],
- # [self.subc(self.fx.t, self.fx.anyt),
- # self.subc(self.fx.t, self.fx.a)],
- # [(self.fx.anyt, self.fx.anyt)])
- # TODO: figure out what this should be after changes to meet(any, X)
-
- def test_both_normal_and_any_types_in_results(self) -> None:
- # If one of the bounds is any, we promote the other bound to
- # any as well, since otherwise the type range does not make sense.
- self.assert_solve([self.fx.t.id],
- [self.supc(self.fx.t, self.fx.a),
- self.subc(self.fx.t, self.fx.anyt)],
- [(self.fx.anyt, self.fx.anyt)])
-
- self.assert_solve([self.fx.t.id],
- [self.supc(self.fx.t, self.fx.anyt),
- self.subc(self.fx.t, self.fx.a)],
- [(self.fx.anyt, self.fx.anyt)])
-
- def assert_solve(self,
- vars: List[TypeVarId],
- constraints: List[Constraint],
- results: List[Union[Type, Tuple[Type, Type]]],
- ) -> None:
- res = []
- for r in results:
- if isinstance(r, tuple):
- res.append(r[0])
- else:
- res.append(r)
- actual = solve_constraints(vars, constraints)
- assert_equal(str(actual), str(res))
-
- def supc(self, type_var: TypeVarType, bound: Type) -> Constraint:
- return Constraint(type_var.id, SUPERTYPE_OF, bound)
-
- def subc(self, type_var: TypeVarType, bound: Type) -> Constraint:
- return Constraint(type_var.id, SUBTYPE_OF, bound)
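
For readers unfamiliar with the solver these tests exercise: SUPERTYPE_OF constraints contribute lower bounds and SUBTYPE_OF constraints contribute upper bounds for each type variable. A conceptual sketch of solving a single variable, consistent with the expectations above but not the mypy.solve code, is:

    def solve_one_sketch(lower_bounds, upper_bounds, join, meet, is_subtype, no_constraint):
        # Combine lower bounds with join and upper bounds with meet.
        bottom = top = None
        for b in lower_bounds:
            bottom = b if bottom is None else join(bottom, b)
        for b in upper_bounds:
            top = b if top is None else meet(top, b)
        if bottom is None and top is None:
            return no_constraint           # the tests expect NoneTyp here
        if bottom is not None and top is not None and not is_subtype(bottom, top):
            return None                    # unsatisfiable (test_unsatisfiable_constraints)
        return bottom if bottom is not None else top
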
diff --git a/mypy/test/teststubgen.py b/mypy/test/teststubgen.py
deleted file mode 100644
index 0be61ca..0000000
--- a/mypy/test/teststubgen.py
+++ /dev/null
@@ -1,186 +0,0 @@
-import glob
-import importlib
-import os.path
-import random
-import shutil
-import sys
-import tempfile
-import time
-
-from typing import List, Tuple
-
-from mypy.myunit import Suite, AssertionFailure, assert_equal
-from mypy.test.helpers import assert_string_arrays_equal
-from mypy.test.data import parse_test_cases, DataDrivenTestCase
-from mypy.test import config
-from mypy.parse import parse
-from mypy.errors import CompileError
-from mypy.stubgen import generate_stub, generate_stub_for_module
-from mypy.stubgenc import infer_method_sig
-from mypy.stubutil import (
- parse_signature, parse_all_signatures, build_signature, find_unique_signatures,
- infer_sig_from_docstring
-)
-
-
-class StubgenUtilSuite(Suite):
- def test_parse_signature(self) -> None:
- self.assert_parse_signature('func()', ('func', [], []))
-
- def test_parse_signature_with_args(self) -> None:
- self.assert_parse_signature('func(arg)', ('func', ['arg'], []))
- self.assert_parse_signature('do(arg, arg2)', ('do', ['arg', 'arg2'], []))
-
- def test_parse_signature_with_optional_args(self) -> None:
- self.assert_parse_signature('func([arg])', ('func', [], ['arg']))
- self.assert_parse_signature('func(arg[, arg2])', ('func', ['arg'], ['arg2']))
- self.assert_parse_signature('func([arg[, arg2]])', ('func', [], ['arg', 'arg2']))
-
- def test_parse_signature_with_default_arg(self) -> None:
- self.assert_parse_signature('func(arg=None)', ('func', [], ['arg']))
- self.assert_parse_signature('func(arg, arg2=None)', ('func', ['arg'], ['arg2']))
- self.assert_parse_signature('func(arg=1, arg2="")', ('func', [], ['arg', 'arg2']))
-
- def test_parse_signature_with_qualified_function(self) -> None:
- self.assert_parse_signature('ClassName.func(arg)', ('func', ['arg'], []))
-
- def test_parse_signature_with_kw_only_arg(self) -> None:
- self.assert_parse_signature('ClassName.func(arg, *, arg2=1)',
- ('func', ['arg', '*'], ['arg2']))
-
- def test_parse_signature_with_star_arg(self) -> None:
- self.assert_parse_signature('ClassName.func(arg, *args)',
- ('func', ['arg', '*args'], []))
-
- def test_parse_signature_with_star_star_arg(self) -> None:
- self.assert_parse_signature('ClassName.func(arg, **args)',
- ('func', ['arg', '**args'], []))
-
- def assert_parse_signature(self, sig: str, result: Tuple[str, List[str], List[str]]) -> None:
- assert_equal(parse_signature(sig), result)
-
- def test_build_signature(self) -> None:
- assert_equal(build_signature([], []), '()')
- assert_equal(build_signature(['arg'], []), '(arg)')
- assert_equal(build_signature(['arg', 'arg2'], []), '(arg, arg2)')
- assert_equal(build_signature(['arg'], ['arg2']), '(arg, arg2=...)')
- assert_equal(build_signature(['arg'], ['arg2', '**x']), '(arg, arg2=..., **x)')
-
- def test_parse_all_signatures(self) -> None:
- assert_equal(parse_all_signatures(['random text',
- '.. function:: fn(arg',
- '.. function:: fn()',
- ' .. method:: fn2(arg)']),
- ([('fn', '()'),
- ('fn2', '(arg)')], []))
-
- def test_find_unique_signatures(self) -> None:
- assert_equal(find_unique_signatures(
- [('func', '()'),
- ('func', '()'),
- ('func2', '()'),
- ('func2', '(arg)'),
- ('func3', '(arg, arg2)')]),
- [('func', '()'),
- ('func3', '(arg, arg2)')])
-
- def test_infer_sig_from_docstring(self) -> None:
- assert_equal(infer_sig_from_docstring('\nfunc(x) - y', 'func'), '(x)')
- assert_equal(infer_sig_from_docstring('\nfunc(x, Y_a=None)', 'func'), '(x, Y_a=None)')
- assert_equal(infer_sig_from_docstring('\nafunc(x) - y', 'func'), None)
- assert_equal(infer_sig_from_docstring('\nfunc(x, y', 'func'), None)
- assert_equal(infer_sig_from_docstring('\nfunc(x=z(y))', 'func'), None)
- assert_equal(infer_sig_from_docstring('\nfunc x', 'func'), None)
-
-
-class StubgenPythonSuite(Suite):
- test_data_files = ['stubgen.test']
-
- def cases(self) -> List[DataDrivenTestCase]:
- c = [] # type: List[DataDrivenTestCase]
- for path in self.test_data_files:
- c += parse_test_cases(os.path.join(config.test_data_prefix, path), test_stubgen)
- return c
-
-
-def test_stubgen(testcase: DataDrivenTestCase) -> None:
- if 'stubgen-test-path' not in sys.path:
- sys.path.insert(0, 'stubgen-test-path')
- os.mkdir('stubgen-test-path')
- source = '\n'.join(testcase.input)
- handle = tempfile.NamedTemporaryFile(prefix='prog_', suffix='.py', dir='stubgen-test-path',
- delete=False)
- assert os.path.isabs(handle.name)
- path = os.path.basename(handle.name)
- name = path[:-3]
- path = os.path.join('stubgen-test-path', path)
- out_dir = '_out'
- os.mkdir(out_dir)
- try:
- handle.write(bytes(source, 'ascii'))
- handle.close()
- # Without this we may sometimes be unable to import the module below, as importlib
- # caches os.listdir() results in Python 3.3+ (Guido explained this to me).
- reset_importlib_caches()
- try:
- if testcase.name.endswith('_import'):
- generate_stub_for_module(name, out_dir, quiet=True)
- elif testcase.name.endswith('_fast_parser'):
- generate_stub(path, out_dir, fast_parser=True)
- else:
- generate_stub(path, out_dir)
- a = load_output(out_dir)
- except CompileError as e:
- a = e.messages
- assert_string_arrays_equal(testcase.output, a,
- 'Invalid output ({}, line {})'.format(
- testcase.file, testcase.line))
- finally:
- handle.close()
- os.unlink(handle.name)
- shutil.rmtree(out_dir)
-
-
-def reset_importlib_caches() -> None:
- try:
- importlib.invalidate_caches()
- except (ImportError, AttributeError):
- pass
-
-
-def load_output(dirname: str) -> List[str]:
- result = [] # type: List[str]
- entries = glob.glob('%s/*' % dirname)
- assert entries, 'No files generated'
- if len(entries) == 1:
- add_file(entries[0], result)
- else:
- for entry in entries:
- result.append('## %s ##' % entry)
- add_file(entry, result)
- return result
-
-
-def add_file(path: str, result: List[str]) -> None:
- with open(path) as file:
- result.extend(file.read().splitlines())
-
-
-class StubgencSuite(Suite):
- def test_infer_hash_sig(self) -> None:
- assert_equal(infer_method_sig('__hash__'), '()')
-
- def test_infer_getitem_sig(self) -> None:
- assert_equal(infer_method_sig('__getitem__'), '(index)')
-
- def test_infer_setitem_sig(self) -> None:
- assert_equal(infer_method_sig('__setitem__'), '(index, object)')
-
- def test_infer_binary_op_sig(self) -> None:
- for op in ('eq', 'ne', 'lt', 'le', 'gt', 'ge',
- 'add', 'radd', 'sub', 'rsub', 'mul', 'rmul'):
- assert_equal(infer_method_sig('__%s__' % op), '(other)')
-
- def test_infer_unary_op_sig(self) -> None:
- for op in ('neg', 'pos'):
- assert_equal(infer_method_sig('__%s__' % op), '()')
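
The build_signature cases above are precise enough to reconstruct the behaviour. A small re-implementation consistent with them (illustrative, not the mypy.stubutil code) would be:

    from typing import List

    def build_signature_sketch(args: List[str], optional_args: List[str]) -> str:
        # Required args are passed through; optional args become 'name=...';
        # '*args'/'**kwargs' style entries keep their prefix unchanged.
        parts = list(args)
        for arg in optional_args:
            parts.append(arg if arg.startswith('*') else '{}=...'.format(arg))
        return '({})'.format(', '.join(parts))

    assert build_signature_sketch(['arg'], ['arg2', '**x']) == '(arg, arg2=..., **x)'
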
diff --git a/mypy/test/testsubtypes.py b/mypy/test/testsubtypes.py
deleted file mode 100644
index 307cb7a..0000000
--- a/mypy/test/testsubtypes.py
+++ /dev/null
@@ -1,208 +0,0 @@
-from mypy.myunit import Suite, assert_true
-from mypy.nodes import CONTRAVARIANT, INVARIANT, COVARIANT
-from mypy.subtypes import is_subtype
-from mypy.typefixture import TypeFixture, InterfaceTypeFixture
-from mypy.types import Type
-
-
-class SubtypingSuite(Suite):
- def set_up(self) -> None:
- self.fx = TypeFixture(INVARIANT)
- self.fx_contra = TypeFixture(CONTRAVARIANT)
- self.fx_co = TypeFixture(COVARIANT)
-
- def test_trivial_cases(self) -> None:
- for simple in self.fx_co.void, self.fx_co.a, self.fx_co.o, self.fx_co.b:
- self.assert_subtype(simple, simple)
-
- def test_instance_subtyping(self) -> None:
- self.assert_strict_subtype(self.fx.a, self.fx.o)
- self.assert_strict_subtype(self.fx.b, self.fx.o)
- self.assert_strict_subtype(self.fx.b, self.fx.a)
-
- self.assert_not_subtype(self.fx.a, self.fx.d)
- self.assert_not_subtype(self.fx.b, self.fx.c)
-
- def test_simple_generic_instance_subtyping_invariant(self) -> None:
- self.assert_subtype(self.fx.ga, self.fx.ga)
- self.assert_subtype(self.fx.hab, self.fx.hab)
-
- self.assert_not_subtype(self.fx.ga, self.fx.g2a)
- self.assert_not_subtype(self.fx.ga, self.fx.gb)
- self.assert_not_subtype(self.fx.gb, self.fx.ga)
-
- def test_simple_generic_instance_subtyping_covariant(self) -> None:
- self.assert_subtype(self.fx_co.ga, self.fx_co.ga)
- self.assert_subtype(self.fx_co.hab, self.fx_co.hab)
-
- self.assert_not_subtype(self.fx_co.ga, self.fx_co.g2a)
- self.assert_not_subtype(self.fx_co.ga, self.fx_co.gb)
- self.assert_subtype(self.fx_co.gb, self.fx_co.ga)
-
- def test_simple_generic_instance_subtyping_contravariant(self) -> None:
- self.assert_subtype(self.fx_contra.ga, self.fx_contra.ga)
- self.assert_subtype(self.fx_contra.hab, self.fx_contra.hab)
-
- self.assert_not_subtype(self.fx_contra.ga, self.fx_contra.g2a)
- self.assert_subtype(self.fx_contra.ga, self.fx_contra.gb)
- self.assert_not_subtype(self.fx_contra.gb, self.fx_contra.ga)
-
- def test_generic_subtyping_with_inheritance_invariant(self) -> None:
- self.assert_subtype(self.fx.gsab, self.fx.gb)
- self.assert_not_subtype(self.fx.gsab, self.fx.ga)
- self.assert_not_subtype(self.fx.gsaa, self.fx.gb)
-
- def test_generic_subtyping_with_inheritance_covariant(self) -> None:
- self.assert_subtype(self.fx_co.gsab, self.fx_co.gb)
- self.assert_subtype(self.fx_co.gsab, self.fx_co.ga)
- self.assert_not_subtype(self.fx_co.gsaa, self.fx_co.gb)
-
- def test_generic_subtyping_with_inheritance_contravariant(self) -> None:
- self.assert_subtype(self.fx_contra.gsab, self.fx_contra.gb)
- self.assert_not_subtype(self.fx_contra.gsab, self.fx_contra.ga)
- self.assert_subtype(self.fx_contra.gsaa, self.fx_contra.gb)
-
- def test_interface_subtyping(self) -> None:
- self.assert_subtype(self.fx.e, self.fx.f)
- self.assert_equivalent(self.fx.f, self.fx.f)
- self.assert_not_subtype(self.fx.a, self.fx.f)
-
- def test_generic_interface_subtyping(self) -> None:
- # TODO make this work
- self.skip()
-
- fx2 = InterfaceTypeFixture()
-
- self.assert_subtype(fx2.m1, fx2.gfa)
- self.assert_not_subtype(fx2.m1, fx2.gfb)
-
- self.assert_equivalent(fx2.gfa, fx2.gfa)
-
- def test_basic_callable_subtyping(self) -> None:
- self.assert_strict_subtype(self.fx.callable(self.fx.o, self.fx.d),
- self.fx.callable(self.fx.a, self.fx.d))
- self.assert_strict_subtype(self.fx.callable(self.fx.d, self.fx.b),
- self.fx.callable(self.fx.d, self.fx.a))
-
- self.assert_unrelated(self.fx.callable(self.fx.a, self.fx.a),
- self.fx.callable(self.fx.a, self.fx.void))
-
- self.assert_unrelated(
- self.fx.callable(self.fx.a, self.fx.a, self.fx.a),
- self.fx.callable(self.fx.a, self.fx.a))
-
- def test_default_arg_callable_subtyping(self) -> None:
- self.assert_strict_subtype(
- self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a),
- self.fx.callable(self.fx.a, self.fx.d, self.fx.a))
-
- self.assert_strict_subtype(
- self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a),
- self.fx.callable(self.fx.a, self.fx.a))
-
- self.assert_strict_subtype(
- self.fx.callable_default(0, self.fx.a, self.fx.d, self.fx.a),
- self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a))
-
- self.assert_unrelated(
- self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a),
- self.fx.callable(self.fx.d, self.fx.d, self.fx.a))
-
- self.assert_unrelated(
- self.fx.callable_default(0, self.fx.a, self.fx.d, self.fx.a),
- self.fx.callable_default(1, self.fx.a, self.fx.a, self.fx.a))
-
- self.assert_unrelated(
- self.fx.callable_default(1, self.fx.a, self.fx.a),
- self.fx.callable(self.fx.a, self.fx.a, self.fx.a))
-
- def test_var_arg_callable_subtyping_1(self) -> None:
- self.assert_strict_subtype(
- self.fx.callable_var_arg(0, self.fx.a, self.fx.a),
- self.fx.callable_var_arg(0, self.fx.b, self.fx.a))
-
- def test_var_arg_callable_subtyping_2(self) -> None:
- self.assert_strict_subtype(
- self.fx.callable_var_arg(0, self.fx.a, self.fx.a),
- self.fx.callable(self.fx.b, self.fx.a))
-
- def test_var_arg_callable_subtyping_3(self) -> None:
- self.assert_strict_subtype(
- self.fx.callable_var_arg(0, self.fx.a, self.fx.a),
- self.fx.callable(self.fx.a))
-
- def test_var_arg_callable_subtyping_4(self) -> None:
- self.assert_strict_subtype(
- self.fx.callable_var_arg(1, self.fx.a, self.fx.d, self.fx.a),
- self.fx.callable(self.fx.b, self.fx.a))
-
- def test_var_arg_callable_subtyping_5(self) -> None:
- self.assert_strict_subtype(
- self.fx.callable_var_arg(0, self.fx.a, self.fx.d, self.fx.a),
- self.fx.callable(self.fx.b, self.fx.a))
-
- def test_var_arg_callable_subtyping_6(self) -> None:
- self.assert_strict_subtype(
- self.fx.callable_var_arg(0, self.fx.a, self.fx.f, self.fx.d),
- self.fx.callable_var_arg(0, self.fx.b, self.fx.e, self.fx.d))
-
- def test_var_arg_callable_subtyping_7(self) -> None:
- self.assert_not_subtype(
- self.fx.callable_var_arg(0, self.fx.b, self.fx.d),
- self.fx.callable(self.fx.a, self.fx.d))
-
- def test_var_arg_callable_subtyping_8(self) -> None:
- self.assert_not_subtype(
- self.fx.callable_var_arg(0, self.fx.b, self.fx.d),
- self.fx.callable_var_arg(0, self.fx.a, self.fx.a, self.fx.d))
- self.assert_subtype(
- self.fx.callable_var_arg(0, self.fx.a, self.fx.d),
- self.fx.callable_var_arg(0, self.fx.b, self.fx.b, self.fx.d))
-
- def test_var_arg_callable_subtyping_9(self) -> None:
- self.assert_not_subtype(
- self.fx.callable_var_arg(0, self.fx.b, self.fx.b, self.fx.d),
- self.fx.callable_var_arg(0, self.fx.a, self.fx.d))
- self.assert_subtype(
- self.fx.callable_var_arg(0, self.fx.a, self.fx.a, self.fx.d),
- self.fx.callable_var_arg(0, self.fx.b, self.fx.d))
-
- def test_type_callable_subtyping(self) -> None:
- self.assert_strict_subtype(
- self.fx.callable_type(self.fx.d, self.fx.a), self.fx.type_type)
-
- self.assert_strict_subtype(
- self.fx.callable_type(self.fx.d, self.fx.b),
- self.fx.callable(self.fx.d, self.fx.a))
-
- self.assert_strict_subtype(self.fx.callable_type(self.fx.a, self.fx.b),
- self.fx.callable(self.fx.a, self.fx.b))
-
- # IDEA: Maybe add these test cases (they are tested pretty well in type
- # checker tests already):
- # * more interface subtyping test cases
- # * more generic interface subtyping test cases
- # * type variables
- # * tuple types
- # * void type
- # * None type
- # * any type
- # * generic function types
-
- def assert_subtype(self, s: Type, t: Type) -> None:
- assert_true(is_subtype(s, t), '{} not subtype of {}'.format(s, t))
-
- def assert_not_subtype(self, s: Type, t: Type) -> None:
- assert_true(not is_subtype(s, t), '{} subtype of {}'.format(s, t))
-
- def assert_strict_subtype(self, s: Type, t: Type) -> None:
- self.assert_subtype(s, t)
- self.assert_not_subtype(t, s)
-
- def assert_equivalent(self, s: Type, t: Type) -> None:
- self.assert_subtype(s, t)
- self.assert_subtype(t, s)
-
- def assert_unrelated(self, s: Type, t: Type) -> None:
- self.assert_not_subtype(s, t)
- self.assert_not_subtype(t, s)
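
The variance fixtures above correspond directly to the variance rules mypy applies to user code. The hypothetical classes below (not part of the removed module) show the same covariant and contravariant relationships at the typing level:

    from typing import Generic, TypeVar

    T_co = TypeVar('T_co', covariant=True)
    T_contra = TypeVar('T_contra', contravariant=True)

    class A: ...
    class B(A): ...

    class Producer(Generic[T_co]): ...      # Producer[B] is a subtype of Producer[A]
    class Consumer(Generic[T_contra]): ...  # Consumer[A] is a subtype of Consumer[B]

    def wants_producer_of_a(p: Producer[A]) -> None: ...
    def wants_consumer_of_b(c: Consumer[B]) -> None: ...

    wants_producer_of_a(Producer[B]())   # accepted: covariance
    wants_consumer_of_b(Consumer[A]())   # accepted: contravariance
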
diff --git a/mypy/test/testtransform.py b/mypy/test/testtransform.py
deleted file mode 100644
index 1dac308..0000000
--- a/mypy/test/testtransform.py
+++ /dev/null
@@ -1,85 +0,0 @@
-"""Identity AST transform test cases"""
-
-import os.path
-
-from typing import Dict, List
-
-from mypy import build
-from mypy.build import BuildSource
-from mypy.myunit import Suite
-from mypy.test.helpers import assert_string_arrays_equal, testfile_pyversion
-from mypy.test.data import parse_test_cases, DataDrivenTestCase
-from mypy.test.config import test_data_prefix, test_temp_dir
-from mypy.errors import CompileError
-from mypy.nodes import TypeInfo
-from mypy.treetransform import TransformVisitor
-from mypy.types import Type
-from mypy.options import Options
-
-
-class TransformSuite(Suite):
- # Reuse semantic analysis test cases.
- transform_files = ['semanal-basic.test',
- 'semanal-expressions.test',
- 'semanal-classes.test',
- 'semanal-types.test',
- 'semanal-modules.test',
- 'semanal-statements.test',
- 'semanal-abstractclasses.test',
- 'semanal-python2.test']
-
- def cases(self) -> List[DataDrivenTestCase]:
- c = [] # type: List[DataDrivenTestCase]
- for f in self.transform_files:
- c += parse_test_cases(os.path.join(test_data_prefix, f),
- test_transform,
- base_path=test_temp_dir,
- native_sep=True)
- return c
-
-
-def test_transform(testcase: DataDrivenTestCase) -> None:
- """Perform an identity transform test case."""
-
- try:
- src = '\n'.join(testcase.input)
- options = Options()
- options.use_builtins_fixtures = True
- options.semantic_analysis_only = True
- options.show_traceback = True
- options.python_version = testfile_pyversion(testcase.file)
- result = build.build(sources=[BuildSource('main', None, src)],
- options=options,
- alt_lib_path=test_temp_dir)
- a = result.errors
- if a:
- raise CompileError(a)
- # Include string representations of the source files in the actual
- # output.
- for fnam in sorted(result.files.keys()):
- f = result.files[fnam]
-
- # Omit the builtins module and files with a special marker in the
- # path.
- # TODO the test is not reliable
- if (not f.path.endswith((os.sep + 'builtins.pyi',
- 'typing.pyi',
- 'abc.pyi'))
- and not os.path.basename(f.path).startswith('_')
- and not os.path.splitext(
- os.path.basename(f.path))[0].endswith('_')):
- t = TestTransformVisitor()
- f = t.mypyfile(f)
- a += str(f).split('\n')
- except CompileError as e:
- a = e.messages
- assert_string_arrays_equal(
- testcase.output, a,
- 'Invalid semantic analyzer output ({}, line {})'.format(testcase.file,
- testcase.line))
-
-
-class TestTransformVisitor(TransformVisitor):
- def type(self, type: Type) -> Type:
- assert type is not None
- return type
diff --git a/mypy/test/testtypegen.py b/mypy/test/testtypegen.py
deleted file mode 100644
index a465c71..0000000
--- a/mypy/test/testtypegen.py
+++ /dev/null
@@ -1,128 +0,0 @@
-"""Test cases for the type checker: exporting inferred types"""
-
-import os.path
-import re
-
-from typing import Set, List
-
-from mypy import build
-from mypy.build import BuildSource
-from mypy.myunit import Suite
-from mypy.test import config
-from mypy.test.data import parse_test_cases, DataDrivenTestCase
-from mypy.test.helpers import assert_string_arrays_equal
-from mypy.util import short_type
-from mypy.nodes import (
- NameExpr, TypeVarExpr, CallExpr, Expression, MypyFile, AssignmentStmt, IntExpr
-)
-from mypy.traverser import TraverserVisitor
-from mypy.errors import CompileError
-from mypy.options import Options
-
-
-class TypeExportSuite(Suite):
- # List of files that contain test case descriptions.
- files = ['typexport-basic.test']
-
- def cases(self) -> List[DataDrivenTestCase]:
- c = [] # type: List[DataDrivenTestCase]
- for f in self.files:
- c += parse_test_cases(os.path.join(config.test_data_prefix, f),
- self.run_test, config.test_temp_dir)
- return c
-
- def run_test(self, testcase: DataDrivenTestCase) -> None:
- try:
- line = testcase.input[0]
- mask = ''
- if line.startswith('##'):
- mask = '(' + line[2:].strip() + ')$'
-
- src = '\n'.join(testcase.input)
- options = Options()
- options.use_builtins_fixtures = True
- options.show_traceback = True
- result = build.build(sources=[BuildSource('main', None, src)],
- options=options,
- alt_lib_path=config.test_temp_dir)
- a = result.errors
- map = result.types
- nodes = map.keys()
-
- # Ignore NameExpr nodes of variables with explicit (trivial) types
- # to simplify output.
- searcher = SkippedNodeSearcher()
- for file in result.files.values():
- file.accept(searcher)
- ignored = searcher.nodes
-
- # Filter nodes that should be included in the output.
- keys = []
- for node in nodes:
- if node.line is not None and node.line != -1 and map[node]:
- if ignore_node(node) or node in ignored:
- continue
- if (re.match(mask, short_type(node))
- or (isinstance(node, NameExpr)
- and re.match(mask, node.name))):
- # Include node in output.
- keys.append(node)
-
- for key in sorted(keys,
- key=lambda n: (n.line, short_type(n),
- str(n) + str(map[n]))):
- ts = str(map[key]).replace('*', '') # Remove erased tags
- ts = ts.replace('__main__.', '')
- a.append('{}({}) : {}'.format(short_type(key), key.line, ts))
- except CompileError as e:
- a = e.messages
- assert_string_arrays_equal(
- testcase.output, a,
- 'Invalid type checker output ({}, line {})'.format(testcase.file,
- testcase.line))
-
-
-class SkippedNodeSearcher(TraverserVisitor):
- def __init__(self) -> None:
- self.nodes = set() # type: Set[Expression]
- self.is_typing = False
-
- def visit_mypy_file(self, f: MypyFile) -> None:
- self.is_typing = f.fullname() == 'typing'
- super().visit_mypy_file(f)
-
- def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
- if s.type or ignore_node(s.rvalue):
- for lvalue in s.lvalues:
- if isinstance(lvalue, NameExpr):
- self.nodes.add(lvalue)
- super().visit_assignment_stmt(s)
-
- def visit_name_expr(self, n: NameExpr) -> None:
- self.skip_if_typing(n)
-
- def visit_int_expr(self, n: IntExpr) -> None:
- self.skip_if_typing(n)
-
- def skip_if_typing(self, n: Expression) -> None:
- if self.is_typing:
- self.nodes.add(n)
-
-
-def ignore_node(node: Expression) -> bool:
- """Return True if node is to be omitted from test case output."""
-
-    # We want to get rid of object() expressions in the typing module stub
-    # and also TypeVar(...) expressions. Since detecting whether a node comes
-    # from the typing module is not easy, we just strip them all away.
- if isinstance(node, TypeVarExpr):
- return True
- if isinstance(node, NameExpr) and node.fullname == 'builtins.object':
- return True
- if isinstance(node, NameExpr) and node.fullname == 'builtins.None':
- return True
- if isinstance(node, CallExpr) and (ignore_node(node.callee) or
- node.analyzed):
- return True
-
- return False
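
As run_test above shows, the first line of a test case may start with '##' to restrict which nodes appear in the output; the filter regex-matches the node's short type name (or, for NameExpr, its name). A tiny standalone illustration of that matching step (assumption: a '##' mask line is present):

    import re

    def matches_mask_sketch(mask_line: str, short_type_name: str, name: str = '') -> bool:
        # run_test wraps the '##' payload as '(<pattern>)$' before matching.
        mask = '(' + mask_line[2:].strip() + ')$'
        return bool(re.match(mask, short_type_name)
                    or (name and re.match(mask, name)))

    assert matches_mask_sketch('## NameExpr|CallExpr', 'CallExpr')
    assert not matches_mask_sketch('## NameExpr', 'IntExpr')
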
diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py
deleted file mode 100644
index 4364848..0000000
--- a/mypy/test/testtypes.py
+++ /dev/null
@@ -1,847 +0,0 @@
-"""Test cases for mypy types and type operations."""
-
-from typing import List, Tuple
-
-from mypy.myunit import (
- Suite, assert_equal, assert_true, assert_false, assert_type
-)
-from mypy.erasetype import erase_type
-from mypy.expandtype import expand_type
-from mypy.join import join_types, join_simple
-from mypy.meet import meet_types
-from mypy.types import (
- UnboundType, AnyType, Void, CallableType, TupleType, TypeVarDef, Type,
- Instance, NoneTyp, ErrorType, Overloaded, TypeType, UnionType, UninhabitedType,
- true_only, false_only, TypeVarId
-)
-from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, CONTRAVARIANT, INVARIANT, COVARIANT
-from mypy.subtypes import is_subtype, is_more_precise, is_proper_subtype
-from mypy.typefixture import TypeFixture, InterfaceTypeFixture
-
-
-class TypesSuite(Suite):
- def __init__(self) -> None:
- super().__init__()
- self.x = UnboundType('X') # Helpers
- self.y = UnboundType('Y')
- self.fx = TypeFixture()
- self.function = self.fx.function
-
- def test_any(self) -> None:
- assert_equal(str(AnyType()), 'Any')
-
- def test_simple_unbound_type(self) -> None:
- u = UnboundType('Foo')
- assert_equal(str(u), 'Foo?')
-
- def test_generic_unbound_type(self) -> None:
- u = UnboundType('Foo', [UnboundType('T'), AnyType()])
- assert_equal(str(u), 'Foo?[T?, Any]')
-
- def test_void_type(self) -> None:
- assert_equal(str(Void(None)), 'void')
-
- def test_callable_type(self) -> None:
- c = CallableType([self.x, self.y],
- [ARG_POS, ARG_POS],
- [None, None],
- AnyType(), self.function)
- assert_equal(str(c), 'def (X?, Y?) -> Any')
-
- c2 = CallableType([], [], [], Void(None), None)
- assert_equal(str(c2), 'def ()')
-
- def test_callable_type_with_default_args(self) -> None:
- c = CallableType([self.x, self.y], [ARG_POS, ARG_OPT], [None, None],
- AnyType(), self.function)
- assert_equal(str(c), 'def (X?, Y? =) -> Any')
-
- c2 = CallableType([self.x, self.y], [ARG_OPT, ARG_OPT], [None, None],
- AnyType(), self.function)
- assert_equal(str(c2), 'def (X? =, Y? =) -> Any')
-
- def test_callable_type_with_var_args(self) -> None:
- c = CallableType([self.x], [ARG_STAR], [None], AnyType(), self.function)
- assert_equal(str(c), 'def (*X?) -> Any')
-
- c2 = CallableType([self.x, self.y], [ARG_POS, ARG_STAR],
- [None, None], AnyType(), self.function)
- assert_equal(str(c2), 'def (X?, *Y?) -> Any')
-
- c3 = CallableType([self.x, self.y], [ARG_OPT, ARG_STAR], [None, None],
- AnyType(), self.function)
- assert_equal(str(c3), 'def (X? =, *Y?) -> Any')
-
- def test_tuple_type(self) -> None:
- assert_equal(str(TupleType([], None)), 'Tuple[]')
- assert_equal(str(TupleType([self.x], None)), 'Tuple[X?]')
- assert_equal(str(TupleType([self.x, AnyType()], None)), 'Tuple[X?, Any]')
-
- def test_type_variable_binding(self) -> None:
- assert_equal(str(TypeVarDef('X', 1, None, self.fx.o)), 'X')
- assert_equal(str(TypeVarDef('X', 1, [self.x, self.y], self.fx.o)),
- 'X in (X?, Y?)')
-
- def test_generic_function_type(self) -> None:
- c = CallableType([self.x, self.y], [ARG_POS, ARG_POS], [None, None],
- self.y, self.function, name=None,
- variables=[TypeVarDef('X', -1, None, self.fx.o)])
- assert_equal(str(c), 'def [X] (X?, Y?) -> Y?')
-
- v = [TypeVarDef('Y', -1, None, self.fx.o),
- TypeVarDef('X', -2, None, self.fx.o)]
- c2 = CallableType([], [], [], Void(None), self.function, name=None, variables=v)
- assert_equal(str(c2), 'def [Y, X] ()')
-
-
-class TypeOpsSuite(Suite):
- def set_up(self) -> None:
- self.fx = TypeFixture(INVARIANT)
- self.fx_co = TypeFixture(COVARIANT)
- self.fx_contra = TypeFixture(CONTRAVARIANT)
-
- # expand_type
-
- def test_trivial_expand(self) -> None:
- for t in (self.fx.a, self.fx.o, self.fx.t, self.fx.void, self.fx.nonet,
- self.tuple(self.fx.a),
- self.callable([], self.fx.a, self.fx.a), self.fx.anyt):
- self.assert_expand(t, [], t)
- self.assert_expand(t, [], t)
- self.assert_expand(t, [], t)
-
- def test_expand_naked_type_var(self) -> None:
- self.assert_expand(self.fx.t, [(self.fx.t.id, self.fx.a)], self.fx.a)
- self.assert_expand(self.fx.t, [(self.fx.s.id, self.fx.a)], self.fx.t)
-
- def test_expand_basic_generic_types(self) -> None:
- self.assert_expand(self.fx.gt, [(self.fx.t.id, self.fx.a)], self.fx.ga)
-
- # IDEA: Add test cases for
- # tuple types
- # callable types
- # multiple arguments
-
- def assert_expand(self,
- orig: Type,
- map_items: List[Tuple[TypeVarId, Type]],
- result: Type,
- ) -> None:
- lower_bounds = {}
-
- for id, t in map_items:
- lower_bounds[id] = t
-
- exp = expand_type(orig, lower_bounds)
- # Remove erased tags (asterisks).
- assert_equal(str(exp).replace('*', ''), str(result))
-
- # erase_type
-
- def test_trivial_erase(self) -> None:
- for t in (self.fx.a, self.fx.o, self.fx.void, self.fx.nonet,
- self.fx.anyt, self.fx.err):
- self.assert_erase(t, t)
-
- def test_erase_with_type_variable(self) -> None:
- self.assert_erase(self.fx.t, self.fx.anyt)
-
- def test_erase_with_generic_type(self) -> None:
- self.assert_erase(self.fx.ga, self.fx.gdyn)
- self.assert_erase(self.fx.hab,
- Instance(self.fx.hi, [self.fx.anyt, self.fx.anyt]))
-
- def test_erase_with_tuple_type(self) -> None:
- self.assert_erase(self.tuple(self.fx.a), self.fx.std_tuple)
-
- def test_erase_with_function_type(self) -> None:
- self.assert_erase(self.fx.callable(self.fx.a, self.fx.b),
- self.fx.callable_type(self.fx.void))
-
- def test_erase_with_type_object(self) -> None:
- self.assert_erase(self.fx.callable_type(self.fx.a, self.fx.b),
- self.fx.callable_type(self.fx.void))
-
- def test_erase_with_type_type(self) -> None:
- self.assert_erase(self.fx.type_a, self.fx.type_a)
- self.assert_erase(self.fx.type_t, self.fx.type_any)
-
- def assert_erase(self, orig: Type, result: Type) -> None:
- assert_equal(str(erase_type(orig)), str(result))
-
- # is_more_precise
-
- def test_is_more_precise(self) -> None:
- fx = self.fx
- assert_true(is_more_precise(fx.b, fx.a))
- assert_true(is_more_precise(fx.b, fx.b))
- assert_true(is_more_precise(fx.b, fx.b))
- assert_true(is_more_precise(fx.b, fx.anyt))
- assert_true(is_more_precise(self.tuple(fx.b, fx.a),
- self.tuple(fx.b, fx.a)))
-
- assert_false(is_more_precise(fx.a, fx.b))
- assert_false(is_more_precise(fx.anyt, fx.b))
- assert_false(is_more_precise(self.tuple(fx.b, fx.b),
- self.tuple(fx.b, fx.a)))
-
- # is_proper_subtype
-
- def test_is_proper_subtype(self) -> None:
- fx = self.fx
-
- assert_true(is_proper_subtype(fx.a, fx.a))
- assert_true(is_proper_subtype(fx.b, fx.a))
- assert_true(is_proper_subtype(fx.b, fx.o))
- assert_true(is_proper_subtype(fx.b, fx.o))
-
- assert_false(is_proper_subtype(fx.a, fx.b))
- assert_false(is_proper_subtype(fx.o, fx.b))
-
- assert_true(is_proper_subtype(fx.anyt, fx.anyt))
- assert_false(is_proper_subtype(fx.a, fx.anyt))
- assert_false(is_proper_subtype(fx.anyt, fx.a))
-
- assert_true(is_proper_subtype(fx.ga, fx.ga))
- assert_true(is_proper_subtype(fx.gdyn, fx.gdyn))
- assert_false(is_proper_subtype(fx.ga, fx.gdyn))
- assert_false(is_proper_subtype(fx.gdyn, fx.ga))
-
- assert_true(is_proper_subtype(fx.t, fx.t))
- assert_false(is_proper_subtype(fx.t, fx.s))
-
- def test_is_proper_subtype_covariance(self) -> None:
- fx_co = self.fx_co
-
- assert_true(is_proper_subtype(fx_co.gsab, fx_co.gb))
- assert_true(is_proper_subtype(fx_co.gsab, fx_co.ga))
- assert_false(is_proper_subtype(fx_co.gsaa, fx_co.gb))
- assert_true(is_proper_subtype(fx_co.gb, fx_co.ga))
- assert_false(is_proper_subtype(fx_co.ga, fx_co.gb))
-
- def test_is_proper_subtype_contravariance(self) -> None:
- fx_contra = self.fx_contra
-
- assert_true(is_proper_subtype(fx_contra.gsab, fx_contra.gb))
- assert_false(is_proper_subtype(fx_contra.gsab, fx_contra.ga))
- assert_true(is_proper_subtype(fx_contra.gsaa, fx_contra.gb))
- assert_false(is_proper_subtype(fx_contra.gb, fx_contra.ga))
- assert_true(is_proper_subtype(fx_contra.ga, fx_contra.gb))
-
- def test_is_proper_subtype_invariance(self) -> None:
- fx = self.fx
-
- assert_true(is_proper_subtype(fx.gsab, fx.gb))
- assert_false(is_proper_subtype(fx.gsab, fx.ga))
- assert_false(is_proper_subtype(fx.gsaa, fx.gb))
- assert_false(is_proper_subtype(fx.gb, fx.ga))
- assert_false(is_proper_subtype(fx.ga, fx.gb))
-
- # can_be_true / can_be_false
-
- def test_empty_tuple_always_false(self) -> None:
- tuple_type = self.tuple()
- assert_true(tuple_type.can_be_false)
- assert_false(tuple_type.can_be_true)
-
- def test_nonempty_tuple_always_true(self) -> None:
- tuple_type = self.tuple(AnyType(), AnyType())
- assert_true(tuple_type.can_be_true)
- assert_false(tuple_type.can_be_false)
-
- def test_union_can_be_true_if_any_true(self) -> None:
- union_type = UnionType([self.fx.a, self.tuple()])
- assert_true(union_type.can_be_true)
-
- def test_union_can_not_be_true_if_none_true(self) -> None:
- union_type = UnionType([self.tuple(), self.tuple()])
- assert_false(union_type.can_be_true)
-
- def test_union_can_be_false_if_any_false(self) -> None:
- union_type = UnionType([self.fx.a, self.tuple()])
- assert_true(union_type.can_be_false)
-
- def test_union_can_not_be_false_if_none_false(self) -> None:
- union_type = UnionType([self.tuple(self.fx.a), self.tuple(self.fx.d)])
- assert_false(union_type.can_be_false)
-
- # true_only / false_only
-
- def test_true_only_of_false_type_is_uninhabited(self) -> None:
- to = true_only(NoneTyp())
- assert_type(UninhabitedType, to)
-
- def test_true_only_of_true_type_is_idempotent(self) -> None:
- always_true = self.tuple(AnyType())
- to = true_only(always_true)
- assert_true(always_true is to)
-
- def test_true_only_of_instance(self) -> None:
- to = true_only(self.fx.a)
- assert_equal(str(to), "A")
- assert_true(to.can_be_true)
- assert_false(to.can_be_false)
- assert_type(Instance, to)
- # The original class still can be false
- assert_true(self.fx.a.can_be_false)
-
- def test_true_only_of_union(self) -> None:
- tup_type = self.tuple(AnyType())
- # Union of something that is unknown, something that is always true, something
- # that is always false
- union_type = UnionType([self.fx.a, tup_type, self.tuple()])
- to = true_only(union_type)
- assert isinstance(to, UnionType)
- assert_equal(len(to.items), 2)
- assert_true(to.items[0].can_be_true)
- assert_false(to.items[0].can_be_false)
- assert_true(to.items[1] is tup_type)
-
- def test_false_only_of_true_type_is_uninhabited(self) -> None:
- fo = false_only(self.tuple(AnyType()))
- assert_type(UninhabitedType, fo)
-
- def test_false_only_of_false_type_is_idempotent(self) -> None:
- always_false = NoneTyp()
- fo = false_only(always_false)
- assert_true(always_false is fo)
-
- def test_false_only_of_instance(self) -> None:
- fo = false_only(self.fx.a)
- assert_equal(str(fo), "A")
- assert_false(fo.can_be_true)
- assert_true(fo.can_be_false)
- assert_type(Instance, fo)
- # The original class still can be true
- assert_true(self.fx.a.can_be_true)
-
- def test_false_only_of_union(self) -> None:
- tup_type = self.tuple()
- # Union of something that is unknown, something that is always true, something
- # that is always false
- union_type = UnionType([self.fx.a, self.tuple(AnyType()), tup_type])
- assert_equal(len(union_type.items), 3)
- fo = false_only(union_type)
- assert isinstance(fo, UnionType)
- assert_equal(len(fo.items), 2)
- assert_false(fo.items[0].can_be_true)
- assert_true(fo.items[0].can_be_false)
- assert_true(fo.items[1] is tup_type)
-
- # Helpers
-
- def tuple(self, *a: Type) -> TupleType:
- return TupleType(list(a), self.fx.std_tuple)
-
- def callable(self, vars: List[str], *a: Type) -> CallableType:
-        """callable(vars, a1, ..., an, r) constructs a callable with
-        argument types a1, ..., an, return type r, and type variables
-        vars.
-        """
- tv = [] # type: List[TypeVarDef]
- n = -1
- for v in vars:
- tv.append(TypeVarDef(v, n, None, self.fx.o))
- n -= 1
- return CallableType(list(a[:-1]),
- [ARG_POS] * (len(a) - 1),
- [None] * (len(a) - 1),
- a[-1],
- self.fx.function,
- name=None,
- variables=tv)
-
-
-class JoinSuite(Suite):
- def set_up(self) -> None:
- self.fx = TypeFixture()
-
- def test_trivial_cases(self) -> None:
- for simple in self.fx.void, self.fx.a, self.fx.o, self.fx.b:
- self.assert_join(simple, simple, simple)
-
- def test_class_subtyping(self) -> None:
- self.assert_join(self.fx.a, self.fx.o, self.fx.o)
- self.assert_join(self.fx.b, self.fx.o, self.fx.o)
- self.assert_join(self.fx.a, self.fx.d, self.fx.o)
- self.assert_join(self.fx.b, self.fx.c, self.fx.a)
- self.assert_join(self.fx.b, self.fx.d, self.fx.o)
-
- def test_tuples(self) -> None:
- self.assert_join(self.tuple(), self.tuple(), self.tuple())
- self.assert_join(self.tuple(self.fx.a),
- self.tuple(self.fx.a),
- self.tuple(self.fx.a))
- self.assert_join(self.tuple(self.fx.b, self.fx.c),
- self.tuple(self.fx.a, self.fx.d),
- self.tuple(self.fx.a, self.fx.o))
-
- self.assert_join(self.tuple(self.fx.a, self.fx.a),
- self.fx.std_tuple,
- self.fx.o)
- self.assert_join(self.tuple(self.fx.a),
- self.tuple(self.fx.a, self.fx.a),
- self.fx.o)
-
- def test_function_types(self) -> None:
- self.assert_join(self.callable(self.fx.a, self.fx.b),
- self.callable(self.fx.a, self.fx.b),
- self.callable(self.fx.a, self.fx.b))
-
- self.assert_join(self.callable(self.fx.a, self.fx.b),
- self.callable(self.fx.b, self.fx.b),
- self.fx.function)
- self.assert_join(self.callable(self.fx.a, self.fx.b),
- self.callable(self.fx.a, self.fx.a),
- self.fx.function)
- self.assert_join(self.callable(self.fx.a, self.fx.b),
- self.fx.function,
- self.fx.function)
-
- def test_type_vars(self) -> None:
- self.assert_join(self.fx.t, self.fx.t, self.fx.t)
- self.assert_join(self.fx.s, self.fx.s, self.fx.s)
- self.assert_join(self.fx.t, self.fx.s, self.fx.o)
-
- def test_void(self) -> None:
- self.assert_join(self.fx.void, self.fx.void, self.fx.void)
- self.assert_join(self.fx.void, self.fx.anyt, self.fx.anyt)
-
- # Join of any other type against void results in ErrorType, since there
- # is no other meaningful result.
- for t in [self.fx.a, self.fx.o, NoneTyp(), UnboundType('x'),
- self.fx.t, self.tuple(),
- self.callable(self.fx.a, self.fx.b)]:
- self.assert_join(t, self.fx.void, self.fx.err)
-
- def test_none(self) -> None:
- # Any type t joined with None results in t.
- for t in [NoneTyp(), self.fx.a, self.fx.o, UnboundType('x'),
- self.fx.t, self.tuple(),
- self.callable(self.fx.a, self.fx.b), self.fx.anyt]:
- self.assert_join(t, NoneTyp(), t)
-
- def test_unbound_type(self) -> None:
- self.assert_join(UnboundType('x'), UnboundType('x'), self.fx.anyt)
- self.assert_join(UnboundType('x'), UnboundType('y'), self.fx.anyt)
-
- # Any type t joined with an unbound type results in dynamic. Unbound
- # type means that there is an error somewhere in the program, so this
- # does not affect type safety (whatever the result).
- for t in [self.fx.a, self.fx.o, self.fx.ga, self.fx.t, self.tuple(),
- self.callable(self.fx.a, self.fx.b)]:
- self.assert_join(t, UnboundType('X'), self.fx.anyt)
-
- def test_any_type(self) -> None:
- # Join against 'Any' type always results in 'Any'.
- for t in [self.fx.anyt, self.fx.a, self.fx.o, NoneTyp(),
- UnboundType('x'), self.fx.void, self.fx.t, self.tuple(),
- self.callable(self.fx.a, self.fx.b)]:
- self.assert_join(t, self.fx.anyt, self.fx.anyt)
-
- def test_mixed_truth_restricted_type_simple(self) -> None:
- # join_simple against differently restricted truthiness types drops restrictions.
- true_a = true_only(self.fx.a)
- false_o = false_only(self.fx.o)
- j = join_simple(self.fx.o, true_a, false_o)
- assert_true(j.can_be_true)
- assert_true(j.can_be_false)
-
- def test_mixed_truth_restricted_type(self) -> None:
- # join_types against differently restricted truthiness types drops restrictions.
- true_any = true_only(AnyType())
- false_o = false_only(self.fx.o)
- j = join_types(true_any, false_o)
- assert_true(j.can_be_true)
- assert_true(j.can_be_false)
-
- def test_other_mixed_types(self) -> None:
- # In general, joining unrelated types produces object.
- for t1 in [self.fx.a, self.fx.t, self.tuple(),
- self.callable(self.fx.a, self.fx.b)]:
- for t2 in [self.fx.a, self.fx.t, self.tuple(),
- self.callable(self.fx.a, self.fx.b)]:
- if str(t1) != str(t2):
- self.assert_join(t1, t2, self.fx.o)
-
- def test_error_type(self) -> None:
- self.assert_join(self.fx.err, self.fx.anyt, self.fx.anyt)
-
-        # Joining ErrorType with any type other than Any results in ErrorType.
- for t in [self.fx.a, self.fx.o, NoneTyp(), UnboundType('x'),
- self.fx.void, self.fx.t, self.tuple(),
- self.callable(self.fx.a, self.fx.b)]:
- self.assert_join(t, self.fx.err, self.fx.err)
-
- def test_simple_generics(self) -> None:
- self.assert_join(self.fx.ga, self.fx.ga, self.fx.ga)
- self.assert_join(self.fx.ga, self.fx.gb, self.fx.ga)
- self.assert_join(self.fx.ga, self.fx.gd, self.fx.o)
- self.assert_join(self.fx.ga, self.fx.g2a, self.fx.o)
-
- self.assert_join(self.fx.ga, self.fx.nonet, self.fx.ga)
- self.assert_join(self.fx.ga, self.fx.anyt, self.fx.anyt)
-
- for t in [self.fx.a, self.fx.o, self.fx.t, self.tuple(),
- self.callable(self.fx.a, self.fx.b)]:
- self.assert_join(t, self.fx.ga, self.fx.o)
-
- def test_generics_with_multiple_args(self) -> None:
- self.assert_join(self.fx.hab, self.fx.hab, self.fx.hab)
- self.assert_join(self.fx.hab, self.fx.hbb, self.fx.hab)
- self.assert_join(self.fx.had, self.fx.haa, self.fx.o)
-
- def test_generics_with_inheritance(self) -> None:
- self.assert_join(self.fx.gsab, self.fx.gb, self.fx.gb)
- self.assert_join(self.fx.gsba, self.fx.gb, self.fx.ga)
- self.assert_join(self.fx.gsab, self.fx.gd, self.fx.o)
-
- def test_generics_with_inheritance_and_shared_supertype(self) -> None:
- self.assert_join(self.fx.gsba, self.fx.gs2a, self.fx.ga)
- self.assert_join(self.fx.gsab, self.fx.gs2a, self.fx.ga)
- self.assert_join(self.fx.gsab, self.fx.gs2d, self.fx.o)
-
- def test_generic_types_and_any(self) -> None:
- self.assert_join(self.fx.gdyn, self.fx.ga, self.fx.gdyn)
-
- def test_callables_with_any(self) -> None:
- self.assert_join(self.callable(self.fx.a, self.fx.a, self.fx.anyt,
- self.fx.a),
- self.callable(self.fx.a, self.fx.anyt, self.fx.a,
- self.fx.anyt),
- self.callable(self.fx.a, self.fx.anyt, self.fx.anyt,
- self.fx.anyt))
-
- def test_overloaded(self) -> None:
- c = self.callable
-
- def ov(*items: CallableType) -> Overloaded:
- return Overloaded(list(items))
-
- fx = self.fx
- func = fx.function
- c1 = c(fx.a, fx.a)
- c2 = c(fx.b, fx.b)
- c3 = c(fx.c, fx.c)
- self.assert_join(ov(c1, c2), c1, c1)
- self.assert_join(ov(c1, c2), c2, c2)
- self.assert_join(ov(c1, c2), ov(c1, c2), ov(c1, c2))
- self.assert_join(ov(c1, c2), ov(c1, c3), c1)
- self.assert_join(ov(c2, c1), ov(c3, c1), c1)
- self.assert_join(ov(c1, c2), c3, func)
-
- def test_overloaded_with_any(self) -> None:
- c = self.callable
-
- def ov(*items: CallableType) -> Overloaded:
- return Overloaded(list(items))
-
- fx = self.fx
- any = fx.anyt
- self.assert_join(ov(c(fx.a, fx.a), c(fx.b, fx.b)), c(any, fx.b), c(any, fx.b))
- self.assert_join(ov(c(fx.a, fx.a), c(any, fx.b)), c(fx.b, fx.b), c(any, fx.b))
-
- def test_join_interface_types(self) -> None:
- self.skip() # FIX
- self.assert_join(self.fx.f, self.fx.f, self.fx.f)
- self.assert_join(self.fx.f, self.fx.f2, self.fx.o)
- self.assert_join(self.fx.f, self.fx.f3, self.fx.f)
-
- def test_join_interface_and_class_types(self) -> None:
- self.skip() # FIX
-
- self.assert_join(self.fx.o, self.fx.f, self.fx.o)
- self.assert_join(self.fx.a, self.fx.f, self.fx.o)
-
- self.assert_join(self.fx.e, self.fx.f, self.fx.f)
-
- def test_join_class_types_with_interface_result(self) -> None:
- self.skip() # FIX
- # Unique result
- self.assert_join(self.fx.e, self.fx.e2, self.fx.f)
-
- # Ambiguous result
- self.assert_join(self.fx.e2, self.fx.e3, self.fx.err)
-
- def test_generic_interfaces(self) -> None:
- self.skip() # FIX
-
- fx = InterfaceTypeFixture()
-
- self.assert_join(fx.gfa, fx.gfa, fx.gfa)
- self.assert_join(fx.gfa, fx.gfb, fx.o)
-
- self.assert_join(fx.m1, fx.gfa, fx.gfa)
-
- self.assert_join(fx.m1, fx.gfb, fx.o)
-
- def test_simple_type_objects(self) -> None:
- t1 = self.type_callable(self.fx.a, self.fx.a)
- t2 = self.type_callable(self.fx.b, self.fx.b)
-
- self.assert_join(t1, t1, t1)
- j = join_types(t1, t1)
- assert isinstance(j, CallableType)
- assert_true(j.is_type_obj())
-
- self.assert_join(t1, t2, self.fx.type_type)
- self.assert_join(t1, self.fx.type_type, self.fx.type_type)
- self.assert_join(self.fx.type_type, self.fx.type_type,
- self.fx.type_type)
-
- def test_type_type(self) -> None:
- self.assert_join(self.fx.type_a, self.fx.type_b, self.fx.type_a)
- self.assert_join(self.fx.type_b, self.fx.type_any, self.fx.type_any)
- self.assert_join(self.fx.type_b, self.fx.type_type, self.fx.type_type)
- self.assert_join(self.fx.type_b, self.fx.type_c, self.fx.type_a)
- self.assert_join(self.fx.type_c, self.fx.type_d, TypeType(self.fx.o))
- self.assert_join(self.fx.type_type, self.fx.type_any, self.fx.type_type)
- self.assert_join(self.fx.type_b, self.fx.anyt, self.fx.anyt)
-
- # There are additional test cases in check-inference.test.
-
- # TODO: Function types + varargs and default args.
-
- def assert_join(self, s: Type, t: Type, join: Type) -> None:
- self.assert_simple_join(s, t, join)
- self.assert_simple_join(t, s, join)
-
- def assert_simple_join(self, s: Type, t: Type, join: Type) -> None:
- result = join_types(s, t)
- actual = str(result)
- expected = str(join)
- assert_equal(actual, expected,
- 'join({}, {}) == {{}} ({{}} expected)'.format(s, t))
- if not isinstance(s, ErrorType) and not isinstance(result, ErrorType):
- assert_true(is_subtype(s, result),
- '{} not subtype of {}'.format(s, result))
- if not isinstance(t, ErrorType) and not isinstance(result, ErrorType):
- assert_true(is_subtype(t, result),
- '{} not subtype of {}'.format(t, result))
-
- def tuple(self, *a: Type) -> TupleType:
- return TupleType(list(a), self.fx.std_tuple)
-
- def callable(self, *a: Type) -> CallableType:
- """callable(a1, ..., an, r) constructs a callable with argument types
- a1, ... an and return type r.
- """
- n = len(a) - 1
- return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n,
- a[-1], self.fx.function)
-
- def type_callable(self, *a: Type) -> CallableType:
- """type_callable(a1, ..., an, r) constructs a callable with
- argument types a1, ... an and return type r, and which
- represents a type.
- """
- n = len(a) - 1
- return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n,
- a[-1], self.fx.type_type)
-
-
-class MeetSuite(Suite):
- def set_up(self) -> None:
- self.fx = TypeFixture()
-
- def test_trivial_cases(self) -> None:
- for simple in self.fx.void, self.fx.a, self.fx.o, self.fx.b:
- self.assert_meet(simple, simple, simple)
-
- def test_class_subtyping(self) -> None:
- self.assert_meet(self.fx.a, self.fx.o, self.fx.a)
- self.assert_meet(self.fx.a, self.fx.b, self.fx.b)
- self.assert_meet(self.fx.b, self.fx.o, self.fx.b)
- self.assert_meet(self.fx.a, self.fx.d, NoneTyp())
- self.assert_meet(self.fx.b, self.fx.c, NoneTyp())
-
- def test_tuples(self) -> None:
- self.assert_meet(self.tuple(), self.tuple(), self.tuple())
- self.assert_meet(self.tuple(self.fx.a),
- self.tuple(self.fx.a),
- self.tuple(self.fx.a))
- self.assert_meet(self.tuple(self.fx.b, self.fx.c),
- self.tuple(self.fx.a, self.fx.d),
- self.tuple(self.fx.b, NoneTyp()))
-
- self.assert_meet(self.tuple(self.fx.a, self.fx.a),
- self.fx.std_tuple,
- NoneTyp())
- self.assert_meet(self.tuple(self.fx.a),
- self.tuple(self.fx.a, self.fx.a),
- NoneTyp())
-
- def test_function_types(self) -> None:
- self.assert_meet(self.callable(self.fx.a, self.fx.b),
- self.callable(self.fx.a, self.fx.b),
- self.callable(self.fx.a, self.fx.b))
-
- self.assert_meet(self.callable(self.fx.a, self.fx.b),
- self.callable(self.fx.b, self.fx.b),
- NoneTyp())
- self.assert_meet(self.callable(self.fx.a, self.fx.b),
- self.callable(self.fx.a, self.fx.a),
- NoneTyp())
-
- def test_type_vars(self) -> None:
- self.assert_meet(self.fx.t, self.fx.t, self.fx.t)
- self.assert_meet(self.fx.s, self.fx.s, self.fx.s)
- self.assert_meet(self.fx.t, self.fx.s, NoneTyp())
-
- def test_void(self) -> None:
- self.assert_meet(self.fx.void, self.fx.void, self.fx.void)
- self.assert_meet(self.fx.void, self.fx.anyt, self.fx.void)
-
- # Meet of any other type against void results in ErrorType, since there
- # is no meaningful valid result.
- for t in [self.fx.a, self.fx.o, UnboundType('x'), NoneTyp(),
- self.fx.t, self.tuple(),
- self.callable(self.fx.a, self.fx.b)]:
- self.assert_meet(t, self.fx.void, self.fx.err)
-
- def test_none(self) -> None:
- self.assert_meet(NoneTyp(), NoneTyp(), NoneTyp())
-
- self.assert_meet(NoneTyp(), self.fx.anyt, NoneTyp())
- self.assert_meet(NoneTyp(), self.fx.void, self.fx.err)
-
- # Any type t joined with None results in None, unless t is any or
- # void.
- for t in [self.fx.a, self.fx.o, UnboundType('x'), self.fx.t,
- self.tuple(), self.callable(self.fx.a, self.fx.b)]:
- self.assert_meet(t, NoneTyp(), NoneTyp())
-
- def test_unbound_type(self) -> None:
- self.assert_meet(UnboundType('x'), UnboundType('x'), self.fx.anyt)
- self.assert_meet(UnboundType('x'), UnboundType('y'), self.fx.anyt)
-
- self.assert_meet(UnboundType('x'), self.fx.void, self.fx.err)
- self.assert_meet(UnboundType('x'), self.fx.anyt, UnboundType('x'))
-
- # The meet of any type t with an unbound type results in dynamic
- # (except for void). Unbound type means that there is an error
- # somewhere in the program, so this does not affect type safety.
- for t in [self.fx.a, self.fx.o, self.fx.t, self.tuple(),
- self.callable(self.fx.a, self.fx.b)]:
- self.assert_meet(t, UnboundType('X'), self.fx.anyt)
-
- def test_dynamic_type(self) -> None:
- # Meet against dynamic type always results in dynamic.
- for t in [self.fx.anyt, self.fx.a, self.fx.o, NoneTyp(),
- UnboundType('x'), self.fx.void, self.fx.t, self.tuple(),
- self.callable(self.fx.a, self.fx.b)]:
- self.assert_meet(t, self.fx.anyt, t)
-
- def test_error_type(self) -> None:
- self.assert_meet(self.fx.err, self.fx.anyt, self.fx.err)
-
- # Meet against any type except dynamic results in ErrorType.
- for t in [self.fx.a, self.fx.o, NoneTyp(), UnboundType('x'),
- self.fx.void, self.fx.t, self.tuple(),
- self.callable(self.fx.a, self.fx.b)]:
- self.assert_meet(t, self.fx.err, self.fx.err)
-
- def test_simple_generics(self) -> None:
- self.assert_meet(self.fx.ga, self.fx.ga, self.fx.ga)
- self.assert_meet(self.fx.ga, self.fx.o, self.fx.ga)
- self.assert_meet(self.fx.ga, self.fx.gb, self.fx.gb)
- self.assert_meet(self.fx.ga, self.fx.gd, self.fx.nonet)
- self.assert_meet(self.fx.ga, self.fx.g2a, self.fx.nonet)
-
- self.assert_meet(self.fx.ga, self.fx.nonet, self.fx.nonet)
- self.assert_meet(self.fx.ga, self.fx.anyt, self.fx.ga)
-
- for t in [self.fx.a, self.fx.t, self.tuple(),
- self.callable(self.fx.a, self.fx.b)]:
- self.assert_meet(t, self.fx.ga, self.fx.nonet)
-
- def test_generics_with_multiple_args(self) -> None:
- self.assert_meet(self.fx.hab, self.fx.hab, self.fx.hab)
- self.assert_meet(self.fx.hab, self.fx.haa, self.fx.hab)
- self.assert_meet(self.fx.hab, self.fx.had, self.fx.nonet)
- self.assert_meet(self.fx.hab, self.fx.hbb, self.fx.hbb)
-
- def test_generics_with_inheritance(self) -> None:
- self.assert_meet(self.fx.gsab, self.fx.gb, self.fx.gsab)
- self.assert_meet(self.fx.gsba, self.fx.gb, self.fx.nonet)
-
- def test_generics_with_inheritance_and_shared_supertype(self) -> None:
- self.assert_meet(self.fx.gsba, self.fx.gs2a, self.fx.nonet)
- self.assert_meet(self.fx.gsab, self.fx.gs2a, self.fx.nonet)
-
- def test_generic_types_and_dynamic(self) -> None:
- self.assert_meet(self.fx.gdyn, self.fx.ga, self.fx.ga)
-
- def test_callables_with_dynamic(self) -> None:
- self.assert_meet(self.callable(self.fx.a, self.fx.a, self.fx.anyt,
- self.fx.a),
- self.callable(self.fx.a, self.fx.anyt, self.fx.a,
- self.fx.anyt),
- self.callable(self.fx.a, self.fx.anyt, self.fx.anyt,
- self.fx.anyt))
-
- def test_meet_interface_types(self) -> None:
- self.assert_meet(self.fx.f, self.fx.f, self.fx.f)
- self.assert_meet(self.fx.f, self.fx.f2, self.fx.nonet)
- self.assert_meet(self.fx.f, self.fx.f3, self.fx.f3)
-
- def test_meet_interface_and_class_types(self) -> None:
- self.assert_meet(self.fx.o, self.fx.f, self.fx.f)
- self.assert_meet(self.fx.a, self.fx.f, self.fx.nonet)
-
- self.assert_meet(self.fx.e, self.fx.f, self.fx.e)
-
- def test_meet_class_types_with_shared_interfaces(self) -> None:
- # These have nothing special with respect to meets, unlike joins. These
- # are for completeness only.
- self.assert_meet(self.fx.e, self.fx.e2, self.fx.nonet)
- self.assert_meet(self.fx.e2, self.fx.e3, self.fx.nonet)
-
- def test_meet_with_generic_interfaces(self) -> None:
- # TODO fix
- self.skip()
-
- fx = InterfaceTypeFixture()
- self.assert_meet(fx.gfa, fx.m1, fx.m1)
- self.assert_meet(fx.gfa, fx.gfa, fx.gfa)
- self.assert_meet(fx.gfb, fx.m1, fx.nonet)
-
- def test_type_type(self) -> None:
- self.assert_meet(self.fx.type_a, self.fx.type_b, self.fx.type_b)
- self.assert_meet(self.fx.type_b, self.fx.type_any, self.fx.type_b)
- self.assert_meet(self.fx.type_b, self.fx.type_type, self.fx.type_b)
- self.assert_meet(self.fx.type_b, self.fx.type_c, self.fx.nonet)
- self.assert_meet(self.fx.type_c, self.fx.type_d, self.fx.nonet)
- self.assert_meet(self.fx.type_type, self.fx.type_any, self.fx.type_any)
- self.assert_meet(self.fx.type_b, self.fx.anyt, self.fx.type_b)
-
- # FIX generic interfaces + ranges
-
- def assert_meet(self, s: Type, t: Type, meet: Type) -> None:
- self.assert_simple_meet(s, t, meet)
- self.assert_simple_meet(t, s, meet)
-
- def assert_simple_meet(self, s: Type, t: Type, meet: Type) -> None:
- result = meet_types(s, t)
- actual = str(result)
- expected = str(meet)
- assert_equal(actual, expected,
- 'meet({}, {}) == {{}} ({{}} expected)'.format(s, t))
- if not isinstance(s, ErrorType) and not isinstance(result, ErrorType):
- assert_true(is_subtype(result, s),
- '{} not subtype of {}'.format(result, s))
- if not isinstance(t, ErrorType) and not isinstance(result, ErrorType):
- assert_true(is_subtype(result, t),
- '{} not subtype of {}'.format(result, t))
-
- def tuple(self, *a: Type) -> TupleType:
- return TupleType(list(a), self.fx.std_tuple)
-
- def callable(self, *a: Type) -> CallableType:
- """callable(a1, ..., an, r) constructs a callable with argument types
- a1, ... an and return type r.
- """
- n = len(a) - 1
- return CallableType(list(a[:-1]),
- [ARG_POS] * n, [None] * n,
- a[-1], self.fx.function)
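The suites deleted above exercised mypy's type-lattice helpers. The invariant they enforced is worth keeping in mind when reading the join/meet changes elsewhere in this release: the join of two types must be a common supertype of both, and the meet a common subtype. The sketch below restates that property check using the same internal helpers the removed code called (join_types, meet_types, is_subtype); the import paths are an assumption based on mypy's module layout, and the old ErrorType guards are dropped since ErrorType itself is removed in mypy/types.py further down. Illustrative only, not part of this diff.

    from mypy.join import join_types
    from mypy.meet import meet_types
    from mypy.subtypes import is_subtype
    from mypy.types import Type

    def check_lattice_properties(s: Type, t: Type) -> None:
        # join(s, t) must be an upper bound of both arguments...
        j = join_types(s, t)
        assert is_subtype(s, j) and is_subtype(t, j)
        # ...and meet(s, t) must be a lower bound of both.
        m = meet_types(s, t)
        assert is_subtype(m, s) and is_subtype(m, t)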
diff --git a/mypy/test/update.py b/mypy/test/update.py
deleted file mode 100644
index e69de29..0000000
diff --git a/mypy/traverser.py b/mypy/traverser.py
index 35da22a..d748b02 100644
--- a/mypy/traverser.py
+++ b/mypy/traverser.py
@@ -7,9 +7,10 @@ from mypy.nodes import (
ForStmt, ReturnStmt, AssertStmt, DelStmt, IfStmt, RaiseStmt,
TryStmt, WithStmt, MemberExpr, OpExpr, SliceExpr, CastExpr, RevealTypeExpr,
UnaryExpr, ListExpr, TupleExpr, DictExpr, SetExpr, IndexExpr,
- GeneratorExpr, ListComprehension, ConditionalExpr, TypeApplication,
- FuncExpr, ComparisonExpr, OverloadedFuncDef, YieldFromExpr,
- YieldExpr
+ GeneratorExpr, ListComprehension, SetComprehension, DictionaryComprehension,
+ ConditionalExpr, TypeApplication, ExecStmt, Import, ImportFrom,
+ LambdaExpr, ComparisonExpr, OverloadedFuncDef, YieldFromExpr,
+ YieldExpr, StarExpr, BackquoteExpr, AwaitExpr, PrintStmt,
)
@@ -49,6 +50,8 @@ class TraverserVisitor(NodeVisitor[None]):
def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None:
for item in o.items:
item.accept(self)
+ if o.impl:
+ o.impl.accept(self)
def visit_class_def(self, o: ClassDef) -> None:
for d in o.decorators:
@@ -209,9 +212,22 @@ class TraverserVisitor(NodeVisitor[None]):
cond.accept(self)
o.left_expr.accept(self)
+ def visit_dictionary_comprehension(self, o: DictionaryComprehension) -> None:
+ for index, sequence, conditions in zip(o.indices, o.sequences,
+ o.condlists):
+ sequence.accept(self)
+ index.accept(self)
+ for cond in conditions:
+ cond.accept(self)
+ o.key.accept(self)
+ o.value.accept(self)
+
def visit_list_comprehension(self, o: ListComprehension) -> None:
o.generator.accept(self)
+ def visit_set_comprehension(self, o: SetComprehension) -> None:
+ o.generator.accept(self)
+
def visit_conditional_expr(self, o: ConditionalExpr) -> None:
o.cond.accept(self)
o.if_expr.accept(self)
@@ -220,5 +236,29 @@ class TraverserVisitor(NodeVisitor[None]):
def visit_type_application(self, o: TypeApplication) -> None:
o.expr.accept(self)
- def visit_func_expr(self, o: FuncExpr) -> None:
+ def visit_lambda_expr(self, o: LambdaExpr) -> None:
self.visit_func(o)
+
+ def visit_star_expr(self, o: StarExpr) -> None:
+ o.expr.accept(self)
+
+ def visit_backquote_expr(self, o: BackquoteExpr) -> None:
+ o.expr.accept(self)
+
+ def visit_await_expr(self, o: AwaitExpr) -> None:
+ o.expr.accept(self)
+
+ def visit_import(self, o: Import) -> None:
+ for a in o.assignments:
+ a.accept(self)
+
+ def visit_import_from(self, o: ImportFrom) -> None:
+ for a in o.assignments:
+ a.accept(self)
+
+ def visit_print_stmt(self, o: PrintStmt) -> None:
+ for arg in o.args:
+ arg.accept(self)
+
+ def visit_exec_stmt(self, o: ExecStmt) -> None:
+ o.expr.accept(self)
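With the visitor methods added above, TraverserVisitor now descends into set and dictionary comprehensions, star/backquote/await expressions, imports, and print/exec statements instead of silently skipping them. A small illustrative subclass, assuming the accept/visit protocol shown in the hunk (the collector itself is made up for this example and is not part of the diff): it records every AwaitExpr in a tree.

    from typing import List

    from mypy.nodes import AwaitExpr, Node
    from mypy.traverser import TraverserVisitor

    class AwaitCollector(TraverserVisitor):
        def __init__(self) -> None:
            self.awaits = []  # type: List[AwaitExpr]

        def visit_await_expr(self, o: AwaitExpr) -> None:
            self.awaits.append(o)
            # Keep traversing so awaits nested inside the awaited
            # expression are collected as well.
            super().visit_await_expr(o)

    def collect_awaits(node: Node) -> List[AwaitExpr]:
        collector = AwaitCollector()
        node.accept(collector)
        return collector.awaits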
diff --git a/mypy/treetransform.py b/mypy/treetransform.py
index e6e4678..170be48 100644
--- a/mypy/treetransform.py
+++ b/mypy/treetransform.py
@@ -3,7 +3,7 @@
Subclass TransformVisitor to perform non-trivial transformations.
"""
-from typing import List, Dict, cast
+from typing import List, Dict, cast, Optional, Iterable
from mypy.nodes import (
MypyFile, Import, Node, ImportAll, ImportFrom, FuncItem, FuncDef,
@@ -14,12 +14,13 @@ from mypy.nodes import (
CastExpr, RevealTypeExpr, TupleExpr, GeneratorExpr, ListComprehension, ListExpr,
ConditionalExpr, DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, BytesExpr,
UnicodeExpr, FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr,
- SliceExpr, OpExpr, UnaryExpr, FuncExpr, TypeApplication, PrintStmt,
+ SliceExpr, OpExpr, UnaryExpr, LambdaExpr, TypeApplication, PrintStmt,
SymbolTable, RefExpr, TypeVarExpr, NewTypeExpr, PromoteExpr,
ComparisonExpr, TempNode, StarExpr, Statement, Expression,
YieldFromExpr, NamedTupleExpr, TypedDictExpr, NonlocalDecl, SetComprehension,
DictionaryComprehension, ComplexExpr, TypeAliasExpr, EllipsisExpr,
YieldExpr, ExecStmt, Argument, BackquoteExpr, AwaitExpr,
+ OverloadPart, EnumCallExpr,
)
from mypy.types import Type, FunctionLike
from mypy.traverser import TraverserVisitor
@@ -75,7 +76,7 @@ class TransformVisitor(NodeVisitor[Node]):
return ImportAll(node.id, node.relative)
def copy_argument(self, argument: Argument) -> Argument:
- init_stmt = None # type: AssignmentStmt
+ init_stmt = None # type: Optional[AssignmentStmt]
if argument.initialization_statement:
init_lvalue = cast(
@@ -143,8 +144,8 @@ class TransformVisitor(NodeVisitor[Node]):
else:
return new
- def visit_func_expr(self, node: FuncExpr) -> FuncExpr:
- new = FuncExpr([self.copy_argument(arg) for arg in node.arguments],
+ def visit_lambda_expr(self, node: LambdaExpr) -> LambdaExpr:
+ new = LambdaExpr([self.copy_argument(arg) for arg in node.arguments],
self.block(node.body),
cast(FunctionLike, self.optional_type(node.type)))
self.copy_function_attributes(new, node)
@@ -160,14 +161,15 @@ class TransformVisitor(NodeVisitor[Node]):
new.line = original.line
def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> OverloadedFuncDef:
- items = [self.visit_decorator(decorator)
- for decorator in node.items]
+ items = [cast(OverloadPart, item.accept(self)) for item in node.items]
for newitem, olditem in zip(items, node.items):
newitem.line = olditem.line
new = OverloadedFuncDef(items)
new._fullname = node._fullname
- new.type = self.type(node.type)
+ new.type = self.optional_type(node.type)
new.info = node.info
+ if node.impl:
+ new.impl = cast(OverloadPart, node.impl.accept(self))
return new
def visit_class_def(self, node: ClassDef) -> ClassDef:
@@ -358,7 +360,7 @@ class TransformVisitor(NodeVisitor[Node]):
return YieldFromExpr(self.expr(node.expr))
def visit_yield_expr(self, node: YieldExpr) -> YieldExpr:
- return YieldExpr(self.expr(node.expr))
+ return YieldExpr(self.optional_expr(node.expr))
def visit_await_expr(self, node: AwaitExpr) -> AwaitExpr:
return AwaitExpr(self.expr(node.expr))
@@ -439,10 +441,11 @@ class TransformVisitor(NodeVisitor[Node]):
def visit_dictionary_comprehension(self, node: DictionaryComprehension
) -> DictionaryComprehension:
return DictionaryComprehension(self.expr(node.key), self.expr(node.value),
- [self.expr(index) for index in node.indices],
- [self.expr(s) for s in node.sequences],
- [[self.expr(cond) for cond in conditions]
- for conditions in node.condlists])
+ [self.expr(index) for index in node.indices],
+ [self.expr(s) for s in node.sequences],
+ [[self.expr(cond) for cond in conditions]
+ for conditions in node.condlists],
+ node.is_async)
def visit_generator_expr(self, node: GeneratorExpr) -> GeneratorExpr:
return self.duplicate_generator(node)
@@ -452,7 +455,8 @@ class TransformVisitor(NodeVisitor[Node]):
[self.expr(index) for index in node.indices],
[self.expr(s) for s in node.sequences],
[[self.expr(cond) for cond in conditions]
- for conditions in node.condlists])
+ for conditions in node.condlists],
+ node.is_async)
def visit_slice_expr(self, node: SliceExpr) -> SliceExpr:
return SliceExpr(self.optional_expr(node.begin_index),
@@ -483,6 +487,9 @@ class TransformVisitor(NodeVisitor[Node]):
def visit_namedtuple_expr(self, node: NamedTupleExpr) -> NamedTupleExpr:
return NamedTupleExpr(node.info)
+ def visit_enum_call_expr(self, node: EnumCallExpr) -> EnumCallExpr:
+ return EnumCallExpr(node.info, node.items, node.values)
+
def visit_typeddict_expr(self, node: TypedDictExpr) -> Node:
return TypedDictExpr(node.info)
@@ -519,7 +526,7 @@ class TransformVisitor(NodeVisitor[Node]):
#
# All the node helpers also propagate line numbers.
- def optional_expr(self, expr: Expression) -> Expression:
+ def optional_expr(self, expr: Optional[Expression]) -> Optional[Expression]:
if expr:
return self.expr(expr)
else:
@@ -530,7 +537,7 @@ class TransformVisitor(NodeVisitor[Node]):
new.line = block.line
return new
- def optional_block(self, block: Block) -> Block:
+ def optional_block(self, block: Optional[Block]) -> Optional[Block]:
if block:
return self.block(block)
else:
@@ -542,7 +549,8 @@ class TransformVisitor(NodeVisitor[Node]):
def expressions(self, expressions: List[Expression]) -> List[Expression]:
return [self.expr(expr) for expr in expressions]
- def optional_expressions(self, expressions: List[Expression]) -> List[Expression]:
+ def optional_expressions(self, expressions: Iterable[Optional[Expression]]
+ ) -> List[Optional[Expression]]:
return [self.optional_expr(expr) for expr in expressions]
def blocks(self, blocks: List[Block]) -> List[Block]:
@@ -551,8 +559,8 @@ class TransformVisitor(NodeVisitor[Node]):
def names(self, names: List[NameExpr]) -> List[NameExpr]:
return [self.duplicate_name(name) for name in names]
- def optional_names(self, names: List[NameExpr]) -> List[NameExpr]:
- result = [] # type: List[NameExpr]
+ def optional_names(self, names: Iterable[Optional[NameExpr]]) -> List[Optional[NameExpr]]:
+ result = [] # type: List[Optional[NameExpr]]
for name in names:
if name:
result.append(self.duplicate_name(name))
@@ -564,7 +572,7 @@ class TransformVisitor(NodeVisitor[Node]):
# Override this method to transform types.
return type
- def optional_type(self, type: Type) -> Type:
+ def optional_type(self, type: Optional[Type]) -> Optional[Type]:
if type:
return self.type(type)
else:
diff --git a/mypy/tvar_scope.py b/mypy/tvar_scope.py
new file mode 100644
index 0000000..3cdb67b
--- /dev/null
+++ b/mypy/tvar_scope.py
@@ -0,0 +1,82 @@
+from typing import Optional, Dict, Union
+from mypy.types import TypeVarDef, TypeVarId
+from mypy.nodes import TypeVarExpr, SymbolTableNode
+
+
+class TypeVarScope:
+ """Scope that holds bindings for type variables. Node fullname -> TypeVarDef."""
+
+ def __init__(self,
+ parent: Optional['TypeVarScope'] = None,
+ is_class_scope: bool = False,
+ prohibited: Optional['TypeVarScope'] = None) -> None:
+ """Initializer for TypeVarScope
+
+ Parameters:
+ parent: the outer scope for this scope
+ is_class_scope: True if this represents a generic class
+            prohibited: Type variables that aren't strictly in scope,
+                but can't be bound because they're part of an outer class's scope.
+ """
+ self.scope = {} # type: Dict[str, TypeVarDef]
+ self.parent = parent
+ self.func_id = 0
+ self.class_id = 0
+ self.is_class_scope = is_class_scope
+ self.prohibited = prohibited
+ if parent is not None:
+ self.func_id = parent.func_id
+ self.class_id = parent.class_id
+
+ def get_function_scope(self) -> Optional['TypeVarScope']:
+ """Get the nearest parent that's a function scope, not a class scope"""
+ it = self
+ while it is not None and it.is_class_scope:
+ it = it.parent
+ return it
+
+ def allow_binding(self, fullname: str) -> bool:
+ if fullname in self.scope:
+ return False
+ elif self.parent and not self.parent.allow_binding(fullname):
+ return False
+ elif self.prohibited and not self.prohibited.allow_binding(fullname):
+ return False
+ return True
+
+ def method_frame(self) -> 'TypeVarScope':
+ """A new scope frame for binding a method"""
+ return TypeVarScope(self, False, None)
+
+ def class_frame(self) -> 'TypeVarScope':
+ """A new scope frame for binding a class. Prohibits *this* class's tvars"""
+ return TypeVarScope(self.get_function_scope(), True, self)
+
+ def bind(self, name: str, tvar_expr: TypeVarExpr) -> TypeVarDef:
+ if self.is_class_scope:
+ self.class_id += 1
+ i = self.class_id
+ else:
+ self.func_id -= 1
+ i = self.func_id
+ tvar_def = TypeVarDef(
+ name, i, values=tvar_expr.values,
+ upper_bound=tvar_expr.upper_bound, variance=tvar_expr.variance,
+ line=tvar_expr.line, column=tvar_expr.column)
+ self.scope[tvar_expr.fullname()] = tvar_def
+ return tvar_def
+
+ def get_binding(self, item: Union[str, SymbolTableNode]) -> Optional[TypeVarDef]:
+ fullname = item.fullname if isinstance(item, SymbolTableNode) else item
+ if fullname in self.scope:
+ return self.scope[fullname]
+ elif self.parent is not None:
+ return self.parent.get_binding(fullname)
+ else:
+ return None
+
+ def __str__(self) -> str:
+ me = ", ".join('{}: {}`{}'.format(k, v.name, v.id) for k, v in self.scope.items())
+ if self.parent is None:
+ return me
+ return "{} <- {}".format(str(self.parent), me)
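A sketch of how the new scope type is meant to be driven (the semantic analyzer is the real caller); the TypeVarExpr arguments stand for looked-up symbol nodes with distinct fullnames and are not constructed here, so treat this as an assumption-laden usage outline rather than working analyzer code.

    from mypy.nodes import TypeVarExpr
    from mypy.tvar_scope import TypeVarScope

    def sketch_scope_usage(t_expr: TypeVarExpr, s_expr: TypeVarExpr) -> None:
        module_scope = TypeVarScope()
        class_scope = module_scope.class_frame()
        class_scope.bind('T', t_expr)      # class-scope ids count up: 1, 2, ...
        method_scope = class_scope.method_frame()
        method_scope.bind('S', s_expr)     # function-scope ids count down: -1, -2, ...
        # Lookups walk the parent chain, so the method still sees the class tvar.
        assert method_scope.get_binding(t_expr.fullname()) is not None
        # A class nested inside class_scope may not rebind the outer class's tvar.
        nested_class_scope = class_scope.class_frame()
        assert not nested_class_scope.allow_binding(t_expr.fullname())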
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index 4d9f170..2a506b7 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -1,25 +1,36 @@
"""Semantic analysis of types"""
from collections import OrderedDict
-from typing import Callable, cast, List, Optional
+from typing import Callable, List, Optional, Set, Tuple, Iterator, TypeVar, Iterable
+from itertools import chain
+
+from contextlib import contextmanager
from mypy.types import (
Type, UnboundType, TypeVarType, TupleType, TypedDictType, UnionType, Instance,
- AnyType, CallableType, Void, NoneTyp, DeletedType, TypeList, TypeVarDef, TypeVisitor,
+ AnyType, CallableType, NoneTyp, DeletedType, TypeList, TypeVarDef, TypeVisitor,
+ SyntheticTypeVisitor,
StarType, PartialType, EllipsisType, UninhabitedType, TypeType, get_typ_args, set_typ_args,
+ CallableArgument, get_type_vars, TypeQuery, union_items
)
+
from mypy.nodes import (
- BOUND_TVAR, UNBOUND_TVAR, TYPE_ALIAS, UNBOUND_IMPORTED,
+ TVAR, TYPE_ALIAS, UNBOUND_IMPORTED,
TypeInfo, Context, SymbolTableNode, Var, Expression,
- IndexExpr, RefExpr
+ IndexExpr, RefExpr, nongen_builtins, check_arg_names, check_arg_kinds,
+ ARG_POS, ARG_NAMED, ARG_OPT, ARG_NAMED_OPT, ARG_STAR, ARG_STAR2, TypeVarExpr
)
+from mypy.tvar_scope import TypeVarScope
from mypy.sametypes import is_same_type
from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError
-from mypy.subtypes import satisfies_upper_bound
+from mypy.subtypes import is_subtype
from mypy import nodes
from mypy import experiments
+T = TypeVar('T')
+
+
type_constructors = {
'typing.Callable',
'typing.Optional',
@@ -28,11 +39,22 @@ type_constructors = {
'typing.Union',
}
+ARG_KINDS_BY_CONSTRUCTOR = {
+ 'mypy_extensions.Arg': ARG_POS,
+ 'mypy_extensions.DefaultArg': ARG_OPT,
+ 'mypy_extensions.NamedArg': ARG_NAMED,
+ 'mypy_extensions.DefaultNamedArg': ARG_NAMED_OPT,
+ 'mypy_extensions.VarArg': ARG_STAR,
+ 'mypy_extensions.KwArg': ARG_STAR2,
+}
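This table, together with the CallableArgument handling added to analyze_callable_type later in this file, lets extended Callable types spell out argument names and kinds via the helpers in the bundled mypy_extensions package. An illustrative annotation of the form the analyzer now understands (the Handler alias and parameter names are made up for the example):

    from typing import Callable
    from mypy_extensions import Arg, DefaultArg, VarArg, NamedArg, KwArg

    # Roughly the signature
    # (x: int, y: str = ..., *args: int, *, z: float, **kwargs: str) -> bool
    Handler = Callable[
        [Arg(int, 'x'),          # ARG_POS
         DefaultArg(str, 'y'),   # ARG_OPT
         VarArg(int),            # ARG_STAR (no name allowed)
         NamedArg(float, 'z'),   # ARG_NAMED
         KwArg(str)],            # ARG_STAR2 (no name allowed)
        bool,
    ]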
+
def analyze_type_alias(node: Expression,
lookup_func: Callable[[str, Context], SymbolTableNode],
lookup_fqn_func: Callable[[str], SymbolTableNode],
- fail_func: Callable[[str, Context], None]) -> Type:
+ tvar_scope: TypeVarScope,
+ fail_func: Callable[[str, Context], None],
+ allow_unnormalized: bool = False) -> Optional[Type]:
"""Return type if node is valid as a type alias rvalue.
Return None otherwise. 'node' must have been semantically analyzed.
@@ -41,7 +63,10 @@ def analyze_type_alias(node: Expression,
# that we don't support straight string literals as type aliases
# (only string literals within index expressions).
if isinstance(node, RefExpr):
- if node.kind == UNBOUND_TVAR or node.kind == BOUND_TVAR:
+ # Note that this misses the case where someone tried to use a
+ # class-referenced type variable as a type alias. It's easier to catch
+ # that one in checkmember.py
+ if node.kind == TVAR:
fail_func('Type variable "{}" is invalid as target for type alias'.format(
node.fullname), node)
return None
@@ -56,6 +81,10 @@ def analyze_type_alias(node: Expression,
base.fullname in type_constructors or
base.kind == TYPE_ALIAS):
return None
+ # Enums can't be generic, and without this check we may incorrectly interpret indexing
+ # an Enum class as creating a type alias.
+ if isinstance(base.node, TypeInfo) and base.node.is_enum:
+ return None
else:
return None
else:
@@ -67,11 +96,20 @@ def analyze_type_alias(node: Expression,
except TypeTranslationError:
fail_func('Invalid type alias', node)
return None
- analyzer = TypeAnalyser(lookup_func, lookup_fqn_func, fail_func, aliasing=True)
+ analyzer = TypeAnalyser(lookup_func, lookup_fqn_func, tvar_scope, fail_func, aliasing=True,
+ allow_unnormalized=allow_unnormalized)
return type.accept(analyzer)
-class TypeAnalyser(TypeVisitor[Type]):
+def no_subscript_builtin_alias(name: str, propose_alt: bool = True) -> str:
+ msg = '"{}" is not subscriptable'.format(name.split('.')[-1])
+ replacement = nongen_builtins[name]
+ if replacement and propose_alt:
+ msg += ', use "{}" instead'.format(replacement)
+ return msg
+
+
+class TypeAnalyser(SyntheticTypeVisitor[Type]):
"""Semantic analyzer for types (semantic analysis pass 2).
Converts unbound types into bound types.
@@ -80,19 +118,27 @@ class TypeAnalyser(TypeVisitor[Type]):
def __init__(self,
lookup_func: Callable[[str, Context], SymbolTableNode],
lookup_fqn_func: Callable[[str], SymbolTableNode],
+ tvar_scope: TypeVarScope,
fail_func: Callable[[str, Context], None], *,
- aliasing: bool = False) -> None:
+ aliasing: bool = False,
+ allow_tuple_literal: bool = False,
+ allow_unnormalized: bool = False) -> None:
self.lookup = lookup_func
self.lookup_fqn_func = lookup_fqn_func
self.fail = fail_func
+ self.tvar_scope = tvar_scope
self.aliasing = aliasing
+ self.allow_tuple_literal = allow_tuple_literal
+ # Positive if we are analyzing arguments of another (outer) type
+ self.nesting_level = 0
+ self.allow_unnormalized = allow_unnormalized
def visit_unbound_type(self, t: UnboundType) -> Type:
if t.optional:
t.optional = False
# We don't need to worry about double-wrapping Optionals or
# wrapping Anys: Union simplification will take care of that.
- return UnionType.make_simplified_union([self.visit_unbound_type(t), NoneTyp()])
+ return make_optional_type(self.visit_unbound_type(t))
sym = self.lookup(t.name, t)
if sym is not None:
if sym.node is None:
@@ -101,18 +147,18 @@ class TypeAnalyser(TypeVisitor[Type]):
self.fail('Internal error (node is None, kind={})'.format(sym.kind), t)
return AnyType()
fullname = sym.node.fullname()
- if sym.kind == BOUND_TVAR:
+ if (fullname in nongen_builtins and t.args and
+ not sym.normalized and not self.allow_unnormalized):
+ self.fail(no_subscript_builtin_alias(fullname), t)
+ tvar_def = self.tvar_scope.get_binding(sym)
+ if sym.kind == TVAR and tvar_def is not None:
if len(t.args) > 0:
self.fail('Type variable "{}" used with arguments'.format(
t.name), t)
- assert sym.tvar_def is not None
- return TypeVarType(sym.tvar_def, t.line)
+ return TypeVarType(tvar_def, t.line)
elif fullname == 'builtins.None':
- if experiments.STRICT_OPTIONAL:
- return NoneTyp(is_ret_type=t.is_ret_type)
- else:
- return Void()
- elif fullname == 'typing.Any':
+ return NoneTyp()
+ elif fullname == 'typing.Any' or fullname == 'builtins.Any':
return AnyType()
elif fullname == 'typing.Tuple':
if len(t.args) == 0 and not t.empty_tuple_index:
@@ -120,24 +166,21 @@ class TypeAnalyser(TypeVisitor[Type]):
return self.builtin_type('builtins.tuple')
if len(t.args) == 2 and isinstance(t.args[1], EllipsisType):
# Tuple[T, ...] (uniform, variable-length tuple)
- instance = self.builtin_type('builtins.tuple', [t.args[0].accept(self)])
+ instance = self.builtin_type('builtins.tuple', [self.anal_type(t.args[0])])
instance.line = t.line
return instance
return self.tuple_type(self.anal_array(t.args))
elif fullname == 'typing.Union':
items = self.anal_array(t.args)
- items = [item for item in items if not isinstance(item, Void)]
+ if not experiments.STRICT_OPTIONAL:
+ items = [item for item in items if not isinstance(item, NoneTyp)]
return UnionType.make_union(items)
elif fullname == 'typing.Optional':
if len(t.args) != 1:
self.fail('Optional[...] must have exactly one type argument', t)
return AnyType()
- items = self.anal_array(t.args)
- if experiments.STRICT_OPTIONAL:
- return UnionType.make_simplified_union([items[0], NoneTyp()])
- else:
- # Without strict Optional checking Optional[t] is just an alias for t.
- return items[0]
+ item = self.anal_type(t.args[0])
+ return make_optional_type(item)
elif fullname == 'typing.Callable':
return self.analyze_callable_type(t)
elif fullname == 'typing.Type':
@@ -145,11 +188,26 @@ class TypeAnalyser(TypeVisitor[Type]):
return TypeType(AnyType(), line=t.line)
if len(t.args) != 1:
self.fail('Type[...] must have exactly one type argument', t)
- items = self.anal_array(t.args)
- item = items[0]
+ item = self.anal_type(t.args[0])
return TypeType(item, line=t.line)
+ elif fullname == 'typing.ClassVar':
+ if self.nesting_level > 0:
+ self.fail('Invalid type: ClassVar nested inside other type', t)
+ if len(t.args) == 0:
+ return AnyType(line=t.line)
+ if len(t.args) != 1:
+ self.fail('ClassVar[...] must have at most one type argument', t)
+ return AnyType()
+ item = self.anal_type(t.args[0])
+ if isinstance(item, TypeVarType) or get_type_vars(item):
+ self.fail('Invalid type: ClassVar cannot be generic', t)
+ return AnyType()
+ return item
+ elif fullname in ('mypy_extensions.NoReturn', 'typing.NoReturn'):
+ return UninhabitedType(is_noreturn=True)
elif sym.kind == TYPE_ALIAS:
override = sym.type_override
+ assert override is not None
an_args = self.anal_array(t.args)
all_vars = self.get_type_var_names(override)
exp_len = len(all_vars)
@@ -176,7 +234,8 @@ class TypeAnalyser(TypeVisitor[Type]):
# is pretty minor.
return AnyType()
# Allow unbound type variables when defining an alias
- if not (self.aliasing and sym.kind == UNBOUND_TVAR):
+ if not (self.aliasing and sym.kind == TVAR and
+ self.tvar_scope.get_binding(sym) is None):
self.fail('Invalid type "{}"'.format(name), t)
return t
info = sym.node # type: TypeInfo
@@ -218,30 +277,15 @@ class TypeAnalyser(TypeVisitor[Type]):
"""Get all type variable names that are present in a generic type alias
in order of textual appearance (recursively, if needed).
"""
- tvars = [] # type: List[str]
- typ_args = get_typ_args(tp)
- for arg in typ_args:
- tvar = self.get_tvar_name(arg)
- if tvar:
- tvars.append(tvar)
- else:
- subvars = self.get_type_var_names(arg)
- if subvars:
- tvars.extend(subvars)
- # Get unique type variables in order of appearance
- all_tvars = set(tvars)
- new_tvars = []
- for t in tvars:
- if t in all_tvars:
- new_tvars.append(t)
- all_tvars.remove(t)
- return new_tvars
+ return [name for name, _
+ in tp.accept(TypeVariableQuery(self.lookup, self.tvar_scope,
+ include_callables=True, include_bound_tvars=True))]
def get_tvar_name(self, t: Type) -> Optional[str]:
if not isinstance(t, UnboundType):
return None
sym = self.lookup(t.name, t)
- if sym is not None and (sym.kind == UNBOUND_TVAR or sym.kind == BOUND_TVAR):
+ if sym is not None and sym.kind == TVAR:
return t.name
return None
@@ -265,9 +309,6 @@ class TypeAnalyser(TypeVisitor[Type]):
def visit_any(self, t: AnyType) -> Type:
return t
- def visit_void(self, t: Void) -> Type:
- return t
-
def visit_none_type(self, t: NoneTyp) -> Type:
return t
@@ -281,38 +322,56 @@ class TypeAnalyser(TypeVisitor[Type]):
self.fail('Invalid type', t)
return AnyType()
+ def visit_callable_argument(self, t: CallableArgument) -> Type:
+ self.fail('Invalid type', t)
+ return AnyType()
+
def visit_instance(self, t: Instance) -> Type:
return t
def visit_type_var(self, t: TypeVarType) -> Type:
return t
- def visit_callable_type(self, t: CallableType) -> Type:
- return t.copy_modified(arg_types=self.anal_array(t.arg_types),
- ret_type=t.ret_type.accept(self),
- fallback=t.fallback or self.builtin_type('builtins.function'),
- variables=self.anal_var_defs(t.variables))
+ def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type:
+ # Every Callable can bind its own type variables, if they're not in the outer scope
+ with self.tvar_scope_frame():
+ if self.aliasing:
+ variables = t.variables
+ else:
+ variables = self.bind_function_type_variables(t, t)
+ ret = t.copy_modified(arg_types=self.anal_array(t.arg_types, nested=nested),
+ ret_type=self.anal_type(t.ret_type, nested=nested),
+ fallback=t.fallback or self.builtin_type('builtins.function'),
+ variables=self.anal_var_defs(variables))
+ return ret
def visit_tuple_type(self, t: TupleType) -> Type:
- if t.implicit:
+ # Types such as (t1, t2, ...) only allowed in assignment statements. They'll
+ # generate errors elsewhere, and Tuple[t1, t2, ...] must be used instead.
+ if t.implicit and not self.allow_tuple_literal:
self.fail('Invalid tuple literal type', t)
return AnyType()
star_count = sum(1 for item in t.items if isinstance(item, StarType))
if star_count > 1:
self.fail('At most one star type allowed in a tuple', t)
- return AnyType()
+ if t.implicit:
+ return TupleType([AnyType() for _ in t.items],
+ self.builtin_type('builtins.tuple'),
+ t.line)
+ else:
+ return AnyType()
fallback = t.fallback if t.fallback else self.builtin_type('builtins.tuple', [AnyType()])
return TupleType(self.anal_array(t.items), fallback, t.line)
def visit_typeddict_type(self, t: TypedDictType) -> Type:
items = OrderedDict([
- (item_name, item_type.accept(self))
+ (item_name, self.anal_type(item_type))
for (item_name, item_type) in t.items.items()
])
return TypedDictType(items, t.fallback)
def visit_star_type(self, t: StarType) -> Type:
- return StarType(t.type.accept(self), t.line)
+ return StarType(self.anal_type(t.type), t.line)
def visit_union_type(self, t: UnionType) -> Type:
return UnionType(self.anal_array(t.items), t.line)
@@ -325,49 +384,145 @@ class TypeAnalyser(TypeVisitor[Type]):
return AnyType()
def visit_type_type(self, t: TypeType) -> Type:
- return TypeType(t.item.accept(self), line=t.line)
+ return TypeType(self.anal_type(t.item), line=t.line)
def analyze_callable_type(self, t: UnboundType) -> Type:
fallback = self.builtin_type('builtins.function')
if len(t.args) == 0:
# Callable (bare). Treat as Callable[..., Any].
- return CallableType([AnyType(), AnyType()],
- [nodes.ARG_STAR, nodes.ARG_STAR2],
- [None, None],
- ret_type=AnyType(),
- fallback=fallback,
- is_ellipsis_args=True)
+ ret = CallableType([AnyType(), AnyType()],
+ [nodes.ARG_STAR, nodes.ARG_STAR2],
+ [None, None],
+ ret_type=AnyType(),
+ fallback=fallback,
+ is_ellipsis_args=True)
elif len(t.args) == 2:
- ret_type = t.args[1].accept(self)
+ ret_type = t.args[1]
if isinstance(t.args[0], TypeList):
# Callable[[ARG, ...], RET] (ordinary callable type)
- args = t.args[0].items
- return CallableType(self.anal_array(args),
- [nodes.ARG_POS] * len(args),
- [None] * len(args),
- ret_type=ret_type,
- fallback=fallback)
+ args = [] # type: List[Type]
+ names = [] # type: List[str]
+ kinds = [] # type: List[int]
+ for arg in t.args[0].items:
+ if isinstance(arg, CallableArgument):
+ args.append(arg.typ)
+ names.append(arg.name)
+ if arg.constructor is None:
+ return AnyType()
+ found = self.lookup(arg.constructor, arg)
+ if found is None:
+ # Looking it up already put an error message in
+ return AnyType()
+ elif found.fullname not in ARG_KINDS_BY_CONSTRUCTOR:
+ self.fail('Invalid argument constructor "{}"'.format(
+ found.fullname), arg)
+ return AnyType()
+ else:
+ kind = ARG_KINDS_BY_CONSTRUCTOR[found.fullname]
+ kinds.append(kind)
+ if arg.name is not None and kind in {ARG_STAR, ARG_STAR2}:
+ self.fail("{} arguments should not have names".format(
+ arg.constructor), arg)
+ return AnyType()
+ else:
+ args.append(arg)
+ names.append(None)
+ kinds.append(ARG_POS)
+
+ check_arg_names(names, [t] * len(args), self.fail, "Callable")
+ check_arg_kinds(kinds, [t] * len(args), self.fail)
+ ret = CallableType(args,
+ kinds,
+ names,
+ ret_type=ret_type,
+ fallback=fallback)
elif isinstance(t.args[0], EllipsisType):
# Callable[..., RET] (with literal ellipsis; accept arbitrary arguments)
- return CallableType([AnyType(), AnyType()],
- [nodes.ARG_STAR, nodes.ARG_STAR2],
- [None, None],
- ret_type=ret_type,
- fallback=fallback,
- is_ellipsis_args=True)
+ ret = CallableType([AnyType(), AnyType()],
+ [nodes.ARG_STAR, nodes.ARG_STAR2],
+ [None, None],
+ ret_type=ret_type,
+ fallback=fallback,
+ is_ellipsis_args=True)
else:
self.fail('The first argument to Callable must be a list of types or "..."', t)
return AnyType()
-
- self.fail('Invalid function type', t)
- return AnyType()
-
- def anal_array(self, a: List[Type]) -> List[Type]:
+ else:
+ self.fail('Invalid function type', t)
+ return AnyType()
+ assert isinstance(ret, CallableType)
+ return ret.accept(self)
+
+ @contextmanager
+ def tvar_scope_frame(self) -> Iterator[None]:
+ old_scope = self.tvar_scope
+ self.tvar_scope = self.tvar_scope.method_frame()
+ yield
+ self.tvar_scope = old_scope
+
+ def infer_type_variables(self,
+ type: CallableType) -> List[Tuple[str, TypeVarExpr]]:
+ """Return list of unique type variables referred to in a callable."""
+ names = [] # type: List[str]
+ tvars = [] # type: List[TypeVarExpr]
+ for arg in type.arg_types:
+ for name, tvar_expr in arg.accept(TypeVariableQuery(self.lookup, self.tvar_scope)):
+ if name not in names:
+ names.append(name)
+ tvars.append(tvar_expr)
+ # When finding type variables in the return type of a function, don't
+ # look inside Callable types. Type variables only appearing in
+ # functions in the return type belong to those functions, not the
+ # function we're currently analyzing.
+ for name, tvar_expr in type.ret_type.accept(
+ TypeVariableQuery(self.lookup, self.tvar_scope, include_callables=False)):
+ if name not in names:
+ names.append(name)
+ tvars.append(tvar_expr)
+ return list(zip(names, tvars))
+
+ def bind_function_type_variables(self,
+ fun_type: CallableType, defn: Context) -> List[TypeVarDef]:
+ """Find the type variables of the function type and bind them in our tvar_scope"""
+ if fun_type.variables:
+ for var in fun_type.variables:
+ var_expr = self.lookup(var.name, var).node
+ assert isinstance(var_expr, TypeVarExpr)
+ self.tvar_scope.bind(var.name, var_expr)
+ return fun_type.variables
+ typevars = self.infer_type_variables(fun_type)
+ # Do not define a new type variable if already defined in scope.
+ typevars = [(name, tvar) for name, tvar in typevars
+ if not self.is_defined_type_var(name, defn)]
+ defs = [] # type: List[TypeVarDef]
+ for name, tvar in typevars:
+ if not self.tvar_scope.allow_binding(tvar.fullname()):
+ self.fail("Type variable '{}' is bound by an outer class".format(name), defn)
+ self.tvar_scope.bind(name, tvar)
+ binding = self.tvar_scope.get_binding(tvar.fullname())
+ assert binding is not None
+ defs.append(binding)
+
+ return defs
+
+ def is_defined_type_var(self, tvar: str, context: Context) -> bool:
+ return self.tvar_scope.get_binding(self.lookup(tvar, context)) is not None
+
+ def anal_array(self, a: List[Type], nested: bool = True) -> List[Type]:
res = [] # type: List[Type]
for t in a:
- res.append(t.accept(self))
+ res.append(self.anal_type(t, nested))
return res
+ def anal_type(self, t: Type, nested: bool = True) -> Type:
+ if nested:
+ self.nesting_level += 1
+ try:
+ return t.accept(self)
+ finally:
+ if nested:
+ self.nesting_level -= 1
+
def anal_var_defs(self, var_defs: List[TypeVarDef]) -> List[TypeVarDef]:
a = [] # type: List[TypeVarDef]
for vd in var_defs:
@@ -436,8 +591,8 @@ class TypeAnalyserPass3(TypeVisitor[None]):
t.invalid = True
elif info.defn.type_vars:
# Check type argument values.
- for (i, arg), TypeVar in zip(enumerate(t.args), info.defn.type_vars):
- if TypeVar.values:
+ for (i, arg), tvar in zip(enumerate(t.args), info.defn.type_vars):
+ if tvar.values:
if isinstance(arg, TypeVarType):
arg_values = arg.values
if not arg_values:
@@ -448,11 +603,11 @@ class TypeAnalyserPass3(TypeVisitor[None]):
else:
arg_values = [arg]
self.check_type_var_values(info, arg_values,
- TypeVar.values, i + 1, t)
- if not satisfies_upper_bound(arg, TypeVar.upper_bound):
+ tvar.values, i + 1, t)
+ if not is_subtype(arg, tvar.upper_bound):
self.fail('Type argument "{}" of "{}" must be '
'a subtype of "{}"'.format(
- arg, info.name(), TypeVar.upper_bound), t)
+ arg, info.name(), tvar.upper_bound), t)
for arg in t.args:
arg.accept(self)
@@ -496,9 +651,6 @@ class TypeAnalyserPass3(TypeVisitor[None]):
def visit_any(self, t: AnyType) -> None:
pass
- def visit_void(self, t: Void) -> None:
- pass
-
def visit_none_type(self, t: NoneTyp) -> None:
pass
@@ -519,3 +671,79 @@ class TypeAnalyserPass3(TypeVisitor[None]):
def visit_type_type(self, t: TypeType) -> None:
pass
+
+
+TypeVarList = List[Tuple[str, TypeVarExpr]]
+
+
+def remove_dups(tvars: Iterable[T]) -> List[T]:
+ # Get unique elements in order of appearance
+ all_tvars = set() # type: Set[T]
+ new_tvars = [] # type: List[T]
+ for t in tvars:
+ if t not in all_tvars:
+ new_tvars.append(t)
+ all_tvars.add(t)
+ return new_tvars
+
+
+def flatten_tvars(ll: Iterable[List[T]]) -> List[T]:
+ return remove_dups(chain.from_iterable(ll))
+
+
+class TypeVariableQuery(TypeQuery[TypeVarList]):
+
+ def __init__(self,
+ lookup: Callable[[str, Context], SymbolTableNode],
+ scope: 'TypeVarScope',
+ *,
+ include_callables: bool = True,
+ include_bound_tvars: bool = False) -> None:
+ self.include_callables = include_callables
+ self.lookup = lookup
+ self.scope = scope
+ self.include_bound_tvars = include_bound_tvars
+ super().__init__(flatten_tvars)
+
+ def _seems_like_callable(self, type: UnboundType) -> bool:
+ if not type.args:
+ return False
+ if isinstance(type.args[0], (EllipsisType, TypeList)):
+ return True
+ return False
+
+ def visit_unbound_type(self, t: UnboundType) -> TypeVarList:
+ name = t.name
+ node = self.lookup(name, t)
+ if node and node.kind == TVAR and (
+ self.include_bound_tvars or self.scope.get_binding(node) is None):
+ assert isinstance(node.node, TypeVarExpr)
+ return [(name, node.node)]
+ elif not self.include_callables and self._seems_like_callable(t):
+ return []
+ else:
+ return super().visit_unbound_type(t)
+
+ def visit_callable_type(self, t: CallableType) -> TypeVarList:
+ if self.include_callables:
+ return super().visit_callable_type(t)
+ else:
+ return []
+
+
+def make_optional_type(t: Type) -> Type:
+ """Return the type corresponding to Optional[t].
+
+ Note that we can't use normal union simplification, since this function
+ is called during semantic analysis and simplification only works during
+ type checking.
+ """
+ if not experiments.STRICT_OPTIONAL:
+ return t
+ if isinstance(t, NoneTyp):
+ return t
+ if isinstance(t, UnionType):
+ items = [item for item in union_items(t)
+ if not isinstance(item, NoneTyp)]
+ return UnionType(items + [NoneTyp()], t.line, t.column)
+ return UnionType([t, NoneTyp()], t.line, t.column)
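make_optional_type replaces the inline Optional handling that the earlier hunks removed from visit_unbound_type. A brief sketch of its observable behaviour, assuming strict-Optional checking is enabled and with a and b standing for arbitrary non-None mypy types (building real Instances needs a TypeInfo, so they are left as parameters here); illustrative only.

    from mypy.typeanal import make_optional_type
    from mypy.types import NoneTyp, Type, UnionType

    def optional_demo(a: Type, b: Type) -> None:
        # Plain type -> union with None.
        assert isinstance(make_optional_type(a), UnionType)
        # None stays None rather than becoming Union[None, None].
        assert isinstance(make_optional_type(NoneTyp()), NoneTyp)
        # An existing union is flattened and ends up with exactly one NoneTyp.
        u = make_optional_type(UnionType([a, b, NoneTyp()]))
        assert isinstance(u, UnionType)
        assert sum(isinstance(item, NoneTyp) for item in u.items) == 1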
diff --git a/mypy/typefixture.py b/mypy/typefixture.py
index 8afcfac..04714e9 100644
--- a/mypy/typefixture.py
+++ b/mypy/typefixture.py
@@ -6,8 +6,8 @@ It contains class TypeInfos and Type objects.
from typing import List
from mypy.types import (
- Type, TypeVarType, AnyType, Void, ErrorType, NoneTyp,
- Instance, CallableType, TypeVarDef, TypeType,
+ Type, TypeVarType, AnyType, NoneTyp,
+ Instance, CallableType, TypeVarDef, TypeType, UninhabitedType
)
from mypy.nodes import (
TypeInfo, ClassDef, Block, ARG_POS, ARG_OPT, ARG_STAR, SymbolTable,
@@ -41,9 +41,8 @@ class TypeFixture:
# Simple types
self.anyt = AnyType()
- self.void = Void()
- self.err = ErrorType()
self.nonet = NoneTyp()
+ self.uninhabited = UninhabitedType()
# Abstract class TypeInfos
@@ -217,7 +216,7 @@ class TypeFixture:
variance = variances[id - 1]
else:
variance = COVARIANT
- v.append(TypeVarDef(n, id, None, self.o, variance=variance))
+ v.append(TypeVarDef(n, id, [], self.o, variance=variance))
class_def.type_vars = v
info = TypeInfo(SymbolTable(), class_def, module_name)
diff --git a/mypy/types.py b/mypy/types.py
index 3d27d52..8de44cc 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -5,7 +5,7 @@ import copy
from collections import OrderedDict
from typing import (
Any, TypeVar, Dict, List, Tuple, cast, Generic, Set, Sequence, Optional, Union, Iterable,
- NamedTuple,
+ NamedTuple, Callable,
)
import mypy.nodes
@@ -13,9 +13,8 @@ from mypy.nodes import (
INVARIANT, SymbolNode,
ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2, ARG_NAMED, ARG_NAMED_OPT,
)
-
-from mypy import experiments
from mypy.sharedparse import argument_elide_name
+from mypy.util import IdMapper
T = TypeVar('T')
@@ -23,6 +22,16 @@ T = TypeVar('T')
JsonDict = Dict[str, Any]
+def deserialize_type(data: Union[JsonDict, str]) -> 'Type':
+ if isinstance(data, str):
+ return Instance.deserialize(data)
+ classname = data['.class']
+ method = deserialize_map.get(classname)
+ if method is not None:
+ return method(data)
+ raise NotImplementedError('unexpected .class {}'.format(classname))
+
+
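deserialize_type replaces the reflective dispatch that used to live in Type.deserialize (removed just below) and pairs with the Instance.serialize change later in this file: an Instance without type arguments now serializes to its bare fullname string instead of a JSON object. A hedged round-trip sketch (the restored instance keeps the NOT_READY placeholder and a type_ref until fixup.py patches it); the helper name is made up for illustration.

    from mypy.types import Instance, deserialize_type

    def roundtrip_instance(inst: Instance) -> Instance:
        data = inst.serialize()      # e.g. 'builtins.int', or a JSON dict if args exist
        restored = deserialize_type(data)
        assert isinstance(restored, Instance)
        return restored              # restored.type_ref is resolved in a later fixup pass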
class Type(mypy.nodes.Context):
"""Abstract base class for all types."""
@@ -47,18 +56,12 @@ class Type(mypy.nodes.Context):
def __repr__(self) -> str:
return self.accept(TypeStrVisitor())
- def serialize(self) -> JsonDict:
+ def serialize(self) -> Union[JsonDict, str]:
raise NotImplementedError('Cannot serialize {} instance'.format(self.__class__.__name__))
@classmethod
def deserialize(cls, data: JsonDict) -> 'Type':
- classname = data['.class']
- glo = globals()
- if classname in glo:
- cl = glo[classname]
- if 'deserialize' in cl.__dict__:
- return cl.deserialize(data)
- raise NotImplementedError('unexpected .class {}'.format(classname))
+ raise NotImplementedError('Cannot deserialize {} instance'.format(cls.__name__))
class TypeVarId:
@@ -123,9 +126,10 @@ class TypeVarDef(mypy.nodes.Context):
line = 0
column = 0
- def __init__(self, name: str, id: Union[TypeVarId, int], values: Optional[List[Type]],
+ def __init__(self, name: str, id: Union[TypeVarId, int], values: List[Type],
upper_bound: Type, variance: int = INVARIANT, line: int = -1,
column: int = -1) -> None:
+        assert values is not None, "Use an empty list for no restrictions, not None"
self.name = name
if isinstance(id, int):
id = TypeVarId(id)
@@ -161,7 +165,7 @@ class TypeVarDef(mypy.nodes.Context):
return {'.class': 'TypeVarDef',
'name': self.name,
'id': self.id.raw_id,
- 'values': None if self.values is None else [v.serialize() for v in self.values],
+ 'values': [v.serialize() for v in self.values],
'upper_bound': self.upper_bound.serialize(),
'variance': self.variance,
}
@@ -171,9 +175,8 @@ class TypeVarDef(mypy.nodes.Context):
assert data['.class'] == 'TypeVarDef'
return TypeVarDef(data['name'],
data['id'],
- None if data['values'] is None
- else [Type.deserialize(v) for v in data['values']],
- Type.deserialize(data['upper_bound']),
+ [deserialize_type(v) for v in data['values']],
+ deserialize_type(data['upper_bound']),
data['variance'],
)
@@ -185,8 +188,6 @@ class UnboundType(Type):
args = None # type: List[Type]
# should this type be wrapped in an Optional?
optional = False
- # is this type a return type?
- is_ret_type = False
# special case for X[()]
empty_tuple_index = False
@@ -197,14 +198,12 @@ class UnboundType(Type):
line: int = -1,
column: int = -1,
optional: bool = False,
- is_ret_type: bool = False,
empty_tuple_index: bool = False) -> None:
if not args:
args = []
self.name = name
self.args = args
self.optional = optional
- self.is_ret_type = is_ret_type
self.empty_tuple_index = empty_tuple_index
super().__init__(line, column)
@@ -221,18 +220,35 @@ class UnboundType(Type):
def deserialize(cls, data: JsonDict) -> 'UnboundType':
assert data['.class'] == 'UnboundType'
return UnboundType(data['name'],
- [Type.deserialize(a) for a in data['args']])
+ [deserialize_type(a) for a in data['args']])
-class ErrorType(Type):
- """The error type is used as the result of failed type operations."""
+class CallableArgument(Type):
+    """Represents an Arg(type, 'name') inside a Callable's type list.
+
+ Note that this is a synthetic type for helping parse ASTs, not a real type.
+ """
+ typ = None # type: Type
+ name = None # type: Optional[str]
+ constructor = None # type: Optional[str]
+
+ def __init__(self, typ: Type, name: Optional[str], constructor: Optional[str],
+ line: int = -1, column: int = -1) -> None:
+ super().__init__(line, column)
+ self.typ = typ
+ self.name = name
+ self.constructor = constructor
def accept(self, visitor: 'TypeVisitor[T]') -> T:
- return visitor.visit_error_type(self)
+ assert isinstance(visitor, SyntheticTypeVisitor)
+ return visitor.visit_callable_argument(self)
+
+ def serialize(self) -> JsonDict:
+ assert False, "Synthetic types don't serialize"
class TypeList(Type):
- """A list of types [...].
+ """Information about argument types and names [...].
This is only used for the arguments of a Callable type, i.e. for
[arg, ...] in Callable[[arg, ...], ret]. This is not a real type
@@ -246,17 +262,11 @@ class TypeList(Type):
self.items = items
def accept(self, visitor: 'TypeVisitor[T]') -> T:
+ assert isinstance(visitor, SyntheticTypeVisitor)
return visitor.visit_type_list(self)
def serialize(self) -> JsonDict:
- return {'.class': 'TypeList',
- 'items': [t.serialize() for t in self.items],
- }
-
- @classmethod
- def deserialize(cls, data: JsonDict) -> 'TypeList':
- assert data['.class'] == 'TypeList'
- return TypeList([Type.deserialize(t) for t in data['items']])
+        assert False, "Synthetic types don't serialize"
class AnyType(Type):
@@ -278,44 +288,15 @@ class AnyType(Type):
return AnyType()
-class Void(Type):
- """The return type 'None'.
-
- This can only be used as the return type in a callable type and as
- the result type of calling such callable.
- """
-
- can_be_true = False
- source = '' # May be None; function that generated this value
-
- def __init__(self, source: str = None, line: int = -1, column: int = -1) -> None:
- self.source = source
- super().__init__(line, column)
-
- def accept(self, visitor: 'TypeVisitor[T]') -> T:
- return visitor.visit_void(self)
-
- def with_source(self, source: str) -> 'Void':
- return Void(source, self.line, self.column)
-
- def serialize(self) -> JsonDict:
- return {'.class': 'Void'}
-
- @classmethod
- def deserialize(cls, data: JsonDict) -> 'Void':
- assert data['.class'] == 'Void'
- return Void()
-
-
class UninhabitedType(Type):
"""This type has no members.
- This type is almost the bottom type, except it is not a subtype of Void.
+ This type is the bottom type.
With strict Optional checking, it is the only common subtype between all
other types, which allows `meet` to be well defined. Without strict
Optional checking, NoneTyp fills this role.
- In general, for any type T that isn't Void:
+ In general, for any type T:
join(UninhabitedType, T) = T
meet(UninhabitedType, T) = UninhabitedType
is_subtype(UninhabitedType, T) = True
@@ -323,58 +304,46 @@ class UninhabitedType(Type):
can_be_true = False
can_be_false = False
+ is_noreturn = False # Does this come from a NoReturn? Purely for error messages.
- def __init__(self, line: int = -1, column: int = -1) -> None:
+ def __init__(self, is_noreturn: bool = False, line: int = -1, column: int = -1) -> None:
super().__init__(line, column)
+ self.is_noreturn = is_noreturn
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_uninhabited_type(self)
def serialize(self) -> JsonDict:
- return {'.class': 'UninhabitedType'}
+ return {'.class': 'UninhabitedType',
+ 'is_noreturn': self.is_noreturn}
@classmethod
def deserialize(cls, data: JsonDict) -> 'UninhabitedType':
assert data['.class'] == 'UninhabitedType'
- return UninhabitedType()
+ return UninhabitedType(is_noreturn=data['is_noreturn'])
class NoneTyp(Type):
"""The type of 'None'.
- Without strict Optional checking:
- This is only used internally during type inference. Programs
- cannot declare a variable of this type, and the type checker
- refuses to infer this type for a variable. However, subexpressions
- often have this type. Note that this is not used as the result
- type when calling a function with a void type, even though
- semantically such a function returns a None value; the void type
- is used instead so that we can report an error if the caller tries
- to do anything with the return value.
-
- With strict Optional checking:
- This type can be written by users as 'None', except as the return value
- of a function, where 'None' means Void.
+ This type can be written by users as 'None'.
"""
can_be_true = False
- def __init__(self, is_ret_type: bool = False, line: int = -1, column: int = -1) -> None:
+ def __init__(self, line: int = -1, column: int = -1) -> None:
super().__init__(line, column)
- self.is_ret_type = is_ret_type
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_none_type(self)
def serialize(self) -> JsonDict:
- return {'.class': 'NoneTyp',
- 'is_ret_type': self.is_ret_type,
- }
+ return {'.class': 'NoneTyp'}
@classmethod
def deserialize(cls, data: JsonDict) -> 'NoneTyp':
assert data['.class'] == 'NoneTyp'
- return NoneTyp(is_ret_type=data['is_ret_type'])
+ return NoneTyp()
class ErasedType(Type):
@@ -394,7 +363,7 @@ class DeletedType(Type):
These can be used as lvalues but not rvalues.
"""
- source = '' # May be None; name that generated this value
+ source = '' # type: Optional[str] # May be None; name that generated this value
def __init__(self, source: str = None, line: int = -1, column: int = -1) -> None:
self.source = source
@@ -413,6 +382,12 @@ class DeletedType(Type):
return DeletedType(data['source'])
+# Fake TypeInfo to be used as a placeholder during Instance de-serialization.
+NOT_READY = mypy.nodes.FakeInfo(mypy.nodes.SymbolTable(),
+ mypy.nodes.ClassDef('<NOT READY>', mypy.nodes.Block([])),
+ '<NOT READY>')
+
+
class Instance(Type):
"""An instance type of form C[T1, ..., Tn].
@@ -426,6 +401,7 @@ class Instance(Type):
def __init__(self, typ: mypy.nodes.TypeInfo, args: List[Type],
line: int = -1, column: int = -1, erased: bool = False) -> None:
+ assert(typ is None or typ.fullname() not in ["builtins.Any", "typing.Any"])
self.type = typ
self.args = args
self.erased = erased
@@ -436,24 +412,30 @@ class Instance(Type):
type_ref = None # type: str
- def serialize(self) -> JsonDict:
+ def serialize(self) -> Union[JsonDict, str]:
+ assert self.type is not None
+ type_ref = self.type.fullname()
+ if not self.args:
+ return type_ref
data = {'.class': 'Instance',
} # type: JsonDict
- assert self.type is not None
- data['type_ref'] = self.type.alt_fullname or self.type.fullname()
- if self.args:
- data['args'] = [arg.serialize() for arg in self.args]
+ data['type_ref'] = type_ref
+ data['args'] = [arg.serialize() for arg in self.args]
return data
@classmethod
- def deserialize(cls, data: JsonDict) -> 'Instance':
+ def deserialize(cls, data: Union[JsonDict, str]) -> 'Instance':
+ if isinstance(data, str):
+ inst = Instance(NOT_READY, [])
+ inst.type_ref = data
+ return inst
assert data['.class'] == 'Instance'
args = [] # type: List[Type]
if 'args' in data:
args_list = data['args']
assert isinstance(args_list, list)
- args = [Type.deserialize(arg) for arg in args_list]
- inst = Instance(None, args)
+ args = [deserialize_type(arg) for arg in args_list]
+ inst = Instance(NOT_READY, args)
inst.type_ref = data['type_ref'] # Will be fixed up by fixup.py later.
return inst
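
The compact serialization added above stores an Instance that has no type
arguments as a bare fullname string, and anything else as a small dict; the
NOT_READY placeholder stands in for the real TypeInfo until fixup.py resolves
type_ref. A standalone toy sketch of the same round-trip idea (Inst and
everything inside it is invented for illustration; this is not mypy code):

    from typing import List, Union

    NOT_READY_STUB = object()   # plays the role of mypy's FakeInfo placeholder

    class Inst:
        def __init__(self, type_ref: str, args: List[str]) -> None:
            self.type = NOT_READY_STUB   # patched later by a fixup pass
            self.type_ref = type_ref
            self.args = args             # pre-serialized arguments, for brevity

        def serialize(self) -> Union[str, dict]:
            if not self.args:
                return self.type_ref     # compact form: just the fullname
            return {'.class': 'Instance', 'type_ref': self.type_ref,
                    'args': self.args}

        @classmethod
        def deserialize(cls, data: Union[str, dict]) -> 'Inst':
            if isinstance(data, str):
                return cls(data, [])
            return cls(data['type_ref'], data['args'])

    assert Inst('builtins.list', []).serialize() == 'builtins.list'
    assert Inst.deserialize('builtins.list').type is NOT_READY_STUB
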
@@ -507,8 +489,8 @@ class TypeVarType(Type):
assert data['.class'] == 'TypeVarType'
tvdef = TypeVarDef(data['name'],
data['id'],
- [Type.deserialize(v) for v in data['values']],
- Type.deserialize(data['upper_bound']),
+ [deserialize_type(v) for v in data['values']],
+ deserialize_type(data['upper_bound']),
data['variance'])
return TypeVarType(tvdef)
@@ -533,13 +515,12 @@ class FunctionLike(Type):
@abstractmethod
def with_name(self, name: str) -> 'FunctionLike': pass
+ @abstractmethod
+ def get_name(self) -> Optional[str]: pass
+
# Corresponding instance type (e.g. builtins.type)
fallback = None # type: Instance
- @classmethod
- def deserialize(cls, data: JsonDict) -> 'FunctionLike':
- return cast(FunctionLike, super().deserialize(data))
-
_dummy = object() # type: Any
@@ -556,12 +537,13 @@ class CallableType(FunctionLike):
arg_types = None # type: List[Type] # Types of function arguments
arg_kinds = None # type: List[int] # ARG_ constants
- arg_names = None # type: List[str] # None if not a keyword argument
+ arg_names = None # type: List[Optional[str]] # None if not a keyword argument
min_args = 0 # Minimum number of arguments; derived from arg_kinds
is_var_arg = False # Is it a varargs function? derived from arg_kinds
+ is_kw_arg = False
ret_type = None # type: Type # Return value type
- name = '' # Name (may be None; for error messages)
- definition = None # type: SymbolNode # For error messages. May be None.
+ name = '' # type: Optional[str] # Name (may be None; for error messages)
+ definition = None # type: Optional[SymbolNode] # For error messages. May be None.
# Type variables for a generic function
variables = None # type: List[TypeVarDef]
@@ -574,6 +556,10 @@ class CallableType(FunctionLike):
# Defined for signatures that require special handling (currently only value is 'dict'
# for a signature similar to 'dict')
special_sig = None # type: Optional[str]
+ # Was this callable generated by analyzing Type[...] instantiation?
+ from_type_type = False # type: bool
+
+ bound_args = None # type: List[Optional[Type]]
def __init__(self,
arg_types: List[Type],
@@ -590,10 +576,13 @@ class CallableType(FunctionLike):
implicit: bool = False,
is_classmethod_class: bool = False,
special_sig: Optional[str] = None,
+ from_type_type: bool = False,
+ bound_args: List[Optional[Type]] = None,
) -> None:
if variables is None:
variables = []
assert len(arg_types) == len(arg_kinds)
+ assert not any(tp is None for tp in arg_types), "No annotation must be Any, not None"
self.arg_types = arg_types
self.arg_kinds = arg_kinds
self.arg_names = arg_names
@@ -608,22 +597,27 @@ class CallableType(FunctionLike):
self.variables = variables
self.is_ellipsis_args = is_ellipsis_args
self.implicit = implicit
+ self.is_classmethod_class = is_classmethod_class
self.special_sig = special_sig
+ self.from_type_type = from_type_type
+ self.bound_args = bound_args or []
super().__init__(line, column)
def copy_modified(self,
arg_types: List[Type] = _dummy,
arg_kinds: List[int] = _dummy,
- arg_names: List[str] = _dummy,
+ arg_names: List[Optional[str]] = _dummy,
ret_type: Type = _dummy,
fallback: Instance = _dummy,
- name: str = _dummy,
+ name: Optional[str] = _dummy,
definition: SymbolNode = _dummy,
variables: List[TypeVarDef] = _dummy,
line: int = _dummy,
column: int = _dummy,
is_ellipsis_args: bool = _dummy,
- special_sig: Optional[str] = _dummy) -> 'CallableType':
+ special_sig: Optional[str] = _dummy,
+ from_type_type: bool = _dummy,
+ bound_args: List[Optional[Type]] = _dummy) -> 'CallableType':
return CallableType(
arg_types=arg_types if arg_types is not _dummy else self.arg_types,
arg_kinds=arg_kinds if arg_kinds is not _dummy else self.arg_kinds,
@@ -640,10 +634,13 @@ class CallableType(FunctionLike):
implicit=self.implicit,
is_classmethod_class=self.is_classmethod_class,
special_sig=special_sig if special_sig is not _dummy else self.special_sig,
+ from_type_type=from_type_type if from_type_type is not _dummy else self.from_type_type,
+ bound_args=bound_args if bound_args is not _dummy else self.bound_args,
)
def is_type_obj(self) -> bool:
- return self.fallback.type is not None and self.fallback.type.fullname() == 'builtins.type'
+ t = self.fallback.type
+ return t is not None and t.is_metaclass()
def is_concrete_type_obj(self) -> bool:
return self.is_type_obj() and self.is_classmethod_class
@@ -653,17 +650,20 @@ class CallableType(FunctionLike):
ret = self.ret_type
if isinstance(ret, TupleType):
ret = ret.fallback
- return cast(Instance, ret).type
+ if isinstance(ret, TypeVarType):
+ ret = ret.upper_bound
+ assert isinstance(ret, Instance)
+ return ret.type
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_callable_type(self)
def with_name(self, name: str) -> 'CallableType':
"""Return a copy of this type with the specified name."""
- ret = self.ret_type
- if isinstance(ret, Void):
- ret = ret.with_source(name)
- return self.copy_modified(ret_type=ret, name=name)
+ return self.copy_modified(ret_type=self.ret_type, name=name)
+
+ def get_name(self) -> Optional[str]:
+ return self.name
def max_fixed_args(self) -> int:
n = len(self.arg_types)
@@ -695,7 +695,7 @@ class CallableType(FunctionLike):
return FormalArgument(by_name.name, by_pos.pos, by_name.typ, False)
return by_name if by_name is not None else by_pos
- def argument_by_name(self, name: str) -> Optional[FormalArgument]:
+ def argument_by_name(self, name: Optional[str]) -> Optional[FormalArgument]:
if name is None:
return None
seen_star = False
@@ -717,7 +717,7 @@ class CallableType(FunctionLike):
return FormalArgument(name, None, star2_type, False)
return None
- def argument_by_position(self, position: int) -> Optional[FormalArgument]:
+ def argument_by_position(self, position: Optional[int]) -> Optional[FormalArgument]:
if position is None:
return None
if self.is_var_arg:
@@ -759,8 +759,7 @@ class CallableType(FunctionLike):
# TODO: As an optimization, leave out everything related to
# generic functions for non-generic functions.
return {'.class': 'CallableType',
- 'arg_types': [(None if t is None else t.serialize())
- for t in self.arg_types],
+ 'arg_types': [t.serialize() for t in self.arg_types],
'arg_kinds': self.arg_kinds,
'arg_names': self.arg_names,
'ret_type': self.ret_type.serialize(),
@@ -771,23 +770,26 @@ class CallableType(FunctionLike):
'is_ellipsis_args': self.is_ellipsis_args,
'implicit': self.implicit,
'is_classmethod_class': self.is_classmethod_class,
+ 'bound_args': [(None if t is None else t.serialize())
+ for t in self.bound_args],
}
@classmethod
def deserialize(cls, data: JsonDict) -> 'CallableType':
assert data['.class'] == 'CallableType'
# TODO: Set definition to the containing SymbolNode?
- return CallableType([(None if t is None else Type.deserialize(t))
- for t in data['arg_types']],
+ return CallableType([deserialize_type(t) for t in data['arg_types']],
data['arg_kinds'],
data['arg_names'],
- Type.deserialize(data['ret_type']),
+ deserialize_type(data['ret_type']),
Instance.deserialize(data['fallback']),
name=data['name'],
variables=[TypeVarDef.deserialize(v) for v in data['variables']],
is_ellipsis_args=data['is_ellipsis_args'],
implicit=data['implicit'],
is_classmethod_class=data['is_classmethod_class'],
+ bound_args=[(None if t is None else deserialize_type(t))
+ for t in data['bound_args']],
)
@@ -810,8 +812,8 @@ class Overloaded(FunctionLike):
def items(self) -> List[CallableType]:
return self._items
- def name(self) -> str:
- return self._items[0].name
+ def name(self) -> Optional[str]:
+ return self.get_name()
def is_type_obj(self) -> bool:
# All the items must have the same type object status, so it's
@@ -829,6 +831,9 @@ class Overloaded(FunctionLike):
ni.append(it.with_name(name))
return Overloaded(ni)
+ def get_name(self) -> Optional[str]:
+ return self._items[0].name
+
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_overloaded(self)
@@ -883,7 +888,7 @@ class TupleType(Type):
@classmethod
def deserialize(cls, data: JsonDict) -> 'TupleType':
assert data['.class'] == 'TupleType'
- return TupleType([Type.deserialize(t) for t in data['items']],
+ return TupleType([deserialize_type(t) for t in data['items']],
Instance.deserialize(data['fallback']),
implicit=data['implicit'])
@@ -932,7 +937,8 @@ class TypedDictType(Type):
@classmethod
def deserialize(cls, data: JsonDict) -> 'TypedDictType':
assert data['.class'] == 'TypedDictType'
- return TypedDictType(OrderedDict([(n, Type.deserialize(t)) for (n, t) in data['items']]),
+ return TypedDictType(OrderedDict([(n, deserialize_type(t))
+ for (n, t) in data['items']]),
Instance.deserialize(data['fallback']))
def as_anonymous(self) -> 'TypedDictType':
@@ -993,8 +999,12 @@ class StarType(Type):
super().__init__(line, column)
def accept(self, visitor: 'TypeVisitor[T]') -> T:
+ assert isinstance(visitor, SyntheticTypeVisitor)
return visitor.visit_star_type(self)
+ def serialize(self) -> JsonDict:
+        assert False, "Synthetic types don't serialize"
+
class UnionType(Type):
"""The union type Union[T1, ..., Tn] (at least one type argument)."""
@@ -1002,7 +1012,7 @@ class UnionType(Type):
items = None # type: List[Type]
def __init__(self, items: List[Type], line: int = -1, column: int = -1) -> None:
- self.items = items
+ self.items = flatten_nested_unions(items)
self.can_be_true = any(item.can_be_true for item in items)
self.can_be_false = any(item.can_be_false for item in items)
super().__init__(line, column)
@@ -1014,13 +1024,27 @@ class UnionType(Type):
elif len(items) == 1:
return items[0]
else:
- if experiments.STRICT_OPTIONAL:
- return UninhabitedType()
- else:
- return Void()
+ return UninhabitedType()
@staticmethod
def make_simplified_union(items: List[Type], line: int = -1, column: int = -1) -> Type:
+ """Build union type with redundant union items removed.
+
+ If only a single item remains, this may return a non-union type.
+
+ Examples:
+
+ * [int, str] -> Union[int, str]
+ * [int, object] -> object
+ * [int, int] -> int
+ * [int, Any] -> Union[int, Any] (Any types are not simplified away!)
+ * [Any, Any] -> Any
+
+ Note: This must NOT be used during semantic analysis, since TypeInfos may not
+ be fully initialized.
+ """
+ # TODO: Make this a function living somewhere outside mypy.types. Most other non-trivial
+ # type operations are not static methods, so this is inconsistent.
while any(isinstance(typ, UnionType) for typ in items):
all_items = [] # type: List[Type]
for typ in items:
@@ -1030,11 +1054,7 @@ class UnionType(Type):
all_items.append(typ)
items = all_items
- if any(isinstance(typ, AnyType) for typ in items):
- return AnyType()
-
- from mypy.subtypes import is_subtype
- from mypy.sametypes import is_same_type
+ from mypy.subtypes import is_proper_subtype
removed = set() # type: Set[int]
for i, ti in enumerate(items):
@@ -1042,10 +1062,8 @@ class UnionType(Type):
# Keep track of the truishness info for deleted subtypes which can be relevant
cbt = cbf = False
for j, tj in enumerate(items):
- if (i != j
- and is_subtype(tj, ti)
- and (not (isinstance(tj, Instance) and tj.type.fallback_to_any)
- or is_same_type(ti, tj))):
+ if (i != j and is_proper_subtype(tj, ti)):
+ # We found a redundant item in the union.
removed.add(j)
cbt = cbt or tj.can_be_true
cbf = cbf or tj.can_be_false
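
The simplification above amounts to: flatten nested unions, then drop every
item that is a proper subtype of some other item (the can_be_true/can_be_false
bookkeeping is left out here). A standalone toy of that core step, with plain
Python classes and issubclass() standing in for mypy types and
is_proper_subtype (all names invented for illustration):

    from typing import List

    def simplified_union(items: List[type]) -> List[type]:
        removed = set()
        for i, ti in enumerate(items):
            if i in removed:
                continue
            for j, tj in enumerate(items):
                if i != j and j not in removed and issubclass(tj, ti):
                    removed.add(j)   # tj is redundant: it is a subtype of ti
        return [t for i, t in enumerate(items) if i not in removed]

    class A: pass
    class B(A): pass

    assert simplified_union([A, B, int]) == [A, int]   # B is absorbed into A
    assert simplified_union([int, int]) == [int]       # duplicates collapse
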
@@ -1082,7 +1100,7 @@ class UnionType(Type):
@classmethod
def deserialize(cls, data: JsonDict) -> 'UnionType':
assert data['.class'] == 'UnionType'
- return UnionType([Type.deserialize(t) for t in data['items']])
+ return UnionType([deserialize_type(t) for t in data['items']])
class PartialType(Type):
@@ -1126,15 +1144,11 @@ class EllipsisType(Type):
"""
def accept(self, visitor: 'TypeVisitor[T]') -> T:
+ assert isinstance(visitor, SyntheticTypeVisitor)
return visitor.visit_ellipsis_type(self)
def serialize(self) -> JsonDict:
- return {'.class': 'EllipsisType'}
-
- @classmethod
- def deserialize(cls, data: JsonDict) -> 'EllipsisType':
- assert data['.class'] == 'EllipsisType'
- return EllipsisType()
+ assert False, "Synthetic types don't serialize"
class TypeType(Type):
@@ -1171,7 +1185,10 @@ class TypeType(Type):
def __init__(self, item: Type, *, line: int = -1, column: int = -1) -> None:
super().__init__(line, column)
- self.item = item
+ if isinstance(item, CallableType) and item.is_type_obj():
+ self.item = item.fallback
+ else:
+ self.item = item
def accept(self, visitor: 'TypeVisitor[T]') -> T:
return visitor.visit_type_type(self)
@@ -1182,7 +1199,7 @@ class TypeType(Type):
@classmethod
def deserialize(cls, data: JsonDict) -> 'TypeType':
assert data['.class'] == 'TypeType'
- return TypeType(Type.deserialize(data['item']))
+ return TypeType(deserialize_type(data['item']))
#
@@ -1206,21 +1223,11 @@ class TypeVisitor(Generic[T]):
def visit_unbound_type(self, t: UnboundType) -> T:
pass
- def visit_type_list(self, t: TypeList) -> T:
- raise self._notimplemented_helper('type_list')
-
- def visit_error_type(self, t: ErrorType) -> T:
- raise self._notimplemented_helper('error_type')
-
@abstractmethod
def visit_any(self, t: AnyType) -> T:
pass
@abstractmethod
- def visit_void(self, t: Void) -> T:
- pass
-
- @abstractmethod
def visit_none_type(self, t: NoneTyp) -> T:
pass
@@ -1258,9 +1265,6 @@ class TypeVisitor(Generic[T]):
def visit_typeddict_type(self, t: TypedDictType) -> T:
pass
- def visit_star_type(self, t: StarType) -> T:
- raise self._notimplemented_helper('star_type')
-
@abstractmethod
def visit_union_type(self, t: UnionType) -> T:
pass
@@ -1269,14 +1273,33 @@ class TypeVisitor(Generic[T]):
def visit_partial_type(self, t: PartialType) -> T:
pass
- def visit_ellipsis_type(self, t: EllipsisType) -> T:
- raise self._notimplemented_helper('ellipsis_type')
-
@abstractmethod
def visit_type_type(self, t: TypeType) -> T:
pass
+class SyntheticTypeVisitor(TypeVisitor[T]):
+ """A TypeVisitor that also knows how to visit synthetic AST constructs.
+
+    That is, it is not limited to real types."""
+
+ @abstractmethod
+ def visit_star_type(self, t: StarType) -> T:
+ pass
+
+ @abstractmethod
+ def visit_type_list(self, t: TypeList) -> T:
+ pass
+
+ @abstractmethod
+ def visit_callable_argument(self, t: CallableArgument) -> T:
+ pass
+
+ @abstractmethod
+ def visit_ellipsis_type(self, t: EllipsisType) -> T:
+ pass
+
+
class TypeTranslator(TypeVisitor[Type]):
"""Identity type transformation.
@@ -1287,18 +1310,9 @@ class TypeTranslator(TypeVisitor[Type]):
def visit_unbound_type(self, t: UnboundType) -> Type:
return t
- def visit_type_list(self, t: TypeList) -> Type:
- return t
-
- def visit_error_type(self, t: ErrorType) -> Type:
- return t
-
def visit_any(self, t: AnyType) -> Type:
return t
- def visit_void(self, t: Void) -> Type:
- return t
-
def visit_none_type(self, t: NoneTyp) -> Type:
return t
@@ -1341,15 +1355,9 @@ class TypeTranslator(TypeVisitor[Type]):
cast(Any, t.fallback.accept(self)),
t.line, t.column)
- def visit_star_type(self, t: StarType) -> Type:
- return StarType(t.type.accept(self), t.line, t.column)
-
def visit_union_type(self, t: UnionType) -> Type:
return UnionType(self.translate_types(t.items), t.line, t.column)
- def visit_ellipsis_type(self, t: EllipsisType) -> Type:
- return t
-
def translate_types(self, types: List[Type]) -> List[Type]:
return [t.accept(self) for t in types]
@@ -1371,7 +1379,7 @@ class TypeTranslator(TypeVisitor[Type]):
return TypeType(t.item.accept(self), line=t.line, column=t.column)
-class TypeStrVisitor(TypeVisitor[str]):
+class TypeStrVisitor(SyntheticTypeVisitor[str]):
"""Visitor for pretty-printing types into strings.
This is mostly for debugging/testing.
@@ -1383,6 +1391,9 @@ class TypeStrVisitor(TypeVisitor[str]):
- Represent the NoneTyp type as None.
"""
+ def __init__(self, id_mapper: IdMapper = None) -> None:
+ self.id_mapper = id_mapper
+
def visit_unbound_type(self, t: UnboundType)-> str:
s = t.name + '?'
if t.args != []:
@@ -1392,21 +1403,22 @@ class TypeStrVisitor(TypeVisitor[str]):
def visit_type_list(self, t: TypeList) -> str:
return '<TypeList {}>'.format(self.list_str(t.items))
- def visit_error_type(self, t: ErrorType) -> str:
- return '<ERROR>'
+ def visit_callable_argument(self, t: CallableArgument) -> str:
+ typ = t.typ.accept(self)
+ if t.name is None:
+ return "{}({})".format(t.constructor, typ)
+ else:
+ return "{}({}, {})".format(t.constructor, typ, t.name)
def visit_any(self, t: AnyType) -> str:
return 'Any'
- def visit_void(self, t: Void) -> str:
- return 'void'
-
def visit_none_type(self, t: NoneTyp) -> str:
# Fully qualify to make this distinct from the None value.
return "builtins.None"
def visit_uninhabited_type(self, t: UninhabitedType) -> str:
- return "<uninhabited>"
+ return "<nothing>"
def visit_erased_type(self, t: ErasedType) -> str:
return "<Erased>"
@@ -1426,6 +1438,8 @@ class TypeStrVisitor(TypeVisitor[str]):
s += '*'
if t.args != []:
s += '[{}]'.format(self.list_str(t.args))
+ if self.id_mapper:
+ s += '<{}>'.format(self.id_mapper.id(t.type))
return s
def visit_type_var(self, t: TypeVarType) -> str:
@@ -1449,16 +1463,17 @@ class TypeStrVisitor(TypeVisitor[str]):
s += '*'
if t.arg_kinds[i] == ARG_STAR2:
s += '**'
- if t.arg_names[i]:
- s += t.arg_names[i] + ': '
- s += str(t.arg_types[i])
+ name = t.arg_names[i]
+ if name:
+ s += name + ': '
+ s += t.arg_types[i].accept(self)
if t.arg_kinds[i] in (ARG_OPT, ARG_NAMED_OPT):
s += ' ='
s = '({})'.format(s)
- if not isinstance(t.ret_type, Void):
- s += ' -> {}'.format(t.ret_type)
+ if not isinstance(t.ret_type, NoneTyp):
+ s += ' -> {}'.format(t.ret_type.accept(self))
if t.variables:
s = '{} {}'.format(t.variables, s)
@@ -1531,115 +1546,81 @@ class TypeStrVisitor(TypeVisitor[str]):
])
-# These constants define the method used by TypeQuery to combine multiple
-# query results, e.g. for tuple types. The strategy is not used for empty
-# result lists; in that case the default value takes precedence.
-ANY_TYPE_STRATEGY = 0 # Return True if any of the results are True.
-ALL_TYPES_STRATEGY = 1 # Return True if all of the results are True.
-
+class TypeQuery(SyntheticTypeVisitor[T]):
+ """Visitor for performing queries of types.
-class TypeQuery(TypeVisitor[bool]):
- """Visitor for performing simple boolean queries of types.
+    strategy is used to combine results for a series of types.
- This class allows defining the default value for leafs to simplify the
- implementation of many queries.
+    Common use cases involve a boolean query using `any` or `all` as the strategy.
"""
- default = False # Default result
- strategy = 0 # Strategy for combining multiple values (ANY_TYPE_STRATEGY or ALL_TYPES_...).
-
- def __init__(self, default: bool, strategy: int) -> None:
- """Construct a query visitor.
-
- Use the given default result and strategy for combining
- multiple results. The strategy must be either
- ANY_TYPE_STRATEGY or ALL_TYPES_STRATEGY.
- """
- self.default = default
+ def __init__(self, strategy: Callable[[Iterable[T]], T]) -> None:
self.strategy = strategy
- def visit_unbound_type(self, t: UnboundType) -> bool:
- return self.default
-
- def visit_type_list(self, t: TypeList) -> bool:
- return self.default
+ def visit_unbound_type(self, t: UnboundType) -> T:
+ return self.query_types(t.args)
- def visit_error_type(self, t: ErrorType) -> bool:
- return self.default
+ def visit_type_list(self, t: TypeList) -> T:
+ return self.query_types(t.items)
- def visit_any(self, t: AnyType) -> bool:
- return self.default
+ def visit_callable_argument(self, t: CallableArgument) -> T:
+ return t.typ.accept(self)
- def visit_void(self, t: Void) -> bool:
- return self.default
+ def visit_any(self, t: AnyType) -> T:
+ return self.strategy([])
- def visit_uninhabited_type(self, t: UninhabitedType) -> bool:
- return self.default
+ def visit_uninhabited_type(self, t: UninhabitedType) -> T:
+ return self.strategy([])
- def visit_none_type(self, t: NoneTyp) -> bool:
- return self.default
+ def visit_none_type(self, t: NoneTyp) -> T:
+ return self.strategy([])
- def visit_erased_type(self, t: ErasedType) -> bool:
- return self.default
+ def visit_erased_type(self, t: ErasedType) -> T:
+ return self.strategy([])
- def visit_deleted_type(self, t: DeletedType) -> bool:
- return self.default
+ def visit_deleted_type(self, t: DeletedType) -> T:
+ return self.strategy([])
- def visit_type_var(self, t: TypeVarType) -> bool:
- return self.default
+ def visit_type_var(self, t: TypeVarType) -> T:
+ return self.strategy([])
- def visit_partial_type(self, t: PartialType) -> bool:
- return self.default
+ def visit_partial_type(self, t: PartialType) -> T:
+ return self.query_types(t.inner_types)
- def visit_instance(self, t: Instance) -> bool:
+ def visit_instance(self, t: Instance) -> T:
return self.query_types(t.args)
- def visit_callable_type(self, t: CallableType) -> bool:
+ def visit_callable_type(self, t: CallableType) -> T:
# FIX generics
return self.query_types(t.arg_types + [t.ret_type])
- def visit_tuple_type(self, t: TupleType) -> bool:
+ def visit_tuple_type(self, t: TupleType) -> T:
return self.query_types(t.items)
- def visit_typeddict_type(self, t: TypedDictType) -> bool:
+ def visit_typeddict_type(self, t: TypedDictType) -> T:
return self.query_types(t.items.values())
- def visit_star_type(self, t: StarType) -> bool:
+ def visit_star_type(self, t: StarType) -> T:
return t.type.accept(self)
- def visit_union_type(self, t: UnionType) -> bool:
+ def visit_union_type(self, t: UnionType) -> T:
return self.query_types(t.items)
- def visit_overloaded(self, t: Overloaded) -> bool:
+ def visit_overloaded(self, t: Overloaded) -> T:
return self.query_types(t.items())
- def visit_type_type(self, t: TypeType) -> bool:
+ def visit_type_type(self, t: TypeType) -> T:
return t.item.accept(self)
- def query_types(self, types: Iterable[Type]) -> bool:
+ def visit_ellipsis_type(self, t: EllipsisType) -> T:
+ return self.strategy([])
+
+ def query_types(self, types: Iterable[Type]) -> T:
"""Perform a query for a list of types.
- Use the strategy constant to combine the results.
+ Use the strategy to combine the results.
"""
- if not types:
- # Use default result for empty list.
- return self.default
- if self.strategy == ANY_TYPE_STRATEGY:
- # Return True if at least one component is true.
- res = False
- for t in types:
- res = res or t.accept(self)
- if res:
- break
- return res
- else:
- # Return True if all components are true.
- res = True
- for t in types:
- res = res and t.accept(self)
- if not res:
- break
- return res
+ return self.strategy(t.accept(self) for t in types)
def strip_type(typ: Type) -> Type:
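
With the strategy now a callable, a boolean query is written by passing any or
all and overriding only the leaves of interest. A minimal sketch, assuming
mypy 0.511 is importable as a library (HasAnyQuery and contains_any are
illustrative names, not part of mypy):

    from mypy.types import AnyType, Type, TypeQuery

    class HasAnyQuery(TypeQuery[bool]):
        """True if the visited type has an Any among its components."""

        def __init__(self) -> None:
            super().__init__(any)   # combine child results with any(); any([]) is False

        def visit_any(self, t: AnyType) -> bool:
            return True             # override just the leaf we care about

    def contains_any(typ: Type) -> bool:
        return typ.accept(HasAnyQuery())

For example, contains_any(UnionType([AnyType(), NoneTyp()])) would return True,
while contains_any(NoneTyp()) would not.
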
@@ -1762,8 +1743,61 @@ def set_typ_args(tp: Type, new_args: List[Type], line: int = -1, column: int = -
if isinstance(tp, TupleType):
return tp.copy_modified(items=new_args)
if isinstance(tp, UnionType):
- return UnionType.make_simplified_union(new_args, line, column)
+ return UnionType(new_args, line, column)
if isinstance(tp, CallableType):
return tp.copy_modified(arg_types=new_args[:-1], ret_type=new_args[-1],
line=line, column=column)
return tp
+
+
+def get_type_vars(typ: Type) -> List[TypeVarType]:
+ """Get all type variables that are present in an already analyzed type,
+ without duplicates, in order of textual appearance.
+ Similar to TypeAnalyser.get_type_var_names.
+ """
+ all_vars = [] # type: List[TypeVarType]
+ for t in get_typ_args(typ):
+ if isinstance(t, TypeVarType):
+ all_vars.append(t)
+ else:
+ all_vars.extend(get_type_vars(t))
+ # Remove duplicates while preserving order
+ included = set() # type: Set[TypeVarId]
+ tvars = []
+ for var in all_vars:
+ if var.id not in included:
+ tvars.append(var)
+ included.add(var.id)
+ return tvars
+
+
+def flatten_nested_unions(types: Iterable[Type]) -> List[Type]:
+ """Flatten nested unions in a type list."""
+ flat_items = [] # type: List[Type]
+ for tp in types:
+ if isinstance(tp, UnionType):
+ flat_items.extend(flatten_nested_unions(tp.items))
+ else:
+ flat_items.append(tp)
+ return flat_items
+
+
+def union_items(typ: Type) -> List[Type]:
+ """Return the flattened items of a union type.
+
+ For non-union types, return a list containing just the argument.
+ """
+ if isinstance(typ, UnionType):
+ items = []
+ for item in typ.items:
+ items.extend(union_items(item))
+ return items
+ else:
+ return [typ]
+
+
+deserialize_map = {
+ key: obj.deserialize # type: ignore
+ for key, obj in globals().items()
+ if isinstance(obj, type) and issubclass(obj, Type) and obj is not Type
+}
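
A quick exercise of the new module-level helpers, as a hedged sketch that
assumes mypy 0.511 is importable as a library (AnyType and NoneTyp appear only
because they need no constructor arguments):

    from mypy.types import AnyType, NoneTyp, UnionType, union_items, deserialize_map

    # UnionType.__init__ now flattens nested unions eagerly.
    inner = UnionType([AnyType(), NoneTyp()])
    outer = UnionType([inner, AnyType()])
    assert len(outer.items) == 3
    assert len(union_items(outer)) == 3

    # deserialize_map maps a serialized '.class' tag to that class's
    # deserialize method; the deserialize_type() dispatcher used throughout
    # this file is defined earlier in types.py and is not part of this hunk.
    assert isinstance(deserialize_map['NoneTyp']({'.class': 'NoneTyp'}), NoneTyp)
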
diff --git a/mypy/typevars.py b/mypy/typevars.py
index 1bdb104..6b3e419 100644
--- a/mypy/typevars.py
+++ b/mypy/typevars.py
@@ -1,4 +1,4 @@
-from typing import Union
+from typing import Union, List
from mypy.nodes import TypeInfo
diff --git a/mypy/util.py b/mypy/util.py
index e5c9e5e..1e8e318 100644
--- a/mypy/util.py
+++ b/mypy/util.py
@@ -3,7 +3,7 @@
import re
import subprocess
from xml.sax.saxutils import escape
-from typing import TypeVar, List, Tuple, Optional, Sequence
+from typing import TypeVar, List, Tuple, Optional, Sequence, Dict
T = TypeVar('T')
@@ -37,13 +37,6 @@ def short_type(obj: object) -> str:
return t.split('.')[-1].rstrip("'>")
-def indent(s: str, n: int) -> str:
- """Indent all the lines in s (separated by Newlines) by n spaces."""
- s = ' ' * n + s
- s = s.replace('\n', '\n' + ' ' * n)
- return s
-
-
def array_repr(a: List[T]) -> List[str]:
"""Return the items of an array converted to strings using Repr."""
aa = [] # type: List[str]
@@ -52,35 +45,6 @@ def array_repr(a: List[T]) -> List[str]:
return aa
-def dump_tagged(nodes: Sequence[object], tag: str) -> str:
- """Convert an array into a pretty-printed multiline string representation.
-
- The format is
- tag(
- item1..
- itemN)
- Individual items are formatted like this:
- - arrays are flattened
- - pairs (str : array) are converted recursively, so that str is the tag
- - other items are converted to strings and indented
- """
- a = [] # type: List[str]
- if tag:
- a.append(tag + '(')
- for n in nodes:
- if isinstance(n, list):
- if n:
- a.append(dump_tagged(n, None))
- elif isinstance(n, tuple):
- s = dump_tagged(n[1], n[0])
- a.append(indent(s, 2))
- elif n:
- a.append(indent(str(n), 2))
- if tag:
- a[-1] += ')'
- return '\n'.join(a)
-
-
def find_python_encoding(text: bytes, pyversion: Tuple[int, int]) -> Tuple[str, int]:
"""PEP-263 for detecting Python file encoding"""
result = ENCODING_RE.match(text)
@@ -150,3 +114,23 @@ def write_junit_xml(dt: float, serious: bool, messages: List[str], path: str) ->
xml = ERROR_TEMPLATE.format(text=escape('\n'.join(messages)), time=dt)
with open(path, 'wb') as f:
f.write(xml.encode('utf-8'))
+
+
+class IdMapper:
+ """Generate integer ids for objects.
+
+ Unlike id(), these start from 0 and increment by 1, and ids won't
+ get reused across the life-time of IdMapper.
+
+ Assume objects don't redefine __eq__ or __hash__.
+ """
+
+ def __init__(self) -> None:
+ self.id_map = {} # type: Dict[object, int]
+ self.next_id = 0
+
+ def id(self, o: object) -> int:
+ if o not in self.id_map:
+ self.id_map[o] = self.next_id
+ self.next_id += 1
+ return self.id_map[o]
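
A small usage sketch for the IdMapper helper added above (assumes mypy 0.511
is importable; the objects are arbitrary):

    from mypy.util import IdMapper

    mapper = IdMapper()
    a, b = object(), object()
    assert mapper.id(a) == 0   # ids start from 0
    assert mapper.id(b) == 1   # and increment by 1
    assert mapper.id(a) == 0   # the same object keeps its id
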
diff --git a/mypy/version.py b/mypy/version.py
index 8d8eed8..cbe0b01 100644
--- a/mypy/version.py
+++ b/mypy/version.py
@@ -1,7 +1,7 @@
import os
from mypy import git
-__version__ = '0.480-dev'
+__version__ = '0.511'
base_version = __version__
mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
diff --git a/mypy/visitor.py b/mypy/visitor.py
index df04ebd..6bd7520 100644
--- a/mypy/visitor.py
+++ b/mypy/visitor.py
@@ -113,7 +113,7 @@ class ExpressionVisitor(Generic[T]):
pass
@abstractmethod
- def visit_func_expr(self, o: 'mypy.nodes.FuncExpr') -> T:
+ def visit_lambda_expr(self, o: 'mypy.nodes.LambdaExpr') -> T:
pass
@abstractmethod
@@ -157,6 +157,10 @@ class ExpressionVisitor(Generic[T]):
pass
@abstractmethod
+ def visit_enum_call_expr(self, o: 'mypy.nodes.EnumCallExpr') -> T:
+ pass
+
+ @abstractmethod
def visit_typeddict_expr(self, o: 'mypy.nodes.TypedDictExpr') -> T:
pass
@@ -177,7 +181,127 @@ class ExpressionVisitor(Generic[T]):
pass
-class NodeVisitor(Generic[T], ExpressionVisitor[T]):
+class StatementVisitor(Generic[T]):
+ # Definitions
+
+ @abstractmethod
+ def visit_assignment_stmt(self, o: 'mypy.nodes.AssignmentStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_for_stmt(self, o: 'mypy.nodes.ForStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_with_stmt(self, o: 'mypy.nodes.WithStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_del_stmt(self, o: 'mypy.nodes.DelStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_func_def(self, o: 'mypy.nodes.FuncDef') -> T:
+ pass
+
+ @abstractmethod
+ def visit_overloaded_func_def(self, o: 'mypy.nodes.OverloadedFuncDef') -> T:
+ pass
+
+ @abstractmethod
+ def visit_class_def(self, o: 'mypy.nodes.ClassDef') -> T:
+ pass
+
+ @abstractmethod
+ def visit_global_decl(self, o: 'mypy.nodes.GlobalDecl') -> T:
+ pass
+
+ @abstractmethod
+ def visit_nonlocal_decl(self, o: 'mypy.nodes.NonlocalDecl') -> T:
+ pass
+
+ @abstractmethod
+ def visit_decorator(self, o: 'mypy.nodes.Decorator') -> T:
+ pass
+
+ @abstractmethod
+ def visit_var(self, o: 'mypy.nodes.Var') -> T:
+ pass
+
+ # Module structure
+
+ @abstractmethod
+ def visit_import(self, o: 'mypy.nodes.Import') -> T:
+ pass
+
+ @abstractmethod
+ def visit_import_from(self, o: 'mypy.nodes.ImportFrom') -> T:
+ pass
+
+ @abstractmethod
+ def visit_import_all(self, o: 'mypy.nodes.ImportAll') -> T:
+ pass
+
+ # Statements
+
+ @abstractmethod
+ def visit_block(self, o: 'mypy.nodes.Block') -> T:
+ pass
+
+ @abstractmethod
+ def visit_expression_stmt(self, o: 'mypy.nodes.ExpressionStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_operator_assignment_stmt(self, o: 'mypy.nodes.OperatorAssignmentStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_while_stmt(self, o: 'mypy.nodes.WhileStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_return_stmt(self, o: 'mypy.nodes.ReturnStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_assert_stmt(self, o: 'mypy.nodes.AssertStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_if_stmt(self, o: 'mypy.nodes.IfStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_break_stmt(self, o: 'mypy.nodes.BreakStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_continue_stmt(self, o: 'mypy.nodes.ContinueStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_pass_stmt(self, o: 'mypy.nodes.PassStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_raise_stmt(self, o: 'mypy.nodes.RaiseStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_try_stmt(self, o: 'mypy.nodes.TryStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_print_stmt(self, o: 'mypy.nodes.PrintStmt') -> T:
+ pass
+
+ @abstractmethod
+ def visit_exec_stmt(self, o: 'mypy.nodes.ExecStmt') -> T:
+ pass
+
+
+class NodeVisitor(Generic[T], ExpressionVisitor[T], StatementVisitor[T]):
"""Empty base class for parse tree node visitors.
The T type argument specifies the return type of the visit
@@ -187,11 +311,13 @@ class NodeVisitor(Generic[T], ExpressionVisitor[T]):
TODO make the default return value explicit
"""
- # Module structure
+ # Not in superclasses:
def visit_mypy_file(self, o: 'mypy.nodes.MypyFile') -> T:
pass
+ # Module structure
+
def visit_import(self, o: 'mypy.nodes.Import') -> T:
pass
@@ -359,7 +485,7 @@ class NodeVisitor(Generic[T], ExpressionVisitor[T]):
def visit_type_application(self, o: 'mypy.nodes.TypeApplication') -> T:
pass
- def visit_func_expr(self, o: 'mypy.nodes.FuncExpr') -> T:
+ def visit_lambda_expr(self, o: 'mypy.nodes.LambdaExpr') -> T:
pass
def visit_list_comprehension(self, o: 'mypy.nodes.ListComprehension') -> T:
@@ -392,6 +518,9 @@ class NodeVisitor(Generic[T], ExpressionVisitor[T]):
def visit_namedtuple_expr(self, o: 'mypy.nodes.NamedTupleExpr') -> T:
pass
+ def visit_enum_call_expr(self, o: 'mypy.nodes.EnumCallExpr') -> T:
+ pass
+
def visit_typeddict_expr(self, o: 'mypy.nodes.TypedDictExpr') -> T:
pass
diff --git a/mypy/waiter.py b/mypy/waiter.py
index 10fa402..62a0555 100644
--- a/mypy/waiter.py
+++ b/mypy/waiter.py
@@ -3,15 +3,18 @@
This is used for running mypy tests.
"""
-from typing import Dict, List, Optional, Set, Tuple
+from typing import Dict, List, Optional, Set, Tuple, Any, Iterable
import os
+from multiprocessing import cpu_count
import pipes
import re
from subprocess import Popen, STDOUT
import sys
import tempfile
import time
+import json
+from collections import defaultdict
class WaiterError(Exception):
@@ -32,7 +35,7 @@ class LazySubprocess:
def start(self) -> None:
self.outfile = tempfile.TemporaryFile()
- self.start_time = time.time()
+ self.start_time = time.perf_counter()
self.process = Popen(self.args, cwd=self.cwd, env=self.env,
stdout=self.outfile, stderr=STDOUT)
self.pid = self.process.pid
@@ -107,7 +110,11 @@ class Waiter:
if not waiter.run():
print('error')
"""
- def __init__(self, limit: int = 0, *, verbosity: int = 0, xfail: List[str] = []) -> None:
+ LOGSIZE = 50
+ FULL_LOG_FILENAME = '.runtest_log.json'
+
+ def __init__(self, limit: int = 0, *, verbosity: int = 0, xfail: List[str] = [],
+ lf: bool = False, ff: bool = False) -> None:
self.verbosity = verbosity
self.queue = [] # type: List[LazySubprocess]
# Index of next task to run in the queue.
@@ -117,21 +124,42 @@ class Waiter:
try:
sched_getaffinity = os.sched_getaffinity
except AttributeError:
- limit = 2
+ # no support for affinity on OSX/Windows
+ limit = cpu_count()
else:
# Note: only count CPUs we are allowed to use. It is a
# major mistake to count *all* CPUs on the machine.
limit = len(sched_getaffinity(0))
self.limit = limit
+ self.lf = lf
+ self.ff = ff
assert limit > 0
self.xfail = set(xfail)
self._note = None # type: Noter
self.times1 = {} # type: Dict[str, float]
self.times2 = {} # type: Dict[str, float]
-
- def add(self, cmd: LazySubprocess) -> int:
+ self.new_log = defaultdict(dict) # type: Dict[str, Dict[str, float]]
+ self.sequential_tasks = set() # type: Set[str]
+
+ def load_log_file(self) -> Optional[List[Dict[str, Dict[str, Any]]]]:
+ try:
+ # get the last log
+ with open(self.FULL_LOG_FILENAME) as fp:
+ test_log = json.load(fp)
+ except FileNotFoundError:
+ test_log = []
+ except json.JSONDecodeError:
+ print('corrupt test log file {}'.format(self.FULL_LOG_FILENAME), file=sys.stderr)
+ test_log = []
+ return test_log
+
+ def add(self, cmd: LazySubprocess, sequential: bool = False) -> int:
rv = len(self.queue)
+ if cmd.name in (task.name for task in self.queue):
+ sys.exit('Duplicate test name: {}'.format(cmd.name))
self.queue.append(cmd)
+ if sequential:
+ self.sequential_tasks.add(cmd.name)
return rv
def _start_next(self) -> None:
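
For orientation, the file that load_log_file() reads back is a JSON list of
per-run entries shaped like new_log above; one entry, written here as a Python
literal with made-up task names and numbers, would look roughly like this:

    # Shape of a single .runtest_log.json entry (illustrative values only).
    example_entry = {
        'exit_code': {'check stubs': 0, 'pytest mypy': 1},
        'runtime':   {'check stubs': 12.7, 'pytest mypy': 341.2},
    }
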
@@ -161,12 +189,14 @@ class Waiter:
def _poll_current(self) -> Tuple[int, int]:
while True:
- time.sleep(.05)
+ time.sleep(.01)
for pid in self.current:
cmd = self.current[pid][1]
code = cmd.process.poll()
if code is not None:
- cmd.end_time = time.time()
+ cmd.end_time = time.perf_counter()
+ self.new_log['exit_code'][cmd.name] = code
+ self.new_log['runtime'][cmd.name] = cmd.end_time - cmd.start_time
return pid, code
def _wait_next(self) -> Tuple[List[str], int, int]:
@@ -239,6 +269,47 @@ class Waiter:
if self.verbosity == 0:
self._note = Noter(len(self.queue))
print('SUMMARY %d tasks selected' % len(self.queue))
+
+ def avg(lst: Iterable[float]) -> float:
+ valid_items = [item for item in lst if item is not None]
+ if not valid_items:
+ # we don't know how long a new task takes
+ # better err by putting it in front in case it is slow:
+ # a fast task in front hurts performance less than a slow task in the back
+ return float('inf')
+ else:
+ return sum(valid_items) / len(valid_items)
+
+ logs = self.load_log_file()
+ if logs:
+ times = {cmd.name: avg(log['runtime'].get(cmd.name, None) for log in logs)
+ for cmd in self.queue}
+
+ def sort_function(cmd: LazySubprocess) -> Tuple[Any, int, float]:
+ # longest tasks first
+ runtime = -times[cmd.name]
+ # sequential tasks go first by default
+ sequential = -(cmd.name in self.sequential_tasks)
+ if self.ff:
+ # failed tasks first with -ff
+ exit_code = -logs[-1]['exit_code'].get(cmd.name, 0)
+ if not exit_code:
+ # avoid interrupting parallel tasks with sequential in between
+ # so either: seq failed, parallel failed, parallel passed, seq passed
+ # or: parallel failed, seq failed, seq passed, parallel passed
+ # I picked the first one arbitrarily, since no obvious pros/cons
+ # in other words, among failed tasks, sequential should go before parallel,
+ # and among successful tasks, sequential should go after parallel
+ sequential = -sequential
+ else:
+ # ignore exit code without -ff
+ exit_code = 0
+ return exit_code, sequential, runtime
+ self.queue = sorted(self.queue, key=sort_function)
+ if self.lf:
+ self.queue = [cmd for cmd in self.queue
+ if logs[-1]['exit_code'].get(cmd.name, 0)]
+
sys.stdout.flush()
# Failed tasks.
all_failures = [] # type: List[str]
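
The combined effect of the new sort_function with -ff can be seen in a
standalone miniature that mirrors the key computation above (task names, exit
codes and runtimes are made up): failed tasks come first (for equal exit codes,
sequential failures before parallel ones), then passing tasks longest-first,
with sequential passes at the very end.

    sequential_tasks = {'self-check', 'docs'}
    last_exit_code = {'self-check': 1, 'pytest-a': 1}   # missing name == passed
    avg_runtime = {'self-check': 90.0, 'pytest-a': 30.0,
                   'pytest-b': 300.0, 'lint': 10.0, 'docs': 5.0}

    def key(name):
        runtime = -avg_runtime[name]                # longest first
        sequential = -(name in sequential_tasks)
        exit_code = -last_exit_code.get(name, 0)    # failures first (with -ff)
        if not exit_code:                           # passed: sequential go last
            sequential = -sequential
        return exit_code, sequential, runtime

    order = sorted(avg_runtime, key=key)
    assert order == ['self-check', 'pytest-a', 'pytest-b', 'lint', 'docs']
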
@@ -246,15 +317,35 @@ class Waiter:
total_tests = 0
# Number of failed test cases.
total_failed_tests = 0
+ running_sequential_task = False
while self.current or self.next < len(self.queue):
while len(self.current) < self.limit and self.next < len(self.queue):
+ # only start next task if idle, or current and next tasks are both parallel
+ if running_sequential_task:
+ break
+ if self.queue[self.next].name in self.sequential_tasks:
+ if self.current:
+ break
+ else:
+ running_sequential_task = True
self._start_next()
fails, tests, test_fails = self._wait_next()
+ running_sequential_task = False
all_failures += fails
total_tests += tests
total_failed_tests += test_fails
if self.verbosity == 0:
self._note.clear()
+
+ if self.new_log: # don't append empty log, it will corrupt the cache file
+ # log only LOGSIZE most recent tests
+ test_log = (self.load_log_file() + [self.new_log])[:self.LOGSIZE]
+ try:
+ with open(self.FULL_LOG_FILENAME, 'w') as fp:
+ json.dump(test_log, fp, sort_keys=True, indent=4)
+ except Exception as e:
+ print('cannot save test log file:', e)
+
if all_failures:
summary = 'SUMMARY %d/%d tasks and %d/%d tests failed' % (
len(all_failures), len(self.queue), total_failed_tests, total_tests)
@@ -271,7 +362,6 @@ class Waiter:
len(self.queue), total_tests))
print('*** OK ***')
sys.stdout.flush()
-
return 0
diff --git a/mypy_self_check.ini b/mypy_self_check.ini
deleted file mode 100644
index de99649..0000000
--- a/mypy_self_check.ini
+++ /dev/null
@@ -1,8 +0,0 @@
-[mypy]
-disallow_untyped_defs = True
-disallow_subclassing_any = True
-warn_no_return = True
-
-; historical exceptions
-[mypy-mypy.test.testextensions]
-disallow_untyped_defs = False
diff --git a/mypy_strict_optional.ini b/mypy_strict_optional.ini
deleted file mode 100644
index 3b7d272..0000000
--- a/mypy_strict_optional.ini
+++ /dev/null
@@ -1,5 +0,0 @@
-; Mypy is run both with and without this config file in CI.
-; This allows us to make mypy strict Optional compliant over time.
-[mypy]
-strict_optional = True
-ignore_errors = True
diff --git a/pinfer/.gitignore b/pinfer/.gitignore
deleted file mode 100644
index e1dace5..0000000
--- a/pinfer/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-__pycache__
-*~
-*.pyc
diff --git a/pinfer/LICENSE b/pinfer/LICENSE
deleted file mode 100644
index ecdce98..0000000
--- a/pinfer/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-pinfer is licensed under the terms of the MIT license, reproduced below.
-
-= = = = =
-
-The MIT License
-
-Copyright (c) 2013, 2014 Jukka Lehtosalo
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
-= = = = =
diff --git a/pinfer/README b/pinfer/README
deleted file mode 100644
index 1f4fe4c..0000000
--- a/pinfer/README
+++ /dev/null
@@ -1,47 +0,0 @@
-ReadMe for pinfer
-=================
-
-Introduction
-------------
-
-Pinfer is tool for runtime type inference of variable types and
-function signatures in Python programs. The inferred types are mostly
-compatible with mypy types. It is intended for coming up with draft
-types when migrating Python code to static typing, but it can also be
-used as a code understanding or analysis tool.
-
-Pinfer is very experimental!
-
-Requirements
-------------
-
- * Python 3.2 or later
-
-Basic usage
------------
-
-To infer types of all functions and methods in a module:
-
- import foo # target module
- import pinfer
-
- # set up type inference and dumping
- pinfer.infer_module(foo)
- pinfer.dump_at_exit()
-
- # now do something with the module, e.g. run tests
-
-For inferring a Python module, add the above lines to the test suite.
-
-Handy wrapper
--------------
-
-The p.py script provides a handy wrapper for the above.
-
-
-Copyright
--------------
-
-This project includes files from the open source CPython project. Those files are Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved. The license can be found at https://github.com/python/cpython/blob/master/LICENSE.
-
diff --git a/pinfer/__init__.py b/pinfer/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/pinfer/inspect3.py b/pinfer/inspect3.py
deleted file mode 100644
index 4d74be1..0000000
--- a/pinfer/inspect3.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# from Python 3's inspect.py
-# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved
-'''
-provide getfullargspec() and getcallargs() for Python 2
-'''
-
-import sys
-import inspect
-
-if sys.version_info.major == 2:
-
- def getfullargspec(func):
- (args, varargs, keywords, defaults) = inspect.getargspec(func)
- return (args, varargs, keywords, defaults, [], [], {})
-
-
- def getcallargs(*func_and_positional, **named):
- """Get the mapping of arguments to values.
-
- A dict is returned, with keys the function argument names (including the
- names of the * and ** arguments, if any), and values the respective bound
- values from 'positional' and 'named'."""
- func = func_and_positional[0]
- positional = func_and_positional[1:]
- spec = getfullargspec(func)
- args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, ann = spec
- f_name = func.__name__
- arg2value = {}
-
-
- if inspect.ismethod(func) and func.__self__ is not None:
- # implicit 'self' (or 'cls' for classmethods) argument
- positional = (func.__self__,) + positional
- num_pos = len(positional)
- num_args = len(args)
- num_defaults = len(defaults) if defaults else 0
-
- n = min(num_pos, num_args)
- for i in range(n):
- arg2value[args[i]] = positional[i]
- if varargs:
- arg2value[varargs] = tuple(positional[n:])
- possible_kwargs = set(args + kwonlyargs)
- if varkw:
- arg2value[varkw] = {}
- for kw, value in named.items():
- if kw not in possible_kwargs:
- if not varkw:
- raise TypeError("%s() got an unexpected keyword argument %r" %
- (f_name, kw))
- arg2value[varkw][kw] = value
- continue
- if kw in arg2value:
- raise TypeError("%s() got multiple values for argument %r" %
- (f_name, kw))
- arg2value[kw] = value
- if num_pos > num_args and not varargs:
- _too_many(f_name, args, kwonlyargs, varargs, num_defaults,
- num_pos, arg2value)
- if num_pos < num_args:
- req = args[:num_args - num_defaults]
- for arg in req:
- if arg not in arg2value:
- _missing_arguments(f_name, req, True, arg2value)
- for i, arg in enumerate(args[num_args - num_defaults:]):
- if arg not in arg2value:
- arg2value[arg] = defaults[i]
- missing = 0
- for kwarg in kwonlyargs:
- if kwarg not in arg2value:
- if kwonlydefaults and kwarg in kwonlydefaults:
- arg2value[kwarg] = kwonlydefaults[kwarg]
- else:
- missing += 1
- if missing:
- _missing_arguments(f_name, kwonlyargs, False, arg2value)
- return arg2value
-
-
- def _too_many(f_name, args, kwonly, varargs, defcount, given, values):
- atleast = len(args) - defcount
- kwonly_given = len([arg for arg in kwonly if arg in values])
- if varargs:
- plural = atleast != 1
- sig = "at least %d" % (atleast,)
- elif defcount:
- plural = True
- sig = "from %d to %d" % (atleast, len(args))
- else:
- plural = len(args) != 1
- sig = str(len(args))
- kwonly_sig = ""
- if kwonly_given:
- msg = " positional argument%s (and %d keyword-only argument%s)"
- kwonly_sig = (msg % ("s" if given != 1 else "", kwonly_given,
- "s" if kwonly_given != 1 else ""))
- raise TypeError("%s() takes %s positional argument%s but %d%s %s given" %
- (f_name, sig, "s" if plural else "", given, kwonly_sig,
- "was" if given == 1 and not kwonly_given else "were"))
-
-
- def _missing_arguments(f_name, argnames, pos, values):
- names = [repr(name) for name in argnames if name not in values]
- missing = len(names)
- if missing == 1:
- s = names[0]
- elif missing == 2:
- s = "{} and {}".format(*names)
- else:
- tail = ", {} and {}".format(*names[-2:])
- del names[-2:]
- s = ", ".join(names) + tail
- raise TypeError("%s() missing %i required %s argument%s: %s" %
- (f_name, missing,
- "positional" if pos else "keyword-only",
- "" if missing == 1 else "s", s))
-
-
-else:
- getfullargspec = inspect.getfullargspec
- getcallargs = inspect.getcallargs
diff --git a/pinfer/p.py b/pinfer/p.py
deleted file mode 100644
index 451038d..0000000
--- a/pinfer/p.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python3
-"""Stub to run pinfer on a module.
-
-Usage:
-
- p.py targetmod testfile [outfile] [ -- testargs]
-
-Where:
-
- targetmod: the full target module (e.g. textwrap)
- testfile: the full test module file (e.g. test/test_textwrap.py)
- outfile: where to write the annotated module. If unspecified, will
- write stubs at end of stdout.
-
-Example invocation:
-
- python3 p.py textwrap test/test_textwrap.py
-"""
-
-
-import sys
-import imp
-import pinfer
-import os
-import atexit
-import inspect
-
-iport = __builtins__.__import__
-watched = set()
-
-
-def inferring_import(*args, **kwargs):
- module = iport(*args, **kwargs)
- if module not in watched:
- watched.add(module)
- pinfer.infer_module(module)
- return module
-
-
-def main():
- if '--' in sys.argv:
- argslen = sys.argv.index('--')
- else:
- argslen = len(sys.argv)
- args = sys.argv[1:argslen]
- del sys.argv[1:argslen + 1]
-
- if len(args) == 2:
- targetpackage, testfile = args
- outfile = None
- elif len(args) == 3:
- targetpackage, testfile, outfile = args
- else:
- sys.stderr.write('Usage: %s targetmodule testfile [outfile] [ -- testargs]\n' %
- sys.argv[0])
- sys.exit(2)
-
- # help us with local imports
- filemodule = os.path.dirname(os.path.abspath(testfile))
- sys.path.append(filemodule)
-
- targetmod = __import__(targetpackage)
- targetfile = inspect.getfile(targetmod)
- pinfer.infer_module(targetmod)
-
- if outfile:
- @atexit.register
- def rewrite_file(targetfile=targetfile, outfile=outfile, pinfer=pinfer):
- if targetfile.endswith(".pyc"):
- targetfile = targetfile[0:-1]
- annotated = pinfer.annotate_file(targetfile)
- open(outfile, "w").write(annotated)
- else:
- pinfer.dump_at_exit()
-
- pinfer.ignore_files.add(os.path.abspath(testfile))
-
- # run testfile as main
- del sys.modules['__main__']
- imp.load_source('__main__', testfile)
-
-if __name__ == '__main__':
- main()
diff --git a/pinfer/pinfer.py b/pinfer/pinfer.py
deleted file mode 100644
index 3dd1445..0000000
--- a/pinfer/pinfer.py
+++ /dev/null
@@ -1,686 +0,0 @@
-"""Tools for runtime type inference"""
-
-import inspect
-from inspect3 import getfullargspec, getcallargs
-import types
-import codecs
-import os
-import tokenize
-try:
- from StringIO import StringIO
- from unparse import Unparser
-except:
- from io import StringIO
- from unparse3 import Unparser
-import ast
-
-
-MAX_INFERRED_TUPLE_LENGTH = 10
-PREFERRED_LINE_LENGTH = 79
-
-
-var_db = {} # (location, variable) -> type
-func_argid_db = {} # funcid -> argspec
-func_arg_db = {} # (funcid, name) -> type
-func_return_db = {} # funcname -> type
-func_source_db = {} # funcid -> source string
-#func_info_db = {} # funcid -> (class, name, argspec, file, line, source)
-ignore_files = set()
-
-# The type inferencing wrapper should not be reentrant. It's not, in theory, calling
-# out to any external code which we would want to infer the types of. However,
-# sometimes we do something like infer_type(arg.keys()) or infer_type(arg.values()) if
-# the arg is a collection, and we want to know about the types of its elements. .keys(),
-# .values(), etc. can be overloaded, possibly to a method we've wrapped. This can become
-# infinitely recursive, particularly because on something like arg.keys(), keys() gets passed
-# arg as the first parameter, so if we've wrapped keys() we'll try to infer_type(arg),
-# which will detect it's a dictionary, call infer_type(arg.keys()), recurse and so on.
-# We ran in to this problem with collections.OrderedDict.
-# To prevent reentrancy, we set is_performing_inference = True iff we're in the middle of
-# inferring the types of a function. If we try to run another function we've wrapped,
-# we skip type inferencing so we can't accidentally infinitely recurse.
-is_performing_inference = False
-
-
-def reset():
- global var_db, func_argid_db, func_arg_db, func_return_db, func_source_db
- global ignore_files, is_performing_inference
- var_db = {}
- func_arg_db = {}
- func_return_db = {}
- # we don't actually want to clear these on reset(), or we'll
- # lose the functions we've already wrapped forever.
- #func_source_db = {}
- #func_argid_db = {}
- is_performing_inference = False
- ignore_files = set()
-
-
-def format_state(pretty=False):
- lines = []
- for loc, var in sorted(var_db.keys()):
- lines.append('%s: %s' % (var, var_db[(loc, var)]))
- funcnames = sorted(set(func_return_db.keys()))
- prevclass = ''
- indent = ''
- for funcid in funcnames:
- curclass, name, sourcefile, sourceline = funcid
- if curclass != prevclass:
- if curclass:
- lines.append('class %s(...):' % curclass)
- indent = ' ' * 4
- else:
- indent = ''
- prevclass = curclass
-
- lines.append(format_sig(funcid, name, indent, pretty))
- return '\n'.join(lines)
-
-
-def unparse_ast(node):
- buf = StringIO()
- Unparser(node, buf)
- return buf.getvalue().strip()
-
-
-def format_sig(funcid, fname, indent, pretty, defaults=[]):
- (argnames, varargs, varkw, _, kwonlyargs, _, _) = func_argid_db[funcid]
-
- # to get defaults, parse the function, get the nodes for the
- # defaults, then unparse them
- try:
- fn_ast = ast.parse(func_source_db[funcid].strip()).body[0]
-
- # override fname if we parsed a different one
- fname = fn_ast.name
-
- defaults = [unparse_ast(dn) for dn in fn_ast.args.defaults]
-
- if hasattr(fn_ast.args, 'kw_defaults'):
- kwonly_defaults = [unparse_ast(dn) for dn in fn_ast.args.kw_defaults]
- else:
- kwonly_defaults = []
- except:
- defaults, kwonly_defaults = [], []
- finally:
- # pad defaults to match the length of args
- defaults = ([None] * (len(argnames) - len(defaults))) + defaults
- kwonly_defaults = ([None] * (len(kwonlyargs) - len(kwonly_defaults))) + kwonly_defaults
-
- args = [('', arg, default) for (arg, default) in zip(argnames, defaults)]
-
- if varargs:
- args += [('*', varargs, None)]
- elif len(kwonlyargs) > 0:
- args += [('*', '', None)]
- if len(kwonlyargs) > 0:
- args += [('', arg, default) for (arg, default) in zip(kwonlyargs, kwonly_defaults)]
- if varkw:
- args += [('**', varkw, None)]
-
- argstrs = []
- for i, (prefix, arg, default) in enumerate(args):
- argstr = prefix + arg
-
- # Omit type of self argument.
- if (funcid, arg) in func_arg_db and not (i == 0 and arg == 'self'):
- argstr += ': %s' % func_arg_db[(funcid, arg)]
-
- if default:
- argstr += ' = %s' % default
-
- argstrs.append(argstr)
-
- ret = str(func_return_db.get(funcid, Unknown()))
-
- sig = 'def %s(%s) -> %s' % (fname, ', '.join(argstrs), ret)
- if not pretty or len(sig) <= PREFERRED_LINE_LENGTH or not args:
- return indent + sig
-
- else:
- # Format into multiple lines to conserve horizontal space.
- first = indent + 'def %s(' % fname
- extra_indent = first.index('(') + 1
-
- decl = indent + first
- decl += (',\n' + indent + ' ' * extra_indent).join(argstrs)
- decl += ')\n%s -> %s' % (indent + ' ' * (extra_indent - 4), ret)
- return decl
-
-
-def annotate_file(path):
- # this should be documented somewhere...
- INDENT_TOKEN = 5
-
- with open(path, 'r') as targetfile:
- source = targetfile.read()
-
- line_offsets = []
- source_length = 0
- for line in source.split('\n'):
- line_offsets.append(source_length)
- source_length = source_length + len(line) + 1
-
- funcids = set(funcid for funcid, arg in func_arg_db)
-
- # list of (oldstart, oldend, replacement)
- replacements = [] # type: List[Tuple[Int, Int, String]]
-
- for funcid in funcids:
- class_name, name, sourcefile, def_start_line = funcid
- if sourcefile != path:
- continue
-
- func_source = func_source_db[funcid]
- tokens = list(tokenize.generate_tokens(StringIO(func_source).readline))
- assert len(tokens) > 0
-
- # we're making the assumption that the def at least gets to start on
- # it's own line, which is fine for non-lambdas
-
- if tokens[0][0] == INDENT_TOKEN:
- indent = tokens[0][1]
- del tokens[0]
- else:
- indent = ''
-
- # Find the first indent, which should be between the end of the def
- # and before the start of the body. Then find the preceding colon,
- # which should be at the end of the def.
-
- for indent_loc in range(len(tokens)):
- if tokens[indent_loc][0] == INDENT_TOKEN:
- function_is_one_line = False
- break
- else:
- function_is_one_line = True
-
- if function_is_one_line:
- # we're also making the assumption that the def has an indent on the
- # line following the signature, which is true almost all of the time.
- # If this is not the case, we should just leave a comment above the
- # function, although I might not have time to do that now.
- continue
-
- for def_end_loc in range(indent_loc, -1, -1):
- if tokens[def_end_loc][1] == ':':
- break
-
- assert def_end_loc > 0
-
- def_end_line, def_end_col = tokens[def_end_loc][2]
- def_end_line -= 1 # the tokenizer apparently 1-indexes lines
- def_end_line += def_start_line
-
- def_start_offset = line_offsets[def_start_line]
- def_end_offset = line_offsets[def_end_line] + def_end_col
-
- annotated_def = format_sig(funcid, name, indent, True)
-
- replacements.append((def_start_offset, def_end_offset, annotated_def))
-
- # ideally, we'd put this after the docstring
- replacements.append((0, 0, "from typing import List, Dict, Set, Tuple, Callable, Pattern, Match, Union, Optional\n"))
-
- # absurdly inefficient algorithm: replace with O(n) writer
-
- for (start, end, replacement) in sorted(replacements, key=lambda r: r[0], reverse=True):
- source = source[0:start] + replacement + source[end:]
-
- return source
-
-
-def dump():
- s = format_state(pretty=True)
- if s:
- print()
- print('INFERRED TYPES:')
- print(s)
- reset()
-
-
-def dump_at_exit():
- import atexit
- atexit.register(dump)
-
-
-def get_defining_file(obj):
- try:
- path = os.path.abspath(inspect.getfile(obj))
- if path.endswith('.pyc'):
- path = path[0:-1]
- return path
- except:
- return None
-
-
-def infer_var(name, value):
- key = (None, name)
- update_var_db(key, value)
-
-
-def infer_attrs(x):
- if hasattr(x, '__class__'):
- t = x.__class__
- else:
- t = type(x)
- cls = t.__name__
- typedict = t.__dict__
- for dict in x.__dict__, typedict:
- for attr, value in dict.items():
- if attr in ('__dict__', '__doc__', '__module__', '__weakref__'):
- continue
- if type(value) is type(infer_attrs) and dict is typedict:
- # Skip methods.
- continue
- key = (None, '%s.%s' % (cls, attr))
- update_var_db(key, value)
-
-
-def infer_method_signature(class_name):
- def decorator(func):
- return infer_signature(func, class_name)
- return decorator
-
-
-def infer_signature(func, class_name=''):
- """Decorator that infers the signature of a function."""
-
- # infer_method_signature should be idempotent
- if hasattr(func, '__is_inferring_sig'):
- return func
-
- assert func.__module__ != infer_method_signature.__module__
-
- try:
- funcfile = get_defining_file(func)
- funcsource, sourceline = inspect.getsourcelines(func)
- sourceline -= 1 # getsourcelines is apparently 1-indexed
- except:
- return func
-
- funcid = (class_name, func.__name__, funcfile, sourceline)
- func_source_db[funcid] = ''.join(funcsource)
-
- try:
- func_argid_db[funcid] = getfullargspec(func)
- vargs_name, kwargs_name = func_argid_db[funcid][1], func_argid_db[funcid][2]
- except TypeError:
- # Not supported.
- return func
-
- def wrapper(*args, **kwargs):
- global is_performing_inference
- # If we're already doing inference, we should be in our own code, not code we're checking.
- # Not doing this check sometimes results in infinite recursion.
-
- if is_performing_inference:
- return func(*args, **kwargs)
-
- expecting_type_error, got_type_error, got_exception = False, False, False
-
- is_performing_inference = True
- try:
- callargs = getcallargs(func, *args, **kwargs)
-
- # we have to handle *args and **kwargs separately
- if vargs_name:
- va = callargs.pop(vargs_name)
- if kwargs_name:
- kw = callargs.pop(kwargs_name)
-
- arg_db = {arg: infer_value_type(value) for arg, value in callargs.items()}
-
- # *args and **kwargs need to merge the types of all their values
- if vargs_name:
- arg_db[vargs_name] = union_many_types(*[infer_value_type(v) for v in va])
- if kwargs_name:
- arg_db[kwargs_name] = union_many_types(*[infer_value_type(v) for v in kw.values()])
-
- except TypeError:
- got_exception = expecting_type_error = True
- except:
- got_exception = True
- finally:
- is_performing_inference = False
-
- try:
- ret = func(*args, **kwargs)
- except TypeError:
- got_type_error = got_exception = True
- raise
- except:
- got_exception = True
- raise
- finally:
- if not got_exception:
- assert not expecting_type_error
-
- # if we didn't get a TypeError, update the actual database
- for arg, t in arg_db.items():
- update_db(func_arg_db, (funcid, arg), t)
-
- # if we got an exception, we don't have a ret
- if not got_exception:
- is_performing_inference = True
- try:
- type = infer_value_type(ret)
- update_db(func_return_db, funcid, type)
- except:
- pass
- finally:
- is_performing_inference = False
-
- return ret
-
- if hasattr(func, '__name__'):
- wrapper.__name__ = func.__name__
- wrapper.__is_inferring_sig = True
- return wrapper
-
-
-def infer_class(cls):
- """Class decorator for inferring signatures of all methods of the class."""
- for attr, value in cls.__dict__.items():
- if type(value) is type(infer_class):
- setattr(cls, attr, infer_method_signature(cls.__name__)(value))
- return cls
-
-
-def infer_module(namespace):
- if hasattr(namespace, '__dict__'):
- namespace = namespace.__dict__
- for name, value in list(namespace.items()):
- if inspect.isfunction(value):
- namespace[name] = infer_signature(value)
- elif inspect.isclass(value):
- namespace[name] = infer_class(value)
-
-
-def update_var_db(key, value):
- type = infer_value_type(value)
- update_db(var_db, key, type)
-
-
-def update_db(db, key, type):
- if key not in db:
- db[key] = type
- else:
- db[key] = combine_types(db[key], type)
-
-
-def merge_db(db, other):
- assert id(db) != id(other)
- for key in other.keys():
- if key not in db:
- db[key] = other[key]
- else:
- db[key] = combine_types(db[key], other[key])
-
-
-def infer_value_type(value, depth=0):
- # Prevent infinite recursion
- if depth > 5:
- return Unknown()
- depth += 1
-
- if value is None:
- return None
- elif isinstance(value, list):
- return Generic('List', [infer_value_types(value, depth)])
- elif isinstance(value, dict):
- keytype = infer_value_types(value.keys(), depth)
- valuetype = infer_value_types(value.values(), depth)
- return Generic('Dict', (keytype, valuetype))
- elif isinstance(value, tuple):
- if len(value) <= MAX_INFERRED_TUPLE_LENGTH:
- return Tuple(infer_value_type(item, depth)
- for item in value)
- else:
- return Generic('TupleSequence', [infer_value_types(value, depth)])
- elif isinstance(value, set):
- return Generic('Set', [infer_value_types(value, depth)])
- elif isinstance(value, types.MethodType) or isinstance(value, types.FunctionType):
- return Instance(Callable)
- else:
- for t in type(value).mro():
- if get_defining_file(t) in ignore_files:
- continue
- elif t is object:
- return Any()
- elif hasattr(types, 'InstanceType') and t is types.InstanceType:
- return Any()
- else:
- return Instance(t)
- else:
- return Any()
-
-
-def infer_value_types(values, depth=0):
- """Infer a single type for an iterable of values.
-
- >>> infer_value_types((1, 'x'))
- Union(int, str)
- >>> infer_value_types([])
- Unknown
- """
- inferred = Unknown()
- for value in sample(values):
- type = infer_value_type(value, depth)
- inferred = combine_types(inferred, type)
- return inferred
-
-
-def sample(values):
- # TODO only return a sample of values
- return list(values)
-
-
-def union_many_types(*types):
- union = Unknown()
- for t in types:
- union = combine_types(union, t)
- return union
-
-
-def combine_types(x, y):
- """Perform a union of two types.
-
- >>> combine_types(Instance(int), None)
- Optional[int]
- """
- if isinstance(x, Unknown):
- return y
- if isinstance(y, Unknown):
- return x
- if isinstance(x, Any):
- return x
- if isinstance(y, Any):
- return y
- if isinstance(x, Union):
- return combine_either(x, y)
- if isinstance(y, Union):
- return combine_either(y, x)
- if x == y:
- return x
- return simplify_either([x], [y])
-
-
-def combine_either(either, x):
- if isinstance(x, Union):
- xtypes = x.types
- else:
- xtypes = [x]
- return simplify_either(either.types, xtypes)
-
-
-def simplify_either(x, y):
- numerics = [Instance(int), Instance(float), Instance(complex)]
-
- # TODO this is O(n**2); use an O(n) algorithm instead
- result = list(x)
- for type in y:
- if isinstance(type, Generic):
- for i, rt in enumerate(result):
- if isinstance(rt, Generic) and type.typename == rt.typename:
- result[i] = Generic(rt.typename,
- (combine_types(t, s)
- for t, s in zip(type.args, rt.args)))
- break
- else:
- result.append(type)
- elif isinstance(type, Tuple):
- for i, rt in enumerate(result):
- if isinstance(rt, Tuple) and len(type) == len(rt):
- result[i] = Tuple(combine_types(t, s)
- for t, s in zip(type.itemtypes,
- rt.itemtypes))
- break
- else:
- result.append(type)
- elif type in numerics:
- for i, rt in enumerate(result):
- if rt in numerics:
- result[i] = numerics[max(numerics.index(rt), numerics.index(type))]
- break
- else:
- result.append(type)
- elif isinstance(type, Instance):
- for i, rt in enumerate(result):
- if isinstance(rt, Instance):
- # Union[A, SubclassOfA] -> A
- # Union[A, A] -> A, because issubclass(A, A) == True.
- if issubclass(type.typeobj, rt.typeobj):
- break
- elif issubclass(rt.typeobj, type.typeobj):
- result[i] = type
- break
- else:
- result.append(type)
- elif type not in result:
- result.append(type)
-
- if len(result) > 1:
- return Union(result)
- else:
- return result[0]
-
-
-class TypeBase(object):
- """Abstract base class of all type objects.
-
- Type objects use isinstance tests liberally -- they don't support duck
- typing well.
- """
-
- def __eq__(self, other):
- if type(other) is not type(self):
- return False
- for attr in self.__dict__:
- if getattr(other, attr) != getattr(self, attr):
- return False
- return True
-
- def __ne__(self, other):
- return not self == other
-
- def __repr__(self):
- return str(self)
-
-
-class Instance(TypeBase):
- def __init__(self, typeobj):
- assert not inspect.isclass(typeobj) or not issubclass(typeobj, TypeBase)
- self.typeobj = typeobj
-
- def __str__(self):
- # Regular expression objects have unusual class names; special-case them
- # here to stay consistent with typing.py.
- if self.typeobj == Pattern:
- return "Pattern"
- elif self.typeobj == Match:
- return "Match"
- else:
- return self.typeobj.__name__
-
- def __repr__(self):
- return 'Instance(%s)' % self
-
-
-class Generic(TypeBase):
- def __init__(self, typename, args):
- self.typename = typename
- self.args = tuple(args)
-
- def __str__(self):
- return '%s[%s]' % (self.typename, ', '.join(str(t)
- for t in self.args))
-
-
-class Tuple(TypeBase):
- def __init__(self, itemtypes):
- self.itemtypes = tuple(itemtypes)
-
- def __len__(self):
- return len(self.itemtypes)
-
- def __str__(self):
- return 'Tuple[%s]' % (', '.join(str(t) for t in self.itemtypes))
-
-
-class Union(TypeBase):
- def __init__(self, types):
- assert len(types) > 1
- self.types = tuple(types)
-
- def __eq__(self, other):
- if type(other) is not Union:
- return False
- # TODO this is O(n**2); use an O(n) algorithm instead
- for t in self.types:
- if t not in other.types:
- return False
- for t in other.types:
- if t not in self.types:
- return False
- return True
-
- def __str__(self):
- types = list(self.types)
- if str != bytes: # on Python 2 str == bytes
- if Instance(bytes) in types and Instance(str) in types:
- # we collapse Union[bytes, str] -> AnyStr as late as possible so we avoid
- # corner cases like subclasses of bytes or str
- types.remove(Instance(bytes))
- types.remove(Instance(str))
- types.append(Instance(AnyStr))
- if len(types) == 1:
- return str(types[0])
- elif len(types) == 2 and None in types:
- type = [t for t in types if t is not None][0]
- return 'Optional[%s]' % type
- else:
- return 'Union[%s]' % (', '.join(sorted(str(t) for t in types)))
-
-
-class Unknown(TypeBase):
- def __str__(self):
- return 'Unknown'
-
- def __repr__(self):
- return 'Unknown()'
-
-
-class Any(TypeBase):
- def __str__(self):
- return 'Any'
-
- def __repr__(self):
- return 'Any()'
-
-
-class AnyStr(object): pass
-class Callable(object): pass
-import re
-Pattern = type(re.compile(u''))
-Match = type(re.match(u'', u''))
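For reference, the decorators in the deleted pinfer module above are used roughly as follows. This is a minimal sketch, assuming pinfer is importable and using only names visible in the diff (infer_signature, format_state, reset); the example function and the expected output string are illustrative, not taken from the repository.

import pinfer

@pinfer.infer_signature
def greet(name, excited=False):
    return 'Hello, %s%s' % (name, '!' if excited else '')

greet('world')
greet('world', excited=True)

# format_state() renders the union of all observed argument and return types,
# e.g. 'def greet(name: str, excited: bool) -> str'.
print(pinfer.format_state(pretty=True))
pinfer.reset()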
diff --git a/pinfer/test_pinfer.py b/pinfer/test_pinfer.py
deleted file mode 100644
index d6168db..0000000
--- a/pinfer/test_pinfer.py
+++ /dev/null
@@ -1,302 +0,0 @@
-"""Test cases for the infer module"""
-
-import unittest
-
-from pinfer import Instance, Generic, Tuple, Union, Unknown
-import pinfer
-
-
-class TestInfer(unittest.TestCase):
- def setUp(self):
- self.int = Instance(int)
- self.float = Instance(float)
-
- def tearDown(self):
- pinfer.reset()
-
- def test_instance(self):
- i = self.int
- self.assertEqual(i.typeobj, int)
- self.assertEqual(str(i), 'int')
- self.assertEqual(repr(i), 'Instance(int)')
-
- self.assertTrue(i == Instance(int))
- self.assertFalse(i != Instance(int))
- self.assertTrue(i != self.float)
- self.assertFalse(i == self.float)
- self.assertNotEqual(i, None)
-
- def test_generic_with_one_arg(self):
- g = Generic('List', [self.int])
- self.assertEqual(g.typename, 'List')
- self.assertEqual(str(g.args), '(Instance(int),)')
- self.assertEqual(str(g), 'List[int]')
- self.assertEqual(repr(g), 'List[int]')
-
- self.assertEqual(g, Generic('List', [self.int]))
- self.assertNotEqual(g, Generic('Set', [self.int]))
- self.assertNotEqual(g, Generic('List', [self.float]))
- self.assertNotEqual(g, self.int)
-
- def test_generic_with_two_args(self):
- g = Generic('Dict', (self.int, self.float))
- self.assertEqual(g.typename, 'Dict')
- self.assertEqual(str(g), 'Dict[int, float]')
-
- def test_tuple(self):
- t0 = Tuple(())
- t1 = Tuple([self.int])
- t2 = Tuple((self.float, self.int))
- self.assertEqual(t0.itemtypes, ())
- self.assertEqual(str(t1.itemtypes[0]), 'int')
- self.assertEqual(str(t2.itemtypes[0]), 'float')
- self.assertEqual(str(t2.itemtypes[1]), 'int')
- self.assertEqual(str(t0), 'Tuple[]')
- self.assertEqual(str(t1), 'Tuple[int]')
- self.assertEqual(str(t2), 'Tuple[float, int]')
-
- self.assertEqual(t1, Tuple([self.int]))
- self.assertNotEqual(t1, Tuple([self.float]))
- self.assertNotEqual(t1, Tuple([self.int, self.int]))
- self.assertNotEqual(t1, self.int)
-
- def test_either(self):
- i = self.int
- f = self.float
- s = Instance(str)
-
- e2 = Union((i, f))
- self.assertEqual(len(e2.types), 2)
- self.assertEqual(str(e2), 'Union[float, int]')
-
- self.assertEqual(e2, Union((i, f)))
- self.assertEqual(e2, Union((f, i)))
- self.assertNotEqual(e2, Union((i, s)))
- self.assertNotEqual(e2, Union((i, f, s)))
- self.assertNotEqual(Union((i, f, s)), e2)
- self.assertNotEqual(e2, i)
-
- def test_either_as_optional(self):
- optint = Union((self.int, None))
- self.assertEqual(str(optint), 'Optional[int]')
- optfloat = Union((None, self.float))
- self.assertEqual(str(optfloat), 'Optional[float]')
- eithernone = Union((self.int, self.float, None))
- self.assertEqual(str(eithernone), 'Union[None, float, int]')
-
- def test_unknown(self):
- unknown = Unknown()
- self.assertEqual(str(unknown), 'Unknown')
- self.assertEqual(repr(unknown), 'Unknown()')
-
- self.assertEqual(unknown, Unknown())
- self.assertNotEqual(unknown, self.int)
-
- def test_combine_types(self):
- i = self.int
- f = self.float
- s = Instance(str)
- c = Instance(complex)
- class Foo: pass
- o = Instance(Foo)
-
- # Simple types
- self.assert_combine(i, i, i)
- self.assert_combine(s, s, s)
- self.assert_combine(i, s, Union((i, s)))
- self.assert_combine(i, None, Union((i, None)))
- # Unknowns
- self.assert_combine(i, Unknown(), i)
- self.assert_combine(Unknown(), Unknown(), Unknown())
- # Union types
- self.assert_combine(o, Union((f, s)), Union((o, f, s)))
- self.assert_combine(i, Union((i, s)), Union((i, s)))
- self.assert_combine(Union((o, f)), Union((o, s)), Union((o, f, s)))
- # Tuple types
- self.assert_combine(Tuple([i, i]), Tuple([i, i]), Tuple([i, i]))
- self.assert_combine(Tuple([i, i]), Tuple([o, s]),
- Tuple([Union([o, i]), Union([s, i])]))
- # Numeric types
- self.assert_combine(i, f, f)
- self.assert_combine(i, c, c)
- self.assert_combine(c, f, c)
- # Unions with numerics
- self.assert_combine(i, Union((o, f)), Union((o, f)))
- self.assert_combine(Union((o, f)), i, Union((o, f)))
- self.assert_combine(Union((o, i)), f, Union((o, f)))
- # Tuples with numerics
- self.assert_combine(Tuple([i, i]), Tuple([f, i]), Tuple([f, i]))
- self.assert_combine(Tuple([i, i]), Tuple([f, o]), Tuple([f, Union((i, o))]))
- self.assert_combine(Tuple([f, i]), Tuple([i, o]), Tuple([f, Union((i, o))]))
-
- def test_combine_special_cases(self):
- i = self.int
- f = self.float
- u = Unknown()
- def list_(x):
- return Generic('List', [x])
- # Simplify generic types.
- self.assert_combine(list_(i), list_(u), list_(i))
-
- def assert_combine(self, t, s, combined):
- self.assertEqual(pinfer.combine_types(t, s), combined)
- self.assertEqual(pinfer.combine_types(s, t), combined)
-
- def test_sample(self):
- sample = pinfer.sample
- self.assertEqual(sample(()), [])
- self.assertEqual(sample((1, 2)), [1, 2])
- self.assertEqual(sample([]), [])
- self.assertEqual(sample([1]), [1])
- self.assertEqual(sample([1, 2]), [1, 2])
- # TODO larger collections
-
- def test_infer_simple_value_type(self):
- self.assert_infer_type(1, 'int')
- self.assert_infer_type('', 'str')
- self.assert_infer_type(None, 'None')
-
- def test_infer_collection_type(self):
- # List
- self.assert_infer_type([], 'List[Unknown]')
- self.assert_infer_type([1], 'List[int]')
- self.assert_infer_type([1, None], 'List[Optional[int]]')
- # Dict
- self.assert_infer_type({1: 'x', 2: None},
- 'Dict[int, Optional[str]]')
- # Set
- self.assert_infer_type({1, None}, 'Set[Optional[int]]')
- # Tuple
- self.assert_infer_type((1, 'x'), 'Tuple[int, str]')
- self.assert_infer_type((1, None) * 100, 'TupleSequence[Optional[int]]')
-
- def assert_infer_type(self, value, type):
- self.assertEqual(str(pinfer.infer_value_type(value)), type)
-
- def test_infer_variables(self):
- pinfer.infer_var('x', 1)
- self.assert_infer_state('x: int')
- pinfer.infer_var('x', 1)
- pinfer.infer_var('x', None)
- pinfer.infer_var('y', 1.1)
- self.assert_infer_state('x: Optional[int]\n'
- 'y: float')
-
- def test_infer_instance_var(self):
- class A: pass
- a = A()
- a.x = 1
- a.y = 'x'
- pinfer.infer_attrs(a)
- self.assert_infer_state('A.x: int\n'
- 'A.y: str')
-
- def test_infer_class_var(self):
- class A:
- x = 1.1
- pinfer.infer_attrs(A())
- self.assert_infer_state('A.x: float')
-
- def test_infer_function_attr(self):
- class A:
- def f(self): pass
- a = A()
- a.g = lambda x: 1
- pinfer.infer_attrs(a)
- self.assert_infer_state('A.g: Callable')
-
- def test_infer_simple_function_signature(self):
- @pinfer.infer_signature
- def f(a):
- return 'x'
- f(1)
- f(None)
- self.assertEqual(f.__name__, 'f')
- self.assert_infer_state('def f(a: Optional[int]) -> str')
-
- def test_infer_function_with_two_args(self):
- @pinfer.infer_signature
- def f(x, y):
- return x * y
- f(1, 2)
- f(1, 'x')
- self.assert_infer_state(
- 'def f(x: int, y: Union[int, str]) -> Union[int, str]')
-
- def test_infer_method(self):
- class A:
- @pinfer.infer_signature
- def f(self, x): pass
- A().f('x')
- self.assert_infer_state('def f(self, x: str) -> None')
-
- def test_infer_default_arg_values(self):
- @pinfer.infer_signature
- def f(x=1, y=None): pass
- f()
- self.assert_infer_state('def f(x: int, y: None) -> None')
- f('x')
- f('x', 1.1)
- f()
- self.assert_infer_state(
- 'def f(x: Union[int, str], y: Optional[float]) -> None')
-
- def test_infer_varargs(self):
- @pinfer.infer_signature
- def f(x, *y): pass
- f(1)
- f(1, 'x', None)
- self.assert_infer_state('def f(x: int, *y: Optional[str]) -> None')
- f(1)
- self.assert_infer_state('def f(x: int, *y: Unknown) -> None')
-
- def test_infer_keyword_args(self):
- @pinfer.infer_signature
- def f(x): pass
- f(x=1)
- self.assert_infer_state('def f(x: int) -> None')
-
- @pinfer.infer_signature
- def f(x='x'): pass
- f(x=1)
- self.assert_infer_state('def f(x: int) -> None')
-
- def test_infer_keyword_varargs(self):
- @pinfer.infer_signature
- def f(a, **kwargs): pass
- f(None, x=1, y='x')
- self.assert_infer_state(
- 'def f(a: None, **kwargs: Union[int, str]) -> None')
-
- def test_infer_class(self):
- @pinfer.infer_class
- class A:
- def f(self, x): return 0
- A().f('x')
- self.assert_infer_state('class A(...):\n'
- ' def f(self, x: str) -> int')
-
- @pinfer.infer_class
- class A:
- def f(self, x): return 0
- @pinfer.infer_class
- class B:
- def f(self): pass
- def g(self): pass
- A().f('')
- B().f()
- B().g()
- self.assert_infer_state('class A(...):\n'
- ' def f(self, x: str) -> int\n'
- 'class B(...):\n'
- ' def f(self) -> None\n'
- ' def g(self) -> None')
-
- def assert_infer_state(self, expected):
- state = pinfer.format_state()
- self.assertEqual(state, expected)
- pinfer.reset()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/pinfer/test_pinfer3.py b/pinfer/test_pinfer3.py
deleted file mode 100644
index 688e8c0..0000000
--- a/pinfer/test_pinfer3.py
+++ /dev/null
@@ -1,31 +0,0 @@
-""" tests cases that require python3 syntax """
-
-import unittest
-import pinfer
-
-# Include all of the shared unit tests
-from test_pinfer import TestInfer
-
-
-class TestInfer3(unittest.TestCase):
- def test_infer_keyword_only_args(self):
- # decorators break the parsing
- def f(x, *, y=0): pass
- f = pinfer.infer_signature(f)
- f(1, y='x')
- self.assert_infer_state(
- 'def f(x: int, *, y: str = 0) -> None')
-
- def f(*, x=None, y=None): pass
- f = pinfer.infer_signature(f)
- f(y='x')
- self.assert_infer_state(
- 'def f(*, x: None = None, y: str = None) -> None')
-
- def assert_infer_state(self, expected):
- state = pinfer.format_state()
- self.assertEqual(state, expected)
- pinfer.reset()
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/pinfer/unparse.py b/pinfer/unparse.py
deleted file mode 100644
index 6e1e493..0000000
--- a/pinfer/unparse.py
+++ /dev/null
@@ -1,610 +0,0 @@
-# From Python 2's Demo/parser/unparse.py
-# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved
-
-"Usage: unparse.py <path to source file>"
-import sys
-import ast
-import cStringIO
-import os
-
-# Large float and imaginary literals get turned into infinities in the AST.
-# We unparse those infinities to INFSTR.
-INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
-
-def interleave(inter, f, seq):
- """Call f on each item in seq, calling inter() in between.
- """
- seq = iter(seq)
- try:
- f(next(seq))
- except StopIteration:
- pass
- else:
- for x in seq:
- inter()
- f(x)
-
-class Unparser:
- """Methods in this class recursively traverse an AST and
- output source code for the abstract syntax; original formatting
- is disregarded. """
-
- def __init__(self, tree, file = sys.stdout):
- """Unparser(tree, file=sys.stdout) -> None.
- Print the source for tree to file."""
- self.f = file
- self.future_imports = []
- self._indent = 0
- self.dispatch(tree)
- self.f.write("")
- self.f.flush()
-
- def fill(self, text = ""):
- "Indent a piece of text, according to the current indentation level"
- self.f.write("\n"+" "*self._indent + text)
-
- def write(self, text):
- "Append a piece of text to the current line."
- self.f.write(text)
-
- def enter(self):
- "Print ':', and increase the indentation."
- self.write(":")
- self._indent += 1
-
- def leave(self):
- "Decrease the indentation level."
- self._indent -= 1
-
- def dispatch(self, tree):
- "Dispatcher function, dispatching tree type T to method _T."
- if isinstance(tree, list):
- for t in tree:
- self.dispatch(t)
- return
- meth = getattr(self, "_"+tree.__class__.__name__)
- meth(tree)
-
-
- ############### Unparsing methods ######################
- # There should be one method per concrete grammar type #
- # Constructors should be grouped by sum type. Ideally, #
- # this would follow the order in the grammar, but #
- # currently doesn't. #
- ########################################################
-
- def _Module(self, tree):
- for stmt in tree.body:
- self.dispatch(stmt)
-
- # stmt
- def _Expr(self, tree):
- self.fill()
- self.dispatch(tree.value)
-
- def _Import(self, t):
- self.fill("import ")
- interleave(lambda: self.write(", "), self.dispatch, t.names)
-
- def _ImportFrom(self, t):
- # A from __future__ import may affect unparsing, so record it.
- if t.module and t.module == '__future__':
- self.future_imports.extend(n.name for n in t.names)
-
- self.fill("from ")
- self.write("." * t.level)
- if t.module:
- self.write(t.module)
- self.write(" import ")
- interleave(lambda: self.write(", "), self.dispatch, t.names)
-
- def _Assign(self, t):
- self.fill()
- for target in t.targets:
- self.dispatch(target)
- self.write(" = ")
- self.dispatch(t.value)
-
- def _AugAssign(self, t):
- self.fill()
- self.dispatch(t.target)
- self.write(" "+self.binop[t.op.__class__.__name__]+"= ")
- self.dispatch(t.value)
-
- def _Return(self, t):
- self.fill("return")
- if t.value:
- self.write(" ")
- self.dispatch(t.value)
-
- def _Pass(self, t):
- self.fill("pass")
-
- def _Break(self, t):
- self.fill("break")
-
- def _Continue(self, t):
- self.fill("continue")
-
- def _Delete(self, t):
- self.fill("del ")
- interleave(lambda: self.write(", "), self.dispatch, t.targets)
-
- def _Assert(self, t):
- self.fill("assert ")
- self.dispatch(t.test)
- if t.msg:
- self.write(", ")
- self.dispatch(t.msg)
-
- def _Exec(self, t):
- self.fill("exec ")
- self.dispatch(t.body)
- if t.globals:
- self.write(" in ")
- self.dispatch(t.globals)
- if t.locals:
- self.write(", ")
- self.dispatch(t.locals)
-
- def _Print(self, t):
- self.fill("print ")
- do_comma = False
- if t.dest:
- self.write(">>")
- self.dispatch(t.dest)
- do_comma = True
- for e in t.values:
- if do_comma:self.write(", ")
- else:do_comma=True
- self.dispatch(e)
- if not t.nl:
- self.write(",")
-
- def _Global(self, t):
- self.fill("global ")
- interleave(lambda: self.write(", "), self.write, t.names)
-
- def _Yield(self, t):
- self.write("(")
- self.write("yield")
- if t.value:
- self.write(" ")
- self.dispatch(t.value)
- self.write(")")
-
- def _Raise(self, t):
- self.fill('raise ')
- if t.type:
- self.dispatch(t.type)
- if t.inst:
- self.write(", ")
- self.dispatch(t.inst)
- if t.tback:
- self.write(", ")
- self.dispatch(t.tback)
-
- def _TryExcept(self, t):
- self.fill("try")
- self.enter()
- self.dispatch(t.body)
- self.leave()
-
- for ex in t.handlers:
- self.dispatch(ex)
- if t.orelse:
- self.fill("else")
- self.enter()
- self.dispatch(t.orelse)
- self.leave()
-
- def _TryFinally(self, t):
- if len(t.body) == 1 and isinstance(t.body[0], ast.TryExcept):
- # try-except-finally
- self.dispatch(t.body)
- else:
- self.fill("try")
- self.enter()
- self.dispatch(t.body)
- self.leave()
-
- self.fill("finally")
- self.enter()
- self.dispatch(t.finalbody)
- self.leave()
-
- def _ExceptHandler(self, t):
- self.fill("except")
- if t.type:
- self.write(" ")
- self.dispatch(t.type)
- if t.name:
- self.write(" as ")
- self.dispatch(t.name)
- self.enter()
- self.dispatch(t.body)
- self.leave()
-
- def _ClassDef(self, t):
- self.write("\n")
- for deco in t.decorator_list:
- self.fill("@")
- self.dispatch(deco)
- self.fill("class "+t.name)
- if t.bases:
- self.write("(")
- for a in t.bases:
- self.dispatch(a)
- self.write(", ")
- self.write(")")
- self.enter()
- self.dispatch(t.body)
- self.leave()
-
- def _FunctionDef(self, t):
- self.write("\n")
- for deco in t.decorator_list:
- self.fill("@")
- self.dispatch(deco)
- self.fill("def "+t.name + "(")
- self.dispatch(t.args)
- self.write(")")
- self.enter()
- self.dispatch(t.body)
- self.leave()
-
- def _For(self, t):
- self.fill("for ")
- self.dispatch(t.target)
- self.write(" in ")
- self.dispatch(t.iter)
- self.enter()
- self.dispatch(t.body)
- self.leave()
- if t.orelse:
- self.fill("else")
- self.enter()
- self.dispatch(t.orelse)
- self.leave()
-
- def _If(self, t):
- self.fill("if ")
- self.dispatch(t.test)
- self.enter()
- self.dispatch(t.body)
- self.leave()
- # collapse nested ifs into equivalent elifs.
- while (t.orelse and len(t.orelse) == 1 and
- isinstance(t.orelse[0], ast.If)):
- t = t.orelse[0]
- self.fill("elif ")
- self.dispatch(t.test)
- self.enter()
- self.dispatch(t.body)
- self.leave()
- # final else
- if t.orelse:
- self.fill("else")
- self.enter()
- self.dispatch(t.orelse)
- self.leave()
-
- def _While(self, t):
- self.fill("while ")
- self.dispatch(t.test)
- self.enter()
- self.dispatch(t.body)
- self.leave()
- if t.orelse:
- self.fill("else")
- self.enter()
- self.dispatch(t.orelse)
- self.leave()
-
- def _With(self, t):
- self.fill("with ")
- self.dispatch(t.context_expr)
- if t.optional_vars:
- self.write(" as ")
- self.dispatch(t.optional_vars)
- self.enter()
- self.dispatch(t.body)
- self.leave()
-
- # expr
- def _Str(self, tree):
- # if from __future__ import unicode_literals is in effect,
- # then we want to output string literals using a 'b' prefix
- # and unicode literals with no prefix.
- if "unicode_literals" not in self.future_imports:
- self.write(repr(tree.s))
- elif isinstance(tree.s, str):
- self.write("b" + repr(tree.s))
- elif isinstance(tree.s, unicode):
- self.write(repr(tree.s).lstrip("u"))
- else:
- assert False, "shouldn't get here"
-
- def _Name(self, t):
- self.write(t.id)
-
- def _Repr(self, t):
- self.write("`")
- self.dispatch(t.value)
- self.write("`")
-
- def _Num(self, t):
- repr_n = repr(t.n)
- # Parenthesize negative numbers, to avoid turning (-1)**2 into -1**2.
- if repr_n.startswith("-"):
- self.write("(")
- # Substitute overflowing decimal literal for AST infinities.
- self.write(repr_n.replace("inf", INFSTR))
- if repr_n.startswith("-"):
- self.write(")")
-
- def _List(self, t):
- self.write("[")
- interleave(lambda: self.write(", "), self.dispatch, t.elts)
- self.write("]")
-
- def _ListComp(self, t):
- self.write("[")
- self.dispatch(t.elt)
- for gen in t.generators:
- self.dispatch(gen)
- self.write("]")
-
- def _GeneratorExp(self, t):
- self.write("(")
- self.dispatch(t.elt)
- for gen in t.generators:
- self.dispatch(gen)
- self.write(")")
-
- def _SetComp(self, t):
- self.write("{")
- self.dispatch(t.elt)
- for gen in t.generators:
- self.dispatch(gen)
- self.write("}")
-
- def _DictComp(self, t):
- self.write("{")
- self.dispatch(t.key)
- self.write(": ")
- self.dispatch(t.value)
- for gen in t.generators:
- self.dispatch(gen)
- self.write("}")
-
- def _comprehension(self, t):
- self.write(" for ")
- self.dispatch(t.target)
- self.write(" in ")
- self.dispatch(t.iter)
- for if_clause in t.ifs:
- self.write(" if ")
- self.dispatch(if_clause)
-
- def _IfExp(self, t):
- self.write("(")
- self.dispatch(t.body)
- self.write(" if ")
- self.dispatch(t.test)
- self.write(" else ")
- self.dispatch(t.orelse)
- self.write(")")
-
- def _Set(self, t):
- assert(t.elts) # should be at least one element
- self.write("{")
- interleave(lambda: self.write(", "), self.dispatch, t.elts)
- self.write("}")
-
- def _Dict(self, t):
- self.write("{")
- def write_pair(pair):
- (k, v) = pair
- self.dispatch(k)
- self.write(": ")
- self.dispatch(v)
- interleave(lambda: self.write(", "), write_pair, zip(t.keys, t.values))
- self.write("}")
-
- def _Tuple(self, t):
- self.write("(")
- if len(t.elts) == 1:
- (elt,) = t.elts
- self.dispatch(elt)
- self.write(",")
- else:
- interleave(lambda: self.write(", "), self.dispatch, t.elts)
- self.write(")")
-
- unop = {"Invert":"~", "Not": "not", "UAdd":"+", "USub":"-"}
- def _UnaryOp(self, t):
- self.write("(")
- self.write(self.unop[t.op.__class__.__name__])
- self.write(" ")
- # If we're applying unary minus to a number, parenthesize the number.
- # This is necessary: -2147483648 is different from -(2147483648) on
- # a 32-bit machine (the first is an int, the second a long), and
- # -7j is different from -(7j). (The first has real part 0.0, the second
- # has real part -0.0.)
- if isinstance(t.op, ast.USub) and isinstance(t.operand, ast.Num):
- self.write("(")
- self.dispatch(t.operand)
- self.write(")")
- else:
- self.dispatch(t.operand)
- self.write(")")
-
- binop = { "Add":"+", "Sub":"-", "Mult":"*", "Div":"/", "Mod":"%",
- "LShift":"<<", "RShift":">>", "BitOr":"|", "BitXor":"^", "BitAnd":"&",
- "FloorDiv":"//", "Pow": "**"}
- def _BinOp(self, t):
- self.write("(")
- self.dispatch(t.left)
- self.write(" " + self.binop[t.op.__class__.__name__] + " ")
- self.dispatch(t.right)
- self.write(")")
-
- cmpops = {"Eq":"==", "NotEq":"!=", "Lt":"<", "LtE":"<=", "Gt":">", "GtE":">=",
- "Is":"is", "IsNot":"is not", "In":"in", "NotIn":"not in"}
- def _Compare(self, t):
- self.write("(")
- self.dispatch(t.left)
- for o, e in zip(t.ops, t.comparators):
- self.write(" " + self.cmpops[o.__class__.__name__] + " ")
- self.dispatch(e)
- self.write(")")
-
- boolops = {ast.And: 'and', ast.Or: 'or'}
- def _BoolOp(self, t):
- self.write("(")
- s = " %s " % self.boolops[t.op.__class__]
- interleave(lambda: self.write(s), self.dispatch, t.values)
- self.write(")")
-
- def _Attribute(self,t):
- self.dispatch(t.value)
- # Special case: 3.__abs__() is a syntax error, so if t.value
- # is an integer literal then we need to either parenthesize
- # it or add an extra space to get 3 .__abs__().
- if isinstance(t.value, ast.Num) and isinstance(t.value.n, int):
- self.write(" ")
- self.write(".")
- self.write(t.attr)
-
- def _Call(self, t):
- self.dispatch(t.func)
- self.write("(")
- comma = False
- for e in t.args:
- if comma: self.write(", ")
- else: comma = True
- self.dispatch(e)
- for e in t.keywords:
- if comma: self.write(", ")
- else: comma = True
- self.dispatch(e)
- if t.starargs:
- if comma: self.write(", ")
- else: comma = True
- self.write("*")
- self.dispatch(t.starargs)
- if t.kwargs:
- if comma: self.write(", ")
- else: comma = True
- self.write("**")
- self.dispatch(t.kwargs)
- self.write(")")
-
- def _Subscript(self, t):
- self.dispatch(t.value)
- self.write("[")
- self.dispatch(t.slice)
- self.write("]")
-
- # slice
- def _Ellipsis(self, t):
- self.write("...")
-
- def _Index(self, t):
- self.dispatch(t.value)
-
- def _Slice(self, t):
- if t.lower:
- self.dispatch(t.lower)
- self.write(":")
- if t.upper:
- self.dispatch(t.upper)
- if t.step:
- self.write(":")
- self.dispatch(t.step)
-
- def _ExtSlice(self, t):
- interleave(lambda: self.write(', '), self.dispatch, t.dims)
-
- # others
- def _arguments(self, t):
- first = True
- # normal arguments
- defaults = [None] * (len(t.args) - len(t.defaults)) + t.defaults
- for a,d in zip(t.args, defaults):
- if first:first = False
- else: self.write(", ")
- self.dispatch(a),
- if d:
- self.write("=")
- self.dispatch(d)
-
- # varargs
- if t.vararg:
- if first:first = False
- else: self.write(", ")
- self.write("*")
- self.write(t.vararg)
-
- # kwargs
- if t.kwarg:
- if first:first = False
- else: self.write(", ")
- self.write("**"+t.kwarg)
-
- def _keyword(self, t):
- self.write(t.arg)
- self.write("=")
- self.dispatch(t.value)
-
- def _Lambda(self, t):
- self.write("(")
- self.write("lambda ")
- self.dispatch(t.args)
- self.write(": ")
- self.dispatch(t.body)
- self.write(")")
-
- def _alias(self, t):
- self.write(t.name)
- if t.asname:
- self.write(" as "+t.asname)
-
-def roundtrip(filename, output=sys.stdout):
- with open(filename, "r") as pyfile:
- source = pyfile.read()
- tree = compile(source, filename, "exec", ast.PyCF_ONLY_AST)
- Unparser(tree, output)
-
-
-
-def testdir(a):
- try:
- names = [n for n in os.listdir(a) if n.endswith('.py')]
- except OSError:
- sys.stderr.write("Directory not readable: %s" % a)
- else:
- for n in names:
- fullname = os.path.join(a, n)
- if os.path.isfile(fullname):
- output = cStringIO.StringIO()
- print 'Testing %s' % fullname
- try:
- roundtrip(fullname, output)
- except Exception as e:
- print ' Failed to compile, exception is %s' % repr(e)
- elif os.path.isdir(fullname):
- testdir(fullname)
-
-def main(args):
- if args[0] == '--testdir':
- for a in args[1:]:
- testdir(a)
- else:
- for a in args:
- roundtrip(a)
-
-if __name__=='__main__':
- main(sys.argv[1:])
diff --git a/pinfer/unparse3.py b/pinfer/unparse3.py
deleted file mode 100644
index 0936cb2..0000000
--- a/pinfer/unparse3.py
+++ /dev/null
@@ -1,610 +0,0 @@
-# From Python 3's Tools/parser/unparse.py
-# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved
-
-"Usage: unparse.py <path to source file>"
-import sys
-import ast
-import tokenize
-import io
-import os
-
-# Large float and imaginary literals get turned into infinities in the AST.
-# We unparse those infinities to INFSTR.
-INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
-
-def interleave(inter, f, seq):
- """Call f on each item in seq, calling inter() in between.
- """
- seq = iter(seq)
- try:
- f(next(seq))
- except StopIteration:
- pass
- else:
- for x in seq:
- inter()
- f(x)
-
-class Unparser:
- """Methods in this class recursively traverse an AST and
- output source code for the abstract syntax; original formatting
- is disregarded. """
-
- def __init__(self, tree, file = sys.stdout):
- """Unparser(tree, file=sys.stdout) -> None.
- Print the source for tree to file."""
- self.f = file
- self._indent = 0
- self.dispatch(tree)
- print("", file=self.f)
- self.f.flush()
-
- def fill(self, text = ""):
- "Indent a piece of text, according to the current indentation level"
- self.f.write("\n"+" "*self._indent + text)
-
- def write(self, text):
- "Append a piece of text to the current line."
- self.f.write(text)
-
- def enter(self):
- "Print ':', and increase the indentation."
- self.write(":")
- self._indent += 1
-
- def leave(self):
- "Decrease the indentation level."
- self._indent -= 1
-
- def dispatch(self, tree):
- "Dispatcher function, dispatching tree type T to method _T."
- if isinstance(tree, list):
- for t in tree:
- self.dispatch(t)
- return
- meth = getattr(self, "_"+tree.__class__.__name__)
- meth(tree)
-
-
- ############### Unparsing methods ######################
- # There should be one method per concrete grammar type #
- # Constructors should be grouped by sum type. Ideally, #
- # this would follow the order in the grammar, but #
- # currently doesn't. #
- ########################################################
-
- def _Module(self, tree):
- for stmt in tree.body:
- self.dispatch(stmt)
-
- # stmt
- def _Expr(self, tree):
- self.fill()
- self.dispatch(tree.value)
-
- def _Import(self, t):
- self.fill("import ")
- interleave(lambda: self.write(", "), self.dispatch, t.names)
-
- def _ImportFrom(self, t):
- self.fill("from ")
- self.write("." * t.level)
- if t.module:
- self.write(t.module)
- self.write(" import ")
- interleave(lambda: self.write(", "), self.dispatch, t.names)
-
- def _Assign(self, t):
- self.fill()
- for target in t.targets:
- self.dispatch(target)
- self.write(" = ")
- self.dispatch(t.value)
-
- def _AugAssign(self, t):
- self.fill()
- self.dispatch(t.target)
- self.write(" "+self.binop[t.op.__class__.__name__]+"= ")
- self.dispatch(t.value)
-
- def _Return(self, t):
- self.fill("return")
- if t.value:
- self.write(" ")
- self.dispatch(t.value)
-
- def _Pass(self, t):
- self.fill("pass")
-
- def _Break(self, t):
- self.fill("break")
-
- def _Continue(self, t):
- self.fill("continue")
-
- def _Delete(self, t):
- self.fill("del ")
- interleave(lambda: self.write(", "), self.dispatch, t.targets)
-
- def _Assert(self, t):
- self.fill("assert ")
- self.dispatch(t.test)
- if t.msg:
- self.write(", ")
- self.dispatch(t.msg)
-
- def _Global(self, t):
- self.fill("global ")
- interleave(lambda: self.write(", "), self.write, t.names)
-
- def _Nonlocal(self, t):
- self.fill("nonlocal ")
- interleave(lambda: self.write(", "), self.write, t.names)
-
- def _Yield(self, t):
- self.write("(")
- self.write("yield")
- if t.value:
- self.write(" ")
- self.dispatch(t.value)
- self.write(")")
-
- def _YieldFrom(self, t):
- self.write("(")
- self.write("yield from")
- if t.value:
- self.write(" ")
- self.dispatch(t.value)
- self.write(")")
-
- def _Raise(self, t):
- self.fill("raise")
- if not t.exc:
- assert not t.cause
- return
- self.write(" ")
- self.dispatch(t.exc)
- if t.cause:
- self.write(" from ")
- self.dispatch(t.cause)
-
- def _Try(self, t):
- self.fill("try")
- self.enter()
- self.dispatch(t.body)
- self.leave()
- for ex in t.handlers:
- self.dispatch(ex)
- if t.orelse:
- self.fill("else")
- self.enter()
- self.dispatch(t.orelse)
- self.leave()
- if t.finalbody:
- self.fill("finally")
- self.enter()
- self.dispatch(t.finalbody)
- self.leave()
-
- def _ExceptHandler(self, t):
- self.fill("except")
- if t.type:
- self.write(" ")
- self.dispatch(t.type)
- if t.name:
- self.write(" as ")
- self.write(t.name)
- self.enter()
- self.dispatch(t.body)
- self.leave()
-
- def _ClassDef(self, t):
- self.write("\n")
- for deco in t.decorator_list:
- self.fill("@")
- self.dispatch(deco)
- self.fill("class "+t.name)
- self.write("(")
- comma = False
- for e in t.bases:
- if comma: self.write(", ")
- else: comma = True
- self.dispatch(e)
- for e in t.keywords:
- if comma: self.write(", ")
- else: comma = True
- self.dispatch(e)
- if t.starargs:
- if comma: self.write(", ")
- else: comma = True
- self.write("*")
- self.dispatch(t.starargs)
- if t.kwargs:
- if comma: self.write(", ")
- else: comma = True
- self.write("**")
- self.dispatch(t.kwargs)
- self.write(")")
-
- self.enter()
- self.dispatch(t.body)
- self.leave()
-
- def _FunctionDef(self, t):
- self.write("\n")
- for deco in t.decorator_list:
- self.fill("@")
- self.dispatch(deco)
- self.fill("def "+t.name + "(")
- self.dispatch(t.args)
- self.write(")")
- if t.returns:
- self.write(" -> ")
- self.dispatch(t.returns)
- self.enter()
- self.dispatch(t.body)
- self.leave()
-
- def _For(self, t):
- self.fill("for ")
- self.dispatch(t.target)
- self.write(" in ")
- self.dispatch(t.iter)
- self.enter()
- self.dispatch(t.body)
- self.leave()
- if t.orelse:
- self.fill("else")
- self.enter()
- self.dispatch(t.orelse)
- self.leave()
-
- def _If(self, t):
- self.fill("if ")
- self.dispatch(t.test)
- self.enter()
- self.dispatch(t.body)
- self.leave()
- # collapse nested ifs into equivalent elifs.
- while (t.orelse and len(t.orelse) == 1 and
- isinstance(t.orelse[0], ast.If)):
- t = t.orelse[0]
- self.fill("elif ")
- self.dispatch(t.test)
- self.enter()
- self.dispatch(t.body)
- self.leave()
- # final else
- if t.orelse:
- self.fill("else")
- self.enter()
- self.dispatch(t.orelse)
- self.leave()
-
- def _While(self, t):
- self.fill("while ")
- self.dispatch(t.test)
- self.enter()
- self.dispatch(t.body)
- self.leave()
- if t.orelse:
- self.fill("else")
- self.enter()
- self.dispatch(t.orelse)
- self.leave()
-
- def _With(self, t):
- self.fill("with ")
- interleave(lambda: self.write(", "), self.dispatch, t.items)
- self.enter()
- self.dispatch(t.body)
- self.leave()
-
- # expr
- def _Bytes(self, t):
- self.write(repr(t.s))
-
- def _Str(self, tree):
- self.write(repr(tree.s))
-
- def _Name(self, t):
- self.write(t.id)
-
- def _NameConstant(self, t):
- self.write(repr(t.value))
-
- def _Num(self, t):
- # Substitute overflowing decimal literal for AST infinities.
- self.write(repr(t.n).replace("inf", INFSTR))
-
- def _List(self, t):
- self.write("[")
- interleave(lambda: self.write(", "), self.dispatch, t.elts)
- self.write("]")
-
- def _ListComp(self, t):
- self.write("[")
- self.dispatch(t.elt)
- for gen in t.generators:
- self.dispatch(gen)
- self.write("]")
-
- def _GeneratorExp(self, t):
- self.write("(")
- self.dispatch(t.elt)
- for gen in t.generators:
- self.dispatch(gen)
- self.write(")")
-
- def _SetComp(self, t):
- self.write("{")
- self.dispatch(t.elt)
- for gen in t.generators:
- self.dispatch(gen)
- self.write("}")
-
- def _DictComp(self, t):
- self.write("{")
- self.dispatch(t.key)
- self.write(": ")
- self.dispatch(t.value)
- for gen in t.generators:
- self.dispatch(gen)
- self.write("}")
-
- def _comprehension(self, t):
- self.write(" for ")
- self.dispatch(t.target)
- self.write(" in ")
- self.dispatch(t.iter)
- for if_clause in t.ifs:
- self.write(" if ")
- self.dispatch(if_clause)
-
- def _IfExp(self, t):
- self.write("(")
- self.dispatch(t.body)
- self.write(" if ")
- self.dispatch(t.test)
- self.write(" else ")
- self.dispatch(t.orelse)
- self.write(")")
-
- def _Set(self, t):
- assert(t.elts) # should be at least one element
- self.write("{")
- interleave(lambda: self.write(", "), self.dispatch, t.elts)
- self.write("}")
-
- def _Dict(self, t):
- self.write("{")
- def write_pair(pair):
- (k, v) = pair
- self.dispatch(k)
- self.write(": ")
- self.dispatch(v)
- interleave(lambda: self.write(", "), write_pair, zip(t.keys, t.values))
- self.write("}")
-
- def _Tuple(self, t):
- self.write("(")
- if len(t.elts) == 1:
- (elt,) = t.elts
- self.dispatch(elt)
- self.write(",")
- else:
- interleave(lambda: self.write(", "), self.dispatch, t.elts)
- self.write(")")
-
- unop = {"Invert":"~", "Not": "not", "UAdd":"+", "USub":"-"}
- def _UnaryOp(self, t):
- self.write("(")
- self.write(self.unop[t.op.__class__.__name__])
- self.write(" ")
- self.dispatch(t.operand)
- self.write(")")
-
- binop = { "Add":"+", "Sub":"-", "Mult":"*", "Div":"/", "Mod":"%",
- "LShift":"<<", "RShift":">>", "BitOr":"|", "BitXor":"^", "BitAnd":"&",
- "FloorDiv":"//", "Pow": "**"}
- def _BinOp(self, t):
- self.write("(")
- self.dispatch(t.left)
- self.write(" " + self.binop[t.op.__class__.__name__] + " ")
- self.dispatch(t.right)
- self.write(")")
-
- cmpops = {"Eq":"==", "NotEq":"!=", "Lt":"<", "LtE":"<=", "Gt":">", "GtE":">=",
- "Is":"is", "IsNot":"is not", "In":"in", "NotIn":"not in"}
- def _Compare(self, t):
- self.write("(")
- self.dispatch(t.left)
- for o, e in zip(t.ops, t.comparators):
- self.write(" " + self.cmpops[o.__class__.__name__] + " ")
- self.dispatch(e)
- self.write(")")
-
- boolops = {ast.And: 'and', ast.Or: 'or'}
- def _BoolOp(self, t):
- self.write("(")
- s = " %s " % self.boolops[t.op.__class__]
- interleave(lambda: self.write(s), self.dispatch, t.values)
- self.write(")")
-
- def _Attribute(self,t):
- self.dispatch(t.value)
- # Special case: 3.__abs__() is a syntax error, so if t.value
- # is an integer literal then we need to either parenthesize
- # it or add an extra space to get 3 .__abs__().
- if isinstance(t.value, ast.Num) and isinstance(t.value.n, int):
- self.write(" ")
- self.write(".")
- self.write(t.attr)
-
- def _Call(self, t):
- self.dispatch(t.func)
- self.write("(")
- comma = False
- for e in t.args:
- if comma: self.write(", ")
- else: comma = True
- self.dispatch(e)
- for e in t.keywords:
- if comma: self.write(", ")
- else: comma = True
- self.dispatch(e)
- if t.starargs:
- if comma: self.write(", ")
- else: comma = True
- self.write("*")
- self.dispatch(t.starargs)
- if t.kwargs:
- if comma: self.write(", ")
- else: comma = True
- self.write("**")
- self.dispatch(t.kwargs)
- self.write(")")
-
- def _Subscript(self, t):
- self.dispatch(t.value)
- self.write("[")
- self.dispatch(t.slice)
- self.write("]")
-
- def _Starred(self, t):
- self.write("*")
- self.dispatch(t.value)
-
- # slice
- def _Ellipsis(self, t):
- self.write("...")
-
- def _Index(self, t):
- self.dispatch(t.value)
-
- def _Slice(self, t):
- if t.lower:
- self.dispatch(t.lower)
- self.write(":")
- if t.upper:
- self.dispatch(t.upper)
- if t.step:
- self.write(":")
- self.dispatch(t.step)
-
- def _ExtSlice(self, t):
- interleave(lambda: self.write(', '), self.dispatch, t.dims)
-
- # argument
- def _arg(self, t):
- self.write(t.arg)
- if t.annotation:
- self.write(": ")
- self.dispatch(t.annotation)
-
- # others
- def _arguments(self, t):
- first = True
- # normal arguments
- defaults = [None] * (len(t.args) - len(t.defaults)) + t.defaults
- for a, d in zip(t.args, defaults):
- if first:first = False
- else: self.write(", ")
- self.dispatch(a)
- if d:
- self.write("=")
- self.dispatch(d)
-
- # varargs, or bare '*' if no varargs but keyword-only arguments present
- if t.vararg or t.kwonlyargs:
- if first:first = False
- else: self.write(", ")
- self.write("*")
- if t.vararg:
- self.write(t.vararg.arg)
- if t.vararg.annotation:
- self.write(": ")
- self.dispatch(t.vararg.annotation)
-
- # keyword-only arguments
- if t.kwonlyargs:
- for a, d in zip(t.kwonlyargs, t.kw_defaults):
- if first:first = False
- else: self.write(", ")
- self.dispatch(a),
- if d:
- self.write("=")
- self.dispatch(d)
-
- # kwargs
- if t.kwarg:
- if first:first = False
- else: self.write(", ")
- self.write("**"+t.kwarg.arg)
- if t.kwarg.annotation:
- self.write(": ")
- self.dispatch(t.kwarg.annotation)
-
- def _keyword(self, t):
- self.write(t.arg)
- self.write("=")
- self.dispatch(t.value)
-
- def _Lambda(self, t):
- self.write("(")
- self.write("lambda ")
- self.dispatch(t.args)
- self.write(": ")
- self.dispatch(t.body)
- self.write(")")
-
- def _alias(self, t):
- self.write(t.name)
- if t.asname:
- self.write(" as "+t.asname)
-
- def _withitem(self, t):
- self.dispatch(t.context_expr)
- if t.optional_vars:
- self.write(" as ")
- self.dispatch(t.optional_vars)
-
-def roundtrip(filename, output=sys.stdout):
- with open(filename, "rb") as pyfile:
- encoding = tokenize.detect_encoding(pyfile.readline)[0]
- with open(filename, "r", encoding=encoding) as pyfile:
- source = pyfile.read()
- tree = compile(source, filename, "exec", ast.PyCF_ONLY_AST)
- Unparser(tree, output)
-
-
-
-def testdir(a):
- try:
- names = [n for n in os.listdir(a) if n.endswith('.py')]
- except OSError:
- print("Directory not readable: %s" % a, file=sys.stderr)
- else:
- for n in names:
- fullname = os.path.join(a, n)
- if os.path.isfile(fullname):
- output = io.StringIO()
- print('Testing %s' % fullname)
- try:
- roundtrip(fullname, output)
- except Exception as e:
- print(' Failed to compile, exception is %s' % repr(e))
- elif os.path.isdir(fullname):
- testdir(fullname)
-
-def main(args):
- if args[0] == '--testdir':
- for a in args[1:]:
- testdir(a)
- else:
- for a in args:
- roundtrip(a)
-
-if __name__=='__main__':
- main(sys.argv[1:])
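The Unparser class in the two deleted unparse modules regenerates source text from an AST by dispatching on node class names. A minimal round-trip sketch, assuming the Python 3 variant above is importable as unparse3 (the sample source string is illustrative):

import ast
import io
import unparse3

source = 'def add(a, b):\n    return a + b\n'
tree = compile(source, '<example>', 'exec', ast.PyCF_ONLY_AST)
buf = io.StringIO()
unparse3.Unparser(tree, buf)  # walks the tree and writes the regenerated source to buf
print(buf.getvalue())
# The sample deliberately avoids literal constants, since this vintage of the
# module renders them via _Num/_Str rather than the newer ast.Constant node.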
diff --git a/pytest.ini b/pytest.ini
deleted file mode 100644
index 2b14288..0000000
--- a/pytest.ini
+++ /dev/null
@@ -1,14 +0,0 @@
-[pytest]
-# testpaths is new in 2.8
-minversion = 2.8
-
-testpaths = mypy/test
-
-python_files = test*.py
-
-# Empty patterns for the default python collector, so collection sticks to our plugin's collector
-python_classes =
-python_functions =
-
-# always run in parallel (requires pytest-xdist, see test-requirements.txt)
-addopts = -nauto --cov-append --cov-report=
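With this configuration, a plain "python -m pytest" invocation restricts collection to mypy/test (testpaths), fans the tests out across all CPUs through pytest-xdist (-nauto), and appends coverage data through pytest-cov (--cov-append). A narrowed run might look like the following; the file and keyword filter are purely illustrative:

python -m pytest mypy/test/testcheck.py -k Tuple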
diff --git a/runtests.py b/runtests.py
deleted file mode 100755
index 2caa751..0000000
--- a/runtests.py
+++ /dev/null
@@ -1,428 +0,0 @@
-#!/usr/bin/env python3
-"""Mypy test runner."""
-
-if False:
- import typing
-
-if True:
- # When this is run as a script, `typing` is not available yet.
- import sys
- from os.path import join, isdir
-
- def get_versions(): # type: () -> typing.List[str]
- major = sys.version_info[0]
- minor = sys.version_info[1]
- if major == 2:
- return ['2.7']
- else:
- # Generate the list of Python versions to use.
- # For Python 2 this is just ['2.7'] (handled above).
- # For Python 3.x it is ['3.x', '3.(x-1)', ..., '3.0'].
- return ['%d.%d' % (major, i) for i in range(minor, -1, -1)]
-
- sys.path[0:0] = [v for v in [join('lib-typing', v) for v in get_versions()] if isdir(v)]
- # Now `typing` is available.
-
-
-from typing import Dict, List, Optional, Set, Iterable
-
-from mypy.waiter import Waiter, LazySubprocess
-from mypy import util
-from mypy.test.config import test_data_prefix
-from mypy.test.testpythoneval import python_eval_files, python_34_eval_files
-
-import itertools
-import os
-import re
-
-
-# Ideally, all tests would be `discover`able so that they can be driven
-# (and parallelized) by an external test driver.
-
-class Driver:
-
- def __init__(self, whitelist: List[str], blacklist: List[str],
- arglist: List[str], verbosity: int, parallel_limit: int,
- xfail: List[str], coverage: bool) -> None:
- self.whitelist = whitelist
- self.blacklist = blacklist
- self.arglist = arglist
- self.verbosity = verbosity
- self.waiter = Waiter(verbosity=verbosity, limit=parallel_limit, xfail=xfail)
- self.versions = get_versions()
- self.cwd = os.getcwd()
- self.mypy = os.path.join(self.cwd, 'scripts', 'mypy')
- self.env = dict(os.environ)
- self.coverage = coverage
-
- def prepend_path(self, name: str, paths: List[str]) -> None:
- old_val = self.env.get(name)
- paths = [p for p in paths if isdir(p)]
- if not paths:
- return
- if old_val is not None:
- new_val = ':'.join(itertools.chain(paths, [old_val]))
- else:
- new_val = ':'.join(paths)
- self.env[name] = new_val
-
- def allow(self, name: str) -> bool:
- if any(f in name for f in self.whitelist):
- if not any(f in name for f in self.blacklist):
- if self.verbosity >= 2:
- print('SELECT #%d %s' % (len(self.waiter.queue), name))
- return True
- if self.verbosity >= 3:
- print('OMIT %s' % name)
- return False
-
- def add_mypy_cmd(self, name: str, mypy_args: List[str], cwd: Optional[str] = None) -> None:
- full_name = 'check %s' % name
- if not self.allow(full_name):
- return
- args = [sys.executable, self.mypy] + mypy_args
- args.append('--show-traceback')
- self.waiter.add(LazySubprocess(full_name, args, cwd=cwd, env=self.env))
-
- def add_mypy(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
- self.add_mypy_cmd(name, list(args), cwd=cwd)
-
- def add_mypy_modules(self, name: str, modules: Iterable[str],
- cwd: Optional[str] = None) -> None:
- args = list(itertools.chain(*(['-m', mod] for mod in modules)))
- self.add_mypy_cmd(name, args, cwd=cwd)
-
- def add_mypy_package(self, name: str, packagename: str, *flags: str) -> None:
- self.add_mypy_cmd(name, ['-p', packagename] + list(flags))
-
- def add_mypy_string(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
- self.add_mypy_cmd(name, ['-c'] + list(args), cwd=cwd)
-
- def add_pytest(self, name: str, pytest_args: List[str], coverage: bool = False) -> None:
- full_name = 'pytest %s' % name
- if not self.allow(full_name):
- return
- if coverage and self.coverage:
- args = [sys.executable, '-m', 'pytest', '--cov=mypy'] + pytest_args
- else:
- args = [sys.executable, '-m', 'pytest'] + pytest_args
-
- self.waiter.add(LazySubprocess(full_name, args, env=self.env))
-
- def add_python(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
- name = 'run %s' % name
- if not self.allow(name):
- return
- largs = list(args)
- largs[0:0] = [sys.executable]
- env = self.env
- self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env))
-
- def add_python_mod(self, name: str, *args: str, cwd: Optional[str] = None,
- coverage: bool = False) -> None:
- name = 'run %s' % name
- if not self.allow(name):
- return
- largs = list(args)
- if coverage and self.coverage:
- largs[0:0] = ['coverage', 'run', '-m']
- else:
- largs[0:0] = [sys.executable, '-m']
- env = self.env
- self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env))
-
- def add_python_string(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
- name = 'run %s' % name
- if not self.allow(name):
- return
- largs = list(args)
- largs[0:0] = [sys.executable, '-c']
- env = self.env
- self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env))
-
- def add_python2(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
- name = 'run2 %s' % name
- if not self.allow(name):
- return
- largs = list(args)
- python2 = util.try_find_python2_interpreter()
- assert python2, "Couldn't find a Python 2.7 interpreter"
- largs[0:0] = [python2]
- env = self.env
- self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env))
-
- def add_flake8(self, cwd: Optional[str] = None) -> None:
- name = 'lint'
- if not self.allow(name):
- return
- largs = ['flake8', '-j{}'.format(self.waiter.limit)]
- env = self.env
- self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env))
-
- def list_tasks(self) -> None:
- for id, task in enumerate(self.waiter.queue):
- print('{id}:{task}'.format(id=id, task=task.name))
-
-
-def add_basic(driver: Driver) -> None:
- if False:
- driver.add_mypy('file setup.py', 'setup.py')
- driver.add_mypy('file runtests.py', 'runtests.py')
- driver.add_mypy('legacy entry script', 'scripts/mypy')
- driver.add_mypy('legacy myunit script', 'scripts/myunit')
- # needs typed_ast installed:
- driver.add_mypy('fast-parse', '--fast-parse', 'test-data/samples/hello.py')
-
-
-def add_selftypecheck(driver: Driver) -> None:
- driver.add_mypy_package('package mypy', 'mypy', '--fast-parser',
- '--config-file', 'mypy_self_check.ini')
- driver.add_mypy_package('package mypy', 'mypy', '--fast-parser',
- '--config-file', 'mypy_strict_optional.ini')
-
-
-def find_files(base: str, prefix: str = '', suffix: str = '') -> List[str]:
- return [join(root, f)
- for root, dirs, files in os.walk(base)
- for f in files
- if f.startswith(prefix) and f.endswith(suffix)]
-
-
-def file_to_module(file: str) -> str:
- rv = os.path.splitext(file)[0].replace(os.sep, '.')
- if rv.endswith('.__init__'):
- rv = rv[:-len('.__init__')]
- return rv
-
-
-def add_imports(driver: Driver) -> None:
- # Make sure each module can be imported on its own.
- # There is currently a bug in mypy where a module can pass typecheck
- # because of *implicit* imports from other modules.
- for f in find_files('mypy', suffix='.py'):
- mod = file_to_module(f)
- if not mod.endswith('.__main__'):
- driver.add_python_string('import %s' % mod, 'import %s' % mod)
-
-
-PYTEST_FILES = [os.path.join('mypy', 'test', '{}.py'.format(name)) for name in [
- 'testcheck', 'testextensions',
-]]
-
-
-def add_pytest(driver: Driver) -> None:
- for f in PYTEST_FILES:
- driver.add_pytest(f, [f] + driver.arglist, True)
-
-
-def add_myunit(driver: Driver) -> None:
- for f in find_files('mypy', prefix='test', suffix='.py'):
- mod = file_to_module(f)
- if mod in ('mypy.test.testpythoneval', 'mypy.test.testcmdline'):
- # Run Python evaluation integration tests and command-line
- # parsing tests separately since they are much slower than
- # proper unit tests.
- pass
- elif f in PYTEST_FILES:
- # This module has been converted to pytest; don't try to use myunit.
- pass
- else:
- driver.add_python_mod('unit-test %s' % mod, 'mypy.myunit', '-m', mod,
- *driver.arglist, coverage=True)
-
-
-def add_pythoneval(driver: Driver) -> None:
- cases = set()
- case_re = re.compile(r'^\[case ([^\]]+)\]$')
- for file in python_eval_files + python_34_eval_files:
- with open(os.path.join(test_data_prefix, file), 'r') as f:
- for line in f:
- m = case_re.match(line)
- if m:
- case_name = m.group(1)
- assert case_name[:4] == 'test'
- cases.add(case_name[4:5])
-
- for prefix in sorted(cases):
- driver.add_python_mod(
- 'eval-test-' + prefix,
- 'mypy.myunit',
- '-m',
- 'mypy.test.testpythoneval',
- 'test_testpythoneval_PythonEvaluationSuite.test' + prefix + '*',
- *driver.arglist,
- coverage=True
- )
-
-
-def add_cmdline(driver: Driver) -> None:
- driver.add_python_mod('cmdline-test', 'mypy.myunit',
- '-m', 'mypy.test.testcmdline', *driver.arglist,
- coverage=True)
-
-
-def add_stubs(driver: Driver) -> None:
- # We only test each module in the one version mypy prefers to find.
- # TODO: test stubs for other versions, especially Python 2 stubs.
-
- modules = set() # type: Set[str]
- modules.add('typing')
- # TODO: This should also test Python 2, and pass pyversion accordingly.
- for version in ["2and3", "3", "3.3", "3.4", "3.5"]:
- for stub_type in ['builtins', 'stdlib', 'third_party']:
- stubdir = join('typeshed', stub_type, version)
- for f in find_files(stubdir, suffix='.pyi'):
- module = file_to_module(f[len(stubdir) + 1:])
- modules.add(module)
-
- driver.add_mypy_modules('stubs', sorted(modules))
-
-
-def add_stdlibsamples(driver: Driver) -> None:
- seen = set() # type: Set[str]
- for version in driver.versions:
- stdlibsamples_dir = join(driver.cwd, 'test-data', 'stdlib-samples', version)
- modules = [] # type: List[str]
- for f in find_files(stdlibsamples_dir, prefix='test_', suffix='.py'):
- module = file_to_module(f[len(stdlibsamples_dir) + 1:])
- if module not in seen:
- seen.add(module)
- modules.append(module)
- if modules:
- driver.add_mypy_modules('stdlibsamples (%s)' % (version,), modules,
- cwd=stdlibsamples_dir)
-
-
-def add_samples(driver: Driver) -> None:
- for f in find_files(os.path.join('test-data', 'samples'), suffix='.py'):
- driver.add_mypy('file %s' % f, f, '--fast-parser')
-
-
-def usage(status: int) -> None:
- print('Usage: %s [-h | -v | -q | [-x] FILTER | -a ARG] ... [-- FILTER ...]' % sys.argv[0])
- print()
- print('Run mypy tests. If given no arguments, run all tests.')
- print()
- print('Examples:')
- print(' %s unit-test (run unit tests only)' % sys.argv[0])
- print(' %s unit-test -a "*tuple*"' % sys.argv[0])
- print(' (run all unit tests with "tuple" in test name)')
- print()
- print('Options:')
- print(' -h, --help show this help')
- print(' -v, --verbose increase driver verbosity')
- print(' -q, --quiet decrease driver verbosity')
- print(' -jN run N tasks at once (default: one per CPU)')
- print(' -a, --argument ARG pass an argument to myunit tasks')
- print(' (-v: verbose; glob pattern: filter by test name)')
- print(' -l, --list list included tasks (after filtering) and exit')
- print(' FILTER include tasks matching FILTER')
- print(' -x, --exclude FILTER exclude tasks matching FILTER')
- print(' -c, --coverage calculate code coverage while running tests')
- print(' -- treat all remaining arguments as positional')
- sys.exit(status)
-
-
-def sanity() -> None:
- paths = os.getenv('PYTHONPATH')
- if paths is None:
- return
- failed = False
- for p in paths.split(os.pathsep):
- if not os.path.isabs(p):
- print('Relative PYTHONPATH entry %r' % p)
- failed = True
- if failed:
- print('Please use absolute paths so that chdir() tests can work.')
- print('Cowardly refusing to continue.')
- sys.exit(1)
-
-
-def main() -> None:
- sanity()
-
- verbosity = 0
- parallel_limit = 0
- whitelist = [] # type: List[str]
- blacklist = [] # type: List[str]
- arglist = [] # type: List[str]
- list_only = False
- coverage = False
-
- allow_opts = True
- curlist = whitelist
- for a in sys.argv[1:]:
- if curlist is not arglist and allow_opts and a.startswith('-'):
- if curlist is not whitelist:
- break
- if a == '--':
- allow_opts = False
- elif a == '-v' or a == '--verbose':
- verbosity += 1
- elif a == '-q' or a == '--quiet':
- verbosity -= 1
- elif a.startswith('-j'):
- try:
- parallel_limit = int(a[2:])
- except ValueError:
- usage(1)
- elif a == '-x' or a == '--exclude':
- curlist = blacklist
- elif a == '-a' or a == '--argument':
- curlist = arglist
- elif a == '-l' or a == '--list':
- list_only = True
- elif a == '-c' or a == '--coverage':
- coverage = True
- elif a == '-h' or a == '--help':
- usage(0)
- else:
- usage(1)
- else:
- curlist.append(a)
- curlist = whitelist
- if curlist is blacklist:
- sys.exit('-x must be followed by a filter')
- if curlist is arglist:
- sys.exit('-a must be followed by an argument')
- # empty string is a substring of all names
- if not whitelist:
- whitelist.append('')
-
- driver = Driver(whitelist=whitelist, blacklist=blacklist, arglist=arglist,
- verbosity=verbosity, parallel_limit=parallel_limit, xfail=[], coverage=coverage)
-
- driver.prepend_path('PATH', [join(driver.cwd, 'scripts')])
- driver.prepend_path('MYPYPATH', [driver.cwd])
- driver.prepend_path('PYTHONPATH', [driver.cwd])
- driver.prepend_path('PYTHONPATH', [join(driver.cwd, 'lib-typing', v) for v in driver.versions])
-
- add_pythoneval(driver)
- add_cmdline(driver)
- add_basic(driver)
- add_selftypecheck(driver)
- add_pytest(driver)
- add_myunit(driver)
- add_imports(driver)
- add_stubs(driver)
- add_stdlibsamples(driver)
- add_samples(driver)
- driver.add_flake8()
-
- if list_only:
- driver.list_tasks()
- return
-
- exit_code = driver.waiter.run()
-
- if verbosity >= 1:
- times = driver.waiter.times2 if verbosity >= 2 else driver.waiter.times1
- times_sortable = ((t, tp) for (tp, t) in times.items())
- for total_time, test_type in sorted(times_sortable, reverse=True):
- print('total time in %s: %f' % (test_type, total_time))
-
- sys.exit(exit_code)
-
-
-if __name__ == '__main__':
- main()
diff --git a/scripts/dumpmodule.py b/scripts/dumpmodule.py
new file mode 100644
index 0000000..5d31241
--- /dev/null
+++ b/scripts/dumpmodule.py
@@ -0,0 +1,161 @@
+"""Dump the runtime structure of a module as JSON.
+
+This is used for testing stubs.
+
+This needs to run in Python 2.7 and 3.x.
+"""
+
+from __future__ import print_function
+
+import importlib
+import json
+import sys
+import types
+from typing import Text
+from collections import defaultdict
+
+
+if sys.version_info >= (3, 0):
+ import inspect
+ long = int
+else:
+ import inspect2 as inspect
+
+
+
+def dump_module(id):
+ m = importlib.import_module(id)
+ data = module_to_json(m)
+ print(json.dumps(data, ensure_ascii=True, indent=4, sort_keys=True))
+
+
+def module_to_json(m):
+ result = {}
+ for name, value in m.__dict__.items():
+ # Filter out some useless attributes.
+
+ if name in ('__file__',
+ '__doc__',
+ '__name__',
+ '__builtins__',
+ '__package__'):
+ continue
+
+ if name == '__all__':
+ result[name] = {'type': 'list', 'values': sorted(value)}
+ else:
+ result[name] = dump_value(value)
+
+ try:
+ _, line = inspect.getsourcelines(getattr(m, name))
+ except (TypeError, OSError):
+ line = None
+
+ result[name]['line'] = line
+
+ return result
+
+
+def dump_value(value, depth=0):
+ if depth > 10:
+ return 'max_recursion_depth_exceeded'
+ if isinstance(value, type):
+ return dump_class(value, depth + 1)
+ if inspect.isfunction(value):
+ return dump_function(value)
+ if callable(value):
+ return {'type': 'callable'} # TODO more information
+ if isinstance(value, types.ModuleType):
+ return {'type': 'module'} # TODO module name
+ if inspect.isdatadescriptor(value):
+ return {'type': 'datadescriptor'}
+
+ if inspect.ismemberdescriptor(value):
+ return {'type': 'memberdescriptor'}
+ return dump_simple(value)
+
+
+def dump_simple(value):
+ if type(value) in (int, bool, float, str, bytes, Text, long, list, set, dict, tuple):
+ return {'type': type(value).__name__}
+ if value is None:
+ return {'type': 'None'}
+ if value is inspect.Parameter.empty:
+ return {'type': None} # note: None here (vs. the string 'None' above) marks Parameter.empty
+ return {'type': 'unknown'}
+
+
+def dump_class(value, depth):
+ return {
+ 'type': 'class',
+ 'attributes': dump_attrs(value, depth),
+ }
+
+
+special_methods = [
+ '__init__',
+ '__str__',
+ '__int__',
+ '__float__',
+ '__bool__',
+ '__contains__',
+ '__iter__',
+]
+
+
+# Return a dict mapping attribute names to their dumped values.
+def dump_attrs(d, depth):
+ result = {}
+ seen = set()
+ try:
+ mro = d.mro()
+ except TypeError:
+ mro = [d]
+ for base in mro:
+ v = vars(base)
+ for name, value in v.items():
+ if name not in seen:
+ result[name] = dump_value(value, depth + 1)
+ seen.add(name)
+ for m in special_methods:
+ if hasattr(d, m) and m not in seen:
+ result[m] = dump_value(getattr(d, m), depth + 1)
+ return result
+
+
+kind_map = {
+ inspect.Parameter.POSITIONAL_ONLY: 'POS_ONLY',
+ inspect.Parameter.POSITIONAL_OR_KEYWORD: 'POS_OR_KW',
+ inspect.Parameter.VAR_POSITIONAL: 'VAR_POS',
+ inspect.Parameter.KEYWORD_ONLY: 'KW_ONLY',
+ inspect.Parameter.VAR_KEYWORD: 'VAR_KW',
+}
+
+
+def param_kind(p):
+ s = kind_map[p.kind]
+ if p.default != inspect.Parameter.empty:
+ assert s in ('POS_ONLY', 'POS_OR_KW', 'KW_ONLY')
+ s += '_OPT'
+ return s
+
+
+def dump_function(value):
+ try:
+ sig = inspect.signature(value)
+ except ValueError:
+ # inspect.signature() raises ValueError when it cannot determine a signature (e.g. some built-ins).
+ return {'type': 'invalid_signature'}
+ params = list(sig.parameters.items())
+ return {
+ 'type': 'function',
+ 'args': [(name, param_kind(p), dump_simple(p.default))
+ for name, p in params],
+ }
+
+
+if __name__ == '__main__':
+ import sys
+ if len(sys.argv) != 2:
+ sys.exit('usage: dumpmodule.py module-name')
+ dump_module(sys.argv[1])
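To make the intended use of the new dumper concrete, here is a minimal sketch (not part of the patch) that drives module_to_json() from Python 3. It assumes scripts/ is on sys.path (for example by running from that directory), and textwrap is only an illustrative target module.

    # Sketch only: dump one attribute of the textwrap module as JSON.
    import json
    import textwrap

    import dumpmodule  # the new scripts/dumpmodule.py

    data = dumpmodule.module_to_json(textwrap)
    # Every entry carries a 'type' key (e.g. 'function', 'class', 'module')
    # plus a 'line' key from inspect.getsourcelines(), or None if unavailable.
    print(json.dumps(data['dedent'], indent=4, sort_keys=True))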
diff --git a/scripts/stubtest.py b/scripts/stubtest.py
new file mode 100644
index 0000000..40c804f
--- /dev/null
+++ b/scripts/stubtest.py
@@ -0,0 +1,205 @@
+"""Tests for stubs.
+
+Verify that various things in stubs are consistent with how things behave
+at runtime.
+"""
+
+import importlib
+import sys
+from typing import Dict, Any, List
+from collections import defaultdict, namedtuple
+
+from mypy import build
+from mypy.build import default_data_dir, default_lib_path, find_modules_recursive
+from mypy.errors import CompileError
+from mypy import nodes
+from mypy.options import Options
+
+import dumpmodule
+
+if sys.version_info < (3, 4):
+ from singledispatch import singledispatch
+else:
+ from functools import singledispatch
+
+# TODO: email.contentmanager has a symbol table with a None node.
+# This seems like it should not be.
+
+skip = {
+ '_importlib_modulespec',
+ '_subprocess',
+ 'distutils.command.bdist_msi',
+ 'distutils.command.bdist_packager',
+ 'msvcrt',
+ 'wsgiref.types',
+ 'mypy_extensions',
+ 'unittest.mock', # mock.call infinite loops on inspect.getsourcelines
+ # https://bugs.python.org/issue25532
+ # TODO: can we filter only call?
+}
+
+messages = {
+ 'not_in_runtime': ('{error.stub_type} "{error.name}" defined at line'
+ ' {error.line} in stub but is not defined at runtime'),
+ 'not_in_stub': ('{error.module_type} "{error.name}" defined at line'
+ ' {error.line} at runtime but is not defined in stub'),
+ 'no_stubs': 'could not find typeshed {error.name}',
+ 'inconsistent': ('"{error.name}" is {error.stub_type} in stub but'
+ ' {error.module_type} at runtime'),
+}
+
+Error = namedtuple('Error', (
+ 'module',
+ 'name',
+ 'error_type',
+ 'line',
+ 'stub_type',
+ 'module_type'))
+
+
+def test_stub(name: str):
+ stubs = {
+ mod: stub for mod, stub in build_stubs(name).items()
+ if (mod == name or mod.startswith(name + '.')) and mod not in skip
+ }
+
+ for mod, stub in stubs.items():
+ instance = dump_module(mod)
+
+ for identifiers, *error in verify(stub, instance):
+ yield Error(mod, '.'.join(identifiers), *error)
+
+
+@singledispatch
+def verify(node, module_node):
+ raise TypeError('unknown mypy node ' + str(node))
+
+
+
+@verify.register(nodes.MypyFile)
+def verify_mypyfile(stub, instance):
+ if instance is None:
+ yield [], 'not_in_runtime', stub.line, type(stub), None
+ elif instance['type'] != 'file':
+ yield [], 'inconsistent', stub.line, type(stub), instance['type']
+ else:
+ stub_children = defaultdict(lambda: None, stub.names)
+ instance_children = defaultdict(lambda: None, instance['names'])
+
+ # TODO: I would rather not filter public children here.
+ # For example, what if the checker surfaces an inconsistency
+ # in the typing of a private child?
+ public_nodes = {
+ name: (stub_children[name], instance_children[name])
+ for name in set(stub_children) | set(instance_children)
+ if not name.startswith('_')
+ and (stub_children[name] is None or stub_children[name].module_public)
+ }
+
+ for node, (stub_child, instance_child) in public_nodes.items():
+ stub_child = getattr(stub_child, 'node', None)
+ for identifiers, *error in verify(stub_child, instance_child):
+ yield ([node] + identifiers, *error)
+
+@verify.register(nodes.TypeInfo)
+def verify_typeinfo(stub, instance):
+ if not instance:
+ yield [], 'not_in_runtime', stub.line, type(stub), None
+ elif instance['type'] != 'class':
+ yield [], 'inconsistent', stub.line, type(stub), instance['type']
+ else:
+ for attr, attr_node in stub.names.items():
+ subdump = instance['attributes'].get(attr, None)
+ for identifiers, *error in verify(attr_node.node, subdump):
+ yield ([attr] + identifiers, *error)
+
+
+@verify.register(nodes.FuncItem)
+def verify_funcitem(stub, instance):
+ if not instance:
+ yield [], 'not_in_runtime', stub.line, type(stub), None
+ elif 'type' not in instance or instance['type'] not in ('function', 'callable'):
+ yield [], 'inconsistent', stub.line, type(stub), instance.get('type')
+ # TODO check arguments and return value
+
+
+@verify.register(type(None))
+def verify_none(stub, instance):
+ if instance is None:
+ yield [], 'not_in_stub', None, None, None
+ else:
+ yield [], 'not_in_stub', instance['line'], None, instance['type']
+
+
+@verify.register(nodes.Var)
+def verify_var(node, module_node):
+ if False:
+ yield None
+ # Need to check if types are inconsistent.
+ #if 'type' not in dump or dump['type'] != node.node.type:
+ # import ipdb; ipdb.set_trace()
+ # yield name, 'inconsistent', node.node.line, shed_type, module_type
+
+
+@verify.register(nodes.OverloadedFuncDef)
+def verify_overloadedfuncdef(node, module_node):
+ # Should check types of the union of the overloaded types.
+ if False:
+ yield None
+
+
+@verify.register(nodes.TypeVarExpr)
+def verify_typevarexpr(node, module_node):
+ if False:
+ yield None
+
+
+@verify.register(nodes.Decorator)
+def verify_decorator(node, module_node):
+ if False:
+ yield None
+
+
+def dump_module(name: str) -> Dict[str, Any]:
+ mod = importlib.import_module(name)
+ return {'type': 'file', 'names': dumpmodule.module_to_json(mod)}
+
+
+def build_stubs(mod):
+ data_dir = default_data_dir(None)
+ options = Options()
+ options.python_version = (3, 6)
+ lib_path = default_lib_path(data_dir,
+ options.python_version,
+ custom_typeshed_dir=None)
+ sources = find_modules_recursive(mod, lib_path)
+ try:
+ res = build.build(sources=sources,
+ options=options)
+ messages = res.errors
+ except CompileError as error:
+ messages = error.messages
+
+ if messages:
+ for msg in messages:
+ print(msg)
+ sys.exit(1)
+ return res.files
+
+
+def main(args):
+ if len(args) == 1:
+ print('must provide at least one module to test')
+ sys.exit(1)
+ else:
+ modules = args[1:]
+
+ for module in modules:
+ for error in test_stub(module):
+ yield error
+
+
+if __name__ == '__main__':
+
+ for err in main(sys.argv):
+ print(messages[err.error_type].format(error=err))
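For reference, a rough sketch (not part of the patch) of driving the new stub checker programmatically. It mirrors the __main__ block above, assumes a mypy checkout with typeshed available and scripts/ prepended to sys.path, and uses textwrap purely as an example module.

    # Sketch only: report stub/runtime mismatches for a single module.
    import sys

    sys.path.insert(0, 'scripts')  # make stubtest and dumpmodule importable
    import stubtest

    # test_stub() yields Error namedtuples; err.error_type selects the
    # corresponding template from the module-level `messages` dict.
    for err in stubtest.test_stub('textwrap'):
        print(stubtest.messages[err.error_type].format(error=err))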
diff --git a/setup.cfg b/setup.cfg
index 857ea67..8320c2f 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,14 @@
[flake8]
max-line-length = 99
exclude =
+ build,
+ @*,
+ env,
+ docs/build,
+ out,
+ .venv,
+ .mypy_cache,
+ .cache,
docs/source/conf.py,
lib-typing/*,
misc/*,
@@ -21,7 +29,7 @@ show_missing = true
[metadata]
requires-dist =
- typed-ast >= 0.6.3
+ typed-ast >= 1.0.3, < 1.1.0
typing >= 3.5.3; python_version < "3.5"
[egg_info]
diff --git a/setup.py b/setup.py
index ec62117..78c6a63 100644
--- a/setup.py
+++ b/setup.py
@@ -14,12 +14,15 @@ if sys.version_info < (3, 2, 0):
# alternative forms of installing, as suggested by README.md).
from setuptools import setup
from setuptools.command.build_py import build_py
-from mypy.version import base_version
+from mypy.version import base_version, __version__
from mypy import git
git.verify_git_integrity_or_abort(".")
-version = base_version
+if any(dist_arg in sys.argv[1:] for dist_arg in ('bdist_wheel', 'sdist')):
+ version = base_version
+else:
+ version = __version__
description = 'Optional static typing for Python'
long_description = '''
Mypy -- Optional Static Typing for Python
@@ -100,7 +103,7 @@ if os.name == 'nt':
# "pip3 install git+git://github.com/python/mypy.git"
# (as suggested by README.md).
install_requires = []
-install_requires.append('typed-ast >= 0.6.3')
+install_requires.append('typed-ast >= 1.0.3, < 1.1.0')
if sys.version_info < (3, 5):
install_requires.append('typing >= 3.5.3')
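The version switch above picks base_version when building an sdist or wheel and mypy.version.__version__ otherwise. A small self-contained sketch of that branch, using illustrative values rather than the real contents of mypy/version.py:

    # Sketch only: how setup.py chooses the version string (example values).
    import sys

    base_version = '0.511'      # stand-in for mypy.version.base_version
    __version__ = '0.511-dev'   # stand-in for mypy.version.__version__

    if any(arg in sys.argv[1:] for arg in ('bdist_wheel', 'sdist')):
        version = base_version  # distributions ship the plain release number
    else:
        version = __version__   # other invocations keep the full version
    print(version)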
diff --git a/test-data/.flake8 b/test-data/.flake8
deleted file mode 100644
index a011f30..0000000
--- a/test-data/.flake8
+++ /dev/null
@@ -1,21 +0,0 @@
-# Some PEP8 deviations are considered irrelevant to stub files:
-# (error counts as of 2016-12-19)
-# 17381 E704 multiple statements on one line (def)
-# 11840 E301 expected 1 blank line
-# 7467 E302 expected 2 blank lines
-# 1772 E501 line too long
-# 1487 F401 imported but unused
-# 1248 E701 multiple statements on one line (colon)
-# 427 F811 redefinition
-# 356 E305 expected 2 blank lines
-
-# Nice-to-haves ignored for now
-# 152 E128 continuation line under-indented for visual indent
-# 43 E127 continuation line over-indented for visual indent
-
-[flake8]
-ignore = F401, F811, E127, E128, E301, E302, E305, E501, E701, E704, B303
-# We are checking with Python 3 but many of the stubs are Python 2 stubs.
-# A nice future improvement would be to provide separate .flake8
-# configurations for Python 2 and Python 3 files.
-builtins = StandardError,apply,basestring,buffer,cmp,coerce,execfile,file,intern,long,raw_input,reduce,reload,unichr,unicode,xrange
diff --git a/test-data/samples/bottles.py b/test-data/samples/bottles.py
deleted file mode 100644
index ddf77f5..0000000
--- a/test-data/samples/bottles.py
+++ /dev/null
@@ -1,13 +0,0 @@
-import typing
-
-REFRAIN = '''
-%d bottles of beer on the wall,
-%d bottles of beer,
-take one down, pass it around,
-%d bottles of beer on the wall!
-'''
-bottles_of_beer = 99
-while bottles_of_beer > 1:
- print(REFRAIN % (bottles_of_beer, bottles_of_beer,
- bottles_of_beer - 1))
- bottles_of_beer -= 1
diff --git a/test-data/samples/class.py b/test-data/samples/class.py
deleted file mode 100644
index d2eb4ac..0000000
--- a/test-data/samples/class.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import typing
-
-
-class BankAccount(object):
- def __init__(self, initial_balance: int = 0) -> None:
- self.balance = initial_balance
-
- def deposit(self, amount: int) -> None:
- self.balance += amount
-
- def withdraw(self, amount: int) -> None:
- self.balance -= amount
-
- def overdrawn(self) -> bool:
- return self.balance < 0
-my_account = BankAccount(15)
-my_account.withdraw(5)
-print(my_account.balance)
diff --git a/test-data/samples/cmdline.py b/test-data/samples/cmdline.py
deleted file mode 100644
index 105c27a..0000000
--- a/test-data/samples/cmdline.py
+++ /dev/null
@@ -1,8 +0,0 @@
-# This program adds up integers in the command line
-import sys
-import typing
-try:
- total = sum(int(arg) for arg in sys.argv[1:])
- print('sum =', total)
-except ValueError:
- print('Please supply integer arguments')
diff --git a/test-data/samples/crawl.py b/test-data/samples/crawl.py
deleted file mode 100644
index 56b6f8f..0000000
--- a/test-data/samples/crawl.py
+++ /dev/null
@@ -1,863 +0,0 @@
-#!/usr/bin/env python3.4
-
-"""A simple web crawler."""
-
-# This is cloned from <asyncio>/examples/crawl.py,
-# with type annotations added (PEP 484).
-#
-# TODO: convert to `async def` + `await` (PEP 492).
-
-import argparse
-import asyncio
-import cgi
-from http.client import BadStatusLine
-import logging
-import re
-import sys
-import time
-import urllib.parse
-from typing import Any, Generator, IO, Optional, Sequence, Set, Tuple
-
-
-ARGS = argparse.ArgumentParser(description="Web crawler")
-ARGS.add_argument(
- '--iocp', action='store_true', dest='iocp',
- default=False, help='Use IOCP event loop (Windows only)')
-ARGS.add_argument(
- '--select', action='store_true', dest='select',
- default=False, help='Use Select event loop instead of default')
-ARGS.add_argument(
- 'roots', nargs='*',
- default=[], help='Root URL (may be repeated)')
-ARGS.add_argument(
- '--max_redirect', action='store', type=int, metavar='N',
- default=10, help='Limit redirection chains (for 301, 302 etc.)')
-ARGS.add_argument(
- '--max_tries', action='store', type=int, metavar='N',
- default=4, help='Limit retries on network errors')
-ARGS.add_argument(
- '--max_tasks', action='store', type=int, metavar='N',
- default=100, help='Limit concurrent connections')
-ARGS.add_argument(
- '--max_pool', action='store', type=int, metavar='N',
- default=100, help='Limit connection pool size')
-ARGS.add_argument(
- '--exclude', action='store', metavar='REGEX',
- help='Exclude matching URLs')
-ARGS.add_argument(
- '--strict', action='store_true',
- default=True, help='Strict host matching (default)')
-ARGS.add_argument(
- '--lenient', action='store_false', dest='strict',
- default=False, help='Lenient host matching')
-ARGS.add_argument(
- '-v', '--verbose', action='count', dest='level',
- default=1, help='Verbose logging (repeat for more verbose)')
-ARGS.add_argument(
- '-q', '--quiet', action='store_const', const=0, dest='level',
- default=1, help='Quiet logging (opposite of --verbose)')
-
-
-ESCAPES = [('quot', '"'),
- ('gt', '>'),
- ('lt', '<'),
- ('amp', '&') # Must be last.
- ]
-
-
-def unescape(url: str) -> str:
- """Turn & into &, and so on.
-
- This is the inverse of cgi.escape().
- """
- for name, char in ESCAPES:
- url = url.replace('&' + name + ';', char)
- return url
-
-
-def fix_url(url: str) -> str:
- """Prefix a schema-less URL with http://."""
- if '://' not in url:
- url = 'http://' + url
- return url
-
-
-class Logger:
-
- def __init__(self, level: int) -> None:
- self.level = level
-
- def _log(self, n: int, args: Sequence[Any]) -> None:
- if self.level >= n:
- print(*args, file=sys.stderr, flush=True)
-
- def log(self, n: int, *args: Any) -> None:
- self._log(n, args)
-
- def __call__(self, n: int, *args: Any) -> None:
- self._log(n, args)
-
-
-KeyTuple = Tuple[str, int, bool]
-
-
-class ConnectionPool:
- """A connection pool.
-
- To open a connection, use reserve(). To recycle it, use unreserve().
-
- The pool is mostly just a mapping from (host, port, ssl) tuples to
- lists of Connections. The currently active connections are *not*
- in the data structure; get_connection() takes the connection out,
- and recycle_connection() puts it back in. To recycle a
- connection, call conn.close(recycle=True).
-
- There are limits to both the overall pool and the per-key pool.
- """
-
- def __init__(self, log: Logger, max_pool: int = 10, max_tasks: int = 5) -> None:
- self.log = log
- self.max_pool = max_pool # Overall limit.
- self.max_tasks = max_tasks # Per-key limit.
- self.loop = asyncio.get_event_loop()
- self.connections = {} # type: Dict[KeyTuple, List[Connection]]
- self.queue = [] # type: List[Connection]
-
- def close(self) -> None:
- """Close all connections available for reuse."""
- for conns in self.connections.values():
- for conn in conns:
- conn.close()
- self.connections.clear()
- self.queue.clear()
-
- @asyncio.coroutine
- def get_connection(self, host: str, port: int, ssl: bool) -> Generator[Any, None, 'Connection']:
- """Create or reuse a connection."""
- port = port or (443 if ssl else 80)
- try:
- ipaddrs = yield from self.loop.getaddrinfo(host, port)
- except Exception as exc:
- self.log(0, 'Exception %r for (%r, %r)' % (exc, host, port))
- raise
- self.log(1, '* %s resolves to %s' %
- (host, ', '.join(ip[4][0] for ip in ipaddrs)))
-
- # Look for a reusable connection.
- for _1, _2, _3, _4, (h, p, *_5) in ipaddrs:
- key = h, p, ssl
- conn = None
- conns = self.connections.get(key)
- while conns:
- conn = conns.pop(0)
- self.queue.remove(conn)
- if not conns:
- del self.connections[key]
- if conn.stale():
- self.log(1, 'closing stale connection for', key)
- conn.close() # Just in case.
- else:
- self.log(1, '* Reusing pooled connection', key,
- 'FD =', conn.fileno())
- return conn
-
- # Create a new connection.
- conn = Connection(self.log, self, host, port, ssl)
- yield from conn.connect()
- self.log(1, '* New connection', conn.key, 'FD =', conn.fileno())
- return conn
-
- def recycle_connection(self, conn: 'Connection') -> None:
- """Make a connection available for reuse.
-
- This also prunes the pool if it exceeds the size limits.
- """
- if conn.stale():
- conn.close()
- return
-
- key = conn.key
- conns = self.connections.setdefault(key, [])
- conns.append(conn)
- self.queue.append(conn)
-
- if len(conns) <= self.max_tasks and len(self.queue) <= self.max_pool:
- return
-
- # Prune the queue.
-
- # Close stale connections for this key first.
- stale = [conn for conn in conns if conn.stale()]
- if stale:
- for conn in stale:
- conns.remove(conn)
- self.queue.remove(conn)
- self.log(1, 'closing stale connection for', key)
- conn.close()
- if not conns:
- del self.connections[key]
-
- # Close oldest connection(s) for this key if limit reached.
- while len(conns) > self.max_tasks:
- conn = conns.pop(0)
- self.queue.remove(conn)
- self.log(1, 'closing oldest connection for', key)
- conn.close()
-
- if len(self.queue) <= self.max_pool:
- return
-
- # Close overall stale connections.
- stale = [conn for conn in self.queue if conn.stale()]
- if stale:
- for conn in stale:
- conns = self.connections.get(conn.key)
- conns.remove(conn)
- self.queue.remove(conn)
- self.log(1, 'closing stale connection for', key)
- conn.close()
-
- # Close oldest overall connection(s) if limit reached.
- while len(self.queue) > self.max_pool:
- conn = self.queue.pop(0)
- conns = self.connections.get(conn.key)
- c = conns.pop(0)
- assert conn == c, (conn.key, conn, c, conns)
- self.log(1, 'closing overall oldest connection for', conn.key)
- conn.close()
-
-
-class Connection:
-
- def __init__(self, log: Logger, pool: ConnectionPool, host: str, port: int, ssl: bool) -> None:
- self.log = log
- self.pool = pool
- self.host = host
- self.port = port
- self.ssl = ssl
- self.reader = None # type: asyncio.StreamReader
- self.writer = None # type: asyncio.StreamWriter
- self.key = None # type: KeyTuple
-
- def stale(self) -> bool:
- return self.reader is None or self.reader.at_eof()
-
- def fileno(self) -> Optional[int]:
- writer = self.writer
- if writer is not None:
- transport = writer.transport
- if transport is not None:
- sock = transport.get_extra_info('socket')
- if sock is not None:
- return sock.fileno()
- return None
-
- @asyncio.coroutine
- def connect(self) -> Generator[Any, None, None]:
- self.reader, self.writer = yield from asyncio.open_connection(
- self.host, self.port, ssl=self.ssl)
- peername = self.writer.get_extra_info('peername')
- if peername:
- self.host, self.port = peername[:2]
- else:
- self.log(1, 'NO PEERNAME???', self.host, self.port, self.ssl)
- self.key = self.host, self.port, self.ssl
-
- def close(self, recycle: bool = False) -> None:
- if recycle and not self.stale():
- self.pool.recycle_connection(self)
- else:
- self.writer.close()
- self.pool = self.reader = self.writer = None
-
-
-class Request:
- """HTTP request.
-
- Use connect() to open a connection; send_request() to send the
- request; get_response() to receive the response headers.
- """
-
- def __init__(self, log: Logger, url: str, pool: ConnectionPool) -> None:
- self.log = log
- self.url = url
- self.pool = pool
- self.parts = urllib.parse.urlparse(self.url)
- self.scheme = self.parts.scheme
- assert self.scheme in ('http', 'https'), repr(url)
- self.ssl = self.parts.scheme == 'https'
- self.netloc = self.parts.netloc
- self.hostname = self.parts.hostname
- self.port = self.parts.port or (443 if self.ssl else 80)
- self.path = (self.parts.path or '/')
- self.query = self.parts.query
- if self.query:
- self.full_path = '%s?%s' % (self.path, self.query)
- else:
- self.full_path = self.path
- self.http_version = 'HTTP/1.1'
- self.method = 'GET'
- self.headers = [] # type: List[Tuple[str, str]]
- self.conn = None # type: Connection
-
- @asyncio.coroutine
- def connect(self) -> Generator[Any, None, None]:
- """Open a connection to the server."""
- self.log(1, '* Connecting to %s:%s using %s for %s' %
- (self.hostname, self.port,
- 'ssl' if self.ssl else 'tcp',
- self.url))
- self.conn = yield from self.pool.get_connection(self.hostname,
- self.port, self.ssl)
-
- def close(self, recycle: bool = False) -> None:
- """Close the connection, recycle if requested."""
- if self.conn is not None:
- if not recycle:
- self.log(1, 'closing connection for', self.conn.key)
- self.conn.close(recycle)
- self.conn = None
-
- @asyncio.coroutine
- def putline(self, line: str) -> Generator[Any, None, None]:
- """Write a line to the connection.
-
- Used for the request line and headers.
- """
- self.log(2, '>', line)
- self.conn.writer.write(line.encode('latin-1') + b'\r\n')
-
- @asyncio.coroutine
- def send_request(self) -> Generator[Any, None, None]:
- """Send the request."""
- request_line = '%s %s %s' % (self.method, self.full_path,
- self.http_version)
- yield from self.putline(request_line)
- # TODO: What if a header is already set?
- self.headers.append(('User-Agent', 'asyncio-example-crawl/0.0'))
- self.headers.append(('Host', self.netloc))
- self.headers.append(('Accept', '*/*'))
- # self.headers.append(('Accept-Encoding', 'gzip'))
- for key, value in self.headers:
- line = '%s: %s' % (key, value)
- yield from self.putline(line)
- yield from self.putline('')
-
- @asyncio.coroutine
- def get_response(self) -> Generator[Any, None, 'Response']:
- """Receive the response."""
- response = Response(self.log, self.conn.reader)
- yield from response.read_headers()
- return response
-
-
-class Response:
- """HTTP response.
-
- Call read_headers() to receive the request headers. Then check
- the status attribute and call get_header() to inspect the headers.
- Finally call read() to receive the body.
- """
-
- def __init__(self, log: Logger, reader: asyncio.StreamReader) -> None:
- self.log = log
- self.reader = reader
- self.http_version = None # type: str # 'HTTP/1.1'
- self.status = None # type: int # 200
- self.reason = None # type: str # 'Ok'
- self.headers = [] # type: List[Tuple[str, str]] # [('Content-Type', 'text/html')]
-
- @asyncio.coroutine
- def getline(self) -> Generator[Any, None, str]:
- """Read one line from the connection."""
- line = (yield from self.reader.readline()).decode('latin-1').rstrip()
- self.log(2, '<', line)
- return line
-
- @asyncio.coroutine
- def read_headers(self) -> Generator[Any, None, None]:
- """Read the response status and the request headers."""
- status_line = yield from self.getline()
- status_parts = status_line.split(None, 2)
- if len(status_parts) != 3:
- self.log(0, 'bad status_line', repr(status_line))
- raise BadStatusLine(status_line)
- self.http_version, status, self.reason = status_parts
- self.status = int(status)
- while True:
- header_line = yield from self.getline()
- if not header_line:
- break
- # TODO: Continuation lines.
- key, value = header_line.split(':', 1)
- self.headers.append((key, value.strip()))
-
- def get_redirect_url(self, default: str = '') -> str:
- """Inspect the status and return the redirect url if appropriate."""
- if self.status not in (300, 301, 302, 303, 307):
- return default
- return self.get_header('Location', default)
-
- def get_header(self, key: str, default: str = '') -> str:
- """Get one header value, using a case insensitive header name."""
- key = key.lower()
- for k, v in self.headers:
- if k.lower() == key:
- return v
- return default
-
- @asyncio.coroutine
- def read(self) -> Generator[Any, None, bytes]:
- """Read the response body.
-
- This honors Content-Length and Transfer-Encoding: chunked.
- """
- nbytes = None
- for key, value in self.headers:
- if key.lower() == 'content-length':
- nbytes = int(value)
- break
- if nbytes is None:
- if self.get_header('transfer-encoding').lower() == 'chunked':
- self.log(2, 'parsing chunked response')
- blocks = []
- while True:
- size_header = yield from self.reader.readline()
- if not size_header:
- self.log(0, 'premature end of chunked response')
- break
- self.log(3, 'size_header =', repr(size_header))
- parts = size_header.split(b';')
- size = int(parts[0], 16)
- if size:
- self.log(3, 'reading chunk of', size, 'bytes')
- block = yield from self.reader.readexactly(size)
- assert len(block) == size, (len(block), size)
- blocks.append(block)
- crlf = yield from self.reader.readline()
- assert crlf == b'\r\n', repr(crlf)
- if not size:
- break
- body = b''.join(blocks)
- self.log(1, 'chunked response had', len(body),
- 'bytes in', len(blocks), 'blocks')
- else:
- self.log(3, 'reading until EOF')
- body = yield from self.reader.read()
- # TODO: Should make sure not to recycle the connection
- # in this case.
- else:
- body = yield from self.reader.readexactly(nbytes)
- return body
-
-
-class Fetcher:
- """Logic and state for one URL.
-
- When found in crawler.busy, this represents a URL to be fetched or
- in the process of being fetched; when found in crawler.done, this
- holds the results from fetching it.
-
- This is usually associated with a task. This references the
- crawler for the connection pool and to add more URLs to its todo
- list.
-
- Call fetch() to do the fetching, then report() to print the results.
- """
-
- def __init__(self, log: Logger, url: str, crawler: 'Crawler',
- max_redirect: int = 10, max_tries: int = 4) -> None:
- self.log = log
- self.url = url
- self.crawler = crawler
- # We don't loop resolving redirects here -- we just use this
- # to decide whether to add the redirect URL to crawler.todo.
- self.max_redirect = max_redirect
- # But we do loop to retry on errors a few times.
- self.max_tries = max_tries
- # Everything we collect from the response goes here.
- self.task = None # type: asyncio.Task
- self.exceptions = [] # type: List[Exception]
- self.tries = 0
- self.request = None # type: Request
- self.response = None # type: Response
- self.body = None # type: bytes
- self.next_url = None # type: str
- self.ctype = None # type: str
- self.pdict = None # type: Dict[str, str]
- self.encoding = None # type: str
- self.urls = None # type: Set[str]
- self.new_urls = None # type: Set[str]
-
- @asyncio.coroutine
- def fetch(self) -> Generator[Any, None, None]:
- """Attempt to fetch the contents of the URL.
-
- If successful, and the data is HTML, extract further links and
- add them to the crawler. Redirects are also added back there.
- """
- while self.tries < self.max_tries:
- self.tries += 1
- self.request = None
- try:
- self.request = Request(self.log, self.url, self.crawler.pool)
- yield from self.request.connect()
- yield from self.request.send_request()
- self.response = yield from self.request.get_response()
- self.body = yield from self.response.read()
- h_conn = self.response.get_header('connection').lower()
- if h_conn != 'close':
- self.request.close(recycle=True)
- self.request = None
- if self.tries > 1:
- self.log(1, 'try', self.tries, 'for', self.url, 'success')
- break
- except (BadStatusLine, OSError) as exc:
- self.exceptions.append(exc)
- self.log(1, 'try', self.tries, 'for', self.url,
- 'raised', repr(exc))
- # import pdb; pdb.set_trace()
- # Don't reuse the connection in this case.
- finally:
- if self.request is not None:
- self.request.close()
- else:
- # We never broke out of the while loop, i.e. all tries failed.
- self.log(0, 'no success for', self.url,
- 'in', self.max_tries, 'tries')
- return
- next_url = self.response.get_redirect_url()
- if next_url:
- self.next_url = urllib.parse.urljoin(self.url, next_url)
- if self.max_redirect > 0:
- self.log(1, 'redirect to', self.next_url, 'from', self.url)
- self.crawler.add_url(self.next_url, self.max_redirect - 1)
- else:
- self.log(0, 'redirect limit reached for', self.next_url,
- 'from', self.url)
- else:
- if self.response.status == 200:
- self.ctype = self.response.get_header('content-type')
- self.pdict = {}
- if self.ctype:
- self.ctype, self.pdict = cgi.parse_header(self.ctype)
- self.encoding = self.pdict.get('charset', 'utf-8')
- if self.ctype == 'text/html':
- body = self.body.decode(self.encoding, 'replace')
- # Replace href with (?:href|src) to follow image links.
- self.urls = set(re.findall(r'(?i)href=["\']?([^\s"\'<>]+)',
- body))
- if self.urls:
- self.log(1, 'got', len(self.urls),
- 'distinct urls from', self.url)
- self.new_urls = set()
- for url in self.urls:
- url = unescape(url)
- url = urllib.parse.urljoin(self.url, url)
- url, frag = urllib.parse.urldefrag(url)
- if self.crawler.add_url(url):
- self.new_urls.add(url)
-
- def report(self, stats: 'Stats', file: IO[str] = None) -> None:
- """Print a report on the state for this URL.
-
- Also update the Stats instance.
- """
- if self.task is not None:
- if not self.task.done():
- stats.add('pending')
- print(self.url, 'pending', file=file)
- return
- elif self.task.cancelled():
- stats.add('cancelled')
- print(self.url, 'cancelled', file=file)
- return
- elif self.task.exception():
- stats.add('exception')
- exc = self.task.exception()
- stats.add('exception_' + exc.__class__.__name__)
- print(self.url, exc, file=file)
- return
- if len(self.exceptions) == self.tries:
- stats.add('fail')
- exc = self.exceptions[-1]
- stats.add('fail_' + str(exc.__class__.__name__))
- print(self.url, 'error', exc, file=file)
- elif self.next_url:
- stats.add('redirect')
- print(self.url, self.response.status, 'redirect', self.next_url,
- file=file)
- elif self.ctype == 'text/html':
- stats.add('html')
- size = len(self.body or b'')
- stats.add('html_bytes', size)
- if self.log.level:
- print(self.url, self.response.status,
- self.ctype, self.encoding,
- size,
- '%d/%d' % (len(self.new_urls or ()), len(self.urls or ())),
- file=file)
- elif self.response is None:
- print(self.url, 'no response object')
- else:
- size = len(self.body or b'')
- if self.response.status == 200:
- stats.add('other')
- stats.add('other_bytes', size)
- else:
- stats.add('error')
- stats.add('error_bytes', size)
- stats.add('status_%s' % self.response.status)
- print(self.url, self.response.status,
- self.ctype, self.encoding,
- size,
- file=file)
-
-
-class Stats:
- """Record stats of various sorts."""
-
- def __init__(self) -> None:
- self.stats = {} # type: Dict[str, int]
-
- def add(self, key: str, count: int = 1) -> None:
- self.stats[key] = self.stats.get(key, 0) + count
-
- def report(self, file: IO[str] = None) -> None:
- for key, count in sorted(self.stats.items()):
- print('%10d' % count, key, file=file)
-
-
-class Crawler:
- """Crawl a set of URLs.
-
- This manages three disjoint sets of URLs (todo, busy, done). The
- data structures actually store dicts -- the values in todo give
- the redirect limit, while the values in busy and done are Fetcher
- instances.
- """
- def __init__(self, log: Logger,
- roots: Set[str], exclude: str = None, strict: bool = True, # What to crawl.
- max_redirect: int = 10, max_tries: int = 4, # Per-url limits.
- max_tasks: int = 10, max_pool: int = 10, # Global limits.
- ) -> None:
- self.log = log
- self.roots = roots
- self.exclude = exclude
- self.strict = strict
- self.max_redirect = max_redirect
- self.max_tries = max_tries
- self.max_tasks = max_tasks
- self.max_pool = max_pool
- self.todo = {} # type: Dict[str, int]
- self.busy = {} # type: Dict[str, Fetcher]
- self.done = {} # type: Dict[str, Fetcher]
- self.pool = ConnectionPool(self.log, max_pool, max_tasks)
- self.root_domains = set() # type: Set[str]
- for root in roots:
- host = urllib.parse.urlparse(root).hostname
- if not host:
- continue
- if re.match(r'\A[\d\.]*\Z', host):
- self.root_domains.add(host)
- else:
- host = host.lower()
- if self.strict:
- self.root_domains.add(host)
- if host.startswith('www.'):
- self.root_domains.add(host[4:])
- else:
- self.root_domains.add('www.' + host)
- else:
- parts = host.split('.')
- if len(parts) > 2:
- host = '.'.join(parts[-2:])
- self.root_domains.add(host)
- for root in roots:
- self.add_url(root)
- self.governor = asyncio.Semaphore(max_tasks)
- self.termination = asyncio.Condition()
- self.t0 = time.time()
- self.t1 = None # type: Optional[float]
-
- def close(self) -> None:
- """Close resources (currently only the pool)."""
- self.pool.close()
-
- def host_okay(self, host: str) -> bool:
- """Check if a host should be crawled.
-
- A literal match (after lowercasing) is always good. For hosts
- that don't look like IP addresses, some approximate matches
- are okay depending on the strict flag.
- """
- host = host.lower()
- if host in self.root_domains:
- return True
- if re.match(r'\A[\d\.]*\Z', host):
- return False
- if self.strict:
- return self._host_okay_strictish(host)
- else:
- return self._host_okay_lenient(host)
-
- def _host_okay_strictish(self, host: str) -> bool:
- """Check if a host should be crawled, strict-ish version.
-
- This checks for equality modulo an initial 'www.' component.
- """
- if host.startswith('www.'):
- if host[4:] in self.root_domains:
- return True
- else:
- if 'www.' + host in self.root_domains:
- return True
- return False
-
- def _host_okay_lenient(self, host: str) -> bool:
- """Check if a host should be crawled, lenient version.
-
- This compares the last two components of the host.
- """
- parts = host.split('.')
- if len(parts) > 2:
- host = '.'.join(parts[-2:])
- return host in self.root_domains
-
- def add_url(self, url: str, max_redirect: int = None) -> bool:
- """Add a URL to the todo list if not seen before."""
- if self.exclude and re.search(self.exclude, url):
- return False
- parsed = urllib.parse.urlparse(url)
- if parsed.scheme not in ('http', 'https'):
- self.log(2, 'skipping non-http scheme in', url)
- return False
- host = parsed.hostname
- if not self.host_okay(host):
- self.log(2, 'skipping non-root host in', url)
- return False
- if max_redirect is None:
- max_redirect = self.max_redirect
- if url in self.todo or url in self.busy or url in self.done:
- return False
- self.log(1, 'adding', url, max_redirect)
- self.todo[url] = max_redirect
- return True
-
- @asyncio.coroutine
- def crawl(self) -> Generator[Any, None, None]:
- """Run the crawler until all finished."""
- with (yield from self.termination):
- while self.todo or self.busy:
- if self.todo:
- url, max_redirect = self.todo.popitem()
- fetcher = Fetcher(self.log, url,
- crawler=self,
- max_redirect=max_redirect,
- max_tries=self.max_tries,
- )
- self.busy[url] = fetcher
- fetcher.task = asyncio.Task(self.fetch(fetcher))
- else:
- yield from self.termination.wait()
- self.t1 = time.time()
-
- @asyncio.coroutine
- def fetch(self, fetcher: Fetcher) -> Generator[Any, None, None]:
- """Call the Fetcher's fetch(), with a limit on concurrency.
-
- Once this returns, move the fetcher from busy to done.
- """
- url = fetcher.url
- with (yield from self.governor):
- try:
- yield from fetcher.fetch() # Fetcher gonna fetch.
- finally:
- # Force GC of the task, so the error is logged.
- fetcher.task = None
- with (yield from self.termination):
- self.done[url] = fetcher
- del self.busy[url]
- self.termination.notify()
-
- def report(self, file: IO[str] = None) -> None:
- """Print a report on all completed URLs."""
- if self.t1 is None:
- self.t1 = time.time()
- dt = self.t1 - self.t0
- if dt and self.max_tasks:
- speed = len(self.done) / dt / self.max_tasks
- else:
- speed = 0
- stats = Stats()
- print('*** Report ***', file=file)
- try:
- show = [] # type: List[Tuple[str, Fetcher]]
- show.extend(self.done.items())
- show.extend(self.busy.items())
- show.sort()
- for url, fetcher in show:
- fetcher.report(stats, file=file)
- except KeyboardInterrupt:
- print('\nInterrupted', file=file)
- print('Finished', len(self.done),
- 'urls in %.3f secs' % dt,
- '(max_tasks=%d)' % self.max_tasks,
- '(%.3f urls/sec/task)' % speed,
- file=file)
- stats.report(file=file)
- print('Todo:', len(self.todo), file=file)
- print('Busy:', len(self.busy), file=file)
- print('Done:', len(self.done), file=file)
- print('Date:', time.ctime(), 'local time', file=file)
-
-
-def main() -> None:
- """Main program.
-
- Parse arguments, set up event loop, run crawler, print report.
- """
- args = ARGS.parse_args()
- if not args.roots:
- print('Use --help for command line help')
- return
-
- log = Logger(args.level)
-
- if args.iocp:
- if sys.platform == 'win32':
- from asyncio import ProactorEventLoop
- loop = ProactorEventLoop() # type: ignore
- asyncio.set_event_loop(loop)
- else:
- assert False
- elif args.select:
- loop = asyncio.SelectorEventLoop() # type: ignore
- asyncio.set_event_loop(loop)
- else:
- loop = asyncio.get_event_loop()
-
- roots = {fix_url(root) for root in args.roots}
-
- crawler = Crawler(log,
- roots, exclude=args.exclude,
- strict=args.strict,
- max_redirect=args.max_redirect,
- max_tries=args.max_tries,
- max_tasks=args.max_tasks,
- max_pool=args.max_pool,
- )
- try:
- loop.run_until_complete(crawler.crawl()) # Crawler gonna crawl.
- except KeyboardInterrupt:
- sys.stderr.flush()
- print('\nInterrupted\n')
- finally:
- crawler.report()
- crawler.close()
- loop.close()
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.INFO) # type: ignore
- main()
diff --git a/test-data/samples/crawl2.py b/test-data/samples/crawl2.py
deleted file mode 100644
index 5eaad70..0000000
--- a/test-data/samples/crawl2.py
+++ /dev/null
@@ -1,852 +0,0 @@
-#!/usr/bin/env python3.4
-
-"""A simple web crawler."""
-
-# This is cloned from <asyncio>/examples/crawl.py,
-# with type annotations added (PEP 484).
-#
-# This version (crawl2.py) has also been converted to use `async def` +
-# `await` (PEP 492).
-
-import argparse
-import asyncio
-import cgi
-from http.client import BadStatusLine
-import logging
-import re
-import sys
-import time
-import urllib.parse
-from typing import Any, Awaitable, IO, Optional, Sequence, Set, Tuple
-
-
-ARGS = argparse.ArgumentParser(description="Web crawler")
-ARGS.add_argument(
- '--iocp', action='store_true', dest='iocp',
- default=False, help='Use IOCP event loop (Windows only)')
-ARGS.add_argument(
- '--select', action='store_true', dest='select',
- default=False, help='Use Select event loop instead of default')
-ARGS.add_argument(
- 'roots', nargs='*',
- default=[], help='Root URL (may be repeated)')
-ARGS.add_argument(
- '--max_redirect', action='store', type=int, metavar='N',
- default=10, help='Limit redirection chains (for 301, 302 etc.)')
-ARGS.add_argument(
- '--max_tries', action='store', type=int, metavar='N',
- default=4, help='Limit retries on network errors')
-ARGS.add_argument(
- '--max_tasks', action='store', type=int, metavar='N',
- default=100, help='Limit concurrent connections')
-ARGS.add_argument(
- '--max_pool', action='store', type=int, metavar='N',
- default=100, help='Limit connection pool size')
-ARGS.add_argument(
- '--exclude', action='store', metavar='REGEX',
- help='Exclude matching URLs')
-ARGS.add_argument(
- '--strict', action='store_true',
- default=True, help='Strict host matching (default)')
-ARGS.add_argument(
- '--lenient', action='store_false', dest='strict',
- default=False, help='Lenient host matching')
-ARGS.add_argument(
- '-v', '--verbose', action='count', dest='level',
- default=1, help='Verbose logging (repeat for more verbose)')
-ARGS.add_argument(
- '-q', '--quiet', action='store_const', const=0, dest='level',
- default=1, help='Quiet logging (opposite of --verbose)')
-
-
-ESCAPES = [('quot', '"'),
- ('gt', '>'),
- ('lt', '<'),
- ('amp', '&') # Must be last.
- ]
-
-
-def unescape(url: str) -> str:
- """Turn & into &, and so on.
-
- This is the inverse of cgi.escape().
- """
- for name, char in ESCAPES:
- url = url.replace('&' + name + ';', char)
- return url
-
-
-def fix_url(url: str) -> str:
- """Prefix a schema-less URL with http://."""
- if '://' not in url:
- url = 'http://' + url
- return url
-
-
-class Logger:
-
- def __init__(self, level: int) -> None:
- self.level = level
-
- def _log(self, n: int, args: Sequence[Any]) -> None:
- if self.level >= n:
- print(*args, file=sys.stderr, flush=True)
-
- def log(self, n: int, *args: Any) -> None:
- self._log(n, args)
-
- def __call__(self, n: int, *args: Any) -> None:
- self._log(n, args)
-
-
-KeyTuple = Tuple[str, int, bool]
-
-
-class ConnectionPool:
- """A connection pool.
-
- To open a connection, use reserve(). To recycle it, use unreserve().
-
- The pool is mostly just a mapping from (host, port, ssl) tuples to
- lists of Connections. The currently active connections are *not*
- in the data structure; get_connection() takes the connection out,
- and recycle_connection() puts it back in. To recycle a
- connection, call conn.close(recycle=True).
-
- There are limits to both the overall pool and the per-key pool.
- """
-
- def __init__(self, log: Logger, max_pool: int = 10, max_tasks: int = 5) -> None:
- self.log = log
- self.max_pool = max_pool # Overall limit.
- self.max_tasks = max_tasks # Per-key limit.
- self.loop = asyncio.get_event_loop()
- self.connections = {} # type: Dict[KeyTuple, List[Connection]]
- self.queue = [] # type: List[Connection]
-
- def close(self) -> None:
- """Close all connections available for reuse."""
- for conns in self.connections.values():
- for conn in conns:
- conn.close()
- self.connections.clear()
- self.queue.clear()
-
- async def get_connection(self, host: str, port: int, ssl: bool) -> 'Connection':
- """Create or reuse a connection."""
- port = port or (443 if ssl else 80)
- try:
- ipaddrs = await self.loop.getaddrinfo(host, port)
- except Exception as exc:
- self.log(0, 'Exception %r for (%r, %r)' % (exc, host, port))
- raise
- self.log(1, '* %s resolves to %s' %
- (host, ', '.join(ip[4][0] for ip in ipaddrs)))
-
- # Look for a reusable connection.
- for _1, _2, _3, _4, (h, p, *_5) in ipaddrs:
- key = h, p, ssl
- conn = None
- conns = self.connections.get(key)
- while conns:
- conn = conns.pop(0)
- self.queue.remove(conn)
- if not conns:
- del self.connections[key]
- if conn.stale():
- self.log(1, 'closing stale connection for', key)
- conn.close() # Just in case.
- else:
- self.log(1, '* Reusing pooled connection', key,
- 'FD =', conn.fileno())
- return conn
-
- # Create a new connection.
- conn = Connection(self.log, self, host, port, ssl)
- await conn.connect()
- self.log(1, '* New connection', conn.key, 'FD =', conn.fileno())
- return conn
-
- def recycle_connection(self, conn: 'Connection') -> None:
- """Make a connection available for reuse.
-
- This also prunes the pool if it exceeds the size limits.
- """
- if conn.stale():
- conn.close()
- return
-
- key = conn.key
- conns = self.connections.setdefault(key, [])
- conns.append(conn)
- self.queue.append(conn)
-
- if len(conns) <= self.max_tasks and len(self.queue) <= self.max_pool:
- return
-
- # Prune the queue.
-
- # Close stale connections for this key first.
- stale = [conn for conn in conns if conn.stale()]
- if stale:
- for conn in stale:
- conns.remove(conn)
- self.queue.remove(conn)
- self.log(1, 'closing stale connection for', key)
- conn.close()
- if not conns:
- del self.connections[key]
-
- # Close oldest connection(s) for this key if limit reached.
- while len(conns) > self.max_tasks:
- conn = conns.pop(0)
- self.queue.remove(conn)
- self.log(1, 'closing oldest connection for', key)
- conn.close()
-
- if len(self.queue) <= self.max_pool:
- return
-
- # Close overall stale connections.
- stale = [conn for conn in self.queue if conn.stale()]
- if stale:
- for conn in stale:
- conns = self.connections.get(conn.key)
- conns.remove(conn)
- self.queue.remove(conn)
- self.log(1, 'closing stale connection for', key)
- conn.close()
-
- # Close oldest overall connection(s) if limit reached.
- while len(self.queue) > self.max_pool:
- conn = self.queue.pop(0)
- conns = self.connections.get(conn.key)
- c = conns.pop(0)
- assert conn == c, (conn.key, conn, c, conns)
- self.log(1, 'closing overall oldest connection for', conn.key)
- conn.close()
-
-
-class Connection:
-
- def __init__(self, log: Logger, pool: ConnectionPool, host: str, port: int, ssl: bool) -> None:
- self.log = log
- self.pool = pool
- self.host = host
- self.port = port
- self.ssl = ssl
- self.reader = None # type: asyncio.StreamReader
- self.writer = None # type: asyncio.StreamWriter
- self.key = None # type: KeyTuple
-
- def stale(self) -> bool:
- return self.reader is None or self.reader.at_eof()
-
- def fileno(self) -> Optional[int]:
- writer = self.writer
- if writer is not None:
- transport = writer.transport
- if transport is not None:
- sock = transport.get_extra_info('socket')
- if sock is not None:
- return sock.fileno()
- return None
-
- async def connect(self) -> None:
- self.reader, self.writer = await asyncio.open_connection(
- self.host, self.port, ssl=self.ssl)
- peername = self.writer.get_extra_info('peername')
- if peername:
- self.host, self.port = peername[:2]
- else:
- self.log(1, 'NO PEERNAME???', self.host, self.port, self.ssl)
- self.key = self.host, self.port, self.ssl
-
- def close(self, recycle: bool = False) -> None:
- if recycle and not self.stale():
- self.pool.recycle_connection(self)
- else:
- self.writer.close()
- self.pool = self.reader = self.writer = None
-
-
-class Request:
- """HTTP request.
-
- Use connect() to open a connection; send_request() to send the
- request; get_response() to receive the response headers.
- """
-
- def __init__(self, log: Logger, url: str, pool: ConnectionPool) -> None:
- self.log = log
- self.url = url
- self.pool = pool
- self.parts = urllib.parse.urlparse(self.url)
- self.scheme = self.parts.scheme
- assert self.scheme in ('http', 'https'), repr(url)
- self.ssl = self.parts.scheme == 'https'
- self.netloc = self.parts.netloc
- self.hostname = self.parts.hostname
- self.port = self.parts.port or (443 if self.ssl else 80)
- self.path = (self.parts.path or '/')
- self.query = self.parts.query
- if self.query:
- self.full_path = '%s?%s' % (self.path, self.query)
- else:
- self.full_path = self.path
- self.http_version = 'HTTP/1.1'
- self.method = 'GET'
- self.headers = [] # type: List[Tuple[str, str]]
- self.conn = None # type: Connection
-
- async def connect(self) -> None:
- """Open a connection to the server."""
- self.log(1, '* Connecting to %s:%s using %s for %s' %
- (self.hostname, self.port,
- 'ssl' if self.ssl else 'tcp',
- self.url))
- self.conn = await self.pool.get_connection(self.hostname,
- self.port, self.ssl)
-
- def close(self, recycle: bool = False) -> None:
- """Close the connection, recycle if requested."""
- if self.conn is not None:
- if not recycle:
- self.log(1, 'closing connection for', self.conn.key)
- self.conn.close(recycle)
- self.conn = None
-
- async def putline(self, line: str) -> None:
- """Write a line to the connection.
-
- Used for the request line and headers.
- """
- self.log(2, '>', line)
- self.conn.writer.write(line.encode('latin-1') + b'\r\n')
-
- async def send_request(self) -> None:
- """Send the request."""
- request_line = '%s %s %s' % (self.method, self.full_path,
- self.http_version)
- await self.putline(request_line)
- # TODO: What if a header is already set?
- self.headers.append(('User-Agent', 'asyncio-example-crawl/0.0'))
- self.headers.append(('Host', self.netloc))
- self.headers.append(('Accept', '*/*'))
- # self.headers.append(('Accept-Encoding', 'gzip'))
- for key, value in self.headers:
- line = '%s: %s' % (key, value)
- await self.putline(line)
- await self.putline('')
-
- async def get_response(self) -> 'Response':
- """Receive the response."""
- response = Response(self.log, self.conn.reader)
- await response.read_headers()
- return response
-
-
-class Response:
- """HTTP response.
-
- Call read_headers() to receive the request headers. Then check
- the status attribute and call get_header() to inspect the headers.
- Finally call read() to receive the body.
- """
-
- def __init__(self, log: Logger, reader: asyncio.StreamReader) -> None:
- self.log = log
- self.reader = reader
- self.http_version = None # type: str # 'HTTP/1.1'
- self.status = None # type: int # 200
- self.reason = None # type: str # 'Ok'
- self.headers = [] # type: List[Tuple[str, str]] # [('Content-Type', 'text/html')]
-
- async def getline(self) -> str:
- """Read one line from the connection."""
- line = (await self.reader.readline()).decode('latin-1').rstrip()
- self.log(2, '<', line)
- return line
-
- async def read_headers(self) -> None:
- """Read the response status and the request headers."""
- status_line = await self.getline()
- status_parts = status_line.split(None, 2)
- if len(status_parts) != 3:
- self.log(0, 'bad status_line', repr(status_line))
- raise BadStatusLine(status_line)
- self.http_version, status, self.reason = status_parts
- self.status = int(status)
- while True:
- header_line = await self.getline()
- if not header_line:
- break
- # TODO: Continuation lines.
- key, value = header_line.split(':', 1)
- self.headers.append((key, value.strip()))
-
- def get_redirect_url(self, default: str = '') -> str:
- """Inspect the status and return the redirect url if appropriate."""
- if self.status not in (300, 301, 302, 303, 307):
- return default
- return self.get_header('Location', default)
-
- def get_header(self, key: str, default: str = '') -> str:
- """Get one header value, using a case insensitive header name."""
- key = key.lower()
- for k, v in self.headers:
- if k.lower() == key:
- return v
- return default
-
- async def read(self) -> bytes:
- """Read the response body.
-
- This honors Content-Length and Transfer-Encoding: chunked.
- """
- nbytes = None
- for key, value in self.headers:
- if key.lower() == 'content-length':
- nbytes = int(value)
- break
- if nbytes is None:
- if self.get_header('transfer-encoding').lower() == 'chunked':
- self.log(2, 'parsing chunked response')
- blocks = []
- while True:
- size_header = await self.reader.readline()
- if not size_header:
- self.log(0, 'premature end of chunked response')
- break
- self.log(3, 'size_header =', repr(size_header))
- parts = size_header.split(b';')
- size = int(parts[0], 16)
- if size:
- self.log(3, 'reading chunk of', size, 'bytes')
- block = await self.reader.readexactly(size)
- assert len(block) == size, (len(block), size)
- blocks.append(block)
- crlf = await self.reader.readline()
- assert crlf == b'\r\n', repr(crlf)
- if not size:
- break
- body = b''.join(blocks)
- self.log(1, 'chunked response had', len(body),
- 'bytes in', len(blocks), 'blocks')
- else:
- self.log(3, 'reading until EOF')
- body = await self.reader.read()
- # TODO: Should make sure not to recycle the connection
- # in this case.
- else:
- body = await self.reader.readexactly(nbytes)
- return body
-
-
-class Fetcher:
- """Logic and state for one URL.
-
- When found in crawler.busy, this represents a URL to be fetched or
- in the process of being fetched; when found in crawler.done, this
- holds the results from fetching it.
-
- This is usually associated with a task. This references the
- crawler for the connection pool and to add more URLs to its todo
- list.
-
- Call fetch() to do the fetching, then report() to print the results.
- """
-
- def __init__(self, log: Logger, url: str, crawler: 'Crawler',
- max_redirect: int = 10, max_tries: int = 4) -> None:
- self.log = log
- self.url = url
- self.crawler = crawler
- # We don't loop resolving redirects here -- we just use this
- # to decide whether to add the redirect URL to crawler.todo.
- self.max_redirect = max_redirect
- # But we do loop to retry on errors a few times.
- self.max_tries = max_tries
- # Everything we collect from the response goes here.
- self.task = None # type: asyncio.Task
- self.exceptions = [] # type: List[Exception]
- self.tries = 0
- self.request = None # type: Request
- self.response = None # type: Response
- self.body = None # type: bytes
- self.next_url = None # type: str
- self.ctype = None # type: str
- self.pdict = None # type: Dict[str, str]
- self.encoding = None # type: str
- self.urls = None # type: Set[str]
- self.new_urls = None # type: Set[str]
-
- async def fetch(self) -> None:
- """Attempt to fetch the contents of the URL.
-
- If successful, and the data is HTML, extract further links and
- add them to the crawler. Redirects are also added back there.
- """
- while self.tries < self.max_tries:
- self.tries += 1
- self.request = None
- try:
- self.request = Request(self.log, self.url, self.crawler.pool)
- await self.request.connect()
- await self.request.send_request()
- self.response = await self.request.get_response()
- self.body = await self.response.read()
- h_conn = self.response.get_header('connection').lower()
- if h_conn != 'close':
- self.request.close(recycle=True)
- self.request = None
- if self.tries > 1:
- self.log(1, 'try', self.tries, 'for', self.url, 'success')
- break
- except (BadStatusLine, OSError) as exc:
- self.exceptions.append(exc)
- self.log(1, 'try', self.tries, 'for', self.url,
- 'raised', repr(exc))
- # import pdb; pdb.set_trace()
- # Don't reuse the connection in this case.
- finally:
- if self.request is not None:
- self.request.close()
- else:
- # We never broke out of the while loop, i.e. all tries failed.
- self.log(0, 'no success for', self.url,
- 'in', self.max_tries, 'tries')
- return
- next_url = self.response.get_redirect_url()
- if next_url:
- self.next_url = urllib.parse.urljoin(self.url, next_url)
- if self.max_redirect > 0:
- self.log(1, 'redirect to', self.next_url, 'from', self.url)
- self.crawler.add_url(self.next_url, self.max_redirect - 1)
- else:
- self.log(0, 'redirect limit reached for', self.next_url,
- 'from', self.url)
- else:
- if self.response.status == 200:
- self.ctype = self.response.get_header('content-type')
- self.pdict = {}
- if self.ctype:
- self.ctype, self.pdict = cgi.parse_header(self.ctype)
- self.encoding = self.pdict.get('charset', 'utf-8')
- if self.ctype == 'text/html':
- body = self.body.decode(self.encoding, 'replace')
- # Replace href with (?:href|src) to follow image links.
- self.urls = set(re.findall(r'(?i)href=["\']?([^\s"\'<>]+)',
- body))
- if self.urls:
- self.log(1, 'got', len(self.urls),
- 'distinct urls from', self.url)
- self.new_urls = set()
- for url in self.urls:
- url = unescape(url)
- url = urllib.parse.urljoin(self.url, url)
- url, frag = urllib.parse.urldefrag(url)
- if self.crawler.add_url(url):
- self.new_urls.add(url)
-
- def report(self, stats: 'Stats', file: IO[str] = None) -> None:
- """Print a report on the state for this URL.
-
- Also update the Stats instance.
- """
- if self.task is not None:
- if not self.task.done():
- stats.add('pending')
- print(self.url, 'pending', file=file)
- return
- elif self.task.cancelled():
- stats.add('cancelled')
- print(self.url, 'cancelled', file=file)
- return
- elif self.task.exception():
- stats.add('exception')
- exc = self.task.exception()
- stats.add('exception_' + exc.__class__.__name__)
- print(self.url, exc, file=file)
- return
- if len(self.exceptions) == self.tries:
- stats.add('fail')
- exc = self.exceptions[-1]
- stats.add('fail_' + str(exc.__class__.__name__))
- print(self.url, 'error', exc, file=file)
- elif self.next_url:
- stats.add('redirect')
- print(self.url, self.response.status, 'redirect', self.next_url,
- file=file)
- elif self.ctype == 'text/html':
- stats.add('html')
- size = len(self.body or b'')
- stats.add('html_bytes', size)
- if self.log.level:
- print(self.url, self.response.status,
- self.ctype, self.encoding,
- size,
- '%d/%d' % (len(self.new_urls or ()), len(self.urls or ())),
- file=file)
- elif self.response is None:
-            print(self.url, 'no response object', file=file)
- else:
- size = len(self.body or b'')
- if self.response.status == 200:
- stats.add('other')
- stats.add('other_bytes', size)
- else:
- stats.add('error')
- stats.add('error_bytes', size)
- stats.add('status_%s' % self.response.status)
- print(self.url, self.response.status,
- self.ctype, self.encoding,
- size,
- file=file)
-
-
-class Stats:
- """Record stats of various sorts."""
-
- def __init__(self) -> None:
- self.stats = {} # type: Dict[str, int]
-
- def add(self, key: str, count: int = 1) -> None:
- self.stats[key] = self.stats.get(key, 0) + count
-
- def report(self, file: IO[str] = None) -> None:
- for key, count in sorted(self.stats.items()):
- print('%10d' % count, key, file=file)
-
-
-class Crawler:
- """Crawl a set of URLs.
-
- This manages three disjoint sets of URLs (todo, busy, done). The
- data structures actually store dicts -- the values in todo give
- the redirect limit, while the values in busy and done are Fetcher
- instances.
- """
- def __init__(self, log: Logger,
- roots: Set[str], exclude: str = None, strict: bool = True, # What to crawl.
- max_redirect: int = 10, max_tries: int = 4, # Per-url limits.
- max_tasks: int = 10, max_pool: int = 10, # Global limits.
- ) -> None:
- self.log = log
- self.roots = roots
- self.exclude = exclude
- self.strict = strict
- self.max_redirect = max_redirect
- self.max_tries = max_tries
- self.max_tasks = max_tasks
- self.max_pool = max_pool
- self.todo = {} # type: Dict[str, int]
- self.busy = {} # type: Dict[str, Fetcher]
- self.done = {} # type: Dict[str, Fetcher]
- self.pool = ConnectionPool(self.log, max_pool, max_tasks)
- self.root_domains = set() # type: Set[str]
- for root in roots:
- host = urllib.parse.urlparse(root).hostname
- if not host:
- continue
- if re.match(r'\A[\d\.]*\Z', host):
- self.root_domains.add(host)
- else:
- host = host.lower()
- if self.strict:
- self.root_domains.add(host)
- if host.startswith('www.'):
- self.root_domains.add(host[4:])
- else:
- self.root_domains.add('www.' + host)
- else:
- parts = host.split('.')
- if len(parts) > 2:
- host = '.'.join(parts[-2:])
- self.root_domains.add(host)
- for root in roots:
- self.add_url(root)
- self.governor = asyncio.Semaphore(max_tasks)
- self.termination = asyncio.Condition()
- self.t0 = time.time()
- self.t1 = None # type: Optional[float]
-
- def close(self) -> None:
- """Close resources (currently only the pool)."""
- self.pool.close()
-
- def host_okay(self, host: str) -> bool:
- """Check if a host should be crawled.
-
- A literal match (after lowercasing) is always good. For hosts
- that don't look like IP addresses, some approximate matches
- are okay depending on the strict flag.
- """
- host = host.lower()
- if host in self.root_domains:
- return True
- if re.match(r'\A[\d\.]*\Z', host):
- return False
- if self.strict:
- return self._host_okay_strictish(host)
- else:
- return self._host_okay_lenient(host)
-
- def _host_okay_strictish(self, host: str) -> bool:
- """Check if a host should be crawled, strict-ish version.
-
- This checks for equality modulo an initial 'www.' component.
- """
- if host.startswith('www.'):
- if host[4:] in self.root_domains:
- return True
- else:
- if 'www.' + host in self.root_domains:
- return True
- return False
-
- def _host_okay_lenient(self, host: str) -> bool:
- """Check if a host should be crawled, lenient version.
-
- This compares the last two components of the host.
- """
- parts = host.split('.')
- if len(parts) > 2:
- host = '.'.join(parts[-2:])
- return host in self.root_domains
-
- def add_url(self, url: str, max_redirect: int = None) -> bool:
- """Add a URL to the todo list if not seen before."""
- if self.exclude and re.search(self.exclude, url):
- return False
- parsed = urllib.parse.urlparse(url)
- if parsed.scheme not in ('http', 'https'):
- self.log(2, 'skipping non-http scheme in', url)
- return False
- host = parsed.hostname
- if not self.host_okay(host):
- self.log(2, 'skipping non-root host in', url)
- return False
- if max_redirect is None:
- max_redirect = self.max_redirect
- if url in self.todo or url in self.busy or url in self.done:
- return False
- self.log(1, 'adding', url, max_redirect)
- self.todo[url] = max_redirect
- return True
-
- async def crawl(self) -> None:
- """Run the crawler until all finished."""
- with (await self.termination):
- while self.todo or self.busy:
- if self.todo:
- url, max_redirect = self.todo.popitem()
- fetcher = Fetcher(self.log, url,
- crawler=self,
- max_redirect=max_redirect,
- max_tries=self.max_tries,
- )
- self.busy[url] = fetcher
- fetcher.task = asyncio.Task(self.fetch(fetcher))
- else:
- await self.termination.wait()
- self.t1 = time.time()
-
- async def fetch(self, fetcher: Fetcher) -> None:
- """Call the Fetcher's fetch(), with a limit on concurrency.
-
- Once this returns, move the fetcher from busy to done.
- """
- url = fetcher.url
- with (await self.governor):
- try:
- await fetcher.fetch() # Fetcher gonna fetch.
- finally:
- # Force GC of the task, so the error is logged.
- fetcher.task = None
- with (await self.termination):
- self.done[url] = fetcher
- del self.busy[url]
- self.termination.notify()
-
- def report(self, file: IO[str] = None) -> None:
- """Print a report on all completed URLs."""
- if self.t1 is None:
- self.t1 = time.time()
- dt = self.t1 - self.t0
- if dt and self.max_tasks:
- speed = len(self.done) / dt / self.max_tasks
- else:
- speed = 0
- stats = Stats()
- print('*** Report ***', file=file)
- try:
- show = [] # type: List[Tuple[str, Fetcher]]
- show.extend(self.done.items())
- show.extend(self.busy.items())
- show.sort()
- for url, fetcher in show:
- fetcher.report(stats, file=file)
- except KeyboardInterrupt:
- print('\nInterrupted', file=file)
- print('Finished', len(self.done),
- 'urls in %.3f secs' % dt,
- '(max_tasks=%d)' % self.max_tasks,
- '(%.3f urls/sec/task)' % speed,
- file=file)
- stats.report(file=file)
- print('Todo:', len(self.todo), file=file)
- print('Busy:', len(self.busy), file=file)
- print('Done:', len(self.done), file=file)
- print('Date:', time.ctime(), 'local time', file=file)
-
-
-def main() -> None:
- """Main program.
-
- Parse arguments, set up event loop, run crawler, print report.
- """
- args = ARGS.parse_args()
- if not args.roots:
- print('Use --help for command line help')
- return
-
- log = Logger(args.level)
-
- if args.iocp:
- if sys.platform == 'win32':
- from asyncio import ProactorEventLoop
- loop = ProactorEventLoop() # type: ignore
- asyncio.set_event_loop(loop)
- else:
- assert False
- elif args.select:
- loop = asyncio.SelectorEventLoop() # type: ignore
- asyncio.set_event_loop(loop)
- else:
- loop = asyncio.get_event_loop()
-
- roots = {fix_url(root) for root in args.roots}
-
- crawler = Crawler(log,
- roots, exclude=args.exclude,
- strict=args.strict,
- max_redirect=args.max_redirect,
- max_tries=args.max_tries,
- max_tasks=args.max_tasks,
- max_pool=args.max_pool,
- )
- try:
- loop.run_until_complete(crawler.crawl()) # Crawler gonna crawl.
- except KeyboardInterrupt:
- sys.stderr.flush()
- print('\nInterrupted\n')
- finally:
- crawler.report()
- crawler.close()
- loop.close()
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.INFO) # type: ignore
- main()
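
For reference, the trickiest piece of the removed crawl.py sample is the chunked Transfer-Encoding handling inside Response.read(). A minimal standalone sketch of that loop (assuming an already-connected asyncio.StreamReader named reader; this helper is not part of the removed file) would look like:

    import asyncio

    async def read_chunked(reader: asyncio.StreamReader) -> bytes:
        # Mirrors the loop in Response.read() above: each chunk starts with a
        # hex size line (optionally followed by ';extension'), then the chunk
        # data, then a CRLF; a zero-size chunk terminates the body.
        blocks = []
        while True:
            size_line = await reader.readline()
            if not size_line:           # premature end of the chunked response
                break
            size = int(size_line.split(b';')[0], 16)
            if size:
                blocks.append(await reader.readexactly(size))
            crlf = await reader.readline()
            assert crlf == b'\r\n', repr(crlf)
            if not size:
                break
        return b''.join(blocks)
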
diff --git a/test-data/samples/dict.py b/test-data/samples/dict.py
deleted file mode 100644
index d74a5b5..0000000
--- a/test-data/samples/dict.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import typing
-prices = {'apple': 0.40, 'banana': 0.50}
-my_purchase = {
- 'apple': 1,
- 'banana': 6}
-grocery_bill = sum(prices[fruit] * my_purchase[fruit]
- for fruit in my_purchase)
-print('I owe the grocer $%.2f' % grocery_bill)
diff --git a/test-data/samples/fib.py b/test-data/samples/fib.py
deleted file mode 100644
index 26248c8..0000000
--- a/test-data/samples/fib.py
+++ /dev/null
@@ -1,5 +0,0 @@
-import typing
-parents, babies = (1, 1)
-while babies < 100:
- print('This generation has {0} babies'.format(babies))
- parents, babies = (babies, parents + babies)
diff --git a/test-data/samples/files.py b/test-data/samples/files.py
deleted file mode 100644
index f540c7c..0000000
--- a/test-data/samples/files.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# indent your Python code to put into an email
-import glob
-import typing
-# glob supports Unix style pathname extensions
-python_files = glob.glob('*.py')
-for file_name in sorted(python_files):
- print(' ------' + file_name)
-
- f = open(file_name)
- for line in f:
- print(' ' + line.rstrip())
- f.close()
-
- print()
diff --git a/test-data/samples/for.py b/test-data/samples/for.py
deleted file mode 100644
index f7eeed4..0000000
--- a/test-data/samples/for.py
+++ /dev/null
@@ -1,4 +0,0 @@
-import typing
-friends = ['john', 'pat', 'gary', 'michael']
-for i, name in enumerate(friends):
- print("iteration {iteration} is {name}".format(iteration=i, name=name))
diff --git a/test-data/samples/generators.py b/test-data/samples/generators.py
deleted file mode 100644
index 9150c96..0000000
--- a/test-data/samples/generators.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Prime number sieve with generators
-
-import itertools
-from typing import Iterator
-
-
-def iter_primes() -> Iterator[int]:
- # an iterator of all numbers between 2 and +infinity
- numbers = itertools.count(2)
-
- # generate primes forever
- while True:
- # get the first number from the iterator (always a prime)
- prime = next(numbers)
- yield prime
-
- # this code iteratively builds up a chain of
- # filters...slightly tricky, but ponder it a bit
- numbers = filter(prime.__rmod__, numbers)
-
-for p in iter_primes():
- if p > 1000:
- break
- print(p)
diff --git a/test-data/samples/greet.py b/test-data/samples/greet.py
deleted file mode 100644
index 47e7626..0000000
--- a/test-data/samples/greet.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import typing
-
-
-def greet(name: str) -> None:
- print('Hello', name)
-greet('Jack')
-greet('Jill')
-greet('Bob')
diff --git a/test-data/samples/guess.py b/test-data/samples/guess.py
deleted file mode 100644
index d3f1cee..0000000
--- a/test-data/samples/guess.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# "Guess the Number" Game (edited) from http://inventwithpython.com
-
-import random
-import typing
-
-guesses_made = 0
-
-name = input('Hello! What is your name?\n')
-
-number = random.randint(1, 20)
-print('Well, {0}, I am thinking of a number between 1 and 20.'.format(name))
-
-while guesses_made < 6:
-
- guess = int(input('Take a guess: '))
-
- guesses_made += 1
-
- if guess < number:
- print('Your guess is too low.')
-
- if guess > number:
- print('Your guess is too high.')
-
- if guess == number:
- break
-
-if guess == number:
- print('Good job, {0}! You guessed my number in {1} guesses!'.format(
- name, guesses_made))
-else:
- print('Nope. The number I was thinking of was {0}'.format(number))
diff --git a/test-data/samples/hello.py b/test-data/samples/hello.py
deleted file mode 100644
index 6c0b2ca..0000000
--- a/test-data/samples/hello.py
+++ /dev/null
@@ -1,2 +0,0 @@
-import typing
-print('Hello, world')
diff --git a/test-data/samples/input.py b/test-data/samples/input.py
deleted file mode 100644
index cca9233..0000000
--- a/test-data/samples/input.py
+++ /dev/null
@@ -1,3 +0,0 @@
-import typing
-name = input('What is your name?\n')
-print('Hi, %s.' % name)
diff --git a/test-data/samples/itertool.py b/test-data/samples/itertool.py
deleted file mode 100644
index 9ee2475..0000000
--- a/test-data/samples/itertool.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from itertools import groupby
-import typing
-lines = '''
-This is the
-first paragraph.
-
-This is the second.
-'''.splitlines()
-# Use itertools.groupby and bool to return groups of
-# consecutive lines that either have content or don't.
-for has_chars, frags in groupby(lines, bool):
- if has_chars:
- print(' '.join(frags))
-# PRINTS:
-# This is the first paragraph.
-# This is the second.
diff --git a/test-data/samples/readme.txt b/test-data/samples/readme.txt
deleted file mode 100644
index 5889a8e..0000000
--- a/test-data/samples/readme.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-Mypy Sample Programs
---------------------
-
-The sample programs use static typing unless otherwise noted in comments.
-
-Original credits for sample programs:
-
- fib.py - Python Wiki [1]
- for.py - Python Wiki [1]
- greet.py - Python Wiki [1]
- hello.py - Python Wiki [1]
- input.py - Python Wiki [1]
- regexp.py - Python Wiki [1]
- dict.py - Python Wiki [1]
- cmdline.py - Python Wiki [1]
- files.py - Python Wiki [1]
- bottles.py - Python Wiki [1]
- class.py - Python Wiki [1]
- guess.py - Python Wiki [1]
- generators.py - Python Wiki [1]
- itertool.py - Python Wiki [1]
-
-The sample programs were ported to mypy by Jukka Lehtosalo.
-
-[1] http://wiki.python.org/moin/SimplePrograms
diff --git a/test-data/samples/regexp.py b/test-data/samples/regexp.py
deleted file mode 100644
index 6d8d799..0000000
--- a/test-data/samples/regexp.py
+++ /dev/null
@@ -1,7 +0,0 @@
-import typing
-import re
-for test_string in ['555-1212', 'ILL-EGAL']:
- if re.match(r'^\d{3}-\d{4}$', test_string):
- print(test_string, 'is a valid US local phone number')
- else:
- print(test_string, 'rejected')
diff --git a/test-data/stdlib-samples/3.2/base64.py b/test-data/stdlib-samples/3.2/base64.py
deleted file mode 100644
index ef91964..0000000
--- a/test-data/stdlib-samples/3.2/base64.py
+++ /dev/null
@@ -1,411 +0,0 @@
-#! /usr/bin/env python3
-
-"""RFC 3548: Base16, Base32, Base64 Data Encodings"""
-
-# Modified 04-Oct-1995 by Jack Jansen to use binascii module
-# Modified 30-Dec-2003 by Barry Warsaw to add full RFC 3548 support
-# Modified 22-May-2007 by Guido van Rossum to use bytes everywhere
-
-import re
-import struct
-import binascii
-
-from typing import Dict, List, AnyStr, IO
-
-
-__all__ = [
- # Legacy interface exports traditional RFC 1521 Base64 encodings
- 'encode', 'decode', 'encodebytes', 'decodebytes',
- # Generalized interface for other encodings
- 'b64encode', 'b64decode', 'b32encode', 'b32decode',
- 'b16encode', 'b16decode',
- # Standard Base64 encoding
- 'standard_b64encode', 'standard_b64decode',
- # Some common Base64 alternatives. As referenced by RFC 3458, see thread
- # starting at:
- #
- # http://zgp.org/pipermail/p2p-hackers/2001-September/000316.html
- 'urlsafe_b64encode', 'urlsafe_b64decode',
- ]
-
-
-bytes_types = (bytes, bytearray) # Types acceptable as binary data
-
-
-def _translate(s: bytes, altchars: Dict[AnyStr, bytes]) -> bytes:
- if not isinstance(s, bytes_types):
- raise TypeError("expected bytes, not %s" % s.__class__.__name__)
- translation = bytearray(range(256))
- for k, v in altchars.items():
- translation[ord(k)] = v[0]
- return s.translate(translation)
-
-
-
-# Base64 encoding/decoding uses binascii
-
-def b64encode(s: bytes, altchars: bytes = None) -> bytes:
- """Encode a byte string using Base64.
-
- s is the byte string to encode. Optional altchars must be a byte
- string of length 2 which specifies an alternative alphabet for the
- '+' and '/' characters. This allows an application to
- e.g. generate url or filesystem safe Base64 strings.
-
- The encoded byte string is returned.
- """
- if not isinstance(s, bytes_types):
- raise TypeError("expected bytes, not %s" % s.__class__.__name__)
- # Strip off the trailing newline
- encoded = binascii.b2a_base64(s)[:-1]
- if altchars is not None:
- if not isinstance(altchars, bytes_types):
- raise TypeError("expected bytes, not %s"
- % altchars.__class__.__name__)
- assert len(altchars) == 2, repr(altchars)
- return _translate(encoded, {'+': altchars[0:1], '/': altchars[1:2]})
- return encoded
-
-
-def b64decode(s: bytes, altchars: bytes = None,
- validate: bool = False) -> bytes:
- """Decode a Base64 encoded byte string.
-
- s is the byte string to decode. Optional altchars must be a
- string of length 2 which specifies the alternative alphabet used
- instead of the '+' and '/' characters.
-
- The decoded string is returned. A binascii.Error is raised if s is
- incorrectly padded.
-
- If validate is False (the default), non-base64-alphabet characters are
- discarded prior to the padding check. If validate is True,
- non-base64-alphabet characters in the input result in a binascii.Error.
- """
- if not isinstance(s, bytes_types):
- raise TypeError("expected bytes, not %s" % s.__class__.__name__)
- if altchars is not None:
- if not isinstance(altchars, bytes_types):
- raise TypeError("expected bytes, not %s"
- % altchars.__class__.__name__)
- assert len(altchars) == 2, repr(altchars)
- s = _translate(s, {chr(altchars[0]): b'+', chr(altchars[1]): b'/'})
- if validate and not re.match(b'^[A-Za-z0-9+/]*={0,2}$', s):
- raise binascii.Error('Non-base64 digit found')
- return binascii.a2b_base64(s)
-
-
-def standard_b64encode(s: bytes) -> bytes:
- """Encode a byte string using the standard Base64 alphabet.
-
- s is the byte string to encode. The encoded byte string is returned.
- """
- return b64encode(s)
-
-def standard_b64decode(s: bytes) -> bytes:
- """Decode a byte string encoded with the standard Base64 alphabet.
-
- s is the byte string to decode. The decoded byte string is
- returned. binascii.Error is raised if the input is incorrectly
- padded or if there are non-alphabet characters present in the
- input.
- """
- return b64decode(s)
-
-def urlsafe_b64encode(s: bytes) -> bytes:
- """Encode a byte string using a url-safe Base64 alphabet.
-
- s is the byte string to encode. The encoded byte string is
- returned. The alphabet uses '-' instead of '+' and '_' instead of
- '/'.
- """
- return b64encode(s, b'-_')
-
-def urlsafe_b64decode(s: bytes) -> bytes:
- """Decode a byte string encoded with the standard Base64 alphabet.
-
- s is the byte string to decode. The decoded byte string is
- returned. binascii.Error is raised if the input is incorrectly
- padded or if there are non-alphabet characters present in the
- input.
-
- The alphabet uses '-' instead of '+' and '_' instead of '/'.
- """
- return b64decode(s, b'-_')
-
-
-
-# Base32 encoding/decoding must be done in Python
-_b32alphabet = {
- 0: b'A', 9: b'J', 18: b'S', 27: b'3',
- 1: b'B', 10: b'K', 19: b'T', 28: b'4',
- 2: b'C', 11: b'L', 20: b'U', 29: b'5',
- 3: b'D', 12: b'M', 21: b'V', 30: b'6',
- 4: b'E', 13: b'N', 22: b'W', 31: b'7',
- 5: b'F', 14: b'O', 23: b'X',
- 6: b'G', 15: b'P', 24: b'Y',
- 7: b'H', 16: b'Q', 25: b'Z',
- 8: b'I', 17: b'R', 26: b'2',
- }
-
-_b32tab = [v[0] for k, v in sorted(_b32alphabet.items())]
-_b32rev = dict([(v[0], k) for k, v in _b32alphabet.items()])
-
-
-def b32encode(s: bytes) -> bytes:
- """Encode a byte string using Base32.
-
- s is the byte string to encode. The encoded byte string is returned.
- """
- if not isinstance(s, bytes_types):
- raise TypeError("expected bytes, not %s" % s.__class__.__name__)
- quanta, leftover = divmod(len(s), 5)
- # Pad the last quantum with zero bits if necessary
- if leftover:
- s = s + bytes(5 - leftover) # Don't use += !
- quanta += 1
- encoded = bytes()
- for i in range(quanta):
- # c1 and c2 are 16 bits wide, c3 is 8 bits wide. The intent of this
- # code is to process the 40 bits in units of 5 bits. So we take the 1
- # leftover bit of c1 and tack it onto c2. Then we take the 2 leftover
- # bits of c2 and tack them onto c3. The shifts and masks are intended
- # to give us values of exactly 5 bits in width.
- c1, c2, c3 = struct.unpack('!HHB', s[i*5:(i+1)*5]) # type: (int, int, int)
- c2 += (c1 & 1) << 16 # 17 bits wide
- c3 += (c2 & 3) << 8 # 10 bits wide
- encoded += bytes([_b32tab[c1 >> 11], # bits 1 - 5
- _b32tab[(c1 >> 6) & 0x1f], # bits 6 - 10
- _b32tab[(c1 >> 1) & 0x1f], # bits 11 - 15
- _b32tab[c2 >> 12], # bits 16 - 20 (1 - 5)
- _b32tab[(c2 >> 7) & 0x1f], # bits 21 - 25 (6 - 10)
- _b32tab[(c2 >> 2) & 0x1f], # bits 26 - 30 (11 - 15)
- _b32tab[c3 >> 5], # bits 31 - 35 (1 - 5)
- _b32tab[c3 & 0x1f], # bits 36 - 40 (1 - 5)
- ])
- # Adjust for any leftover partial quanta
- if leftover == 1:
- return encoded[:-6] + b'======'
- elif leftover == 2:
- return encoded[:-4] + b'===='
- elif leftover == 3:
- return encoded[:-3] + b'==='
- elif leftover == 4:
- return encoded[:-1] + b'='
- return encoded
-
-
-def b32decode(s: bytes, casefold: bool = False, map01: bytes = None) -> bytes:
- """Decode a Base32 encoded byte string.
-
- s is the byte string to decode. Optional casefold is a flag
- specifying whether a lowercase alphabet is acceptable as input.
- For security purposes, the default is False.
-
- RFC 3548 allows for optional mapping of the digit 0 (zero) to the
- letter O (oh), and for optional mapping of the digit 1 (one) to
- either the letter I (eye) or letter L (el). The optional argument
- map01 when not None, specifies which letter the digit 1 should be
- mapped to (when map01 is not None, the digit 0 is always mapped to
- the letter O). For security purposes the default is None, so that
- 0 and 1 are not allowed in the input.
-
- The decoded byte string is returned. binascii.Error is raised if
- the input is incorrectly padded or if there are non-alphabet
- characters present in the input.
- """
- if not isinstance(s, bytes_types):
- raise TypeError("expected bytes, not %s" % s.__class__.__name__)
- quanta, leftover = divmod(len(s), 8)
- if leftover:
- raise binascii.Error('Incorrect padding')
- # Handle section 2.4 zero and one mapping. The flag map01 will be either
- # False, or the character to map the digit 1 (one) to. It should be
- # either L (el) or I (eye).
- if map01 is not None:
- if not isinstance(map01, bytes_types):
- raise TypeError("expected bytes, not %s" % map01.__class__.__name__)
- assert len(map01) == 1, repr(map01)
- s = _translate(s, {b'0': b'O', b'1': map01})
- if casefold:
- s = s.upper()
- # Strip off pad characters from the right. We need to count the pad
- # characters because this will tell us how many null bytes to remove from
- # the end of the decoded string.
- padchars = 0
- mo = re.search(b'(?P<pad>[=]*)$', s)
- if mo:
- padchars = len(mo.group('pad'))
- if padchars > 0:
- s = s[:-padchars]
- # Now decode the full quanta
- parts = [] # type: List[bytes]
- acc = 0
- shift = 35
- for c in s:
- val = _b32rev.get(c)
- if val is None:
- raise TypeError('Non-base32 digit found')
- acc += _b32rev[c] << shift
- shift -= 5
- if shift < 0:
- parts.append(binascii.unhexlify(bytes('%010x' % acc, "ascii")))
- acc = 0
- shift = 35
- # Process the last, partial quanta
- last = binascii.unhexlify(bytes('%010x' % acc, "ascii"))
- if padchars == 0:
- last = b'' # No characters
- elif padchars == 1:
- last = last[:-1]
- elif padchars == 3:
- last = last[:-2]
- elif padchars == 4:
- last = last[:-3]
- elif padchars == 6:
- last = last[:-4]
- else:
- raise binascii.Error('Incorrect padding')
- parts.append(last)
- return b''.join(parts)
-
-
-
-# RFC 3548, Base 16 Alphabet specifies uppercase, but hexlify() returns
-# lowercase. The RFC also recommends against accepting input case
-# insensitively.
-def b16encode(s: bytes) -> bytes:
- """Encode a byte string using Base16.
-
- s is the byte string to encode. The encoded byte string is returned.
- """
- if not isinstance(s, bytes_types):
- raise TypeError("expected bytes, not %s" % s.__class__.__name__)
- return binascii.hexlify(s).upper()
-
-
-def b16decode(s: bytes, casefold: bool = False) -> bytes:
- """Decode a Base16 encoded byte string.
-
- s is the byte string to decode. Optional casefold is a flag
- specifying whether a lowercase alphabet is acceptable as input.
- For security purposes, the default is False.
-
- The decoded byte string is returned. binascii.Error is raised if
-    s is incorrectly padded or if there are non-alphabet characters
- present in the string.
- """
- if not isinstance(s, bytes_types):
- raise TypeError("expected bytes, not %s" % s.__class__.__name__)
- if casefold:
- s = s.upper()
- if re.search(b'[^0-9A-F]', s):
- raise binascii.Error('Non-base16 digit found')
- return binascii.unhexlify(s)
-
-
-
-# Legacy interface. This code could be cleaned up since I don't believe
-# binascii has any line length limitations. It just doesn't seem worth it
-# though. The files should be opened in binary mode.
-
-MAXLINESIZE = 76 # Excluding the CRLF
-MAXBINSIZE = (MAXLINESIZE//4)*3
-
-def encode(input: IO[bytes], output: IO[bytes]) -> None:
- """Encode a file; input and output are binary files."""
- while True:
- s = input.read(MAXBINSIZE)
- if not s:
- break
- while len(s) < MAXBINSIZE:
- ns = input.read(MAXBINSIZE-len(s))
- if not ns:
- break
- s += ns
- line = binascii.b2a_base64(s)
- output.write(line)
-
-
-def decode(input: IO[bytes], output: IO[bytes]) -> None:
- """Decode a file; input and output are binary files."""
- while True:
- line = input.readline()
- if not line:
- break
- s = binascii.a2b_base64(line)
- output.write(s)
-
-
-def encodebytes(s: bytes) -> bytes:
- """Encode a bytestring into a bytestring containing multiple lines
- of base-64 data."""
- if not isinstance(s, bytes_types):
- raise TypeError("expected bytes, not %s" % s.__class__.__name__)
- pieces = [] # type: List[bytes]
- for i in range(0, len(s), MAXBINSIZE):
- chunk = s[i : i + MAXBINSIZE]
- pieces.append(binascii.b2a_base64(chunk))
- return b"".join(pieces)
-
-def encodestring(s: bytes) -> bytes:
- """Legacy alias of encodebytes()."""
- import warnings
- warnings.warn("encodestring() is a deprecated alias, use encodebytes()",
- DeprecationWarning, 2)
- return encodebytes(s)
-
-
-def decodebytes(s: bytes) -> bytes:
- """Decode a bytestring of base-64 data into a bytestring."""
- if not isinstance(s, bytes_types):
- raise TypeError("expected bytes, not %s" % s.__class__.__name__)
- return binascii.a2b_base64(s)
-
-def decodestring(s: bytes) -> bytes:
- """Legacy alias of decodebytes()."""
- import warnings
- warnings.warn("decodestring() is a deprecated alias, use decodebytes()",
- DeprecationWarning, 2)
- return decodebytes(s)
-
-
-# Usable as a script...
-def main() -> None:
- """Small main program"""
- import sys, getopt
- try:
- opts, args = getopt.getopt(sys.argv[1:], 'deut')
- except getopt.error as msg:
- sys.stdout = sys.stderr
- print(msg)
- print("""usage: %s [-d|-e|-u|-t] [file|-]
- -d, -u: decode
- -e: encode (default)
- -t: encode and decode string 'Aladdin:open sesame'"""%sys.argv[0])
- sys.exit(2)
- func = encode
- for o, a in opts:
- if o == '-e': func = encode
- if o == '-d': func = decode
- if o == '-u': func = decode
- if o == '-t': test(); return
- if args and args[0] != '-':
- with open(args[0], 'rb') as f:
- func(f, sys.stdout.buffer)
- else:
- func(sys.stdin.buffer, sys.stdout.buffer)
-
-
-def test() -> None:
- s0 = b"Aladdin:open sesame"
- print(repr(s0))
- s1 = encodebytes(s0)
- print(repr(s1))
- s2 = decodebytes(s1)
- print(repr(s2))
- assert s0 == s2
-
-
-if __name__ == '__main__':
- main()
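
The docstrings above note that the url-safe alphabet differs from the standard Base64 alphabet only in using '-' and '_' in place of '+' and '/'. A quick check against the standard-library base64 module, which this removed sample mirrors:

    import base64

    data = b'\xfb\xff\xfe'                    # chosen so standard Base64 emits '+' and '/'
    print(base64.b64encode(data))             # b'+//+'
    print(base64.urlsafe_b64encode(data))     # b'-__-'
    print(base64.urlsafe_b64decode(b'-__-'))  # b'\xfb\xff\xfe'
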
diff --git a/test-data/stdlib-samples/3.2/fnmatch.py b/test-data/stdlib-samples/3.2/fnmatch.py
deleted file mode 100644
index ec27b90..0000000
--- a/test-data/stdlib-samples/3.2/fnmatch.py
+++ /dev/null
@@ -1,112 +0,0 @@
-"""Filename matching with shell patterns.
-
-fnmatch(FILENAME, PATTERN) matches according to the local convention.
-fnmatchcase(FILENAME, PATTERN) always takes case into account.
-
-The functions operate by translating the pattern into a regular
-expression. They cache the compiled regular expressions for speed.
-
-The function translate(PATTERN) returns a regular expression
-corresponding to PATTERN. (It does not compile it.)
-"""
-import os
-import posixpath
-import re
-import functools
-
-from typing import Iterable, List, AnyStr, Any, Callable, Match
-
-__all__ = ["filter", "fnmatch", "fnmatchcase", "translate"]
-
-def fnmatch(name: AnyStr, pat: AnyStr) -> bool:
- """Test whether FILENAME matches PATTERN.
-
- Patterns are Unix shell style:
-
- * matches everything
- ? matches any single character
- [seq] matches any character in seq
- [!seq] matches any char not in seq
-
- An initial period in FILENAME is not special.
- Both FILENAME and PATTERN are first case-normalized
- if the operating system requires it.
- If you don't want this, use fnmatchcase(FILENAME, PATTERN).
- """
- name = os.path.normcase(name)
- pat = os.path.normcase(pat)
- return fnmatchcase(name, pat)
-
-@functools.lru_cache(maxsize=250)
-def _compile_pattern(pat: AnyStr,
- is_bytes: bool = False) -> Callable[[AnyStr],
- Match[AnyStr]]:
- if isinstance(pat, bytes):
- pat_str = str(pat, 'ISO-8859-1')
- res_str = translate(pat_str)
- res = bytes(res_str, 'ISO-8859-1')
- else:
- res = translate(pat)
- return re.compile(res).match
-
-def filter(names: Iterable[AnyStr], pat: AnyStr) -> List[AnyStr]:
- """Return the subset of the list NAMES that match PAT."""
- result = [] # type: List[AnyStr]
- pat = os.path.normcase(pat)
- match = _compile_pattern(pat, isinstance(pat, bytes))
- if os.path is posixpath:
- # normcase on posix is NOP. Optimize it away from the loop.
- for name in names:
- if match(name):
- result.append(name)
- else:
- for name in names:
- if match(os.path.normcase(name)):
- result.append(name)
- return result
-
-def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool:
- """Test whether FILENAME matches PATTERN, including case.
-
- This is a version of fnmatch() which doesn't case-normalize
- its arguments.
- """
- match = _compile_pattern(pat, isinstance(pat, bytes))
- return match(name) is not None
-
-def translate(pat: str) -> str:
- """Translate a shell PATTERN to a regular expression.
-
- There is no way to quote meta-characters.
- """
-
- i, n = 0, len(pat)
- res = ''
- while i < n:
- c = pat[i]
- i = i+1
- if c == '*':
- res = res + '.*'
- elif c == '?':
- res = res + '.'
- elif c == '[':
- j = i
- if j < n and pat[j] == '!':
- j = j+1
- if j < n and pat[j] == ']':
- j = j+1
- while j < n and pat[j] != ']':
- j = j+1
- if j >= n:
- res = res + '\\['
- else:
- stuff = pat[i:j].replace('\\','\\\\')
- i = j+1
- if stuff[0] == '!':
- stuff = '^' + stuff[1:]
- elif stuff[0] == '^':
- stuff = '\\' + stuff
- res = '%s[%s]' % (res, stuff)
- else:
- res = res + re.escape(c)
- return res + '\Z(?ms)'
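
As the module docstring above explains, matching works by translating the shell pattern into a regular expression and caching the compiled result; the translate() shown here turns '*.py' into '.*\.py\Z(?ms)'. A small usage sketch against the standard-library fnmatch module (whose translate() output format has since changed):

    import fnmatch

    print(fnmatch.fnmatch('crawl.py', '*.py'))        # True
    print(fnmatch.fnmatchcase('CRAWL.PY', '*.py'))    # False -- never case-normalizes
    print(fnmatch.filter(['a.py', 'b.txt'], '*.py'))  # ['a.py']
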
diff --git a/test-data/stdlib-samples/3.2/genericpath.py b/test-data/stdlib-samples/3.2/genericpath.py
deleted file mode 100644
index bd1fddf..0000000
--- a/test-data/stdlib-samples/3.2/genericpath.py
+++ /dev/null
@@ -1,112 +0,0 @@
-"""
-Path operations common to more than one OS
-Do not use directly. The OS specific modules import the appropriate
-functions from this module themselves.
-"""
-import os
-import stat
-
-from typing import (
- Any as Any_, List as List_, AnyStr as AnyStr_, Tuple as Tuple_
-)
-
-__all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime',
- 'getsize', 'isdir', 'isfile']
-
-
-# Does a path exist?
-# This is false for dangling symbolic links on systems that support them.
-def exists(path: AnyStr_) -> bool:
- """Test whether a path exists. Returns False for broken symbolic links"""
- try:
- os.stat(path)
- except os.error:
- return False
- return True
-
-
-# This follows symbolic links, so both islink() and isfile() can be true
-# for the same path on systems that support symlinks
-def isfile(path: AnyStr_) -> bool:
- """Test whether a path is a regular file"""
- try:
- st = os.stat(path)
- except os.error:
- return False
- return stat.S_ISREG(st.st_mode)
-
-
-# Is a path a directory?
-# This follows symbolic links, so both islink() and isdir()
-# can be true for the same path on systems that support symlinks
-def isdir(s: AnyStr_) -> bool:
- """Return true if the pathname refers to an existing directory."""
- try:
- st = os.stat(s)
- except os.error:
- return False
- return stat.S_ISDIR(st.st_mode)
-
-
-def getsize(filename: AnyStr_) -> int:
- """Return the size of a file, reported by os.stat()."""
- return os.stat(filename).st_size
-
-
-def getmtime(filename: AnyStr_) -> float:
- """Return the last modification time of a file, reported by os.stat()."""
- return os.stat(filename).st_mtime
-
-
-def getatime(filename: AnyStr_) -> float:
- """Return the last access time of a file, reported by os.stat()."""
- return os.stat(filename).st_atime
-
-
-def getctime(filename: AnyStr_) -> float:
- """Return the metadata change time of a file, reported by os.stat()."""
- return os.stat(filename).st_ctime
-
-
-# Return the longest prefix of all list elements.
-def commonprefix(m: List_[Any_]) -> Any_:
- "Given a list of pathnames, returns the longest common leading component"
- if not m: return ''
- s1 = min(m)
- s2 = max(m)
- for i, c in enumerate(s1):
- if c != s2[i]:
- return s1[:i]
- return s1
-
-
-# Split a path in root and extension.
-# The extension is everything starting at the last dot in the last
-# pathname component; the root is everything before that.
-# It is always true that root + ext == p.
-
-# Generic implementation of splitext, to be parametrized with
-# the separators
-def _splitext(p: AnyStr_, sep: AnyStr_, altsep: AnyStr_,
- extsep: AnyStr_) -> Tuple_[AnyStr_, AnyStr_]:
- """Split the extension from a pathname.
-
- Extension is everything from the last dot to the end, ignoring
- leading dots. Returns "(root, ext)"; ext may be empty."""
- # NOTE: This code must work for text and bytes strings.
-
- sepIndex = p.rfind(sep)
- if altsep:
- altsepIndex = p.rfind(altsep)
- sepIndex = max(sepIndex, altsepIndex)
-
- dotIndex = p.rfind(extsep)
- if dotIndex > sepIndex:
- # skip all leading dots
- filenameIndex = sepIndex + 1
- while filenameIndex < dotIndex:
- if p[filenameIndex:filenameIndex+1] != extsep:
- return p[:dotIndex], p[dotIndex:]
- filenameIndex += 1
-
- return p, p[:0]
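
The _splitext() helper above encodes the rule that an extension starts at the last dot of the last path component and that leading dots do not count; os.path.splitext() is the public wrapper that supplies the OS-specific separators. A few concrete cases:

    import os.path

    print(os.path.splitext('archive.tar.gz'))  # ('archive.tar', '.gz') -- only the last dot counts
    print(os.path.splitext('.bashrc'))         # ('.bashrc', '')        -- a leading dot is not an extension
    print(os.path.splitext('dir.v2/readme'))   # ('dir.v2/readme', '')  -- dot sits in an earlier component
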
diff --git a/test-data/stdlib-samples/3.2/getopt.py b/test-data/stdlib-samples/3.2/getopt.py
deleted file mode 100644
index 32f5bce..0000000
--- a/test-data/stdlib-samples/3.2/getopt.py
+++ /dev/null
@@ -1,220 +0,0 @@
-"""Parser for command line options.
-
-This module helps scripts to parse the command line arguments in
-sys.argv. It supports the same conventions as the Unix getopt()
-function (including the special meanings of arguments of the form `-'
-and `--'). Long options similar to those supported by GNU software
-may be used as well via an optional third argument. This module
-provides two functions and an exception:
-
-getopt() -- Parse command line options
-gnu_getopt() -- Like getopt(), but allow option and non-option arguments
-to be intermixed.
-GetoptError -- exception (class) raised with 'opt' attribute, which is the
-option involved with the exception.
-"""
-
-# Long option support added by Lars Wirzenius <liw at iki.fi>.
-#
-# Gerrit Holl <gerrit at nl.linux.org> moved the string-based exceptions
-# to class-based exceptions.
-#
-# Peter Åstrand <astrand at lysator.liu.se> added gnu_getopt().
-#
-# TODO for gnu_getopt():
-#
-# - GNU getopt_long_only mechanism
-# - allow the caller to specify ordering
-# - RETURN_IN_ORDER option
-# - GNU extension with '-' as first character of option string
-# - optional arguments, specified by double colons
-# - an option string with a W followed by semicolon should
-# treat "-W foo" as "--foo"
-
-__all__ = ["GetoptError","error","getopt","gnu_getopt"]
-
-import os
-
-from typing import List, Tuple, Iterable
-
-class GetoptError(Exception):
- opt = ''
- msg = ''
- def __init__(self, msg: str, opt: str = '') -> None:
- self.msg = msg
- self.opt = opt
- Exception.__init__(self, msg, opt)
-
- def __str__(self) -> str:
- return self.msg
-
-error = GetoptError # backward compatibility
-
-def getopt(args: List[str], shortopts: str,
- longopts: Iterable[str] = []) -> Tuple[List[Tuple[str, str]],
- List[str]]:
- """getopt(args, options[, long_options]) -> opts, args
-
- Parses command line options and parameter list. args is the
- argument list to be parsed, without the leading reference to the
- running program. Typically, this means "sys.argv[1:]". shortopts
- is the string of option letters that the script wants to
- recognize, with options that require an argument followed by a
- colon (i.e., the same format that Unix getopt() uses). If
- specified, longopts is a list of strings with the names of the
- long options which should be supported. The leading '--'
- characters should not be included in the option name. Options
- which require an argument should be followed by an equal sign
- ('=').
-
- The return value consists of two elements: the first is a list of
- (option, value) pairs; the second is the list of program arguments
- left after the option list was stripped (this is a trailing slice
- of the first argument). Each option-and-value pair returned has
- the option as its first element, prefixed with a hyphen (e.g.,
- '-x'), and the option argument as its second element, or an empty
- string if the option has no argument. The options occur in the
- list in the same order in which they were found, thus allowing
- multiple occurrences. Long and short options may be mixed.
-
- """
-
- opts = [] # type: List[Tuple[str, str]]
- if isinstance(longopts, str):
- longopts = [longopts]
- else:
- longopts = list(longopts)
- while args and args[0].startswith('-') and args[0] != '-':
- if args[0] == '--':
- args = args[1:]
- break
- if args[0].startswith('--'):
- opts, args = do_longs(opts, args[0][2:], longopts, args[1:])
- else:
- opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:])
-
- return opts, args
-
-def gnu_getopt(args: List[str], shortopts: str,
- longopts: Iterable[str] = []) -> Tuple[List[Tuple[str, str]],
- List[str]]:
- """getopt(args, options[, long_options]) -> opts, args
-
- This function works like getopt(), except that GNU style scanning
- mode is used by default. This means that option and non-option
- arguments may be intermixed. The getopt() function stops
- processing options as soon as a non-option argument is
- encountered.
-
- If the first character of the option string is `+', or if the
- environment variable POSIXLY_CORRECT is set, then option
- processing stops as soon as a non-option argument is encountered.
-
- """
-
- opts = [] # type: List[Tuple[str, str]]
- prog_args = [] # type: List[str]
- if isinstance(longopts, str):
- longopts = [longopts]
- else:
- longopts = list(longopts)
-
- # Allow options after non-option arguments?
- if shortopts.startswith('+'):
- shortopts = shortopts[1:]
- all_options_first = True
- elif os.environ.get("POSIXLY_CORRECT"):
- all_options_first = True
- else:
- all_options_first = False
-
- while args:
- if args[0] == '--':
- prog_args += args[1:]
- break
-
- if args[0][:2] == '--':
- opts, args = do_longs(opts, args[0][2:], longopts, args[1:])
- elif args[0][:1] == '-' and args[0] != '-':
- opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:])
- else:
- if all_options_first:
- prog_args += args
- break
- else:
- prog_args.append(args[0])
- args = args[1:]
-
- return opts, prog_args
-
-def do_longs(opts: List[Tuple[str, str]], opt: str,
- longopts: List[str],
- args: List[str]) -> Tuple[List[Tuple[str, str]], List[str]]:
- try:
- i = opt.index('=')
- except ValueError:
- optarg = None # type: str
- else:
- opt, optarg = opt[:i], opt[i+1:]
-
- has_arg, opt = long_has_args(opt, longopts)
- if has_arg:
- if optarg is None:
- if not args:
- raise GetoptError('option --%s requires argument' % opt, opt)
- optarg, args = args[0], args[1:]
- elif optarg is not None:
- raise GetoptError('option --%s must not have an argument' % opt, opt)
- opts.append(('--' + opt, optarg or ''))
- return opts, args
-
-# Return:
-# has_arg?
-# full option name
-def long_has_args(opt: str, longopts: List[str]) -> Tuple[bool, str]:
- possibilities = [o for o in longopts if o.startswith(opt)]
- if not possibilities:
- raise GetoptError('option --%s not recognized' % opt, opt)
- # Is there an exact match?
- if opt in possibilities:
- return False, opt
- elif opt + '=' in possibilities:
- return True, opt
- # No exact match, so better be unique.
- if len(possibilities) > 1:
- # XXX since possibilities contains all valid continuations, might be
- # nice to work them into the error msg
- raise GetoptError('option --%s not a unique prefix' % opt, opt)
- assert len(possibilities) == 1
- unique_match = possibilities[0]
- has_arg = unique_match.endswith('=')
- if has_arg:
- unique_match = unique_match[:-1]
- return has_arg, unique_match
-
-def do_shorts(opts: List[Tuple[str, str]], optstring: str,
- shortopts: str, args: List[str]) -> Tuple[List[Tuple[str, str]],
- List[str]]:
- while optstring != '':
- opt, optstring = optstring[0], optstring[1:]
- if short_has_arg(opt, shortopts):
- if optstring == '':
- if not args:
- raise GetoptError('option -%s requires argument' % opt,
- opt)
- optstring, args = args[0], args[1:]
- optarg, optstring = optstring, ''
- else:
- optarg = ''
- opts.append(('-' + opt, optarg))
- return opts, args
-
-def short_has_arg(opt: str, shortopts: str) -> bool:
- for i in range(len(shortopts)):
- if opt == shortopts[i] != ':':
- return shortopts.startswith(':', i+1)
- raise GetoptError('option -%s not recognized' % opt, opt)
-
-if __name__ == '__main__':
- import sys
- print(getopt(sys.argv[1:], "a:b", ["alpha=", "beta"]))
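
The getopt() docstring above describes the (option, value) pairs it returns and notes that parsing stops at the first non-option argument; a concrete run with the standard-library getopt module, which this annotated copy tracks:

    import getopt

    opts, args = getopt.getopt(['-a', '1', '-b', '--alpha', 'x', 'rest'],
                               'a:b', ['alpha=', 'beta'])
    print(opts)  # [('-a', '1'), ('-b', ''), ('--alpha', 'x')]
    print(args)  # ['rest']
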
diff --git a/test-data/stdlib-samples/3.2/glob.py b/test-data/stdlib-samples/3.2/glob.py
deleted file mode 100644
index 0f3d5f5..0000000
--- a/test-data/stdlib-samples/3.2/glob.py
+++ /dev/null
@@ -1,84 +0,0 @@
-"""Filename globbing utility."""
-
-import os
-import re
-import fnmatch
-
-from typing import List, Iterator, Iterable, Any, AnyStr
-
-__all__ = ["glob", "iglob"]
-
-def glob(pathname: AnyStr) -> List[AnyStr]:
- """Return a list of paths matching a pathname pattern.
-
- The pattern may contain simple shell-style wildcards a la fnmatch.
-
- """
- return list(iglob(pathname))
-
-def iglob(pathname: AnyStr) -> Iterator[AnyStr]:
- """Return an iterator which yields the paths matching a pathname pattern.
-
- The pattern may contain simple shell-style wildcards a la fnmatch.
-
- """
- if not has_magic(pathname):
- if os.path.lexists(pathname):
- yield pathname
- return
- dirname, basename = os.path.split(pathname)
- if not dirname:
- for name in glob1(None, basename):
- yield name
- return
- if has_magic(dirname):
- dirs = iglob(dirname) # type: Iterable[AnyStr]
- else:
- dirs = [dirname]
- if has_magic(basename):
- glob_in_dir = glob1 # type: Any
- else:
- glob_in_dir = glob0
- for dirname in dirs:
- for name in glob_in_dir(dirname, basename):
- yield os.path.join(dirname, name)
-
-# These 2 helper functions non-recursively glob inside a literal directory.
-# They return a list of basenames. `glob1` accepts a pattern while `glob0`
-# takes a literal basename (so it only has to check for its existence).
-
-def glob1(dirname: AnyStr, pattern: AnyStr) -> List[AnyStr]:
- if not dirname:
- if isinstance(pattern, bytes):
- dirname = bytes(os.curdir, 'ASCII')
- else:
- dirname = os.curdir
- try:
- names = os.listdir(dirname)
- except os.error:
- return []
- if pattern[0] != '.':
- names = [x for x in names if x[0] != '.']
- return fnmatch.filter(names, pattern)
-
-def glob0(dirname: AnyStr, basename: AnyStr) -> List[AnyStr]:
- if basename == '':
- # `os.path.split()` returns an empty basename for paths ending with a
- # directory separator. 'q*x/' should match only directories.
- if os.path.isdir(dirname):
- return [basename]
- else:
- if os.path.lexists(os.path.join(dirname, basename)):
- return [basename]
- return []
-
-
-magic_check = re.compile('[*?[]')
-magic_check_bytes = re.compile(b'[*?[]')
-
-def has_magic(s: AnyStr) -> bool:
- if isinstance(s, bytes):
- match = magic_check_bytes.search(s)
- else:
- match = magic_check.search(s)
- return match is not None
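
glob() above simply materializes iglob(), which splits off the directory part and recurses whenever that part itself contains magic characters; hidden names are skipped unless the pattern component starts with a dot. A short usage sketch (the pattern and the printed paths are illustrative only):

    import glob

    for path in glob.iglob('test-data/samples/*.py'):  # lazy; yields one match at a time
        print(path)

    print(glob.glob('[gf]*.py'))  # character classes work too, e.g. ['fnmatch.py', 'glob.py']
                                  # if run in a directory containing those files
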
diff --git a/test-data/stdlib-samples/3.2/incomplete/logging/__init__.py b/test-data/stdlib-samples/3.2/incomplete/logging/__init__.py
deleted file mode 100644
index aa861eb..0000000
--- a/test-data/stdlib-samples/3.2/incomplete/logging/__init__.py
+++ /dev/null
@@ -1,1873 +0,0 @@
-# Copyright 2001-2010 by Vinay Sajip. All Rights Reserved.
-#
-# Permission to use, copy, modify, and distribute this software and its
-# documentation for any purpose and without fee is hereby granted,
-# provided that the above copyright notice appear in all copies and that
-# both that copyright notice and this permission notice appear in
-# supporting documentation, and that the name of Vinay Sajip
-# not be used in advertising or publicity pertaining to distribution
-# of the software without specific, written prior permission.
-# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
-# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
-# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
-# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
-# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-"""
-Logging package for Python. Based on PEP 282 and comments thereto in
-comp.lang.python, and influenced by Apache's log4j system.
-
-Copyright (C) 2001-2011 Vinay Sajip. All Rights Reserved.
-
-To use, simply 'import logging' and log away!
-"""
-
-import sys, os, time, io, traceback, warnings, weakref
-from string import Template
-
-__all__ = ['BASIC_FORMAT', 'BufferingFormatter', 'CRITICAL', 'DEBUG', 'ERROR',
- 'FATAL', 'FileHandler', 'Filter', 'Formatter', 'Handler', 'INFO',
- 'LogRecord', 'Logger', 'LoggerAdapter', 'NOTSET', 'NullHandler',
- 'StreamHandler', 'WARN', 'WARNING', 'addLevelName', 'basicConfig',
- 'captureWarnings', 'critical', 'debug', 'disable', 'error',
- 'exception', 'fatal', 'getLevelName', 'getLogger', 'getLoggerClass',
- 'info', 'log', 'makeLogRecord', 'setLoggerClass', 'warn', 'warning',
- 'getLogRecordFactory', 'setLogRecordFactory', 'lastResort']
-
-import codecs
-
-import _thread as thread
-import threading
-
-__author__ = "Vinay Sajip <vinay_sajip at red-dove.com>"
-__status__ = "production"
-__version__ = "0.5.1.2"
-__date__ = "07 February 2010"
-
-#---------------------------------------------------------------------------
-# Miscellaneous module data
-#---------------------------------------------------------------------------
-
-#
-# _srcfile is used when walking the stack to check when we've got the first
-# caller stack frame.
-#
-if hasattr(sys, 'frozen'): #support for py2exe
- _srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:])
-else:
- _srcfile = __file__
-_srcfile = os.path.normcase(_srcfile)
-
-# next bit filched from 1.5.2's inspect.py
-def _currentframe():
- """Return the frame object for the caller's stack frame."""
- try:
- raise Exception
- except:
- return sys.exc_info()[2].tb_frame.f_back
-currentframe = _currentframe
-
-if hasattr(sys, '_getframe'): currentframe = lambda: sys._getframe(3)
-# done filching
-
-# _srcfile is only used in conjunction with sys._getframe().
-# To provide compatibility with older versions of Python, set _srcfile
-# to None if _getframe() is not available; this value will prevent
-# findCaller() from being called.
-#if not hasattr(sys, "_getframe"):
-# _srcfile = None
-
-#
-#_startTime is used as the base when calculating the relative time of events
-#
-_startTime = time.time()
-
-#
-#raiseExceptions is used to see if exceptions during handling should be
-#propagated
-#
-raiseExceptions = 1
-
-#
-# If you don't want threading information in the log, set this to zero
-#
-logThreads = 1
-
-#
-# If you don't want multiprocessing information in the log, set this to zero
-#
-logMultiprocessing = 1
-
-#
-# If you don't want process information in the log, set this to zero
-#
-logProcesses = 1
-
-#---------------------------------------------------------------------------
-# Level related stuff
-#---------------------------------------------------------------------------
-#
-# Default levels and level names, these can be replaced with any positive set
-# of values having corresponding names. There is a pseudo-level, NOTSET, which
-# is only really there as a lower limit for user-defined levels. Handlers and
-# loggers are initialized with NOTSET so that they will log all messages, even
-# at user-defined levels.
-#
-
-CRITICAL = 50
-FATAL = CRITICAL
-ERROR = 40
-WARNING = 30
-WARN = WARNING
-INFO = 20
-DEBUG = 10
-NOTSET = 0
-
-_levelNames = {
- CRITICAL : 'CRITICAL',
- ERROR : 'ERROR',
- WARNING : 'WARNING',
- INFO : 'INFO',
- DEBUG : 'DEBUG',
- NOTSET : 'NOTSET',
- 'CRITICAL' : CRITICAL,
- 'ERROR' : ERROR,
- 'WARN' : WARNING,
- 'WARNING' : WARNING,
- 'INFO' : INFO,
- 'DEBUG' : DEBUG,
- 'NOTSET' : NOTSET,
-}
-
-def getLevelName(level):
- """
- Return the textual representation of logging level 'level'.
-
- If the level is one of the predefined levels (CRITICAL, ERROR, WARNING,
- INFO, DEBUG) then you get the corresponding string. If you have
- associated levels with names using addLevelName then the name you have
- associated with 'level' is returned.
-
- If a numeric value corresponding to one of the defined levels is passed
- in, the corresponding string representation is returned.
-
- Otherwise, the string "Level %s" % level is returned.
- """
- return _levelNames.get(level, ("Level %s" % level))
-
-def addLevelName(level, levelName):
- """
- Associate 'levelName' with 'level'.
-
- This is used when converting levels to text during message formatting.
- """
- _acquireLock()
- try: #unlikely to cause an exception, but you never know...
- _levelNames[level] = levelName
- _levelNames[levelName] = level
- finally:
- _releaseLock()
-
-def _checkLevel(level):
- if isinstance(level, int):
- rv = level
- elif str(level) == level:
- if level not in _levelNames:
- raise ValueError("Unknown level: %r" % level)
- rv = _levelNames[level]
- else:
- raise TypeError("Level not an integer or a valid string: %r" % level)
- return rv
-
-#---------------------------------------------------------------------------
-# Thread-related stuff
-#---------------------------------------------------------------------------
-
-#
-#_lock is used to serialize access to shared data structures in this module.
-#This needs to be an RLock because fileConfig() creates and configures
-#Handlers, and so might arbitrary user threads. Since Handler code updates the
-#shared dictionary _handlers, it needs to acquire the lock. But if configuring,
-#the lock would already have been acquired - so we need an RLock.
-#The same argument applies to Loggers and Manager.loggerDict.
-#
-if thread:
- _lock = threading.RLock()
-else:
- _lock = None
-
-
-def _acquireLock():
- """
- Acquire the module-level lock for serializing access to shared data.
-
- This should be released with _releaseLock().
- """
- if _lock:
- _lock.acquire()
-
-def _releaseLock():
- """
- Release the module-level lock acquired by calling _acquireLock().
- """
- if _lock:
- _lock.release()
-
-#---------------------------------------------------------------------------
-# The logging record
-#---------------------------------------------------------------------------
-
-class LogRecord(object):
- """
- A LogRecord instance represents an event being logged.
-
- LogRecord instances are created every time something is logged. They
- contain all the information pertinent to the event being logged. The
- main information passed in is in msg and args, which are combined
- using str(msg) % args to create the message field of the record. The
- record also includes information such as when the record was created,
- the source line where the logging call was made, and any exception
- information to be logged.
- """
- def __init__(self, name, level, pathname, lineno,
- msg, args, exc_info, func=None, sinfo=None, **kwargs):
- """
- Initialize a logging record with interesting information.
- """
- ct = time.time()
- self.name = name
- self.msg = msg
- #
- # The following statement allows passing of a dictionary as a sole
- # argument, so that you can do something like
- # logging.debug("a %(a)d b %(b)s", {'a':1, 'b':2})
- # Suggested by Stefan Behnel.
- # Note that without the test for args[0], we get a problem because
- # during formatting, we test to see if the arg is present using
- # 'if self.args:'. If the event being logged is e.g. 'Value is %d'
- # and if the passed arg fails 'if self.args:' then no formatting
- # is done. For example, logger.warn('Value is %d', 0) would log
- # 'Value is %d' instead of 'Value is 0'.
- # For the use case of passing a dictionary, this should not be a
- # problem.
- if args and len(args) == 1 and isinstance(args[0], dict) and args[0]:
- args = args[0]
- self.args = args
- self.levelname = getLevelName(level)
- self.levelno = level
- self.pathname = pathname
- try:
- self.filename = os.path.basename(pathname)
- self.module = os.path.splitext(self.filename)[0]
- except (TypeError, ValueError, AttributeError):
- self.filename = pathname
- self.module = "Unknown module"
- self.exc_info = exc_info
- self.exc_text = None # used to cache the traceback text
- self.stack_info = sinfo
- self.lineno = lineno
- self.funcName = func
- self.created = ct
- self.msecs = (ct - int(ct)) * 1000
- self.relativeCreated = (self.created - _startTime) * 1000
- if logThreads and thread:
- self.thread = thread.get_ident()
- self.threadName = threading.current_thread().name
- else:
- self.thread = None
- self.threadName = None
- if not logMultiprocessing:
- self.processName = None
- else:
- self.processName = 'MainProcess'
- mp = sys.modules.get('multiprocessing')
- if mp is not None:
- # Errors may occur if multiprocessing has not finished loading
- # yet - e.g. if a custom import hook causes third-party code
- # to run when multiprocessing calls import. See issue 8200
- # for an example
- try:
- self.processName = mp.current_process().name
- except Exception:
- pass
- if logProcesses and hasattr(os, 'getpid'):
- self.process = os.getpid()
- else:
- self.process = None
-
- def __str__(self):
- return '<LogRecord: %s, %s, %s, %s, "%s">'%(self.name, self.levelno,
- self.pathname, self.lineno, self.msg)
-
- def getMessage(self):
- """
- Return the message for this LogRecord.
-
- Return the message for this LogRecord after merging any user-supplied
- arguments with the message.
- """
- msg = str(self.msg)
- if self.args:
- msg = msg % self.args
- return msg
-
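# Illustrative sketch (not part of the original module): the single-dict
# argument form discussed in the comments inside __init__ above.
import logging

logging.basicConfig(level=logging.DEBUG)
logging.debug("a %(a)d b %(b)s", {"a": 1, "b": 2})   # logs "a 1 b 2"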
-#
-# Determine which class to use when instantiating log records.
-#
-_logRecordFactory = LogRecord
-
-def setLogRecordFactory(factory):
- """
- Set the factory to be used when instantiating a log record.
-
- :param factory: A callable which will be called to instantiate
- a log record.
- """
- global _logRecordFactory
- _logRecordFactory = factory
-
-def getLogRecordFactory():
- """
- Return the factory to be used when instantiating a log record.
- """
-
- return _logRecordFactory
-
-def makeLogRecord(dict):
- """
- Make a LogRecord whose attributes are defined by the specified dictionary.
- This function is useful for converting a logging event received over
- a socket connection (which is sent as a dictionary) into a LogRecord
- instance.
- """
- rv = _logRecordFactory(None, None, "", 0, "", (), None, None)
- rv.__dict__.update(dict)
- return rv
-
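# Illustrative sketch (not part of the original module): rebuilding a record
# from a plain dict, e.g. one received over a socket. The field values here
# are made up for this example.
import logging

event = {"name": "net.client", "levelno": logging.INFO, "levelname": "INFO",
         "msg": "connected to %s", "args": ("example.org",)}
record = logging.makeLogRecord(event)
assert record.getMessage() == "connected to example.org"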
-#---------------------------------------------------------------------------
-# Formatter classes and functions
-#---------------------------------------------------------------------------
-
-class PercentStyle(object):
-
- default_format = '%(message)s'
- asctime_format = '%(asctime)s'
- asctime_search = '%(asctime)'
-
- def __init__(self, fmt):
- self._fmt = fmt or self.default_format
-
- def usesTime(self):
- return self._fmt.find(self.asctime_search) >= 0
-
- def format(self, record):
- return self._fmt % record.__dict__
-
-class StrFormatStyle(PercentStyle):
- default_format = '{message}'
- asctime_format = '{asctime}'
- asctime_search = '{asctime'
-
- def format(self, record):
- return self._fmt.format(**record.__dict__)
-
-
-class StringTemplateStyle(PercentStyle):
- default_format = '${message}'
- asctime_format = '${asctime}'
- asctime_search = '${asctime}'
-
- def __init__(self, fmt):
- self._fmt = fmt or self.default_format
- self._tpl = Template(self._fmt)
-
- def usesTime(self):
- fmt = self._fmt
- return fmt.find('$asctime') >= 0 or fmt.find(self.asctime_format) >= 0
-
- def format(self, record):
- return self._tpl.substitute(**record.__dict__)
-
-_STYLES = {
- '%': PercentStyle,
- '{': StrFormatStyle,
- '$': StringTemplateStyle
-}
-
-class Formatter(object):
- """
- Formatter instances are used to convert a LogRecord to text.
-
- Formatters need to know how a LogRecord is constructed. They are
- responsible for converting a LogRecord to (usually) a string which can
- be interpreted by either a human or an external system. The base Formatter
- allows a formatting string to be specified. If none is supplied, the
- default value of "%(message)s" is used.
-
- The Formatter can be initialized with a format string which makes use of
- knowledge of the LogRecord attributes - e.g. the default value mentioned
- above makes use of the fact that the user's message and arguments are pre-
- formatted into a LogRecord's message attribute. Currently, the useful
- attributes in a LogRecord are described by:
-
- %(name)s Name of the logger (logging channel)
- %(levelno)s Numeric logging level for the message (DEBUG, INFO,
- WARNING, ERROR, CRITICAL)
- %(levelname)s Text logging level for the message ("DEBUG", "INFO",
- "WARNING", "ERROR", "CRITICAL")
- %(pathname)s Full pathname of the source file where the logging
- call was issued (if available)
- %(filename)s Filename portion of pathname
- %(module)s Module (name portion of filename)
- %(lineno)d Source line number where the logging call was issued
- (if available)
- %(funcName)s Function name
- %(created)f Time when the LogRecord was created (time.time()
- return value)
- %(asctime)s Textual time when the LogRecord was created
- %(msecs)d Millisecond portion of the creation time
- %(relativeCreated)d Time in milliseconds when the LogRecord was created,
- relative to the time the logging module was loaded
- (typically at application startup time)
- %(thread)d Thread ID (if available)
- %(threadName)s Thread name (if available)
- %(process)d Process ID (if available)
- %(message)s The result of record.getMessage(), computed just as
- the record is emitted
- """
-
- converter = time.localtime
-
- def __init__(self, fmt=None, datefmt=None, style='%'):
- """
- Initialize the formatter with specified format strings.
-
- Initialize the formatter either with the specified format string, or a
- default as described above. Allow for specialized date formatting with
- the optional datefmt argument (if omitted, you get the ISO8601 format).
-
- Use a style parameter of '%', '{' or '$' to specify that you want to
- use one of %-formatting, :meth:`str.format` (``{}``) formatting or
- :class:`string.Template` formatting in your format string.
-
- .. versionchanged:: 3.2
- Added the ``style`` parameter.
- """
- if style not in _STYLES:
- raise ValueError('Style must be one of: %s' % ','.join(
- _STYLES.keys()))
- self._style = _STYLES[style](fmt)
- self._fmt = self._style._fmt
- self.datefmt = datefmt
-
- def formatTime(self, record, datefmt=None):
- """
- Return the creation time of the specified LogRecord as formatted text.
-
- This method should be called from format() by a formatter which
- wants to make use of a formatted time. This method can be overridden
- in formatters to provide for any specific requirement, but the
- basic behaviour is as follows: if datefmt (a string) is specified,
- it is used with time.strftime() to format the creation time of the
- record. Otherwise, the ISO8601 format is used. The resulting
- string is returned. This function uses a user-configurable function
- to convert the creation time to a tuple. By default, time.localtime()
- is used; to change this for a particular formatter instance, set the
- 'converter' attribute to a function with the same signature as
- time.localtime() or time.gmtime(). To change it for all formatters,
- for example if you want all logging times to be shown in GMT,
- set the 'converter' attribute in the Formatter class.
- """
- ct = self.converter(record.created)
- if datefmt:
- s = time.strftime(datefmt, ct)
- else:
- t = time.strftime("%Y-%m-%d %H:%M:%S", ct)
- s = "%s,%03d" % (t, record.msecs) # the use of % here is internal
- return s
-
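# Illustrative sketch (not part of the original module): switching timestamps
# to GMT through the 'converter' attribute, as the docstring above describes.
import logging
import time

fmt = logging.Formatter("%(asctime)s %(message)s", datefmt="%Y-%m-%dT%H:%M:%S")
fmt.converter = time.gmtime                    # this formatter instance only
# logging.Formatter.converter = time.gmtime    # or every formatter at once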
- def formatException(self, ei):
- """
- Format and return the specified exception information as a string.
-
- This default implementation just uses
- traceback.print_exception()
- """
- sio = io.StringIO()
- tb = ei[2]
- # See issues #9427, #1553375. Commented out for now.
- #if getattr(self, 'fullstack', False):
- # traceback.print_stack(tb.tb_frame.f_back, file=sio)
- traceback.print_exception(ei[0], ei[1], tb, None, sio)
- s = sio.getvalue()
- sio.close()
- if s[-1:] == "\n":
- s = s[:-1]
- return s
-
- def usesTime(self):
- """
- Check if the format uses the creation time of the record.
- """
- return self._style.usesTime()
-
- def formatMessage(self, record):
- return self._style.format(record)
-
- def formatStack(self, stack_info):
- """
- This method is provided as an extension point for specialized
- formatting of stack information.
-
- The input data is a string as returned from a call to
- :func:`traceback.print_stack`, but with the last trailing newline
- removed.
-
- The base implementation just returns the value passed in.
- """
- return stack_info
-
- def format(self, record):
- """
- Format the specified record as text.
-
- The record's attribute dictionary is used as the operand to a
- string formatting operation which yields the returned string.
- Before formatting the dictionary, a couple of preparatory steps
- are carried out. The message attribute of the record is computed
- using LogRecord.getMessage(). If the formatting string uses the
- time (as determined by a call to usesTime()), formatTime() is
- called to format the event time. If there is exception information,
- it is formatted using formatException() and appended to the message.
- """
- record.message = record.getMessage()
- if self.usesTime():
- record.asctime = self.formatTime(record, self.datefmt)
- s = self.formatMessage(record)
- if record.exc_info:
- # Cache the traceback text to avoid converting it multiple times
- # (it's constant anyway)
- if not record.exc_text:
- record.exc_text = self.formatException(record.exc_info)
- if record.exc_text:
- if s[-1:] != "\n":
- s = s + "\n"
- s = s + record.exc_text
- if record.stack_info:
- if s[-1:] != "\n":
- s = s + "\n"
- s = s + self.formatStack(record.stack_info)
- return s
-
-#
-# The default formatter to use when no other is specified
-#
-_defaultFormatter = Formatter()
-
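# Illustrative sketch (not part of the original module): the same record
# rendered with each of the three supported 'style' values. The logger name
# "demo" and the message are arbitrary.
import logging

percent = logging.Formatter("%(levelname)s:%(name)s:%(message)s", style="%")
braces = logging.Formatter("{levelname}:{name}:{message}", style="{")
dollar = logging.Formatter("${levelname}:${name}:${message}", style="$")
rec = logging.LogRecord("demo", logging.WARNING, __file__, 1, "disk at %d%%", (90,), None)
assert percent.format(rec) == braces.format(rec) == dollar.format(rec) == "WARNING:demo:disk at 90%"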
-class BufferingFormatter(object):
- """
- A formatter suitable for formatting a number of records.
- """
- def __init__(self, linefmt=None):
- """
- Optionally specify a formatter which will be used to format each
- individual record.
- """
- if linefmt:
- self.linefmt = linefmt
- else:
- self.linefmt = _defaultFormatter
-
- def formatHeader(self, records):
- """
- Return the header string for the specified records.
- """
- return ""
-
- def formatFooter(self, records):
- """
- Return the footer string for the specified records.
- """
- return ""
-
- def format(self, records):
- """
- Format the specified records and return the result as a string.
- """
- rv = ""
- if len(records) > 0:
- rv = rv + self.formatHeader(records)
- for record in records:
- rv = rv + self.linefmt.format(record)
- rv = rv + self.formatFooter(records)
- return rv
-
-#---------------------------------------------------------------------------
-# Filter classes and functions
-#---------------------------------------------------------------------------
-
-class Filter(object):
- """
- Filter instances are used to perform arbitrary filtering of LogRecords.
-
- Loggers and Handlers can optionally use Filter instances to filter
- records as desired. The base filter class only allows events which are
- below a certain point in the logger hierarchy. For example, a filter
- initialized with "A.B" will allow events logged by loggers "A.B",
- "A.B.C", "A.B.C.D", "A.B.D" etc. but not "A.BB", "B.A.B" etc. If
- initialized with the empty string, all events are passed.
- """
- def __init__(self, name=''):
- """
- Initialize a filter.
-
- Initialize with the name of the logger which, together with its
- children, will have its events allowed through the filter. If no
- name is specified, allow every event.
- """
- self.name = name
- self.nlen = len(name)
-
- def filter(self, record):
- """
- Determine if the specified record is to be logged.
-
- Is the specified record to be logged? Returns 0 for no, nonzero for
- yes. If deemed appropriate, the record may be modified in-place.
- """
- if self.nlen == 0:
- return 1
- elif self.name == record.name:
- return 1
- elif record.name.find(self.name, 0, self.nlen) != 0:
- return 0
- return (record.name[self.nlen] == ".")
-
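# Illustrative sketch (not part of the original module): the name-prefix rule
# described in the Filter docstring above, checked against made-up records.
import logging

def make(name):
    return logging.LogRecord(name, logging.INFO, __file__, 1, "msg", None, None)

prefix_filter = logging.Filter("A.B")
assert prefix_filter.filter(make("A.B"))        # exact match passes
assert prefix_filter.filter(make("A.B.C"))      # descendants pass
assert not prefix_filter.filter(make("A.BB"))   # same prefix but not a child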
-class Filterer(object):
- """
- A base class for loggers and handlers which allows them to share
- common code.
- """
- def __init__(self):
- """
- Initialize the list of filters to be an empty list.
- """
- self.filters = []
-
- def addFilter(self, filter):
- """
- Add the specified filter to this handler.
- """
- if not (filter in self.filters):
- self.filters.append(filter)
-
- def removeFilter(self, filter):
- """
- Remove the specified filter from this handler.
- """
- if filter in self.filters:
- self.filters.remove(filter)
-
- def filter(self, record):
- """
- Determine if a record is loggable by consulting all the filters.
-
- The default is to allow the record to be logged; any filter can veto
- this and the record is then dropped. Returns a zero value if a record
- is to be dropped, else non-zero.
-
- .. versionchanged:: 3.2
-
- Allow filters to be just callables.
- """
- rv = 1
- for f in self.filters:
- if hasattr(f, 'filter'):
- result = f.filter(record)
- else:
- result = f(record) # assume callable - will raise if not
- if not result:
- rv = 0
- break
- return rv
-
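# Illustrative sketch (not part of the original module): since a filter may be
# any callable (see the docstring above), a lambda can drop records directly.
# The logger name and the filtering rule are arbitrary examples.
import logging

logger = logging.getLogger("payments")
logger.addFilter(lambda record: "card" not in record.getMessage())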
-#---------------------------------------------------------------------------
-# Handler classes and functions
-#---------------------------------------------------------------------------
-
-any _handlers = weakref.WeakValueDictionary() #map of handler names to handlers
-any _handlerList = [] # added to allow handlers to be removed in reverse of order initialized
-
-def _removeHandlerRef(wr):
- """
- Remove a handler reference from the internal cleanup list.
- """
- # This function can be called during module teardown, when globals are
- # set to None. If _acquireLock is None, assume this is the case and do
- # nothing.
- if _acquireLock is not None:
- _acquireLock()
- try:
- if wr in _handlerList:
- _handlerList.remove(wr)
- finally:
- _releaseLock()
-
-def _addHandlerRef(handler):
- """
- Add a handler to the internal cleanup list using a weak reference.
- """
- _acquireLock()
- try:
- _handlerList.append(weakref.ref(handler, _removeHandlerRef))
- finally:
- _releaseLock()
-
-class Handler(Filterer):
- """
- Handler instances dispatch logging events to specific destinations.
-
- The base handler class. Acts as a placeholder which defines the Handler
- interface. Handlers can optionally use Formatter instances to format
- records as desired. By default, no formatter is specified; in this case,
- the 'raw' message as determined by record.message is logged.
- """
- def __init__(self, level=NOTSET):
- """
- Initializes the instance - basically setting the formatter to None
- and the filter list to empty.
- """
- Filterer.__init__(self)
- self._name = None
- self.level = _checkLevel(level)
- self.formatter = None
- # Add the handler to the global _handlerList (for cleanup on shutdown)
- _addHandlerRef(self)
- self.createLock()
-
- def get_name(self):
- return self._name
-
- def set_name(self, name):
- _acquireLock()
- try:
- if self._name in _handlers:
- del _handlers[self._name]
- self._name = name
- if name:
- _handlers[name] = self
- finally:
- _releaseLock()
-
- #name = property(get_name, set_name)
-
- def createLock(self):
- """
- Acquire a thread lock for serializing access to the underlying I/O.
- """
- if thread:
- self.lock = threading.RLock()
- else:
- self.lock = None
-
- def acquire(self):
- """
- Acquire the I/O thread lock.
- """
- if self.lock:
- self.lock.acquire()
-
- def release(self):
- """
- Release the I/O thread lock.
- """
- if self.lock:
- self.lock.release()
-
- def setLevel(self, level):
- """
- Set the logging level of this handler.
- """
- self.level = _checkLevel(level)
-
- def format(self, record):
- """
- Format the specified record.
-
- If a formatter is set, use it. Otherwise, use the default formatter
- for the module.
- """
- if self.formatter:
- fmt = self.formatter
- else:
- fmt = _defaultFormatter
- return fmt.format(record)
-
- def emit(self, record):
- """
- Do whatever it takes to actually log the specified logging record.
-
- This version is intended to be implemented by subclasses and so
- raises a NotImplementedError.
- """
- raise NotImplementedError('emit must be implemented '
- 'by Handler subclasses')
-
- def handle(self, record):
- """
- Conditionally emit the specified logging record.
-
- Emission depends on filters which may have been added to the handler.
- Wrap the actual emission of the record with acquisition/release of
- the I/O thread lock. Returns whether the filter passed the record for
- emission.
- """
- rv = self.filter(record)
- if rv:
- self.acquire()
- try:
- self.emit(record)
- finally:
- self.release()
- return rv
-
- def setFormatter(self, fmt):
- """
- Set the formatter for this handler.
- """
- self.formatter = fmt
-
- def flush(self):
- """
- Ensure all logging output has been flushed.
-
- This version does nothing and is intended to be implemented by
- subclasses.
- """
- pass
-
- def close(self):
- """
- Tidy up any resources used by the handler.
-
- This version removes the handler from an internal map of handlers,
- _handlers, which is used for handler lookup by name. Subclasses
- should ensure that this gets called from overridden close()
- methods.
- """
- #get the module data lock, as we're updating a shared structure.
- _acquireLock()
- try: #unlikely to raise an exception, but you never know...
- if self._name and self._name in _handlers:
- del _handlers[self._name]
- finally:
- _releaseLock()
-
- def handleError(self, record):
- """
- Handle errors which occur during an emit() call.
-
- This method should be called from handlers when an exception is
- encountered during an emit() call. If raiseExceptions is false,
- exceptions get silently ignored. This is usually what is wanted
- for a logging system - most users will not care about errors in
- the logging system; they are more interested in application errors.
- You could, however, replace this with a custom handler if you wish.
- The record which was being processed is passed in to this method.
- """
- if raiseExceptions and sys.stderr: # see issue 13807
- ei = sys.exc_info()
- try:
- traceback.print_exception(ei[0], ei[1], ei[2],
- None, sys.stderr)
- sys.stderr.write('Logged from file %s, line %s\n' % (
- record.filename, record.lineno))
- except IOError:
- pass # see issue 5971
- finally:
- ei = None
-
-class StreamHandler(Handler):
- """
- A handler class which writes logging records, appropriately formatted,
- to a stream. Note that this class does not close the stream, as
- sys.stdout or sys.stderr may be used.
- """
-
- terminator = '\n'
-
- def __init__(self, stream=None):
- """
- Initialize the handler.
-
- If stream is not specified, sys.stderr is used.
- """
- Handler.__init__(self)
- if stream is None:
- stream = sys.stderr
- self.stream = stream
-
- def flush(self):
- """
- Flushes the stream.
- """
- if self.stream and hasattr(self.stream, "flush"):
- self.stream.flush()
-
- def emit(self, record):
- """
- Emit a record.
-
- If a formatter is specified, it is used to format the record.
- The record is then written to the stream with a trailing newline. If
- exception information is present, it is formatted using
- traceback.print_exception and appended to the stream. If the stream
- has an 'encoding' attribute, it is used to determine how to do the
- output to the stream.
- """
- try:
- msg = self.format(record)
- stream = self.stream
- stream.write(msg)
- stream.write(self.terminator)
- self.flush()
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
- self.handleError(record)
-
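# Illustrative sketch (not part of the original module): the usual manual
# wiring of a StreamHandler onto a logger. Names here are arbitrary examples.
import logging
import sys

handler = logging.StreamHandler(sys.stdout)      # sys.stderr if no stream is given
handler.setLevel(logging.INFO)
handler.setFormatter(logging.Formatter("%(levelname)s %(name)s: %(message)s"))
log = logging.getLogger("demo.stream")
log.addHandler(handler)
log.setLevel(logging.DEBUG)
log.info("handler attached")                     # INFO demo.stream: handler attached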
-class FileHandler(StreamHandler):
- """
- A handler class which writes formatted logging records to disk files.
- """
- def __init__(self, filename, mode='a', encoding=None, delay=0):
- """
- Open the specified file and use it as the stream for logging.
- """
- #keep the absolute path, otherwise derived classes which use this
- #may come a cropper when the current directory changes
- if codecs is None:
- encoding = None
- self.baseFilename = os.path.abspath(filename)
- self.mode = mode
- self.encoding = encoding
- if delay:
- #We don't open the stream, but we still need to call the
- #Handler constructor to set level, formatter, lock etc.
- Handler.__init__(self)
- self.stream = None
- else:
- StreamHandler.__init__(self, self._open())
-
- def close(self):
- """
- Closes the stream.
- """
- if self.stream:
- self.flush()
- if hasattr(self.stream, "close"):
- self.stream.close()
- StreamHandler.close(self)
- self.stream = None
-
- def _open(self):
- """
- Open the current base file with the (original) mode and encoding.
- Return the resulting stream.
- """
- if self.encoding is None:
- stream = open(self.baseFilename, self.mode)
- else:
- stream = codecs.open(self.baseFilename, self.mode, self.encoding)
- return stream
-
- def emit(self, record):
- """
- Emit a record.
-
- If the stream was not opened because 'delay' was specified in the
- constructor, open it before calling the superclass's emit.
- """
- if self.stream is None:
- self.stream = self._open()
- StreamHandler.emit(self, record)
-
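# Illustrative sketch (not part of the original module): with a true 'delay'
# the file is only opened on the first emit. The path is an example.
import logging

fh = logging.FileHandler("/tmp/app.log", mode="a", encoding="utf-8", delay=True)
assert fh.stream is None                         # nothing opened yet
logging.getLogger("demo.file").addHandler(fh)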
-class _StderrHandler(StreamHandler):
- """
- This class is like a StreamHandler using sys.stderr, but always uses
- whatever sys.stderr is currently set to rather than the value of
- sys.stderr at handler construction time.
- """
- def __init__(self, level=NOTSET):
- """
- Initialize the handler.
- """
- Handler.__init__(self, level)
-
- #@property
- #def stream(self):
- # return sys.stderr
-
-
-_defaultLastResort = _StderrHandler(WARNING)
-lastResort = _defaultLastResort
-
-#---------------------------------------------------------------------------
-# Manager classes and functions
-#---------------------------------------------------------------------------
-
-class PlaceHolder(object):
- """
- PlaceHolder instances are used in the Manager logger hierarchy to take
- the place of nodes for which no loggers have been defined. This class is
- intended for internal use only and not as part of the public API.
- """
- def __init__(self, alogger):
- """
- Initialize with the specified logger being a child of this placeholder.
- """
- self.loggerMap = { alogger : None }
-
- def append(self, alogger):
- """
- Add the specified logger as a child of this placeholder.
- """
- if alogger not in self.loggerMap:
- self.loggerMap[alogger] = None
-
-#
-# Determine which class to use when instantiating loggers.
-#
-any _loggerClass = None
-
-def setLoggerClass(klass):
- """
- Set the class to be used when instantiating a logger. The class should
- define __init__() such that only a name argument is required, and the
- __init__() should call Logger.__init__()
- """
- if klass != Logger:
- if not issubclass(klass, Logger):
- raise TypeError("logger not derived from logging.Logger: "
- + klass.__name__)
- global _loggerClass
- _loggerClass = klass
-
-def getLoggerClass():
- """
- Return the class to be used when instantiating a logger.
- """
-
- return _loggerClass
-
-class Manager(object):
- """
- There is [under normal circumstances] just one Manager instance, which
- holds the hierarchy of loggers.
- """
- def __init__(self, rootnode):
- """
- Initialize the manager with the root node of the logger hierarchy.
- """
- self.root = rootnode
- self.disable = 0
- self.emittedNoHandlerWarning = False
- self.loggerDict = {}
- self.loggerClass = None
- self.logRecordFactory = None
-
- def getLogger(self, name):
- """
- Get a logger with the specified name (channel name), creating it
- if it doesn't yet exist. This name is a dot-separated hierarchical
- name, such as "a", "a.b", "a.b.c" or similar.
-
- If a PlaceHolder existed for the specified name [i.e. the logger
- didn't exist but a child of it did], replace it with the created
- logger and fix up the parent/child references which pointed to the
- placeholder to now point to the logger.
- """
- rv = None
- if not isinstance(name, str):
- raise TypeError('A logger name must be a string')
- _acquireLock()
- try:
- if name in self.loggerDict:
- rv = self.loggerDict[name]
- if isinstance(rv, PlaceHolder):
- ph = rv
- rv = (self.loggerClass or _loggerClass)(name)
- rv.manager = self
- self.loggerDict[name] = rv
- self._fixupChildren(ph, rv)
- self._fixupParents(rv)
- else:
- rv = (self.loggerClass or _loggerClass)(name)
- rv.manager = self
- self.loggerDict[name] = rv
- self._fixupParents(rv)
- finally:
- _releaseLock()
- return rv
-
- def setLoggerClass(self, klass):
- """
- Set the class to be used when instantiating a logger with this Manager.
- """
- if klass != Logger:
- if not issubclass(klass, Logger):
- raise TypeError("logger not derived from logging.Logger: "
- + klass.__name__)
- self.loggerClass = klass
-
- def setLogRecordFactory(self, factory):
- """
- Set the factory to be used when instantiating a log record with this
- Manager.
- """
- self.logRecordFactory = factory
-
- def _fixupParents(self, alogger):
- """
- Ensure that there are either loggers or placeholders all the way
- from the specified logger to the root of the logger hierarchy.
- """
- name = alogger.name
- i = name.rfind(".")
- rv = None
- while (i > 0) and not rv:
- substr = name[:i]
- if substr not in self.loggerDict:
- self.loggerDict[substr] = PlaceHolder(alogger)
- else:
- obj = self.loggerDict[substr]
- if isinstance(obj, Logger):
- rv = obj
- else:
- assert isinstance(obj, PlaceHolder)
- obj.append(alogger)
- i = name.rfind(".", 0, i - 1)
- if not rv:
- rv = self.root
- alogger.parent = rv
-
- def _fixupChildren(self, ph, alogger):
- """
- Ensure that children of the placeholder ph are connected to the
- specified logger.
- """
- name = alogger.name
- namelen = len(name)
- for c in ph.loggerMap.keys():
- #The if means ... if not c.parent.name.startswith(name)
- if c.parent.name[:namelen] != name:
- alogger.parent = c.parent
- c.parent = alogger
-
-#---------------------------------------------------------------------------
-# Logger classes and functions
-#---------------------------------------------------------------------------
-
-class Logger(Filterer):
- """
- Instances of the Logger class represent a single logging channel. A
- "logging channel" indicates an area of an application. Exactly how an
- "area" is defined is up to the application developer. Since an
- application can have any number of areas, logging channels are identified
- by a unique string. Application areas can be nested (e.g. an area
- of "input processing" might include sub-areas "read CSV files", "read
- XLS files" and "read Gnumeric files"). To cater for this natural nesting,
- channel names are organized into a namespace hierarchy where levels are
- separated by periods, much like the Java or Python package namespace. So
- in the instance given above, channel names might be "input" for the upper
- level, and "input.csv", "input.xls" and "input.gnu" for the sub-levels.
- There is no arbitrary limit to the depth of nesting.
- """
-
- any root
- any manager
-
- def __init__(self, name, level=NOTSET):
- """
- Initialize the logger with a name and an optional level.
- """
- Filterer.__init__(self)
- self.name = name
- self.level = _checkLevel(level)
- self.parent = None
- self.propagate = 1
- self.handlers = []
- self.disabled = 0
-
- def setLevel(self, level):
- """
- Set the logging level of this logger.
- """
- self.level = _checkLevel(level)
-
- def debug(self, msg, *args, **kwargs):
- """
- Log 'msg % args' with severity 'DEBUG'.
-
- To pass exception information, use the keyword argument exc_info with
- a true value, e.g.
-
- logger.debug("Houston, we have a %s", "thorny problem", exc_info=1)
- """
- if self.isEnabledFor(DEBUG):
- self._log(DEBUG, msg, args, **kwargs)
-
- def info(self, msg, *args, **kwargs):
- """
- Log 'msg % args' with severity 'INFO'.
-
- To pass exception information, use the keyword argument exc_info with
- a true value, e.g.
-
- logger.info("Houston, we have a %s", "interesting problem", exc_info=1)
- """
- if self.isEnabledFor(INFO):
- self._log(INFO, msg, args, **kwargs)
-
- def warning(self, msg, *args, **kwargs):
- """
- Log 'msg % args' with severity 'WARNING'.
-
- To pass exception information, use the keyword argument exc_info with
- a true value, e.g.
-
- logger.warning("Houston, we have a %s", "bit of a problem", exc_info=1)
- """
- if self.isEnabledFor(WARNING):
- self._log(WARNING, msg, args, **kwargs)
-
- warn = warning
-
- def error(self, msg, *args, **kwargs):
- """
- Log 'msg % args' with severity 'ERROR'.
-
- To pass exception information, use the keyword argument exc_info with
- a true value, e.g.
-
- logger.error("Houston, we have a %s", "major problem", exc_info=1)
- """
- if self.isEnabledFor(ERROR):
- self._log(ERROR, msg, args, **kwargs)
-
- def exception(self, msg, *args, **kwargs):
- """
- Convenience method for logging an ERROR with exception information.
- """
- kwargs['exc_info'] = True
- self.error(msg, *args, **kwargs)
-
- def critical(self, msg, *args, **kwargs):
- """
- Log 'msg % args' with severity 'CRITICAL'.
-
- To pass exception information, use the keyword argument exc_info with
- a true value, e.g.
-
- logger.critical("Houston, we have a %s", "major disaster", exc_info=1)
- """
- if self.isEnabledFor(CRITICAL):
- self._log(CRITICAL, msg, args, **kwargs)
-
- fatal = critical
-
- def log(self, level, msg, *args, **kwargs):
- """
- Log 'msg % args' with the integer severity 'level'.
-
- To pass exception information, use the keyword argument exc_info with
- a true value, e.g.
-
- logger.log(level, "We have a %s", "mysterious problem", exc_info=1)
- """
- if not isinstance(level, int):
- if raiseExceptions:
- raise TypeError("level must be an integer")
- else:
- return
- if self.isEnabledFor(level):
- self._log(level, msg, args, **kwargs)
-
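# Illustrative sketch (not part of the original module): the exc_info pattern
# referred to in the method docstrings above. The logger name is an example.
import logging

log = logging.getLogger("demo.errors")
try:
    1 / 0
except ZeroDivisionError:
    log.exception("division failed")                                   # ERROR plus traceback
    log.log(logging.WARNING, "continuing after %s", "error", exc_info=True)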
- def findCaller(self, stack_info=False):
- """
- Find the stack frame of the caller so that we can note the source
- file name, line number and function name.
- """
- f = currentframe()
- #On some versions of IronPython, currentframe() returns None if
- #IronPython isn't run with -X:Frames.
- if f is not None:
- f = f.f_back
- rv = "(unknown file)", 0, "(unknown function)", None
- while hasattr(f, "f_code"):
- co = f.f_code
- filename = os.path.normcase(co.co_filename)
- if filename == _srcfile:
- f = f.f_back
- continue
- sinfo = None
- if stack_info:
- sio = io.StringIO()
- sio.write('Stack (most recent call last):\n')
- traceback.print_stack(f, file=sio)
- sinfo = sio.getvalue()
- if sinfo[-1] == '\n':
- sinfo = sinfo[:-1]
- sio.close()
- rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
- break
- return rv
-
- def makeRecord(self, name, level, fn, lno, msg, args, exc_info,
- func=None, extra=None, sinfo=None):
- """
- A factory method which can be overridden in subclasses to create
- specialized LogRecords.
- """
- rv = _logRecordFactory(name, level, fn, lno, msg, args, exc_info, func,
- sinfo)
- if extra is not None:
- for key in extra:
- if (key in ["message", "asctime"]) or (key in rv.__dict__):
- raise KeyError("Attempt to overwrite %r in LogRecord" % key)
- rv.__dict__[key] = extra[key]
- return rv
-
- def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False):
- """
- Low-level logging routine which creates a LogRecord and then calls
- all the handlers of this logger to handle the record.
- """
- sinfo = None
- if _srcfile:
- #IronPython doesn't track Python frames, so findCaller throws an
- #exception on some versions of IronPython. We trap it here so that
- #IronPython can use logging.
- try:
- fn, lno, func, sinfo = self.findCaller(stack_info)
- except ValueError:
- fn, lno, func = "(unknown file)", 0, "(unknown function)"
- else:
- fn, lno, func = "(unknown file)", 0, "(unknown function)"
- if exc_info:
- if not isinstance(exc_info, tuple):
- exc_info = sys.exc_info()
- record = self.makeRecord(self.name, level, fn, lno, msg, args,
- exc_info, func, extra, sinfo)
- self.handle(record)
-
- def handle(self, record):
- """
- Call the handlers for the specified record.
-
- This method is used for unpickled records received from a socket, as
- well as those created locally. Logger-level filtering is applied.
- """
- if (not self.disabled) and self.filter(record):
- self.callHandlers(record)
-
- def addHandler(self, hdlr):
- """
- Add the specified handler to this logger.
- """
- _acquireLock()
- try:
- if not (hdlr in self.handlers):
- self.handlers.append(hdlr)
- finally:
- _releaseLock()
-
- def removeHandler(self, hdlr):
- """
- Remove the specified handler from this logger.
- """
- _acquireLock()
- try:
- if hdlr in self.handlers:
- self.handlers.remove(hdlr)
- finally:
- _releaseLock()
-
- def hasHandlers(self):
- """
- See if this logger has any handlers configured.
-
- Loop through all handlers for this logger and its parents in the
- logger hierarchy. Return True if a handler was found, else False.
- Stop searching up the hierarchy whenever a logger with the "propagate"
- attribute set to zero is found - that will be the last logger which
- is checked for the existence of handlers.
- """
- c = self
- rv = False
- while c:
- if c.handlers:
- rv = True
- break
- if not c.propagate:
- break
- else:
- c = c.parent
- return rv
-
- def callHandlers(self, record):
- """
- Pass a record to all relevant handlers.
-
- Loop through all handlers for this logger and its parents in the
- logger hierarchy. If no handler was found, output a one-off error
- message to sys.stderr. Stop searching up the hierarchy whenever a
- logger with the "propagate" attribute set to zero is found - that
- will be the last logger whose handlers are called.
- """
- c = self
- found = 0
- while c:
- for hdlr in c.handlers:
- found = found + 1
- if record.levelno >= hdlr.level:
- hdlr.handle(record)
- if not c.propagate:
- c = None #break out
- else:
- c = c.parent
- if (found == 0):
- if lastResort:
- if record.levelno >= lastResort.level:
- lastResort.handle(record)
- elif raiseExceptions and not self.manager.emittedNoHandlerWarning:
- sys.stderr.write("No handlers could be found for logger"
- " \"%s\"\n" % self.name)
- self.manager.emittedNoHandlerWarning = True
-
- def getEffectiveLevel(self):
- """
- Get the effective level for this logger.
-
- Loop through this logger and its parents in the logger hierarchy,
- looking for a non-zero logging level. Return the first one found.
- """
- logger = self
- while logger:
- if logger.level:
- return logger.level
- logger = logger.parent
- return NOTSET
-
- def isEnabledFor(self, level):
- """
- Is this logger enabled for level 'level'?
- """
- if self.manager.disable >= level:
- return 0
- return level >= self.getEffectiveLevel()
-
- def getChild(self, suffix):
- """
- Get a logger which is a descendant to this one.
-
- This is a convenience method, such that
-
- logging.getLogger('abc').getChild('def.ghi')
-
- is the same as
-
- logging.getLogger('abc.def.ghi')
-
- It's useful, for example, when the parent logger is named using
- __name__ rather than a literal string.
- """
- if self.root is not self:
- suffix = '.'.join((self.name, suffix))
- return self.manager.getLogger(suffix)
-
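# Illustrative sketch (not part of the original module): the equivalence stated
# in the getChild() docstring above, using example logger names.
import logging

assert logging.getLogger("abc").getChild("def.ghi") is logging.getLogger("abc.def.ghi")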
-class RootLogger(Logger):
- """
- A root logger is not that different to any other logger, except that
- it must have a logging level and there is only one instance of it in
- the hierarchy.
- """
- def __init__(self, level):
- """
- Initialize the logger with the name "root".
- """
- Logger.__init__(self, "root", level)
-
-_loggerClass = Logger
-
-class LoggerAdapter(object):
- """
- An adapter for loggers which makes it easier to specify contextual
- information in logging output.
- """
-
- def __init__(self, logger, extra):
- """
- Initialize the adapter with a logger and a dict-like object which
- provides contextual information. This constructor signature allows
- easy stacking of LoggerAdapters, if so desired.
-
- You can effectively pass keyword arguments as shown in the
- following example:
-
- adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2"))
- """
- self.logger = logger
- self.extra = extra
-
- def process(self, msg, kwargs):
- """
- Process the logging message and keyword arguments passed in to
- a logging call to insert contextual information. You can either
- manipulate the message itself, the keyword args or both. Return
- the message and kwargs modified (or not) to suit your needs.
-
- Normally, you'll only need to override this one method in a
- LoggerAdapter subclass for your specific needs.
- """
- kwargs["extra"] = self.extra
- return msg, kwargs
-
- #
- # Boilerplate convenience methods
- #
- def debug(self, msg, *args, **kwargs):
- """
- Delegate a debug call to the underlying logger.
- """
- self.log(DEBUG, msg, *args, **kwargs)
-
- def info(self, msg, *args, **kwargs):
- """
- Delegate an info call to the underlying logger.
- """
- self.log(INFO, msg, *args, **kwargs)
-
- def warning(self, msg, *args, **kwargs):
- """
- Delegate a warning call to the underlying logger.
- """
- self.log(WARNING, msg, *args, **kwargs)
-
- warn = warning
-
- def error(self, msg, *args, **kwargs):
- """
- Delegate an error call to the underlying logger.
- """
- self.log(ERROR, msg, *args, **kwargs)
-
- def exception(self, msg, *args, **kwargs):
- """
- Delegate an exception call to the underlying logger.
- """
- kwargs["exc_info"] = 1
- self.log(ERROR, msg, *args, **kwargs)
-
- def critical(self, msg, *args, **kwargs):
- """
- Delegate a critical call to the underlying logger.
- """
- self.log(CRITICAL, msg, *args, **kwargs)
-
- def log(self, level, msg, *args, **kwargs):
- """
- Delegate a log call to the underlying logger, after adding
- contextual information from this adapter instance.
- """
- if self.isEnabledFor(level):
- msg, kwargs = self.process(msg, kwargs)
- self.logger._log(level, msg, args, **kwargs)
-
- def isEnabledFor(self, level):
- """
- Is this logger enabled for level 'level'?
- """
- if self.logger.manager.disable >= level:
- return False
- return level >= self.getEffectiveLevel()
-
- def setLevel(self, level):
- """
- Set the specified level on the underlying logger.
- """
- self.logger.setLevel(level)
-
- def getEffectiveLevel(self):
- """
- Get the effective level for the underlying logger.
- """
- return self.logger.getEffectiveLevel()
-
- def hasHandlers(self):
- """
- See if the underlying logger has any handlers.
- """
- return self.logger.hasHandlers()
-
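# Illustrative sketch (not part of the original module): a LoggerAdapter
# injecting a contextual field that the format string can reference. The
# field name "client" and its value are made up for this example.
import logging

logging.basicConfig(format="%(client)s %(levelname)s %(message)s")
adapter = logging.LoggerAdapter(logging.getLogger("demo.adapter"), {"client": "10.0.0.7"})
adapter.warning("quota exceeded")        # 10.0.0.7 WARNING quota exceeded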
-root = RootLogger(WARNING)
-Logger.root = root
-Logger.manager = Manager(Logger.root)
-
-#---------------------------------------------------------------------------
-# Configuration classes and functions
-#---------------------------------------------------------------------------
-
-BASIC_FORMAT = "%(levelname)s:%(name)s:%(message)s"
-
-def basicConfig(**kwargs):
- """
- Do basic configuration for the logging system.
-
- This function does nothing if the root logger already has handlers
- configured. It is a convenience method intended for use by simple scripts
- to do one-shot configuration of the logging package.
-
- The default behaviour is to create a StreamHandler which writes to
- sys.stderr, set a formatter using the BASIC_FORMAT format string, and
- add the handler to the root logger.
-
- A number of optional keyword arguments may be specified, which can alter
- the default behaviour.
-
- filename Specifies that a FileHandler be created, using the specified
- filename, rather than a StreamHandler.
- filemode Specifies the mode to open the file, if filename is specified
- (if filemode is unspecified, it defaults to 'a').
- format Use the specified format string for the handler.
- datefmt Use the specified date/time format.
- style If a format string is specified, use this to specify the
- type of format string (possible values '%', '{', '$', for
- %-formatting, :meth:`str.format` and :class:`string.Template`
- - defaults to '%').
- level Set the root logger level to the specified level.
- stream Use the specified stream to initialize the StreamHandler. Note
- that this argument is incompatible with 'filename' - if both
- are present, 'stream' is ignored.
-
- Note that you could specify a stream created using open(filename, mode)
- rather than passing the filename and mode in. However, it should be
- remembered that StreamHandler does not close its stream (since it may be
- using sys.stdout or sys.stderr), whereas FileHandler closes its stream
- when the handler is closed.
-
- .. versionchanged:: 3.2
- Added the ``style`` parameter.
- """
- # Add thread safety in case someone mistakenly calls
- # basicConfig() from multiple threads
- _acquireLock()
- try:
- if len(root.handlers) == 0:
- filename = kwargs.get("filename")
- if filename:
- mode = kwargs.get("filemode", 'a')
- hdlr = FileHandler(filename, mode)
- else:
- stream = kwargs.get("stream")
- hdlr = StreamHandler(stream)
- fs = kwargs.get("format", BASIC_FORMAT)
- dfs = kwargs.get("datefmt", None)
- style = kwargs.get("style", '%')
- fmt = Formatter(fs, dfs, style)
- hdlr.setFormatter(fmt)
- root.addHandler(hdlr)
- level = kwargs.get("level")
- if level is not None:
- root.setLevel(level)
- finally:
- _releaseLock()
-
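# Illustrative sketch (not part of the original module): one-shot configuration
# with the keyword arguments documented above. The filename is an example path.
import logging

logging.basicConfig(
    filename="/tmp/app.log",
    filemode="w",
    level=logging.DEBUG,
    format="{asctime} {levelname} {name}: {message}",
    datefmt="%H:%M:%S",
    style="{",
)
logging.getLogger("demo").debug("configured")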
-#---------------------------------------------------------------------------
-# Utility functions at module level.
-# Basically delegate everything to the root logger.
-#---------------------------------------------------------------------------
-
-def getLogger(name=None):
- """
- Return a logger with the specified name, creating it if necessary.
-
- If no name is specified, return the root logger.
- """
- if name:
- return Logger.manager.getLogger(name)
- else:
- return root
-
-def critical(msg, *args, **kwargs):
- """
- Log a message with severity 'CRITICAL' on the root logger. If the logger
- has no handlers, call basicConfig() to add a console handler with a
- pre-defined format.
- """
- if len(root.handlers) == 0:
- basicConfig()
- root.critical(msg, *args, **kwargs)
-
-fatal = critical
-
-def error(msg, *args, **kwargs):
- """
- Log a message with severity 'ERROR' on the root logger. If the logger has
- no handlers, call basicConfig() to add a console handler with a pre-defined
- format.
- """
- if len(root.handlers) == 0:
- basicConfig()
- root.error(msg, *args, **kwargs)
-
-def exception(msg, *args, **kwargs):
- """
- Log a message with severity 'ERROR' on the root logger, with exception
- information. If the logger has no handlers, basicConfig() is called to add
- a console handler with a pre-defined format.
- """
- kwargs['exc_info'] = True
- error(msg, *args, **kwargs)
-
-def warning(msg, *args, **kwargs):
- """
- Log a message with severity 'WARNING' on the root logger. If the logger has
- no handlers, call basicConfig() to add a console handler with a pre-defined
- format.
- """
- if len(root.handlers) == 0:
- basicConfig()
- root.warning(msg, *args, **kwargs)
-
-warn = warning
-
-def info(msg, *args, **kwargs):
- """
- Log a message with severity 'INFO' on the root logger. If the logger has
- no handlers, call basicConfig() to add a console handler with a pre-defined
- format.
- """
- if len(root.handlers) == 0:
- basicConfig()
- root.info(msg, *args, **kwargs)
-
-def debug(msg, *args, **kwargs):
- """
- Log a message with severity 'DEBUG' on the root logger. If the logger has
- no handlers, call basicConfig() to add a console handler with a pre-defined
- format.
- """
- if len(root.handlers) == 0:
- basicConfig()
- root.debug(msg, *args, **kwargs)
-
-def log(level, msg, *args, **kwargs):
- """
- Log 'msg % args' with the integer severity 'level' on the root logger. If
- the logger has no handlers, call basicConfig() to add a console handler
- with a pre-defined format.
- """
- if len(root.handlers) == 0:
- basicConfig()
- root.log(level, msg, *args, **kwargs)
-
-def disable(level):
- """
- Disable all logging calls of severity 'level' and below.
- """
- root.manager.disable = level
-
-def shutdown(handlerList=_handlerList):
- """
- Perform any cleanup actions in the logging system (e.g. flushing
- buffers).
-
- Should be called at application exit.
- """
- for wr in reversed(handlerList[:]):
- #errors might occur, for example, if files are locked
- #we just ignore them if raiseExceptions is not set
- try:
- h = wr()
- if h:
- try:
- h.acquire()
- h.flush()
- h.close()
- except (IOError, ValueError):
- # Ignore errors which might be caused
- # because handlers have been closed but
- # references to them are still around at
- # application exit.
- pass
- finally:
- h.release()
- except:
- if raiseExceptions:
- raise
- #else, swallow
-
-#Let's try and shutdown automatically on application exit...
-import atexit
-atexit.register(shutdown)
-
-# Null handler
-
-class NullHandler(Handler):
- """
- This handler does nothing. It's intended to be used to avoid the
- "No handlers could be found for logger XXX" one-off warning. This is
- important for library code, which may contain code to log events. If a user
- of the library does not configure logging, the one-off warning might be
- produced; to avoid this, the library developer simply needs to instantiate
- a NullHandler and add it to the top-level logger of the library module or
- package.
- """
- def handle(self, record):
- pass
-
- def emit(self, record):
- pass
-
- def createLock(self):
- self.lock = None
-
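# Illustrative sketch (not part of the original module): the library-author
# pattern the NullHandler docstring recommends. "mylib" is an example name.
import logging

logging.getLogger("mylib").addHandler(logging.NullHandler())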
-# Warnings integration
-
-any _warnings_showwarning = None
-
-def _showwarning(message, category, filename, lineno, file=None, line=None):
- """
- Implementation of showwarning which redirects to logging. It will first
- check to see if the file parameter is None. If a file is specified, it will
- delegate to the original warnings implementation of showwarning. Otherwise,
- it will call warnings.formatwarning and will log the resulting string to a
- warnings logger named "py.warnings" with level logging.WARNING.
- """
- if file is not None:
- if _warnings_showwarning is not None:
- _warnings_showwarning(message, category, filename, lineno, file, line)
- else:
- s = warnings.formatwarning(message, category, filename, lineno, line)
- logger = getLogger("py.warnings")
- if not logger.handlers:
- logger.addHandler(NullHandler())
- logger.warning("%s", s)
-
-def captureWarnings(capture):
- """
- If capture is true, redirect all warnings to the logging package.
- If capture is False, ensure that warnings are not redirected to logging
- but to their original destinations.
- """
- global _warnings_showwarning
- if capture:
- if _warnings_showwarning is None:
- _warnings_showwarning = warnings.showwarning
- warnings.showwarning = _showwarning
- else:
- if _warnings_showwarning is not None:
- warnings.showwarning = _warnings_showwarning
- _warnings_showwarning = None
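# Illustrative sketch (not part of the original module): routing warnings.warn()
# output through the "py.warnings" logger and then restoring the default.
import logging
import warnings

logging.basicConfig()
logging.captureWarnings(True)
warnings.warn("now handled by the py.warnings logger")
logging.captureWarnings(False)           # restore the original behaviour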
diff --git a/test-data/stdlib-samples/3.2/incomplete/urllib/__init__.py b/test-data/stdlib-samples/3.2/incomplete/urllib/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/test-data/stdlib-samples/3.2/incomplete/urllib/parse.py b/test-data/stdlib-samples/3.2/incomplete/urllib/parse.py
deleted file mode 100644
index 4fa65c4..0000000
--- a/test-data/stdlib-samples/3.2/incomplete/urllib/parse.py
+++ /dev/null
@@ -1,980 +0,0 @@
-"""Parse (absolute and relative) URLs.
-
-urlparse module is based upon the following RFC specifications.
-
-RFC 3986 (STD66): "Uniform Resource Identifiers" by T. Berners-Lee, R. Fielding
-and L. Masinter, January 2005.
-
-RFC 2732 : "Format for Literal IPv6 Addresses in URLs" by R.Hinden, B.Carpenter
-and L.Masinter, December 1999.
-
-RFC 2396: "Uniform Resource Identifiers (URI)": Generic Syntax by T.
-Berners-Lee, R. Fielding, and L. Masinter, August 1998.
-
-RFC 2368: "The mailto URL scheme", by P. Hoffman, L. Masinter, J. Zawinski, July 1998.
-
-RFC 1808: "Relative Uniform Resource Locators", by R. Fielding, UC Irvine, June
-1995.
-
-RFC 1738: "Uniform Resource Locators (URL)" by T. Berners-Lee, L. Masinter, M.
-McCahill, December 1994
-
-RFC 3986 is considered the current standard and any future changes to
-urlparse module should conform with it. The urlparse module is
-currently not entirely compliant with this RFC due to de facto
-scenarios for parsing, and for backward compatibility purposes, some
-parsing quirks from older RFCs are retained. The test cases in
-test_urlparse.py provide a good indicator of parsing behavior.
-"""
-
-import sys
-import collections
-
-__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag",
- "urlsplit", "urlunsplit", "urlencode", "parse_qs",
- "parse_qsl", "quote", "quote_plus", "quote_from_bytes",
- "unquote", "unquote_plus", "unquote_to_bytes"]
-
-# A classification of schemes ('' means apply by default)
-uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap',
- 'wais', 'file', 'https', 'shttp', 'mms',
- 'prospero', 'rtsp', 'rtspu', '', 'sftp',
- 'svn', 'svn+ssh']
-uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet',
- 'imap', 'wais', 'file', 'mms', 'https', 'shttp',
- 'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '',
- 'svn', 'svn+ssh', 'sftp', 'nfs', 'git', 'git+ssh']
-non_hierarchical = ['gopher', 'hdl', 'mailto', 'news',
- 'telnet', 'wais', 'imap', 'snews', 'sip', 'sips']
-uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap',
- 'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips',
- 'mms', '', 'sftp']
-uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms',
- 'gopher', 'rtsp', 'rtspu', 'sip', 'sips', '']
-uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news',
- 'nntp', 'wais', 'https', 'shttp', 'snews',
- 'file', 'prospero', '']
-
-# Characters valid in scheme names
-scheme_chars = ('abcdefghijklmnopqrstuvwxyz'
- 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
- '0123456789'
- '+-.')
-
-# XXX: Consider replacing with functools.lru_cache
-MAX_CACHE_SIZE = 20
-_parse_cache = {}
-
-def clear_cache():
- """Clear the parse cache and the quoters cache."""
- _parse_cache.clear()
- _safe_quoters.clear()
-
-
-# Helpers for bytes handling
-# For 3.2, we deliberately require applications that
-# handle improperly quoted URLs to do their own
-# decoding and encoding. If valid use cases are
-# presented, we may relax this by using latin-1
-# decoding internally for 3.3
-_implicit_encoding = 'ascii'
-_implicit_errors = 'strict'
-
-def _noop(obj):
- return obj
-
-def _encode_result(obj, encoding=_implicit_encoding,
- errors=_implicit_errors):
- return obj.encode(encoding, errors)
-
-def _decode_args(args, encoding=_implicit_encoding,
- errors=_implicit_errors):
- return tuple(x.decode(encoding, errors) if x else '' for x in args)
-
-def _coerce_args(*args):
- # Invokes decode if necessary to create str args
- # and returns the coerced inputs along with
- # an appropriate result coercion function
- # - noop for str inputs
- # - encoding function otherwise
- str_input = isinstance(args[0], str)
- for arg in args[1:]:
- # We special-case the empty string to support the
- # "scheme=''" default argument to some functions
- if arg and isinstance(arg, str) != str_input:
- raise TypeError("Cannot mix str and non-str arguments")
- if str_input:
- return args + (_noop,)
- return _decode_args(args) + (_encode_result,)
-
-# Result objects are more helpful than simple tuples
-class _ResultMixinStr(object):
- """Standard approach to encoding parsed results from str to bytes"""
- __slots__ = ()
-
- def encode(self, encoding='ascii', errors='strict'):
- return self._encoded_counterpart(*(x.encode(encoding, errors) for x in self))
-
-
-class _ResultMixinBytes(object):
- """Standard approach to decoding parsed results from bytes to str"""
- __slots__ = ()
-
- def decode(self, encoding='ascii', errors='strict'):
- return self._decoded_counterpart(*(x.decode(encoding, errors) for x in self))
-
-
-class _NetlocResultMixinBase(object):
- """Shared methods for the parsed result objects containing a netloc element"""
- __slots__ = ()
-
- @property
- def username(self):
- return self._userinfo[0]
-
- @property
- def password(self):
- return self._userinfo[1]
-
- @property
- def hostname(self):
- hostname = self._hostinfo[0]
- if not hostname:
- hostname = None
- elif hostname is not None:
- hostname = hostname.lower()
- return hostname
-
- @property
- def port(self):
- port = self._hostinfo[1]
- if port is not None:
- port = int(port, 10)
- return port
-
-
-class _NetlocResultMixinStr(_NetlocResultMixinBase, _ResultMixinStr):
- __slots__ = ()
-
- @property
- def _userinfo(self):
- netloc = self.netloc
- userinfo, have_info, hostinfo = netloc.rpartition('@')
- if have_info:
- username, have_password, password = userinfo.partition(':')
- if not have_password:
- password = None
- else:
- username = password = None
- return username, password
-
- @property
- def _hostinfo(self):
- netloc = self.netloc
- _, _, hostinfo = netloc.rpartition('@')
- _, have_open_br, bracketed = hostinfo.partition('[')
- if have_open_br:
- hostname, _, port = bracketed.partition(']')
- _, have_port, port = port.partition(':')
- else:
- hostname, have_port, port = hostinfo.partition(':')
- if not have_port:
- port = None
- return hostname, port
-
-
-class _NetlocResultMixinBytes(_NetlocResultMixinBase, _ResultMixinBytes):
- __slots__ = ()
-
- @property
- def _userinfo(self):
- netloc = self.netloc
- userinfo, have_info, hostinfo = netloc.rpartition(b'@')
- if have_info:
- username, have_password, password = userinfo.partition(b':')
- if not have_password:
- password = None
- else:
- username = password = None
- return username, password
-
- @property
- def _hostinfo(self):
- netloc = self.netloc
- _, _, hostinfo = netloc.rpartition(b'@')
- _, have_open_br, bracketed = hostinfo.partition(b'[')
- if have_open_br:
- hostname, _, port = bracketed.partition(b']')
- _, have_port, port = port.partition(b':')
- else:
- hostname, have_port, port = hostinfo.partition(b':')
- if not have_port:
- port = None
- return hostname, port
-
-
-from collections import namedtuple
-
-_DefragResultBase = namedtuple('DefragResult', 'url fragment')
-_SplitResultBase = namedtuple('SplitResult', 'scheme netloc path query fragment')
-_ParseResultBase = namedtuple('ParseResult', 'scheme netloc path params query fragment')
-
-# For backwards compatibility, alias _NetlocResultMixinStr
-# ResultBase is no longer part of the documented API, but it is
-# retained since deprecating it isn't worth the hassle
-ResultBase = _NetlocResultMixinStr
-
-# Structured result objects for string data
-class DefragResult(_DefragResultBase, _ResultMixinStr):
- __slots__ = ()
- def geturl(self):
- if self.fragment:
- return self.url + '#' + self.fragment
- else:
- return self.url
-
-class SplitResult(_SplitResultBase, _NetlocResultMixinStr):
- __slots__ = ()
- def geturl(self):
- return urlunsplit(self)
-
-class ParseResult(_ParseResultBase, _NetlocResultMixinStr):
- __slots__ = ()
- def geturl(self):
- return urlunparse(self)
-
-# Structured result objects for bytes data
-class DefragResultBytes(_DefragResultBase, _ResultMixinBytes):
- __slots__ = ()
- def geturl(self):
- if self.fragment:
- return self.url + b'#' + self.fragment
- else:
- return self.url
-
-class SplitResultBytes(_SplitResultBase, _NetlocResultMixinBytes):
- __slots__ = ()
- def geturl(self):
- return urlunsplit(self)
-
-class ParseResultBytes(_ParseResultBase, _NetlocResultMixinBytes):
- __slots__ = ()
- def geturl(self):
- return urlunparse(self)
-
-# Set up the encode/decode result pairs
-def _fix_result_transcoding():
- _result_pairs = (
- (DefragResult, DefragResultBytes),
- (SplitResult, SplitResultBytes),
- (ParseResult, ParseResultBytes),
- )
- for _decoded, _encoded in _result_pairs:
- _decoded._encoded_counterpart = _encoded
- _encoded._decoded_counterpart = _decoded
-
-_fix_result_transcoding()
-del _fix_result_transcoding
-
-def urlparse(url, scheme='', allow_fragments=True):
- """Parse a URL into 6 components:
- <scheme>://<netloc>/<path>;<params>?<query>#<fragment>
- Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
- Note that we don't break the components up into smaller bits
- (e.g. netloc is a single string) and we don't expand % escapes."""
- url, scheme, _coerce_result = _coerce_args(url, scheme)
- tuple = urlsplit(url, scheme, allow_fragments)
- scheme, netloc, url, query, fragment = tuple
- if scheme in uses_params and ';' in url:
- url, params = _splitparams(url)
- else:
- params = ''
- result = ParseResult(scheme, netloc, url, params, query, fragment)
- return _coerce_result(result)
-
-def _splitparams(url):
- if '/' in url:
- i = url.find(';', url.rfind('/'))
- if i < 0:
- return url, ''
- else:
- i = url.find(';')
- return url[:i], url[i+1:]
-
-def _splitnetloc(url, start=0):
- delim = len(url) # position of end of domain part of url, default is end
- for c in '/?#': # look for delimiters; the order is NOT important
- wdelim = url.find(c, start) # find first of this delim
- if wdelim >= 0: # if found
- delim = min(delim, wdelim) # use earliest delim position
- return url[start:delim], url[delim:] # return (domain, rest)
-
-def urlsplit(url, scheme='', allow_fragments=True):
- """Parse a URL into 5 components:
- <scheme>://<netloc>/<path>?<query>#<fragment>
- Return a 5-tuple: (scheme, netloc, path, query, fragment).
- Note that we don't break the components up into smaller bits
- (e.g. netloc is a single string) and we don't expand % escapes."""
- url, scheme, _coerce_result = _coerce_args(url, scheme)
- allow_fragments = bool(allow_fragments)
- key = url, scheme, allow_fragments, type(url), type(scheme)
- cached = _parse_cache.get(key, None)
- if cached:
- return _coerce_result(cached)
- if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth
- clear_cache()
- netloc = query = fragment = ''
- i = url.find(':')
- if i > 0:
- if url[:i] == 'http': # optimize the common case
- scheme = url[:i].lower()
- url = url[i+1:]
- if url[:2] == '//':
- netloc, url = _splitnetloc(url, 2)
- if (('[' in netloc and ']' not in netloc) or
- (']' in netloc and '[' not in netloc)):
- raise ValueError("Invalid IPv6 URL")
- if allow_fragments and '#' in url:
- url, fragment = url.split('#', 1)
- if '?' in url:
- url, query = url.split('?', 1)
- v = SplitResult(scheme, netloc, url, query, fragment)
- _parse_cache[key] = v
- return _coerce_result(v)
- for c in url[:i]:
- if c not in scheme_chars:
- break
- else:
- try:
- # make sure "url" is not actually a port number (in which case
- # "scheme" is really part of the path
- _testportnum = int(url[i+1:])
- except ValueError:
- scheme, url = url[:i].lower(), url[i+1:]
-
- if url[:2] == '//':
- netloc, url = _splitnetloc(url, 2)
- if (('[' in netloc and ']' not in netloc) or
- (']' in netloc and '[' not in netloc)):
- raise ValueError("Invalid IPv6 URL")
- if allow_fragments and scheme in uses_fragment and '#' in url:
- url, fragment = url.split('#', 1)
- if scheme in uses_query and '?' in url:
- url, query = url.split('?', 1)
- v = SplitResult(scheme, netloc, url, query, fragment)
- _parse_cache[key] = v
- return _coerce_result(v)
-
-def urlunparse(components):
- """Put a parsed URL back together again. This may result in a
- slightly different, but equivalent URL, if the URL that was parsed
- originally had redundant delimiters, e.g. a ? with an empty query
- (the draft states that these are equivalent)."""
- scheme, netloc, url, params, query, fragment, _coerce_result = (
- _coerce_args(*components))
- if params:
- url = "%s;%s" % (url, params)
- return _coerce_result(urlunsplit((scheme, netloc, url, query, fragment)))
-
-def urlunsplit(components):
- """Combine the elements of a tuple as returned by urlsplit() into a
- complete URL as a string. The data argument can be any five-item iterable.
- This may result in a slightly different, but equivalent URL, if the URL that
- was parsed originally had unnecessary delimiters (for example, a ? with an
- empty query; the RFC states that these are equivalent)."""
- scheme, netloc, url, query, fragment, _coerce_result = (
- _coerce_args(*components))
- if netloc or (scheme and scheme in uses_netloc and url[:2] != '//'):
- if url and url[:1] != '/': url = '/' + url
- url = '//' + (netloc or '') + url
- if scheme:
- url = scheme + ':' + url
- if query:
- url = url + '?' + query
- if fragment:
- url = url + '#' + fragment
- return _coerce_result(url)
-
-def urljoin(base, url, allow_fragments=True):
- """Join a base URL and a possibly relative URL to form an absolute
- interpretation of the latter."""
- if not base:
- return url
- if not url:
- return base
- base, url, _coerce_result = _coerce_args(base, url)
- bscheme, bnetloc, bpath, bparams, bquery, bfragment = \
- urlparse(base, '', allow_fragments)
- scheme, netloc, path, params, query, fragment = \
- urlparse(url, bscheme, allow_fragments)
- if scheme != bscheme or scheme not in uses_relative:
- return _coerce_result(url)
- if scheme in uses_netloc:
- if netloc:
- return _coerce_result(urlunparse((scheme, netloc, path,
- params, query, fragment)))
- netloc = bnetloc
- if path[:1] == '/':
- return _coerce_result(urlunparse((scheme, netloc, path,
- params, query, fragment)))
- if not path and not params:
- path = bpath
- params = bparams
- if not query:
- query = bquery
- return _coerce_result(urlunparse((scheme, netloc, path,
- params, query, fragment)))
- segments = bpath.split('/')[:-1] + path.split('/')
- # XXX The stuff below is bogus in various ways...
- if segments[-1] == '.':
- segments[-1] = ''
- while '.' in segments:
- segments.remove('.')
- while 1:
- i = 1
- n = len(segments) - 1
- while i < n:
- if (segments[i] == '..'
- and segments[i-1] not in ('', '..')):
- del segments[i-1:i+1]
- break
- i = i+1
- else:
- break
- if segments == ['', '..']:
- segments[-1] = ''
- elif len(segments) >= 2 and segments[-1] == '..':
- segments[-2:] = ['']
- return _coerce_result(urlunparse((scheme, netloc, '/'.join(segments),
- params, query, fragment)))
-
-def urldefrag(url):
- """Removes any existing fragment from URL.
-
- Returns a tuple of the defragmented URL and the fragment. If
- the URL contained no fragments, the second element is the
- empty string.
- """
- url, _coerce_result = _coerce_args(url)
- if '#' in url:
- s, n, p, a, q, frag = urlparse(url)
- defrag = urlunparse((s, n, p, a, q, ''))
- else:
- frag = ''
- defrag = url
- return _coerce_result(DefragResult(defrag, frag))
-
-def unquote_to_bytes(string):
- """unquote_to_bytes('abc%20def') -> b'abc def'."""
- # Note: strings are encoded as UTF-8. This is only an issue if the string
- # contains unescaped non-ASCII characters, which URIs should not.
- if not string:
- # Is it a string-like object?
- string.split
- return b''
- if isinstance(string, str):
- string = string.encode('utf-8')
- res = string.split(b'%')
- if len(res) == 1:
- return string
- string = res[0]
- for item in res[1:]:
- try:
- string += bytes([int(item[:2], 16)]) + item[2:]
- except ValueError:
- string += b'%' + item
- return string
-
-def unquote(string, encoding='utf-8', errors='replace'):
- """Replace %xx escapes by their single-character equivalent. The optional
- encoding and errors parameters specify how to decode percent-encoded
- sequences into Unicode characters, as accepted by the bytes.decode()
- method.
- By default, percent-encoded sequences are decoded with UTF-8, and invalid
- sequences are replaced by a placeholder character.
-
- unquote('abc%20def') -> 'abc def'.
- """
- if string == '':
- return string
- res = string.split('%')
- if len(res) == 1:
- return string
- if encoding is None:
- encoding = 'utf-8'
- if errors is None:
- errors = 'replace'
- # pct_sequence: contiguous sequence of percent-encoded bytes, decoded
- pct_sequence = b''
- string = res[0]
- for item in res[1:]:
- try:
- if not item:
- raise ValueError
- pct_sequence += bytes.fromhex(item[:2])
- rest = item[2:]
- if not rest:
- # This segment was just a single percent-encoded character.
- # May be part of a sequence of code units, so delay decoding.
- # (Stored in pct_sequence).
- continue
- except ValueError:
- rest = '%' + item
- # Encountered non-percent-encoded characters. Flush the current
- # pct_sequence.
- string += pct_sequence.decode(encoding, errors) + rest
- pct_sequence = b''
- if pct_sequence:
- # Flush the final pct_sequence
- string += pct_sequence.decode(encoding, errors)
- return string
-
-def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
- encoding='utf-8', errors='replace'):
- """Parse a query given as a string argument.
-
- Arguments:
-
- qs: percent-encoded query string to be parsed
-
- keep_blank_values: flag indicating whether blank values in
- percent-encoded queries should be treated as blank strings.
- A true value indicates that blanks should be retained as
- blank strings. The default false value indicates that
- blank values are to be ignored and treated as if they were
- not included.
-
- strict_parsing: flag indicating what to do with parsing errors.
- If false (the default), errors are silently ignored.
- If true, errors raise a ValueError exception.
-
- encoding and errors: specify how to decode percent-encoded sequences
- into Unicode characters, as accepted by the bytes.decode() method.
- """
- dict = {}
- pairs = parse_qsl(qs, keep_blank_values, strict_parsing,
- encoding=encoding, errors=errors)
- for name, value in pairs:
- if name in dict:
- dict[name].append(value)
- else:
- dict[name] = [value]
- return dict
-
-def parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
- encoding='utf-8', errors='replace'):
- """Parse a query given as a string argument.
-
- Arguments:
-
- qs: percent-encoded query string to be parsed
-
- keep_blank_values: flag indicating whether blank values in
- percent-encoded queries should be treated as blank strings. A
- true value indicates that blanks should be retained as blank
- strings. The default false value indicates that blank values
- are to be ignored and treated as if they were not included.
-
- strict_parsing: flag indicating what to do with parsing errors. If
- false (the default), errors are silently ignored. If true,
- errors raise a ValueError exception.
-
- encoding and errors: specify how to decode percent-encoded sequences
- into Unicode characters, as accepted by the bytes.decode() method.
-
- Returns a list, as G-d intended.
- """
- qs, _coerce_result = _coerce_args(qs)
- pairs = []
- for s1 in qs.split('&'):
- for s2 in s1.split(';'):
- pairs.append(s2)
- r = []
- for name_value in pairs:
- if not name_value and not strict_parsing:
- continue
- nv = name_value.split('=', 1)
- if len(nv) != 2:
- if strict_parsing:
- raise ValueError("bad query field: %r" % (name_value,))
- # Handle case of a control-name with no equal sign
- if keep_blank_values:
- nv.append('')
- else:
- continue
- if len(nv[1]) or keep_blank_values:
- name = nv[0].replace('+', ' ')
- name = unquote(name, encoding=encoding, errors=errors)
- name = _coerce_result(name)
- value = nv[1].replace('+', ' ')
- value = unquote(value, encoding=encoding, errors=errors)
- value = _coerce_result(value)
- r.append((name, value))
- return r
-
-def unquote_plus(string, encoding='utf-8', errors='replace'):
- """Like unquote(), but also replace plus signs by spaces, as required for
- unquoting HTML form values.
-
- unquote_plus('%7e/abc+def') -> '~/abc def'
- """
- string = string.replace('+', ' ')
- return unquote(string, encoding, errors)
-
-_ALWAYS_SAFE = frozenset(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
- b'abcdefghijklmnopqrstuvwxyz'
- b'0123456789'
- b'_.-')
-_ALWAYS_SAFE_BYTES = bytes(_ALWAYS_SAFE)
-_safe_quoters = {}
-
-class Quoter(collections.defaultdict):
- """A mapping from bytes (in range(0,256)) to strings.
-
- String values are percent-encoded byte values, unless the key < 128, and
- in the "safe" set (either the specified safe set, or default set).
- """
- # Keeps a cache internally, using defaultdict, for efficiency (lookups
- # of cached keys don't call Python code at all).
- def __init__(self, safe):
- """safe: bytes object."""
- self.safe = _ALWAYS_SAFE.union(safe)
-
- def __repr__(self):
- # Without this, will just display as a defaultdict
- return "<Quoter %r>" % dict(self)
-
- def __missing__(self, b):
- # Handle a cache miss. Store quoted string in cache and return.
- res = chr(b) if b in self.safe else '%{:02X}'.format(b)
- self[b] = res
- return res
-
-def quote(string, safe='/', encoding=None, errors=None):
- """quote('abc def') -> 'abc%20def'
-
- Each part of a URL, e.g. the path info, the query, etc., has a
- different set of reserved characters that must be quoted.
-
- RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists
- the following reserved characters.
-
- reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" |
- "$" | ","
-
- Each of these characters is reserved in some component of a URL,
- but not necessarily in all of them.
-
- By default, the quote function is intended for quoting the path
- section of a URL. Thus, it will not encode '/'. This character
- is reserved, but in typical usage the quote function is being
- called on a path where the existing slash characters are used as
- reserved characters.
-
- string and safe may be either str or bytes objects. encoding and errors
- must not be specified if string is a bytes object.
-
- The optional encoding and errors parameters specify how to deal with
- non-ASCII characters, as accepted by the str.encode method.
- By default, encoding='utf-8' (characters are encoded with UTF-8), and
- errors='strict' (unsupported characters raise a UnicodeEncodeError).
- """
- if isinstance(string, str):
- if not string:
- return string
- if encoding is None:
- encoding = 'utf-8'
- if errors is None:
- errors = 'strict'
- string = string.encode(encoding, errors)
- else:
- if encoding is not None:
- raise TypeError("quote() doesn't support 'encoding' for bytes")
- if errors is not None:
- raise TypeError("quote() doesn't support 'errors' for bytes")
- return quote_from_bytes(string, safe)
-
-def quote_plus(string, safe='', encoding=None, errors=None):
- """Like quote(), but also replace ' ' with '+', as required for quoting
- HTML form values. Plus signs in the original string are escaped unless
- they are included in safe. Unlike quote(), safe does not default to '/'.
- """
- # Check if ' ' in string, where string may either be a str or bytes. If
- # there are no spaces, the regular quote will produce the right answer.
- if ((isinstance(string, str) and ' ' not in string) or
- (isinstance(string, bytes) and b' ' not in string)):
- return quote(string, safe, encoding, errors)
- if isinstance(safe, str):
- space = ' '
- else:
- space = b' '
- string = quote(string, safe + space, encoding, errors)
- return string.replace(' ', '+')
-
-def quote_from_bytes(bs, safe='/'):
- """Like quote(), but accepts a bytes object rather than a str, and does
- not perform string-to-bytes encoding. It always returns an ASCII string.
- quote_from_bytes(b'abc def\xab') -> 'abc%20def%AB'
- """
- if not isinstance(bs, (bytes, bytearray)):
- raise TypeError("quote_from_bytes() expected bytes")
- if not bs:
- return ''
- if isinstance(safe, str):
- # Normalize 'safe' by converting to bytes and removing non-ASCII chars
- safe = safe.encode('ascii', 'ignore')
- else:
- safe = bytes([c for c in safe if c < 128])
- if not bs.rstrip(_ALWAYS_SAFE_BYTES + safe):
- return bs.decode()
- try:
- quoter = _safe_quoters[safe]
- except KeyError:
- _safe_quoters[safe] = quoter = Quoter(safe).__getitem__
- return ''.join([quoter(char) for char in bs])
-
-def urlencode(query, doseq=False, safe='', encoding=None, errors=None):
- """Encode a sequence of two-element tuples or dictionary into a URL query string.
-
- If any values in the query arg are sequences and doseq is true, each
- sequence element is converted to a separate parameter.
-
- If the query arg is a sequence of two-element tuples, the order of the
- parameters in the output will match the order of parameters in the
- input.
-
- The query arg may be either a string or a bytes type. When the query arg is
- a string, the safe, encoding and errors parameters are passed to quote_plus
- for encoding.
- """
-
- if hasattr(query, "items"):
- query = query.items()
- else:
- # It's a bother at times that strings and string-like objects are
- # sequences.
- try:
- # non-sequence items should not work with len()
- # non-empty strings will fail this
- if len(query) and not isinstance(query[0], tuple):
- raise TypeError
- # Zero-length sequences of all types will get here and succeed,
- # but that's a minor nit. Since the original implementation
- # allowed empty dicts that type of behavior probably should be
- # preserved for consistency
- except TypeError:
- ty, va, tb = sys.exc_info()
- raise TypeError("not a valid non-string sequence "
- "or mapping object").with_traceback(tb)
-
- l = []
- if not doseq:
- for k, v in query:
- if isinstance(k, bytes):
- k = quote_plus(k, safe)
- else:
- k = quote_plus(str(k), safe, encoding, errors)
-
- if isinstance(v, bytes):
- v = quote_plus(v, safe)
- else:
- v = quote_plus(str(v), safe, encoding, errors)
- l.append(k + '=' + v)
- else:
- for k, v in query:
- if isinstance(k, bytes):
- k = quote_plus(k, safe)
- else:
- k = quote_plus(str(k), safe, encoding, errors)
-
- if isinstance(v, bytes):
- v = quote_plus(v, safe)
- l.append(k + '=' + v)
- elif isinstance(v, str):
- v = quote_plus(v, safe, encoding, errors)
- l.append(k + '=' + v)
- else:
- try:
- # Is this a sufficient test for sequence-ness?
- x = len(v)
- except TypeError:
- # not a sequence
- v = quote_plus(str(v), safe, encoding, errors)
- l.append(k + '=' + v)
- else:
- # loop over the sequence
- for elt in v:
- if isinstance(elt, bytes):
- elt = quote_plus(elt, safe)
- else:
- elt = quote_plus(str(elt), safe, encoding, errors)
- l.append(k + '=' + elt)
- return '&'.join(l)
-
-# Utilities to parse URLs (most of these return None for missing parts):
-# unwrap('<URL:type://host/path>') --> 'type://host/path'
-# splittype('type:opaquestring') --> 'type', 'opaquestring'
-# splithost('//host[:port]/path') --> 'host[:port]', '/path'
-# splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'
-# splitpasswd('user:passwd') -> 'user', 'passwd'
-# splitport('host:port') --> 'host', 'port'
-# splitquery('/path?query') --> '/path', 'query'
-# splittag('/path#tag') --> '/path', 'tag'
-# splitattr('/path;attr1=value1;attr2=value2;...') ->
-# '/path', ['attr1=value1', 'attr2=value2', ...]
-# splitvalue('attr=value') --> 'attr', 'value'
-# urllib.parse.unquote('abc%20def') -> 'abc def'
-# quote('abc def') -> 'abc%20def'
-
-def to_bytes(url):
- """to_bytes(u"URL") --> 'URL'."""
- # Most URL schemes require ASCII. If that changes, the conversion
- # can be relaxed.
- # XXX get rid of to_bytes()
- if isinstance(url, str):
- try:
- url = url.encode("ASCII").decode()
- except UnicodeError:
- raise UnicodeError("URL " + repr(url) +
- " contains non-ASCII characters")
- return url
-
-def unwrap(url):
- """unwrap('<URL:type://host/path>') --> 'type://host/path'."""
- url = str(url).strip()
- if url[:1] == '<' and url[-1:] == '>':
- url = url[1:-1].strip()
- if url[:4] == 'URL:': url = url[4:].strip()
- return url
-
-_typeprog = None
-def splittype(url):
- """splittype('type:opaquestring') --> 'type', 'opaquestring'."""
- global _typeprog
- if _typeprog is None:
- import re
- _typeprog = re.compile('^([^/:]+):')
-
- match = _typeprog.match(url)
- if match:
- scheme = match.group(1)
- return scheme.lower(), url[len(scheme) + 1:]
- return None, url
-
-_hostprog = None
-def splithost(url):
- """splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
- global _hostprog
- if _hostprog is None:
- import re
- _hostprog = re.compile('^//([^/?]*)(.*)$')
-
- match = _hostprog.match(url)
- if match:
- host_port = match.group(1)
- path = match.group(2)
- if path and not path.startswith('/'):
- path = '/' + path
- return host_port, path
- return None, url
-
-_userprog = None
-def splituser(host):
- """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
- global _userprog
- if _userprog is None:
- import re
- _userprog = re.compile('^(.*)@(.*)$')
-
- match = _userprog.match(host)
- if match: return match.group(1, 2)
- return None, host
-
-_passwdprog = None
-def splitpasswd(user):
- """splitpasswd('user:passwd') -> 'user', 'passwd'."""
- global _passwdprog
- if _passwdprog is None:
- import re
- _passwdprog = re.compile('^([^:]*):(.*)$',re.S)
-
- match = _passwdprog.match(user)
- if match: return match.group(1, 2)
- return user, None
-
-# splittag('/path#tag') --> '/path', 'tag'
-_portprog = None
-def splitport(host):
- """splitport('host:port') --> 'host', 'port'."""
- global _portprog
- if _portprog is None:
- import re
- _portprog = re.compile('^(.*):([0-9]+)$')
-
- match = _portprog.match(host)
- if match: return match.group(1, 2)
- return host, None
-
-_nportprog = None
-def splitnport(host, defport=-1):
- """Split host and port, returning numeric port.
- Return given default port if no ':' found; defaults to -1.
- Return numerical port if a valid number is found after ':'.
- Return None if ':' but not a valid number."""
- global _nportprog
- if _nportprog is None:
- import re
- _nportprog = re.compile('^(.*):(.*)$')
-
- match = _nportprog.match(host)
- if match:
- host, port = match.group(1, 2)
- try:
- if not port: raise ValueError("no digits")
- nport = int(port)
- except ValueError:
- nport = None
- return host, nport
- return host, defport
-
-_queryprog = None
-def splitquery(url):
- """splitquery('/path?query') --> '/path', 'query'."""
- global _queryprog
- if _queryprog is None:
- import re
- _queryprog = re.compile('^(.*)\?([^?]*)$')
-
- match = _queryprog.match(url)
- if match: return match.group(1, 2)
- return url, None
-
-_tagprog = None
-def splittag(url):
- """splittag('/path#tag') --> '/path', 'tag'."""
- global _tagprog
- if _tagprog is None:
- import re
- _tagprog = re.compile('^(.*)#([^#]*)$')
-
- match = _tagprog.match(url)
- if match: return match.group(1, 2)
- return url, None
-
-def splitattr(url):
- """splitattr('/path;attr1=value1;attr2=value2;...') ->
- '/path', ['attr1=value1', 'attr2=value2', ...]."""
- words = url.split(';')
- return words[0], words[1:]
-
-_valueprog = None
-def splitvalue(attr):
- """splitvalue('attr=value') --> 'attr', 'value'."""
- global _valueprog
- if _valueprog is None:
- import re
- _valueprog = re.compile('^([^=]*)=(.*)$')
-
- match = _valueprog.match(attr)
- if match: return match.group(1, 2)
- return attr, None
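
For reference, the removed parse.py sample above mirrors the standard urllib.parse module, so the splitting, joining, and query-parsing behavior its docstrings describe can be exercised directly against the stdlib API. The following is a minimal usage sketch; the literal URLs and the expected values asserted below are illustrative assumptions based on the documented behavior above, not output captured from this repository.

from urllib.parse import urlsplit, urljoin, parse_qsl, quote_plus

# urlsplit() returns a 5-tuple (scheme, netloc, path, query, fragment),
# plus the username/password/hostname/port accessors defined above.
parts = urlsplit('https://user:pw@example.com:8042/over/there?name=ferret#nose')
assert parts.scheme == 'https' and parts.hostname == 'example.com'
assert parts.username == 'user' and parts.port == 8042

# urljoin() resolves a possibly relative URL against a base URL.
assert urljoin('https://example.com/a/b/c', '../d') == 'https://example.com/a/d'

# parse_qsl() decodes a percent-encoded query string into (name, value) pairs;
# '+' becomes a space and %XX escapes are decoded as in unquote().
assert parse_qsl('a=1&b=two+words&b=%7e') == [('a', '1'), ('b', 'two words'), ('b', '~')]

# quote_plus() is the str-side encoder used by urlencode(): spaces become '+'
# and characters outside the safe set are percent-encoded.
assert quote_plus('two words & more') == 'two+words+%26+more'
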
diff --git a/test-data/stdlib-samples/3.2/posixpath.py b/test-data/stdlib-samples/3.2/posixpath.py
deleted file mode 100644
index cf5d59e..0000000
--- a/test-data/stdlib-samples/3.2/posixpath.py
+++ /dev/null
@@ -1,466 +0,0 @@
-"""Common operations on Posix pathnames.
-
-Instead of importing this module directly, import os and refer to
-this module as os.path. The "os.path" name is an alias for this
-module on Posix systems; on other systems (e.g. Mac, Windows),
-os.path provides the same operations in a manner specific to that
-platform, and is an alias to another module (e.g. macpath, ntpath).
-
-Some of this can actually be useful on non-Posix systems too, e.g.
-for manipulation of the pathname component of URLs.
-"""
-
-import os
-import sys
-import stat
-import genericpath
-from genericpath import *
-
-from typing import (
- Tuple, BinaryIO, TextIO, Pattern, AnyStr, List, Set, Any, Union, cast
-)
-
-__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
- "basename","dirname","commonprefix","getsize","getmtime",
- "getatime","getctime","islink","exists","lexists","isdir","isfile",
- "ismount", "expanduser","expandvars","normpath","abspath",
- "samefile","sameopenfile","samestat",
- "curdir","pardir","sep","pathsep","defpath","altsep","extsep",
- "devnull","realpath","supports_unicode_filenames","relpath"]
-
-# Strings representing various path-related bits and pieces.
-# These are primarily for export; internally, they are hardcoded.
-curdir = '.'
-pardir = '..'
-extsep = '.'
-sep = '/'
-pathsep = ':'
-defpath = ':/bin:/usr/bin'
-altsep = None # type: str
-devnull = '/dev/null'
-
-def _get_sep(path: AnyStr) -> AnyStr:
- if isinstance(path, bytes):
- return b'/'
- else:
- return '/'
-
-# Normalize the case of a pathname. Trivial in Posix, string.lower on Mac.
-# On MS-DOS this may also turn slashes into backslashes; however, other
-# normalizations (such as optimizing '../' away) are not allowed
-# (another function should be defined to do that).
-
-def normcase(s: AnyStr) -> AnyStr:
- """Normalize case of pathname. Has no effect under Posix"""
- # TODO: on Mac OS X, this should really return s.lower().
- if not isinstance(s, (bytes, str)):
- raise TypeError("normcase() argument must be str or bytes, "
- "not '{}'".format(s.__class__.__name__))
- return cast(AnyStr, s)
-
-
-# Return whether a path is absolute.
-# Trivial in Posix, harder on the Mac or MS-DOS.
-
-def isabs(s: AnyStr) -> bool:
- """Test whether a path is absolute"""
- sep = _get_sep(s)
- return s.startswith(sep)
-
-
-# Join pathnames.
-# Ignore the previous parts if a part is absolute.
-# Insert a '/' unless the first part is empty or already ends in '/'.
-
-def join(a: AnyStr, *p: AnyStr) -> AnyStr:
- """Join two or more pathname components, inserting '/' as needed.
- If any component is an absolute path, all previous path components
- will be discarded."""
- sep = _get_sep(a)
- path = a
- for b in p:
- if b.startswith(sep):
- path = b
- elif not path or path.endswith(sep):
- path += b
- else:
- path += sep + b
- return path
-
-
-# Split a path in head (everything up to the last '/') and tail (the
-# rest). If the path ends in '/', tail will be empty. If there is no
-# '/' in the path, head will be empty.
-# Trailing '/'es are stripped from head unless it is the root.
-
-def split(p: AnyStr) -> Tuple[AnyStr, AnyStr]:
- """Split a pathname. Returns tuple "(head, tail)" where "tail" is
- everything after the final slash. Either part may be empty."""
- sep = _get_sep(p)
- i = p.rfind(sep) + 1
- head, tail = p[:i], p[i:]
- if head and head != sep*len(head):
- head = head.rstrip(sep)
- return head, tail
-
-
-# Split a path in root and extension.
-# The extension is everything starting at the last dot in the last
-# pathname component; the root is everything before that.
-# It is always true that root + ext == p.
-
-def splitext(p: AnyStr) -> Tuple[AnyStr, AnyStr]:
- if isinstance(p, bytes):
- sep = b'/'
- extsep = b'.'
- else:
- sep = '/'
- extsep = '.'
- return genericpath._splitext(p, sep, None, extsep)
-splitext.__doc__ = genericpath._splitext.__doc__
-
-# Split a pathname into a drive specification and the rest of the
-# path. Useful on DOS/Windows/NT; on Unix, the drive is always empty.
-
-def splitdrive(p: AnyStr) -> Tuple[AnyStr, AnyStr]:
- """Split a pathname into drive and path. On Posix, drive is always
- empty."""
- return p[:0], p
-
-
-# Return the tail (basename) part of a path, same as split(path)[1].
-
-def basename(p: AnyStr) -> AnyStr:
- """Returns the final component of a pathname"""
- sep = _get_sep(p)
- i = p.rfind(sep) + 1
- return p[i:]
-
-
-# Return the head (dirname) part of a path, same as split(path)[0].
-
-def dirname(p: AnyStr) -> AnyStr:
- """Returns the directory component of a pathname"""
- sep = _get_sep(p)
- i = p.rfind(sep) + 1
- head = p[:i]
- if head and head != sep*len(head):
- head = head.rstrip(sep)
- return head
-
-
-# Is a path a symbolic link?
-# This will always return false on systems where os.lstat doesn't exist.
-
-def islink(path: AnyStr) -> bool:
- """Test whether a path is a symbolic link"""
- try:
- st = os.lstat(path)
- except (os.error, AttributeError):
- return False
- return stat.S_ISLNK(st.st_mode)
-
-# Being true for dangling symbolic links is also useful.
-
-def lexists(path: AnyStr) -> bool:
- """Test whether a path exists. Returns True for broken symbolic links"""
- try:
- os.lstat(path)
- except os.error:
- return False
- return True
-
-
-# Are two filenames really pointing to the same file?
-
-def samefile(f1: AnyStr, f2: AnyStr) -> bool:
- """Test whether two pathnames reference the same actual file"""
- s1 = os.stat(f1)
- s2 = os.stat(f2)
- return samestat(s1, s2)
-
-
-# Are two open files really referencing the same file?
-# (Not necessarily the same file descriptor!)
-
-def sameopenfile(fp1: int, fp2: int) -> bool:
- """Test whether two open file objects reference the same file"""
- s1 = os.fstat(fp1)
- s2 = os.fstat(fp2)
- return samestat(s1, s2)
-
-
-# Are two stat buffers (obtained from stat, fstat or lstat)
-# describing the same file?
-
-def samestat(s1: os.stat_result, s2: os.stat_result) -> bool:
- """Test whether two stat buffers reference the same file"""
- return s1.st_ino == s2.st_ino and \
- s1.st_dev == s2.st_dev
-
-
-# Is a path a mount point?
-# (Does this work for all UNIXes? Is it even guaranteed to work by Posix?)
-
-def ismount(path: AnyStr) -> bool:
- """Test whether a path is a mount point"""
- if islink(path):
- # A symlink can never be a mount point
- return False
- try:
- s1 = os.lstat(path)
- if isinstance(path, bytes):
- parent = join(path, b'..')
- else:
- parent = join(path, '..')
- s2 = os.lstat(parent)
- except os.error:
- return False # It doesn't exist -- so not a mount point :-)
- dev1 = s1.st_dev
- dev2 = s2.st_dev
- if dev1 != dev2:
- return True # path/.. is on a different device than path
- ino1 = s1.st_ino
- ino2 = s2.st_ino
- if ino1 == ino2:
- return True # path/.. is the same i-node as path
- return False
-
-
-# Expand paths beginning with '~' or '~user'.
-# '~' means $HOME; '~user' means that user's home directory.
-# If the path doesn't begin with '~', or if the user or $HOME is unknown,
-# the path is returned unchanged (leaving error reporting to whatever
-# function is called with the expanded path as argument).
-# See also module 'glob' for expansion of *, ? and [...] in pathnames.
-# (A function should also be defined to do full *sh-style environment
-# variable expansion.)
-
-def expanduser(path: AnyStr) -> AnyStr:
- """Expand ~ and ~user constructions. If user or $HOME is unknown,
- do nothing."""
- if isinstance(path, bytes):
- tilde = b'~'
- else:
- tilde = '~'
- if not path.startswith(tilde):
- return path
- sep = _get_sep(path)
- i = path.find(sep, 1)
- if i < 0:
- i = len(path)
- if i == 1:
- userhome = None # type: Union[str, bytes]
- if 'HOME' not in os.environ:
- import pwd
- userhome = pwd.getpwuid(os.getuid()).pw_dir
- else:
- userhome = os.environ['HOME']
- else:
- import pwd
- name = path[1:i] # type: Union[str, bytes]
- if isinstance(name, bytes):
- name = str(name, 'ASCII')
- try:
- pwent = pwd.getpwnam(name)
- except KeyError:
- return path
- userhome = pwent.pw_dir
- if isinstance(path, bytes):
- userhome = os.fsencode(userhome)
- root = b'/'
- else:
- root = '/'
- userhome = userhome.rstrip(root)
- return (userhome + path[i:]) or root
-
-
-# Expand paths containing shell variable substitutions.
-# This expands the forms $variable and ${variable} only.
-# Non-existent variables are left unchanged.
-
-_varprog = None # type: Pattern[str]
-_varprogb = None # type: Pattern[bytes]
-
-def expandvars(path: AnyStr) -> AnyStr:
- """Expand shell variables of form $var and ${var}. Unknown variables
- are left unchanged."""
- global _varprog, _varprogb
- if isinstance(path, bytes):
- if b'$' not in path:
- return path
- if not _varprogb:
- import re
- _varprogb = re.compile(br'\$(\w+|\{[^}]*\})', re.ASCII)
- search = _varprogb.search
- start = b'{'
- end = b'}'
- else:
- if '$' not in path:
- return path
- if not _varprog:
- import re
- _varprog = re.compile(r'\$(\w+|\{[^}]*\})', re.ASCII)
- search = _varprog.search
- start = '{'
- end = '}'
- i = 0
- while True:
- m = search(path, i)
- if not m:
- break
- i, j = m.span(0)
- name = None # type: Union[str, bytes]
- name = m.group(1)
- if name.startswith(start) and name.endswith(end):
- name = name[1:-1]
- if isinstance(name, bytes):
- name = str(name, 'ASCII')
- if name in os.environ:
- tail = path[j:]
- value = None # type: Union[str, bytes]
- value = os.environ[name]
- if isinstance(path, bytes):
- value = value.encode('ASCII')
- path = path[:i] + value
- i = len(path)
- path += tail
- else:
- i = j
- return path
-
-
-# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B.
-# It should be understood that this may change the meaning of the path
-# if it contains symbolic links!
-
-def normpath(path: AnyStr) -> AnyStr:
- """Normalize path, eliminating double slashes, etc."""
- if isinstance(path, bytes):
- sep = b'/'
- empty = b''
- dot = b'.'
- dotdot = b'..'
- else:
- sep = '/'
- empty = ''
- dot = '.'
- dotdot = '..'
- if path == empty:
- return dot
- initial_slashes = path.startswith(sep) # type: int
- # POSIX allows one or two initial slashes, but treats three or more
- # as single slash.
- if (initial_slashes and
- path.startswith(sep*2) and not path.startswith(sep*3)):
- initial_slashes = 2
- comps = path.split(sep)
- new_comps = [] # type: List[AnyStr]
- for comp in comps:
- if comp in (empty, dot):
- continue
- if (comp != dotdot or (not initial_slashes and not new_comps) or
- (new_comps and new_comps[-1] == dotdot)):
- new_comps.append(comp)
- elif new_comps:
- new_comps.pop()
- comps = new_comps
- path = sep.join(comps)
- if initial_slashes:
- path = sep*initial_slashes + path
- return path or dot
-
-
-def abspath(path: AnyStr) -> AnyStr:
- """Return an absolute path."""
- if not isabs(path):
- if isinstance(path, bytes):
- cwd = os.getcwdb()
- else:
- cwd = os.getcwd()
- path = join(cwd, path)
- return normpath(path)
-
-
-# Return a canonical path (i.e. the absolute location of a file on the
-# filesystem).
-
-def realpath(filename: AnyStr) -> AnyStr:
- """Return the canonical path of the specified filename, eliminating any
-symbolic links encountered in the path."""
- if isinstance(filename, bytes):
- sep = b'/'
- empty = b''
- else:
- sep = '/'
- empty = ''
- if isabs(filename):
- bits = [sep] + filename.split(sep)[1:]
- else:
- bits = [empty] + filename.split(sep)
-
- for i in range(2, len(bits)+1):
- component = join(*bits[0:i])
- # Resolve symbolic links.
- if islink(component):
- resolved = _resolve_link(component)
- if resolved is None:
- # Infinite loop -- return original component + rest of the path
- return abspath(join(*([component] + bits[i:])))
- else:
- newpath = join(*([resolved] + bits[i:]))
- return realpath(newpath)
-
- return abspath(filename)
-
-
-def _resolve_link(path: AnyStr) -> AnyStr:
- """Internal helper function. Takes a path and follows symlinks
- until we either arrive at something that isn't a symlink, or
- encounter a path we've seen before (meaning that there's a loop).
- """
- paths_seen = set() # type: Set[AnyStr]
- while islink(path):
- if path in paths_seen:
- # Already seen this path, so we must have a symlink loop
- return None
- paths_seen.add(path)
- # Resolve where the link points to
- resolved = os.readlink(path)
- if not isabs(resolved):
- dir = dirname(path)
- path = normpath(join(dir, resolved))
- else:
- path = normpath(resolved)
- return path
-
-supports_unicode_filenames = (sys.platform == 'darwin')
-
-def relpath(path: AnyStr, start: AnyStr = None) -> AnyStr:
- """Return a relative version of a path"""
-
- if not path:
- raise ValueError("no path specified")
-
- if isinstance(path, bytes):
- curdir = b'.'
- sep = b'/'
- pardir = b'..'
- else:
- curdir = '.'
- sep = '/'
- pardir = '..'
-
- if start is None:
- start = curdir
-
- start_list = [x for x in abspath(start).split(sep) if x]
- path_list = [x for x in abspath(path).split(sep) if x]
-
- # Work out how much of the filepath is shared by start and path.
- i = len(commonprefix([start_list, path_list]))
-
- rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
- if not rel_list:
- return curdir
- return join(*rel_list)
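
Similarly for the removed posixpath sample: the path handling it documents is pure string manipulation and matches the stdlib posixpath module, so it can be illustrated without touching the filesystem. The paths below are illustrative assumptions; only string-level functions are used, so no os.stat() or working-directory lookup is involved.

import posixpath

# normpath() collapses duplicate separators, '.' components and 'A/foo/../B'.
assert posixpath.normpath('/a//b/./c/../d') == '/a/b/d'

# split() separates head and tail at the final '/', and join() reverses it.
head, tail = posixpath.split('/usr/local/bin/python3')
assert (head, tail) == ('/usr/local/bin', 'python3')
assert posixpath.join(head, tail) == '/usr/local/bin/python3'

# splitext() splits at the last dot of the last pathname component.
assert posixpath.splitext('/tmp/archive.tar.gz') == ('/tmp/archive.tar', '.gz')

# relpath() expresses one absolute path relative to another.
assert posixpath.relpath('/a/b/c/d', '/a/b/x') == '../c/d'
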
diff --git a/test-data/stdlib-samples/3.2/pprint.py b/test-data/stdlib-samples/3.2/pprint.py
deleted file mode 100644
index 650c1a3..0000000
--- a/test-data/stdlib-samples/3.2/pprint.py
+++ /dev/null
@@ -1,380 +0,0 @@
-# Author: Fred L. Drake, Jr.
-# fdrake at acm.org
-#
-# This is a simple little module I wrote to make life easier. I didn't
-# see anything quite like it in the library, though I may have overlooked
-# something. I wrote this when I was trying to read some heavily nested
-# tuples with fairly non-descriptive content. This is modeled very much
-# after Lisp/Scheme - style pretty-printing of lists. If you find it
-# useful, thank small children who sleep at night.
-
-"""Support to pretty-print lists, tuples, & dictionaries recursively.
-
-Very simple, but useful, especially in debugging data structures.
-
-Classes
--------
-
-PrettyPrinter()
- Handle pretty-printing operations onto a stream using a configured
- set of formatting parameters.
-
-Functions
----------
-
-pformat()
- Format a Python object into a pretty-printed representation.
-
-pprint()
- Pretty-print a Python object to a stream [default is sys.stdout].
-
-saferepr()
- Generate a 'standard' repr()-like value, but protect against recursive
- data structures.
-
-"""
-
-import sys as _sys
-from collections import OrderedDict as _OrderedDict
-from io import StringIO as _StringIO
-
-from typing import Any, Tuple, Dict, TextIO, cast, List
-
-__all__ = ["pprint","pformat","isreadable","isrecursive","saferepr",
- "PrettyPrinter"]
-
-# cache these for faster access:
-_commajoin = ", ".join
-_id = id
-_len = len
-_type = type
-
-
-def pprint(object: object, stream: TextIO = None, indent: int = 1,
- width: int = 80, depth: int = None) -> None:
- """Pretty-print a Python object to a stream [default is sys.stdout]."""
- printer = PrettyPrinter(
- stream=stream, indent=indent, width=width, depth=depth)
- printer.pprint(object)
-
-def pformat(object: object, indent: int = 1, width: int = 80,
- depth: int = None) -> str:
- """Format a Python object into a pretty-printed representation."""
- return PrettyPrinter(indent=indent, width=width, depth=depth).pformat(object)
-
-def saferepr(object: object) -> str:
- """Version of repr() which can handle recursive data structures."""
- return _safe_repr(object, {}, None, 0)[0]
-
-def isreadable(object: object) -> bool:
- """Determine if saferepr(object) is readable by eval()."""
- return _safe_repr(object, {}, None, 0)[1]
-
-def isrecursive(object: object) -> bool:
- """Determine if object requires a recursive representation."""
- return _safe_repr(object, {}, None, 0)[2]
-
-class _safe_key:
- """Helper function for key functions when sorting unorderable objects.
-
- The wrapped object will fall back to a Py2.x-style comparison for
- unorderable types (sorting first by comparing the type name and then by
- the obj ids). Does not work recursively, so dict.items() must have
- _safe_key applied to both the key and the value.
-
- """
-
- __slots__ = ['obj']
-
- def __init__(self, obj: Any) -> None:
- self.obj = obj
-
- def __lt__(self, other: Any) -> Any:
- rv = self.obj.__lt__(other.obj) # type: Any
- if rv is NotImplemented:
- rv = (str(type(self.obj)), id(self.obj)) < \
- (str(type(other.obj)), id(other.obj))
- return rv
-
-def _safe_tuple(t: Tuple[Any, Any]) -> Tuple[_safe_key, _safe_key]:
- "Helper function for comparing 2-tuples"
- return _safe_key(t[0]), _safe_key(t[1])
-
-class PrettyPrinter:
- def __init__(self, indent: int = 1, width: int = 80, depth: int = None,
- stream: TextIO = None) -> None:
- """Handle pretty printing operations onto a stream using a set of
- configured parameters.
-
- indent
- Number of spaces to indent for each level of nesting.
-
- width
- Attempted maximum number of columns in the output.
-
- depth
- The maximum depth to print out nested structures.
-
- stream
- The desired output stream. If omitted (or false), the standard
- output stream available at construction will be used.
-
- """
- indent = int(indent)
- width = int(width)
- assert indent >= 0, "indent must be >= 0"
- assert depth is None or depth > 0, "depth must be > 0"
- assert width, "width must be != 0"
- self._depth = depth
- self._indent_per_level = indent
- self._width = width
- if stream is not None:
- self._stream = stream
- else:
- self._stream = _sys.stdout
-
- def pprint(self, object: object) -> None:
- self._format(object, self._stream, 0, 0, {}, 0)
- self._stream.write("\n")
-
- def pformat(self, object: object) -> str:
- sio = _StringIO()
- self._format(object, sio, 0, 0, {}, 0)
- return sio.getvalue()
-
- def isrecursive(self, object: object) -> int:
- return self.format(object, {}, 0, 0)[2]
-
- def isreadable(self, object: object) -> int:
- s, readable, recursive = self.format(object, {}, 0, 0)
- return readable and not recursive
-
- def _format(self, object: object, stream: TextIO, indent: int,
- allowance: int, context: Dict[int, int], level: int) -> None:
- level = level + 1
- objid = _id(object)
- if objid in context:
- stream.write(_recursion(object))
- self._recursive = True
- self._readable = False
- return
- rep = self._repr(object, context, level - 1)
- typ = _type(object)
- sepLines = _len(rep) > (self._width - 1 - indent - allowance)
- write = stream.write
-
- if self._depth and level > self._depth:
- write(rep)
- return
-
- if sepLines:
- r = getattr(typ, "__repr__", None)
- if isinstance(object, dict):
- write('{')
- if self._indent_per_level > 1:
- write((self._indent_per_level - 1) * ' ')
- length = _len(object)
- if length:
- context[objid] = 1
- indent = indent + self._indent_per_level
- if issubclass(typ, _OrderedDict):
- items = list(object.items())
- else:
- items = sorted(object.items(), key=_safe_tuple)
- key, ent = items[0]
- rep = self._repr(key, context, level)
- write(rep)
- write(': ')
- self._format(ent, stream, indent + _len(rep) + 2,
- allowance + 1, context, level)
- if length > 1:
- for key, ent in items[1:]:
- rep = self._repr(key, context, level)
- write(',\n%s%s: ' % (' '*indent, rep))
- self._format(ent, stream, indent + _len(rep) + 2,
- allowance + 1, context, level)
- indent = indent - self._indent_per_level
- del context[objid]
- write('}')
- return
-
- if ((issubclass(typ, list) and r is list.__repr__) or
- (issubclass(typ, tuple) and r is tuple.__repr__) or
- (issubclass(typ, set) and r is set.__repr__) or
- (issubclass(typ, frozenset) and r is frozenset.__repr__)
- ):
- anyobj = cast(Any, object) # TODO Collection?
- length = _len(anyobj)
- if issubclass(typ, list):
- write('[')
- endchar = ']'
- lst = anyobj
- elif issubclass(typ, set):
- if not length:
- write('set()')
- return
- write('{')
- endchar = '}'
- lst = sorted(anyobj, key=_safe_key)
- elif issubclass(typ, frozenset):
- if not length:
- write('frozenset()')
- return
- write('frozenset({')
- endchar = '})'
- lst = sorted(anyobj, key=_safe_key)
- indent += 10
- else:
- write('(')
- endchar = ')'
- lst = list(anyobj)
- if self._indent_per_level > 1:
- write((self._indent_per_level - 1) * ' ')
- if length:
- context[objid] = 1
- indent = indent + self._indent_per_level
- self._format(lst[0], stream, indent, allowance + 1,
- context, level)
- if length > 1:
- for ent in lst[1:]:
- write(',\n' + ' '*indent)
- self._format(ent, stream, indent,
- allowance + 1, context, level)
- indent = indent - self._indent_per_level
- del context[objid]
- if issubclass(typ, tuple) and length == 1:
- write(',')
- write(endchar)
- return
-
- write(rep)
-
- def _repr(self, object: object, context: Dict[int, int],
- level: int) -> str:
- repr, readable, recursive = self.format(object, context.copy(),
- self._depth, level)
- if not readable:
- self._readable = False
- if recursive:
- self._recursive = True
- return repr
-
- def format(self, object: object, context: Dict[int, int],
- maxlevels: int, level: int) -> Tuple[str, int, int]:
- """Format object for a specific context, returning a string
- and flags indicating whether the representation is 'readable'
- and whether the object represents a recursive construct.
- """
- return _safe_repr(object, context, maxlevels, level)
-
-
-# Return triple (repr_string, isreadable, isrecursive).
-
-def _safe_repr(object: object, context: Dict[int, int],
- maxlevels: int, level: int) -> Tuple[str, bool, bool]:
- typ = _type(object)
- if typ is str:
- s = cast(str, object)
- if 'locale' not in _sys.modules:
- return repr(object), True, False
- if "'" in s and '"' not in s:
- closure = '"'
- quotes = {'"': '\\"'}
- else:
- closure = "'"
- quotes = {"'": "\\'"}
- qget = quotes.get
- sio = _StringIO()
- write = sio.write
- for char in s:
- if char.isalpha():
- write(char)
- else:
- write(qget(char, repr(char)[1:-1]))
- return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False
-
- r = getattr(typ, "__repr__", None)
- if issubclass(typ, dict) and r is dict.__repr__:
- if not object:
- return "{}", True, False
- objid = _id(object)
- if maxlevels and level >= maxlevels:
- return "{...}", False, objid in context
- if objid in context:
- return _recursion(object), False, True
- context[objid] = 1
- readable = True
- recursive = False
- components = [] # type: List[str]
- append = components.append
- level += 1
- saferepr = _safe_repr
- items = sorted((cast(dict, object)).items(), key=_safe_tuple)
- for k, v in items:
- krepr, kreadable, krecur = saferepr(k, context, maxlevels, level)
- vrepr, vreadable, vrecur = saferepr(v, context, maxlevels, level)
- append("%s: %s" % (krepr, vrepr))
- readable = readable and kreadable and vreadable
- if krecur or vrecur:
- recursive = True
- del context[objid]
- return "{%s}" % _commajoin(components), readable, recursive
-
- if (issubclass(typ, list) and r is list.__repr__) or \
- (issubclass(typ, tuple) and r is tuple.__repr__):
- anyobj = cast(Any, object) # TODO Sequence?
- if issubclass(typ, list):
- if not object:
- return "[]", True, False
- format = "[%s]"
- elif _len(anyobj) == 1:
- format = "(%s,)"
- else:
- if not object:
- return "()", True, False
- format = "(%s)"
- objid = _id(object)
- if maxlevels and level >= maxlevels:
- return format % "...", False, objid in context
- if objid in context:
- return _recursion(object), False, True
- context[objid] = 1
- readable = True
- recursive = False
- components = []
- append = components.append
- level += 1
- for o in anyobj:
- orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level)
- append(orepr)
- if not oreadable:
- readable = False
- if orecur:
- recursive = True
- del context[objid]
- return format % _commajoin(components), readable, recursive
-
- rep = repr(object)
- return rep, bool(rep and not rep.startswith('<')), False
-
-
-def _recursion(object: object) -> str:
- return ("<Recursion on %s with id=%s>"
- % (_type(object).__name__, _id(object)))
-
-
-def _perfcheck(object: object = None) -> None:
- import time
- if object is None:
- object = [("string", (1, 2), [3, 4], {5: 6, 7: 8})] * 100000
- p = PrettyPrinter()
- t1 = time.time()
- _safe_repr(object, {}, None, 0)
- t2 = time.time()
- p.pformat(object)
- t3 = time.time()
- print("_safe_repr:", t2 - t1)
- print("pformat:", t3 - t2)
-
-if __name__ == "__main__":
- _perfcheck()
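
The removed pprint sample likewise tracks the stdlib pprint module, so the formatting and recursion handling described in its docstrings can be shown with a short sketch. The example data structure is an illustrative assumption; the recursion marker is the format produced by _recursion() above.

import pprint

nested = {'name': 'widget', 'sizes': [1, 2, 3], 'meta': {'tags': ('a', 'b')}}

# pformat() returns the pretty-printed representation as a string; a small
# width forces containers onto multiple lines.
text = pprint.pformat(nested, width=30)
assert 'widget' in text and '\n' in text

# saferepr() is a repr() variant that protects against recursive structures.
cycle = []
cycle.append(cycle)
assert '<Recursion on list' in pprint.saferepr(cycle)

# isrecursive()/isreadable() expose the flags returned alongside the repr.
assert pprint.isrecursive(cycle) and not pprint.isreadable(cycle)
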
diff --git a/test-data/stdlib-samples/3.2/random.py b/test-data/stdlib-samples/3.2/random.py
deleted file mode 100644
index 8ce0a69..0000000
--- a/test-data/stdlib-samples/3.2/random.py
+++ /dev/null
@@ -1,743 +0,0 @@
-"""Random variable generators.
-
- integers
- --------
- uniform within range
-
- sequences
- ---------
- pick random element
- pick random sample
- generate random permutation
-
- distributions on the real line:
- ------------------------------
- uniform
- triangular
- normal (Gaussian)
- lognormal
- negative exponential
- gamma
- beta
- pareto
- Weibull
-
- distributions on the circle (angles 0 to 2pi)
- ---------------------------------------------
- circular uniform
- von Mises
-
-General notes on the underlying Mersenne Twister core generator:
-
-* The period is 2**19937-1.
-* It is one of the most extensively tested generators in existence.
-* The random() method is implemented in C, executes in a single Python step,
- and is, therefore, threadsafe.
-
-"""
-
-from warnings import warn as _warn
-from types import MethodType as _MethodType, BuiltinMethodType as _BuiltinMethodType
-from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil
-from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin
-from os import urandom as _urandom
-from collections import Set as _Set, Sequence as _Sequence
-from hashlib import sha512 as _sha512
-
-from typing import (
- Any, TypeVar, Iterable, Sequence, List, Callable, Set, cast, SupportsInt, Union
-)
-
-__all__ = ["Random","seed","random","uniform","randint","choice","sample",
- "randrange","shuffle","normalvariate","lognormvariate",
- "expovariate","vonmisesvariate","gammavariate","triangular",
- "gauss","betavariate","paretovariate","weibullvariate",
- "getstate","setstate", "getrandbits",
- "SystemRandom"]
-
-NV_MAGICCONST = 4 * _exp(-0.5)/_sqrt(2.0)
-TWOPI = 2.0*_pi
-LOG4 = _log(4.0)
-SG_MAGICCONST = 1.0 + _log(4.5)
-BPF = 53 # Number of bits in a float
-RECIP_BPF = 2**-BPF # type: float
-
-
-# Translated by Guido van Rossum from C source provided by
-# Adrian Baddeley. Adapted by Raymond Hettinger for use with
-# the Mersenne Twister and os.urandom() core generators.
-
-import _random
-
-T = TypeVar('T')
-
-class Random(_random.Random):
- """Random number generator base class used by bound module functions.
-
- Used to instantiate instances of Random to get generators that don't
- share state.
-
- Class Random can also be subclassed if you want to use a different basic
- generator of your own devising: in that case, override the following
- methods: random(), seed(), getstate(), and setstate().
- Optionally, implement a getrandbits() method so that randrange()
- can cover arbitrarily large ranges.
-
- """
-
- VERSION = 3 # used by getstate/setstate
- gauss_next = 0.0
-
- def __init__(self, x: object = None) -> None:
- """Initialize an instance.
-
- Optional argument x controls seeding, as for Random.seed().
- """
-
- self.seed(x)
- self.gauss_next = None
-
- def seed(self, a: Any = None, version: int = 2) -> None:
- """Initialize internal state from hashable object.
-
- None or no argument seeds from current time or from an operating
- system specific randomness source if available.
-
- For version 2 (the default), all of the bits are used if *a* is a str,
- bytes, or bytearray. For version 1, the hash() of *a* is used instead.
-
- If *a* is an int, all bits are used.
-
- """
-
- if a is None:
- try:
- a = int.from_bytes(_urandom(32), 'big')
- except NotImplementedError:
- import time
- a = int(time.time() * 256) # use fractional seconds
-
- if version == 2:
- if isinstance(a, (str, bytes, bytearray)):
- if isinstance(a, str):
- a = a.encode()
- a += _sha512(a).digest()
- a = int.from_bytes(a, 'big')
-
- super().seed(a)
- self.gauss_next = None
-
- def getstate(self) -> tuple:
- """Return internal state; can be passed to setstate() later."""
- return self.VERSION, super().getstate(), self.gauss_next
-
- def setstate(self, state: tuple) -> None:
- """Restore internal state from object returned by getstate()."""
- version = state[0]
- if version == 3:
- version, internalstate, self.gauss_next = state
- super().setstate(internalstate)
- elif version == 2:
- version, internalstate, self.gauss_next = state
- # In version 2, the state was saved as signed ints, which causes
- # inconsistencies between 32/64-bit systems. The state is
- # really unsigned 32-bit ints, so we convert negative ints from
- # version 2 to positive longs for version 3.
- try:
- internalstate = tuple(x % (2**32) for x in internalstate)
- except ValueError as e:
- raise TypeError()
- super().setstate(internalstate)
- else:
- raise ValueError("state with version %s passed to "
- "Random.setstate() of version %s" %
- (version, self.VERSION))
-
-## ---- Methods below this point do not need to be overridden when
-## ---- subclassing for the purpose of using a different core generator.
-
-## -------------------- pickle support -------------------
-
- def __getstate__(self) -> object: # for pickle
- return self.getstate()
-
- def __setstate__(self, state: Any) -> None: # for pickle
- self.setstate(state)
-
- def __reduce__(self) -> object:
- return self.__class__, (), self.getstate()
-
-## -------------------- integer methods -------------------
-
- def randrange(self, start: SupportsInt, stop: SupportsInt = None,
- step: int = 1, int: Callable[[SupportsInt],
- int] = int) -> int:
- """Choose a random item from range(start, stop[, step]).
-
- This fixes the problem with randint() which includes the
- endpoint; in Python this is usually not what you want.
-
- Do not supply the 'int' argument.
- """
-
- # This code is a bit messy to make it fast for the
- # common case while still doing adequate error checking.
- istart = int(start)
- if istart != start:
- raise ValueError("non-integer arg 1 for randrange()")
- if stop is None:
- if istart > 0:
- return self._randbelow(istart)
- raise ValueError("empty range for randrange()")
-
- # stop argument supplied.
- istop = int(stop)
- if istop != stop:
- raise ValueError("non-integer stop for randrange()")
- width = istop - istart
- if step == 1 and width > 0:
- return istart + self._randbelow(width)
- if step == 1:
- raise ValueError("empty range for randrange() (%d,%d, %d)" % (istart, istop, width))
-
- # Non-unit step argument supplied.
- istep = int(step)
- if istep != step:
- raise ValueError("non-integer step for randrange()")
- if istep > 0:
- n = (width + istep - 1) // istep
- elif istep < 0:
- n = (width + istep + 1) // istep
- else:
- raise ValueError("zero step for randrange()")
-
- if n <= 0:
- raise ValueError("empty range for randrange()")
-
- return istart + istep*self._randbelow(n)
-
- def randint(self, a: int, b: int) -> int:
- """Return random integer in range [a, b], including both end points.
- """
-
- return self.randrange(a, b+1)
-
- def _randbelow(self, n: int, int: Callable[[float], int] = int,
- maxsize: int = 1<<BPF,
- type: Callable[[object], type] = type,
- Method: type = _MethodType,
- BuiltinMethod: type = _BuiltinMethodType) -> int:
- "Return a random int in the range [0,n). Raises ValueError if n==0."
-
- getrandbits = self.getrandbits
- # Only call self.getrandbits if the original random() builtin method
- # has not been overridden or if a new getrandbits() was supplied.
- if type(self.random) is BuiltinMethod or type(getrandbits) is Method:
- k = n.bit_length() # don't use (n-1) here because n can be 1
- r = getrandbits(k) # 0 <= r < 2**k
- while r >= n:
- r = getrandbits(k)
- return r
- # There's an overridden random() method but no new getrandbits() method,
- # so we can only use random() from here.
- random = self.random
- if n >= maxsize:
- _warn("Underlying random() generator does not supply \n"
- "enough bits to choose from a population range this large.\n"
- "To remove the range limitation, add a getrandbits() method.")
- return int(random() * n)
- rem = maxsize % n
- limit = (maxsize - rem) / maxsize # int(limit * maxsize) % n == 0
- s = random()
- while s >= limit:
- s = random()
- return int(s*maxsize) % n
-
-## -------------------- sequence methods -------------------
-
- def choice(self, seq: Sequence[T]) -> T:
- """Choose a random element from a non-empty sequence."""
- try:
- i = self._randbelow(len(seq))
- except ValueError:
- raise IndexError('Cannot choose from an empty sequence')
- return seq[i]
-
- def shuffle(self, x: List[T],
- random: Callable[[], float] = None,
- int: Callable[[float], int] = int) -> None:
- """x, random=random.random -> shuffle list x in place; return None.
-
- Optional arg random is a 0-argument function returning a random
- float in [0.0, 1.0); by default, the standard random.random.
- """
-
- randbelow = self._randbelow
- for i in reversed(range(1, len(x))):
- # pick an element in x[:i+1] with which to exchange x[i]
- j = randbelow(i+1) if random is None else int(random() * (i+1))
- x[i], x[j] = x[j], x[i]
-
- def sample(self, population: Union[_Set[T], _Sequence[T]], k: int) -> List[T]:
- """Chooses k unique random elements from a population sequence or set.
-
- Returns a new list containing elements from the population while
- leaving the original population unchanged. The resulting list is
- in selection order so that all sub-slices will also be valid random
- samples. This allows raffle winners (the sample) to be partitioned
- into grand prize and second place winners (the subslices).
-
- Members of the population need not be hashable or unique. If the
- population contains repeats, then each occurrence is a possible
- selection in the sample.
-
- To choose a sample in a range of integers, use range as an argument.
- This is especially fast and space efficient for sampling from a
- large population: sample(range(10000000), 60)
- """
-
- # Sampling without replacement entails tracking either potential
- # selections (the pool) in a list or previous selections in a set.
-
- # When the number of selections is small compared to the
- # population, then tracking selections is efficient, requiring
- # only a small set and an occasional reselection. For
- # a larger number of selections, the pool tracking method is
- # preferred since the list takes less space than the
- # set and it doesn't suffer from frequent reselections.
-
- if isinstance(population, _Set):
- population = list(population)
- if not isinstance(population, _Sequence):
- raise TypeError("Population must be a sequence or set. For dicts, use list(d).")
- randbelow = self._randbelow
- n = len(population)
- if not (0 <= k and k <= n):
- raise ValueError("Sample larger than population")
- result = [cast(T, None)] * k
- setsize = 21 # size of a small set minus size of an empty list
- if k > 5:
- setsize += 4 ** _ceil(_log(k * 3, 4)) # table size for big sets
- if n <= setsize:
- # An n-length list is smaller than a k-length set
- pool = list(population)
- for i in range(k): # invariant: non-selected at [0,n-i)
- j = randbelow(n-i)
- result[i] = pool[j]
- pool[j] = pool[n-i-1] # move non-selected item into vacancy
- else:
- selected = set() # type: Set[int]
- selected_add = selected.add
- for i in range(k):
- j = randbelow(n)
- while j in selected:
- j = randbelow(n)
- selected_add(j)
- result[i] = population[j]
- return result
-
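To make the sub-slice property described in the sample() docstring above concrete, here is a small illustrative run against the standard random module (which this test-data sample mirrors); the raffle variable names are invented for the example:

    import random

    population = list(range(100))
    winners = random.sample(population, 5)   # returned in selection order
    grand_prize = winners[0]                 # any prefix is itself a valid sample
    runners_up = winners[1:3]
    print(grand_prize, runners_up)
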
-## -------------------- real-valued distributions -------------------
-
-## -------------------- uniform distribution -------------------
-
- def uniform(self, a: float, b: float) -> float:
- "Get a random number in the range [a, b) or [a, b] depending on rounding."
- return a + (b-a) * self.random()
-
-## -------------------- triangular --------------------
-
- def triangular(self, low: float = 0.0, high: float = 1.0,
- mode: float = None) -> float:
- """Triangular distribution.
-
- Continuous distribution bounded by given lower and upper limits,
- and having a given mode value in-between.
-
- http://en.wikipedia.org/wiki/Triangular_distribution
-
- """
- u = self.random()
- c = 0.5 if mode is None else (mode - low) / (high - low)
- if u > c:
- u = 1.0 - u
- c = 1.0 - c
- low, high = high, low
- return low + (high - low) * (u * c) ** 0.5
-
-## -------------------- normal distribution --------------------
-
- def normalvariate(self, mu: float, sigma: float) -> float:
- """Normal distribution.
-
- mu is the mean, and sigma is the standard deviation.
-
- """
- # mu = mean, sigma = standard deviation
-
- # Uses Kinderman and Monahan method. Reference: Kinderman,
- # A.J. and Monahan, J.F., "Computer generation of random
- # variables using the ratio of uniform deviates", ACM Trans
- # Math Software, 3, (1977), pp257-260.
-
- random = self.random
- while 1:
- u1 = random()
- u2 = 1.0 - random()
- z = NV_MAGICCONST*(u1-0.5)/u2
- zz = z*z/4.0
- if zz <= -_log(u2):
- break
- return mu + z*sigma
-
-## -------------------- lognormal distribution --------------------
-
- def lognormvariate(self, mu: float, sigma: float) -> float:
- """Log normal distribution.
-
- If you take the natural logarithm of this distribution, you'll get a
- normal distribution with mean mu and standard deviation sigma.
- mu can have any value, and sigma must be greater than zero.
-
- """
- return _exp(self.normalvariate(mu, sigma))
-
-## -------------------- exponential distribution --------------------
-
- def expovariate(self, lambd: float) -> float:
- """Exponential distribution.
-
- lambd is 1.0 divided by the desired mean. It should be
- nonzero. (The parameter would be called "lambda", but that is
- a reserved word in Python.) Returned values range from 0 to
- positive infinity if lambd is positive, and from negative
- infinity to 0 if lambd is negative.
-
- """
- # lambd: rate lambd = 1/mean
- # ('lambda' is a Python reserved word)
-
- # we use 1-random() instead of random() to preclude the
- # possibility of taking the log of zero.
- return -_log(1.0 - self.random())/lambd
-
-## -------------------- von Mises distribution --------------------
-
- def vonmisesvariate(self, mu: float, kappa: float) -> float:
- """Circular data distribution.
-
- mu is the mean angle, expressed in radians between 0 and 2*pi, and
- kappa is the concentration parameter, which must be greater than or
- equal to zero. If kappa is equal to zero, this distribution reduces
- to a uniform random angle over the range 0 to 2*pi.
-
- """
- # mu: mean angle (in radians between 0 and 2*pi)
- # kappa: concentration parameter kappa (>= 0)
- # if kappa = 0 generate uniform random angle
-
- # Based upon an algorithm published in: Fisher, N.I.,
- # "Statistical Analysis of Circular Data", Cambridge
- # University Press, 1993.
-
- # Thanks to Magnus Kessler for a correction to the
- # implementation of step 4.
-
- random = self.random
- if kappa <= 1e-6:
- return TWOPI * random()
-
- a = 1.0 + _sqrt(1.0 + 4.0 * kappa * kappa)
- b = (a - _sqrt(2.0 * a))/(2.0 * kappa)
- r = (1.0 + b * b)/(2.0 * b)
-
- while 1:
- u1 = random()
-
- z = _cos(_pi * u1)
- f = (1.0 + r * z)/(r + z)
- c = kappa * (r - f)
-
- u2 = random()
-
- if u2 < c * (2.0 - c) or u2 <= c * _exp(1.0 - c):
- break
-
- u3 = random()
- if u3 > 0.5:
- theta = (mu % TWOPI) + _acos(f)
- else:
- theta = (mu % TWOPI) - _acos(f)
-
- return theta
-
-## -------------------- gamma distribution --------------------
-
- def gammavariate(self, alpha: float, beta: float) -> float:
- """Gamma distribution. Not the gamma function!
-
- Conditions on the parameters are alpha > 0 and beta > 0.
-
- The probability distribution function is:
-
- x ** (alpha - 1) * math.exp(-x / beta)
- pdf(x) = --------------------------------------
- math.gamma(alpha) * beta ** alpha
-
- """
-
- # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2
-
- # Warning: a few older sources define the gamma distribution in terms
- # of alpha > -1.0
- if alpha <= 0.0 or beta <= 0.0:
- raise ValueError('gammavariate: alpha and beta must be > 0.0')
-
- random = self.random
- if alpha > 1.0:
-
- # Uses R.C.H. Cheng, "The generation of Gamma
- # variables with non-integral shape parameters",
- # Applied Statistics, (1977), 26, No. 1, p71-74
-
- ainv = _sqrt(2.0 * alpha - 1.0)
- bbb = alpha - LOG4
- ccc = alpha + ainv
-
- while 1:
- u1 = random()
- if not (1e-7 < u1 and u1 < .9999999):
- continue
- u2 = 1.0 - random()
- v = _log(u1/(1.0-u1))/ainv
- x = alpha*_exp(v)
- z = u1*u1*u2
- r = bbb+ccc*v-x
- if r + SG_MAGICCONST - 4.5*z >= 0.0 or r >= _log(z):
- return x * beta
-
- elif alpha == 1.0:
- # expovariate(1)
- u = random()
- while u <= 1e-7:
- u = random()
- return -_log(u) * beta
-
- else: # alpha is between 0 and 1 (exclusive)
-
- # Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle
-
- while 1:
- u = random()
- b = (_e + alpha)/_e
- p = b*u
- if p <= 1.0:
- x = p ** (1.0/alpha)
- else:
- x = -_log((b-p)/alpha)
- u1 = random()
- if p > 1.0:
- if u1 <= x ** (alpha - 1.0):
- break
- elif u1 <= _exp(-x):
- break
- return x * beta
-
-## -------------------- Gauss (faster alternative) --------------------
-
- def gauss(self, mu: float, sigma: float) -> float:
- """Gaussian distribution.
-
- mu is the mean, and sigma is the standard deviation. This is
- slightly faster than the normalvariate() function.
-
- Not thread-safe without a lock around calls.
-
- """
-
- # When x and y are two variables from [0, 1), uniformly
- # distributed, then
- #
- # cos(2*pi*x)*sqrt(-2*log(1-y))
- # sin(2*pi*x)*sqrt(-2*log(1-y))
- #
- # are two *independent* variables with normal distribution
- # (mu = 0, sigma = 1).
- # (Lambert Meertens)
- # (corrected version; bug discovered by Mike Miller, fixed by LM)
-
- # Multithreading note: When two threads call this function
- # simultaneously, it is possible that they will receive the
- # same return value. The window is very small though. To
- # avoid this, you have to use a lock around all calls. (I
- # didn't want to slow this down in the serial case by using a
- # lock here.)
-
- random = self.random
- z = self.gauss_next
- self.gauss_next = None
- if z is None:
- x2pi = random() * TWOPI
- g2rad = _sqrt(-2.0 * _log(1.0 - random()))
- z = _cos(x2pi) * g2rad
- self.gauss_next = _sin(x2pi) * g2rad
-
- return mu + z*sigma
-
-## -------------------- beta --------------------
-## See
-## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html
-## for Ivan Frohne's insightful analysis of why the original implementation:
-##
-## def betavariate(self, alpha, beta):
-## # Discrete Event Simulation in C, pp 87-88.
-##
-## y = self.expovariate(alpha)
-## z = self.expovariate(1.0/beta)
-## return z/(y+z)
-##
-## was dead wrong, and how it probably got that way.
-
- def betavariate(self, alpha: float, beta: float) -> 'float':
- """Beta distribution.
-
- Conditions on the parameters are alpha > 0 and beta > 0.
- Returned values range between 0 and 1.
-
- """
-
- # This version due to Janne Sinkkonen, and matches all the std
- # texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution").
- y = self.gammavariate(alpha, 1.)
- if y == 0:
- return 0.0
- else:
- return y / (y + self.gammavariate(beta, 1.))
-
-## -------------------- Pareto --------------------
-
- def paretovariate(self, alpha: float) -> float:
- """Pareto distribution. alpha is the shape parameter."""
- # Jain, pg. 495
-
- u = 1.0 - self.random()
- return 1.0 / u ** (1.0/alpha)
-
-## -------------------- Weibull --------------------
-
- def weibullvariate(self, alpha: float, beta: float) -> float:
- """Weibull distribution.
-
- alpha is the scale parameter and beta is the shape parameter.
-
- """
- # Jain, pg. 499; bug fix courtesy Bill Arms
-
- u = 1.0 - self.random()
- return alpha * (-_log(u)) ** (1.0/beta)
-
-## --------------- Operating System Random Source ------------------
-
-class SystemRandom(Random):
- """Alternate random number generator using sources provided
- by the operating system (such as /dev/urandom on Unix or
- CryptGenRandom on Windows).
-
- Not available on all systems (see os.urandom() for details).
- """
-
- def random(self) -> float:
- """Get the next random number in the range [0.0, 1.0)."""
- return (int.from_bytes(_urandom(7), 'big') >> 3) * RECIP_BPF
-
- def getrandbits(self, k: int) -> int:
- """getrandbits(k) -> x. Generates a long int with k random bits."""
- if k <= 0:
- raise ValueError('number of bits must be greater than zero')
- if k != int(k):
- raise TypeError('number of bits should be an integer')
- numbytes = (k + 7) // 8 # bits / 8 and rounded up
- x = int.from_bytes(_urandom(numbytes), 'big')
- return x >> (numbytes * 8 - k) # trim excess bits
-
- def seed(self, a: object = None, version: int = None) -> None:
- "Stub method. Not used for a system random number generator."
- return
-
- def _notimplemented(self, *args: Any, **kwds: Any) -> Any:
- "Method should not be called for a system random number generator."
- raise NotImplementedError('System entropy source does not have state.')
- getstate = setstate = _notimplemented
-
-# Create one instance, seeded from current time, and export its methods
-# as module-level functions. The functions share state across all uses
-# (both in the user's code and in the Python libraries), but that's fine
-# for most programs and is easier for the casual user than making them
-# instantiate their own Random() instance.
-
-_inst = Random()
-seed = _inst.seed
-random = _inst.random
-uniform = _inst.uniform
-triangular = _inst.triangular
-randint = _inst.randint
-choice = _inst.choice
-randrange = _inst.randrange
-sample = _inst.sample
-shuffle = _inst.shuffle
-normalvariate = _inst.normalvariate
-lognormvariate = _inst.lognormvariate
-expovariate = _inst.expovariate
-vonmisesvariate = _inst.vonmisesvariate
-gammavariate = _inst.gammavariate
-gauss = _inst.gauss
-betavariate = _inst.betavariate
-paretovariate = _inst.paretovariate
-weibullvariate = _inst.weibullvariate
-getstate = _inst.getstate
-setstate = _inst.setstate
-getrandbits = _inst.getrandbits
-
-## -------------------- test program --------------------
-
-def _test_generator(n: int, func: Any, args: tuple) -> None:
- import time
- print(n, 'times', func.__name__)
- total = 0.0
- sqsum = 0.0
- smallest = 1e10
- largest = -1e10
- t0 = time.time()
- for i in range(n):
- x = func(*args) # type: float
- total += x
- sqsum = sqsum + x*x
- smallest = min(x, smallest)
- largest = max(x, largest)
- t1 = time.time()
- print(round(t1-t0, 3), 'sec,', end=' ')
- avg = total/n
- stddev = _sqrt(sqsum/n - avg*avg)
- print('avg %g, stddev %g, min %g, max %g' % \
- (avg, stddev, smallest, largest))
-
-
-def _test(N: int = 2000) -> None:
- _test_generator(N, random, ())
- _test_generator(N, normalvariate, (0.0, 1.0))
- _test_generator(N, lognormvariate, (0.0, 1.0))
- _test_generator(N, vonmisesvariate, (0.0, 1.0))
- _test_generator(N, gammavariate, (0.01, 1.0))
- _test_generator(N, gammavariate, (0.1, 1.0))
- _test_generator(N, gammavariate, (0.1, 2.0))
- _test_generator(N, gammavariate, (0.5, 1.0))
- _test_generator(N, gammavariate, (0.9, 1.0))
- _test_generator(N, gammavariate, (1.0, 1.0))
- _test_generator(N, gammavariate, (2.0, 1.0))
- _test_generator(N, gammavariate, (20.0, 1.0))
- _test_generator(N, gammavariate, (200.0, 1.0))
- _test_generator(N, gauss, (0.0, 1.0))
- _test_generator(N, betavariate, (3.0, 3.0))
- _test_generator(N, triangular, (0.0, 1.0, 1.0/3.0))
-
-if __name__ == '__main__':
- _test()
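
For orientation, a short illustrative session against the module-level API exported above, run here with the standard library's random module (which this test-data sample tracks); the seed value is arbitrary:

    import random

    random.seed(12345)                      # reproducible sequence
    print(random.randrange(0, 100, 5))      # a multiple of 5 in [0, 100)
    print(random.choice(['a', 'b', 'c']))
    print(random.gauss(0.0, 1.0))           # mean 0.0, standard deviation 1.0

    crypto = random.SystemRandom()          # OS entropy; seed() has no effect
    print(crypto.random())
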
diff --git a/test-data/stdlib-samples/3.2/shutil.py b/test-data/stdlib-samples/3.2/shutil.py
deleted file mode 100644
index 7204a4d..0000000
--- a/test-data/stdlib-samples/3.2/shutil.py
+++ /dev/null
@@ -1,790 +0,0 @@
-"""Utility functions for copying and archiving files and directory trees.
-
-XXX The functions here don't copy the resource fork or other metadata on Mac.
-
-"""
-
-import os
-import sys
-import stat
-from os.path import abspath
-import fnmatch
-import collections
-import errno
-import tarfile
-import builtins
-
-from typing import (
- Any, AnyStr, IO, List, Iterable, Callable, Tuple, Dict, Sequence, cast
-)
-from types import TracebackType
-
-try:
- import bz2
- _BZ2_SUPPORTED = True
-except ImportError:
- _BZ2_SUPPORTED = False
-
-try:
- from pwd import getpwnam as _getpwnam
- getpwnam = _getpwnam
-except ImportError:
- getpwnam = None
-
-try:
- from grp import getgrnam as _getgrnam
- getgrnam = _getgrnam
-except ImportError:
- getgrnam = None
-
-__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
- "copytree", "move", "rmtree", "Error", "SpecialFileError",
- "ExecError", "make_archive", "get_archive_formats",
- "register_archive_format", "unregister_archive_format",
- "get_unpack_formats", "register_unpack_format",
- "unregister_unpack_format", "unpack_archive", "ignore_patterns"]
-
-class Error(EnvironmentError):
- pass
-
-class SpecialFileError(EnvironmentError):
- """Raised when trying to do a kind of operation (e.g. copying) which is
- not supported on a special file (e.g. a named pipe)"""
-
-class ExecError(EnvironmentError):
- """Raised when a command could not be executed"""
-
-class ReadError(EnvironmentError):
- """Raised when an archive cannot be read"""
-
-class RegistryError(Exception):
- """Raised when a registry operation with the archiving
- and unpacking registries fails"""
-
-
-try:
- _WindowsError = WindowsError # type: type
-except NameError:
- _WindowsError = None
-
-
-# Function aliases to be patched in test cases
-rename = os.rename
-open = builtins.open
-
-
-def copyfileobj(fsrc: IO[AnyStr], fdst: IO[AnyStr],
- length: int = 16*1024) -> None:
- """copy data from file-like object fsrc to file-like object fdst"""
- while 1:
- buf = fsrc.read(length)
- if not buf:
- break
- fdst.write(buf)
-
-def _samefile(src: str, dst: str) -> bool:
- # Macintosh, Unix.
- if hasattr(os.path, 'samefile'):
- try:
- return os.path.samefile(src, dst)
- except OSError:
- return False
-
- # All other platforms: check for same pathname.
- return (os.path.normcase(os.path.abspath(src)) ==
- os.path.normcase(os.path.abspath(dst)))
-
-def copyfile(src: str, dst: str) -> None:
- """Copy data from src to dst"""
- if _samefile(src, dst):
- raise Error("`%s` and `%s` are the same file" % (src, dst))
-
- for fn in [src, dst]:
- try:
- st = os.stat(fn)
- except OSError:
- # File most likely does not exist
- pass
- else:
- # XXX What about other special files? (sockets, devices...)
- if stat.S_ISFIFO(st.st_mode):
- raise SpecialFileError("`%s` is a named pipe" % fn)
-
- with open(src, 'rb') as fsrc:
- with open(dst, 'wb') as fdst:
- copyfileobj(fsrc, fdst)
-
-def copymode(src: str, dst: str) -> None:
- """Copy mode bits from src to dst"""
- if hasattr(os, 'chmod'):
- st = os.stat(src)
- mode = stat.S_IMODE(st.st_mode)
- os.chmod(dst, mode)
-
-def copystat(src: str, dst: str) -> None:
- """Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
- st = os.stat(src)
- mode = stat.S_IMODE(st.st_mode)
- if hasattr(os, 'utime'):
- os.utime(dst, (st.st_atime, st.st_mtime))
- if hasattr(os, 'chmod'):
- os.chmod(dst, mode)
- if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
- try:
- os.chflags(dst, st.st_flags)
- except OSError as why:
- if (not hasattr(errno, 'EOPNOTSUPP') or
- why.errno != errno.EOPNOTSUPP):
- raise
-
-def copy(src: str, dst: str) -> None:
- """Copy data and mode bits ("cp src dst").
-
- The destination may be a directory.
-
- """
- if os.path.isdir(dst):
- dst = os.path.join(dst, os.path.basename(src))
- copyfile(src, dst)
- copymode(src, dst)
-
-def copy2(src: str, dst: str) -> None:
- """Copy data and all stat info ("cp -p src dst").
-
- The destination may be a directory.
-
- """
- if os.path.isdir(dst):
- dst = os.path.join(dst, os.path.basename(src))
- copyfile(src, dst)
- copystat(src, dst)
-
-def ignore_patterns(*patterns: str) -> Callable[[str, List[str]],
- Iterable[str]]:
- """Function that can be used as copytree() ignore parameter.
-
- Patterns is a sequence of glob-style patterns
- that are used to exclude files"""
- def _ignore_patterns(path: str, names: List[str]) -> Iterable[str]:
- ignored_names = [] # type: List[str]
- for pattern in patterns:
- ignored_names.extend(fnmatch.filter(names, pattern))
- return set(ignored_names)
- return _ignore_patterns
-
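A typical illustrative pairing of ignore_patterns() with the copytree() defined just below, shown against the standard library's shutil; the source and destination paths are placeholders:

    import shutil

    # Copy a tree while skipping compiled files and anything starting with "tmp".
    shutil.copytree('project/src', 'project/backup',
                    ignore=shutil.ignore_patterns('*.pyc', 'tmp*'))
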
-def copytree(src: str, dst: str, symlinks: bool = False,
- ignore: Callable[[str, List[str]], Iterable[str]] = None,
- copy_function: Callable[[str, str], None] = copy2,
- ignore_dangling_symlinks: bool = False) -> None:
- """Recursively copy a directory tree.
-
- The destination directory must not already exist.
- If exception(s) occur, an Error is raised with a list of reasons.
-
- If the optional symlinks flag is true, symbolic links in the
- source tree result in symbolic links in the destination tree; if
- it is false, the contents of the files pointed to by symbolic
- links are copied. If the file pointed to by the symlink doesn't
- exist, an exception will be added in the list of errors raised in
- an Error exception at the end of the copy process.
-
- You can set the optional ignore_dangling_symlinks flag to true if you
- want to silence this exception. Notice that this has no effect on
- platforms that don't support os.symlink.
-
- The optional ignore argument is a callable. If given, it
- is called with the `src` parameter, which is the directory
- being visited by copytree(), and `names` which is the list of
- `src` contents, as returned by os.listdir():
-
- callable(src, names) -> ignored_names
-
- Since copytree() is called recursively, the callable will be
- called once for each directory that is copied. It returns a
- list of names relative to the `src` directory that should
- not be copied.
-
- The optional copy_function argument is a callable that will be used
- to copy each file. It will be called with the source path and the
- destination path as arguments. By default, copy2() is used, but any
- function that supports the same signature (like copy()) can be used.
-
- """
- names = os.listdir(src)
- if ignore is not None:
- ignored_names = ignore(src, names)
- else:
- ignored_names = set()
-
- os.makedirs(dst)
- errors = [] # type: List[Tuple[str, str, str]]
- for name in names:
- if name in ignored_names:
- continue
- srcname = os.path.join(src, name)
- dstname = os.path.join(dst, name)
- try:
- if os.path.islink(srcname):
- linkto = os.readlink(srcname)
- if symlinks:
- os.symlink(linkto, dstname)
- else:
- # ignore dangling symlink if the flag is on
- if not os.path.exists(linkto) and ignore_dangling_symlinks:
- continue
- # otherwise let the copy occur; copy2 will raise an error
- copy_function(srcname, dstname)
- elif os.path.isdir(srcname):
- copytree(srcname, dstname, symlinks, ignore, copy_function)
- else:
- # Will raise a SpecialFileError for unsupported file types
- copy_function(srcname, dstname)
- # catch the Error from the recursive copytree so that we can
- # continue with other files
- except Error as err:
- errors.extend(err.args[0])
- except EnvironmentError as why:
- errors.append((srcname, dstname, str(why)))
- try:
- copystat(src, dst)
- except OSError as why:
- if _WindowsError is not None and isinstance(why, _WindowsError):
- # Copying file access times may fail on Windows
- pass
- else:
- errors.append((src, dst, str(why)))
- if errors:
- raise Error(errors)
-
-def rmtree(path: str, ignore_errors: bool = False,
- onerror: Callable[[Any, str, Tuple[type, BaseException, TracebackType]],
- None] = None) -> None:
- """Recursively delete a directory tree.
-
- If ignore_errors is set, errors are ignored; otherwise, if onerror
- is set, it is called to handle the error with arguments (func,
- path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
- path is the argument to that function that caused it to fail; and
- exc_info is a tuple returned by sys.exc_info(). If ignore_errors
- is false and onerror is None, an exception is raised.
-
- """
- if ignore_errors:
- def _onerror(x: Any, y: str,
- z: Tuple[type, BaseException, TracebackType]) -> None:
- pass
- onerror = _onerror
- elif onerror is None:
- def __onerror(x: Any, y: str,
- z: Tuple[type, BaseException, TracebackType]) -> None:
- raise
- onerror = __onerror
- try:
- if os.path.islink(path):
- # symlinks to directories are forbidden, see bug #1669
- raise OSError("Cannot call rmtree on a symbolic link")
- except OSError:
- onerror(os.path.islink, path, sys.exc_info())
- # can't continue even if onerror hook returns
- return
- names = [] # type: List[str]
- try:
- names = os.listdir(path)
- except os.error as err:
- onerror(os.listdir, path, sys.exc_info())
- for name in names:
- fullname = os.path.join(path, name)
- try:
- mode = os.lstat(fullname).st_mode
- except os.error:
- mode = 0
- if stat.S_ISDIR(mode):
- rmtree(fullname, ignore_errors, onerror)
- else:
- try:
- os.remove(fullname)
- except os.error as err:
- onerror(os.remove, fullname, sys.exc_info())
- try:
- os.rmdir(path)
- except os.error:
- onerror(os.rmdir, path, sys.exc_info())
-
-
-def _basename(path: str) -> str:
- # A basename() variant which first strips the trailing slash, if present.
- # Thus we always get the last component of the path, even for directories.
- return os.path.basename(path.rstrip(os.path.sep))
-
-def move(src: str, dst: str) -> None:
- """Recursively move a file or directory to another location. This is
- similar to the Unix "mv" command.
-
- If the destination is a directory or a symlink to a directory, the source
- is moved inside the directory. The destination path must not already
- exist.
-
- If the destination already exists but is not a directory, it may be
- overwritten depending on os.rename() semantics.
-
- If the destination is on our current filesystem, then rename() is used.
- Otherwise, src is copied to the destination and then removed.
- A lot more could be done here... A look at mv.c shows a lot of
- the issues this implementation glosses over.
-
- """
- real_dst = dst
- if os.path.isdir(dst):
- if _samefile(src, dst):
- # We might be on a case insensitive filesystem,
- # perform the rename anyway.
- os.rename(src, dst)
- return
-
- real_dst = os.path.join(dst, _basename(src))
- if os.path.exists(real_dst):
- raise Error("Destination path '%s' already exists" % real_dst)
- try:
- os.rename(src, real_dst)
- except OSError as exc:
- if os.path.isdir(src):
- if _destinsrc(src, dst):
- raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
- copytree(src, real_dst, symlinks=True)
- rmtree(src)
- else:
- copy2(src, real_dst)
- os.unlink(src)
-
-def _destinsrc(src: str, dst: str) -> bool:
- src = abspath(src)
- dst = abspath(dst)
- if not src.endswith(os.path.sep):
- src += os.path.sep
- if not dst.endswith(os.path.sep):
- dst += os.path.sep
- return dst.startswith(src)
-
-def _get_gid(name: str) -> int:
- """Returns a gid, given a group name."""
- if getgrnam is None or name is None:
- return None
- try:
- result = getgrnam(name)
- except KeyError:
- result = None
- if result is not None:
- return result.gr_gid
- return None
-
-def _get_uid(name: str) -> int:
- """Returns a uid, given a user name."""
- if getpwnam is None or name is None:
- return None
- try:
- result = getpwnam(name)
- except KeyError:
- result = None
- if result is not None:
- return result.pw_uid
- return None
-
-def _make_tarball(base_name: str, base_dir: str, compress: str = "gzip",
- verbose: bool = False, dry_run: bool = False,
- owner: str = None, group: str = None,
- logger: Any = None) -> str:
- """Create a (possibly compressed) tar file from all the files under
- 'base_dir'.
-
- 'compress' must be "gzip" (the default), "bzip2", or None.
-
- 'owner' and 'group' can be used to define an owner and a group for the
- archive that is being built. If not provided, the current owner and group
- will be used.
-
- The output tar file will be named 'base_name' + ".tar", possibly plus
- the appropriate compression extension (".gz", or ".bz2").
-
- Returns the output filename.
- """
- tar_compression = {'gzip': 'gz', None: ''}
- compress_ext = {'gzip': '.gz'}
-
- if _BZ2_SUPPORTED:
- tar_compression['bzip2'] = 'bz2'
- compress_ext['bzip2'] = '.bz2'
-
- # flags for compression program, each element of list will be an argument
- if compress is not None and compress not in compress_ext.keys():
- raise ValueError("bad value for 'compress', or compression format not "
- "supported: {0}".format(compress))
-
- archive_name = base_name + '.tar' + compress_ext.get(compress, '')
- archive_dir = os.path.dirname(archive_name)
-
- if not os.path.exists(archive_dir):
- if logger is not None:
- logger.info("creating %s", archive_dir)
- if not dry_run:
- os.makedirs(archive_dir)
-
- # creating the tarball
- if logger is not None:
- logger.info('Creating tar archive')
-
- uid = _get_uid(owner)
- gid = _get_gid(group)
-
- def _set_uid_gid(tarinfo):
- if gid is not None:
- tarinfo.gid = gid
- tarinfo.gname = group
- if uid is not None:
- tarinfo.uid = uid
- tarinfo.uname = owner
- return tarinfo
-
- if not dry_run:
- tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
- try:
- tar.add(base_dir, filter=_set_uid_gid)
- finally:
- tar.close()
-
- return archive_name
-
-def _call_external_zip(base_dir: str, zip_filename: str, verbose: bool = False,
- dry_run: bool = False) -> None:
- # XXX see if we want to keep an external call here
- if verbose:
- zipoptions = "-r"
- else:
- zipoptions = "-rq"
- from distutils.errors import DistutilsExecError
- from distutils.spawn import spawn
- try:
- spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
- except DistutilsExecError:
- # XXX really should distinguish between "couldn't find
- # external 'zip' command" and "zip failed".
- raise ExecError(("unable to create zip file '%s': "
- "could neither import the 'zipfile' module nor "
- "find a standalone zip utility") % zip_filename)
-
-def _make_zipfile(base_name: str, base_dir: str, verbose: bool = False,
- dry_run: bool = False, logger: Any = None) -> str:
- """Create a zip file from all the files under 'base_dir'.
-
- The output zip file will be named 'base_name' + ".zip". Uses either the
- "zipfile" Python module (if available) or the InfoZIP "zip" utility
- (if installed and found on the default search path). If neither tool is
- available, raises ExecError. Returns the name of the output zip
- file.
- """
- zip_filename = base_name + ".zip"
- archive_dir = os.path.dirname(base_name)
-
- if not os.path.exists(archive_dir):
- if logger is not None:
- logger.info("creating %s", archive_dir)
- if not dry_run:
- os.makedirs(archive_dir)
-
- # If zipfile module is not available, try spawning an external 'zip'
- # command.
- try:
- import zipfile
- except ImportError:
- zipfile = None
-
- if zipfile is None:
- _call_external_zip(base_dir, zip_filename, verbose, dry_run)
- else:
- if logger is not None:
- logger.info("creating '%s' and adding '%s' to it",
- zip_filename, base_dir)
-
- if not dry_run:
- zip = zipfile.ZipFile(zip_filename, "w",
- compression=zipfile.ZIP_DEFLATED)
-
- for dirpath, dirnames, filenames in os.walk(base_dir):
- for name in filenames:
- path = os.path.normpath(os.path.join(dirpath, name))
- if os.path.isfile(path):
- zip.write(path, path)
- if logger is not None:
- logger.info("adding '%s'", path)
- zip.close()
-
- return zip_filename
-
-_ARCHIVE_FORMATS = {
- 'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
- 'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"),
- 'zip': (_make_zipfile, [], "ZIP file")
- } # type: Dict[str, Tuple[Any, Sequence[Tuple[str, str]], str]]
-
-if _BZ2_SUPPORTED:
- _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
- "bzip2'ed tar-file")
-
-def get_archive_formats() -> List[Tuple[str, str]]:
- """Returns a list of supported formats for archiving and unarchiving.
-
- Each element of the returned sequence is a tuple (name, description)
- """
- formats = [(name, registry[2]) for name, registry in
- _ARCHIVE_FORMATS.items()]
- formats.sort()
- return formats
-
-def register_archive_format(name: str, function: Any,
- extra_args: Sequence[Tuple[str, Any]] = None,
- description: str = '') -> None:
- """Registers an archive format.
-
- name is the name of the format. function is the callable that will be
- used to create archives. If provided, extra_args is a sequence of
- (name, value) tuples that will be passed as arguments to the callable.
- description can be provided to describe the format, and will be returned
- by the get_archive_formats() function.
- """
- if extra_args is None:
- extra_args = []
- if not callable(function):
- raise TypeError('The %s object is not callable' % function)
- if not isinstance(extra_args, (tuple, list)):
- raise TypeError('extra_args needs to be a sequence')
- for element in extra_args:
- if not isinstance(element, (tuple, list)) or len(cast(tuple, element)) != 2:
- raise TypeError('extra_args elements are: (arg_name, value)')
-
- _ARCHIVE_FORMATS[name] = (function, extra_args, description)
-
-def unregister_archive_format(name: str) -> None:
- del _ARCHIVE_FORMATS[name]
-
-def make_archive(base_name: str, format: str, root_dir: str = None,
- base_dir: str = None, verbose: bool = False,
- dry_run: bool = False, owner: str = None,
- group: str = None, logger: Any = None) -> str:
- """Create an archive file (eg. zip or tar).
-
- 'base_name' is the name of the file to create, minus any format-specific
- extension; 'format' is the archive format: one of "zip", "tar", "bztar"
- or "gztar".
-
- 'root_dir' is a directory that will be the root directory of the
- archive; ie. we typically chdir into 'root_dir' before creating the
- archive. 'base_dir' is the directory where we start archiving from;
- ie. 'base_dir' will be the common prefix of all files and
- directories in the archive. 'root_dir' and 'base_dir' both default
- to the current directory. Returns the name of the archive file.
-
- 'owner' and 'group' are used when creating a tar archive. By default,
- uses the current owner and group.
- """
- save_cwd = os.getcwd()
- if root_dir is not None:
- if logger is not None:
- logger.debug("changing into '%s'", root_dir)
- base_name = os.path.abspath(base_name)
- if not dry_run:
- os.chdir(root_dir)
-
- if base_dir is None:
- base_dir = os.curdir
-
- kwargs = {'dry_run': dry_run, 'logger': logger}
-
- try:
- format_info = _ARCHIVE_FORMATS[format]
- except KeyError:
- raise ValueError("unknown archive format '%s'" % format)
-
- func = format_info[0]
- for arg, val in format_info[1]:
- kwargs[arg] = val
-
- if format != 'zip':
- kwargs['owner'] = owner
- kwargs['group'] = group
-
- try:
- filename = func(base_name, base_dir, **kwargs)
- finally:
- if root_dir is not None:
- if logger is not None:
- logger.debug("changing back to '%s'", save_cwd)
- os.chdir(save_cwd)
-
- return filename
-
-
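To make the root_dir/base_dir distinction above concrete, a hedged example against the standard library's shutil.make_archive; the directory and archive names are placeholders:

    import shutil

    # Archive the "site" subdirectory of /srv/www; entries in the archive are
    # stored relative to root_dir, so they all start with "site/".
    archive = shutil.make_archive('/tmp/site-backup', 'gztar',
                                  root_dir='/srv/www', base_dir='site')
    print(archive)                          # /tmp/site-backup.tar.gz
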
-def get_unpack_formats() -> List[Tuple[str, List[str], str]]:
- """Returns a list of supported formats for unpacking.
-
- Each element of the returned sequence is a tuple
- (name, extensions, description)
- """
- formats = [(name, info[0], info[3]) for name, info in
- _UNPACK_FORMATS.items()]
- formats.sort()
- return formats
-
-def _check_unpack_options(extensions: List[str], function: Any,
- extra_args: Sequence[Tuple[str, Any]]) -> None:
- """Checks what gets registered as an unpacker."""
- # first make sure no other unpacker is registered for this extension
- existing_extensions = {} # type: Dict[str, str]
- for name, info in _UNPACK_FORMATS.items():
- for ext in info[0]:
- existing_extensions[ext] = name
-
- for extension in extensions:
- if extension in existing_extensions:
- msg = '%s is already registered for "%s"'
- raise RegistryError(msg % (extension,
- existing_extensions[extension]))
-
- if not callable(function):
- raise TypeError('The registered function must be a callable')
-
-
-def register_unpack_format(name: str, extensions: List[str], function: Any,
- extra_args: Sequence[Tuple[str, Any]] = None,
- description: str = '') -> None:
- """Registers an unpack format.
-
- `name` is the name of the format. `extensions` is a list of extensions
- corresponding to the format.
-
- `function` is the callable that will be
- used to unpack archives. The callable will receive archives to unpack.
- If it's unable to handle an archive, it needs to raise a ReadError
- exception.
-
- If provided, `extra_args` is a sequence of
- (name, value) tuples that will be passed as arguments to the callable.
- description can be provided to describe the format, and will be returned
- by the get_unpack_formats() function.
- """
- if extra_args is None:
- extra_args = []
- _check_unpack_options(extensions, function, extra_args)
- _UNPACK_FORMATS[name] = extensions, function, extra_args, description
-
-def unregister_unpack_format(name: str) -> None:
- """Removes the unpack format from the registry."""
- del _UNPACK_FORMATS[name]
-
-def _ensure_directory(path: str) -> None:
- """Ensure that the parent directory of `path` exists"""
- dirname = os.path.dirname(path)
- if not os.path.isdir(dirname):
- os.makedirs(dirname)
-
-def _unpack_zipfile(filename: str, extract_dir: str) -> None:
- """Unpack zip `filename` to `extract_dir`
- """
- try:
- import zipfile
- except ImportError:
- raise ReadError('zlib not supported, cannot unpack this archive.')
-
- if not zipfile.is_zipfile(filename):
- raise ReadError("%s is not a zip file" % filename)
-
- zip = zipfile.ZipFile(filename)
- try:
- for info in zip.infolist():
- name = info.filename
-
- # don't extract absolute paths or ones with .. in them
- if name.startswith('/') or '..' in name:
- continue
-
- target = os.path.join(extract_dir, *name.split('/'))
- if not target:
- continue
-
- _ensure_directory(target)
- if not name.endswith('/'):
- # file
- data = zip.read(info.filename)
- f = open(target,'wb')
- try:
- f.write(data)
- finally:
- f.close()
- del data
- finally:
- zip.close()
-
-def _unpack_tarfile(filename: str, extract_dir: str) -> None:
- """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
- """
- try:
- tarobj = tarfile.open(filename)
- except tarfile.TarError:
- raise ReadError(
- "%s is not a compressed or uncompressed tar file" % filename)
- try:
- tarobj.extractall(extract_dir)
- finally:
- tarobj.close()
-
-_UNPACK_FORMATS = {
- 'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
- 'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
- 'zip': (['.zip'], _unpack_zipfile, [], "ZIP file")
- } # type: Dict[str, Tuple[List[str], Any, Sequence[Tuple[str, Any]], str]]
-
-if _BZ2_SUPPORTED:
- _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
- "bzip2'ed tar-file")
-
-def _find_unpack_format(filename: str) -> str:
- for name, info in _UNPACK_FORMATS.items():
- for extension in info[0]:
- if filename.endswith(extension):
- return name
- return None
-
-def unpack_archive(filename: str, extract_dir: str = None,
- format: str = None) -> None:
- """Unpack an archive.
-
- `filename` is the name of the archive.
-
- `extract_dir` is the name of the target directory, where the archive
- is unpacked. If not provided, the current working directory is used.
-
- `format` is the archive format: one of "zip", "tar", or "gztar". Or any
- other registered format. If not provided, unpack_archive will use the
- filename extension and see if an unpacker was registered for that
- extension.
-
- In case none is found, a ValueError is raised.
- """
- if extract_dir is None:
- extract_dir = os.getcwd()
-
- if format is not None:
- try:
- format_info = _UNPACK_FORMATS[format]
- except KeyError:
- raise ValueError("Unknown unpack format '{0}'".format(format))
-
- func = format_info[1]
- func(filename, extract_dir, **dict(format_info[2]))
- else:
- # we need to look at the registered unpackers supported extensions
- format = _find_unpack_format(filename)
- if format is None:
- raise ReadError("Unknown archive format '{0}'".format(filename))
-
- func = _UNPACK_FORMATS[format][1]
- kwargs = dict(_UNPACK_FORMATS[format][2])
- func(filename, extract_dir, **kwargs)
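
And the matching extraction step, again illustrative only and using the standard library's shutil; when format is omitted, unpack_archive() infers it from the filename extension as described above:

    import shutil

    # Unpack the archive created in the earlier example into a scratch directory.
    shutil.unpack_archive('/tmp/site-backup.tar.gz', extract_dir='/tmp/restore')
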
diff --git a/test-data/stdlib-samples/3.2/subprocess.py b/test-data/stdlib-samples/3.2/subprocess.py
deleted file mode 100644
index e286525..0000000
--- a/test-data/stdlib-samples/3.2/subprocess.py
+++ /dev/null
@@ -1,1703 +0,0 @@
-# subprocess - Subprocesses with accessible I/O streams
-#
-# For more information about this module, see PEP 324.
-#
-# Copyright (c) 2003-2005 by Peter Astrand <astrand at lysator.liu.se>
-#
-# Licensed to PSF under a Contributor Agreement.
-# See http://www.python.org/2.4/license for licensing details.
-
-r"""subprocess - Subprocesses with accessible I/O streams
-
-This module allows you to spawn processes, connect to their
-input/output/error pipes, and obtain their return codes. This module
-intends to replace several other, older modules and functions, like:
-
-os.system
-os.spawn*
-
-Information about how the subprocess module can be used to replace these
-modules and functions can be found below.
-
-
-
-Using the subprocess module
-===========================
-This module defines one class called Popen:
-
-class Popen(args, bufsize=0, executable=None,
- stdin=None, stdout=None, stderr=None,
- preexec_fn=None, close_fds=True, shell=False,
- cwd=None, env=None, universal_newlines=False,
- startupinfo=None, creationflags=0,
- restore_signals=True, start_new_session=False, pass_fds=()):
-
-
-Arguments are:
-
-args should be a string, or a sequence of program arguments. The
-program to execute is normally the first item in the args sequence or
-string, but can be explicitly set by using the executable argument.
-
-On POSIX, with shell=False (default): In this case, the Popen class
-uses os.execvp() to execute the child program. args should normally
-be a sequence. A string will be treated as a sequence with the string
-as the only item (the program to execute).
-
-On POSIX, with shell=True: If args is a string, it specifies the
-command string to execute through the shell. If args is a sequence,
-the first item specifies the command string, and any additional items
-will be treated as additional shell arguments.
-
-On Windows: the Popen class uses CreateProcess() to execute the child
-program, which operates on strings. If args is a sequence, it will be
-converted to a string using the list2cmdline method. Please note that
-not all MS Windows applications interpret the command line the same
-way: The list2cmdline is designed for applications using the same
-rules as the MS C runtime.
-
-bufsize, if given, has the same meaning as the corresponding argument
-to the built-in open() function: 0 means unbuffered, 1 means line
-buffered, any other positive value means use a buffer of
-(approximately) that size. A negative bufsize means to use the system
-default, which usually means fully buffered. The default value for
-bufsize is 0 (unbuffered).
-
-stdin, stdout and stderr specify the executed programs' standard
-input, standard output and standard error file handles, respectively.
-Valid values are PIPE, an existing file descriptor (a positive
-integer), an existing file object, and None. PIPE indicates that a
-new pipe to the child should be created. With None, no redirection
-will occur; the child's file handles will be inherited from the
-parent. Additionally, stderr can be STDOUT, which indicates that the
-stderr data from the application should be captured into the same
-file handle as for stdout.
-
-On POSIX, if preexec_fn is set to a callable object, this object will be
-called in the child process just before the child is executed. The use
-of preexec_fn is not thread safe; using it in the presence of threads
-could lead to a deadlock in the child process before the new executable
-is executed.
-
-If close_fds is true, all file descriptors except 0, 1 and 2 will be
-closed before the child process is executed. The default for close_fds
-varies by platform: Always true on POSIX. True when stdin/stdout/stderr
-are None on Windows, false otherwise.
-
-pass_fds is an optional sequence of file descriptors to keep open between the
-parent and child. Providing any pass_fds implicitly sets close_fds to true.
-
-If shell is true, the specified command will be executed through the
-shell.
-
-If cwd is not None, the current directory will be changed to cwd
-before the child is executed.
-
-On POSIX, if restore_signals is True all signals that Python sets to
-SIG_IGN are restored to SIG_DFL in the child process before the exec.
-Currently this includes the SIGPIPE, SIGXFZ and SIGXFSZ signals. This
-parameter does nothing on Windows.
-
-On POSIX, if start_new_session is True, the setsid() system call will be made
-in the child process prior to executing the command.
-
-If env is not None, it defines the environment variables for the new
-process.
-
-If universal_newlines is true, the file objects stdout and stderr are
-opened as text files, but lines may be terminated by any of '\n',
-the Unix end-of-line convention, '\r', the old Macintosh convention or
-'\r\n', the Windows convention. All of these external representations
-are seen as '\n' by the Python program. Note: This feature is only
-available if Python is built with universal newline support (the
-default). Also, the newlines attributes of the file objects stdout,
-stdin and stderr are not updated by the communicate() method.
-
-The startupinfo and creationflags, if given, will be passed to the
-underlying CreateProcess() function. They can specify things such as
-appearance of the main window and priority for the new process.
-(Windows only)
-
-
-This module also defines some shortcut functions:
-
-call(*popenargs, **kwargs):
- Run command with arguments. Wait for command to complete, then
- return the returncode attribute.
-
- The arguments are the same as for the Popen constructor. Example:
-
- >>> retcode = subprocess.call(["ls", "-l"])
-
-check_call(*popenargs, **kwargs):
- Run command with arguments. Wait for command to complete. If the
- exit code was zero then return, otherwise raise
- CalledProcessError. The CalledProcessError object will have the
- return code in the returncode attribute.
-
- The arguments are the same as for the Popen constructor. Example:
-
- >>> subprocess.check_call(["ls", "-l"])
- 0
-
-getstatusoutput(cmd):
- Return (status, output) of executing cmd in a shell.
-
- Execute the string 'cmd' in a shell with os.popen() and return a 2-tuple
- (status, output). cmd is actually run as '{ cmd ; } 2>&1', so that the
- returned output will contain output or error messages. A trailing newline
- is stripped from the output. The exit status for the command can be
- interpreted according to the rules for the C function wait(). Example:
-
- >>> subprocess.getstatusoutput('ls /bin/ls')
- (0, '/bin/ls')
- >>> subprocess.getstatusoutput('cat /bin/junk')
- (256, 'cat: /bin/junk: No such file or directory')
- >>> subprocess.getstatusoutput('/bin/junk')
- (256, 'sh: /bin/junk: not found')
-
-getoutput(cmd):
- Return output (stdout or stderr) of executing cmd in a shell.
-
- Like getstatusoutput(), except the exit status is ignored and the return
- value is a string containing the command's output. Example:
-
- >>> subprocess.getoutput('ls /bin/ls')
- '/bin/ls'
-
-check_output(*popenargs, **kwargs):
- Run command with arguments and return its output as a byte string.
-
- If the exit code was non-zero it raises a CalledProcessError. The
- CalledProcessError object will have the return code in the returncode
- attribute and output in the output attribute.
-
- The arguments are the same as for the Popen constructor. Example:
-
- >>> output = subprocess.check_output(["ls", "-l", "/dev/null"])
-
-
-Exceptions
-----------
-Exceptions raised in the child process, before the new program has
-started to execute, will be re-raised in the parent. Additionally,
-the exception object will have one extra attribute called
-'child_traceback', which is a string containing traceback information
-from the child's point of view.
-
-The most common exception raised is OSError. This occurs, for
-example, when trying to execute a non-existent file. Applications
-should prepare for OSErrors.
-
-A ValueError will be raised if Popen is called with invalid arguments.
-
-check_call() and check_output() will raise CalledProcessError, if the
-called process returns a non-zero return code.
-
-
-Security
---------
-Unlike some other popen functions, this implementation will never call
-/bin/sh implicitly. This means that all characters, including shell
-metacharacters, can safely be passed to child processes.
-
-
-Popen objects
-=============
-Instances of the Popen class have the following methods:
-
-poll()
- Check if child process has terminated. Returns returncode
- attribute.
-
-wait()
- Wait for child process to terminate. Returns returncode attribute.
-
-communicate(input=None)
- Interact with process: Send data to stdin. Read data from stdout
- and stderr, until end-of-file is reached. Wait for process to
- terminate. The optional input argument should be a string to be
- sent to the child process, or None, if no data should be sent to
- the child.
-
- communicate() returns a tuple (stdout, stderr).
-
- Note: The data read is buffered in memory, so do not use this
- method if the data size is large or unlimited.
-
-The following attributes are also available:
-
-stdin
- If the stdin argument is PIPE, this attribute is a file object
- that provides input to the child process. Otherwise, it is None.
-
-stdout
- If the stdout argument is PIPE, this attribute is a file object
- that provides output from the child process. Otherwise, it is
- None.
-
-stderr
- If the stderr argument is PIPE, this attribute is file object that
- provides error output from the child process. Otherwise, it is
- None.
-
-pid
- The process ID of the child process.
-
-returncode
- The child return code. A None value indicates that the process
- hasn't terminated yet. A negative value -N indicates that the
- child was terminated by signal N (POSIX only).
-
-
-Replacing older functions with the subprocess module
-====================================================
-In this section, "a ==> b" means that b can be used as a replacement
-for a.
-
-Note: All of the older functions in this section fail (more or less)
-silently if the executed program cannot be found; the subprocess
-replacements raise an OSError exception instead.
-
-In the following examples, we assume that the subprocess module is
-imported with "from subprocess import *".
-
-
-Replacing /bin/sh shell backquote
----------------------------------
-output=`mycmd myarg`
-==>
-output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0]
-
-
-Replacing shell pipe line
--------------------------
-output=`dmesg | grep hda`
-==>
-p1 = Popen(["dmesg"], stdout=PIPE)
-p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
-output = p2.communicate()[0]
-
-
-Replacing os.system()
----------------------
-sts = os.system("mycmd" + " myarg")
-==>
-p = Popen("mycmd" + " myarg", shell=True)
-pid, sts = os.waitpid(p.pid, 0)
-
-Note:
-
-* Calling the program through the shell is usually not required.
-
-* It's easier to look at the returncode attribute than the
- exitstatus.
-
-A more real-world example would look like this:
-
-try:
- retcode = call("mycmd" + " myarg", shell=True)
- if retcode < 0:
- print("Child was terminated by signal", -retcode, file=sys.stderr)
- else:
- print("Child returned", retcode, file=sys.stderr)
-except OSError as e:
- print("Execution failed:", e, file=sys.stderr)
-
-
-Replacing os.spawn*
--------------------
-P_NOWAIT example:
-
-pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg")
-==>
-pid = Popen(["/bin/mycmd", "myarg"]).pid
-
-
-P_WAIT example:
-
-retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg")
-==>
-retcode = call(["/bin/mycmd", "myarg"])
-
-
-Vector example:
-
-os.spawnvp(os.P_NOWAIT, path, args)
-==>
-Popen([path] + args[1:])
-
-
-Environment example:
-
-os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env)
-==>
-Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"})
-"""
-
-import sys
-mswindows = (sys.platform == "win32")
-
-import io
-import os
-import traceback
-import gc
-import signal
-import builtins
-import warnings
-import errno
-
-from typing import (
- Any, Tuple, List, Sequence, Callable, Mapping, cast, Set, Dict, IO,
- TextIO, AnyStr
-)
-from types import TracebackType
-
-# Exception classes used by this module.
-class CalledProcessError(Exception):
- """This exception is raised when a process run by check_call() or
- check_output() returns a non-zero exit status.
- The exit status will be stored in the returncode attribute;
- check_output() will also store the output in the output attribute.
- """
- def __init__(self, returncode: int, cmd: str, output: Any = None) -> None:
- self.returncode = returncode
- self.cmd = cmd
- self.output = output
- def __str__(self) -> str:
- return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
-
-
-if mswindows:
- import threading
- import msvcrt
- import _subprocess
- class STARTUPINFO:
- dwFlags = 0
- hStdInput = cast(Any, None)
- hStdOutput = cast(Any, None)
- hStdError = cast(Any, None)
- wShowWindow = 0
- class pywintypes:
- error = IOError
-else:
- import select
- _has_poll = hasattr(select, 'poll')
- import fcntl
- import pickle
-
- try:
- import _posixsubprocess
- have_posixsubprocess = True
- except ImportError:
- have_posixsubprocess = False
- warnings.warn("The _posixsubprocess module is not being used. "
- "Child process reliability may suffer if your "
- "program uses threads.", RuntimeWarning)
-
- # When select or poll has indicated that the file is writable,
- # we can write up to _PIPE_BUF bytes without risk of blocking.
- # POSIX defines PIPE_BUF as >= 512.
- _PIPE_BUF = getattr(select, 'PIPE_BUF', 512) # type: int
-
- _FD_CLOEXEC = getattr(fcntl, 'FD_CLOEXEC', 1) # type: int
-
- def _set_cloexec(fd: int, cloexec: bool) -> None:
- old = fcntl.fcntl(fd, fcntl.F_GETFD)
- if cloexec:
- fcntl.fcntl(fd, fcntl.F_SETFD, old | _FD_CLOEXEC)
- else:
- fcntl.fcntl(fd, fcntl.F_SETFD, old & ~_FD_CLOEXEC)
-
- if have_posixsubprocess:
- _create_pipe = _posixsubprocess.cloexec_pipe
- else:
- def __create_pipe() -> Tuple[int, int]:
- fds = os.pipe()
- _set_cloexec(fds[0], True)
- _set_cloexec(fds[1], True)
- return fds
- _create_pipe = __create_pipe
-
-__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput",
- "getoutput", "check_output", "CalledProcessError"]
-
-if mswindows:
- from _subprocess import (CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP,
- STD_INPUT_HANDLE, STD_OUTPUT_HANDLE,
- STD_ERROR_HANDLE, SW_HIDE,
- STARTF_USESTDHANDLES, STARTF_USESHOWWINDOW)
-
- __all__.extend(["CREATE_NEW_CONSOLE", "CREATE_NEW_PROCESS_GROUP",
- "STD_INPUT_HANDLE", "STD_OUTPUT_HANDLE",
- "STD_ERROR_HANDLE", "SW_HIDE",
- "STARTF_USESTDHANDLES", "STARTF_USESHOWWINDOW"])
-try:
- MAXFD = os.sysconf("SC_OPEN_MAX")
-except:
- MAXFD = 256
-
-# This list holds Popen instances for which the underlying process had not
-# exited at the time its __del__ method got called: those processes are wait()ed
-# for synchronously from _cleanup() when a new Popen object is created, to avoid
-# zombie processes.
-_active = [] # type: List[Popen]
-
-def _cleanup() -> None:
- for inst in _active[:]:
- res = inst._internal_poll(_deadstate=sys.maxsize)
- if res is not None:
- try:
- _active.remove(inst)
- except ValueError:
- # This can happen if two threads create a new Popen instance.
- # It's harmless that it was already removed, so ignore.
- pass
-
-PIPE = -1
-STDOUT = -2
-
-
-def _eintr_retry_call(func: Any, *args: Any) -> Any:
- while True:
- try:
- return func(*args)
- except (OSError, IOError) as e:
- if e.errno == errno.EINTR:
- continue
- raise
-
-
-def call(*popenargs: Any, **kwargs: Any) -> int:
- """Run command with arguments. Wait for command to complete, then
- return the returncode attribute.
-
- The arguments are the same as for the Popen constructor. Example:
-
- retcode = call(["ls", "-l"])
- """
- return Popen(*popenargs, **kwargs).wait()
-
-
-def check_call(*popenargs: Any, **kwargs: Any) -> int:
- """Run command with arguments. Wait for command to complete. If
- the exit code was zero then return, otherwise raise
- CalledProcessError. The CalledProcessError object will have the
- return code in the returncode attribute.
-
- The arguments are the same as for the Popen constructor. Example:
-
- check_call(["ls", "-l"])
- """
- retcode = call(*popenargs, **kwargs)
- if retcode:
- cmd = kwargs.get("args")
- if cmd is None:
- cmd = popenargs[0]
- raise CalledProcessError(retcode, cmd)
- return 0
-
-
-def check_output(*popenargs: Any, **kwargs: Any) -> bytes:
- r"""Run command with arguments and return its output as a byte string.
-
- If the exit code was non-zero it raises a CalledProcessError. The
- CalledProcessError object will have the return code in the returncode
- attribute and output in the output attribute.
-
- The arguments are the same as for the Popen constructor. Example:
-
- >>> check_output(["ls", "-l", "/dev/null"])
- b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n'
-
- The stdout argument is not allowed as it is used internally.
- To capture standard error in the result, use stderr=STDOUT.
-
- >>> check_output(["/bin/sh", "-c",
- ... "ls -l non_existent_file ; exit 0"],
- ... stderr=STDOUT)
- b'ls: non_existent_file: No such file or directory\n'
- """
- if 'stdout' in kwargs:
- raise ValueError('stdout argument not allowed, it will be overridden.')
- kwargs['stdout'] = PIPE
- process = Popen(*popenargs, **kwargs)
- output, unused_err = process.communicate()
- retcode = process.poll()
- if retcode:
- cmd = kwargs.get("args")
- if cmd is None:
- cmd = popenargs[0]
- raise CalledProcessError(retcode, cmd, output=output)
- return output
-
-
-def list2cmdline(seq: Sequence[str]) -> str:
- """
- Translate a sequence of arguments into a command line
- string, using the same rules as the MS C runtime:
-
- 1) Arguments are delimited by white space, which is either a
- space or a tab.
-
- 2) A string surrounded by double quotation marks is
- interpreted as a single argument, regardless of white space
- contained within. A quoted string can be embedded in an
- argument.
-
- 3) A double quotation mark preceded by a backslash is
- interpreted as a literal double quotation mark.
-
- 4) Backslashes are interpreted literally, unless they
- immediately precede a double quotation mark.
-
- 5) If backslashes immediately precede a double quotation mark,
- every pair of backslashes is interpreted as a literal
- backslash. If the number of backslashes is odd, the last
- backslash escapes the next double quotation mark as
- described in rule 3.
- """
-
- # See
- # http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
- # or search http://msdn.microsoft.com for
- # "Parsing C++ Command-Line Arguments"
- result = [] # type: List[str]
- needquote = False
- for arg in seq:
- bs_buf = [] # type: List[str]
-
- # Add a space to separate this argument from the others
- if result:
- result.append(' ')
-
- needquote = (" " in arg) or ("\t" in arg) or not arg
- if needquote:
- result.append('"')
-
- for c in arg:
- if c == '\\':
- # Don't know if we need to double yet.
- bs_buf.append(c)
- elif c == '"':
- # Double backslashes.
- result.append('\\' * len(bs_buf)*2)
- bs_buf = []
- result.append('\\"')
- else:
- # Normal char
- if bs_buf:
- result.extend(bs_buf)
- bs_buf = []
- result.append(c)
-
- # Add remaining backslashes, if any.
- if bs_buf:
- result.extend(bs_buf)
-
- if needquote:
- result.extend(bs_buf)
- result.append('"')
-
- return ''.join(result)
-
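As a quick illustration of the quoting rules spelled out in the docstring above, the stdlib's own subprocess.list2cmdline (the same function this deleted sample copies) behaves as follows; the argument values are made up for the example:

    import subprocess

    # An argument containing a space is quoted; an embedded quote is backslash-escaped.
    assert subprocess.list2cmdline(["a b", 'c"d']) == '"a b" c\\"d'
    # An empty argument is rendered as an empty quoted string.
    assert subprocess.list2cmdline(["", "plain"]) == '"" plain'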
-
-# Various tools for executing commands and looking at their output and status.
-#
-# NB This only works (and is only relevant) for POSIX.
-
-def getstatusoutput(cmd: str) -> Tuple[int, str]:
- """Return (status, output) of executing cmd in a shell.
-
- Execute the string 'cmd' in a shell with os.popen() and return a 2-tuple
- (status, output). cmd is actually run as '{ cmd ; } 2>&1', so that the
- returned output will contain output or error messages. A trailing newline
- is stripped from the output. The exit status for the command can be
- interpreted according to the rules for the C function wait(). Example:
-
- >>> import subprocess
- >>> subprocess.getstatusoutput('ls /bin/ls')
- (0, '/bin/ls')
- >>> subprocess.getstatusoutput('cat /bin/junk')
- (256, 'cat: /bin/junk: No such file or directory')
- >>> subprocess.getstatusoutput('/bin/junk')
- (256, 'sh: /bin/junk: not found')
- """
- pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
- text = pipe.read()
- sts = pipe.close()
- if sts is None: sts = 0
- if text[-1:] == '\n': text = text[:-1]
- return sts, text
-
-
-def getoutput(cmd: str) -> str:
- """Return output (stdout or stderr) of executing cmd in a shell.
-
- Like getstatusoutput(), except the exit status is ignored and the return
- value is a string containing the command's output. Example:
-
- >>> import subprocess
- >>> subprocess.getoutput('ls /bin/ls')
- '/bin/ls'
- """
- return getstatusoutput(cmd)[1]
-
-
-_PLATFORM_DEFAULT_CLOSE_FDS = object()
-
-
-class Popen(object):
- def __init__(self, args: Sequence[Any], bufsize: int = 0,
- executable: str = None, stdin: Any = None,
- stdout: Any = None, stderr: Any = None,
- preexec_fn: Callable[[], Any] = None,
- close_fds: Any = _PLATFORM_DEFAULT_CLOSE_FDS,
- shell: int = False, cwd: str = None,
- env: Mapping[str, str] = None,
- universal_newlines: int = False,
- startupinfo: 'STARTUPINFO' = None, creationflags: int = 0,
- restore_signals: bool = True, start_new_session: bool = False,
- pass_fds: Any = ()) -> None:
- """Create new Popen instance."""
- _cleanup()
-
- self._child_created = False
- if bufsize is None:
- bufsize = 0 # Restore default
- if not isinstance(bufsize, int):
- raise TypeError("bufsize must be an integer")
-
- if mswindows:
- if preexec_fn is not None:
- raise ValueError("preexec_fn is not supported on Windows "
- "platforms")
- any_stdio_set = (stdin is not None or stdout is not None or
- stderr is not None)
- if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS:
- if any_stdio_set:
- close_fds = False
- else:
- close_fds = True
- elif close_fds and any_stdio_set:
- raise ValueError(
- "close_fds is not supported on Windows platforms"
- " if you redirect stdin/stdout/stderr")
- else:
- # POSIX
- if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS:
- close_fds = True
- if pass_fds and not close_fds:
- warnings.warn("pass_fds overriding close_fds.", RuntimeWarning)
- close_fds = True
- if startupinfo is not None:
- raise ValueError("startupinfo is only supported on Windows "
- "platforms")
- if creationflags != 0:
- raise ValueError("creationflags is only supported on Windows "
- "platforms")
-
- self.stdin = None # type: IO[Any]
- self.stdout = None # type: IO[Any]
- self.stderr = None # type: IO[Any]
- self.pid = None # type: int
- self.returncode = None # type: int
- self.universal_newlines = universal_newlines
-
- # Input and output objects. The general principle is like
- # this:
- #
- # Parent Child
- # ------ -----
- # p2cwrite ---stdin---> p2cread
- # c2pread <--stdout--- c2pwrite
- # errread <--stderr--- errwrite
- #
- # On POSIX, the child objects are file descriptors. On
- # Windows, these are Windows file handles. The parent objects
- # are file descriptors on both platforms. The parent objects
- # are -1 when not using PIPEs. The child objects are -1
- # when not redirecting.
-
- (p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite) = self._get_handles(stdin, stdout, stderr)
-
- # We wrap OS handles *before* launching the child, otherwise a
- # quickly terminating child could make our fds unwrappable
- # (see #8458).
-
- if mswindows:
- if p2cwrite != -1:
- p2cwrite = msvcrt.open_osfhandle(p2cwrite.Detach(), 0)
- if c2pread != -1:
- c2pread = msvcrt.open_osfhandle(c2pread.Detach(), 0)
- if errread != -1:
- errread = msvcrt.open_osfhandle(errread.Detach(), 0)
-
- if p2cwrite != -1:
- self.stdin = io.open(p2cwrite, 'wb', bufsize)
- if self.universal_newlines:
- self.stdin = io.TextIOWrapper(self.stdin, write_through=True)
- if c2pread != -1:
- self.stdout = io.open(c2pread, 'rb', bufsize)
- if universal_newlines:
- self.stdout = io.TextIOWrapper(self.stdout)
- if errread != -1:
- self.stderr = io.open(errread, 'rb', bufsize)
- if universal_newlines:
- self.stderr = io.TextIOWrapper(self.stderr)
-
- try:
- self._execute_child(args, executable, preexec_fn, close_fds,
- pass_fds, cwd, env, universal_newlines,
- startupinfo, creationflags, shell,
- p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite,
- restore_signals, start_new_session)
- except:
- # Cleanup if the child failed starting
- for f in filter(None, [self.stdin, self.stdout, self.stderr]):
- try:
- f.close()
- except EnvironmentError:
- # Ignore EBADF or other errors
- pass
- raise
-
-
- def _translate_newlines(self, data: bytes, encoding: str) -> str:
- data = data.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
- return data.decode(encoding)
-
- def __enter__(self) -> 'Popen':
- return self
-
- def __exit__(self, type: type, value: BaseException,
- traceback: TracebackType) -> bool:
- if self.stdout:
- self.stdout.close()
- if self.stderr:
- self.stderr.close()
- if self.stdin:
- self.stdin.close()
- # Wait for the process to terminate, to avoid zombies.
- self.wait()
-
- def __del__(self, _maxsize: int = sys.maxsize,
- _active: List['Popen'] = _active) -> None:
- # If __init__ hasn't had a chance to execute (e.g. if it
- # was passed an undeclared keyword argument), we don't
- # have a _child_created attribute at all.
- if not getattr(self, '_child_created', False):
- # We didn't get to successfully create a child process.
- return
- # In case the child hasn't been waited on, check if it's done.
- self._internal_poll(_deadstate=_maxsize)
- if self.returncode is None and _active is not None:
- # Child is still running, keep us alive until we can wait on it.
- _active.append(self)
-
-
- def communicate(self, input: Any = None) -> Tuple[Any, Any]:
- """Interact with process: Send data to stdin. Read data from
- stdout and stderr, until end-of-file is reached. Wait for
- process to terminate. The optional input argument should be a
- string to be sent to the child process, or None, if no data
- should be sent to the child.
-
- communicate() returns a tuple (stdout, stderr)."""
-
- # Optimization: If we are only using one pipe, or no pipe at
- # all, using select() or threads is unnecessary.
- if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
- stdout = None # type: IO[Any]
- stderr = None # type: IO[Any]
- if self.stdin:
- if input:
- try:
- self.stdin.write(input)
- except IOError as e:
- if e.errno != errno.EPIPE and e.errno != errno.EINVAL:
- raise
- self.stdin.close()
- elif self.stdout:
- stdout = _eintr_retry_call(self.stdout.read)
- self.stdout.close()
- elif self.stderr:
- stderr = _eintr_retry_call(self.stderr.read)
- self.stderr.close()
- self.wait()
- return (stdout, stderr)
-
- return self._communicate(input)
-
-
- def poll(self) -> int:
- return self._internal_poll()
-
-
- if mswindows:
- #
- # Windows methods
- #
- def _get_handles(self, stdin: Any, stdout: Any,
- stderr: Any) -> Tuple[Any, Any, Any, Any, Any, Any]:
- """Construct and return tuple with IO objects:
- p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
- """
- if stdin is None and stdout is None and stderr is None:
- return (-1, -1, -1, -1, -1, -1)
-
- p2cread, p2cwrite = -1, -1 # type: (Any, Any)
- c2pread, c2pwrite = -1, -1 # type: (Any, Any)
- errread, errwrite = -1, -1 # type: (Any, Any)
-
- if stdin is None:
- p2cread = _subprocess.GetStdHandle(_subprocess.STD_INPUT_HANDLE)
- if p2cread is None:
- p2cread, _ = _subprocess.CreatePipe(None, 0)
- elif stdin == PIPE:
- p2cread, p2cwrite = _subprocess.CreatePipe(None, 0)
- elif isinstance(stdin, int):
- p2cread = msvcrt.get_osfhandle(stdin)
- else:
- # Assuming file-like object
- p2cread = msvcrt.get_osfhandle(stdin.fileno())
- p2cread = self._make_inheritable(p2cread)
-
- if stdout is None:
- c2pwrite = _subprocess.GetStdHandle(_subprocess.STD_OUTPUT_HANDLE)
- if c2pwrite is None:
- _, c2pwrite = _subprocess.CreatePipe(None, 0)
- elif stdout == PIPE:
- c2pread, c2pwrite = _subprocess.CreatePipe(None, 0)
- elif isinstance(stdout, int):
- c2pwrite = msvcrt.get_osfhandle(stdout)
- else:
- # Assuming file-like object
- c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
- c2pwrite = self._make_inheritable(c2pwrite)
-
- if stderr is None:
- errwrite = _subprocess.GetStdHandle(_subprocess.STD_ERROR_HANDLE)
- if errwrite is None:
- _, errwrite = _subprocess.CreatePipe(None, 0)
- elif stderr == PIPE:
- errread, errwrite = _subprocess.CreatePipe(None, 0)
- elif stderr == STDOUT:
- errwrite = c2pwrite
- elif isinstance(stderr, int):
- errwrite = msvcrt.get_osfhandle(stderr)
- else:
- # Assuming file-like object
- errwrite = msvcrt.get_osfhandle(stderr.fileno())
- errwrite = self._make_inheritable(errwrite)
-
- return (p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite)
-
-
- def _make_inheritable(self, handle: _subprocess.Handle) -> int:
- """Return a duplicate of handle, which is inheritable"""
- return _subprocess.DuplicateHandle(_subprocess.GetCurrentProcess(),
- handle, _subprocess.GetCurrentProcess(), 0, 1,
- _subprocess.DUPLICATE_SAME_ACCESS)
-
-
- def _find_w9xpopen(self) -> str:
- """Find and return absolut path to w9xpopen.exe"""
- w9xpopen = os.path.join(
- os.path.dirname(_subprocess.GetModuleFileName(0)),
- "w9xpopen.exe")
- if not os.path.exists(w9xpopen):
- # Eeek - file-not-found - possibly an embedding
- # situation - see if we can locate it in sys.exec_prefix
- w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix),
- "w9xpopen.exe")
- if not os.path.exists(w9xpopen):
- raise RuntimeError("Cannot locate w9xpopen.exe, which is "
- "needed for Popen to work with your "
- "shell or platform.")
- return w9xpopen
-
-
- def _execute_child(self, args: Sequence[str], executable: str,
- preexec_fn: Callable[[], Any], close_fds: Any,
- pass_fds: Any, cwd: str, env: Mapping[str, str],
- universal_newlines: int,
- startupinfo: STARTUPINFO, creationflags: int,
- shell: int,
- p2cread: Any, p2cwrite: Any,
- c2pread: Any, c2pwrite: Any,
- errread: Any, errwrite: Any,
- restore_signals: bool,
- start_new_session: bool) -> None:
- """Execute program (MS Windows version)"""
-
- assert not pass_fds, "pass_fds not supported on Windows."
-
- if not isinstance(args, str):
- args = list2cmdline(args)
-
- # Process startup details
- if startupinfo is None:
- startupinfo = STARTUPINFO()
- if -1 not in (p2cread, c2pwrite, errwrite):
- startupinfo.dwFlags |= _subprocess.STARTF_USESTDHANDLES
- startupinfo.hStdInput = p2cread
- startupinfo.hStdOutput = c2pwrite
- startupinfo.hStdError = errwrite
-
- if shell:
- startupinfo.dwFlags |= _subprocess.STARTF_USESHOWWINDOW
- startupinfo.wShowWindow = _subprocess.SW_HIDE
- comspec = os.environ.get("COMSPEC", "cmd.exe")
- args = '{} /c "{}"'.format(comspec, args)
- if (_subprocess.GetVersion() >= 0x80000000 or
- os.path.basename(comspec).lower() == "command.com"):
- # Win9x, or using command.com on NT. We need to
- # use the w9xpopen intermediate program. For more
- # information, see KB Q150956
- # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp)
- w9xpopen = self._find_w9xpopen()
- args = '"%s" %s' % (w9xpopen, args)
- # Not passing CREATE_NEW_CONSOLE has been known to
- # cause random failures on win9x. Specifically a
- # dialog: "Your program accessed mem currently in
- # use at xxx" and a hopeful warning about the
- # stability of your system. Cost is Ctrl+C won't
- # kill children.
- creationflags |= _subprocess.CREATE_NEW_CONSOLE
-
- # Start the process
- try:
- hp, ht, pid, tid = _subprocess.CreateProcess(executable,
- cast(str, args),
- # no special security
- None, None,
- int(not close_fds),
- creationflags,
- env,
- cwd,
- startupinfo)
- except pywintypes.error as e:
- # Translate pywintypes.error to WindowsError, which is
- # a subclass of OSError. FIXME: We should really
- # translate errno using _sys_errlist (or similar), but
- # how can this be done from Python?
- raise WindowsError(*e.args)
- finally:
- # Child is launched. Close the parent's copy of those pipe
- # handles that only the child should have open. You need
- # to make sure that no handles to the write end of the
- # output pipe are maintained in this process or else the
- # pipe will not close when the child process exits and the
- # ReadFile will hang.
- if p2cread != -1:
- p2cread.Close()
- if c2pwrite != -1:
- c2pwrite.Close()
- if errwrite != -1:
- errwrite.Close()
-
- # Retain the process handle, but close the thread handle
- self._child_created = True
- self._handle = hp
- self.pid = pid
- ht.Close()
-
- def _internal_poll(self, _deadstate: int = None) -> int:
- """Check if child process has terminated. Returns returncode
- attribute.
-
- This method is called by __del__, so it can only refer to objects
- in its local scope.
-
- """
- return self._internal_poll_win(_deadstate)
-
- from _subprocess import Handle
-
- def _internal_poll_win(self, _deadstate: int = None,
- _WaitForSingleObject: Callable[[Handle, int], int] =
- _subprocess.WaitForSingleObject,
- _WAIT_OBJECT_0: int = _subprocess.WAIT_OBJECT_0,
- _GetExitCodeProcess: Callable[[Handle], int] =
- _subprocess.GetExitCodeProcess) -> int:
- if self.returncode is None:
- if _WaitForSingleObject(self._handle, 0) == _WAIT_OBJECT_0:
- self.returncode = _GetExitCodeProcess(self._handle)
- return self.returncode
-
-
- def wait(self) -> int:
- """Wait for child process to terminate. Returns returncode
- attribute."""
- if self.returncode is None:
- _subprocess.WaitForSingleObject(self._handle,
- _subprocess.INFINITE)
- self.returncode = _subprocess.GetExitCodeProcess(self._handle)
- return self.returncode
-
-
- def _readerthread(self, fh: IO[AnyStr], buffer: List[AnyStr]) -> None:
- buffer.append(fh.read())
- fh.close()
-
-
- def _communicate(self, input: Any) -> Tuple[Any, Any]:
- stdout = cast(Any, None) # Return
- stderr = cast(Any, None) # Return
-
- if self.stdout:
- stdout = []
- stdout_thread = threading.Thread(target=self._readerthread,
- args=(self.stdout, stdout))
- stdout_thread.daemon = True
- stdout_thread.start()
- if self.stderr:
- stderr = []
- stderr_thread = threading.Thread(target=self._readerthread,
- args=(self.stderr, stderr))
- stderr_thread.daemon = True
- stderr_thread.start()
-
- if self.stdin:
- if input is not None:
- try:
- self.stdin.write(input)
- except IOError as e:
- if e.errno != errno.EPIPE:
- raise
- self.stdin.close()
-
- if self.stdout:
- stdout_thread.join()
- if self.stderr:
- stderr_thread.join()
-
- # All data exchanged. Translate lists into strings.
- if stdout is not None:
- stdout = stdout[0]
- if stderr is not None:
- stderr = stderr[0]
-
- self.wait()
- return (stdout, stderr)
-
- def send_signal(self, sig: int) -> None:
- """Send a signal to the process
- """
- if sig == signal.SIGTERM:
- self.terminate()
- elif sig == signal.CTRL_C_EVENT:
- os.kill(self.pid, signal.CTRL_C_EVENT)
- elif sig == signal.CTRL_BREAK_EVENT:
- os.kill(self.pid, signal.CTRL_BREAK_EVENT)
- else:
- raise ValueError("Unsupported signal: {}".format(sig))
-
- def terminate(self) -> None:
- """Terminates the process
- """
- _subprocess.TerminateProcess(self._handle, 1)
-
- def kill(self) -> None:
- """Terminates the process
- """
- self.terminate()
-
- else:
- #
- # POSIX methods
- #
- def _get_handles(self, stdin: Any, stdout: Any,
- stderr: Any) -> Tuple[Any, Any, Any, Any, Any, Any]:
- """Construct and return tuple with IO objects:
- p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
- """
- p2cread, p2cwrite = -1, -1
- c2pread, c2pwrite = -1, -1
- errread, errwrite = -1, -1
-
- if stdin is None:
- pass
- elif stdin == PIPE:
- p2cread, p2cwrite = _create_pipe()
- elif isinstance(stdin, int):
- p2cread = stdin
- else:
- # Assuming file-like object
- p2cread = stdin.fileno()
-
- if stdout is None:
- pass
- elif stdout == PIPE:
- c2pread, c2pwrite = _create_pipe()
- elif isinstance(stdout, int):
- c2pwrite = stdout
- else:
- # Assuming file-like object
- c2pwrite = stdout.fileno()
-
- if stderr is None:
- pass
- elif stderr == PIPE:
- errread, errwrite = _create_pipe()
- elif stderr == STDOUT:
- errwrite = c2pwrite
- elif isinstance(stderr, int):
- errwrite = stderr
- else:
- # Assuming file-like object
- errwrite = stderr.fileno()
-
- return (p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite)
-
-
- def _close_fds(self, fds_to_keep: Set[int]) -> None:
- start_fd = 3
- for fd in sorted(fds_to_keep):
- if fd >= start_fd:
- os.closerange(start_fd, fd)
- start_fd = fd + 1
- if start_fd <= MAXFD:
- os.closerange(start_fd, MAXFD)
-
-
- def _execute_child(self, args: Sequence[str], executable: str,
- preexec_fn: Callable[[], Any], close_fds: Any,
- pass_fds: Any, cwd: str, env: Mapping[str, str],
- universal_newlines: int,
- startupinfo: 'STARTUPINFO', creationflags: int,
- shell: int,
- p2cread: Any, p2cwrite: Any,
- c2pread: Any, c2pwrite: Any,
- errread: Any, errwrite: Any,
- restore_signals: bool,
- start_new_session: bool) -> None:
- """Execute program (POSIX version)"""
-
- if isinstance(args, str):
- args = [args]
- else:
- args = list(args)
-
- if shell:
- args = ["/bin/sh", "-c"] + args
- if executable:
- args[0] = executable
-
- if executable is None:
- executable = args[0]
-
- # For transferring possible exec failure from child to parent.
- # Data format: "exception name:hex errno:description"
- # Pickle is not used; it is complex and involves memory allocation.
- errpipe_read, errpipe_write = _create_pipe()
- try:
- try:
-
- if have_posixsubprocess:
- # We must avoid complex work that could involve
- # malloc or free in the child process to avoid
- # potential deadlocks, thus we do all this here.
- # and pass it to fork_exec()
-
- if env is not None:
- env_list = [os.fsencode(k) + b'=' + os.fsencode(v)
- for k, v in env.items()]
- else:
- env_list = None # Use execv instead of execve.
- executable_enc = os.fsencode(executable)
- if os.path.dirname(executable_enc):
- executable_list = (executable_enc,) # type: tuple
- else:
- # This matches the behavior of os._execvpe().
- executable_list = tuple(
- os.path.join(os.fsencode(dir), executable_enc)
- for dir in os.get_exec_path(env))
- fds_to_keep = set(pass_fds)
- fds_to_keep.add(errpipe_write)
- self.pid = _posixsubprocess.fork_exec(
- args, executable_list,
- close_fds, sorted(fds_to_keep), cwd, env_list,
- p2cread, p2cwrite, c2pread, c2pwrite,
- errread, errwrite,
- errpipe_read, errpipe_write,
- restore_signals, start_new_session, preexec_fn)
- self._child_created = True
- else:
- # Pure Python implementation: It is not thread safe.
- # This implementation may deadlock in the child if your
- # parent process has any other threads running.
-
- gc_was_enabled = gc.isenabled()
- # Disable gc to avoid bug where gc -> file_dealloc ->
- # write to stderr -> hang. See issue1336
- gc.disable()
- try:
- self.pid = os.fork()
- except:
- if gc_was_enabled:
- gc.enable()
- raise
- self._child_created = True
- if self.pid == 0:
- # Child
- try:
- # Close parent's pipe ends
- if p2cwrite != -1:
- os.close(p2cwrite)
- if c2pread != -1:
- os.close(c2pread)
- if errread != -1:
- os.close(errread)
- os.close(errpipe_read)
-
- # When duping fds, if there arises a situation
- # where one of the fds is either 0, 1 or 2, it
- # is possible that it is overwritten (#12607).
- if c2pwrite == 0:
- c2pwrite = os.dup(c2pwrite)
- if errwrite == 0 or errwrite == 1:
- errwrite = os.dup(errwrite)
-
- # Dup fds for child
- def _dup2(a: int, b: int) -> None:
- # dup2() removes the CLOEXEC flag but
- # we must do it ourselves if dup2()
- # would be a no-op (issue #10806).
- if a == b:
- _set_cloexec(a, False)
- elif a != -1:
- os.dup2(a, b)
- _dup2(p2cread, 0)
- _dup2(c2pwrite, 1)
- _dup2(errwrite, 2)
-
- # Close pipe fds. Make sure we don't close the
- # same fd more than once, or standard fds.
- closed = set() # type: Set[int]
- for fd in [p2cread, c2pwrite, errwrite]:
- if fd > 2 and fd not in closed:
- os.close(fd)
- closed.add(fd)
-
- # Close all other fds, if asked for
- if close_fds:
- fds_to_keep = set(pass_fds)
- fds_to_keep.add(errpipe_write)
- self._close_fds(fds_to_keep)
-
-
- if cwd is not None:
- os.chdir(cwd)
-
- # This is a copy of Python/pythonrun.c
- # _Py_RestoreSignals(). If that were exposed
- # as a sys._py_restoresignals func it would be
- # better.. but this pure python implementation
- # isn't likely to be used much anymore.
- if restore_signals:
- signals = ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ')
- for sig in signals:
- if hasattr(signal, sig):
- signal.signal(getattr(signal, sig),
- signal.SIG_DFL)
-
- if start_new_session and hasattr(os, 'setsid'):
- os.setsid()
-
- if preexec_fn:
- preexec_fn()
-
- if env is None:
- os.execvp(executable, args)
- else:
- os.execvpe(executable, args, env)
-
- except:
- try:
- exc_type, exc_value = sys.exc_info()[:2]
- if isinstance(exc_value, OSError):
- errno_num = exc_value.errno
- else:
- errno_num = 0
- message = '%s:%x:%s' % (exc_type.__name__,
- errno_num, exc_value)
- messageb = message.encode(errors="surrogatepass")
- os.write(errpipe_write, messageb)
- except Exception:
- # We MUST not allow anything odd happening
- # above to prevent us from exiting below.
- pass
-
- # This exitcode won't be reported to applications
- # so it really doesn't matter what we return.
- os._exit(255)
-
- # Parent
- if gc_was_enabled:
- gc.enable()
- finally:
- # be sure the FD is closed no matter what
- os.close(errpipe_write)
-
- if p2cread != -1 and p2cwrite != -1:
- os.close(p2cread)
- if c2pwrite != -1 and c2pread != -1:
- os.close(c2pwrite)
- if errwrite != -1 and errread != -1:
- os.close(errwrite)
-
- # Wait for exec to fail or succeed; possibly raising an
- # exception (limited in size)
- data = bytearray()
- while True:
- part = _eintr_retry_call(os.read, errpipe_read, 50000)
- data += part
- if not part or len(data) > 50000:
- break
- finally:
- # be sure the FD is closed no matter what
- os.close(errpipe_read)
-
- if data:
- try:
- _eintr_retry_call(os.waitpid, self.pid, 0)
- except OSError as e:
- if e.errno != errno.ECHILD:
- raise
- try:
- (exception_name, hex_errno,
- err_msg_b) = bytes(data).split(b':', 2)
- except ValueError:
- print('Bad exception data:', repr(data))
- exception_name = b'RuntimeError'
- hex_errno = b'0'
- err_msg_b = b'Unknown'
- child_exception_type = getattr(
- builtins, exception_name.decode('ascii'),
- RuntimeError)
- for fd in (p2cwrite, c2pread, errread):
- if fd != -1:
- os.close(fd)
- err_msg = err_msg_b.decode(errors="surrogatepass")
- if issubclass(child_exception_type, OSError) and hex_errno:
- errno_num = int(hex_errno, 16)
- if errno_num != 0:
- err_msg = os.strerror(errno_num)
- if errno_num == errno.ENOENT:
- err_msg += ': ' + repr(args[0])
- raise child_exception_type(errno_num, err_msg)
- raise child_exception_type(err_msg)
-
-
- def _handle_exitstatus(
- self, sts: int,
- _WIFSIGNALED: Callable[[int], bool] = os.WIFSIGNALED,
- _WTERMSIG: Callable[[int], int] = os.WTERMSIG,
- _WIFEXITED: Callable[[int], bool] = os.WIFEXITED,
- _WEXITSTATUS: Callable[[int], int] = os.WEXITSTATUS) -> None:
- # This method is called (indirectly) by __del__, so it cannot
- # refer to anything outside of its local scope.
- if _WIFSIGNALED(sts):
- self.returncode = -_WTERMSIG(sts)
- elif _WIFEXITED(sts):
- self.returncode = _WEXITSTATUS(sts)
- else:
- # Should never happen
- raise RuntimeError("Unknown child exit status!")
-
-
- def _internal_poll(self, _deadstate: int = None) -> int:
- """Check if child process has terminated. Returns returncode
- attribute.
-
- This method is called by __del__, so it cannot reference anything
- outside of the local scope (nor can any methods it calls).
-
- """
- return self._internal_poll_posix(_deadstate)
-
- def _internal_poll_posix(self, _deadstate: int = None,
- _waitpid: Callable[[int, int],
- Tuple[int, int]] = os.waitpid,
- _WNOHANG: int = os.WNOHANG,
- _os_error: Any = os.error) -> int:
- if self.returncode is None:
- try:
- pid, sts = _waitpid(self.pid, _WNOHANG)
- if pid == self.pid:
- self._handle_exitstatus(sts)
- except _os_error:
- if _deadstate is not None:
- self.returncode = _deadstate
- return self.returncode
-
-
- def wait(self) -> int:
- """Wait for child process to terminate. Returns returncode
- attribute."""
- if self.returncode is None:
- try:
- pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0)
- except OSError as e:
- if e.errno != errno.ECHILD:
- raise
- # This happens if SIGCLD is set to be ignored or waiting
- # for child processes has otherwise been disabled for our
- # process. This child is dead, we can't get the status.
- sts = 0
- self._handle_exitstatus(sts)
- return self.returncode
-
-
- def _communicate(self, input: Any) -> Tuple[Any, Any]:
- if self.stdin:
- # Flush stdio buffer. This might block, if the user has
- # been writing to .stdin in an uncontrolled fashion.
- self.stdin.flush()
- if not input:
- self.stdin.close()
-
- if _has_poll:
- stdout, stderr = self._communicate_with_poll(input)
- else:
- stdout, stderr = self._communicate_with_select(input)
-
- # All data exchanged. Translate lists into strings.
- if stdout is not None:
- stdout2 = b''.join(stdout)
- else:
- stdout2 = None
- if stderr is not None:
- stderr2 = b''.join(stderr)
- else:
- stderr2 = None
-
- # Translate newlines, if requested.
- # This also turns bytes into strings.
- stdout3 = cast(Any, stdout2)
- stderr3 = cast(Any, stderr2)
- if self.universal_newlines:
- if stdout is not None:
- stdout3 = self._translate_newlines(
- stdout2, cast(TextIO, self.stdout).encoding)
- if stderr is not None:
- stderr3 = self._translate_newlines(
- stderr2, cast(TextIO, self.stderr).encoding)
-
- self.wait()
- return (stdout3, stderr3)
-
-
- def _communicate_with_poll(self, input: Any) -> Tuple[List[bytes],
- List[bytes]]:
- stdout = None # type: List[bytes] # Return
- stderr = None # type: List[bytes] # Return
- fd2file = {} # type: Dict[int, Any]
- fd2output = {} # type: Dict[int, List[bytes]]
-
- poller = select.poll()
- def register_and_append(file_obj: IO[Any], eventmask: int) -> None:
- poller.register(file_obj.fileno(), eventmask)
- fd2file[file_obj.fileno()] = file_obj
-
- def close_unregister_and_remove(fd: int) -> None:
- poller.unregister(fd)
- fd2file[fd].close()
- fd2file.pop(fd)
-
- if self.stdin and input:
- register_and_append(self.stdin, select.POLLOUT)
-
- select_POLLIN_POLLPRI = select.POLLIN | select.POLLPRI
- if self.stdout:
- register_and_append(self.stdout, select_POLLIN_POLLPRI)
- fd2output[self.stdout.fileno()] = stdout = []
- if self.stderr:
- register_and_append(self.stderr, select_POLLIN_POLLPRI)
- fd2output[self.stderr.fileno()] = stderr = []
-
- input_offset = 0
- while fd2file:
- try:
- ready = poller.poll()
- except select.error as e:
- if e.args[0] == errno.EINTR:
- continue
- raise
-
- # XXX Rewrite these to use non-blocking I/O on the
- # file objects; they are no longer using C stdio!
-
- for fd, mode in ready:
- if mode & select.POLLOUT:
- chunk = input[input_offset : input_offset + _PIPE_BUF]
- try:
- input_offset += os.write(fd, chunk)
- except OSError as e2:
- if e2.errno == errno.EPIPE:
- close_unregister_and_remove(fd)
- else:
- raise
- else:
- if input_offset >= len(input):
- close_unregister_and_remove(fd)
- elif mode & select_POLLIN_POLLPRI:
- data = os.read(fd, 4096)
- if not data:
- close_unregister_and_remove(fd)
- fd2output[fd].append(data)
- else:
- # Ignore hang up or errors.
- close_unregister_and_remove(fd)
-
- return (stdout, stderr)
-
-
- def _communicate_with_select(self, input: Any) -> Tuple[List[bytes],
- List[bytes]]:
- read_set = [] # type: List[IO[Any]]
- write_set = [] # type: List[IO[Any]]
- stdout = None # type: List[bytes] # Return
- stderr = None # type: List[bytes] # Return
-
- if self.stdin and input:
- write_set.append(self.stdin)
- if self.stdout:
- read_set.append(self.stdout)
- stdout = []
- if self.stderr:
- read_set.append(self.stderr)
- stderr = []
-
- input_offset = 0
- while read_set or write_set:
- try:
- rlist, wlist, xlist = select.select(read_set, write_set, [])
- except select.error as e:
- if e.args[0] == errno.EINTR:
- continue
- raise
-
- # XXX Rewrite these to use non-blocking I/O on the
- # file objects; they are no longer using C stdio!
-
- if self.stdin in wlist:
- chunk = input[input_offset : input_offset + _PIPE_BUF]
- try:
- bytes_written = os.write(self.stdin.fileno(), chunk)
- except OSError as oe:
- if oe.errno == errno.EPIPE:
- self.stdin.close()
- write_set.remove(self.stdin)
- else:
- raise
- else:
- input_offset += bytes_written
- if input_offset >= len(input):
- self.stdin.close()
- write_set.remove(self.stdin)
-
- if self.stdout in rlist:
- data = os.read(self.stdout.fileno(), 1024)
- if not data:
- self.stdout.close()
- read_set.remove(self.stdout)
- stdout.append(data)
-
- if self.stderr in rlist:
- data = os.read(self.stderr.fileno(), 1024)
- if not data:
- self.stderr.close()
- read_set.remove(self.stderr)
- stderr.append(data)
-
- return (stdout, stderr)
-
-
- def send_signal(self, sig: int) -> None:
- """Send a signal to the process
- """
- os.kill(self.pid, sig)
-
- def terminate(self) -> None:
- """Terminate the process with SIGTERM
- """
- self.send_signal(signal.SIGTERM)
-
- def kill(self) -> None:
- """Kill the process with SIGKILL
- """
- self.send_signal(signal.SIGKILL)
-
-
-def _demo_posix() -> None:
- #
- # Example 1: Simple redirection: Get process list
- #
- plist = Popen(["ps"], stdout=PIPE).communicate()[0]
- print("Process list:")
- print(plist)
-
- #
- # Example 2: Change uid before executing child
- #
- if os.getuid() == 0:
- p = Popen(["id"], preexec_fn=lambda: os.setuid(100))
- p.wait()
-
- #
- # Example 3: Connecting several subprocesses
- #
- print("Looking for 'hda'...")
- p1 = Popen(["dmesg"], stdout=PIPE)
- p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
- print(repr(p2.communicate()[0]))
-
- #
- # Example 4: Catch execution error
- #
- print()
- print("Trying a weird file...")
- try:
- print(Popen(["/this/path/does/not/exist"]).communicate())
- except OSError as e:
- if e.errno == errno.ENOENT:
- print("The file didn't exist. I thought so...")
- else:
- print("Error", e.errno)
- else:
- print("Gosh. No error.", file=sys.stderr)
-
-
-def _demo_windows() -> None:
- #
- # Example 1: Connecting several subprocesses
- #
- print("Looking for 'PROMPT' in set output...")
- p1 = Popen("set", stdout=PIPE, shell=True)
- p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE)
- print(repr(p2.communicate()[0]))
-
- #
- # Example 2: Simple execution of program
- #
- print("Executing calc...")
- p = Popen("calc")
- p.wait()
-
-
-if __name__ == "__main__":
- if mswindows:
- _demo_windows()
- else:
- _demo_posix()
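The deleted sample above tracks the standard subprocess API, so the equivalent calls against the real stdlib module look like this; a minimal sketch, with illustrative commands borrowed from the docstrings and _demo_posix() above:

    import subprocess
    from subprocess import PIPE, STDOUT

    # Capture stdout and fold stderr into it, as in check_output's docstring.
    out = subprocess.check_output(["ls", "-l", "/dev/null"], stderr=STDOUT)

    # Connect two processes, as in _demo_posix(): dmesg | grep hda
    p1 = subprocess.Popen(["dmesg"], stdout=PIPE)
    p2 = subprocess.Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
    data, _ = p2.communicate()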
diff --git a/test-data/stdlib-samples/3.2/tempfile.py b/test-data/stdlib-samples/3.2/tempfile.py
deleted file mode 100644
index d12e21e..0000000
--- a/test-data/stdlib-samples/3.2/tempfile.py
+++ /dev/null
@@ -1,717 +0,0 @@
-"""Temporary files.
-
-This module provides generic, low- and high-level interfaces for
-creating temporary files and directories. The interfaces listed
-as "safe" just below can be used without fear of race conditions.
-Those listed as "unsafe" cannot, and are provided for backward
-compatibility only.
-
-This module also provides some data items to the user:
-
- TMP_MAX - maximum number of names that will be tried before
- giving up.
- template - the default prefix for all temporary names.
- You may change this to control the default prefix.
- tempdir - If this is set to a string before the first use of
- any routine from this module, it will be considered as
- another candidate location to store temporary files.
-"""
-
-__all__ = [
- "NamedTemporaryFile", "TemporaryFile", # high level safe interfaces
- "SpooledTemporaryFile", "TemporaryDirectory",
- "mkstemp", "mkdtemp", # low level safe interfaces
- "mktemp", # deprecated unsafe interface
- "TMP_MAX", "gettempprefix", # constants
- "tempdir", "gettempdir"
- ]
-
-
-# Imports.
-
-import warnings as _warnings
-import sys as _sys
-import io as _io
-import os as _os
-import errno as _errno
-from random import Random as _Random
-
-from typing import (
- Any as _Any, Callable as _Callable, Iterator as _Iterator,
- List as _List, Tuple as _Tuple, Dict as _Dict, Iterable as _Iterable,
- IO as _IO, cast as _cast, Optional as _Optional, Type as _Type,
-)
-from types import TracebackType as _TracebackType
-
-try:
- import fcntl as _fcntl
-except ImportError:
- def _set_cloexec(fd: int) -> None:
- pass
-else:
- def _set_cloexec(fd: int) -> None:
- try:
- flags = _fcntl.fcntl(fd, _fcntl.F_GETFD, 0)
- except IOError:
- pass
- else:
- # flags read successfully, modify
- flags |= _fcntl.FD_CLOEXEC
- _fcntl.fcntl(fd, _fcntl.F_SETFD, flags)
-
-
-try:
- import _thread
- _allocate_lock = _thread.allocate_lock # type: _Callable[[], _Any]
-except ImportError:
- import _dummy_thread
- _allocate_lock = _dummy_thread.allocate_lock
-
-_text_openflags = _os.O_RDWR | _os.O_CREAT | _os.O_EXCL
-if hasattr(_os, 'O_NOINHERIT'):
- _text_openflags |= _os.O_NOINHERIT
-if hasattr(_os, 'O_NOFOLLOW'):
- _text_openflags |= _os.O_NOFOLLOW
-
-_bin_openflags = _text_openflags
-if hasattr(_os, 'O_BINARY'):
- _bin_openflags |= _os.O_BINARY
-
-if hasattr(_os, 'TMP_MAX'):
- TMP_MAX = _os.TMP_MAX
-else:
- TMP_MAX = 10000
-
-template = "tmp"
-
-# Internal routines.
-
-_once_lock = _allocate_lock()
-
-if hasattr(_os, "lstat"):
- _stat = _os.lstat # type: _Callable[[str], object]
-elif hasattr(_os, "stat"):
- _stat = _os.stat
-else:
- # Fallback. All we need is something that raises os.error if the
- # file doesn't exist.
- def __stat(fn: str) -> object:
- try:
- f = open(fn)
- except IOError:
- raise _os.error()
- f.close()
- _stat = __stat
-
-def _exists(fn: str) -> bool:
- try:
- _stat(fn)
- except _os.error:
- return False
- else:
- return True
-
-class _RandomNameSequence(_Iterator[str]):
- """An instance of _RandomNameSequence generates an endless
- sequence of unpredictable strings which can safely be incorporated
- into file names. Each string is six characters long. Multiple
- threads can safely use the same instance at the same time.
-
- _RandomNameSequence is an iterator."""
-
- characters = "abcdefghijklmnopqrstuvwxyz0123456789_"
-
- @property
- def rng(self) -> _Random:
- cur_pid = _os.getpid()
- if cur_pid != getattr(self, '_rng_pid', None):
- self._rng = _Random()
- self._rng_pid = cur_pid
- return self._rng
-
- def __iter__(self) -> _Iterator[str]:
- return self
-
- def __next__(self) -> str:
- c = self.characters
- choose = self.rng.choice
- letters = [choose(c) for dummy in "123456"]
- return ''.join(letters)
-
-def _candidate_tempdir_list() -> _List[str]:
- """Generate a list of candidate temporary directories which
- _get_default_tempdir will try."""
-
- dirlist = [] # type: _List[str]
-
- # First, try the environment.
- for envname in 'TMPDIR', 'TEMP', 'TMP':
- dirname = _os.getenv(envname)
- if dirname: dirlist.append(dirname)
-
- # Failing that, try OS-specific locations.
- if _os.name == 'nt':
- dirlist.extend([ r'c:\temp', r'c:\tmp', r'\temp', r'\tmp' ])
- else:
- dirlist.extend([ '/tmp', '/var/tmp', '/usr/tmp' ])
-
- # As a last resort, the current directory.
- try:
- dirlist.append(_os.getcwd())
- except (AttributeError, _os.error):
- dirlist.append(_os.curdir)
-
- return dirlist
-
-def _get_default_tempdir() -> str:
- """Calculate the default directory to use for temporary files.
- This routine should be called exactly once.
-
- We determine whether or not a candidate temp dir is usable by
- trying to create and write to a file in that directory. If this
- is successful, the test file is deleted. To prevent denial of
- service, the name of the test file must be randomized."""
-
- namer = _RandomNameSequence()
- dirlist = _candidate_tempdir_list()
-
- for dir in dirlist:
- if dir != _os.curdir:
- dir = _os.path.normcase(_os.path.abspath(dir))
- # Try only a few names per directory.
- for seq in range(100):
- name = next(namer)
- filename = _os.path.join(dir, name)
- try:
- fd = _os.open(filename, _bin_openflags, 0o600)
- fp = _io.open(fd, 'wb')
- fp.write(b'blat')
- fp.close()
- _os.unlink(filename)
- fp = fd = None
- return dir
- except (OSError, IOError) as e:
- if e.args[0] != _errno.EEXIST:
- break # no point trying more names in this directory
- pass
- raise IOError(_errno.ENOENT,
- "No usable temporary directory found in %s" % dirlist)
-
-_name_sequence = None # type: _RandomNameSequence
-
-def _get_candidate_names() -> _RandomNameSequence:
- """Common setup sequence for all user-callable interfaces."""
-
- global _name_sequence
- if _name_sequence is None:
- _once_lock.acquire()
- try:
- if _name_sequence is None:
- _name_sequence = _RandomNameSequence()
- finally:
- _once_lock.release()
- return _name_sequence
-
-
-def _mkstemp_inner(dir: str, pre: str, suf: str,
- flags: int) -> _Tuple[int, str]:
- """Code common to mkstemp, TemporaryFile, and NamedTemporaryFile."""
-
- names = _get_candidate_names()
-
- for seq in range(TMP_MAX):
- name = next(names)
- file = _os.path.join(dir, pre + name + suf)
- try:
- fd = _os.open(file, flags, 0o600)
- _set_cloexec(fd)
- return (fd, _os.path.abspath(file))
- except OSError as e:
- if e.errno == _errno.EEXIST:
- continue # try again
- raise
-
- raise IOError(_errno.EEXIST, "No usable temporary file name found")
-
-
-# User visible interfaces.
-
-def gettempprefix() -> str:
- """Accessor for tempdir.template."""
- return template
-
-tempdir = None # type: str
-
-def gettempdir() -> str:
- """Accessor for tempfile.tempdir."""
- global tempdir
- if tempdir is None:
- _once_lock.acquire()
- try:
- if tempdir is None:
- tempdir = _get_default_tempdir()
- finally:
- _once_lock.release()
- return tempdir
-
-def mkstemp(suffix: str = "", prefix: str = template, dir: str = None,
- text: bool = False) -> _Tuple[int, str]:
- """User-callable function to create and return a unique temporary
- file. The return value is a pair (fd, name) where fd is the
- file descriptor returned by os.open, and name is the filename.
-
- If 'suffix' is specified, the file name will end with that suffix,
- otherwise there will be no suffix.
-
- If 'prefix' is specified, the file name will begin with that prefix,
- otherwise a default prefix is used.
-
- If 'dir' is specified, the file will be created in that directory,
- otherwise a default directory is used.
-
- If 'text' is specified and true, the file is opened in text
- mode. Else (the default) the file is opened in binary mode. On
- some operating systems, this makes no difference.
-
- The file is readable and writable only by the creating user ID.
- If the operating system uses permission bits to indicate whether a
- file is executable, the file is executable by no one. The file
- descriptor is not inherited by children of this process.
-
- Caller is responsible for deleting the file when done with it.
- """
-
- if dir is None:
- dir = gettempdir()
-
- if text:
- flags = _text_openflags
- else:
- flags = _bin_openflags
-
- return _mkstemp_inner(dir, prefix, suffix, flags)
-
-
-def mkdtemp(suffix: str = "", prefix: str = template, dir: str = None) -> str:
- """User-callable function to create and return a unique temporary
- directory. The return value is the pathname of the directory.
-
- Arguments are as for mkstemp, except that the 'text' argument is
- not accepted.
-
- The directory is readable, writable, and searchable only by the
- creating user.
-
- Caller is responsible for deleting the directory when done with it.
- """
-
- if dir is None:
- dir = gettempdir()
-
- names = _get_candidate_names()
-
- for seq in range(TMP_MAX):
- name = next(names)
- file = _os.path.join(dir, prefix + name + suffix)
- try:
- _os.mkdir(file, 0o700)
- return file
- except OSError as e:
- if e.errno == _errno.EEXIST:
- continue # try again
- raise
-
- raise IOError(_errno.EEXIST, "No usable temporary directory name found")
-
-def mktemp(suffix: str = "", prefix: str = template, dir: str = None) -> str:
- """User-callable function to return a unique temporary file name. The
- file is not created.
-
- Arguments are as for mkstemp, except that the 'text' argument is
- not accepted.
-
- This function is unsafe and should not be used. The file name
- refers to a file that did not exist at some point, but by the time
- you get around to creating it, someone else may have beaten you to
- the punch.
- """
-
-## from warnings import warn as _warn
-## _warn("mktemp is a potential security risk to your program",
-## RuntimeWarning, stacklevel=2)
-
- if dir is None:
- dir = gettempdir()
-
- names = _get_candidate_names()
- for seq in range(TMP_MAX):
- name = next(names)
- file = _os.path.join(dir, prefix + name + suffix)
- if not _exists(file):
- return file
-
- raise IOError(_errno.EEXIST, "No usable temporary filename found")
-
-
-class _TemporaryFileWrapper:
- """Temporary file wrapper
-
- This class provides a wrapper around files opened for
- temporary use. In particular, it seeks to automatically
- remove the file when it is no longer needed.
- """
-
- def __init__(self, file: _IO[_Any], name: str,
- delete: bool = True) -> None:
- self.file = file
- self.name = name
- self.close_called = False
- self.delete = delete
-
- if _os.name != 'nt':
- # Cache the unlinker so we don't get spurious errors at
- # shutdown when the module-level "os" is None'd out. Note
- # that this must be referenced as self.unlink, because the
- # name TemporaryFileWrapper may also get None'd out before
- # __del__ is called.
- self.unlink = _os.unlink
-
- def __getattr__(self, name: str) -> _Any:
- # Attribute lookups are delegated to the underlying file
- # and cached for non-numeric results
- # (i.e. methods are cached, closed and friends are not)
- file = _cast(_Any, self).__dict__['file'] # type: _IO[_Any]
- a = getattr(file, name)
- if not isinstance(a, int):
- setattr(self, name, a)
- return a
-
- # The underlying __enter__ method returns the wrong object
- # (self.file) so override it to return the wrapper
- def __enter__(self) -> '_TemporaryFileWrapper':
- self.file.__enter__()
- return self
-
- # iter() doesn't use __getattr__ to find the __iter__ method
- def __iter__(self) -> _Iterator[_Any]:
- return iter(self.file)
-
- # NT provides delete-on-close as a primitive, so we don't need
- # the wrapper to do anything special. We still use it so that
- # file.name is useful (i.e. not "(fdopen)") with NamedTemporaryFile.
- if _os.name != 'nt':
- def close(self) -> None:
- if not self.close_called:
- self.close_called = True
- self.file.close()
- if self.delete:
- self.unlink(self.name)
-
- def __del__(self) -> None:
- self.close()
-
- # Need to trap __exit__ as well to ensure the file gets
- # deleted when used in a with statement
- def __exit__(self, exc: _Type[BaseException], value: BaseException,
- tb: _Optional[_TracebackType]) -> bool:
- result = self.file.__exit__(exc, value, tb)
- self.close()
- return result
- else:
- def __exit__(self, exc: _Type[BaseException], value: BaseException,
- tb: _Optional[_TracebackType]) -> bool:
- self.file.__exit__(exc, value, tb)
-
-
-def NamedTemporaryFile(mode: str = 'w+b', buffering: int = -1,
- encoding: str = None, newline: str = None,
- suffix: str = "", prefix: str = template,
- dir: str = None, delete: bool = True) -> _IO[_Any]:
- """Create and return a temporary file.
- Arguments:
- 'prefix', 'suffix', 'dir' -- as for mkstemp.
- 'mode' -- the mode argument to io.open (default "w+b").
- 'buffering' -- the buffer size argument to io.open (default -1).
- 'encoding' -- the encoding argument to io.open (default None)
- 'newline' -- the newline argument to io.open (default None)
- 'delete' -- whether the file is deleted on close (default True).
- The file is created as mkstemp() would do it.
-
- Returns an object with a file-like interface; the name of the file
- is accessible as file.name. The file will be automatically deleted
- when it is closed unless the 'delete' argument is set to False.
- """
-
- if dir is None:
- dir = gettempdir()
-
- flags = _bin_openflags
-
- # Setting O_TEMPORARY in the flags causes the OS to delete
- # the file when it is closed. This is only supported by Windows.
- if _os.name == 'nt' and delete:
- flags |= _os.O_TEMPORARY
-
- (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
- file = _io.open(fd, mode, buffering=buffering,
- newline=newline, encoding=encoding)
-
- return _cast(_IO[_Any], _TemporaryFileWrapper(file, name, delete))
-
-if _os.name != 'posix' or _sys.platform == 'cygwin':
- # On non-POSIX and Cygwin systems, assume that we cannot unlink a file
- # while it is open.
- TemporaryFile = NamedTemporaryFile
-
-else:
- def _TemporaryFile(mode: str = 'w+b', buffering: int = -1,
- encoding: str = None, newline: str = None,
- suffix: str = "", prefix: str = template,
- dir: str = None, delete: bool = True) -> _IO[_Any]:
- """Create and return a temporary file.
- Arguments:
- 'prefix', 'suffix', 'dir' -- as for mkstemp.
- 'mode' -- the mode argument to io.open (default "w+b").
- 'buffering' -- the buffer size argument to io.open (default -1).
- 'encoding' -- the encoding argument to io.open (default None)
- 'newline' -- the newline argument to io.open (default None)
- The file is created as mkstemp() would do it.
-
- Returns an object with a file-like interface. The file has no
- name, and will cease to exist when it is closed.
- """
-
- if dir is None:
- dir = gettempdir()
-
- flags = _bin_openflags
-
- (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
- try:
- _os.unlink(name)
- return _io.open(fd, mode, buffering=buffering,
- newline=newline, encoding=encoding)
- except:
- _os.close(fd)
- raise
- TemporaryFile = _TemporaryFile
-
-class SpooledTemporaryFile:
- """Temporary file wrapper, specialized to switch from
- StringIO to a real file when it exceeds a certain size or
- when a fileno is needed.
- """
- _rolled = False
- _file = None # type: _Any # BytesIO, StringIO or TemporaryFile
-
- def __init__(self, max_size: int = 0, mode: str = 'w+b',
- buffering: int = -1, encoding: str = None,
- newline: str = None, suffix: str = "",
- prefix: str = template, dir: str = None) -> None:
- if 'b' in mode:
- self._file = _io.BytesIO()
- else:
- # Setting newline="\n" avoids newline translation;
- # this is important because otherwise on Windows we'd
- # get double newline translation upon rollover().
- self._file = _io.StringIO(newline="\n")
- self._max_size = max_size
- self._rolled = False
- self._TemporaryFileArgs = {
- 'mode': mode, 'buffering': buffering,
- 'suffix': suffix, 'prefix': prefix,
- 'encoding': encoding, 'newline': newline,
- 'dir': dir} # type: _Dict[str, _Any]
-
- def _check(self, file: _IO[_Any]) -> None:
- if self._rolled: return
- max_size = self._max_size
- if max_size and file.tell() > max_size:
- self.rollover()
-
- def rollover(self) -> None:
- if self._rolled: return
- file = self._file
- newfile = self._file = TemporaryFile(**self._TemporaryFileArgs)
- self._TemporaryFileArgs = None
-
- newfile.write(file.getvalue())
- newfile.seek(file.tell(), 0)
-
- self._rolled = True
-
- # The method caching trick from NamedTemporaryFile
- # won't work here, because _file may change from a
- # _StringIO instance to a real file. So we list
- # all the methods directly.
-
- # Context management protocol
- def __enter__(self) -> 'SpooledTemporaryFile':
- if self._file.closed:
- raise ValueError("Cannot enter context with closed file")
- return self
-
- def __exit__(self, exc: type, value: BaseException,
- tb: _TracebackType) -> bool:
- self._file.close()
-
- # file protocol
- def __iter__(self) -> _Iterable[_Any]:
- return self._file.__iter__()
-
- def close(self) -> None:
- self._file.close()
-
- @property
- def closed(self) -> bool:
- return self._file.closed
-
- @property
- def encoding(self) -> str:
- return self._file.encoding
-
- def fileno(self) -> int:
- self.rollover()
- return self._file.fileno()
-
- def flush(self) -> None:
- self._file.flush()
-
- def isatty(self) -> bool:
- return self._file.isatty()
-
- @property
- def mode(self) -> str:
- return self._file.mode
-
- @property
- def name(self) -> str:
- return self._file.name
-
- @property
- def newlines(self) -> _Any:
- return self._file.newlines
-
- #def next(self):
- # return self._file.next
-
- def read(self, n: int = -1) -> _Any:
- return self._file.read(n)
-
- def readline(self, limit: int = -1) -> _Any:
- return self._file.readline(limit)
-
- def readlines(self, *args) -> _List[_Any]:
- return self._file.readlines(*args)
-
- def seek(self, offset: int, whence: int = 0) -> None:
- self._file.seek(offset, whence)
-
- @property
- def softspace(self) -> bool:
- return self._file.softspace
-
- def tell(self) -> int:
- return self._file.tell()
-
- def truncate(self) -> None:
- self._file.truncate()
-
- def write(self, s: _Any) -> int:
- file = self._file # type: _IO[_Any]
- rv = file.write(s)
- self._check(file)
- return rv
-
- def writelines(self, iterable: _Iterable[_Any]) -> None:
- file = self._file # type: _IO[_Any]
- file.writelines(iterable)
- self._check(file)
-
- #def xreadlines(self, *args) -> _Any:
- # return self._file.xreadlines(*args)
-
-
-class TemporaryDirectory(object):
- """Create and return a temporary directory. This has the same
- behavior as mkdtemp but can be used as a context manager. For
- example:
-
- with TemporaryDirectory() as tmpdir:
- ...
-
- Upon exiting the context, the directory and everything contained
- in it are removed.
- """
-
- def __init__(self, suffix: str = "", prefix: str = template,
- dir: str = None) -> None:
- self._closed = False
- self.name = None # type: str # Handle mkdtemp throwing an exception
- self.name = mkdtemp(suffix, prefix, dir)
-
- # XXX (ncoghlan): The following code attempts to make
- # this class tolerant of the module nulling out process
- # that happens during CPython interpreter shutdown
- # Alas, it doesn't actually manage it. See issue #10188
- self._listdir = _os.listdir
- self._path_join = _os.path.join
- self._isdir = _os.path.isdir
- self._islink = _os.path.islink
- self._remove = _os.remove
- self._rmdir = _os.rmdir
- self._os_error = _os.error
- self._warn = _warnings.warn
-
- def __repr__(self) -> str:
- return "<{} {!r}>".format(self.__class__.__name__, self.name)
-
- def __enter__(self) -> str:
- return self.name
-
- def cleanup(self, _warn: bool = False) -> None:
- if self.name and not self._closed:
- try:
- self._rmtree(self.name)
- except (TypeError, AttributeError) as ex:
- # Issue #10188: Emit a warning on stderr
- # if the directory could not be cleaned
- # up due to missing globals
- if "None" not in str(ex):
- raise
- print("ERROR: {!r} while cleaning up {!r}".format(ex, self,),
- file=_sys.stderr)
- return
- self._closed = True
- if _warn:
- self._warn("Implicitly cleaning up {!r}".format(self),
- ResourceWarning)
-
- def __exit__(self, exc: type, value: BaseException,
- tb: _TracebackType) -> bool:
- self.cleanup()
-
- def __del__(self) -> None:
- # Issue a ResourceWarning if implicit cleanup needed
- self.cleanup(_warn=True)
-
- def _rmtree(self, path: str) -> None:
- # Essentially a stripped down version of shutil.rmtree. We can't
- # use globals because they may be None'ed out at shutdown.
- for name in self._listdir(path):
- fullname = self._path_join(path, name)
- try:
- isdir = self._isdir(fullname) and not self._islink(fullname)
- except self._os_error:
- isdir = False
- if isdir:
- self._rmtree(fullname)
- else:
- try:
- self._remove(fullname)
- except self._os_error:
- pass
- try:
- self._rmdir(path)
- except self._os_error:
- pass
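Like the subprocess sample, this deleted tempfile sample mirrors the stdlib module; a minimal usage sketch against the real tempfile API (paths and contents are illustrative):

    import os
    import tempfile

    # mkstemp returns (fd, absolute path); the caller cleans up both.
    fd, path = tempfile.mkstemp(suffix=".txt")
    os.write(fd, b"blat")
    os.close(fd)
    os.unlink(path)

    # TemporaryDirectory removes the whole tree when the with-block exits.
    with tempfile.TemporaryDirectory() as tmpdir:
        with open(os.path.join(tmpdir, "scratch"), "wb") as f:
            f.write(b"data")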
diff --git a/test-data/stdlib-samples/3.2/test/__init__.py b/test-data/stdlib-samples/3.2/test/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/test-data/stdlib-samples/3.2/test/randv2_32.pck b/test-data/stdlib-samples/3.2/test/randv2_32.pck
deleted file mode 100644
index 587ab24..0000000
--- a/test-data/stdlib-samples/3.2/test/randv2_32.pck
+++ /dev/null
@@ -1,633 +0,0 @@
-crandom
-Random
-p0
-(tRp1
-(I2
-(I-2147483648
-I-845974985
-I-1294090086
-I1193659239
-I-1849481736
-I-946579732
-I-34406770
-I1749049471
-I1997774682
-I1432026457
-I1288127073
-I-943175655
-I-1718073964
-I339993548
-I-1045260575
-I582505037
-I-1555108250
-I-1114765620
-I1578648750
-I-350384412
-I-20845848
-I-288255314
-I738790953
-I1901249641
-I1999324672
-I-277361068
-I-1515885839
-I2061761596
-I-809068089
-I1287981136
-I258129492
-I-6303745
-I-765148337
-I1090344911
-I1653434703
-I-1242923628
-I1639171313
-I-1870042660
-I-1655014050
-I345609048
-I2093410138
-I1963263374
-I-2122098342
-I1336859961
-I-810942729
-I945857753
-I2103049942
-I623922684
-I1418349549
-I690877342
-I754973107
-I-1605111847
-I1607137813
-I-1704917131
-I1317536428
-I1714882872
-I-1665385120
-I1823694397
-I-1790836866
-I-1696724812
-I-603979847
-I-498599394
-I-341265291
-I927388804
-I1778562135
-I1716895781
-I1023198122
-I1726145967
-I941955525
-I1240148950
-I-1929634545
-I-1288147083
-I-519318335
-I754559777
-I-707571958
-I374604022
-I420424061
-I-1095443486
-I1621934944
-I-1220502522
-I-140049608
-I-918917122
-I304341024
-I-1637446057
-I-353934485
-I1973436235
-I433380241
-I-686759465
-I-2111563154
-I-573422032
-I804304541
-I1513063483
-I1417381689
-I-804778729
-I211756408
-I544537322
-I890881641
-I150378374
-I1765739392
-I1011604116
-I584889095
-I1400520554
-I413747808
-I-1741992587
-I-1882421574
-I-1373001903
-I-1885348538
-I903819480
-I1083220038
-I-1318105424
-I1740421404
-I1693089625
-I775965557
-I1319608037
-I-2127475785
-I-367562895
-I-1416273451
-I1693000327
-I-1217438421
-I834405522
-I-128287275
-I864057548
-I-973917356
-I7304111
-I1712253182
-I1353897741
-I672982288
-I1778575559
-I-403058377
-I-38540378
-I-1393713496
-I13193171
-I1127196200
-I205176472
-I-2104790506
-I299985416
-I1403541685
-I-1018270667
-I-1980677490
-I-1182625797
-I1637015181
-I-1795357414
-I1514413405
-I-924516237
-I-1841873650
-I-1014591269
-I1576616065
-I-1319103135
-I-120847840
-I2062259778
-I-9285070
-I1160890300
-I-575137313
-I-1509108275
-I46701926
-I-287560914
-I-256824960
-I577558250
-I900598310
-I944607867
-I2121154920
-I-1170505192
-I-1347170575
-I77247778
-I-1899015765
-I1234103327
-I1027053658
-I1934632322
-I-792031234
-I1147322536
-I1290655117
-I1002059715
-I1325898538
-I896029793
-I-790940694
-I-980470721
-I-1922648255
-I-951672814
-I291543943
-I1158740218
-I-1959023736
-I-1977185236
-I1527900076
-I514104195
-I-814154113
-I-593157883
-I-1023704660
-I1285688377
-I-2117525386
-I768954360
-I-38676846
-I-799848659
-I-1305517259
-I-1938213641
-I-462146758
-I-1663302892
-I1899591069
-I-22935388
-I-275856976
-I-443736893
-I-739441156
-I93862068
-I-838105669
-I1735629845
-I-817484206
-I280814555
-I1753547179
-I1811123479
-I1974543632
-I-48447465
-I-642694345
-I-531149613
-I518698953
-I-221642627
-I-686519187
-I776644303
-I257774400
-I-1499134857
-I-1055273455
-I-237023943
-I1981752330
-I-917671662
-I-372905983
-I1588058420
-I1171936660
-I-1730977121
-I1360028989
-I1769469287
-I1910709542
-I-852692959
-I1396944667
-I-1723999155
-I-310975435
-I-1965453954
-I-1636858570
-I2005650794
-I680293715
-I1355629386
-I844514684
-I-1909152807
-I-808646074
-I1936510018
-I1134413810
-I-143411047
-I-1478436304
-I1394969244
-I-1170110660
-I1963112086
-I-1518351049
-I-1506287443
-I-455023090
-I-855366028
-I-1746785568
-I933990882
-I-703625141
-I-285036872
-I188277905
-I1471578620
-I-981382835
-I-586974220
-I945619758
-I1608778444
-I-1708548066
-I-1897629320
-I-42617810
-I-836840790
-I539154487
-I-235706962
-I332074418
-I-575700589
-I1534608003
-I632116560
-I-1819760653
-I642052958
-I-722391771
-I-1104719475
-I-1196847084
-I582413973
-I1563394876
-I642007944
-I108989456
-I361625014
-I677308625
-I-1806529496
-I-959050708
-I-1858251070
-I-216069832
-I701624579
-I501238033
-I12287030
-I1895107107
-I2089098638
-I-874806230
-I1236279203
-I563718890
-I-544352489
-I-1879707498
-I1767583393
-I-1776604656
-I-693294301
-I-88882831
-I169303357
-I1299196152
-I-1122791089
-I-379157172
-I1934671851
-I1575736961
-I-19573174
-I-1401511009
-I9305167
-I-1115174467
-I1670735537
-I1226436501
-I-2004524535
-I1767463878
-I-1722855079
-I-559413926
-I1529810851
-I1201272087
-I-1297130971
-I-1188149982
-I1396557188
-I-370358342
-I-1006619702
-I1600942463
-I906087130
-I-76991909
-I2069580179
-I-1674195181
-I-2098404729
-I-940972459
-I-573399187
-I-1930386277
-I-721311199
-I-647834744
-I1452181671
-I688681916
-I1812793731
-I1704380620
-I-1389615179
-I866287837
-I-1435265007
-I388400782
-I-147986600
-I-1613598851
-I-1040347408
-I782063323
-I-239282031
-I-575966722
-I-1865208174
-I-481365146
-I579572803
-I-1239481494
-I335361280
-I-429722947
-I1881772789
-I1908103808
-I1653690013
-I-1668588344
-I1933787953
-I-2033480609
-I22162797
-I-1516527040
-I-461232482
-I-16201372
-I-2043092030
-I114990337
-I-1524090084
-I1456374020
-I458606440
-I-1928083218
-I227773125
-I-1129028159
-I1678689
-I1575896907
-I-1792935220
-I-151387575
-I64084088
-I-95737215
-I1337335688
-I-1963466345
-I1243315130
-I-1798518411
-I-546013212
-I-607065396
-I1219824160
-I1715218469
-I-1368163783
-I1701552913
-I-381114888
-I1068821717
-I266062971
-I-2066513172
-I1767407229
-I-780936414
-I-705413443
-I-1256268847
-I1646874149
-I1107690353
-I839133072
-I67001749
-I860763503
-I884880613
-I91977084
-I755371933
-I420745153
-I-578480690
-I-1520193551
-I1011369331
-I-99754575
-I-733141064
-I-500598588
-I1081124271
-I-1341266575
-I921002612
-I-848852487
-I-1904467341
-I-1294256973
-I-94074714
-I-1778758498
-I-1401188547
-I2101830578
-I2058864877
-I-272875991
-I-1375854779
-I-1332937870
-I619425525
-I-1034529639
-I-36454393
-I-2030499985
-I-1637127500
-I-1408110287
-I-2108625749
-I-961007436
-I1475654951
-I-791946251
-I1667792115
-I1818978830
-I1897980514
-I1959546477
-I-74478911
-I-508643347
-I461594399
-I538802715
-I-2094970071
-I-2076660253
-I1091358944
-I1944029246
-I-343957436
-I-1915845022
-I1237620188
-I1144125174
-I1522190520
-I-670252952
-I-19469226
-I675626510
-I758750096
-I909724354
-I-1846259652
-I544669343
-I445182495
-I-821519930
-I-1124279685
-I-1668995122
-I1653284793
-I-678555151
-I-687513207
-I1558259445
-I-1978866839
-I1558835601
-I1732138472
-I-1904793363
-I620020296
-I1562597874
-I1942617227
-I-549632552
-I721603795
-I417978456
-I-1355281522
-I-538065208
-I-1079523196
-I187375699
-I449064972
-I1018083947
-I1632388882
-I-493269866
-I92769041
-I1477146750
-I1782708404
-I444873376
-I1085851104
-I-6823272
-I-1302251853
-I1602050688
-I-1042187824
-I287161745
-I-1972094479
-I103271491
-I2131619773
-I-2064115870
-I766815498
-I990861458
-I-1664407378
-I1083746756
-I-1018331904
-I-677315687
-I-951670647
-I-952356874
-I451460609
-I-818615564
-I851439508
-I656362634
-I-1351240485
-I823378078
-I1985597385
-I597757740
-I-1512303057
-I1590872798
-I1108424213
-I818850898
-I-1368594306
-I-201107761
-I1793370378
-I1247597611
-I-1594326264
-I-601653890
-I427642759
-I248322113
-I-292545338
-I1708985870
-I1917042771
-I429354503
-I-478470329
-I793960014
-I369939133
-I1728189157
-I-518963626
-I-278523974
-I-1877289696
-I-2088617658
-I-1367940049
-I-62295925
-I197975119
-I-252900777
-I803430539
-I485759441
-I-528283480
-I-1287443963
-I-478617444
-I-861906946
-I-649095555
-I-893184337
-I2050571322
-I803433133
-I1629574571
-I1649720417
-I-2050225209
-I1208598977
-I720314344
-I-615166251
-I-835077127
-I-1405372429
-I995698064
-I148123240
-I-943016676
-I-594609622
-I-1381596711
-I1017195301
-I-1268893013
-I-1815985179
-I-1393570351
-I-870027364
-I-476064472
-I185582645
-I569863326
-I1098584267
-I-1599147006
-I-485054391
-I-852098365
-I1477320135
-I222316762
-I-1515583064
-I-935051367
-I393383063
-I819617226
-I722921837
-I-1241806499
-I-1358566385
-I1666813591
-I1333875114
-I-1663688317
-I-47254623
-I-885800726
-I307388991
-I-1219459496
-I1374870300
-I2132047877
-I-1385624198
-I-245139206
-I1015139214
-I-926198559
-I1969798868
-I-1950480619
-I-559193432
-I-1256446518
-I-1983476981
-I790179655
-I1004289659
-I1541827617
-I1555805575
-I501127333
-I-1123446797
-I-453230915
-I2035104883
-I1296122398
-I-1843698604
-I-715464588
-I337143971
-I-1972119192
-I606777909
-I726977302
-I-1149501872
-I-1963733522
-I-1797504644
-I624
-tp2
-Ntp3
-b.
\ No newline at end of file
diff --git a/test-data/stdlib-samples/3.2/test/randv2_64.pck b/test-data/stdlib-samples/3.2/test/randv2_64.pck
deleted file mode 100644
index 090dd6f..0000000
--- a/test-data/stdlib-samples/3.2/test/randv2_64.pck
+++ /dev/null
@@ -1,633 +0,0 @@
-crandom
-Random
-p0
-(tRp1
-(I2
-(I2147483648
-I1812115682
-I2741755497
-I1028055730
-I809166036
-I2773628650
-I62321950
-I535290043
-I349877800
-I976167039
-I2490696940
-I3631326955
-I2107991114
-I2941205793
-I3199611605
-I1871971556
-I1456108540
-I2984591044
-I140836801
-I4203227310
-I3652722980
-I4031971234
-I555769760
-I697301296
-I2347638880
-I3302335858
-I320255162
-I2553586608
-I1570224361
-I2838780912
-I2315834918
-I2351348158
-I3545433015
-I2292018579
-I1177569331
-I758497559
-I2913311175
-I1014948880
-I1793619243
-I3982451053
-I3850988342
-I2393984324
-I1583100093
-I3144742543
-I3655047493
-I3507532385
-I3094515442
-I350042434
-I2455294844
-I1038739312
-I313809152
-I189433072
-I1653165452
-I4186650593
-I19281455
-I2589680619
-I4145931590
-I4283266118
-I636283172
-I943618337
-I3170184633
-I2308766231
-I634615159
-I538152647
-I2079576891
-I1029442616
-I3410689412
-I1370292761
-I1071718978
-I2139496322
-I1876699543
-I3485866187
-I3157490130
-I1633105386
-I1453253160
-I3841322080
-I3789608924
-I4110770792
-I95083673
-I931354627
-I2065389591
-I3448339827
-I3348204577
-I3263528560
-I2411324590
-I4003055026
-I1869670093
-I2737231843
-I4150701155
-I2689667621
-I2993263224
-I3239890140
-I1191430483
-I1214399779
-I3623428533
-I1817058866
-I3052274451
-I326030082
-I1505129312
-I2306812262
-I1349150363
-I1099127895
-I2543465574
-I2396380193
-I503926466
-I1607109730
-I3451716817
-I58037114
-I4290081119
-I947517597
-I3083440186
-I520522630
-I2948962496
-I4184319574
-I2957636335
-I668374201
-I2325446473
-I472785314
-I3791932366
-I573017189
-I2185725379
-I1262251492
-I3525089379
-I2951262653
-I1305347305
-I940958122
-I3343754566
-I359371744
-I3874044973
-I396897232
-I147188248
-I716683703
-I4013880315
-I1133359586
-I1794612249
-I3480815192
-I3988787804
-I1729355809
-I573408542
-I1419310934
-I1770030447
-I3552845567
-I1693976502
-I1271189893
-I2298236738
-I2049219027
-I3464198070
-I1233574082
-I1007451781
-I1838253750
-I687096593
-I1131375603
-I1223013895
-I1490478435
-I339265439
-I4232792659
-I491538536
-I2816256769
-I1044097522
-I2566227049
-I748762793
-I1511830494
-I3593259822
-I4121279213
-I3735541309
-I3609794797
-I1939942331
-I377570434
-I1437957554
-I1831285696
-I55062811
-I2046783110
-I1303902283
-I1838349877
-I420993556
-I1256392560
-I2795216506
-I2783687924
-I3322303169
-I512794749
-I308405826
-I517164429
-I3320436022
-I1328403632
-I2269184746
-I3729522810
-I3304314450
-I2238756124
-I1690581361
-I3813277532
-I4119706879
-I2659447875
-I388818978
-I2064580814
-I1586227676
-I2627522685
-I2017792269
-I547928109
-I859107450
-I1062238929
-I858886237
-I3795783146
-I4173914756
-I3835915965
-I3329504821
-I3494579904
-I838863205
-I3399734724
-I4247387481
-I3618414834
-I2984433798
-I2165205561
-I4260685684
-I3045904244
-I3450093836
-I3597307595
-I3215851166
-I3162801328
-I2558283799
-I950068105
-I1829664117
-I3108542987
-I2378860527
-I790023460
-I280087750
-I1171478018
-I2333653728
-I3976932140
-I896746152
-I1802494195
-I1232873794
-I2749440836
-I2032037296
-I2012091682
-I1296131034
-I3892133385
-I908161334
-I2296791795
-I548169794
-I696265
-I893156828
-I426904709
-I3565374535
-I2655906825
-I2792178515
-I2406814632
-I4038847579
-I3123934642
-I2197503004
-I3535032597
-I2266216689
-I2117613462
-I1787448518
-I1875089416
-I2037165384
-I1140676321
-I3606296464
-I3229138231
-I2458267132
-I1874651171
-I3331900867
-I1000557654
-I1432861701
-I473636323
-I2691783927
-I1871437447
-I1328016401
-I4118690062
-I449467602
-I681789035
-I864889442
-I1200888928
-I75769445
-I4008690037
-I2464577667
-I4167795823
-I3070097648
-I2579174882
-I1216886568
-I3810116343
-I2249507485
-I3266903480
-I3671233480
-I100191658
-I3087121334
-I365063087
-I3821275176
-I2165052848
-I1282465245
-I3601570637
-I3132413236
-I2780570459
-I3222142917
-I3129794692
-I2611590811
-I947031677
-I2991908938
-I750997949
-I3632575131
-I1632014461
-I2846484755
-I2347261779
-I2903959448
-I1397316686
-I1904578392
-I774649578
-I3164598558
-I2429587609
-I738244516
-I1563304975
-I1399317414
-I1021316297
-I3187933234
-I2126780757
-I4011907847
-I4095169219
-I3358010054
-I2729978247
-I3736811646
-I3009656410
-I2893043637
-I4027447385
-I1239610110
-I1488806900
-I2674866844
-I442876374
-I2853687260
-I2785921005
-I3151378528
-I1180567
-I2803146964
-I982221759
-I2192919417
-I3087026181
-I2480838002
-I738452921
-I687986185
-I3049371676
-I3636492954
-I3468311299
-I2379621102
-I788988633
-I1643210601
-I2983998168
-I2492730801
-I2586048705
-I604073029
-I4121082815
-I1496476928
-I2972357110
-I2663116968
-I2642628592
-I2116052039
-I487186279
-I2577680328
-I3974766614
-I730776636
-I3842528855
-I1929093695
-I44626622
-I3989908833
-I1695426222
-I3675479382
-I3051784964
-I1514876613
-I1254036595
-I2420450649
-I3034377361
-I2332990590
-I1535175126
-I185834384
-I1107372900
-I1707278185
-I1286285295
-I3332574225
-I2785672437
-I883170645
-I2005666473
-I3403131327
-I4122021352
-I1464032858
-I3702576112
-I260554598
-I1837731650
-I2594435345
-I75771049
-I2012484289
-I3058649775
-I29979703
-I3861335335
-I2506495152
-I3786448704
-I442947790
-I2582724774
-I4291336243
-I2568189843
-I1923072690
-I1121589611
-I837696302
-I3284631720
-I3865021324
-I3576453165
-I2559531629
-I1459231762
-I3506550036
-I3754420159
-I2622000757
-I124228596
-I1084328605
-I1692830753
-I547273558
-I674282621
-I655259103
-I3188629610
-I490502174
-I2081001293
-I3191330704
-I4109943593
-I1859948504
-I3163806460
-I508833168
-I1256371033
-I2709253790
-I2068956572
-I3092842814
-I3913926529
-I2039638759
-I981982529
-I536094190
-I368855295
-I51993975
-I1597480732
-I4058175522
-I2155896702
-I3196251991
-I1081913893
-I3952353788
-I3545548108
-I2370669647
-I2206572308
-I2576392991
-I1732303374
-I1153136290
-I537641955
-I1738691747
-I3232854186
-I2539632206
-I2829760278
-I3058187853
-I1202425792
-I3762361970
-I2863949342
-I2640635867
-I376638744
-I1857679757
-I330798087
-I1457400505
-I1135610046
-I606400715
-I1859536026
-I509811335
-I529772308
-I2579273244
-I1890382004
-I3959908876
-I2612335971
-I2834052227
-I1434475986
-I3684202717
-I4015011345
-I582567852
-I3689969571
-I3934753460
-I3034960691
-I208573292
-I4004113742
-I3992904842
-I2587153719
-I3529179079
-I1565424987
-I779130678
-I1048582935
-I3213591622
-I3607793434
-I3951254937
-I2047811901
-I7508850
-I248544605
-I4210090324
-I2331490884
-I70057213
-I776474945
-I1345528889
-I3290403612
-I1664955269
-I1533143116
-I545003424
-I4141564478
-I1257326139
-I868843601
-I2337603029
-I1918131449
-I1843439523
-I1125519035
-I673340118
-I421408852
-I1520454906
-I1804722630
-I3621254196
-I2329968000
-I39464672
-I430583134
-I294026512
-I53978525
-I2892276105
-I1418863764
-I3419054451
-I1391595797
-I3544981798
-I4191780858
-I825672357
-I2972000844
-I1571305069
-I4231982845
-I3611916419
-I3045163168
-I2982349733
-I278572141
-I4215338078
-I839860504
-I1819151779
-I1412347479
-I1386770353
-I3914589491
-I3783104977
-I4124296733
-I830546258
-I89825624
-I4110601328
-I2545483429
-I300600527
-I516641158
-I3693021034
-I2852912854
-I3240039868
-I4167407959
-I1479557946
-I3621188804
-I1391590944
-I3578441128
-I1227055556
-I406898396
-I3064054983
-I25835338
-I402664165
-I4097682779
-I2106728012
-I203613622
-I3045467686
-I1381726438
-I3798670110
-I1342314961
-I3552497361
-I535913619
-I2625787583
-I1606574307
-I1101269630
-I1950513752
-I1121355862
-I3586816903
-I438529984
-I2473182121
-I1229997203
-I405445940
-I1695535315
-I427014336
-I3916768430
-I392298359
-I1884642868
-I1244730821
-I741058080
-I567479957
-I3527621168
-I3191971011
-I3267069104
-I4108668146
-I1520795587
-I166581006
-I473794477
-I1562126550
-I929843010
-I889533294
-I1266556608
-I874518650
-I3520162092
-I3013765049
-I4220231414
-I547246449
-I3998093769
-I3737193746
-I3872944207
-I793651876
-I2606384318
-I875991012
-I1394836334
-I4102011644
-I854380426
-I2618666767
-I2568302000
-I1995512132
-I229491093
-I2673500286
-I3364550739
-I3836923416
-I243656987
-I3944388983
-I4064949677
-I1416956378
-I1703244487
-I3990798829
-I2023425781
-I3926702214
-I1229015501
-I3174247824
-I624
-tp2
-Ntp3
-b.
\ No newline at end of file
diff --git a/test-data/stdlib-samples/3.2/test/randv3.pck b/test-data/stdlib-samples/3.2/test/randv3.pck
deleted file mode 100644
index 09fc38b..0000000
--- a/test-data/stdlib-samples/3.2/test/randv3.pck
+++ /dev/null
@@ -1,633 +0,0 @@
-crandom
-Random
-p0
-(tRp1
-(I3
-(L2147483648L
-L994081831L
-L2806287265L
-L2228999830L
-L3396498069L
-L2956805457L
-L3273927761L
-L920726507L
-L1862624492L
-L2921292485L
-L1779526843L
-L2469105503L
-L251696293L
-L1254390717L
-L779197080L
-L3165356830L
-L2007365218L
-L1870028812L
-L2896519363L
-L1855578438L
-L979518416L
-L3481710246L
-L3191861507L
-L3993006593L
-L2967971479L
-L3353342753L
-L3576782572L
-L339685558L
-L2367675732L
-L116208555L
-L1220054437L
-L486597056L
-L1912115141L
-L1037044792L
-L4096904723L
-L3409146175L
-L3701651227L
-L315824610L
-L4138604583L
-L1385764892L
-L191878900L
-L2320582219L
-L3420677494L
-L2776503169L
-L1148247403L
-L829555069L
-L902064012L
-L2934642741L
-L2477108577L
-L2583928217L
-L1658612579L
-L2865447913L
-L129147346L
-L3691171887L
-L1569328110L
-L1372860143L
-L1054139183L
-L1617707080L
-L69020592L
-L3810271603L
-L1853953416L
-L3499803073L
-L1027545027L
-L3229043605L
-L250848720L
-L3324932626L
-L3537002962L
-L2494323345L
-L3238103962L
-L4147541579L
-L3636348186L
-L3025455083L
-L2678771977L
-L584700256L
-L3461826909L
-L854511420L
-L943463552L
-L3609239025L
-L3977577989L
-L253070090L
-L777394544L
-L2144086567L
-L1092947992L
-L854327284L
-L2222750082L
-L360183510L
-L1312466483L
-L3227531091L
-L2235022500L
-L3013060530L
-L2541091298L
-L3480126342L
-L1839762775L
-L2632608190L
-L1108889403L
-L3045050923L
-L731513126L
-L3505436788L
-L3062762017L
-L1667392680L
-L1354126500L
-L1143573930L
-L2816645702L
-L2100356873L
-L2817679106L
-L1210746010L
-L2409915248L
-L2910119964L
-L2309001420L
-L220351824L
-L3667352871L
-L3993148590L
-L2886160232L
-L4239393701L
-L1189270581L
-L3067985541L
-L147374573L
-L2355164869L
-L3696013550L
-L4227037846L
-L1905112743L
-L3312843689L
-L2930678266L
-L1828795355L
-L76933594L
-L3987100796L
-L1288361435L
-L3464529151L
-L965498079L
-L1444623093L
-L1372893415L
-L1536235597L
-L1341994850L
-L963594758L
-L2115295754L
-L982098685L
-L1053433904L
-L2078469844L
-L3059765792L
-L1753606181L
-L2130171254L
-L567588194L
-L529629426L
-L3621523534L
-L3027576564L
-L1176438083L
-L4096287858L
-L1168574683L
-L1425058962L
-L1429631655L
-L2902106759L
-L761900641L
-L1329183956L
-L1947050932L
-L447490289L
-L3282516276L
-L200037389L
-L921868197L
-L3331403999L
-L4088760249L
-L2188326318L
-L288401961L
-L1360802675L
-L314302808L
-L3314639210L
-L3749821203L
-L2286081570L
-L2768939062L
-L3200541016L
-L2133495482L
-L385029880L
-L4217232202L
-L3171617231L
-L1660846653L
-L2459987621L
-L2691776124L
-L4225030408L
-L3595396773L
-L1103680661L
-L539064057L
-L1492841101L
-L166195394L
-L757973658L
-L533893054L
-L2784879594L
-L1021821883L
-L2350548162L
-L176852116L
-L3503166025L
-L148079914L
-L1633466236L
-L2773090165L
-L1162846701L
-L3575737795L
-L1624178239L
-L2454894710L
-L3014691938L
-L526355679L
-L1870824081L
-L3362425857L
-L3907566665L
-L3462563184L
-L2229112004L
-L4203735748L
-L1557442481L
-L924133999L
-L1906634214L
-L880459727L
-L4065895870L
-L141426254L
-L1258450159L
-L3243115027L
-L1574958840L
-L313939294L
-L3055664260L
-L3459714255L
-L531778790L
-L509505506L
-L1620227491L
-L2675554942L
-L2516509560L
-L3797299887L
-L237135890L
-L3203142213L
-L1087745310L
-L1897151854L
-L3936590041L
-L132765167L
-L2385908063L
-L1360600289L
-L3574567769L
-L2752788114L
-L2644228966L
-L2377705183L
-L601277909L
-L4046480498L
-L324401408L
-L3279931760L
-L2227059377L
-L1538827493L
-L4220532064L
-L478044564L
-L2917117761L
-L635492832L
-L2319763261L
-L795944206L
-L1820473234L
-L1673151409L
-L1404095402L
-L1661067505L
-L3217106938L
-L2406310683L
-L1931309248L
-L2458622868L
-L3323670524L
-L3266852755L
-L240083943L
-L3168387397L
-L607722198L
-L1256837690L
-L3608124913L
-L4244969357L
-L1289959293L
-L519750328L
-L3229482463L
-L1105196988L
-L1832684479L
-L3761037224L
-L2363631822L
-L3297957711L
-L572766355L
-L1195822137L
-L2239207981L
-L2034241203L
-L163540514L
-L288160255L
-L716403680L
-L4019439143L
-L1536281935L
-L2345100458L
-L2786059178L
-L2822232109L
-L987025395L
-L3061166559L
-L490422513L
-L2551030115L
-L2638707620L
-L1344728502L
-L714108911L
-L2831719700L
-L2188615369L
-L373509061L
-L1351077504L
-L3136217056L
-L783521095L
-L2554949468L
-L2662499550L
-L1203826951L
-L1379632388L
-L1918858985L
-L607465976L
-L1980450237L
-L3540079211L
-L3397813410L
-L2913309266L
-L2289572621L
-L4133935327L
-L4166227663L
-L3371801704L
-L3065474909L
-L3580562343L
-L3832172378L
-L2556130719L
-L310473705L
-L3734014346L
-L2490413810L
-L347233056L
-L526668037L
-L1158393656L
-L544329703L
-L2150085419L
-L3914038146L
-L1060237586L
-L4159394837L
-L113205121L
-L309966775L
-L4098784465L
-L3635222960L
-L2417516569L
-L2089579233L
-L1725807541L
-L2728122526L
-L2365836523L
-L2504078522L
-L1443946869L
-L2384171411L
-L997046534L
-L3249131657L
-L1699875986L
-L3618097146L
-L1716038224L
-L2629818607L
-L2929217876L
-L1367250314L
-L1726434951L
-L1388496325L
-L2107602181L
-L2822366842L
-L3052979190L
-L3796798633L
-L1543813381L
-L959000121L
-L1363845999L
-L2952528150L
-L874184932L
-L1888387194L
-L2328695295L
-L3442959855L
-L841805947L
-L1087739275L
-L3230005434L
-L3045399265L
-L1161817318L
-L2898673139L
-L860011094L
-L940539782L
-L1297818080L
-L4243941623L
-L1577613033L
-L4204131887L
-L3819057225L
-L1969439558L
-L3297963932L
-L241874069L
-L3517033453L
-L2295345664L
-L1098911422L
-L886955008L
-L1477397621L
-L4279347332L
-L3616558791L
-L2384411957L
-L742537731L
-L764221540L
-L2871698900L
-L3530636393L
-L691256644L
-L758730966L
-L1717773090L
-L2751856377L
-L3188484000L
-L3767469670L
-L1623863053L
-L3533236793L
-L4099284176L
-L723921107L
-L310594036L
-L223978745L
-L2266565776L
-L201843303L
-L2969968546L
-L3351170888L
-L3465113624L
-L2712246712L
-L1521383057L
-L2384461798L
-L216357551L
-L2167301975L
-L3144653194L
-L2781220155L
-L3620747666L
-L95971265L
-L4255400243L
-L59999757L
-L4174273472L
-L3974511524L
-L1007123950L
-L3112477628L
-L806461512L
-L3148074008L
-L528352882L
-L2545979588L
-L2562281969L
-L3010249477L
-L1886331611L
-L3210656433L
-L1034099976L
-L2906893579L
-L1197048779L
-L1870004401L
-L3898300490L
-L2686856402L
-L3975723478L
-L613043532L
-L2565674353L
-L3760045310L
-L3468984376L
-L4126258L
-L303855424L
-L3988963552L
-L276256796L
-L544071807L
-L1023872062L
-L1747461519L
-L1975571260L
-L4033766958L
-L2946555557L
-L1492957796L
-L958271685L
-L46480515L
-L907760635L
-L1306626357L
-L819652378L
-L1172300279L
-L1116851319L
-L495601075L
-L1157715330L
-L534220108L
-L377320028L
-L1672286106L
-L2066219284L
-L1842386355L
-L2546059464L
-L1839457336L
-L3476194446L
-L3050550028L
-L594705582L
-L1905813535L
-L1813033412L
-L2700858157L
-L169067972L
-L4252889045L
-L1921944555L
-L497671474L
-L210143935L
-L2688398489L
-L325158375L
-L3450846447L
-L891760597L
-L712802536L
-L1132557436L
-L1417044075L
-L1639889660L
-L1746379970L
-L1478741647L
-L2817563486L
-L2573612532L
-L4266444457L
-L2911601615L
-L804745411L
-L2207254652L
-L1189140646L
-L3829725111L
-L3637367348L
-L1944731747L
-L2193440343L
-L1430195413L
-L1173515229L
-L1582618217L
-L2070767037L
-L247908936L
-L1460675439L
-L556001596L
-L327629335L
-L1036133876L
-L4228129605L
-L999174048L
-L3635804039L
-L1416550481L
-L1270540269L
-L4280743815L
-L39607659L
-L1552540623L
-L2762294062L
-L504137289L
-L4117044239L
-L1417130225L
-L1342970056L
-L1755716449L
-L1169447322L
-L2731401356L
-L2319976745L
-L2869221479L
-L23972655L
-L2251495389L
-L1429860878L
-L3728135992L
-L4241432973L
-L3698275076L
-L216416432L
-L4040046960L
-L246077176L
-L894675685L
-L3932282259L
-L3097205100L
-L2128818650L
-L1319010656L
-L1601974009L
-L2552960957L
-L3554016055L
-L4209395641L
-L2013340102L
-L3370447801L
-L2307272002L
-L1795091354L
-L202109401L
-L988345070L
-L2514870758L
-L1132726850L
-L582746224L
-L3112305421L
-L1843020683L
-L3600189223L
-L1101349165L
-L4211905855L
-L2866677581L
-L2881621130L
-L4165324109L
-L4238773191L
-L3635649550L
-L2670481044L
-L2996248219L
-L1676992480L
-L3473067050L
-L4205793699L
-L4019490897L
-L1579990481L
-L1899617990L
-L1136347713L
-L1802842268L
-L3591752960L
-L1197308739L
-L433629786L
-L4032142790L
-L3148041979L
-L3312138845L
-L3896860449L
-L3298182567L
-L907605170L
-L1658664067L
-L2682980313L
-L2523523173L
-L1208722103L
-L3808530363L
-L1079003946L
-L4282402864L
-L2041010073L
-L2667555071L
-L688018180L
-L1405121012L
-L4167994076L
-L3504695336L
-L1923944749L
-L1143598790L
-L3936268898L
-L3606243846L
-L1017420080L
-L4026211169L
-L596529763L
-L1844259624L
-L2840216282L
-L2673807759L
-L3407202575L
-L2737971083L
-L4075423068L
-L3684057432L
-L3146627241L
-L599650513L
-L69773114L
-L1257035919L
-L807485291L
-L2376230687L
-L3036593147L
-L2642411658L
-L106080044L
-L2199622729L
-L291834511L
-L2697611361L
-L11689733L
-L625123952L
-L3226023062L
-L3229663265L
-L753059444L
-L2843610189L
-L624L
-tp2
-Ntp3
-b.
\ No newline at end of file
diff --git a/test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py b/test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py
deleted file mode 100644
index 1f61e13..0000000
--- a/test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""When called as a script, print a comma-separated list of the open
-file descriptors on stdout."""
-
-import errno
-import os
-
-try:
- _MAXFD = os.sysconf("SC_OPEN_MAX")
-except:
- _MAXFD = 256
-
-if __name__ == "__main__":
- fds = []
- for fd in range(0, _MAXFD):
- try:
- st = os.fstat(fd)
- except OSError as e:
- if e.errno == errno.EBADF:
- continue
- raise
- # Ignore Solaris door files
- if st.st_mode & 0xF000 != 0xd000:
- fds.append(fd)
- print(','.join(map(str, fds)))
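
For reference, a minimal sketch of how a test could drive a helper like fd_status.py through subprocess; it is not taken from the removed test suite, and the script path and the expected descriptor set are illustrative assumptions only:

    # Illustrative sketch: run the helper and see which fds the child inherited.
    import subprocess
    import sys

    proc = subprocess.Popen(
        [sys.executable, "fd_status.py"],   # placeholder path to the helper
        stdout=subprocess.PIPE,
        close_fds=True,
    )
    out, _ = proc.communicate()
    open_fds = set(map(int, out.decode("ascii").strip().split(",")))
    # With close_fds=True only stdin/stdout/stderr are expected to remain open.
    assert open_fds <= {0, 1, 2}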
diff --git a/test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py b/test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py
deleted file mode 100644
index 1dc3191..0000000
--- a/test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py
+++ /dev/null
@@ -1,7 +0,0 @@
-"""When called as a script, consumes the input"""
-
-import sys
-
-if __name__ == "__main__":
- for line in sys.stdin:
- pass
diff --git a/test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py b/test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py
deleted file mode 100644
index fe6f9db..0000000
--- a/test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py
+++ /dev/null
@@ -1,7 +0,0 @@
-"""When run as a script, simulates cat with no arguments."""
-
-import sys
-
-if __name__ == "__main__":
- for line in sys.stdin:
- sys.stdout.write(line)
diff --git a/test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py b/test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py
deleted file mode 100644
index 6990637..0000000
--- a/test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py
+++ /dev/null
@@ -1,10 +0,0 @@
-"""When called with a single argument, simulates fgrep with that
-argument and no options."""
-
-import sys
-
-if __name__ == "__main__":
- pattern = sys.argv[1]
- for line in sys.stdin:
- if pattern in line:
- sys.stdout.write(line)
diff --git a/test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py b/test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py
deleted file mode 100644
index 6072aec..0000000
--- a/test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py
+++ /dev/null
@@ -1,6 +0,0 @@
-import signal, subprocess, sys
-# On Linux this causes os.waitpid to fail with OSError as the OS has already
-# reaped our child process. The wait() passing the OSError on to the caller
-# and causing us to exit with an error is what we are testing against.
-signal.signal(signal.SIGCHLD, signal.SIG_IGN)
-subprocess.Popen([sys.executable, '-c', 'print("albatross")']).wait()
diff --git a/test-data/stdlib-samples/3.2/test/support.py b/test-data/stdlib-samples/3.2/test/support.py
deleted file mode 100644
index a36ba28..0000000
--- a/test-data/stdlib-samples/3.2/test/support.py
+++ /dev/null
@@ -1,1602 +0,0 @@
-"""Supporting definitions for the Python regression tests."""
-
-if __name__ != 'test.support':
- raise ImportError('support must be imported from the test package')
-
-import contextlib
-import errno
-import functools
-import gc
-import socket
-import sys
-import os
-import platform
-import shutil
-import warnings
-import unittest
-import importlib
-import collections
-import re
-import subprocess
-import imp
-import time
-import sysconfig
-import fnmatch
-import logging.handlers
-
-import _thread, threading
-from typing import Any, Dict, cast
-#try:
-# import multiprocessing.process
-#except ImportError:
-# multiprocessing = None
-
-
-__all__ = [
- "Error", "TestFailed", "ResourceDenied", "import_module",
- "verbose", "use_resources", "max_memuse", "record_original_stdout",
- "get_original_stdout", "unload", "unlink", "rmtree", "forget",
- "is_resource_enabled", "requires", "requires_mac_ver",
- "find_unused_port", "bind_port",
- "fcmp", "is_jython", "TESTFN", "HOST", "FUZZ", "SAVEDCWD", "temp_cwd",
- "findfile", "sortdict", "check_syntax_error", "open_urlresource",
- "check_warnings", "CleanImport", "EnvironmentVarGuard",
- "TransientResource", "captured_output", "captured_stdout",
- "captured_stdin", "captured_stderr",
- "time_out", "socket_peer_reset", "ioerror_peer_reset",
- "run_with_locale", 'temp_umask', "transient_internet",
- "set_memlimit", "bigmemtest", "bigaddrspacetest", "BasicTestRunner",
- "run_unittest", "run_doctest", "threading_setup", "threading_cleanup",
- "reap_children", "cpython_only", "check_impl_detail", "get_attribute",
- "swap_item", "swap_attr", "requires_IEEE_754",
- "TestHandler", "Matcher", "can_symlink", "skip_unless_symlink",
- "import_fresh_module", "failfast",
- ]
-
-class Error(Exception):
- """Base class for regression test exceptions."""
-
-class TestFailed(Error):
- """Test failed."""
-
-class ResourceDenied(unittest.SkipTest):
- """Test skipped because it requested a disallowed resource.
-
- This is raised when a test calls requires() for a resource that
- has not been enabled. It is used to distinguish between expected
- and unexpected skips.
- """
-
-@contextlib.contextmanager
-def _ignore_deprecated_imports(ignore=True):
- """Context manager to suppress package and module deprecation
- warnings when importing them.
-
- If ignore is False, this context manager has no effect."""
- if ignore:
- with warnings.catch_warnings():
- warnings.filterwarnings("ignore", ".+ (module|package)",
- DeprecationWarning)
- yield None
- else:
- yield None
-
-
-def import_module(name, deprecated=False):
- """Import and return the module to be tested, raising SkipTest if
- it is not available.
-
- If deprecated is True, any module or package deprecation messages
- will be suppressed."""
- with _ignore_deprecated_imports(deprecated):
- try:
- return importlib.import_module(name)
- except ImportError as msg:
- raise unittest.SkipTest(str(msg))
-
-
-def _save_and_remove_module(name, orig_modules):
- """Helper function to save and remove a module from sys.modules
-
- Raise ImportError if the module can't be imported."""
- # try to import the module and raise an error if it can't be imported
- if name not in sys.modules:
- __import__(name)
- del sys.modules[name]
- for modname in list(sys.modules):
- if modname == name or modname.startswith(name + '.'):
- orig_modules[modname] = sys.modules[modname]
- del sys.modules[modname]
-
-def _save_and_block_module(name, orig_modules):
- """Helper function to save and block a module in sys.modules
-
- Return True if the module was in sys.modules, False otherwise."""
- saved = True
- try:
- orig_modules[name] = sys.modules[name]
- except KeyError:
- saved = False
- sys.modules[name] = None
- return saved
-
-
-def import_fresh_module(name, fresh=(), blocked=(), deprecated=False):
- """Imports and returns a module, deliberately bypassing the sys.modules cache
- and importing a fresh copy of the module. Once the import is complete,
- the sys.modules cache is restored to its original state.
-
- Modules named in fresh are also imported anew if needed by the import.
- If one of these modules can't be imported, None is returned.
-
- Importing of modules named in blocked is prevented while the fresh import
- takes place.
-
- If deprecated is True, any module or package deprecation messages
- will be suppressed."""
- # NOTE: test_heapq, test_json and test_warnings include extra sanity checks
- # to make sure that this utility function is working as expected
- with _ignore_deprecated_imports(deprecated):
- # Keep track of modules saved for later restoration as well
- # as those which just need a blocking entry removed
- orig_modules = {}
- names_to_remove = []
- _save_and_remove_module(name, orig_modules)
- try:
- for fresh_name in fresh:
- _save_and_remove_module(fresh_name, orig_modules)
- for blocked_name in blocked:
- if not _save_and_block_module(blocked_name, orig_modules):
- names_to_remove.append(blocked_name)
- fresh_module = importlib.import_module(name)
- except ImportError:
- fresh_module = None
- finally:
- for orig_name, module in orig_modules.items():
- sys.modules[orig_name] = module
- for name_to_remove in names_to_remove:
- del sys.modules[name_to_remove]
- return fresh_module
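
For example, a short usage sketch of import_fresh_module in the style of CPython's own tests, assuming the helper is imported from test.support; the module names are only an illustration:

    from test.support import import_fresh_module

    # Pure-Python json: import json while blocking the _json accelerator.
    py_json = import_fresh_module('json', blocked=['_json'])
    # Accelerated json: also force a fresh import of the _json extension.
    c_json = import_fresh_module('json', fresh=['_json'])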
-
-
-def get_attribute(obj, name):
- """Get an attribute, raising SkipTest if AttributeError is raised."""
- try:
- attribute = getattr(obj, name)
- except AttributeError:
- raise unittest.SkipTest("module %s has no attribute %s" % (
- obj.__name__, name))
- else:
- return attribute
-
-verbose = 1 # Flag set to 0 by regrtest.py
-use_resources = None # type: Any # Flag set to [] by regrtest.py
-max_memuse = 0 # Disable bigmem tests (they will still be run with
- # small sizes, to make sure they work.)
-real_max_memuse = 0
-failfast = False
-match_tests = None # type: Any
-
-# _original_stdout is meant to hold stdout at the time regrtest began.
-# This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
-# The point is to have some flavor of stdout the user can actually see.
-_original_stdout = None # type: 'Any'
-def record_original_stdout(stdout):
- global _original_stdout
- _original_stdout = stdout
-
-def get_original_stdout():
- return _original_stdout or sys.stdout
-
-def unload(name):
- try:
- del sys.modules[name]
- except KeyError:
- pass
-
-def unlink(filename):
- try:
- os.unlink(filename)
- except OSError as error:
- # The filename need not exist.
- if error.errno not in (errno.ENOENT, errno.ENOTDIR):
- raise
-
-def rmtree(path):
- try:
- shutil.rmtree(path)
- except OSError as error:
- # Unix returns ENOENT, Windows returns ESRCH.
- if error.errno not in (errno.ENOENT, errno.ESRCH):
- raise
-
-def make_legacy_pyc(source):
- """Move a PEP 3147 pyc/pyo file to its legacy pyc/pyo location.
-
- The choice of .pyc or .pyo extension is done based on the __debug__ flag
- value.
-
- :param source: The file system path to the source file. The source file
- does not need to exist, however the PEP 3147 pyc file must exist.
- :return: The file system path to the legacy pyc file.
- """
- pyc_file = imp.cache_from_source(source)
- up_one = os.path.dirname(os.path.abspath(source))
- if __debug__:
- ch = 'c'
- else:
- ch = 'o'
- legacy_pyc = os.path.join(up_one, source + ch)
- os.rename(pyc_file, legacy_pyc)
- return legacy_pyc
-
-def forget(modname):
- """'Forget' a module was ever imported.
-
- This removes the module from sys.modules and deletes any PEP 3147 or
- legacy .pyc and .pyo files.
- """
- unload(modname)
- for dirname in sys.path:
- source = os.path.join(dirname, modname + '.py')
- # It doesn't matter if they exist or not, unlink all possible
- # combinations of PEP 3147 and legacy pyc and pyo files.
- unlink(source + 'c')
- unlink(source + 'o')
- unlink(imp.cache_from_source(source, debug_override=True))
- unlink(imp.cache_from_source(source, debug_override=False))
-
-# On some platforms, should not run gui test even if it is allowed
-# in `use_resources'.
-#if sys.platform.startswith('win'):
- #import ctypes
- #import ctypes.wintypes
- #def _is_gui_available():
- # UOI_FLAGS = 1
- # WSF_VISIBLE = 0x0001
- # class USEROBJECTFLAGS(ctypes.Structure):
- # _fields_ = [("fInherit", ctypes.wintypes.BOOL),
- # ("fReserved", ctypes.wintypes.BOOL),
- # ("dwFlags", ctypes.wintypes.DWORD)]
- # dll = ctypes.windll.user32
- # h = dll.GetProcessWindowStation()
- # if not h:
- # raise ctypes.WinError()
- # uof = USEROBJECTFLAGS()
- # needed = ctypes.wintypes.DWORD()
- # res = dll.GetUserObjectInformationW(h,
- # UOI_FLAGS,
- # ctypes.byref(uof),
- # ctypes.sizeof(uof),
- # ctypes.byref(needed))
- # if not res:
- # raise ctypes.WinError()
- # return bool(uof.dwFlags & WSF_VISIBLE)
-#else:
-def _is_gui_available():
- return True
-
-def is_resource_enabled(resource):
- """Test whether a resource is enabled. Known resources are set by
- regrtest.py."""
- return use_resources is not None and resource in use_resources
-
-def requires(resource, msg=None):
- """Raise ResourceDenied if the specified resource is not available.
-
- If the caller's module is __main__ then automatically return True. The
- possibility of False being returned occurs when regrtest.py is
- executing.
- """
- if resource == 'gui' and not _is_gui_available():
- raise unittest.SkipTest("Cannot use the 'gui' resource")
- # see if the caller's module is __main__ - if so, treat as if
- # the resource was set
- if sys._getframe(1).f_globals.get("__name__") == "__main__":
- return
- if not is_resource_enabled(resource):
- if msg is None:
- msg = "Use of the `%s' resource not enabled" % resource
- raise ResourceDenied(msg)
-
-def requires_mac_ver(*min_version):
- """Decorator raising SkipTest if the OS is Mac OS X and the OS X
- version is less than min_version.
-
- For example, @requires_mac_ver(10, 5) raises SkipTest if the OS X version
- is less than 10.5.
- """
- def decorator(func):
- @functools.wraps(func)
- def wrapper(*args, **kw):
- if sys.platform == 'darwin':
- version_txt = platform.mac_ver()[0]
- try:
- version = tuple(map(int, version_txt.split('.')))
- except ValueError:
- pass
- else:
- if version < min_version:
- min_version_txt = '.'.join(map(str, min_version))
- raise unittest.SkipTest(
- "Mac OS X %s or higher required, not %s"
- % (min_version_txt, version_txt))
- return func(*args, **kw)
- wrapper.min_version = min_version
- return wrapper
- return decorator
-
-HOST = 'localhost'
-
-def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM):
- """Returns an unused port that should be suitable for binding. This is
- achieved by creating a temporary socket with the requested family and socket
- type (default is AF_INET, SOCK_STREAM), and binding it to
- the specified host address (defaults to 0.0.0.0) with the port set to 0,
- eliciting an unused ephemeral port from the OS. The temporary socket is
- then closed and deleted, and the ephemeral port is returned.
-
- Either this method or bind_port() should be used for any tests where a
- server socket needs to be bound to a particular port for the duration of
- the test. Which one to use depends on whether the calling code is creating
- a python socket, or if an unused port needs to be provided in a constructor
- or passed to an external program (i.e. the -accept argument to openssl's
- s_server mode). Always prefer bind_port() over find_unused_port() where
- possible. Hard coded ports should *NEVER* be used. As soon as a server
- socket is bound to a hard coded port, the ability to run multiple instances
- of the test simultaneously on the same host is compromised, which makes the
- test a ticking time bomb in a buildbot environment. On Unix buildbots, this
- may simply manifest as a failed test, which can be recovered from without
- intervention in most cases, but on Windows, the entire python process can
- completely and utterly wedge, requiring someone to log in to the buildbot
- and manually kill the affected process.
-
- (This is easy to reproduce on Windows, unfortunately, and can be traced to
- the SO_REUSEADDR socket option having different semantics on Windows versus
- Unix/Linux. On Unix, you can't have two AF_INET SOCK_STREAM sockets bind,
- listen and then accept connections on identical host/ports. An EADDRINUSE
- socket.error will be raised at some point (depending on the platform and
- the order bind and listen were called on each socket).
-
- However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE
- will ever be raised when attempting to bind two identical host/ports. When
- accept() is called on each socket, the second caller's process will steal
- the port from the first caller, leaving them both in an awkwardly wedged
- state where they'll no longer respond to any signals or graceful kills, and
- must be forcibly killed via OpenProcess()/TerminateProcess().
-
- The solution on Windows is to use the SO_EXCLUSIVEADDRUSE socket option
- instead of SO_REUSEADDR, which effectively affords the same semantics as
- SO_REUSEADDR on Unix. Given the propensity of Unix developers in the Open
- Source world compared to Windows ones, this is a common mistake. A quick
- look over OpenSSL's 0.9.8g source shows that they use SO_REUSEADDR when
- openssl.exe is called with the 's_server' option, for example. See
- http://bugs.python.org/issue2550 for more info. The following site also
- has a very thorough description about the implications of both REUSEADDR
- and EXCLUSIVEADDRUSE on Windows:
- http://msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx)
-
- XXX: although this approach is a vast improvement on previous attempts to
- elicit unused ports, it rests heavily on the assumption that the ephemeral
- port returned to us by the OS won't immediately be dished back out to some
- other process when we close and delete our temporary socket but before our
- calling code has a chance to bind the returned port. We can deal with this
- issue if/when we come across it.
- """
-
- tempsock = socket.socket(family, socktype)
- port = bind_port(tempsock)
- tempsock.close()
- #del tempsock
- return port
-
-def bind_port(sock, host=HOST):
- """Bind the socket to a free port and return the port number. Relies on
- ephemeral ports in order to ensure we are using an unbound port. This is
- important as many tests may be running simultaneously, especially in a
- buildbot environment. This method raises an exception if the sock.family
- is AF_INET and sock.type is SOCK_STREAM, *and* the socket has SO_REUSEADDR
- or SO_REUSEPORT set on it. Tests should *never* set these socket options
- for TCP/IP sockets. The only case for setting these options is testing
- multicasting via multiple UDP sockets.
-
- Additionally, if the SO_EXCLUSIVEADDRUSE socket option is available (i.e.
- on Windows), it will be set on the socket. This will prevent anyone else
- from bind()'ing to our host/port for the duration of the test.
- """
-
- if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM:
- if hasattr(socket, 'SO_REUSEADDR'):
- if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1:
- raise TestFailed("tests should never set the SO_REUSEADDR " \
- "socket option on TCP/IP sockets!")
- if hasattr(socket, 'SO_REUSEPORT'):
- if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1:
- raise TestFailed("tests should never set the SO_REUSEPORT " \
- "socket option on TCP/IP sockets!")
- if hasattr(socket, 'SO_EXCLUSIVEADDRUSE'):
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
-
- sock.bind((host, 0))
- port = sock.getsockname()[1]
- return port
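
A brief usage sketch of the pattern the docstrings above recommend (prefer bind_port over find_unused_port), assuming the helpers are imported from test.support:

    import socket
    from test.support import HOST, bind_port

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        port = bind_port(sock, HOST)   # the OS picks a free ephemeral port
        sock.listen(1)
        # ... connect a client to (HOST, port) and exercise the server ...
    finally:
        sock.close()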
-
-FUZZ = 1e-6
-
-def fcmp(x, y): # fuzzy comparison function
- if isinstance(x, float) or isinstance(y, float):
- try:
- fuzz = (abs(x) + abs(y)) * FUZZ
- if abs(x-y) <= fuzz:
- return 0
- except:
- pass
- elif type(x) == type(y) and isinstance(x, (tuple, list)):
- for i in range(min(len(x), len(y))):
- outcome = fcmp(x[i], y[i])
- if outcome != 0:
- return outcome
- return (len(x) > len(y)) - (len(x) < len(y))
- return (x > y) - (x < y)
-
-# decorator for skipping tests on non-IEEE 754 platforms
-requires_IEEE_754 = unittest.skipUnless(
- cast(Any, float).__getformat__("double").startswith("IEEE"),
- "test requires IEEE 754 doubles")
-
-is_jython = sys.platform.startswith('java')
-
-TESTFN = ''
-# Filename used for testing
-if os.name == 'java':
- # Jython disallows @ in module names
- TESTFN = '$test'
-else:
- TESTFN = '@test'
-
-# Disambiguate TESTFN for parallel testing, while letting it remain a valid
-# module name.
-TESTFN = "{}_{}_tmp".format(TESTFN, os.getpid())
-
-
-# TESTFN_UNICODE is a non-ascii filename
-TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f"
-if sys.platform == 'darwin':
- # In Mac OS X's VFS API file names are, by definition, canonically
- # decomposed Unicode, encoded using UTF-8. See QA1173:
- # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html
- import unicodedata
- TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE)
-TESTFN_ENCODING = sys.getfilesystemencoding()
-
-# TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be
-# encoded by the filesystem encoding (in strict mode). It can be None if we
-# cannot generate such filename.
-TESTFN_UNENCODABLE = None # type: Any
-if os.name in ('nt', 'ce'):
- # skip win32s (0) or Windows 9x/ME (1)
- if sys.getwindowsversion().platform >= 2:
- # Different kinds of characters from various languages to minimize the
- # probability that the whole name is encodable to MBCS (issue #9819)
- TESTFN_UNENCODABLE = TESTFN + "-\u5171\u0141\u2661\u0363\uDC80"
- try:
- TESTFN_UNENCODABLE.encode(TESTFN_ENCODING)
- except UnicodeEncodeError:
- pass
- else:
- print('WARNING: The filename %r CAN be encoded by the filesystem encoding (%s). '
- 'Unicode filename tests may not be effective'
- % (TESTFN_UNENCODABLE, TESTFN_ENCODING))
- TESTFN_UNENCODABLE = None
-# Mac OS X denies unencodable filenames (invalid utf-8)
-elif sys.platform != 'darwin':
- try:
- # ascii and utf-8 cannot encode the byte 0xff
- b'\xff'.decode(TESTFN_ENCODING)
- except UnicodeDecodeError:
- # 0xff will be encoded using the surrogate character u+DCFF
- TESTFN_UNENCODABLE = TESTFN \
- + b'-\xff'.decode(TESTFN_ENCODING, 'surrogateescape')
- else:
- # File system encoding (eg. ISO-8859-* encodings) can encode
- # the byte 0xff. Skip some unicode filename tests.
- pass
-
-# Save the initial cwd
-SAVEDCWD = os.getcwd()
-
-@contextlib.contextmanager
-def temp_cwd(name='tempcwd', quiet=False, path=None):
- """
- Context manager that temporarily changes the CWD.
-
- An existing path may be provided as *path*, in which case this
- function makes no changes to the file system.
-
- Otherwise, the new CWD is created in the current directory and it's
- named *name*. If *quiet* is False (default) and it's not possible to
- create or change the CWD, an error is raised. If it's True, only a
- warning is raised and the original CWD is used.
- """
- saved_dir = os.getcwd()
- is_temporary = False
- if path is None:
- path = name
- try:
- os.mkdir(name)
- is_temporary = True
- except OSError:
- if not quiet:
- raise
- warnings.warn('tests may fail, unable to create temp CWD ' + name,
- RuntimeWarning, stacklevel=3)
- try:
- os.chdir(path)
- except OSError:
- if not quiet:
- raise
- warnings.warn('tests may fail, unable to change the CWD to ' + name,
- RuntimeWarning, stacklevel=3)
- try:
- yield os.getcwd()
- finally:
- os.chdir(saved_dir)
- if is_temporary:
- rmtree(name)
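
A minimal usage sketch of temp_cwd, assuming it is imported from test.support; the directory and file names are placeholders:

    from test.support import temp_cwd

    with temp_cwd('scratch') as cwd:
        # cwd is the temporary working directory; it is removed on exit.
        with open('data.txt', 'w') as f:
            f.write('hello')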
-
-
-@contextlib.contextmanager
-def temp_umask(umask):
- """Context manager that temporarily sets the process umask."""
- oldmask = os.umask(umask)
- try:
- yield None
- finally:
- os.umask(oldmask)
-
-
-def findfile(file, here=__file__, subdir=None):
- """Try to find a file on sys.path and the working directory. If it is not
- found the argument passed to the function is returned (this does not
- necessarily signal failure; could still be the legitimate path)."""
- if os.path.isabs(file):
- return file
- if subdir is not None:
- file = os.path.join(subdir, file)
- path = sys.path
- path = [os.path.dirname(here)] + path
- for dn in path:
- fn = os.path.join(dn, file)
- if os.path.exists(fn): return fn
- return file
-
-def sortdict(dict):
- "Like repr(dict), but in sorted order."
- items = sorted(dict.items())
- reprpairs = ["%r: %r" % pair for pair in items]
- withcommas = ", ".join(reprpairs)
- return "{%s}" % withcommas
-
-def make_bad_fd():
- """
- Create an invalid file descriptor by opening and closing a file and return
- its fd.
- """
- file = open(TESTFN, "wb")
- try:
- return file.fileno()
- finally:
- file.close()
- unlink(TESTFN)
-
-def check_syntax_error(testcase, statement):
- raise NotImplementedError('no compile built-in')
- #testcase.assertRaises(SyntaxError, compile, statement,
- # '<test string>', 'exec')
-
-def open_urlresource(url, *args, **kw):
- from urllib import request, parse
-
- check = kw.pop('check', None)
-
- filename = parse.urlparse(url)[2].split('/')[-1] # '/': it's URL!
-
- fn = os.path.join(os.path.dirname(__file__), "data", filename)
-
- def check_valid_file(fn):
- f = open(fn, *args, **kw)
- if check is None:
- return f
- elif check(f):
- f.seek(0)
- return f
- f.close()
-
- if os.path.exists(fn):
- f = check_valid_file(fn)
- if f is not None:
- return f
- unlink(fn)
-
- # Verify the requirement before downloading the file
- requires('urlfetch')
-
- print('\tfetching %s ...' % url, file=get_original_stdout())
- f = request.urlopen(url, timeout=15)
- try:
- with open(fn, "wb") as out:
- s = f.read()
- while s:
- out.write(s)
- s = f.read()
- finally:
- f.close()
-
- f = check_valid_file(fn)
- if f is not None:
- return f
- raise TestFailed('invalid resource "%s"' % fn)
-
-
-class WarningsRecorder(object):
- """Convenience wrapper for the warnings list returned on
- entry to the warnings.catch_warnings() context manager.
- """
- def __init__(self, warnings_list):
- self._warnings = warnings_list
- self._last = 0
-
- def __getattr__(self, attr):
- if len(self._warnings) > self._last:
- return getattr(self._warnings[-1], attr)
- elif attr in warnings.WarningMessage._WARNING_DETAILS:
- return None
- raise AttributeError("%r has no attribute %r" % (self, attr))
-
- #@property
- #def warnings(self):
- # return self._warnings[self._last:]
-
- def reset(self):
- self._last = len(self._warnings)
-
-
-def _filterwarnings(filters, quiet=False):
- """Catch the warnings, then check if all the expected
- warnings have been raised and re-raise unexpected warnings.
- If 'quiet' is True, only re-raise the unexpected warnings.
- """
- # Clear the warning registry of the calling module
- # in order to re-raise the warnings.
- frame = sys._getframe(2)
- registry = frame.f_globals.get('__warningregistry__')
- if registry:
- registry.clear()
- with warnings.catch_warnings(record=True) as w:
- # Set filter "always" to record all warnings. Because
- # test_warnings swaps the module, we need to look up in
- # the sys.modules dictionary.
- sys.modules['warnings'].simplefilter("always")
- yield WarningsRecorder(w)
- # Filter the recorded warnings
- reraise = list(w)
- missing = []
- for msg, cat in filters:
- seen = False
- for w in reraise[:]:
- warning = w.message
- # Filter out the matching messages
- if (re.match(msg, str(warning), re.I) and
- issubclass(warning.__class__, cat)):
- seen = True
- reraise.remove(w)
- if not seen and not quiet:
- # This filter caught nothing
- missing.append((msg, cat.__name__))
- if reraise:
- raise AssertionError("unhandled warning %s" % reraise[0])
- if missing:
- raise AssertionError("filter (%r, %s) did not catch any warning" %
- missing[0])
-
-
-@contextlib.contextmanager
-def check_warnings(*filters, **kwargs):
- """Context manager to silence warnings.
-
- Accept 2-tuples as positional arguments:
- ("message regexp", WarningCategory)
-
- Optional argument:
- - if 'quiet' is True, it does not fail if a filter catches nothing
- (default True without argument,
- default False if some filters are defined)
-
- Without argument, it defaults to:
- check_warnings(("", Warning), quiet=True)
- """
- quiet = kwargs.get('quiet')
- if not filters:
- filters = (("", Warning),)
- # Preserve backward compatibility
- if quiet is None:
- quiet = True
- return _filterwarnings(filters, quiet)
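
A short usage sketch of check_warnings with an explicit filter, assuming it is imported from test.support; the warning message text is arbitrary:

    import warnings
    from test.support import check_warnings

    with check_warnings(("deprecated", DeprecationWarning)):
        warnings.warn("deprecated API", DeprecationWarning)
    # If the filter had matched nothing, an AssertionError would be raised
    # when the block exits (quiet defaults to False once filters are given).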
-
-
-class CleanImport(object):
- """Context manager to force import to return a new module reference.
-
- This is useful for testing module-level behaviours, such as
- the emission of a DeprecationWarning on import.
-
- Use like this:
-
- with CleanImport("foo"):
- importlib.import_module("foo") # new reference
- """
-
- def __init__(self, *module_names):
- self.original_modules = sys.modules.copy()
- for module_name in module_names:
- if module_name in sys.modules:
- module = sys.modules[module_name]
- # It is possible that module_name is just an alias for
- # another module (e.g. stub for modules renamed in 3.x).
- # In that case, we also need to delete the real module to clear
- # the import cache.
- if module.__name__ != module_name:
- del sys.modules[module.__name__]
- del sys.modules[module_name]
-
- def __enter__(self):
- return self
-
- def __exit__(self, *ignore_exc):
- sys.modules.update(self.original_modules)
-
-
-class EnvironmentVarGuard(dict):
-
- """Class to help protect environment variables properly. Can be used as
- a context manager."""
-
- def __init__(self):
- self._environ = os.environ
- self._changed = {}
-
- def __getitem__(self, envvar):
- return self._environ[envvar]
-
- def __setitem__(self, envvar, value):
- # Remember the initial value on the first access
- if envvar not in self._changed:
- self._changed[envvar] = self._environ.get(envvar)
- self._environ[envvar] = value
-
- def __delitem__(self, envvar):
- # Remember the initial value on the first access
- if envvar not in self._changed:
- self._changed[envvar] = self._environ.get(envvar)
- if envvar in self._environ:
- del self._environ[envvar]
-
- def keys(self):
- return self._environ.keys()
-
- def __iter__(self):
- return iter(self._environ)
-
- def __len__(self):
- return len(self._environ)
-
- def set(self, envvar, value):
- self[envvar] = value
-
- def unset(self, envvar):
- del self[envvar]
-
- def __enter__(self):
- return self
-
- def __exit__(self, *ignore_exc):
- for k, v in self._changed.items():
- if v is None:
- if k in self._environ:
- del self._environ[k]
- else:
- self._environ[k] = v
- os.environ = self._environ
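
A minimal usage sketch of EnvironmentVarGuard as a context manager, assuming it is imported from test.support; the variable names are placeholders:

    from test.support import EnvironmentVarGuard

    with EnvironmentVarGuard() as env:
        env['LANG'] = 'C'           # temporarily override a variable
        env.unset('PYTHONPATH')     # temporarily remove a variable
        # ... run code that reads os.environ ...
    # On exit the original values (or absences) are restored.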
-
-
-class DirsOnSysPath(object):
- """Context manager to temporarily add directories to sys.path.
-
- This makes a copy of sys.path, appends any directories given
- as positional arguments, then reverts sys.path to the copied
- settings when the context ends.
-
- Note that *all* sys.path modifications in the body of the
- context manager, including replacement of the object,
- will be reverted at the end of the block.
- """
-
- def __init__(self, *paths):
- self.original_value = sys.path[:]
- self.original_object = sys.path
- sys.path.extend(paths)
-
- def __enter__(self):
- return self
-
- def __exit__(self, *ignore_exc):
- sys.path = self.original_object
- sys.path[:] = self.original_value
-
-
-class TransientResource(object):
-
- """Raise ResourceDenied if an exception is raised while the context manager
- is in effect that matches the specified exception and attributes."""
-
- def __init__(self, exc, **kwargs):
- self.exc = exc
- self.attrs = kwargs
-
- def __enter__(self):
- return self
-
- def __exit__(self, type_=None, value=None, traceback=None):
- """If type_ is a subclass of self.exc and value has attributes matching
- self.attrs, raise ResourceDenied. Otherwise let the exception
- propagate (if any)."""
- if type_ is not None and issubclass(self.exc, type_):
- for attr, attr_value in self.attrs.items():
- if not hasattr(value, attr):
- break
- if getattr(value, attr) != attr_value:
- break
- else:
- raise ResourceDenied("an optional resource is not available")
-
-# Context managers that raise ResourceDenied when various issues
-# with the Internet connection manifest themselves as exceptions.
-# XXX deprecate these and use transient_internet() instead
-time_out = TransientResource(IOError, errno=errno.ETIMEDOUT)
-socket_peer_reset = TransientResource(socket.error, errno=errno.ECONNRESET)
-ioerror_peer_reset = TransientResource(IOError, errno=errno.ECONNRESET)
-
-
-@contextlib.contextmanager
-def transient_internet(resource_name, *, timeout=30.0, errnos=()):
- """Return a context manager that raises ResourceDenied when various issues
- with the Internet connection manifest themselves as exceptions."""
- default_errnos = [
- ('ECONNREFUSED', 111),
- ('ECONNRESET', 104),
- ('EHOSTUNREACH', 113),
- ('ENETUNREACH', 101),
- ('ETIMEDOUT', 110),
- ]
- default_gai_errnos = [
- ('EAI_AGAIN', -3),
- ('EAI_FAIL', -4),
- ('EAI_NONAME', -2),
- ('EAI_NODATA', -5),
- # Encountered when trying to resolve IPv6-only hostnames
- ('WSANO_DATA', 11004),
- ]
-
- denied = ResourceDenied("Resource '%s' is not available" % resource_name)
- captured_errnos = errnos
- gai_errnos = []
- if not captured_errnos:
- captured_errnos = [getattr(errno, name, num)
- for name, num in default_errnos]
- gai_errnos = [getattr(socket, name, num)
- for name, num in default_gai_errnos]
-
- def filter_error(err):
- n = getattr(err, 'errno', None)
- if (isinstance(err, socket.timeout) or
- (isinstance(err, socket.gaierror) and n in gai_errnos) or
- n in captured_errnos):
- if not verbose:
- sys.stderr.write(denied.args[0] + "\n")
- raise denied from err
-
- old_timeout = socket.getdefaulttimeout()
- try:
- if timeout is not None:
- socket.setdefaulttimeout(timeout)
- yield None
- except IOError as err:
- # urllib can wrap original socket errors multiple times (!), we must
- # unwrap to get at the original error.
- while True:
- a = err.args
- if len(a) >= 1 and isinstance(a[0], IOError):
- err = a[0]
- # The error can also be wrapped as args[1]:
- # except socket.error as msg:
- # raise IOError('socket error', msg).with_traceback(sys.exc_info()[2])
- elif len(a) >= 2 and isinstance(a[1], IOError):
- err = a[1]
- else:
- break
- filter_error(err)
- raise
- # XXX should we catch generic exceptions and look for their
- # __cause__ or __context__?
- finally:
- socket.setdefaulttimeout(old_timeout)
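
A short usage sketch of transient_internet, assuming it is imported from test.support; the host name is only an example:

    import urllib.request
    from test.support import transient_internet

    with transient_internet('example.com'):
        # Typical network failures (resets, timeouts, unreachable hosts)
        # raised in this block become ResourceDenied, i.e. a skipped test.
        urllib.request.urlopen('http://example.com/', timeout=15)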
-
-
-@contextlib.contextmanager
-def captured_output(stream_name):
- """Return a context manager used by captured_stdout/stdin/stderr
- that temporarily replaces the sys stream *stream_name* with a StringIO."""
- import io
- orig_stdout = getattr(sys, stream_name)
- setattr(sys, stream_name, io.StringIO())
- try:
- yield getattr(sys, stream_name)
- finally:
- setattr(sys, stream_name, orig_stdout)
-
-def captured_stdout():
- """Capture the output of sys.stdout:
-
- with captured_stdout() as s:
- print("hello")
- self.assertEqual(s.getvalue(), "hello")
- """
- return captured_output("stdout")
-
-def captured_stderr():
- return captured_output("stderr")
-
-def captured_stdin():
- return captured_output("stdin")
-
-
-def gc_collect():
- """Force as many objects as possible to be collected.
-
- In non-CPython implementations of Python, this is needed because timely
- deallocation is not guaranteed by the garbage collector. (Even in CPython
- this can be the case with reference cycles.) This means that __del__
- methods may be called later than expected and weakrefs may remain alive for
- longer than expected. This function tries its best to force all garbage
- objects to disappear.
- """
- gc.collect()
- if is_jython:
- time.sleep(0.1)
- gc.collect()
- gc.collect()
-
-
-def python_is_optimized():
- """Find if Python was built with optimizations."""
- cflags = sysconfig.get_config_var('PY_CFLAGS') or ''
- final_opt = ""
- for opt in cflags.split():
- if opt.startswith('-O'):
- final_opt = opt
- return final_opt and final_opt != '-O0'
-
-
-#=======================================================================
-# Decorator for running a function in a different locale, correctly resetting
-# it afterwards.
-
-def run_with_locale(catstr, *locales):
- def decorator(func):
- def inner(*args, **kwds):
- try:
- import locale
- category = getattr(locale, catstr)
- orig_locale = locale.setlocale(category)
- except AttributeError:
- # if the test author gives us an invalid category string
- raise
- except:
- # cannot retrieve original locale, so do nothing
- locale = orig_locale = None
- else:
- for loc in locales:
- try:
- locale.setlocale(category, loc)
- break
- except:
- pass
-
- # now run the function, resetting the locale on exceptions
- try:
- return func(*args, **kwds)
- finally:
- if locale and orig_locale:
- locale.setlocale(category, orig_locale)
- inner.__name__ = func.__name__
- inner.__doc__ = func.__doc__
- return inner
- return decorator
-
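
# A minimal usage sketch for the run_with_locale decorator above, assuming it
# is importable from test.support; the locale names are illustrative and may
# not all be installed on a given system.
import locale
import unittest
from test.support import run_with_locale

class RunWithLocaleExample(unittest.TestCase):
    @run_with_locale('LC_ALL', 'de_DE.UTF-8', 'de_DE', '')
    def test_runs_under_alternate_locale(self):
        # setlocale() with only a category queries the current setting, which
        # the decorator restores once the test returns.
        self.assertTrue(locale.setlocale(locale.LC_ALL))
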
-#=======================================================================
-# Big-memory-test support. Separate from 'resources' because memory use
-# should be configurable.
-
-# Some handy shorthands. Note that these are used for byte-limits as well
-# as size-limits, in the various bigmem tests
-_1M = 1024*1024
-_1G = 1024 * _1M
-_2G = 2 * _1G
-_4G = 4 * _1G
-
-MAX_Py_ssize_t = sys.maxsize
-
-def set_memlimit(limit):
- global max_memuse
- global real_max_memuse
- sizes = {
- 'k': 1024,
- 'm': _1M,
- 'g': _1G,
- 't': 1024*_1G,
- }
- m = re.match(r'(\d+(\.\d+)?) (K|M|G|T)b?$', limit,
- re.IGNORECASE | re.VERBOSE)
- if m is None:
- raise ValueError('Invalid memory limit %r' % (limit,))
- memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()])
- real_max_memuse = memlimit
- if memlimit > MAX_Py_ssize_t:
- memlimit = MAX_Py_ssize_t
- if memlimit < _2G - 1:
- raise ValueError('Memory limit %r too low to be useful' % (limit,))
- max_memuse = memlimit
-
-def _memory_watchdog(start_evt, finish_evt, period=10.0):
- """A function which periodically watches the process' memory consumption
- and prints it out.
- """
- # XXX: because of the GIL, and because the very long operations tested
- # in most bigmem tests are uninterruptible, the loop below gets woken up
- # much less often than expected.
- # The polling code should be rewritten in raw C, without holding the GIL,
- # and push results onto an anonymous pipe.
- try:
- page_size = os.sysconf('SC_PAGESIZE')
- except (ValueError, AttributeError):
- try:
- page_size = os.sysconf('SC_PAGE_SIZE')
- except (ValueError, AttributeError):
- page_size = 4096
- procfile = '/proc/{pid}/statm'.format(pid=os.getpid())
- try:
- f = open(procfile, 'rb')
- except IOError as e:
- warnings.warn('/proc not available for stats: {}'.format(e),
- RuntimeWarning)
- sys.stderr.flush()
- return
- with f:
- start_evt.set()
- old_data = -1
- while not finish_evt.wait(period):
- f.seek(0)
- statm = f.read().decode('ascii')
- data = int(statm.split()[5])
- if data != old_data:
- old_data = data
- print(" ... process data size: {data:.1f}G"
- .format(data=data * page_size / (1024 ** 3)))
-
-def bigmemtest(size, memuse, dry_run=True):
- """Decorator for bigmem tests.
-
- 'size' is the minimum useful size for the test (in arbitrary,
- test-interpreted units.) 'memuse' is the number of 'bytes per size' for
- the test, or a good estimate of it.
-
- if 'dry_run' is False, it means the test doesn't support dummy runs
- when -M is not specified.
- """
- def decorator(f):
- def wrapper(self):
- size = wrapper.size
- memuse = wrapper.memuse
- if not real_max_memuse:
- maxsize = 5147
- else:
- maxsize = size
-
- if ((real_max_memuse or not dry_run)
- and real_max_memuse < maxsize * memuse):
- raise unittest.SkipTest(
- "not enough memory: %.1fG minimum needed"
- % (size * memuse / (1024 ** 3)))
-
- if real_max_memuse and verbose and threading:
- print()
- print(" ... expected peak memory use: {peak:.1f}G"
- .format(peak=size * memuse / (1024 ** 3)))
- sys.stdout.flush()
- start_evt = threading.Event()
- finish_evt = threading.Event()
- t = threading.Thread(target=_memory_watchdog,
- args=(start_evt, finish_evt, 0.5))
- t.daemon = True
- t.start()
- start_evt.set()
- else:
- t = None
-
- try:
- return f(self, maxsize)
- finally:
- if t:
- finish_evt.set()
- t.join()
-
- wrapper.size = size
- wrapper.memuse = memuse
- return wrapper
- return decorator
-
-def bigaddrspacetest(f):
- """Decorator for tests that fill the address space."""
- def wrapper(self):
- if max_memuse < MAX_Py_ssize_t:
- if MAX_Py_ssize_t >= 2**63 - 1 and max_memuse >= 2**31:
- raise unittest.SkipTest(
- "not enough memory: try a 32-bit build instead")
- else:
- raise unittest.SkipTest(
- "not enough memory: %.1fG minimum needed"
- % (MAX_Py_ssize_t / (1024 ** 3)))
- else:
- return f(self)
- return wrapper
-
-#=======================================================================
-# unittest integration.
-
-class BasicTestRunner:
- def run(self, test):
- result = unittest.TestResult()
- test(result)
- return result
-
-def _id(obj):
- return obj
-
-def requires_resource(resource):
- if resource == 'gui' and not _is_gui_available():
- return unittest.skip("resource 'gui' is not available")
- if is_resource_enabled(resource):
- return _id
- else:
- return unittest.skip("resource {0!r} is not enabled".format(resource))
-
-def cpython_only(test):
- """
- Decorator for tests only applicable on CPython.
- """
- return impl_detail(cpython=True)(test)
-
-def impl_detail(msg=None, **guards):
- if check_impl_detail(**guards):
- return _id
- if msg is None:
- guardnames, default = _parse_guards(guards)
- if default:
- msg = "implementation detail not available on {0}"
- else:
- msg = "implementation detail specific to {0}"
- guardnames = sorted(guardnames.keys())
- msg = msg.format(' or '.join(guardnames))
- return unittest.skip(msg)
-
-def _parse_guards(guards):
- # Returns a tuple ({platform_name: run_me}, default_value)
- if not guards:
- return ({'cpython': True}, False)
- is_true = list(guards.values())[0]
- assert list(guards.values()) == [is_true] * len(guards) # all True or all False
- return (guards, not is_true)
-
-# Use the following check to guard CPython's implementation-specific tests --
-# or to run them only on the implementation(s) guarded by the arguments.
-def check_impl_detail(**guards):
- """This function returns True or False depending on the host platform.
- Examples:
- if check_impl_detail(): # only on CPython (default)
- if check_impl_detail(jython=True): # only on Jython
- if check_impl_detail(cpython=False): # everywhere except on CPython
- """
- guards, default = _parse_guards(guards)
- return guards.get(platform.python_implementation().lower(), default)
-
-
-def _filter_suite(suite, pred):
- """Recursively filter test cases in a suite based on a predicate."""
- newtests = []
- for test in suite._tests:
- if isinstance(test, unittest.TestSuite):
- _filter_suite(test, pred)
- newtests.append(test)
- else:
- if pred(test):
- newtests.append(test)
- suite._tests = newtests
-
-
-def _run_suite(suite):
- """Run tests from a unittest.TestSuite-derived class."""
- if verbose:
- runner = unittest.TextTestRunner(sys.stdout, verbosity=2,
- failfast=failfast)
- else:
- runner = BasicTestRunner()
-
- result = runner.run(suite)
- if not result.wasSuccessful():
- if len(result.errors) == 1 and not result.failures:
- err = result.errors[0][1]
- elif len(result.failures) == 1 and not result.errors:
- err = result.failures[0][1]
- else:
- err = "multiple errors occurred"
- if not verbose: err += "; run in verbose mode for details"
- raise TestFailed(err)
-
-
-def run_unittest(*classes):
- """Run tests from unittest.TestCase-derived classes."""
- valid_types = (unittest.TestSuite, unittest.TestCase)
- suite = unittest.TestSuite()
- for cls in classes:
- if isinstance(cls, str):
- if cls in sys.modules:
- suite.addTest(unittest.findTestCases(sys.modules[cls]))
- else:
- raise ValueError("str arguments must be keys in sys.modules")
- elif isinstance(cls, valid_types):
- suite.addTest(cls)
- else:
- suite.addTest(unittest.makeSuite(cls))
- def case_pred(test):
- if match_tests is None:
- return True
- for name in test.id().split("."):
- if fnmatch.fnmatchcase(name, match_tests):
- return True
- return False
- _filter_suite(suite, case_pred)
- _run_suite(suite)
-
-
-#=======================================================================
-# doctest driver.
-
-def run_doctest(module, verbosity=None):
- """Run doctest on the given module. Return (#failures, #tests).
-
- If optional argument verbosity is not specified (or is None), pass
- support's belief about verbosity on to doctest. Else doctest's
- usual behavior is used (it searches sys.argv for -v).
- """
-
- import doctest
-
- if verbosity is None:
- verbosity = verbose
- else:
- verbosity = None
-
- f, t = doctest.testmod(module, verbose=verbosity)
- if f:
- raise TestFailed("%d of %d doctests failed" % (f, t))
- if verbose:
- print('doctest (%s) ... %d tests with zero failures' %
- (module.__name__, t))
- return f, t
-
-
-#=======================================================================
-# Support for saving and restoring the imported modules.
-
-def modules_setup():
- return sys.modules.copy(),
-
-def modules_cleanup(oldmodules):
- # Encoders/decoders are registered permanently within the internal
- # codec cache. If we destroy the corresponding modules their
- # globals will be set to None which will trip up the cached functions.
- encodings = [(k, v) for k, v in sys.modules.items()
- if k.startswith('encodings.')]
- sys.modules.clear()
- sys.modules.update(encodings)
- # XXX: This kind of problem can affect more than just encodings. In particular
- # extension modules (such as _ssl) don't cope with reloading properly.
- # Really, test modules should be cleaning out the test specific modules they
- # know they added (ala test_runpy) rather than relying on this function (as
- # test_importhooks and test_pkg do currently).
- # Implicitly imported *real* modules should be left alone (see issue 10556).
- sys.modules.update(oldmodules)
-
-#=======================================================================
-# Threading support to prevent reporting refleaks when running regrtest.py -R
-
-# NOTE: we use thread._count() rather than threading.enumerate() (or the
-# moral equivalent thereof) because a threading.Thread object is still alive
-# until its __bootstrap() method has returned, even after it has been
-# unregistered from the threading module.
-# thread._count(), on the other hand, only gets decremented *after* the
-# __bootstrap() method has returned, which gives us reliable reference counts
-# at the end of a test run.
-
-def threading_setup():
- if _thread:
- return _thread._count(), threading._dangling.copy()
- else:
- return 1, ()
-
-def threading_cleanup(*original_values):
- if not _thread:
- return
- _MAX_COUNT = 10
- for count in range(_MAX_COUNT):
- values = _thread._count(), threading._dangling
- if values == original_values:
- break
- time.sleep(0.1)
- gc_collect()
- # XXX print a warning in case of failure?
-
-def reap_threads(func):
- """Use this function when threads are being used. This will
- ensure that the threads are cleaned up even when the test fails.
- If threading is unavailable this function does nothing.
- """
- if not _thread:
- return func
-
- @functools.wraps(func)
- def decorator(*args):
- key = threading_setup()
- try:
- return func(*args)
- finally:
- threading_cleanup(*key)
- return decorator
-
-def reap_children():
- """Use this function at the end of test_main() whenever sub-processes
- are started. This will help ensure that no extra children (zombies)
- stick around to hog resources and create problems when looking
- for refleaks.
- """
-
- # Reap all our dead child processes so we don't leave zombies around.
- # These hog resources and might be causing some of the buildbots to die.
- if hasattr(os, 'waitpid'):
- any_process = -1
- while True:
- try:
- # This will raise an exception on Windows. That's ok.
- pid, status = os.waitpid(any_process, os.WNOHANG)
- if pid == 0:
- break
- except:
- break
-
-@contextlib.contextmanager
-def swap_attr(obj, attr, new_val):
- """Temporarily swap out an attribute with a new object.
-
- Usage:
- with swap_attr(obj, "attr", 5):
- ...
-
- This will set obj.attr to 5 for the duration of the with: block,
- restoring the old value at the end of the block. If `attr` doesn't
- exist on `obj`, it will be created and then deleted at the end of the
- block.
- """
- if hasattr(obj, attr):
- real_val = getattr(obj, attr)
- setattr(obj, attr, new_val)
- try:
- yield None
- finally:
- setattr(obj, attr, real_val)
- else:
- setattr(obj, attr, new_val)
- try:
- yield None
- finally:
- delattr(obj, attr)
-
-@contextlib.contextmanager
-def swap_item(obj, item, new_val):
- """Temporarily swap out an item with a new object.
-
- Usage:
- with swap_item(obj, "item", 5):
- ...
-
- This will set obj["item"] to 5 for the duration of the with: block,
- restoring the old value at the end of the block. If `item` doesn't
- exist on `obj`, it will be created and then deleted at the end of the
- block.
- """
- if item in obj:
- real_val = obj[item]
- obj[item] = new_val
- try:
- yield None
- finally:
- obj[item] = real_val
- else:
- obj[item] = new_val
- try:
- yield None
- finally:
- del obj[item]
-
-def strip_python_stderr(stderr):
- """Strip the stderr of a Python process from potential debug output
- emitted by the interpreter.
-
- This will typically be run on the result of the communicate() method
- of a subprocess.Popen object.
- """
- stderr = re.sub(br"\[\d+ refs\]\r?\n?$", b"", stderr).strip()
- return stderr
-
-def args_from_interpreter_flags():
- """Return a list of command-line arguments reproducing the current
- settings in sys.flags."""
- flag_opt_map = {
- 'bytes_warning': 'b',
- 'dont_write_bytecode': 'B',
- 'hash_randomization': 'R',
- 'ignore_environment': 'E',
- 'no_user_site': 's',
- 'no_site': 'S',
- 'optimize': 'O',
- 'verbose': 'v',
- }
- args = []
- for flag, opt in flag_opt_map.items():
- v = getattr(sys.flags, flag)
- if v > 0:
- args.append('-' + opt * v)
- return args
-
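
# A minimal usage sketch for args_from_interpreter_flags() above, assuming it
# is importable from test.support: the returned flags are typically prepended
# to a child-interpreter command line so the child inherits -O/-B/-E/... from
# the parent.  The '-c' payload is only an example.
import subprocess
import sys
from test.support import args_from_interpreter_flags

cmd = [sys.executable] + args_from_interpreter_flags() + ['-c', 'print("ok")']
subprocess.check_call(cmd)
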
-#============================================================
-# Support for assertions about logging.
-#============================================================
-
-class TestHandler(logging.handlers.BufferingHandler):
- def __init__(self, matcher):
- # BufferingHandler takes a "capacity" argument
- # so as to know when to flush. As we're overriding
- # shouldFlush anyway, we can set a capacity of zero.
- # You can call flush() manually to clear out the
- # buffer.
- logging.handlers.BufferingHandler.__init__(self, 0)
- self.matcher = matcher
-
- def shouldFlush(self, record):
- return False
-
- def emit(self, record):
- self.format(record)
- self.buffer.append(record.__dict__)
-
- def matches(self, **kwargs):
- """
- Look for a saved dict whose keys/values match the supplied arguments.
- """
- result = False
- for d in self.buffer:
- if self.matcher.matches(d, **kwargs):
- result = True
- break
- return result
-
-class Matcher(object):
-
- _partial_matches = ('msg', 'message')
-
- def matches(self, d, **kwargs):
- """
- Try to match a single dict with the supplied arguments.
-
- Keys whose values are strings and which are in self._partial_matches
- will be checked for partial (i.e. substring) matches. You can extend
- this scheme to (for example) do regular expression matching, etc.
- """
- result = True
- for k in kwargs:
- v = kwargs[k]
- dv = d.get(k)
- if not self.match_value(k, dv, v):
- result = False
- break
- return result
-
- def match_value(self, k, dv, v):
- """
- Try to match a single stored value (dv) with a supplied value (v).
- """
- if type(v) != type(dv):
- result = False
- elif type(dv) is not str or k not in self._partial_matches:
- result = (v == dv)
- else:
- result = dv.find(v) >= 0
- return result
-
-
-_can_symlink = None # type: Any
-def can_symlink():
- global _can_symlink
- if _can_symlink is not None:
- return _can_symlink
- symlink_path = TESTFN + "can_symlink"
- try:
- os.symlink(TESTFN, symlink_path)
- can = True
- except (OSError, NotImplementedError, AttributeError):
- can = False
- else:
- os.remove(symlink_path)
- _can_symlink = can
- return can
-
-def skip_unless_symlink(test):
- """Skip decorator for tests that require functional symlink"""
- ok = can_symlink()
- msg = "Requires functional symlink implementation"
- if ok:
- return test
- else:
- return unittest.skip(msg)(test)
-
-def patch(test_instance, object_to_patch, attr_name, new_value):
- """Override 'object_to_patch'.'attr_name' with 'new_value'.
-
- Also, add a cleanup procedure to 'test_instance' to restore
- 'object_to_patch' value for 'attr_name'.
- The 'attr_name' should be a valid attribute for 'object_to_patch'.
-
- """
- # check that 'attr_name' is a real attribute for 'object_to_patch'
- # will raise AttributeError if it does not exist
- getattr(object_to_patch, attr_name)
-
- # keep a copy of the old value
- attr_is_local = False
- try:
- old_value = object_to_patch.__dict__[attr_name]
- except (AttributeError, KeyError):
- old_value = getattr(object_to_patch, attr_name, None)
- else:
- attr_is_local = True
-
- # restore the value when the test is done
- def cleanup():
- if attr_is_local:
- setattr(object_to_patch, attr_name, old_value)
- else:
- delattr(object_to_patch, attr_name)
-
- test_instance.addCleanup(cleanup)
-
- # actually override the attribute
- setattr(object_to_patch, attr_name, new_value)
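
For reference, a minimal sketch of how two of the helpers deleted above are
meant to be used in a test case; the patched module and attribute (math.pi)
are illustrative only, and the imports assume the helpers are available from
test.support:

import math
import unittest
from test.support import captured_stdout, patch

class SupportHelpersExample(unittest.TestCase):
    def test_patch_is_undone_automatically(self):
        patch(self, math, 'pi', 3)        # restored by the addCleanup() hook
        self.assertEqual(math.pi, 3)

    def test_captured_stdout(self):
        with captured_stdout() as out:
            print("hello")
        self.assertEqual(out.getvalue(), "hello\n")   # print() appends '\n'

if __name__ == '__main__':
    unittest.main()
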
diff --git a/test-data/stdlib-samples/3.2/test/test_base64.py b/test-data/stdlib-samples/3.2/test/test_base64.py
deleted file mode 100644
index 9e4dcf5..0000000
--- a/test-data/stdlib-samples/3.2/test/test_base64.py
+++ /dev/null
@@ -1,267 +0,0 @@
-import unittest
-from test import support
-import base64
-import binascii
-import sys
-import subprocess
-
-from typing import Any
-
-
-
-class LegacyBase64TestCase(unittest.TestCase):
- def test_encodebytes(self) -> None:
- eq = self.assertEqual
- eq(base64.encodebytes(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=\n")
- eq(base64.encodebytes(b"a"), b"YQ==\n")
- eq(base64.encodebytes(b"ab"), b"YWI=\n")
- eq(base64.encodebytes(b"abc"), b"YWJj\n")
- eq(base64.encodebytes(b""), b"")
- eq(base64.encodebytes(b"abcdefghijklmnopqrstuvwxyz"
- b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
- b"0123456789!@#0^&*();:<>,. []{}"),
- b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
- b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
- b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n")
- self.assertRaises(TypeError, base64.encodebytes, "")
-
- def test_decodebytes(self) -> None:
- eq = self.assertEqual
- eq(base64.decodebytes(b"d3d3LnB5dGhvbi5vcmc=\n"), b"www.python.org")
- eq(base64.decodebytes(b"YQ==\n"), b"a")
- eq(base64.decodebytes(b"YWI=\n"), b"ab")
- eq(base64.decodebytes(b"YWJj\n"), b"abc")
- eq(base64.decodebytes(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
- b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
- b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n"),
- b"abcdefghijklmnopqrstuvwxyz"
- b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
- b"0123456789!@#0^&*();:<>,. []{}")
- eq(base64.decodebytes(b''), b'')
- self.assertRaises(TypeError, base64.decodebytes, "")
-
- def test_encode(self) -> None:
- eq = self.assertEqual
- from io import BytesIO
- infp = BytesIO(b'abcdefghijklmnopqrstuvwxyz'
- b'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
- b'0123456789!@#0^&*();:<>,. []{}')
- outfp = BytesIO()
- base64.encode(infp, outfp)
- eq(outfp.getvalue(),
- b'YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE'
- b'RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT'
- b'Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n')
-
- def test_decode(self) -> None:
- from io import BytesIO
- infp = BytesIO(b'd3d3LnB5dGhvbi5vcmc=')
- outfp = BytesIO()
- base64.decode(infp, outfp)
- self.assertEqual(outfp.getvalue(), b'www.python.org')
-
-
-class BaseXYTestCase(unittest.TestCase):
- def test_b64encode(self) -> None:
- eq = self.assertEqual
- # Test default alphabet
- eq(base64.b64encode(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=")
- eq(base64.b64encode(b'\x00'), b'AA==')
- eq(base64.b64encode(b"a"), b"YQ==")
- eq(base64.b64encode(b"ab"), b"YWI=")
- eq(base64.b64encode(b"abc"), b"YWJj")
- eq(base64.b64encode(b""), b"")
- eq(base64.b64encode(b"abcdefghijklmnopqrstuvwxyz"
- b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
- b"0123456789!@#0^&*();:<>,. []{}"),
- b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
- b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
- b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==")
- # Test with arbitrary alternative characters
- eq(base64.b64encode(b'\xd3V\xbeo\xf7\x1d', altchars=b'*$'), b'01a*b$cd')
- # Check if passing a str object raises an error
- self.assertRaises(TypeError, base64.b64encode, "")
- self.assertRaises(TypeError, base64.b64encode, b"", altchars="")
- # Test standard alphabet
- eq(base64.standard_b64encode(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=")
- eq(base64.standard_b64encode(b"a"), b"YQ==")
- eq(base64.standard_b64encode(b"ab"), b"YWI=")
- eq(base64.standard_b64encode(b"abc"), b"YWJj")
- eq(base64.standard_b64encode(b""), b"")
- eq(base64.standard_b64encode(b"abcdefghijklmnopqrstuvwxyz"
- b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
- b"0123456789!@#0^&*();:<>,. []{}"),
- b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
- b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
- b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==")
- # Check if passing a str object raises an error
- self.assertRaises(TypeError, base64.standard_b64encode, "")
- self.assertRaises(TypeError, base64.standard_b64encode, b"", altchars="")
- # Test with 'URL safe' alternative characters
- eq(base64.urlsafe_b64encode(b'\xd3V\xbeo\xf7\x1d'), b'01a-b_cd')
- # Check if passing a str object raises an error
- self.assertRaises(TypeError, base64.urlsafe_b64encode, "")
-
- def test_b64decode(self) -> None:
- eq = self.assertEqual
- eq(base64.b64decode(b"d3d3LnB5dGhvbi5vcmc="), b"www.python.org")
- eq(base64.b64decode(b'AA=='), b'\x00')
- eq(base64.b64decode(b"YQ=="), b"a")
- eq(base64.b64decode(b"YWI="), b"ab")
- eq(base64.b64decode(b"YWJj"), b"abc")
- eq(base64.b64decode(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
- b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
- b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="),
- b"abcdefghijklmnopqrstuvwxyz"
- b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
- b"0123456789!@#0^&*();:<>,. []{}")
- eq(base64.b64decode(b''), b'')
- # Test with arbitrary alternative characters
- eq(base64.b64decode(b'01a*b$cd', altchars=b'*$'), b'\xd3V\xbeo\xf7\x1d')
- # Check if passing a str object raises an error
- self.assertRaises(TypeError, base64.b64decode, "")
- self.assertRaises(TypeError, base64.b64decode, b"", altchars="")
- # Test standard alphabet
- eq(base64.standard_b64decode(b"d3d3LnB5dGhvbi5vcmc="), b"www.python.org")
- eq(base64.standard_b64decode(b"YQ=="), b"a")
- eq(base64.standard_b64decode(b"YWI="), b"ab")
- eq(base64.standard_b64decode(b"YWJj"), b"abc")
- eq(base64.standard_b64decode(b""), b"")
- eq(base64.standard_b64decode(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
- b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
- b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="),
- b"abcdefghijklmnopqrstuvwxyz"
- b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
- b"0123456789!@#0^&*();:<>,. []{}")
- # Check if passing a str object raises an error
- self.assertRaises(TypeError, base64.standard_b64decode, "")
- self.assertRaises(TypeError, base64.standard_b64decode, b"", altchars="")
- # Test with 'URL safe' alternative characters
- eq(base64.urlsafe_b64decode(b'01a-b_cd'), b'\xd3V\xbeo\xf7\x1d')
- self.assertRaises(TypeError, base64.urlsafe_b64decode, "")
-
- def test_b64decode_padding_error(self) -> None:
- self.assertRaises(binascii.Error, base64.b64decode, b'abc')
-
- def test_b64decode_invalid_chars(self) -> None:
- # issue 1466065: Test some invalid characters.
- tests = ((b'%3d==', b'\xdd'),
- (b'$3d==', b'\xdd'),
- (b'[==', b''),
- (b'YW]3=', b'am'),
- (b'3{d==', b'\xdd'),
- (b'3d}==', b'\xdd'),
- (b'@@', b''),
- (b'!', b''),
- (b'YWJj\nYWI=', b'abcab'))
- for bstr, res in tests:
- self.assertEqual(base64.b64decode(bstr), res)
- with self.assertRaises(binascii.Error):
- base64.b64decode(bstr, validate=True)
-
- def test_b32encode(self) -> None:
- eq = self.assertEqual
- eq(base64.b32encode(b''), b'')
- eq(base64.b32encode(b'\x00'), b'AA======')
- eq(base64.b32encode(b'a'), b'ME======')
- eq(base64.b32encode(b'ab'), b'MFRA====')
- eq(base64.b32encode(b'abc'), b'MFRGG===')
- eq(base64.b32encode(b'abcd'), b'MFRGGZA=')
- eq(base64.b32encode(b'abcde'), b'MFRGGZDF')
- self.assertRaises(TypeError, base64.b32encode, "")
-
- def test_b32decode(self) -> None:
- eq = self.assertEqual
- eq(base64.b32decode(b''), b'')
- eq(base64.b32decode(b'AA======'), b'\x00')
- eq(base64.b32decode(b'ME======'), b'a')
- eq(base64.b32decode(b'MFRA===='), b'ab')
- eq(base64.b32decode(b'MFRGG==='), b'abc')
- eq(base64.b32decode(b'MFRGGZA='), b'abcd')
- eq(base64.b32decode(b'MFRGGZDF'), b'abcde')
- self.assertRaises(TypeError, base64.b32decode, "")
-
- def test_b32decode_casefold(self) -> None:
- eq = self.assertEqual
- eq(base64.b32decode(b'', True), b'')
- eq(base64.b32decode(b'ME======', True), b'a')
- eq(base64.b32decode(b'MFRA====', True), b'ab')
- eq(base64.b32decode(b'MFRGG===', True), b'abc')
- eq(base64.b32decode(b'MFRGGZA=', True), b'abcd')
- eq(base64.b32decode(b'MFRGGZDF', True), b'abcde')
- # Lower cases
- eq(base64.b32decode(b'me======', True), b'a')
- eq(base64.b32decode(b'mfra====', True), b'ab')
- eq(base64.b32decode(b'mfrgg===', True), b'abc')
- eq(base64.b32decode(b'mfrggza=', True), b'abcd')
- eq(base64.b32decode(b'mfrggzdf', True), b'abcde')
- # Expected exceptions
- self.assertRaises(TypeError, base64.b32decode, b'me======')
- # Mapping zero and one
- eq(base64.b32decode(b'MLO23456'), b'b\xdd\xad\xf3\xbe')
- eq(base64.b32decode(b'M1023456', map01=b'L'), b'b\xdd\xad\xf3\xbe')
- eq(base64.b32decode(b'M1023456', map01=b'I'), b'b\x1d\xad\xf3\xbe')
- self.assertRaises(TypeError, base64.b32decode, b"", map01="")
-
- def test_b32decode_error(self) -> None:
- self.assertRaises(binascii.Error, base64.b32decode, b'abc')
- self.assertRaises(binascii.Error, base64.b32decode, b'ABCDEF==')
-
- def test_b16encode(self) -> None:
- eq = self.assertEqual
- eq(base64.b16encode(b'\x01\x02\xab\xcd\xef'), b'0102ABCDEF')
- eq(base64.b16encode(b'\x00'), b'00')
- self.assertRaises(TypeError, base64.b16encode, "")
-
- def test_b16decode(self) -> None:
- eq = self.assertEqual
- eq(base64.b16decode(b'0102ABCDEF'), b'\x01\x02\xab\xcd\xef')
- eq(base64.b16decode(b'00'), b'\x00')
- # Lower case is not allowed without a flag
- self.assertRaises(binascii.Error, base64.b16decode, b'0102abcdef')
- # Case fold
- eq(base64.b16decode(b'0102abcdef', True), b'\x01\x02\xab\xcd\xef')
- self.assertRaises(TypeError, base64.b16decode, "")
-
- def test_ErrorHeritage(self) -> None:
- self.assertTrue(issubclass(binascii.Error, ValueError))
-
-
-
-class TestMain(unittest.TestCase):
- def get_output(self, *args_tuple: str, **options: Any) -> Any:
- args = [sys.executable, '-m', 'base64'] + list(args_tuple)
- return subprocess.check_output(args, **options)
-
- def test_encode_decode(self) -> None:
- output = self.get_output('-t')
- self.assertSequenceEqual(output.splitlines(), [
- b"b'Aladdin:open sesame'",
- br"b'QWxhZGRpbjpvcGVuIHNlc2FtZQ==\n'",
- b"b'Aladdin:open sesame'",
- ])
-
- def test_encode_file(self) -> None:
- with open(support.TESTFN, 'wb') as fp:
- fp.write(b'a\xffb\n')
-
- output = self.get_output('-e', support.TESTFN)
- self.assertEqual(output.rstrip(), b'Yf9iCg==')
-
- with open(support.TESTFN, 'rb') as fp:
- output = self.get_output('-e', stdin=fp)
- self.assertEqual(output.rstrip(), b'Yf9iCg==')
-
- def test_decode(self) -> None:
- with open(support.TESTFN, 'wb') as fp:
- fp.write(b'Yf9iCg==')
- output = self.get_output('-d', support.TESTFN)
- self.assertEqual(output.rstrip(), b'a\xffb')
-
-
-
-def test_main() -> None:
- support.run_unittest(__name__)
-
-if __name__ == '__main__':
- test_main()
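
For reference, the round-trip behaviour the deleted base64 tests above
exercise, shown with the stdlib API (bytes in, bytes out; passing str raises
TypeError):

import base64

encoded = base64.b64encode(b"www.python.org")
assert encoded == b"d3d3LnB5dGhvbi5vcmc="
assert base64.b64decode(encoded) == b"www.python.org"
# The URL-safe variant substitutes '-' and '_' for '+' and '/'
assert base64.urlsafe_b64encode(b'\xd3V\xbeo\xf7\x1d') == b'01a-b_cd'
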
diff --git a/test-data/stdlib-samples/3.2/test/test_fnmatch.py b/test-data/stdlib-samples/3.2/test/test_fnmatch.py
deleted file mode 100644
index 0f5a23b..0000000
--- a/test-data/stdlib-samples/3.2/test/test_fnmatch.py
+++ /dev/null
@@ -1,93 +0,0 @@
-"""Test cases for the fnmatch module."""
-
-from test import support
-import unittest
-
-from fnmatch import fnmatch, fnmatchcase, translate, filter
-
-from typing import Any, AnyStr, Callable
-
-class FnmatchTestCase(unittest.TestCase):
-
- def check_match(self, filename: AnyStr, pattern: AnyStr,
- should_match: int = 1,
- fn: Any = fnmatch) -> None: # see #270
- if should_match:
- self.assertTrue(fn(filename, pattern),
- "expected %r to match pattern %r"
- % (filename, pattern))
- else:
- self.assertTrue(not fn(filename, pattern),
- "expected %r not to match pattern %r"
- % (filename, pattern))
-
- def test_fnmatch(self) -> None:
- check = self.check_match
- check('abc', 'abc')
- check('abc', '?*?')
- check('abc', '???*')
- check('abc', '*???')
- check('abc', '???')
- check('abc', '*')
- check('abc', 'ab[cd]')
- check('abc', 'ab[!de]')
- check('abc', 'ab[de]', 0)
- check('a', '??', 0)
- check('a', 'b', 0)
-
- # these test that '\' is handled correctly in character sets;
- # see SF bug #409651
- check('\\', r'[\]')
- check('a', r'[!\]')
- check('\\', r'[!\]', 0)
-
- # test that filenames with newlines in them are handled correctly.
- # http://bugs.python.org/issue6665
- check('foo\nbar', 'foo*')
- check('foo\nbar\n', 'foo*')
- check('\nfoo', 'foo*', False)
- check('\n', '*')
-
- def test_mix_bytes_str(self) -> None:
- self.assertRaises(TypeError, fnmatch, 'test', b'*')
- self.assertRaises(TypeError, fnmatch, b'test', '*')
- self.assertRaises(TypeError, fnmatchcase, 'test', b'*')
- self.assertRaises(TypeError, fnmatchcase, b'test', '*')
-
- def test_fnmatchcase(self) -> None:
- check = self.check_match
- check('AbC', 'abc', 0, fnmatchcase)
- check('abc', 'AbC', 0, fnmatchcase)
-
- def test_bytes(self) -> None:
- self.check_match(b'test', b'te*')
- self.check_match(b'test\xff', b'te*\xff')
- self.check_match(b'foo\nbar', b'foo*')
-
-class TranslateTestCase(unittest.TestCase):
-
- def test_translate(self) -> None:
- self.assertEqual(translate('*'), '.*\Z(?ms)')
- self.assertEqual(translate('?'), '.\Z(?ms)')
- self.assertEqual(translate('a?b*'), 'a.b.*\Z(?ms)')
- self.assertEqual(translate('[abc]'), '[abc]\Z(?ms)')
- self.assertEqual(translate('[]]'), '[]]\Z(?ms)')
- self.assertEqual(translate('[!x]'), '[^x]\Z(?ms)')
- self.assertEqual(translate('[^x]'), '[\\^x]\Z(?ms)')
- self.assertEqual(translate('[x'), '\\[x\Z(?ms)')
-
-
-class FilterTestCase(unittest.TestCase):
-
- def test_filter(self) -> None:
- self.assertEqual(filter(['a', 'b'], 'a'), ['a'])
-
-
-def test_main() -> None:
- support.run_unittest(FnmatchTestCase,
- TranslateTestCase,
- FilterTestCase)
-
-
-if __name__ == "__main__":
- test_main()
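
For reference, the core fnmatch behaviours the deleted tests above cover, as a
quick standalone sketch:

from fnmatch import fnmatch, fnmatchcase, translate

assert fnmatch('abc', '?*?')          # '?' matches one character, '*' any run
assert fnmatch('abc', 'ab[cd]')       # character classes work as in the shell
assert not fnmatchcase('AbC', 'abc')  # fnmatchcase() never normalizes case
print(translate('a?b*'))              # the regular expression a pattern maps to
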
diff --git a/test-data/stdlib-samples/3.2/test/test_genericpath.py b/test-data/stdlib-samples/3.2/test/test_genericpath.py
deleted file mode 100644
index 43b78e7..0000000
--- a/test-data/stdlib-samples/3.2/test/test_genericpath.py
+++ /dev/null
@@ -1,313 +0,0 @@
-"""
-Tests common to genericpath, macpath, ntpath and posixpath
-"""
-
-import unittest
-from test import support
-import os
-
-import genericpath
-import imp
-imp.reload(genericpath) # Make sure we are using the local copy
-
-import sys
-from typing import Any, List
-
-
-def safe_rmdir(dirname: str) -> None:
- try:
- os.rmdir(dirname)
- except OSError:
- pass
-
-
-class GenericTest(unittest.TestCase):
- # The path module to be tested
- pathmodule = genericpath # type: Any
- common_attributes = ['commonprefix', 'getsize', 'getatime', 'getctime',
- 'getmtime', 'exists', 'isdir', 'isfile']
- attributes = [] # type: List[str]
-
- def test_no_argument(self) -> None:
- for attr in self.common_attributes + self.attributes:
- with self.assertRaises(TypeError):
- getattr(self.pathmodule, attr)()
- self.fail("{}.{}() did not raise a TypeError"
- .format(self.pathmodule.__name__, attr))
-
- def test_commonprefix(self) -> None:
- commonprefix = self.pathmodule.commonprefix
- self.assertEqual(
- commonprefix([]),
- ""
- )
- self.assertEqual(
- commonprefix(["/home/swenson/spam", "/home/swen/spam"]),
- "/home/swen"
- )
- self.assertEqual(
- commonprefix(["/home/swen/spam", "/home/swen/eggs"]),
- "/home/swen/"
- )
- self.assertEqual(
- commonprefix(["/home/swen/spam", "/home/swen/spam"]),
- "/home/swen/spam"
- )
- self.assertEqual(
- commonprefix(["home:swenson:spam", "home:swen:spam"]),
- "home:swen"
- )
- self.assertEqual(
- commonprefix([":home:swen:spam", ":home:swen:eggs"]),
- ":home:swen:"
- )
- self.assertEqual(
- commonprefix([":home:swen:spam", ":home:swen:spam"]),
- ":home:swen:spam"
- )
-
- self.assertEqual(
- commonprefix([b"/home/swenson/spam", b"/home/swen/spam"]),
- b"/home/swen"
- )
- self.assertEqual(
- commonprefix([b"/home/swen/spam", b"/home/swen/eggs"]),
- b"/home/swen/"
- )
- self.assertEqual(
- commonprefix([b"/home/swen/spam", b"/home/swen/spam"]),
- b"/home/swen/spam"
- )
- self.assertEqual(
- commonprefix([b"home:swenson:spam", b"home:swen:spam"]),
- b"home:swen"
- )
- self.assertEqual(
- commonprefix([b":home:swen:spam", b":home:swen:eggs"]),
- b":home:swen:"
- )
- self.assertEqual(
- commonprefix([b":home:swen:spam", b":home:swen:spam"]),
- b":home:swen:spam"
- )
-
- testlist = ['', 'abc', 'Xbcd', 'Xb', 'XY', 'abcd',
- 'aXc', 'abd', 'ab', 'aX', 'abcX']
- for s1 in testlist:
- for s2 in testlist:
- p = commonprefix([s1, s2])
- self.assertTrue(s1.startswith(p))
- self.assertTrue(s2.startswith(p))
- if s1 != s2:
- n = len(p)
- self.assertNotEqual(s1[n:n+1], s2[n:n+1])
-
- def test_getsize(self) -> None:
- f = open(support.TESTFN, "wb")
- try:
- f.write(b"foo")
- f.close()
- self.assertEqual(self.pathmodule.getsize(support.TESTFN), 3)
- finally:
- if not f.closed:
- f.close()
- support.unlink(support.TESTFN)
-
- def test_time(self) -> None:
- f = open(support.TESTFN, "wb")
- try:
- f.write(b"foo")
- f.close()
- f = open(support.TESTFN, "ab")
- f.write(b"bar")
- f.close()
- f = open(support.TESTFN, "rb")
- d = f.read()
- f.close()
- self.assertEqual(d, b"foobar")
-
- self.assertLessEqual(
- self.pathmodule.getctime(support.TESTFN),
- self.pathmodule.getmtime(support.TESTFN)
- )
- finally:
- if not f.closed:
- f.close()
- support.unlink(support.TESTFN)
-
- def test_exists(self) -> None:
- self.assertIs(self.pathmodule.exists(support.TESTFN), False)
- f = open(support.TESTFN, "wb")
- try:
- f.write(b"foo")
- f.close()
- self.assertIs(self.pathmodule.exists(support.TESTFN), True)
- if not self.pathmodule == genericpath:
- self.assertIs(self.pathmodule.lexists(support.TESTFN),
- True)
- finally:
- if not f.closed:
- f.close()
- support.unlink(support.TESTFN)
-
- def test_isdir(self) -> None:
- self.assertIs(self.pathmodule.isdir(support.TESTFN), False)
- f = open(support.TESTFN, "wb")
- try:
- f.write(b"foo")
- f.close()
- self.assertIs(self.pathmodule.isdir(support.TESTFN), False)
- os.remove(support.TESTFN)
- os.mkdir(support.TESTFN)
- self.assertIs(self.pathmodule.isdir(support.TESTFN), True)
- os.rmdir(support.TESTFN)
- finally:
- if not f.closed:
- f.close()
- support.unlink(support.TESTFN)
- safe_rmdir(support.TESTFN)
-
- def test_isfile(self) -> None:
- self.assertIs(self.pathmodule.isfile(support.TESTFN), False)
- f = open(support.TESTFN, "wb")
- try:
- f.write(b"foo")
- f.close()
- self.assertIs(self.pathmodule.isfile(support.TESTFN), True)
- os.remove(support.TESTFN)
- os.mkdir(support.TESTFN)
- self.assertIs(self.pathmodule.isfile(support.TESTFN), False)
- os.rmdir(support.TESTFN)
- finally:
- if not f.closed:
- f.close()
- support.unlink(support.TESTFN)
- safe_rmdir(support.TESTFN)
-
-
-# The following TestCase is not supposed to be run from test_genericpath.
-# It is inherited by other test modules (macpath, ntpath, posixpath).
-
-class CommonTest(GenericTest):
- # The path module to be tested
- pathmodule = None # type: Any
- common_attributes = GenericTest.common_attributes + [
- # Properties
- 'curdir', 'pardir', 'extsep', 'sep',
- 'pathsep', 'defpath', 'altsep', 'devnull',
- # Methods
- 'normcase', 'splitdrive', 'expandvars', 'normpath', 'abspath',
- 'join', 'split', 'splitext', 'isabs', 'basename', 'dirname',
- 'lexists', 'islink', 'ismount', 'expanduser', 'normpath', 'realpath',
- ]
-
- def test_normcase(self) -> None:
- normcase = self.pathmodule.normcase
- # check that normcase() is idempotent
- for p in ["FoO/./BaR", b"FoO/./BaR"]:
- p = normcase(p)
- self.assertEqual(p, normcase(p))
-
- self.assertEqual(normcase(''), '')
- self.assertEqual(normcase(b''), b'')
-
- # check that normcase raises a TypeError for invalid types
- for path in (None, True, 0, 2.5, [], bytearray(b''), {'o','o'}):
- self.assertRaises(TypeError, normcase, path)
-
- def test_splitdrive(self) -> None:
- # splitdrive for non-NT paths
- splitdrive = self.pathmodule.splitdrive
- self.assertEqual(splitdrive("/foo/bar"), ("", "/foo/bar"))
- self.assertEqual(splitdrive("foo:bar"), ("", "foo:bar"))
- self.assertEqual(splitdrive(":foo:bar"), ("", ":foo:bar"))
-
- self.assertEqual(splitdrive(b"/foo/bar"), (b"", b"/foo/bar"))
- self.assertEqual(splitdrive(b"foo:bar"), (b"", b"foo:bar"))
- self.assertEqual(splitdrive(b":foo:bar"), (b"", b":foo:bar"))
-
- def test_expandvars(self) -> None:
- if self.pathmodule.__name__ == 'macpath':
- self.skipTest('macpath.expandvars is a stub')
- expandvars = self.pathmodule.expandvars
- with support.EnvironmentVarGuard() as env:
- env.clear()
- env["foo"] = "bar"
- env["{foo"] = "baz1"
- env["{foo}"] = "baz2"
- self.assertEqual(expandvars("foo"), "foo")
- self.assertEqual(expandvars("$foo bar"), "bar bar")
- self.assertEqual(expandvars("${foo}bar"), "barbar")
- self.assertEqual(expandvars("$[foo]bar"), "$[foo]bar")
- self.assertEqual(expandvars("$bar bar"), "$bar bar")
- self.assertEqual(expandvars("$?bar"), "$?bar")
- self.assertEqual(expandvars("${foo}bar"), "barbar")
- self.assertEqual(expandvars("$foo}bar"), "bar}bar")
- self.assertEqual(expandvars("${foo"), "${foo")
- self.assertEqual(expandvars("${{foo}}"), "baz1}")
- self.assertEqual(expandvars("$foo$foo"), "barbar")
- self.assertEqual(expandvars("$bar$bar"), "$bar$bar")
-
- self.assertEqual(expandvars(b"foo"), b"foo")
- self.assertEqual(expandvars(b"$foo bar"), b"bar bar")
- self.assertEqual(expandvars(b"${foo}bar"), b"barbar")
- self.assertEqual(expandvars(b"$[foo]bar"), b"$[foo]bar")
- self.assertEqual(expandvars(b"$bar bar"), b"$bar bar")
- self.assertEqual(expandvars(b"$?bar"), b"$?bar")
- self.assertEqual(expandvars(b"${foo}bar"), b"barbar")
- self.assertEqual(expandvars(b"$foo}bar"), b"bar}bar")
- self.assertEqual(expandvars(b"${foo"), b"${foo")
- self.assertEqual(expandvars(b"${{foo}}"), b"baz1}")
- self.assertEqual(expandvars(b"$foo$foo"), b"barbar")
- self.assertEqual(expandvars(b"$bar$bar"), b"$bar$bar")
-
- def test_abspath(self) -> None:
- self.assertIn("foo", self.pathmodule.abspath("foo"))
- self.assertIn(b"foo", self.pathmodule.abspath(b"foo"))
-
- # Abspath returns bytes when the arg is bytes
- for path in (b'', b'foo', b'f\xf2\xf2', b'/foo', b'C:\\'):
- self.assertIsInstance(self.pathmodule.abspath(path), bytes)
-
- def test_realpath(self) -> None:
- self.assertIn("foo", self.pathmodule.realpath("foo"))
- self.assertIn(b"foo", self.pathmodule.realpath(b"foo"))
-
- def test_normpath_issue5827(self) -> None:
- # Make sure normpath preserves unicode
- for path in ('', '.', '/', '\\', '///foo/.//bar//'):
- self.assertIsInstance(self.pathmodule.normpath(path), str)
-
- def test_abspath_issue3426(self) -> None:
- # Check that abspath returns unicode when the arg is unicode
- # with both ASCII and non-ASCII cwds.
- abspath = self.pathmodule.abspath
- for path in ('', 'fuu', 'f\xf9\xf9', '/fuu', 'U:\\'):
- self.assertIsInstance(abspath(path), str)
-
- unicwd = '\xe7w\xf0'
- try:
- fsencoding = support.TESTFN_ENCODING or "ascii"
- unicwd.encode(fsencoding)
- except (AttributeError, UnicodeEncodeError):
- # FS encoding is probably ASCII
- pass
- else:
- with support.temp_cwd(unicwd):
- for path in ('', 'fuu', 'f\xf9\xf9', '/fuu', 'U:\\'):
- self.assertIsInstance(abspath(path), str)
-
- @unittest.skipIf(sys.platform == 'darwin',
- "Mac OS X denies the creation of a directory with an invalid utf8 name")
- def test_nonascii_abspath(self) -> None:
- # Test non-ASCII, non-UTF8 bytes in the path.
- with support.temp_cwd(b'\xe7w\xf0'):
- self.test_abspath()
-
-
-def test_main() -> None:
- support.run_unittest(GenericTest)
-
-
-if __name__=="__main__":
- test_main()
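
As a quick illustration of the behaviour checked above, commonprefix() is a
plain character-wise prefix rather than a path-aware one:

import os.path

assert os.path.commonprefix([]) == ""
assert os.path.commonprefix(["/home/swenson/spam", "/home/swen/spam"]) == "/home/swen"
assert os.path.commonprefix(["/home/swen/spam", "/home/swen/eggs"]) == "/home/swen/"
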
diff --git a/test-data/stdlib-samples/3.2/test/test_getopt.py b/test-data/stdlib-samples/3.2/test/test_getopt.py
deleted file mode 100644
index 3320552..0000000
--- a/test-data/stdlib-samples/3.2/test/test_getopt.py
+++ /dev/null
@@ -1,190 +0,0 @@
-# test_getopt.py
-# David Goodger <dgoodger at bigfoot.com> 2000-08-19
-
-from test.support import verbose, run_doctest, run_unittest, EnvironmentVarGuard
-import unittest
-
-import getopt
-
-from typing import cast, Any
-
-sentinel = object()
-
-class GetoptTests(unittest.TestCase):
- def setUp(self) -> None:
- self.env = EnvironmentVarGuard()
- if "POSIXLY_CORRECT" in self.env:
- del self.env["POSIXLY_CORRECT"]
-
- def tearDown(self) -> None:
- self.env.__exit__()
- del self.env
-
- def assertError(self, *args: Any, **kwargs: Any) -> None:
- # JLe: work around mypy bug #229
- cast(Any, self.assertRaises)(getopt.GetoptError, *args, **kwargs)
-
- def test_short_has_arg(self) -> None:
- self.assertTrue(getopt.short_has_arg('a', 'a:'))
- self.assertFalse(getopt.short_has_arg('a', 'a'))
- self.assertError(getopt.short_has_arg, 'a', 'b')
-
- def test_long_has_args(self) -> None:
- has_arg, option = getopt.long_has_args('abc', ['abc='])
- self.assertTrue(has_arg)
- self.assertEqual(option, 'abc')
-
- has_arg, option = getopt.long_has_args('abc', ['abc'])
- self.assertFalse(has_arg)
- self.assertEqual(option, 'abc')
-
- has_arg, option = getopt.long_has_args('abc', ['abcd'])
- self.assertFalse(has_arg)
- self.assertEqual(option, 'abcd')
-
- self.assertError(getopt.long_has_args, 'abc', ['def'])
- self.assertError(getopt.long_has_args, 'abc', [])
- self.assertError(getopt.long_has_args, 'abc', ['abcd','abcde'])
-
- def test_do_shorts(self) -> None:
- opts, args = getopt.do_shorts([], 'a', 'a', [])
- self.assertEqual(opts, [('-a', '')])
- self.assertEqual(args, [])
-
- opts, args = getopt.do_shorts([], 'a1', 'a:', [])
- self.assertEqual(opts, [('-a', '1')])
- self.assertEqual(args, [])
-
- #opts, args = getopt.do_shorts([], 'a=1', 'a:', [])
- #self.assertEqual(opts, [('-a', '1')])
- #self.assertEqual(args, [])
-
- opts, args = getopt.do_shorts([], 'a', 'a:', ['1'])
- self.assertEqual(opts, [('-a', '1')])
- self.assertEqual(args, [])
-
- opts, args = getopt.do_shorts([], 'a', 'a:', ['1', '2'])
- self.assertEqual(opts, [('-a', '1')])
- self.assertEqual(args, ['2'])
-
- self.assertError(getopt.do_shorts, [], 'a1', 'a', [])
- self.assertError(getopt.do_shorts, [], 'a', 'a:', [])
-
- def test_do_longs(self) -> None:
- opts, args = getopt.do_longs([], 'abc', ['abc'], [])
- self.assertEqual(opts, [('--abc', '')])
- self.assertEqual(args, [])
-
- opts, args = getopt.do_longs([], 'abc=1', ['abc='], [])
- self.assertEqual(opts, [('--abc', '1')])
- self.assertEqual(args, [])
-
- opts, args = getopt.do_longs([], 'abc=1', ['abcd='], [])
- self.assertEqual(opts, [('--abcd', '1')])
- self.assertEqual(args, [])
-
- opts, args = getopt.do_longs([], 'abc', ['ab', 'abc', 'abcd'], [])
- self.assertEqual(opts, [('--abc', '')])
- self.assertEqual(args, [])
-
- # Much like the preceding, except with a non-alpha character ("-") in
- # option name that precedes "="; failed in
- # http://python.org/sf/126863
- opts, args = getopt.do_longs([], 'foo=42', ['foo-bar', 'foo=',], [])
- self.assertEqual(opts, [('--foo', '42')])
- self.assertEqual(args, [])
-
- self.assertError(getopt.do_longs, [], 'abc=1', ['abc'], [])
- self.assertError(getopt.do_longs, [], 'abc', ['abc='], [])
-
- def test_getopt(self) -> None:
- # note: the empty string between '-a' and '--beta' is significant:
- # it simulates an empty string option argument ('-a ""') on the
- # command line.
- cmdline = ['-a', '1', '-b', '--alpha=2', '--beta', '-a', '3', '-a',
- '', '--beta', 'arg1', 'arg2']
-
- opts, args = getopt.getopt(cmdline, 'a:b', ['alpha=', 'beta'])
- self.assertEqual(opts, [('-a', '1'), ('-b', ''),
- ('--alpha', '2'), ('--beta', ''),
- ('-a', '3'), ('-a', ''), ('--beta', '')])
- # Note ambiguity of ('-b', '') and ('-a', '') above. This must be
- # accounted for in the code that calls getopt().
- self.assertEqual(args, ['arg1', 'arg2'])
-
- self.assertError(getopt.getopt, cmdline, 'a:b', ['alpha', 'beta'])
-
- def test_gnu_getopt(self) -> None:
- # Test handling of GNU style scanning mode.
- cmdline = ['-a', 'arg1', '-b', '1', '--alpha', '--beta=2']
-
- # GNU style
- opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta='])
- self.assertEqual(args, ['arg1'])
- self.assertEqual(opts, [('-a', ''), ('-b', '1'),
- ('--alpha', ''), ('--beta', '2')])
-
- # recognize "-" as an argument
- opts, args = getopt.gnu_getopt(['-a', '-', '-b', '-'], 'ab:', [])
- self.assertEqual(args, ['-'])
- self.assertEqual(opts, [('-a', ''), ('-b', '-')])
-
- # Posix style via +
- opts, args = getopt.gnu_getopt(cmdline, '+ab:', ['alpha', 'beta='])
- self.assertEqual(opts, [('-a', '')])
- self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2'])
-
- # Posix style via POSIXLY_CORRECT
- self.env["POSIXLY_CORRECT"] = "1"
- opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta='])
- self.assertEqual(opts, [('-a', '')])
- self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2'])
-
- def test_libref_examples(self) -> None:
- s = """
- Examples from the Library Reference: Doc/lib/libgetopt.tex
-
- An example using only Unix style options:
-
-
- >>> import getopt
- >>> args = '-a -b -cfoo -d bar a1 a2'.split()
- >>> args
- ['-a', '-b', '-cfoo', '-d', 'bar', 'a1', 'a2']
- >>> optlist, args = getopt.getopt(args, 'abc:d:')
- >>> optlist
- [('-a', ''), ('-b', ''), ('-c', 'foo'), ('-d', 'bar')]
- >>> args
- ['a1', 'a2']
-
- Using long option names is equally easy:
-
-
- >>> s = '--condition=foo --testing --output-file abc.def -x a1 a2'
- >>> args = s.split()
- >>> args
- ['--condition=foo', '--testing', '--output-file', 'abc.def', '-x', 'a1', 'a2']
- >>> optlist, args = getopt.getopt(args, 'x', [
- ... 'condition=', 'output-file=', 'testing'])
- >>> optlist
- [('--condition', 'foo'), ('--testing', ''), ('--output-file', 'abc.def'), ('-x', '')]
- >>> args
- ['a1', 'a2']
- """
-
- import types
- m = types.ModuleType("libreftest", s)
- run_doctest(m, verbose)
-
- def test_issue4629(self) -> None:
- longopts, shortopts = getopt.getopt(['--help='], '', ['help='])
- self.assertEqual(longopts, [('--help', '')])
- longopts, shortopts = getopt.getopt(['--help=x'], '', ['help='])
- self.assertEqual(longopts, [('--help', 'x')])
- self.assertRaises(getopt.GetoptError, getopt.getopt, ['--help='], '', ['help'])
-
-def test_main() -> None:
- run_unittest(GetoptTests)
-
-if __name__ == "__main__":
- test_main()
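
For reference, the short-option example from the doctest above, runnable on
its own:

import getopt

args = '-a -b -cfoo -d bar a1 a2'.split()
optlist, rest = getopt.getopt(args, 'abc:d:')
assert optlist == [('-a', ''), ('-b', ''), ('-c', 'foo'), ('-d', 'bar')]
assert rest == ['a1', 'a2']
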
diff --git a/test-data/stdlib-samples/3.2/test/test_glob.py b/test-data/stdlib-samples/3.2/test/test_glob.py
deleted file mode 100644
index 08c8932..0000000
--- a/test-data/stdlib-samples/3.2/test/test_glob.py
+++ /dev/null
@@ -1,122 +0,0 @@
-import unittest
-from test.support import run_unittest, TESTFN, skip_unless_symlink, can_symlink
-import glob
-import os
-import shutil
-
-from typing import TypeVar, Iterable, List, cast
-
-T = TypeVar('T')
-
-class GlobTests(unittest.TestCase):
-
- tempdir = ''
-
- # JLe: work around mypy issue #231
- def norm(self, first: str, *parts: str) -> str:
- return os.path.normpath(os.path.join(self.tempdir, first, *parts))
-
- def mktemp(self, *parts: str) -> None:
- filename = self.norm(*parts)
- base, file = os.path.split(filename)
- if not os.path.exists(base):
- os.makedirs(base)
- f = open(filename, 'w')
- f.close()
-
- def setUp(self) -> None:
- self.tempdir = TESTFN+"_dir"
- self.mktemp('a', 'D')
- self.mktemp('aab', 'F')
- self.mktemp('aaa', 'zzzF')
- self.mktemp('ZZZ')
- self.mktemp('a', 'bcd', 'EF')
- self.mktemp('a', 'bcd', 'efg', 'ha')
- if can_symlink():
- os.symlink(self.norm('broken'), self.norm('sym1'))
- os.symlink(self.norm('broken'), self.norm('sym2'))
-
- def tearDown(self) -> None:
- shutil.rmtree(self.tempdir)
-
- def glob(self, *parts: str) -> List[str]:
- if len(parts) == 1:
- pattern = parts[0]
- else:
- pattern = os.path.join(*parts)
- p = os.path.join(self.tempdir, pattern)
- res = glob.glob(p)
- self.assertEqual(list(glob.iglob(p)), res)
- return res
-
- def assertSequencesEqual_noorder(self, l1: Iterable[T],
- l2: Iterable[T]) -> None:
- self.assertEqual(set(l1), set(l2))
-
- def test_glob_literal(self) -> None:
- eq = self.assertSequencesEqual_noorder
- eq(self.glob('a'), [self.norm('a')])
- eq(self.glob('a', 'D'), [self.norm('a', 'D')])
- eq(self.glob('aab'), [self.norm('aab')])
- eq(self.glob('zymurgy'), cast(List[str], [])) # JLe: work around #230
-
- # test return types are unicode, but only if os.listdir
- # returns unicode filenames
- uniset = set([str])
- tmp = os.listdir('.')
- if set(type(x) for x in tmp) == uniset:
- u1 = glob.glob('*')
- u2 = glob.glob('./*')
- self.assertEqual(set(type(r) for r in u1), uniset)
- self.assertEqual(set(type(r) for r in u2), uniset)
-
- def test_glob_one_directory(self) -> None:
- eq = self.assertSequencesEqual_noorder
- eq(self.glob('a*'), map(self.norm, ['a', 'aab', 'aaa']))
- eq(self.glob('*a'), map(self.norm, ['a', 'aaa']))
- eq(self.glob('aa?'), map(self.norm, ['aaa', 'aab']))
- eq(self.glob('aa[ab]'), map(self.norm, ['aaa', 'aab']))
- eq(self.glob('*q'), cast(List[str], [])) # JLe: work around #230
-
- def test_glob_nested_directory(self) -> None:
- eq = self.assertSequencesEqual_noorder
- if os.path.normcase("abCD") == "abCD":
- # case-sensitive filesystem
- eq(self.glob('a', 'bcd', 'E*'), [self.norm('a', 'bcd', 'EF')])
- else:
- # case insensitive filesystem
- eq(self.glob('a', 'bcd', 'E*'), [self.norm('a', 'bcd', 'EF'),
- self.norm('a', 'bcd', 'efg')])
- eq(self.glob('a', 'bcd', '*g'), [self.norm('a', 'bcd', 'efg')])
-
- def test_glob_directory_names(self) -> None:
- eq = self.assertSequencesEqual_noorder
- eq(self.glob('*', 'D'), [self.norm('a', 'D')])
- eq(self.glob('*', '*a'), cast(List[str], [])) # JLe: work around #230
- eq(self.glob('a', '*', '*', '*a'),
- [self.norm('a', 'bcd', 'efg', 'ha')])
- eq(self.glob('?a?', '*F'), map(self.norm, [os.path.join('aaa', 'zzzF'),
- os.path.join('aab', 'F')]))
-
- def test_glob_directory_with_trailing_slash(self) -> None:
- # Verify that a wildcard pattern ending with os.sep doesn't blow up.
- res = glob.glob(self.tempdir + '*' + os.sep)
- self.assertEqual(len(res), 1)
- # either of these results are reasonable
- self.assertIn(res[0], [self.tempdir, self.tempdir + os.sep])
-
- @skip_unless_symlink
- def test_glob_broken_symlinks(self) -> None:
- eq = self.assertSequencesEqual_noorder
- eq(self.glob('sym*'), [self.norm('sym1'), self.norm('sym2')])
- eq(self.glob('sym1'), [self.norm('sym1')])
- eq(self.glob('sym2'), [self.norm('sym2')])
-
-
-def test_main() -> None:
- run_unittest(GlobTests)
-
-
-if __name__ == "__main__":
- test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_posixpath.py b/test-data/stdlib-samples/3.2/test/test_posixpath.py
deleted file mode 100644
index de98975..0000000
--- a/test-data/stdlib-samples/3.2/test/test_posixpath.py
+++ /dev/null
@@ -1,531 +0,0 @@
-import unittest
-from test import support, test_genericpath
-
-import posixpath
-import genericpath
-
-import imp
-imp.reload(posixpath) # Make sure we are using the local copy
-imp.reload(genericpath)
-
-import os
-import sys
-from posixpath import realpath, abspath, dirname, basename
-
-import posix
-from typing import cast, Any, TypeVar, Callable
-
-T = TypeVar('T')
-
-# An absolute path to a temporary filename for testing. We can't rely on TESTFN
-# being an absolute path, so we need this.
-
-ABSTFN = abspath(support.TESTFN)
-
-def skip_if_ABSTFN_contains_backslash(
- test: Callable[[T], None]) -> Callable[[T], None]:
- """
- On Windows, posixpath.abspath still returns paths with backslashes
- instead of posix forward slashes. If this is the case, several tests
- fail, so skip them.
- """
- found_backslash = '\\' in ABSTFN
- msg = "ABSTFN is not a posix path - tests fail"
- return [test, unittest.skip(msg)(test)][found_backslash]
-
-def safe_rmdir(dirname: str) -> None:
- try:
- os.rmdir(dirname)
- except OSError:
- pass
-
-class PosixPathTest(unittest.TestCase):
-
- def setUp(self) -> None:
- self.tearDown()
-
- def tearDown(self) -> None:
- for suffix in ["", "1", "2"]:
- support.unlink(support.TESTFN + suffix)
- safe_rmdir(support.TESTFN + suffix)
-
- def test_join(self) -> None:
- self.assertEqual(posixpath.join("/foo", "bar", "/bar", "baz"),
- "/bar/baz")
- self.assertEqual(posixpath.join("/foo", "bar", "baz"), "/foo/bar/baz")
- self.assertEqual(posixpath.join("/foo/", "bar/", "baz/"),
- "/foo/bar/baz/")
-
- self.assertEqual(posixpath.join(b"/foo", b"bar", b"/bar", b"baz"),
- b"/bar/baz")
- self.assertEqual(posixpath.join(b"/foo", b"bar", b"baz"),
- b"/foo/bar/baz")
- self.assertEqual(posixpath.join(b"/foo/", b"bar/", b"baz/"),
- b"/foo/bar/baz/")
-
- self.assertRaises(TypeError, posixpath.join, b"bytes", "str")
- self.assertRaises(TypeError, posixpath.join, "str", b"bytes")
-
- def test_split(self) -> None:
- self.assertEqual(posixpath.split("/foo/bar"), ("/foo", "bar"))
- self.assertEqual(posixpath.split("/"), ("/", ""))
- self.assertEqual(posixpath.split("foo"), ("", "foo"))
- self.assertEqual(posixpath.split("////foo"), ("////", "foo"))
- self.assertEqual(posixpath.split("//foo//bar"), ("//foo", "bar"))
-
- self.assertEqual(posixpath.split(b"/foo/bar"), (b"/foo", b"bar"))
- self.assertEqual(posixpath.split(b"/"), (b"/", b""))
- self.assertEqual(posixpath.split(b"foo"), (b"", b"foo"))
- self.assertEqual(posixpath.split(b"////foo"), (b"////", b"foo"))
- self.assertEqual(posixpath.split(b"//foo//bar"), (b"//foo", b"bar"))
-
- def splitextTest(self, path: str, filename: str, ext: str) -> None:
- self.assertEqual(posixpath.splitext(path), (filename, ext))
- self.assertEqual(posixpath.splitext("/" + path), ("/" + filename, ext))
- self.assertEqual(posixpath.splitext("abc/" + path),
- ("abc/" + filename, ext))
- self.assertEqual(posixpath.splitext("abc.def/" + path),
- ("abc.def/" + filename, ext))
- self.assertEqual(posixpath.splitext("/abc.def/" + path),
- ("/abc.def/" + filename, ext))
- self.assertEqual(posixpath.splitext(path + "/"),
- (filename + ext + "/", ""))
-
- pathb = bytes(path, "ASCII")
- filenameb = bytes(filename, "ASCII")
- extb = bytes(ext, "ASCII")
-
- self.assertEqual(posixpath.splitext(pathb), (filenameb, extb))
- self.assertEqual(posixpath.splitext(b"/" + pathb),
- (b"/" + filenameb, extb))
- self.assertEqual(posixpath.splitext(b"abc/" + pathb),
- (b"abc/" + filenameb, extb))
- self.assertEqual(posixpath.splitext(b"abc.def/" + pathb),
- (b"abc.def/" + filenameb, extb))
- self.assertEqual(posixpath.splitext(b"/abc.def/" + pathb),
- (b"/abc.def/" + filenameb, extb))
- self.assertEqual(posixpath.splitext(pathb + b"/"),
- (filenameb + extb + b"/", b""))
-
- def test_splitext(self) -> None:
- self.splitextTest("foo.bar", "foo", ".bar")
- self.splitextTest("foo.boo.bar", "foo.boo", ".bar")
- self.splitextTest("foo.boo.biff.bar", "foo.boo.biff", ".bar")
- self.splitextTest(".csh.rc", ".csh", ".rc")
- self.splitextTest("nodots", "nodots", "")
- self.splitextTest(".cshrc", ".cshrc", "")
- self.splitextTest("...manydots", "...manydots", "")
- self.splitextTest("...manydots.ext", "...manydots", ".ext")
- self.splitextTest(".", ".", "")
- self.splitextTest("..", "..", "")
- self.splitextTest("........", "........", "")
- self.splitextTest("", "", "")
-
- def test_isabs(self) -> None:
- self.assertIs(posixpath.isabs(""), False)
- self.assertIs(posixpath.isabs("/"), True)
- self.assertIs(posixpath.isabs("/foo"), True)
- self.assertIs(posixpath.isabs("/foo/bar"), True)
- self.assertIs(posixpath.isabs("foo/bar"), False)
-
- self.assertIs(posixpath.isabs(b""), False)
- self.assertIs(posixpath.isabs(b"/"), True)
- self.assertIs(posixpath.isabs(b"/foo"), True)
- self.assertIs(posixpath.isabs(b"/foo/bar"), True)
- self.assertIs(posixpath.isabs(b"foo/bar"), False)
-
- def test_basename(self) -> None:
- self.assertEqual(posixpath.basename("/foo/bar"), "bar")
- self.assertEqual(posixpath.basename("/"), "")
- self.assertEqual(posixpath.basename("foo"), "foo")
- self.assertEqual(posixpath.basename("////foo"), "foo")
- self.assertEqual(posixpath.basename("//foo//bar"), "bar")
-
- self.assertEqual(posixpath.basename(b"/foo/bar"), b"bar")
- self.assertEqual(posixpath.basename(b"/"), b"")
- self.assertEqual(posixpath.basename(b"foo"), b"foo")
- self.assertEqual(posixpath.basename(b"////foo"), b"foo")
- self.assertEqual(posixpath.basename(b"//foo//bar"), b"bar")
-
- def test_dirname(self) -> None:
- self.assertEqual(posixpath.dirname("/foo/bar"), "/foo")
- self.assertEqual(posixpath.dirname("/"), "/")
- self.assertEqual(posixpath.dirname("foo"), "")
- self.assertEqual(posixpath.dirname("////foo"), "////")
- self.assertEqual(posixpath.dirname("//foo//bar"), "//foo")
-
- self.assertEqual(posixpath.dirname(b"/foo/bar"), b"/foo")
- self.assertEqual(posixpath.dirname(b"/"), b"/")
- self.assertEqual(posixpath.dirname(b"foo"), b"")
- self.assertEqual(posixpath.dirname(b"////foo"), b"////")
- self.assertEqual(posixpath.dirname(b"//foo//bar"), b"//foo")
-
- def test_islink(self) -> None:
- self.assertIs(posixpath.islink(support.TESTFN + "1"), False)
- self.assertIs(posixpath.lexists(support.TESTFN + "2"), False)
- f = open(support.TESTFN + "1", "wb")
- try:
- f.write(b"foo")
- f.close()
- self.assertIs(posixpath.islink(support.TESTFN + "1"), False)
- if support.can_symlink():
- os.symlink(support.TESTFN + "1", support.TESTFN + "2")
- self.assertIs(posixpath.islink(support.TESTFN + "2"), True)
- os.remove(support.TESTFN + "1")
- self.assertIs(posixpath.islink(support.TESTFN + "2"), True)
- self.assertIs(posixpath.exists(support.TESTFN + "2"), False)
- self.assertIs(posixpath.lexists(support.TESTFN + "2"), True)
- finally:
- if not f.closed:
- f.close()
-
- @staticmethod
- def _create_file(filename: str) -> None:
- with open(filename, 'wb') as f:
- f.write(b'foo')
-
- def test_samefile(self) -> None:
- test_fn = support.TESTFN + "1"
- self._create_file(test_fn)
- self.assertTrue(posixpath.samefile(test_fn, test_fn))
- self.assertRaises(TypeError, posixpath.samefile)
-
- @unittest.skipIf(
- sys.platform.startswith('win'),
- "posixpath.samefile does not work on links in Windows")
- @unittest.skipUnless(hasattr(os, "symlink"),
- "Missing symlink implementation")
- def test_samefile_on_links(self) -> None:
- test_fn1 = support.TESTFN + "1"
- test_fn2 = support.TESTFN + "2"
- self._create_file(test_fn1)
-
- os.symlink(test_fn1, test_fn2)
- self.assertTrue(posixpath.samefile(test_fn1, test_fn2))
- os.remove(test_fn2)
-
- self._create_file(test_fn2)
- self.assertFalse(posixpath.samefile(test_fn1, test_fn2))
-
-
- def test_samestat(self) -> None:
- test_fn = support.TESTFN + "1"
- self._create_file(test_fn)
- test_fns = [test_fn]*2
- stats = map(os.stat, test_fns)
- self.assertTrue(posixpath.samestat(*stats))
-
- @unittest.skipIf(
- sys.platform.startswith('win'),
- "posixpath.samestat does not work on links in Windows")
- @unittest.skipUnless(hasattr(os, "symlink"),
- "Missing symlink implementation")
- def test_samestat_on_links(self) -> None:
- test_fn1 = support.TESTFN + "1"
- test_fn2 = support.TESTFN + "2"
- self._create_file(test_fn1)
- test_fns = [test_fn1, test_fn2]
- cast(Any, os.symlink)(*test_fns)
- stats = map(os.stat, test_fns)
- self.assertTrue(posixpath.samestat(*stats))
- os.remove(test_fn2)
-
- self._create_file(test_fn2)
- stats = map(os.stat, test_fns)
- self.assertFalse(posixpath.samestat(*stats))
-
- self.assertRaises(TypeError, posixpath.samestat)
-
- def test_ismount(self) -> None:
- self.assertIs(posixpath.ismount("/"), True)
- self.assertIs(posixpath.ismount(b"/"), True)
-
- def test_ismount_non_existent(self) -> None:
- # Non-existent mountpoint.
- self.assertIs(posixpath.ismount(ABSTFN), False)
- try:
- os.mkdir(ABSTFN)
- self.assertIs(posixpath.ismount(ABSTFN), False)
- finally:
- safe_rmdir(ABSTFN)
-
- @unittest.skipUnless(support.can_symlink(),
- "Test requires symlink support")
- def test_ismount_symlinks(self) -> None:
- # Symlinks are never mountpoints.
- try:
- os.symlink("/", ABSTFN)
- self.assertIs(posixpath.ismount(ABSTFN), False)
- finally:
- os.unlink(ABSTFN)
-
- @unittest.skipIf(posix is None, "Test requires posix module")
- def test_ismount_different_device(self) -> None:
- # Simulate the path being on a different device from its parent by
- # mocking out st_dev.
- save_lstat = os.lstat
- def fake_lstat(path):
- st_ino = 0
- st_dev = 0
- if path == ABSTFN:
- st_dev = 1
- st_ino = 1
- return posix.stat_result((0, st_ino, st_dev, 0, 0, 0, 0, 0, 0, 0))
- try:
- setattr(os, 'lstat', fake_lstat) # mypy: can't modify os directly
- self.assertIs(posixpath.ismount(ABSTFN), True)
- finally:
- setattr(os, 'lstat', save_lstat)
-
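The fake_lstat trick above works because posixpath.ismount decides whether a path is a mount point by comparing what lstat() reports for the path and for its parent directory; forcing a different st_dev therefore makes any path look like a mount. A rough sketch of that comparison, under the assumption that this is essentially what the library does (looks_like_mount is my name, not a stdlib one):

    import os
    import os.path

    def looks_like_mount(path: str) -> bool:
        # Symlinks are never mount points (see test_ismount_symlinks above).
        if os.path.islink(path):
            return False
        try:
            s1 = os.lstat(path)
            s2 = os.lstat(os.path.join(path, ".."))
        except OSError:
            return False
        if s1.st_dev != s2.st_dev:
            return True   # parent lives on a different device -> mount point
        if s1.st_ino == s2.st_ino:
            return True   # path and its parent are the same directory, e.g. "/"
        return False
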
- def test_expanduser(self) -> None:
- self.assertEqual(posixpath.expanduser("foo"), "foo")
- self.assertEqual(posixpath.expanduser(b"foo"), b"foo")
- try:
- import pwd
- except ImportError:
- pass
- else:
- self.assertIsInstance(posixpath.expanduser("~/"), str)
- self.assertIsInstance(posixpath.expanduser(b"~/"), bytes)
- # if home directory == root directory, this test makes no sense
- if posixpath.expanduser("~") != '/':
- self.assertEqual(
- posixpath.expanduser("~") + "/",
- posixpath.expanduser("~/")
- )
- self.assertEqual(
- posixpath.expanduser(b"~") + b"/",
- posixpath.expanduser(b"~/")
- )
- self.assertIsInstance(posixpath.expanduser("~root/"), str)
- self.assertIsInstance(posixpath.expanduser("~foo/"), str)
- self.assertIsInstance(posixpath.expanduser(b"~root/"), bytes)
- self.assertIsInstance(posixpath.expanduser(b"~foo/"), bytes)
-
- with support.EnvironmentVarGuard() as env:
- env['HOME'] = '/'
- self.assertEqual(posixpath.expanduser("~"), "/")
- # expanduser should fall back to using the password database
- del env['HOME']
- home = pwd.getpwuid(os.getuid()).pw_dir
- self.assertEqual(posixpath.expanduser("~"), home)
-
- def test_normpath(self) -> None:
- self.assertEqual(posixpath.normpath(""), ".")
- self.assertEqual(posixpath.normpath("/"), "/")
- self.assertEqual(posixpath.normpath("//"), "//")
- self.assertEqual(posixpath.normpath("///"), "/")
- self.assertEqual(posixpath.normpath("///foo/.//bar//"), "/foo/bar")
- self.assertEqual(posixpath.normpath("///foo/.//bar//.//..//.//baz"),
- "/foo/baz")
- self.assertEqual(posixpath.normpath("///..//./foo/.//bar"), "/foo/bar")
-
- self.assertEqual(posixpath.normpath(b""), b".")
- self.assertEqual(posixpath.normpath(b"/"), b"/")
- self.assertEqual(posixpath.normpath(b"//"), b"//")
- self.assertEqual(posixpath.normpath(b"///"), b"/")
- self.assertEqual(posixpath.normpath(b"///foo/.//bar//"), b"/foo/bar")
- self.assertEqual(posixpath.normpath(b"///foo/.//bar//.//..//.//baz"),
- b"/foo/baz")
- self.assertEqual(posixpath.normpath(b"///..//./foo/.//bar"),
- b"/foo/bar")
-
- @unittest.skipUnless(hasattr(os, "symlink"),
- "Missing symlink implementation")
- @skip_if_ABSTFN_contains_backslash
- def test_realpath_basic(self) -> None:
- # Basic operation.
- try:
- os.symlink(ABSTFN+"1", ABSTFN)
- self.assertEqual(realpath(ABSTFN), ABSTFN+"1")
- finally:
- support.unlink(ABSTFN)
-
- @unittest.skipUnless(hasattr(os, "symlink"),
- "Missing symlink implementation")
- @skip_if_ABSTFN_contains_backslash
- def test_realpath_relative(self) -> None:
- try:
- os.symlink(posixpath.relpath(ABSTFN+"1"), ABSTFN)
- self.assertEqual(realpath(ABSTFN), ABSTFN+"1")
- finally:
- support.unlink(ABSTFN)
-
- @unittest.skipUnless(hasattr(os, "symlink"),
- "Missing symlink implementation")
- @skip_if_ABSTFN_contains_backslash
- def test_realpath_symlink_loops(self) -> None:
- # Bug #930024, return the path unchanged if we get into an infinite
- # symlink loop.
- try:
- old_path = abspath('.')
- os.symlink(ABSTFN, ABSTFN)
- self.assertEqual(realpath(ABSTFN), ABSTFN)
-
- os.symlink(ABSTFN+"1", ABSTFN+"2")
- os.symlink(ABSTFN+"2", ABSTFN+"1")
- self.assertEqual(realpath(ABSTFN+"1"), ABSTFN+"1")
- self.assertEqual(realpath(ABSTFN+"2"), ABSTFN+"2")
-
- # Test using relative path as well.
- os.chdir(dirname(ABSTFN))
- self.assertEqual(realpath(basename(ABSTFN)), ABSTFN)
- finally:
- os.chdir(old_path)
- support.unlink(ABSTFN)
- support.unlink(ABSTFN+"1")
- support.unlink(ABSTFN+"2")
-
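Outside the test harness the same behaviour is easy to reproduce: when resolution would loop forever, realpath simply stops and hands the path back. A small standalone sketch using temporary paths of my own choosing:

    import os
    import tempfile

    d = os.path.realpath(tempfile.mkdtemp())   # resolve the temp dir itself first
    loop = os.path.join(d, "loop")
    os.symlink(loop, loop)                     # a symlink that points at itself
    print(os.path.realpath(loop) == loop)      # True: the loop is left unresolved
    os.unlink(loop)
    os.rmdir(d)
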
- @unittest.skipUnless(hasattr(os, "symlink"),
- "Missing symlink implementation")
- @skip_if_ABSTFN_contains_backslash
- def test_realpath_resolve_parents(self) -> None:
- # We also need to resolve any symlinks in the parents of a relative
- # path passed to realpath. E.g.: current working directory is
- # /usr/doc with 'doc' being a symlink to /usr/share/doc. We call
- # realpath("a"). This should return /usr/share/doc/a/.
- try:
- old_path = abspath('.')
- os.mkdir(ABSTFN)
- os.mkdir(ABSTFN + "/y")
- os.symlink(ABSTFN + "/y", ABSTFN + "/k")
-
- os.chdir(ABSTFN + "/k")
- self.assertEqual(realpath("a"), ABSTFN + "/y/a")
- finally:
- os.chdir(old_path)
- support.unlink(ABSTFN + "/k")
- safe_rmdir(ABSTFN + "/y")
- safe_rmdir(ABSTFN)
-
- @unittest.skipUnless(hasattr(os, "symlink"),
- "Missing symlink implementation")
- @skip_if_ABSTFN_contains_backslash
- def test_realpath_resolve_before_normalizing(self) -> None:
- # Bug #990669: Symbolic links should be resolved before we
- # normalize the path. E.g.: if we have directories 'a', 'k' and 'y'
- # in the following hierarchy:
- # a/k/y
- #
- # and a symbolic link 'link-y' pointing to 'y' in directory 'a',
- # then realpath("link-y/..") should return 'k', not 'a'.
- try:
- old_path = abspath('.')
- os.mkdir(ABSTFN)
- os.mkdir(ABSTFN + "/k")
- os.mkdir(ABSTFN + "/k/y")
- os.symlink(ABSTFN + "/k/y", ABSTFN + "/link-y")
-
- # Absolute path.
- self.assertEqual(realpath(ABSTFN + "/link-y/.."), ABSTFN + "/k")
- # Relative path.
- os.chdir(dirname(ABSTFN))
- self.assertEqual(realpath(basename(ABSTFN) + "/link-y/.."),
- ABSTFN + "/k")
- finally:
- os.chdir(old_path)
- support.unlink(ABSTFN + "/link-y")
- safe_rmdir(ABSTFN + "/k/y")
- safe_rmdir(ABSTFN + "/k")
- safe_rmdir(ABSTFN)
-
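The point of the test above is ordering: pure string normalisation would happily collapse "link-y/.." before the symlink is ever looked at, which gives the wrong answer. A quick contrast using only posixpath.normpath:

    import posixpath

    # String-level normalisation ignores the filesystem entirely:
    print(posixpath.normpath("a/link-y/.."))   # -> 'a'
    # realpath instead resolves 'link-y' to 'a/k/y' first, so the '..'
    # lands in 'a/k' -- exactly what the assertions above check on disk.
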
- @unittest.skipUnless(hasattr(os, "symlink"),
- "Missing symlink implementation")
- @skip_if_ABSTFN_contains_backslash
- def test_realpath_resolve_first(self) -> None:
- # Bug #1213894: The first component of the path, if not absolute,
- # must be resolved too.
-
- try:
- old_path = abspath('.')
- os.mkdir(ABSTFN)
- os.mkdir(ABSTFN + "/k")
- os.symlink(ABSTFN, ABSTFN + "link")
- os.chdir(dirname(ABSTFN))
-
- base = basename(ABSTFN)
- self.assertEqual(realpath(base + "link"), ABSTFN)
- self.assertEqual(realpath(base + "link/k"), ABSTFN + "/k")
- finally:
- os.chdir(old_path)
- support.unlink(ABSTFN + "link")
- safe_rmdir(ABSTFN + "/k")
- safe_rmdir(ABSTFN)
-
- def test_relpath(self) -> None:
- real_getcwd = os.getcwd
- # mypy: can't modify os directly
- setattr(os, 'getcwd', lambda: r"/home/user/bar")
- try:
- curdir = os.path.split(os.getcwd())[-1]
- self.assertRaises(ValueError, posixpath.relpath, "")
- self.assertEqual(posixpath.relpath("a"), "a")
- self.assertEqual(posixpath.relpath(posixpath.abspath("a")), "a")
- self.assertEqual(posixpath.relpath("a/b"), "a/b")
- self.assertEqual(posixpath.relpath("../a/b"), "../a/b")
- self.assertEqual(posixpath.relpath("a", "../b"), "../"+curdir+"/a")
- self.assertEqual(posixpath.relpath("a/b", "../c"),
- "../"+curdir+"/a/b")
- self.assertEqual(posixpath.relpath("a", "b/c"), "../../a")
- self.assertEqual(posixpath.relpath("a", "a"), ".")
- self.assertEqual(posixpath.relpath("/foo/bar/bat", "/x/y/z"), '../../../foo/bar/bat')
- self.assertEqual(posixpath.relpath("/foo/bar/bat", "/foo/bar"), 'bat')
- self.assertEqual(posixpath.relpath("/foo/bar/bat", "/"), 'foo/bar/bat')
- self.assertEqual(posixpath.relpath("/", "/foo/bar/bat"), '../../..')
- self.assertEqual(posixpath.relpath("/foo/bar/bat", "/x"), '../foo/bar/bat')
- self.assertEqual(posixpath.relpath("/x", "/foo/bar/bat"), '../../../x')
- self.assertEqual(posixpath.relpath("/", "/"), '.')
- self.assertEqual(posixpath.relpath("/a", "/a"), '.')
- self.assertEqual(posixpath.relpath("/a/b", "/a/b"), '.')
- finally:
- setattr(os, 'getcwd', real_getcwd)
-
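The setattr() calls in the test above and the bytes variant below (flagged by the "can't modify os directly" comments) are only there to keep mypy happy: a direct assignment to an attribute of the os module is something mypy checks and rejects here, whereas setattr() is dynamic and passes through unchecked. A minimal illustration of the difference; the exact error text is whatever mypy reports for the direct assignment:

    import os

    # os.getcwd = lambda: "/home/user/bar"           # rejected by mypy
    setattr(os, "getcwd", lambda: "/home/user/bar")  # dynamic, so mypy accepts it
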
- def test_relpath_bytes(self) -> None:
- real_getcwdb = os.getcwdb
- # mypy: can't modify os directly
- setattr(os, 'getcwdb', lambda: br"/home/user/bar")
- try:
- curdir = os.path.split(os.getcwdb())[-1]
- self.assertRaises(ValueError, posixpath.relpath, b"")
- self.assertEqual(posixpath.relpath(b"a"), b"a")
- self.assertEqual(posixpath.relpath(posixpath.abspath(b"a")), b"a")
- self.assertEqual(posixpath.relpath(b"a/b"), b"a/b")
- self.assertEqual(posixpath.relpath(b"../a/b"), b"../a/b")
- self.assertEqual(posixpath.relpath(b"a", b"../b"),
- b"../"+curdir+b"/a")
- self.assertEqual(posixpath.relpath(b"a/b", b"../c"),
- b"../"+curdir+b"/a/b")
- self.assertEqual(posixpath.relpath(b"a", b"b/c"), b"../../a")
- self.assertEqual(posixpath.relpath(b"a", b"a"), b".")
- self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/x/y/z"), b'../../../foo/bar/bat')
- self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/foo/bar"), b'bat')
- self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/"), b'foo/bar/bat')
- self.assertEqual(posixpath.relpath(b"/", b"/foo/bar/bat"), b'../../..')
- self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/x"), b'../foo/bar/bat')
- self.assertEqual(posixpath.relpath(b"/x", b"/foo/bar/bat"), b'../../../x')
- self.assertEqual(posixpath.relpath(b"/", b"/"), b'.')
- self.assertEqual(posixpath.relpath(b"/a", b"/a"), b'.')
- self.assertEqual(posixpath.relpath(b"/a/b", b"/a/b"), b'.')
-
- self.assertRaises(TypeError, posixpath.relpath, b"bytes", "str")
- self.assertRaises(TypeError, posixpath.relpath, "str", b"bytes")
- finally:
- setattr(os, 'getcwdb', real_getcwdb)
-
- def test_sameopenfile(self) -> None:
- fname = support.TESTFN + "1"
- with open(fname, "wb") as a, open(fname, "wb") as b:
- self.assertTrue(posixpath.sameopenfile(a.fileno(), b.fileno()))
-
-
-class PosixCommonTest(test_genericpath.CommonTest):
- pathmodule = posixpath
- attributes = ['relpath', 'samefile', 'sameopenfile', 'samestat']
-
-
-def test_main() -> None:
- support.run_unittest(PosixPathTest, PosixCommonTest)
-
-
-if __name__=="__main__":
- test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_pprint.py b/test-data/stdlib-samples/3.2/test/test_pprint.py
deleted file mode 100644
index cf54ebd..0000000
--- a/test-data/stdlib-samples/3.2/test/test_pprint.py
+++ /dev/null
@@ -1,488 +0,0 @@
-import pprint
-import test.support
-import unittest
-import test.test_set
-import random
-import collections
-import itertools
-
-from typing import List, Any, Dict, Tuple, cast, Callable
-
- # list, tuple and dict subclasses that do or don't override __repr__
-class list2(list):
- pass
-
-class list3(list):
- def __repr__(self) -> str:
- return list.__repr__(self)
-
-class tuple2(tuple):
- pass
-
-class tuple3(tuple):
- def __repr__(self) -> str:
- return tuple.__repr__(self)
-
-class dict2(dict):
- pass
-
-class dict3(dict):
- def __repr__(self) -> str:
- return dict.__repr__(self)
-
-class Unorderable:
- def __repr__(self) -> str:
- return str(id(self))
-
-class QueryTestCase(unittest.TestCase):
-
- def setUp(self) -> None:
- self.a = list(range(100)) # type: List[Any]
- self.b = list(range(200)) # type: List[Any]
- self.a[-12] = self.b
-
- def test_basic(self) -> None:
- # Verify .isrecursive() and .isreadable() w/o recursion
- pp = pprint.PrettyPrinter()
- for safe in (2, 2.0, complex(0.0, 2.0), "abc", [3], (2,2), {3: 3}, "yaddayadda",
- self.a, self.b):
- # module-level convenience functions
- self.assertFalse(pprint.isrecursive(safe),
- "expected not isrecursive for %r" % (safe,))
- self.assertTrue(pprint.isreadable(safe),
- "expected isreadable for %r" % (safe,))
- # PrettyPrinter methods
- self.assertFalse(pp.isrecursive(safe),
- "expected not isrecursive for %r" % (safe,))
- self.assertTrue(pp.isreadable(safe),
- "expected isreadable for %r" % (safe,))
-
- def test_knotted(self) -> None:
- # Verify .isrecursive() and .isreadable() w/ recursion
- # Tie a knot.
- self.b[67] = self.a
- # Messy dict.
- self.d = {} # type: Dict[int, dict]
- self.d[0] = self.d[1] = self.d[2] = self.d
-
- pp = pprint.PrettyPrinter()
-
- for icky in self.a, self.b, self.d, (self.d, self.d):
- self.assertTrue(pprint.isrecursive(icky), "expected isrecursive")
- self.assertFalse(pprint.isreadable(icky), "expected not isreadable")
- self.assertTrue(pp.isrecursive(icky), "expected isrecursive")
- self.assertFalse(pp.isreadable(icky), "expected not isreadable")
-
- # Break the cycles.
- self.d.clear()
- del self.a[:]
- del self.b[:]
-
- for safe in self.a, self.b, self.d, (self.d, self.d):
- # module-level convenience functions
- self.assertFalse(pprint.isrecursive(safe),
- "expected not isrecursive for %r" % (safe,))
- self.assertTrue(pprint.isreadable(safe),
- "expected isreadable for %r" % (safe,))
- # PrettyPrinter methods
- self.assertFalse(pp.isrecursive(safe),
- "expected not isrecursive for %r" % (safe,))
- self.assertTrue(pp.isreadable(safe),
- "expected isreadable for %r" % (safe,))
-
- def test_unreadable(self) -> None:
- # Not recursive but not readable anyway
- pp = pprint.PrettyPrinter()
- for unreadable in type(3), pprint, pprint.isrecursive:
- # module-level convenience functions
- self.assertFalse(pprint.isrecursive(unreadable),
- "expected not isrecursive for %r" % (unreadable,))
- self.assertFalse(pprint.isreadable(unreadable),
- "expected not isreadable for %r" % (unreadable,))
- # PrettyPrinter methods
- self.assertFalse(pp.isrecursive(unreadable),
- "expected not isrecursive for %r" % (unreadable,))
- self.assertFalse(pp.isreadable(unreadable),
- "expected not isreadable for %r" % (unreadable,))
-
- def test_same_as_repr(self) -> None:
- # Simple objects, small containers and classes that override __repr__
- # For those the result should be the same as repr().
- # Ahem. The docs don't say anything about that -- this appears to
- # be testing an implementation quirk. Starting in Python 2.5, it's
- # not true for dicts: pprint always sorts dicts by key now; before,
- # it sorted a dict display if and only if the display required
- # multiple lines. For that reason, dicts with more than one element
- # aren't tested here.
- for simple in (0, 0, complex(0.0), 0.0, "", b"",
- (), tuple2(), tuple3(),
- [], list2(), list3(),
- {}, dict2(), dict3(),
- self.assertTrue, pprint,
- -6, -6, complex(-6.,-6.), -1.5, "x", b"x", (3,), [3], {3: 6},
- (1,2), [3,4], {5: 6},
- tuple2((1,2)), tuple3((1,2)), tuple3(range(100)), # type: ignore
- [3,4], list2(cast(Any, [3,4])), list3(cast(Any, [3,4])),
- list3(cast(Any, range(100))), dict2(cast(Any, {5: 6})),
- dict3(cast(Any, {5: 6})), # JLe: work around mypy issue #233
- range(10, -11, -1)
- ):
- native = repr(simple)
- for function in "pformat", "saferepr":
- f = getattr(pprint, function)
- got = f(simple)
- self.assertEqual(native, got,
- "expected %s got %s from pprint.%s" %
- (native, got, function))
-
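The caveat in the comment above is easy to see on a current CPython, where plain dicts keep insertion order but pprint still sorts keys by default, so pformat() and repr() diverge as soon as a dict has more than one key in unsorted order:

    import pprint

    d = {"b": 2, "a": 1}
    print(repr(d))             # {'b': 2, 'a': 1}  (insertion order)
    print(pprint.pformat(d))   # {'a': 1, 'b': 2}  (sorted by key)
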
- def test_basic_line_wrap(self) -> None:
- # verify basic line-wrapping operation
- o = {'RPM_cal': 0,
- 'RPM_cal2': 48059,
- 'Speed_cal': 0,
- 'controldesk_runtime_us': 0,
- 'main_code_runtime_us': 0,
- 'read_io_runtime_us': 0,
- 'write_io_runtime_us': 43690}
- exp = """\
-{'RPM_cal': 0,
- 'RPM_cal2': 48059,
- 'Speed_cal': 0,
- 'controldesk_runtime_us': 0,
- 'main_code_runtime_us': 0,
- 'read_io_runtime_us': 0,
- 'write_io_runtime_us': 43690}"""
- # JLe: work around mypy issue #232
- for type in cast(List[Any], [dict, dict2]):
- self.assertEqual(pprint.pformat(type(o)), exp)
-
- o2 = range(100)
- exp = '[%s]' % ',\n '.join(map(str, o2))
- for type in cast(List[Any], [list, list2]):
- self.assertEqual(pprint.pformat(type(o2)), exp)
-
- o3 = tuple(range(100))
- exp = '(%s)' % ',\n '.join(map(str, o3))
- for type in cast(List[Any], [tuple, tuple2]):
- self.assertEqual(pprint.pformat(type(o3)), exp)
-
- # indent parameter
- o4 = range(100)
- exp = '[ %s]' % ',\n '.join(map(str, o4))
- for type in cast(List[Any], [list, list2]):
- self.assertEqual(pprint.pformat(type(o4), indent=4), exp)
-
- def test_nested_indentations(self) -> None:
- o1 = list(range(10))
- o2 = {'first':1, 'second':2, 'third':3}
- o = [o1, o2]
- expected = """\
-[ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
- { 'first': 1,
- 'second': 2,
- 'third': 3}]"""
- self.assertEqual(pprint.pformat(o, indent=4, width=42), expected)
-
- def test_sorted_dict(self) -> None:
- # Starting in Python 2.5, pprint sorts dict displays by key regardless
- # of how small the dictionary may be.
- # Before the change, on 32-bit Windows pformat() gave order
- # 'a', 'c', 'b' here, so this test failed.
- d = {'a': 1, 'b': 1, 'c': 1}
- self.assertEqual(pprint.pformat(d), "{'a': 1, 'b': 1, 'c': 1}")
- self.assertEqual(pprint.pformat([d, d]),
- "[{'a': 1, 'b': 1, 'c': 1}, {'a': 1, 'b': 1, 'c': 1}]")
-
- # The next one is kind of goofy. The sorted order depends on the
- # alphabetic order of type names: "int" < "str" < "tuple". Before
- # Python 2.5, this was in the test_same_as_repr() test. It's worth
- # keeping around for now because it's one of the few tests of pprint
- # against a crazy mix of types.
- self.assertEqual(pprint.pformat({"xy\tab\n": (3,), 5: [[]], (): {}}),
- r"{5: [[]], 'xy\tab\n': (3,), (): {}}")
-
- def test_ordered_dict(self) -> None:
- words = 'the quick brown fox jumped over a lazy dog'.split()
- d = collections.OrderedDict(zip(words, itertools.count()))
- self.assertEqual(pprint.pformat(d),
-"""\
-{'the': 0,
- 'quick': 1,
- 'brown': 2,
- 'fox': 3,
- 'jumped': 4,
- 'over': 5,
- 'a': 6,
- 'lazy': 7,
- 'dog': 8}""")
-
- def test_subclassing(self) -> None:
- o = {'names with spaces': 'should be presented using repr()',
- 'others.should.not.be': 'like.this'}
- exp = """\
-{'names with spaces': 'should be presented using repr()',
- others.should.not.be: like.this}"""
- self.assertEqual(DottedPrettyPrinter().pformat(o), exp)
-
- @test.support.cpython_only
- def test_set_reprs(self) -> None:
- # This test creates a complex arrangement of frozensets and
- # compares the pretty-printed repr against a string hard-coded in
- # the test. The hard-coded repr depends on the sort order of
- # frozensets.
- #
- # However, as the docs point out: "Since sets only define
- # partial ordering (subset relationships), the output of the
- # list.sort() method is undefined for lists of sets."
- #
- # In a nutshell, the test assumes frozenset({0}) will always
- # sort before frozenset({1}), but:
- #
- # >>> frozenset({0}) < frozenset({1})
- # False
- # >>> frozenset({1}) < frozenset({0})
- # False
- #
- # Consequently, this test is fragile and
- # implementation-dependent. Small changes to Python's sort
- # algorithm cause the test to fail when it should pass.
-
- self.assertEqual(pprint.pformat(set()), 'set()')
- self.assertEqual(pprint.pformat(set(range(3))), '{0, 1, 2}')
- self.assertEqual(pprint.pformat(frozenset()), 'frozenset()')
- self.assertEqual(pprint.pformat(frozenset(range(3))), 'frozenset({0, 1, 2})')
- cube_repr_tgt = """\
-{frozenset(): frozenset({frozenset({2}), frozenset({0}), frozenset({1})}),
- frozenset({0}): frozenset({frozenset(),
- frozenset({0, 2}),
- frozenset({0, 1})}),
- frozenset({1}): frozenset({frozenset(),
- frozenset({1, 2}),
- frozenset({0, 1})}),
- frozenset({2}): frozenset({frozenset(),
- frozenset({1, 2}),
- frozenset({0, 2})}),
- frozenset({1, 2}): frozenset({frozenset({2}),
- frozenset({1}),
- frozenset({0, 1, 2})}),
- frozenset({0, 2}): frozenset({frozenset({2}),
- frozenset({0}),
- frozenset({0, 1, 2})}),
- frozenset({0, 1}): frozenset({frozenset({0}),
- frozenset({1}),
- frozenset({0, 1, 2})}),
- frozenset({0, 1, 2}): frozenset({frozenset({1, 2}),
- frozenset({0, 2}),
- frozenset({0, 1})})}"""
- cube = test.test_set.cube(3)
- self.assertEqual(pprint.pformat(cube), cube_repr_tgt)
- cubo_repr_tgt = """\
-{frozenset({frozenset({0, 2}), frozenset({0})}): frozenset({frozenset({frozenset({0,
- 2}),
- frozenset({0,
- 1,
- 2})}),
- frozenset({frozenset({0}),
- frozenset({0,
- 1})}),
- frozenset({frozenset(),
- frozenset({0})}),
- frozenset({frozenset({2}),
- frozenset({0,
- 2})})}),
- frozenset({frozenset({0, 1}), frozenset({1})}): frozenset({frozenset({frozenset({0,
- 1}),
- frozenset({0,
- 1,
- 2})}),
- frozenset({frozenset({0}),
- frozenset({0,
- 1})}),
- frozenset({frozenset({1}),
- frozenset({1,
- 2})}),
- frozenset({frozenset(),
- frozenset({1})})}),
- frozenset({frozenset({1, 2}), frozenset({1})}): frozenset({frozenset({frozenset({1,
- 2}),
- frozenset({0,
- 1,
- 2})}),
- frozenset({frozenset({2}),
- frozenset({1,
- 2})}),
- frozenset({frozenset(),
- frozenset({1})}),
- frozenset({frozenset({1}),
- frozenset({0,
- 1})})}),
- frozenset({frozenset({1, 2}), frozenset({2})}): frozenset({frozenset({frozenset({1,
- 2}),
- frozenset({0,
- 1,
- 2})}),
- frozenset({frozenset({1}),
- frozenset({1,
- 2})}),
- frozenset({frozenset({2}),
- frozenset({0,
- 2})}),
- frozenset({frozenset(),
- frozenset({2})})}),
- frozenset({frozenset(), frozenset({0})}): frozenset({frozenset({frozenset({0}),
- frozenset({0,
- 1})}),
- frozenset({frozenset({0}),
- frozenset({0,
- 2})}),
- frozenset({frozenset(),
- frozenset({1})}),
- frozenset({frozenset(),
- frozenset({2})})}),
- frozenset({frozenset(), frozenset({1})}): frozenset({frozenset({frozenset(),
- frozenset({0})}),
- frozenset({frozenset({1}),
- frozenset({1,
- 2})}),
- frozenset({frozenset(),
- frozenset({2})}),
- frozenset({frozenset({1}),
- frozenset({0,
- 1})})}),
- frozenset({frozenset({2}), frozenset()}): frozenset({frozenset({frozenset({2}),
- frozenset({1,
- 2})}),
- frozenset({frozenset(),
- frozenset({0})}),
- frozenset({frozenset(),
- frozenset({1})}),
- frozenset({frozenset({2}),
- frozenset({0,
- 2})})}),
- frozenset({frozenset({0, 1, 2}), frozenset({0, 1})}): frozenset({frozenset({frozenset({1,
- 2}),
- frozenset({0,
- 1,
- 2})}),
- frozenset({frozenset({0,
- 2}),
- frozenset({0,
- 1,
- 2})}),
- frozenset({frozenset({0}),
- frozenset({0,
- 1})}),
- frozenset({frozenset({1}),
- frozenset({0,
- 1})})}),
- frozenset({frozenset({0}), frozenset({0, 1})}): frozenset({frozenset({frozenset(),
- frozenset({0})}),
- frozenset({frozenset({0,
- 1}),
- frozenset({0,
- 1,
- 2})}),
- frozenset({frozenset({0}),
- frozenset({0,
- 2})}),
- frozenset({frozenset({1}),
- frozenset({0,
- 1})})}),
- frozenset({frozenset({2}), frozenset({0, 2})}): frozenset({frozenset({frozenset({0,
- 2}),
- frozenset({0,
- 1,
- 2})}),
- frozenset({frozenset({2}),
- frozenset({1,
- 2})}),
- frozenset({frozenset({0}),
- frozenset({0,
- 2})}),
- frozenset({frozenset(),
- frozenset({2})})}),
- frozenset({frozenset({0, 1, 2}), frozenset({0, 2})}): frozenset({frozenset({frozenset({1,
- 2}),
- frozenset({0,
- 1,
- 2})}),
- frozenset({frozenset({0,
- 1}),
- frozenset({0,
- 1,
- 2})}),
- frozenset({frozenset({0}),
- frozenset({0,
- 2})}),
- frozenset({frozenset({2}),
- frozenset({0,
- 2})})}),
- frozenset({frozenset({1, 2}), frozenset({0, 1, 2})}): frozenset({frozenset({frozenset({0,
- 2}),
- frozenset({0,
- 1,
- 2})}),
- frozenset({frozenset({0,
- 1}),
- frozenset({0,
- 1,
- 2})}),
- frozenset({frozenset({2}),
- frozenset({1,
- 2})}),
- frozenset({frozenset({1}),
- frozenset({1,
- 2})})})}"""
-
- cubo = test.test_set.linegraph(cube)
- self.assertEqual(pprint.pformat(cubo), cubo_repr_tgt)
-
- def test_depth(self) -> None:
- nested_tuple = (1, (2, (3, (4, (5, 6)))))
- nested_dict = {1: {2: {3: {4: {5: {6: 6}}}}}}
- nested_list = [1, [2, [3, [4, [5, [6, []]]]]]]
- self.assertEqual(pprint.pformat(nested_tuple), repr(nested_tuple))
- self.assertEqual(pprint.pformat(nested_dict), repr(nested_dict))
- self.assertEqual(pprint.pformat(nested_list), repr(nested_list))
-
- lv1_tuple = '(1, (...))'
- lv1_dict = '{1: {...}}'
- lv1_list = '[1, [...]]'
- self.assertEqual(pprint.pformat(nested_tuple, depth=1), lv1_tuple)
- self.assertEqual(pprint.pformat(nested_dict, depth=1), lv1_dict)
- self.assertEqual(pprint.pformat(nested_list, depth=1), lv1_list)
-
- def test_sort_unorderable_values(self) -> None:
- # Issue 3976: sorted pprints fail for unorderable values.
- n = 20
- keys = [Unorderable() for i in range(n)]
- random.shuffle(keys)
- skeys = sorted(keys, key=id)
- clean = lambda s: s.replace(' ', '').replace('\n','') # type: Callable[[str], str]
-
- self.assertEqual(clean(pprint.pformat(set(keys))),
- '{' + ','.join(map(repr, skeys)) + '}')
- self.assertEqual(clean(pprint.pformat(frozenset(keys))),
- 'frozenset({' + ','.join(map(repr, skeys)) + '})')
- self.assertEqual(clean(pprint.pformat(dict.fromkeys(keys))),
- '{' + ','.join('%r:None' % k for k in skeys) + '}')
-
-class DottedPrettyPrinter(pprint.PrettyPrinter):
-
- def format(self, object: object, context: Dict[int, Any], maxlevels: int,
- level: int) -> Tuple[str, int, int]:
- if isinstance(object, str):
- if ' ' in object:
- return repr(object), 1, 0
- else:
- return object, 0, 0
- else:
- return pprint.PrettyPrinter.format(
- self, object, context, maxlevels, level)
-
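For reference, PrettyPrinter.format() is expected to return a (formatted_string, is_readable, is_recursive) triple, and the subclass above emits space-free strings bare instead of repr()-quoted. A one-line usage sketch of the class defined above:

    print(DottedPrettyPrinter().pformat({'others.should.not.be': 'like.this'}))
    # -> {others.should.not.be: like.this}
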
-
-def test_main() -> None:
- test.support.run_unittest(QueryTestCase)
-
-
-if __name__ == "__main__":
- test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_random.py b/test-data/stdlib-samples/3.2/test/test_random.py
deleted file mode 100644
index 5989cee..0000000
--- a/test-data/stdlib-samples/3.2/test/test_random.py
+++ /dev/null
@@ -1,533 +0,0 @@
-#!/usr/bin/env python3
-
-import unittest
-import random
-import time
-import pickle
-import warnings
-from math import log, exp, pi, fsum, sin
-from test import support
-
-from typing import Any, Dict, List, Callable, Generic, TypeVar, cast
-
-RT = TypeVar('RT', random.Random, random.SystemRandom)
-
-class TestBasicOps(unittest.TestCase, Generic[RT]):
- # Superclass with tests common to all generators.
- # Subclasses must arrange for self.gen to retrieve the Random instance
- # to be tested.
-
- gen = None # type: RT # Either Random or SystemRandom
-
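Since this file is mypy test data, the construction above deserves a note: RT is a value-restricted TypeVar (it can only ever stand for random.Random or random.SystemRandom), and making the base class Generic[RT] lets the concrete subclasses further down pin gen to one specific generator type. A stripped-down sketch of the same idiom, with hypothetical names:

    import random
    from typing import Generic, TypeVar

    GenT = TypeVar('GenT', random.Random, random.SystemRandom)  # value-restricted

    class GeneratorChecks(Generic[GenT]):
        gen = None  # type: GenT

    class MersenneChecks(GeneratorChecks[random.Random]):
        gen = random.Random()   # checked against GenT = random.Random
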
- def randomlist(self, n: int) -> List[float]:
- """Helper function to make a list of random numbers"""
- return [self.gen.random() for i in range(n)]
-
- def test_autoseed(self) -> None:
- self.gen.seed()
- state1 = self.gen.getstate()
- time.sleep(0.1)
- self.gen.seed() # different seeds at different times
- state2 = self.gen.getstate()
- self.assertNotEqual(state1, state2)
-
- def test_saverestore(self) -> None:
- N = 1000
- self.gen.seed()
- state = self.gen.getstate()
- randseq = self.randomlist(N)
- self.gen.setstate(state) # should regenerate the same sequence
- self.assertEqual(randseq, self.randomlist(N))
-
- def test_seedargs(self) -> None:
- for arg in [None, 0, 0, 1, 1, -1, -1, 10**20, -(10**20),
- 3.14, complex(1., 2.), 'a', tuple('abc')]:
- self.gen.seed(arg)
- for arg in [list(range(3)), {'one': 1}]:
- self.assertRaises(TypeError, self.gen.seed, arg)
- self.assertRaises(TypeError, self.gen.seed, 1, 2, 3, 4)
- self.assertRaises(TypeError, type(self.gen), []) # type: ignore # mypy issue 1846
-
- def test_choice(self) -> None:
- choice = self.gen.choice
- with self.assertRaises(IndexError):
- choice([])
- self.assertEqual(choice([50]), 50)
- self.assertIn(choice([25, 75]), [25, 75])
-
- def test_sample(self) -> None:
- # For the entire allowable range of 0 <= k <= N, validate that
- # the sample is of the correct length and contains only unique items
- N = 100
- population = range(N)
- for k in range(N+1):
- s = self.gen.sample(population, k)
- self.assertEqual(len(s), k)
- uniq = set(s)
- self.assertEqual(len(uniq), k)
- self.assertTrue(uniq <= set(population))
- self.assertEqual(self.gen.sample([], 0), []) # test edge case N==k==0
-
- def test_sample_distribution(self) -> None:
- # For the entire allowable range of 0 <= k <= N, validate that
- # sample generates all possible permutations
- n = 5
- pop = range(n)
- trials = 10000 # large num prevents false negatives without slowing normal case
- def factorial(n: int) -> int:
- if n == 0:
- return 1
- return n * factorial(n - 1)
- for k in range(n):
- expected = factorial(n) // factorial(n-k)
- perms = {} # type: Dict[tuple, object]
- for i in range(trials):
- perms[tuple(self.gen.sample(pop, k))] = None
- if len(perms) == expected:
- break
- else:
- self.fail()
-
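The expected value in the loop above is simply the number of k-permutations of an n-element population, n! / (n - k)!. For the n = 5 used here that gives:

    from math import factorial

    n = 5
    print([factorial(n) // factorial(n - k) for k in range(n)])
    # [1, 5, 20, 60, 120]  -- distinct ordered samples for k = 0..4
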
- def test_sample_inputs(self) -> None:
- # SF bug #801342 -- population can be any iterable defining __len__()
- self.gen.sample(set(range(20)), 2)
- self.gen.sample(range(20), 2)
- self.gen.sample(range(20), 2)
- self.gen.sample(str('abcdefghijklmnopqrst'), 2)
- self.gen.sample(tuple('abcdefghijklmnopqrst'), 2)
-
- def test_sample_on_dicts(self) -> None:
- self.assertRaises(TypeError, self.gen.sample, dict.fromkeys('abcdef'), 2)
-
- def test_gauss(self) -> None:
- # Ensure that the seed() method initializes all the hidden state. In
- # particular, through 2.2.1 it failed to reset a piece of state used
- # by (and only by) the .gauss() method.
-
- for seed in 1, 12, 123, 1234, 12345, 123456, 654321:
- self.gen.seed(seed)
- x1 = self.gen.random()
- y1 = self.gen.gauss(0, 1)
-
- self.gen.seed(seed)
- x2 = self.gen.random()
- y2 = self.gen.gauss(0, 1)
-
- self.assertEqual(x1, x2)
- self.assertEqual(y1, y2)
-
- def test_pickling(self) -> None:
- state = pickle.dumps(self.gen)
- origseq = [self.gen.random() for i in range(10)]
- newgen = pickle.loads(state)
- restoredseq = [newgen.random() for i in range(10)]
- self.assertEqual(origseq, restoredseq)
-
- def test_bug_1727780(self) -> None:
- # verify that version-2 pickles can be loaded
- # fine, whether they were created on 32-bit or 64-bit
- # platforms, and that version-3 pickles load fine.
- files = [("randv2_32.pck", 780),
- ("randv2_64.pck", 866),
- ("randv3.pck", 343)]
- for file, value in files:
- f = open(support.findfile(file),"rb")
- r = pickle.load(f)
- f.close()
- self.assertEqual(int(r.random()*1000), value)
-
- def test_bug_9025(self) -> None:
- # Had problem with an uneven distribution in int(n*random())
- # Verify the fix by checking that distributions fall within expectations.
- n = 100000
- randrange = self.gen.randrange
- k = sum(randrange(6755399441055744) % 3 == 2 for i in range(n))
- self.assertTrue(0.30 < k/n and k/n < .37, (k/n))
-
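The magic constant in test_bug_9025 is not arbitrary: 6755399441055744 is exactly 3 * 2**51, so a uniform randrange over it hits each residue class modulo 3 equally often and k/n should settle near 1/3; the 0.30-0.37 window merely allows for sampling noise. A quick arithmetic check:

    n = 6755399441055744
    assert n == 3 * 2 ** 51
    assert n % 3 == 0          # residues 0, 1, 2 each cover exactly n // 3 values
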
-class SystemRandom_TestBasicOps(TestBasicOps[random.SystemRandom]):
- gen = random.SystemRandom()
-
- def test_autoseed(self) -> None:
- # Doesn't need to do anything except not fail
- self.gen.seed()
-
- def test_saverestore(self) -> None:
- self.assertRaises(NotImplementedError, self.gen.getstate)
- self.assertRaises(NotImplementedError, self.gen.setstate, None)
-
- def test_seedargs(self) -> None:
- # Doesn't need to do anything except not fail
- self.gen.seed(100)
-
- def test_gauss(self) -> None:
- self.gen.gauss_next = None
- self.gen.seed(100)
- self.assertEqual(self.gen.gauss_next, None)
-
- def test_pickling(self) -> None:
- self.assertRaises(NotImplementedError, pickle.dumps, self.gen)
-
- def test_53_bits_per_float(self) -> None:
- # This should pass whenever a C double has 53 bit precision.
- span = 2 ** 53 # type: int
- cum = 0
- for i in range(100):
- cum |= int(self.gen.random() * span)
- self.assertEqual(cum, span-1)
-
- def test_bigrand(self) -> None:
- # The randrange routine should build up the required number of bits
- # in stages so that all bit positions are active.
- span = 2 ** 500 # type: int
- cum = 0
- for i in range(100):
- r = self.gen.randrange(span)
- self.assertTrue(0 <= r < span)
- cum |= r
- self.assertEqual(cum, span-1)
-
- def test_bigrand_ranges(self) -> None:
- for i in [40,80, 160, 200, 211, 250, 375, 512, 550]:
- start = self.gen.randrange(2 ** i)
- stop = self.gen.randrange(2 ** (i-2))
- if stop <= start:
- return
- self.assertTrue(start <= self.gen.randrange(start, stop) < stop)
-
- def test_rangelimits(self) -> None:
- for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]:
- self.assertEqual(set(range(start,stop)),
- set([self.gen.randrange(start,stop) for i in range(100)]))
-
- def test_genrandbits(self) -> None:
- # Verify ranges
- for k in range(1, 1000):
- self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k)
-
- # Verify all bits active
- getbits = self.gen.getrandbits
- for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]:
- cum = 0
- for i in range(100):
- cum |= getbits(span)
- self.assertEqual(cum, 2**span-1)
-
- # Verify argument checking
- self.assertRaises(TypeError, self.gen.getrandbits)
- self.assertRaises(TypeError, self.gen.getrandbits, 1, 2)
- self.assertRaises(ValueError, self.gen.getrandbits, 0)
- self.assertRaises(ValueError, self.gen.getrandbits, -1)
- self.assertRaises(TypeError, self.gen.getrandbits, 10.1)
-
- def test_randbelow_logic(self, _log: Callable[[float, float], float] = log,
- int: Callable[[float], int] = int) -> None:
- # check bitcount transition points: 2**i and 2**(i+1)-1
- # show that: k = int(1.001 + _log(n, 2))
- # is equal to or one greater than the number of bits in n
- for i in range(1, 1000):
- n = 1 << i # check an exact power of two
- numbits = i+1
- k = int(1.00001 + _log(n, 2))
- self.assertEqual(k, numbits)
- self.assertEqual(n, 2**(k-1))
-
- n += n - 1 # check 1 below the next power of two
- k = int(1.00001 + _log(n, 2))
- self.assertIn(k, [numbits, numbits+1])
- self.assertTrue(2**k > n > 2**(k-2))
-
- n -= n >> 15 # check a little farther below the next power of two
- k = int(1.00001 + _log(n, 2))
- self.assertEqual(k, numbits) # note the stronger assertion
- self.assertTrue(2**k > n > 2**(k-1)) # note the stronger assertion
-
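The 1.00001 fudge factor above absorbs floating-point error in log(n, 2): for an exact power of two the expression is exactly the bit length, and just below the next power of two it overshoots by at most one. On a modern Python the same quantity comes straight from int.bit_length(), which gives a compact cross-check (my example, not part of the sample):

    from math import log

    for i in range(1, 1000):
        n = 1 << i                               # exact power of two
        assert int(1.00001 + log(n, 2)) == n.bit_length()
        m = 2 * n - 1                            # just below the next power of two
        assert int(1.00001 + log(m, 2)) in (m.bit_length(), m.bit_length() + 1)
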
-
-class MersenneTwister_TestBasicOps(TestBasicOps[random.Random]):
- gen = random.Random()
-
- def test_guaranteed_stable(self) -> None:
- # These sequences are guaranteed to stay the same across versions of python
- self.gen.seed(3456147, version=1)
- self.assertEqual([self.gen.random().hex() for i in range(4)],
- ['0x1.ac362300d90d2p-1', '0x1.9d16f74365005p-1',
- '0x1.1ebb4352e4c4dp-1', '0x1.1a7422abf9c11p-1'])
- self.gen.seed("the quick brown fox", version=2)
- self.assertEqual([self.gen.random().hex() for i in range(4)],
- ['0x1.1239ddfb11b7cp-3', '0x1.b3cbb5c51b120p-4',
- '0x1.8c4f55116b60fp-1', '0x1.63eb525174a27p-1'])
-
- def test_setstate_first_arg(self) -> None:
- self.assertRaises(ValueError, self.gen.setstate, (1, None, None))
-
- def test_setstate_middle_arg(self) -> None:
- # Wrong type, s/b tuple
- self.assertRaises(TypeError, self.gen.setstate, (2, None, None))
- # Wrong length, s/b 625
- self.assertRaises(ValueError, self.gen.setstate, (2, (1,2,3), None))
- # Wrong type, s/b tuple of 625 ints
- self.assertRaises(TypeError, self.gen.setstate, (2, tuple(['a',]*625), None))
- # Last element s/b an int also
- self.assertRaises(TypeError, self.gen.setstate, (2, cast(Any, (0,))*624+('a',), None))
-
- def test_referenceImplementation(self) -> None:
- # Compare the python implementation with results from the original
- # code. Create 2000 53-bit precision random floats. Compare only
- # the last ten entries to show that the independent implementations
- # are tracking. Here is the main() function needed to create the
- # list of expected random numbers:
- # void main(void){
- # int i;
- # unsigned long init[4]={61731, 24903, 614, 42143}, length=4;
- # init_by_array(init, length);
- # for (i=0; i<2000; i++) {
- # printf("%.15f ", genrand_res53());
- # if (i%5==4) printf("\n");
- # }
- # }
- expected = [0.45839803073713259,
- 0.86057815201978782,
- 0.92848331726782152,
- 0.35932681119782461,
- 0.081823493762449573,
- 0.14332226470169329,
- 0.084297823823520024,
- 0.53814864671831453,
- 0.089215024911993401,
- 0.78486196105372907]
-
- self.gen.seed(61731 + (24903<<32) + (614<<64) + (42143<<96))
- actual = self.randomlist(2000)[-10:]
- for a, e in zip(actual, expected):
- self.assertAlmostEqual(a,e,places=14)
-
- def test_strong_reference_implementation(self) -> None:
- # Like test_referenceImplementation, but checks for exact bit-level
- # equality. This should pass on any box where C double contains
- # at least 53 bits of precision (the underlying algorithm suffers
- # no rounding errors -- all results are exact).
- from math import ldexp
-
- expected = [0x0eab3258d2231f,
- 0x1b89db315277a5,
- 0x1db622a5518016,
- 0x0b7f9af0d575bf,
- 0x029e4c4db82240,
- 0x04961892f5d673,
- 0x02b291598e4589,
- 0x11388382c15694,
- 0x02dad977c9e1fe,
- 0x191d96d4d334c6]
- self.gen.seed(61731 + (24903<<32) + (614<<64) + (42143<<96))
- actual = self.randomlist(2000)[-10:]
- for a, e in zip(actual, expected):
- self.assertEqual(int(ldexp(a, 53)), e)
-
- def test_long_seed(self) -> None:
- # This is most interesting to run in debug mode, just to make sure
- # nothing blows up. Under the covers, a dynamically resized array
- # is allocated, consuming space proportional to the number of bits
- # in the seed. Unfortunately, that's a quadratic-time algorithm,
- # so don't make this horribly big.
- seed = (1 << (10000 * 8)) - 1 # about 10K bytes
- self.gen.seed(seed)
-
- def test_53_bits_per_float(self) -> None:
- # This should pass whenever a C double has 53 bit precision.
- span = 2 ** 53 # type: int
- cum = 0
- for i in range(100):
- cum |= int(self.gen.random() * span)
- self.assertEqual(cum, span-1)
-
- def test_bigrand(self) -> None:
- # The randrange routine should build up the required number of bits
- # in stages so that all bit positions are active.
- span = 2 ** 500 # type: int
- cum = 0
- for i in range(100):
- r = self.gen.randrange(span)
- self.assertTrue(0 <= r < span)
- cum |= r
- self.assertEqual(cum, span-1)
-
- def test_bigrand_ranges(self) -> None:
- for i in [40,80, 160, 200, 211, 250, 375, 512, 550]:
- start = self.gen.randrange(2 ** i)
- stop = self.gen.randrange(2 ** (i-2))
- if stop <= start:
- return
- self.assertTrue(start <= self.gen.randrange(start, stop) < stop)
-
- def test_rangelimits(self) -> None:
- for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]:
- self.assertEqual(set(range(start,stop)),
- set([self.gen.randrange(start,stop) for i in range(100)]))
-
- def test_genrandbits(self) -> None:
- # Verify cross-platform repeatability
- self.gen.seed(1234567)
- self.assertEqual(self.gen.getrandbits(100),
- 97904845777343510404718956115)
- # Verify ranges
- for k in range(1, 1000):
- self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k)
-
- # Verify all bits active
- getbits = self.gen.getrandbits
- for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]:
- cum = 0
- for i in range(100):
- cum |= getbits(span)
- self.assertEqual(cum, 2**span-1)
-
- # Verify argument checking
- self.assertRaises(TypeError, self.gen.getrandbits)
- self.assertRaises(TypeError, self.gen.getrandbits, 'a')
- self.assertRaises(TypeError, self.gen.getrandbits, 1, 2)
- self.assertRaises(ValueError, self.gen.getrandbits, 0)
- self.assertRaises(ValueError, self.gen.getrandbits, -1)
-
- def test_randbelow_logic(self,
- _log: Callable[[int, float], float] = log,
- int: Callable[[float], int] = int) -> None:
- # check bitcount transition points: 2**i and 2**(i+1)-1
- # show that: k = int(1.001 + _log(n, 2))
- # is equal to or one greater than the number of bits in n
- for i in range(1, 1000):
- n = 1 << i # check an exact power of two
- numbits = i+1
- k = int(1.00001 + _log(n, 2))
- self.assertEqual(k, numbits)
- self.assertEqual(n, 2**(k-1))
-
- n += n - 1 # check 1 below the next power of two
- k = int(1.00001 + _log(n, 2))
- self.assertIn(k, [numbits, numbits+1])
- self.assertTrue(2**k > n > 2**(k-2))
-
- n -= n >> 15 # check a little farther below the next power of two
- k = int(1.00001 + _log(n, 2))
- self.assertEqual(k, numbits) # note the stronger assertion
- self.assertTrue(2**k > n > 2**(k-1)) # note the stronger assertion
-
- def test_randrange_bug_1590891(self) -> None:
- start = 1000000000000
- stop = -100000000000000000000
- step = -200
- x = self.gen.randrange(start, stop, step)
- self.assertTrue(stop < x <= start)
- self.assertEqual((x+stop)%step, 0)
-
-def gamma(z: float, sqrt2pi: float = (2.0*pi)**0.5) -> float:
- # Reflection to right half of complex plane
- if z < 0.5:
- return pi / sin(pi*z) / gamma(1.0-z)
- # Lanczos approximation with g=7
- az = z + (7.0 - 0.5)
- return az ** (z-0.5) / exp(az) * sqrt2pi * fsum([
- 0.9999999999995183,
- 676.5203681218835 / z,
- -1259.139216722289 / (z+1.0),
- 771.3234287757674 / (z+2.0),
- -176.6150291498386 / (z+3.0),
- 12.50734324009056 / (z+4.0),
- -0.1385710331296526 / (z+5.0),
- 0.9934937113930748e-05 / (z+6.0),
- 0.1659470187408462e-06 / (z+7.0),
- ])
-
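The helper above is a Lanczos approximation (g = 7) of the gamma function, used by test_avg_std below to get exact Weibull moments. As a sanity check it should match the factorial identity Gamma(n) = (n-1)! and the stdlib's math.gamma (my check, assuming the gamma() defined above is in scope):

    import math

    assert abs(gamma(5.0) - 24.0) < 1e-9                    # Gamma(5) = 4! = 24
    assert abs(gamma(0.5) - math.pi ** 0.5) < 1e-9          # Gamma(1/2) = sqrt(pi)
    assert abs(gamma(4.0 / 3.0) - math.gamma(4.0 / 3.0)) < 1e-9
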
-class TestDistributions(unittest.TestCase):
- def test_zeroinputs(self) -> None:
- # Verify that distributions can handle a series of zero inputs
- g = random.Random()
- x = [g.random() for i in range(50)] + [0.0]*5
- def patch() -> None:
- setattr(g, 'random', x[:].pop)
- patch(); g.uniform(1.0,10.0)
- patch(); g.paretovariate(1.0)
- patch(); g.expovariate(1.0)
- patch(); g.weibullvariate(1.0, 1.0)
- patch(); g.normalvariate(0.0, 1.0)
- patch(); g.gauss(0.0, 1.0)
- patch(); g.lognormvariate(0.0, 1.0)
- patch(); g.vonmisesvariate(0.0, 1.0)
- patch(); g.gammavariate(0.01, 1.0)
- patch(); g.gammavariate(1.0, 1.0)
- patch(); g.gammavariate(200.0, 1.0)
- patch(); g.betavariate(3.0, 3.0)
- patch(); g.triangular(0.0, 1.0, 1.0/3.0)
-
- def test_avg_std(self) -> None:
- # Use integration to test distribution average and standard deviation.
- # Only works for distributions which do not consume variates in pairs
- g = random.Random()
- N = 5000
- x = [i/float(N) for i in range(1,N)]
- variate = None # type: Any
- for variate, args, mu, sigmasqrd in [
- (g.uniform, (1.0,10.0), (10.0+1.0)/2, (10.0-1.0)**2/12),
- (g.triangular, (0.0, 1.0, 1.0/3.0), 4.0/9.0, 7.0/9.0/18.0),
- (g.expovariate, (1.5,), 1/1.5, 1/1.5**2),
- (g.paretovariate, (5.0,), 5.0/(5.0-1),
- 5.0/((5.0-1)**2*(5.0-2))),
- (g.weibullvariate, (1.0, 3.0), gamma(1+1/3.0),
- gamma(1+2/3.0)-gamma(1+1/3.0)**2) ]:
- setattr(g, 'random', x[:].pop)
- y = [] # type: List[float]
- for i in range(len(x)):
- try:
- y.append(variate(*args))
- except IndexError:
- pass
- s1 = s2 = 0.0
- for e in y:
- s1 += e
- s2 += (e - mu) ** 2
- N = len(y)
- self.assertAlmostEqual(s1/N, mu, places=2)
- self.assertAlmostEqual(s2/(N-1), sigmasqrd, places=2)
-
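The (mu, sigmasqrd) pairs in test_avg_std are the closed-form mean and variance of each distribution: uniform(a, b) has mean (a + b)/2 and variance (b - a)**2/12, expovariate(lambd) has mean 1/lambd and variance 1/lambd**2, paretovariate(alpha) with scale 1 has mean alpha/(alpha - 1) and variance alpha/((alpha - 1)**2 * (alpha - 2)), and weibullvariate(1, k) has mean Gamma(1 + 1/k) and variance Gamma(1 + 2/k) - Gamma(1 + 1/k)**2. For the uniform case used here that works out to:

    a, b = 1.0, 10.0
    print((a + b) / 2)            # 5.5  -- expected mean
    print((b - a) ** 2 / 12)      # 6.75 -- expected variance
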
-class TestModule(unittest.TestCase):
- def testMagicConstants(self) -> None:
- self.assertAlmostEqual(random.NV_MAGICCONST, 1.71552776992141)
- self.assertAlmostEqual(random.TWOPI, 6.28318530718)
- self.assertAlmostEqual(random.LOG4, 1.38629436111989)
- self.assertAlmostEqual(random.SG_MAGICCONST, 2.50407739677627)
-
- def test__all__(self) -> None:
- # tests validity but not completeness of the __all__ list
- self.assertTrue(set(random.__all__) <= set(dir(random)))
-
- def test_random_subclass_with_kwargs(self) -> None:
- # SF bug #1486663 -- this used to erroneously raise a TypeError
- class Subclass(random.Random):
- def __init__(self, newarg: object = None) -> None:
- random.Random.__init__(self)
- Subclass(newarg=1)
-
-
-def test_main(verbose: bool = None) -> None:
- testclasses = [MersenneTwister_TestBasicOps,
- TestDistributions,
- TestModule]
-
- try:
- random.SystemRandom().random()
- except NotImplementedError:
- pass
- else:
- testclasses.append(SystemRandom_TestBasicOps)
-
- support.run_unittest(*testclasses)
-
- # verify reference counting
- import sys
- if verbose and hasattr(sys, "gettotalrefcount"):
- counts = [None] * 5 # type: List[int]
- for i in range(len(counts)):
- support.run_unittest(*testclasses)
- counts[i] = sys.gettotalrefcount()
- print(counts)
-
-if __name__ == "__main__":
- test_main(verbose=True)
diff --git a/test-data/stdlib-samples/3.2/test/test_set.py b/test-data/stdlib-samples/3.2/test/test_set.py
deleted file mode 100644
index 23ae745..0000000
--- a/test-data/stdlib-samples/3.2/test/test_set.py
+++ /dev/null
@@ -1,1884 +0,0 @@
-import unittest
-from test import support
-import gc
-import weakref
-import operator
-import copy
-import pickle
-from random import randrange, shuffle
-import sys
-import warnings
-import collections
-from typing import Set, Any
-
-class PassThru(Exception):
- pass
-
-def check_pass_thru():
- raise PassThru
- yield 1
-
-class BadCmp:
- def __hash__(self):
- return 1
- def __eq__(self, other):
- raise RuntimeError
-
-class ReprWrapper:
- 'Used to test self-referential repr() calls'
- def __repr__(self):
- return repr(self.value)
-
-#class HashCountingInt(int):
-# 'int-like object that counts the number of times __hash__ is called'
-# def __init__(self, *args):
-# self.hash_count = 0
-# def __hash__(self):
-# self.hash_count += 1
-# return int.__hash__(self)
-
-class TestJointOps(unittest.TestCase):
- # Tests common to both set and frozenset
-
- def setUp(self):
- self.word = word = 'simsalabim'
- self.otherword = 'madagascar'
- self.letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
- self.s = self.thetype(word)
- self.d = dict.fromkeys(word)
-
- def test_new_or_init(self):
- self.assertRaises(TypeError, self.thetype, [], 2)
- self.assertRaises(TypeError, set().__init__, a=1)
-
- def test_uniquification(self):
- actual = sorted(self.s)
- expected = sorted(self.d)
- self.assertEqual(actual, expected)
- self.assertRaises(PassThru, self.thetype, check_pass_thru())
- self.assertRaises(TypeError, self.thetype, [[]])
-
- def test_len(self):
- self.assertEqual(len(self.s), len(self.d))
-
- def test_contains(self):
- for c in self.letters:
- self.assertEqual(c in self.s, c in self.d)
- self.assertRaises(TypeError, self.s.__contains__, [[]])
- s = self.thetype([frozenset(self.letters)])
- self.assertIn(self.thetype(self.letters), s)
-
- def test_union(self):
- u = self.s.union(self.otherword)
- for c in self.letters:
- self.assertEqual(c in u, c in self.d or c in self.otherword)
- self.assertEqual(self.s, self.thetype(self.word))
- self.assertEqual(type(u), self.basetype)
- self.assertRaises(PassThru, self.s.union, check_pass_thru())
- self.assertRaises(TypeError, self.s.union, [[]])
- for C in set, frozenset, dict.fromkeys, str, list, tuple:
- self.assertEqual(self.thetype('abcba').union(C('cdc')), set('abcd'))
- self.assertEqual(self.thetype('abcba').union(C('efgfe')), set('abcefg'))
- self.assertEqual(self.thetype('abcba').union(C('ccb')), set('abc'))
- self.assertEqual(self.thetype('abcba').union(C('ef')), set('abcef'))
- self.assertEqual(self.thetype('abcba').union(C('ef'), C('fg')), set('abcefg'))
-
- # Issue #6573
- x = self.thetype()
- self.assertEqual(x.union(set([1]), x, set([2])), self.thetype([1, 2]))
-
- def test_or(self):
- i = self.s.union(self.otherword)
- self.assertEqual(self.s | set(self.otherword), i)
- self.assertEqual(self.s | frozenset(self.otherword), i)
- try:
- self.s | self.otherword
- except TypeError:
- pass
- else:
- self.fail("s|t did not screen-out general iterables")
-
- def test_intersection(self):
- i = self.s.intersection(self.otherword)
- for c in self.letters:
- self.assertEqual(c in i, c in self.d and c in self.otherword)
- self.assertEqual(self.s, self.thetype(self.word))
- self.assertEqual(type(i), self.basetype)
- self.assertRaises(PassThru, self.s.intersection, check_pass_thru())
- for C in set, frozenset, dict.fromkeys, str, list, tuple:
- self.assertEqual(self.thetype('abcba').intersection(C('cdc')), set('cc'))
- self.assertEqual(self.thetype('abcba').intersection(C('efgfe')), set(''))
- self.assertEqual(self.thetype('abcba').intersection(C('ccb')), set('bc'))
- self.assertEqual(self.thetype('abcba').intersection(C('ef')), set(''))
- self.assertEqual(self.thetype('abcba').intersection(C('cbcf'), C('bag')), set('b'))
- s = self.thetype('abcba')
- z = s.intersection()
- if self.thetype == frozenset():
- self.assertEqual(id(s), id(z))
- else:
- self.assertNotEqual(id(s), id(z))
-
- def test_isdisjoint(self):
- def f(s1, s2):
- 'Pure python equivalent of isdisjoint()'
- return not set(s1).intersection(s2)
- for larg in '', 'a', 'ab', 'abc', 'ababac', 'cdc', 'cc', 'efgfe', 'ccb', 'ef':
- s1 = self.thetype(larg)
- for rarg in '', 'a', 'ab', 'abc', 'ababac', 'cdc', 'cc', 'efgfe', 'ccb', 'ef':
- for C in set, frozenset, dict.fromkeys, str, list, tuple:
- s2 = C(rarg)
- actual = s1.isdisjoint(s2)
- expected = f(s1, s2)
- self.assertEqual(actual, expected)
- self.assertTrue(actual is True or actual is False)
-
- def test_and(self):
- i = self.s.intersection(self.otherword)
- self.assertEqual(self.s & set(self.otherword), i)
- self.assertEqual(self.s & frozenset(self.otherword), i)
- try:
- self.s & self.otherword
- except TypeError:
- pass
- else:
- self.fail("s&t did not screen-out general iterables")
-
- def test_difference(self):
- i = self.s.difference(self.otherword)
- for c in self.letters:
- self.assertEqual(c in i, c in self.d and c not in self.otherword)
- self.assertEqual(self.s, self.thetype(self.word))
- self.assertEqual(type(i), self.basetype)
- self.assertRaises(PassThru, self.s.difference, check_pass_thru())
- self.assertRaises(TypeError, self.s.difference, [[]])
- for C in set, frozenset, dict.fromkeys, str, list, tuple:
- self.assertEqual(self.thetype('abcba').difference(C('cdc')), set('ab'))
- self.assertEqual(self.thetype('abcba').difference(C('efgfe')), set('abc'))
- self.assertEqual(self.thetype('abcba').difference(C('ccb')), set('a'))
- self.assertEqual(self.thetype('abcba').difference(C('ef')), set('abc'))
- self.assertEqual(self.thetype('abcba').difference(), set('abc'))
- self.assertEqual(self.thetype('abcba').difference(C('a'), C('b')), set('c'))
-
- def test_sub(self):
- i = self.s.difference(self.otherword)
- self.assertEqual(self.s - set(self.otherword), i)
- self.assertEqual(self.s - frozenset(self.otherword), i)
- try:
- self.s - self.otherword
- except TypeError:
- pass
- else:
- self.fail("s-t did not screen-out general iterables")
-
- def test_symmetric_difference(self):
- i = self.s.symmetric_difference(self.otherword)
- for c in self.letters:
- self.assertEqual(c in i, (c in self.d) ^ (c in self.otherword))
- self.assertEqual(self.s, self.thetype(self.word))
- self.assertEqual(type(i), self.basetype)
- self.assertRaises(PassThru, self.s.symmetric_difference, check_pass_thru())
- self.assertRaises(TypeError, self.s.symmetric_difference, [[]])
- for C in set, frozenset, dict.fromkeys, str, list, tuple:
- self.assertEqual(self.thetype('abcba').symmetric_difference(C('cdc')), set('abd'))
- self.assertEqual(self.thetype('abcba').symmetric_difference(C('efgfe')), set('abcefg'))
- self.assertEqual(self.thetype('abcba').symmetric_difference(C('ccb')), set('a'))
- self.assertEqual(self.thetype('abcba').symmetric_difference(C('ef')), set('abcef'))
-
- def test_xor(self):
- i = self.s.symmetric_difference(self.otherword)
- self.assertEqual(self.s ^ set(self.otherword), i)
- self.assertEqual(self.s ^ frozenset(self.otherword), i)
- try:
- self.s ^ self.otherword
- except TypeError:
- pass
- else:
- self.fail("s^t did not screen-out general iterables")
-
- def test_equality(self):
- self.assertEqual(self.s, set(self.word))
- self.assertEqual(self.s, frozenset(self.word))
- self.assertEqual(self.s == self.word, False)
- self.assertNotEqual(self.s, set(self.otherword))
- self.assertNotEqual(self.s, frozenset(self.otherword))
- self.assertEqual(self.s != self.word, True)
-
- def test_setOfFrozensets(self):
- t = map(frozenset, ['abcdef', 'bcd', 'bdcb', 'fed', 'fedccba'])
- s = self.thetype(t)
- self.assertEqual(len(s), 3)
-
- def test_sub_and_super(self):
- p, q, r = map(self.thetype, ['ab', 'abcde', 'def'])
- self.assertTrue(p < q)
- self.assertTrue(p <= q)
- self.assertTrue(q <= q)
- self.assertTrue(q > p)
- self.assertTrue(q >= p)
- self.assertFalse(q < r)
- self.assertFalse(q <= r)
- self.assertFalse(q > r)
- self.assertFalse(q >= r)
- self.assertTrue(set('a').issubset('abc'))
- self.assertTrue(set('abc').issuperset('a'))
- self.assertFalse(set('a').issubset('cbs'))
- self.assertFalse(set('cbs').issuperset('a'))
-
- def test_pickling(self):
- for i in range(pickle.HIGHEST_PROTOCOL + 1):
- p = pickle.dumps(self.s, i)
- dup = pickle.loads(p)
- self.assertEqual(self.s, dup, "%s != %s" % (self.s, dup))
- if type(self.s) not in (set, frozenset):
- self.s.x = 10
- p = pickle.dumps(self.s)
- dup = pickle.loads(p)
- self.assertEqual(self.s.x, dup.x)
-
- def test_deepcopy(self):
- class Tracer:
- def __init__(self, value):
- self.value = value
- def __hash__(self):
- return self.value
- def __deepcopy__(self, memo=None):
- return Tracer(self.value + 1)
- t = Tracer(10)
- s = self.thetype([t])
- dup = copy.deepcopy(s)
- self.assertNotEqual(id(s), id(dup))
- for elem in dup:
- newt = elem
- self.assertNotEqual(id(t), id(newt))
- self.assertEqual(t.value + 1, newt.value)
-
- def test_gc(self):
- # Create a nest of cycles to exercise overall ref count check
- class A:
- pass
- s = set(A() for i in range(1000))
- for elem in s:
- elem.cycle = s
- elem.sub = elem
- elem.set = set([elem])
-
- def test_subclass_with_custom_hash(self):
- raise NotImplementedError() # runtime computed base class below
- # Bug #1257731
- class H: # (self.thetype):
- def __hash__(self):
- return int(id(self) & 0x7fffffff)
- s=H()
- f=set()
- f.add(s)
- self.assertIn(s, f)
- f.remove(s)
- f.add(s)
- f.discard(s)
-
- def test_badcmp(self):
- s = self.thetype([BadCmp()])
- # Detect comparison errors during insertion and lookup
- self.assertRaises(RuntimeError, self.thetype, [BadCmp(), BadCmp()])
- self.assertRaises(RuntimeError, s.__contains__, BadCmp())
- # Detect errors during mutating operations
- if hasattr(s, 'add'):
- self.assertRaises(RuntimeError, s.add, BadCmp())
- self.assertRaises(RuntimeError, s.discard, BadCmp())
- self.assertRaises(RuntimeError, s.remove, BadCmp())
-
- def test_cyclical_repr(self):
- w = ReprWrapper()
- s = self.thetype([w])
- w.value = s
- if self.thetype == set:
- self.assertEqual(repr(s), '{set(...)}')
- else:
- name = repr(s).partition('(')[0] # extract the class name
- self.assertEqual(repr(s), '%s({%s(...)})' % (name, name))
-
- def test_cyclical_print(self):
- w = ReprWrapper()
- s = self.thetype([w])
- w.value = s
- fo = open(support.TESTFN, "w")
- try:
- fo.write(str(s))
- fo.close()
- fo = open(support.TESTFN, "r")
- self.assertEqual(fo.read(), repr(s))
- finally:
- fo.close()
- support.unlink(support.TESTFN)
-
- def test_do_not_rehash_dict_keys(self):
- raise NotImplementedError() # cannot subclass int
- n = 10
- d = None # dict.fromkeys(map(HashCountingInt, range(n)))
- self.assertEqual(sum(elem.hash_count for elem in d), n)
- s = self.thetype(d)
- self.assertEqual(sum(elem.hash_count for elem in d), n)
- s.difference(d)
- self.assertEqual(sum(elem.hash_count for elem in d), n)
- if hasattr(s, 'symmetric_difference_update'):
- s.symmetric_difference_update(d)
- self.assertEqual(sum(elem.hash_count for elem in d), n)
- d2 = dict.fromkeys(set(d))
- self.assertEqual(sum(elem.hash_count for elem in d), n)
- d3 = dict.fromkeys(frozenset(d))
- self.assertEqual(sum(elem.hash_count for elem in d), n)
- d3 = dict.fromkeys(frozenset(d), 123)
- self.assertEqual(sum(elem.hash_count for elem in d), n)
- self.assertEqual(d3, dict.fromkeys(d, 123))
-
- def test_container_iterator(self):
- # Bug #3680: tp_traverse was not implemented for set iterator object
- class C(object):
- pass
- obj = C()
- ref = weakref.ref(obj)
- container = set([obj, 1])
- obj.x = iter(container)
- obj = None
- container = None
- gc.collect()
- self.assertTrue(ref() is None, "Cycle was not collected")
-
-class TestSet(TestJointOps):
- thetype = set
- basetype = set
-
- def test_init(self):
- s = self.thetype()
- s.__init__(self.word)
- self.assertEqual(s, set(self.word))
- s.__init__(self.otherword)
- self.assertEqual(s, set(self.otherword))
- self.assertRaises(TypeError, s.__init__, s, 2)
- self.assertRaises(TypeError, s.__init__, 1)
-
- def test_constructor_identity(self):
- s = self.thetype(range(3))
- t = self.thetype(s)
- self.assertNotEqual(id(s), id(t))
-
- def test_set_literal(self):
- raise NotImplementedError()
- #s = set([1,2,3])
- #t = {1,2,3}
- #self.assertEqual(s, t)
-
- def test_hash(self):
- self.assertRaises(TypeError, hash, self.s)
-
- def test_clear(self):
- self.s.clear()
- self.assertEqual(self.s, set())
- self.assertEqual(len(self.s), 0)
-
- def test_copy(self):
- dup = self.s.copy()
- self.assertEqual(self.s, dup)
- self.assertNotEqual(id(self.s), id(dup))
- self.assertEqual(type(dup), self.basetype)
-
- def test_add(self):
- self.s.add('Q')
- self.assertIn('Q', self.s)
- dup = self.s.copy()
- self.s.add('Q')
- self.assertEqual(self.s, dup)
- self.assertRaises(TypeError, self.s.add, [])
-
- def test_remove(self):
- self.s.remove('a')
- self.assertNotIn('a', self.s)
- self.assertRaises(KeyError, self.s.remove, 'Q')
- self.assertRaises(TypeError, self.s.remove, [])
- s = self.thetype([frozenset(self.word)])
- self.assertIn(self.thetype(self.word), s)
- s.remove(self.thetype(self.word))
- self.assertNotIn(self.thetype(self.word), s)
- self.assertRaises(KeyError, self.s.remove, self.thetype(self.word))
-
- def test_remove_keyerror_unpacking(self):
- # bug: www.python.org/sf/1576657
- for v1 in ['Q', (1,)]:
- try:
- self.s.remove(v1)
- except KeyError as e:
- v2 = e.args[0]
- self.assertEqual(v1, v2)
- else:
- self.fail()
-
- def test_remove_keyerror_set(self):
- key = self.thetype([3, 4])
- try:
- self.s.remove(key)
- except KeyError as e:
- self.assertTrue(e.args[0] is key,
- "KeyError should be {0}, not {1}".format(key,
- e.args[0]))
- else:
- self.fail()
-
- def test_discard(self):
- self.s.discard('a')
- self.assertNotIn('a', self.s)
- self.s.discard('Q')
- self.assertRaises(TypeError, self.s.discard, [])
- s = self.thetype([frozenset(self.word)])
- self.assertIn(self.thetype(self.word), s)
- s.discard(self.thetype(self.word))
- self.assertNotIn(self.thetype(self.word), s)
- s.discard(self.thetype(self.word))
-
- def test_pop(self):
- for i in range(len(self.s)):
- elem = self.s.pop()
- self.assertNotIn(elem, self.s)
- self.assertRaises(KeyError, self.s.pop)
-
- def test_update(self):
- retval = self.s.update(self.otherword)
- self.assertEqual(retval, None)
- for c in (self.word + self.otherword):
- self.assertIn(c, self.s)
- self.assertRaises(PassThru, self.s.update, check_pass_thru())
- self.assertRaises(TypeError, self.s.update, [[]])
- for p, q in (('cdc', 'abcd'), ('efgfe', 'abcefg'), ('ccb', 'abc'), ('ef', 'abcef')):
- for C in set, frozenset, dict.fromkeys, str, list, tuple:
- s = self.thetype('abcba')
- self.assertEqual(s.update(C(p)), None)
- self.assertEqual(s, set(q))
- for p in ('cdc', 'efgfe', 'ccb', 'ef', 'abcda'):
- q = 'ahi'
- for C in set, frozenset, dict.fromkeys, str, list, tuple:
- s = self.thetype('abcba')
- self.assertEqual(s.update(C(p), C(q)), None)
- self.assertEqual(s, set(s) | set(p) | set(q))
-
- def test_ior(self):
- self.s |= set(self.otherword)
- for c in (self.word + self.otherword):
- self.assertIn(c, self.s)
-
- def test_intersection_update(self):
- retval = self.s.intersection_update(self.otherword)
- self.assertEqual(retval, None)
- for c in (self.word + self.otherword):
- if c in self.otherword and c in self.word:
- self.assertIn(c, self.s)
- else:
- self.assertNotIn(c, self.s)
- self.assertRaises(PassThru, self.s.intersection_update, check_pass_thru())
- self.assertRaises(TypeError, self.s.intersection_update, [[]])
- for p, q in (('cdc', 'c'), ('efgfe', ''), ('ccb', 'bc'), ('ef', '')):
- for C in set, frozenset, dict.fromkeys, str, list, tuple:
- s = self.thetype('abcba')
- self.assertEqual(s.intersection_update(C(p)), None)
- self.assertEqual(s, set(q))
- ss = 'abcba'
- s = self.thetype(ss)
- t = 'cbc'
- self.assertEqual(s.intersection_update(C(p), C(t)), None)
- self.assertEqual(s, set('abcba')&set(p)&set(t))
-
- def test_iand(self):
- self.s &= set(self.otherword)
- for c in (self.word + self.otherword):
- if c in self.otherword and c in self.word:
- self.assertIn(c, self.s)
- else:
- self.assertNotIn(c, self.s)
-
- def test_difference_update(self):
- retval = self.s.difference_update(self.otherword)
- self.assertEqual(retval, None)
- for c in (self.word + self.otherword):
- if c in self.word and c not in self.otherword:
- self.assertIn(c, self.s)
- else:
- self.assertNotIn(c, self.s)
- self.assertRaises(PassThru, self.s.difference_update, check_pass_thru())
- self.assertRaises(TypeError, self.s.difference_update, [[]])
- self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])
- for p, q in (('cdc', 'ab'), ('efgfe', 'abc'), ('ccb', 'a'), ('ef', 'abc')):
- for C in set, frozenset, dict.fromkeys, str, list, tuple:
- s = self.thetype('abcba')
- self.assertEqual(s.difference_update(C(p)), None)
- self.assertEqual(s, set(q))
-
- s = self.thetype('abcdefghih')
- s.difference_update()
- self.assertEqual(s, self.thetype('abcdefghih'))
-
- s = self.thetype('abcdefghih')
- s.difference_update(C('aba'))
- self.assertEqual(s, self.thetype('cdefghih'))
-
- s = self.thetype('abcdefghih')
- s.difference_update(C('cdc'), C('aba'))
- self.assertEqual(s, self.thetype('efghih'))
-
- def test_isub(self):
- self.s -= set(self.otherword)
- for c in (self.word + self.otherword):
- if c in self.word and c not in self.otherword:
- self.assertIn(c, self.s)
- else:
- self.assertNotIn(c, self.s)
-
- def test_symmetric_difference_update(self):
- retval = self.s.symmetric_difference_update(self.otherword)
- self.assertEqual(retval, None)
- for c in (self.word + self.otherword):
- if (c in self.word) ^ (c in self.otherword):
- self.assertIn(c, self.s)
- else:
- self.assertNotIn(c, self.s)
- self.assertRaises(PassThru, self.s.symmetric_difference_update, check_pass_thru())
- self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])
- for p, q in (('cdc', 'abd'), ('efgfe', 'abcefg'), ('ccb', 'a'), ('ef', 'abcef')):
- for C in set, frozenset, dict.fromkeys, str, list, tuple:
- s = self.thetype('abcba')
- self.assertEqual(s.symmetric_difference_update(C(p)), None)
- self.assertEqual(s, set(q))
-
- def test_ixor(self):
- self.s ^= set(self.otherword)
- for c in (self.word + self.otherword):
- if (c in self.word) ^ (c in self.otherword):
- self.assertIn(c, self.s)
- else:
- self.assertNotIn(c, self.s)
-
- def test_inplace_on_self(self):
- t = self.s.copy()
- t |= t
- self.assertEqual(t, self.s)
- t &= t
- self.assertEqual(t, self.s)
- t -= t
- self.assertEqual(t, self.thetype())
- t = self.s.copy()
- t ^= t
- self.assertEqual(t, self.thetype())
-
- def test_weakref(self):
- s = self.thetype('gallahad')
- p = weakref.proxy(s)
- self.assertEqual(str(p), str(s))
- s = None
- self.assertRaises(ReferenceError, str, p)
-
- def test_rich_compare(self):
- class TestRichSetCompare:
- def __gt__(self, some_set):
- self.gt_called = True
- return False
- def __lt__(self, some_set):
- self.lt_called = True
- return False
- def __ge__(self, some_set):
- self.ge_called = True
- return False
- def __le__(self, some_set):
- self.le_called = True
- return False
-
- # This first tries the builtin rich set comparison, which doesn't know
- # how to handle the custom object. Upon returning NotImplemented, the
- # corresponding comparison on the right object is invoked.
- myset = {1, 2, 3}
-
- myobj = TestRichSetCompare()
- myset < myobj
- self.assertTrue(myobj.gt_called)
-
- myobj = TestRichSetCompare()
- myset > myobj
- self.assertTrue(myobj.lt_called)
-
- myobj = TestRichSetCompare()
- myset <= myobj
- self.assertTrue(myobj.ge_called)
-
- myobj = TestRichSetCompare()
- myset >= myobj
- self.assertTrue(myobj.le_called)
-
- # C API test only available in a debug build
- if hasattr(set, "test_c_api"):
- def test_c_api(self):
- self.assertEqual(set().test_c_api(), True)
-
-class SetSubclass(set):
- pass
-
-class TestSetSubclass(TestSet):
- thetype = SetSubclass
- basetype = set
-
-class SetSubclassWithKeywordArgs(set):
- def __init__(self, iterable=[], newarg=None):
- set.__init__(self, iterable)
-
-class TestSetSubclassWithKeywordArgs(TestSet):
-
- def test_keywords_in_subclass(self):
- 'SF bug #1486663 -- this used to erroneously raise a TypeError'
- SetSubclassWithKeywordArgs(newarg=1)
-
-class TestFrozenSet(TestJointOps):
- thetype = frozenset
- basetype = frozenset
-
- def test_init(self):
- s = self.thetype(self.word)
- s.__init__(self.otherword)
- self.assertEqual(s, set(self.word))
-
- def test_singleton_empty_frozenset(self):
- f = frozenset()
- efs = [frozenset(), frozenset([]), frozenset(()), frozenset(''),
- frozenset(), frozenset([]), frozenset(()), frozenset(''),
- frozenset(range(0)), frozenset(frozenset()),
- frozenset(f), f]
- # All of the empty frozensets should have just one id()
- self.assertEqual(len(set(map(id, efs))), 1)
-
- def test_constructor_identity(self):
- s = self.thetype(range(3))
- t = self.thetype(s)
- self.assertEqual(id(s), id(t))
-
- def test_hash(self):
- self.assertEqual(hash(self.thetype('abcdeb')),
- hash(self.thetype('ebecda')))
-
- # make sure that all permutations give the same hash value
- n = 100
- seq = [randrange(n) for i in range(n)]
- results = set()
- for i in range(200):
- shuffle(seq)
- results.add(hash(self.thetype(seq)))
- self.assertEqual(len(results), 1)
-
- def test_copy(self):
- dup = self.s.copy()
- self.assertEqual(id(self.s), id(dup))
-
- def test_frozen_as_dictkey(self):
- seq = list(range(10)) + list('abcdefg') + ['apple']
- key1 = self.thetype(seq)
- key2 = self.thetype(reversed(seq))
- self.assertEqual(key1, key2)
- self.assertNotEqual(id(key1), id(key2))
- d = {}
- d[key1] = 42
- self.assertEqual(d[key2], 42)
-
- def test_hash_caching(self):
- f = self.thetype('abcdcda')
- self.assertEqual(hash(f), hash(f))
-
- def test_hash_effectiveness(self):
- n = 13
- hashvalues = set()
- addhashvalue = hashvalues.add
- elemmasks = [(i+1, 1<<i) for i in range(n)]
- for i in range(2**n):
- addhashvalue(hash(frozenset([e for e, m in elemmasks if m&i])))
- self.assertEqual(len(hashvalues), 2**n)
-
-class FrozenSetSubclass(frozenset):
- pass
-
-class TestFrozenSetSubclass(TestFrozenSet):
- thetype = FrozenSetSubclass
- basetype = frozenset
-
- def test_constructor_identity(self):
- s = self.thetype(range(3))
- t = self.thetype(s)
- self.assertNotEqual(id(s), id(t))
-
- def test_copy(self):
- dup = self.s.copy()
- self.assertNotEqual(id(self.s), id(dup))
-
- def test_nested_empty_constructor(self):
- s = self.thetype()
- t = self.thetype(s)
- self.assertEqual(s, t)
-
- def test_singleton_empty_frozenset(self):
- Frozenset = self.thetype
- f = frozenset()
- F = Frozenset()
- efs = [Frozenset(), Frozenset([]), Frozenset(()), Frozenset(''),
- Frozenset(), Frozenset([]), Frozenset(()), Frozenset(''),
- Frozenset(range(0)), Frozenset(Frozenset()),
- Frozenset(frozenset()), f, F, Frozenset(f), Frozenset(F)]
- # All empty frozenset subclass instances should have different ids
- self.assertEqual(len(set(map(id, efs))), len(efs))
-
-# Tests taken from test_sets.py =============================================
-
-empty_set = set() # type: Any
-
-#==============================================================================
-
-class TestBasicOps(unittest.TestCase):
-
- def test_repr(self):
- if self.repr is not None:
- self.assertEqual(repr(self.set), self.repr)
-
- def check_repr_against_values(self):
- text = repr(self.set)
- self.assertTrue(text.startswith('{'))
- self.assertTrue(text.endswith('}'))
-
- result = text[1:-1].split(', ')
- result.sort()
- sorted_repr_values = [repr(value) for value in self.values]
- sorted_repr_values.sort()
- self.assertEqual(result, sorted_repr_values)
-
- def test_print(self):
- try:
- fo = open(support.TESTFN, "w")
- fo.write(str(self.set))
- fo.close()
- fo = open(support.TESTFN, "r")
- self.assertEqual(fo.read(), repr(self.set))
- finally:
- fo.close()
- support.unlink(support.TESTFN)
-
- def test_length(self):
- self.assertEqual(len(self.set), self.length)
-
- def test_self_equality(self):
- self.assertEqual(self.set, self.set)
-
- def test_equivalent_equality(self):
- self.assertEqual(self.set, self.dup)
-
- def test_copy(self):
- self.assertEqual(self.set.copy(), self.dup)
-
- def test_self_union(self):
- result = self.set | self.set
- self.assertEqual(result, self.dup)
-
- def test_empty_union(self):
- result = self.set | empty_set
- self.assertEqual(result, self.dup)
-
- def test_union_empty(self):
- result = empty_set | self.set
- self.assertEqual(result, self.dup)
-
- def test_self_intersection(self):
- result = self.set & self.set
- self.assertEqual(result, self.dup)
-
- def test_empty_intersection(self):
- result = self.set & empty_set
- self.assertEqual(result, empty_set)
-
- def test_intersection_empty(self):
- result = empty_set & self.set
- self.assertEqual(result, empty_set)
-
- def test_self_isdisjoint(self):
- result = self.set.isdisjoint(self.set)
- self.assertEqual(result, not self.set)
-
- def test_empty_isdisjoint(self):
- result = self.set.isdisjoint(empty_set)
- self.assertEqual(result, True)
-
- def test_isdisjoint_empty(self):
- result = empty_set.isdisjoint(self.set)
- self.assertEqual(result, True)
-
- def test_self_symmetric_difference(self):
- result = self.set ^ self.set
- self.assertEqual(result, empty_set)
-
- def test_empty_symmetric_difference(self):
- result = self.set ^ empty_set
- self.assertEqual(result, self.set)
-
- def test_self_difference(self):
- result = self.set - self.set
- self.assertEqual(result, empty_set)
-
- def test_empty_difference(self):
- result = self.set - empty_set
- self.assertEqual(result, self.dup)
-
- def test_empty_difference_rev(self):
- result = empty_set - self.set
- self.assertEqual(result, empty_set)
-
- def test_iteration(self):
- for v in self.set:
- self.assertIn(v, self.values)
- setiter = iter(self.set)
- # note: __length_hint__ is an internal undocumented API,
- # don't rely on it in your own programs
- self.assertEqual(setiter.__length_hint__(), len(self.set))
-
- def test_pickling(self):
- p = pickle.dumps(self.set)
- copy = pickle.loads(p)
- self.assertEqual(self.set, copy,
- "%s != %s" % (self.set, copy))
-
-#------------------------------------------------------------------------------
-
-class TestBasicOpsEmpty(TestBasicOps):
- def setUp(self):
- self.case = "empty set"
- self.values = []
- self.set = set(self.values)
- self.dup = set(self.values)
- self.length = 0
- self.repr = "set()"
-
-#------------------------------------------------------------------------------
-
-class TestBasicOpsSingleton(TestBasicOps):
- def setUp(self):
- self.case = "unit set (number)"
- self.values = [3]
- self.set = set(self.values)
- self.dup = set(self.values)
- self.length = 1
- self.repr = "{3}"
-
- def test_in(self):
- self.assertIn(3, self.set)
-
- def test_not_in(self):
- self.assertNotIn(2, self.set)
-
-#------------------------------------------------------------------------------
-
-class TestBasicOpsTuple(TestBasicOps):
- def setUp(self):
- self.case = "unit set (tuple)"
- self.values = [(0, "zero")]
- self.set = set(self.values)
- self.dup = set(self.values)
- self.length = 1
- self.repr = "{(0, 'zero')}"
-
- def test_in(self):
- self.assertIn((0, "zero"), self.set)
-
- def test_not_in(self):
- self.assertNotIn(9, self.set)
-
-#------------------------------------------------------------------------------
-
-class TestBasicOpsTriple(TestBasicOps):
- def setUp(self):
- self.case = "triple set"
- self.values = [0, "zero", operator.add]
- self.set = set(self.values)
- self.dup = set(self.values)
- self.length = 3
- self.repr = None
-
-#------------------------------------------------------------------------------
-
-class TestBasicOpsString(TestBasicOps):
- def setUp(self):
- self.case = "string set"
- self.values = ["a", "b", "c"]
- self.set = set(self.values)
- self.dup = set(self.values)
- self.length = 3
-
- def test_repr(self):
- self.check_repr_against_values()
-
-#------------------------------------------------------------------------------
-
-class TestBasicOpsBytes(TestBasicOps):
- def setUp(self):
- self.case = "string set"
- self.values = [b"a", b"b", b"c"]
- self.set = set(self.values)
- self.dup = set(self.values)
- self.length = 3
-
- def test_repr(self):
- self.check_repr_against_values()
-
-#------------------------------------------------------------------------------
-
-class TestBasicOpsMixedStringBytes(TestBasicOps):
- def setUp(self):
- self._warning_filters = support.check_warnings()
- self._warning_filters.__enter__()
- warnings.simplefilter('ignore', BytesWarning)
- self.case = "string and bytes set"
- self.values = ["a", "b", b"a", b"b"]
- self.set = set(self.values)
- self.dup = set(self.values)
- self.length = 4
-
- def tearDown(self):
- self._warning_filters.__exit__(None, None, None)
-
- def test_repr(self):
- self.check_repr_against_values()
-
-#==============================================================================
-
-def baditer():
- raise TypeError
- yield True
-
-def gooditer():
- yield True
-
-class TestExceptionPropagation(unittest.TestCase):
- """SF 628246: Set constructor should not trap iterator TypeErrors"""
-
- def test_instanceWithException(self):
- self.assertRaises(TypeError, set, baditer())
-
- def test_instancesWithoutException(self):
- # All of these iterables should load without exception.
- set([1,2,3])
- set((1,2,3))
- set({'one':1, 'two':2, 'three':3})
- set(range(3))
- set('abc')
- set(gooditer())
-
- def test_changingSizeWhileIterating(self):
- s = set([1,2,3])
- try:
- for i in s:
- s.update([4])
- except RuntimeError:
- pass
- else:
- self.fail("no exception when changing size during iteration")
-
-#==============================================================================
-
-class TestSetOfSets(unittest.TestCase):
- def test_constructor(self):
- inner = frozenset([1])
- outer = set([inner])
- element = outer.pop()
- self.assertEqual(type(element), frozenset)
- outer.add(inner) # Rebuild set of sets with .add method
- outer.remove(inner)
- self.assertEqual(outer, set()) # Verify that remove worked
- outer.discard(inner) # Absence of KeyError indicates working fine
-
-#==============================================================================
-
-class TestBinaryOps(unittest.TestCase):
- def setUp(self):
- self.set = set((2, 4, 6))
-
- def test_eq(self): # SF bug 643115
- self.assertEqual(self.set, set({2:1,4:3,6:5}))
-
- def test_union_subset(self):
- result = self.set | set([2])
- self.assertEqual(result, set((2, 4, 6)))
-
- def test_union_superset(self):
- result = self.set | set([2, 4, 6, 8])
- self.assertEqual(result, set([2, 4, 6, 8]))
-
- def test_union_overlap(self):
- result = self.set | set([3, 4, 5])
- self.assertEqual(result, set([2, 3, 4, 5, 6]))
-
- def test_union_non_overlap(self):
- result = self.set | set([8])
- self.assertEqual(result, set([2, 4, 6, 8]))
-
- def test_intersection_subset(self):
- result = self.set & set((2, 4))
- self.assertEqual(result, set((2, 4)))
-
- def test_intersection_superset(self):
- result = self.set & set([2, 4, 6, 8])
- self.assertEqual(result, set([2, 4, 6]))
-
- def test_intersection_overlap(self):
- result = self.set & set([3, 4, 5])
- self.assertEqual(result, set([4]))
-
- def test_intersection_non_overlap(self):
- result = self.set & set([8])
- self.assertEqual(result, empty_set)
-
- def test_isdisjoint_subset(self):
- result = self.set.isdisjoint(set((2, 4)))
- self.assertEqual(result, False)
-
- def test_isdisjoint_superset(self):
- result = self.set.isdisjoint(set([2, 4, 6, 8]))
- self.assertEqual(result, False)
-
- def test_isdisjoint_overlap(self):
- result = self.set.isdisjoint(set([3, 4, 5]))
- self.assertEqual(result, False)
-
- def test_isdisjoint_non_overlap(self):
- result = self.set.isdisjoint(set([8]))
- self.assertEqual(result, True)
-
- def test_sym_difference_subset(self):
- result = self.set ^ set((2, 4))
- self.assertEqual(result, set([6]))
-
- def test_sym_difference_superset(self):
- result = self.set ^ set((2, 4, 6, 8))
- self.assertEqual(result, set([8]))
-
- def test_sym_difference_overlap(self):
- result = self.set ^ set((3, 4, 5))
- self.assertEqual(result, set([2, 3, 5, 6]))
-
- def test_sym_difference_non_overlap(self):
- result = self.set ^ set([8])
- self.assertEqual(result, set([2, 4, 6, 8]))
-
-#==============================================================================
-
-class TestUpdateOps(unittest.TestCase):
- def setUp(self):
- self.set = set((2, 4, 6))
-
- def test_union_subset(self):
- self.set |= set([2])
- self.assertEqual(self.set, set((2, 4, 6)))
-
- def test_union_superset(self):
- self.set |= set([2, 4, 6, 8])
- self.assertEqual(self.set, set([2, 4, 6, 8]))
-
- def test_union_overlap(self):
- self.set |= set([3, 4, 5])
- self.assertEqual(self.set, set([2, 3, 4, 5, 6]))
-
- def test_union_non_overlap(self):
- self.set |= set([8])
- self.assertEqual(self.set, set([2, 4, 6, 8]))
-
- def test_union_method_call(self):
- self.set.update(set([3, 4, 5]))
- self.assertEqual(self.set, set([2, 3, 4, 5, 6]))
-
- def test_intersection_subset(self):
- self.set &= set((2, 4))
- self.assertEqual(self.set, set((2, 4)))
-
- def test_intersection_superset(self):
- self.set &= set([2, 4, 6, 8])
- self.assertEqual(self.set, set([2, 4, 6]))
-
- def test_intersection_overlap(self):
- self.set &= set([3, 4, 5])
- self.assertEqual(self.set, set([4]))
-
- def test_intersection_non_overlap(self):
- self.set &= set([8])
- self.assertEqual(self.set, empty_set)
-
- def test_intersection_method_call(self):
- self.set.intersection_update(set([3, 4, 5]))
- self.assertEqual(self.set, set([4]))
-
- def test_sym_difference_subset(self):
- self.set ^= set((2, 4))
- self.assertEqual(self.set, set([6]))
-
- def test_sym_difference_superset(self):
- self.set ^= set((2, 4, 6, 8))
- self.assertEqual(self.set, set([8]))
-
- def test_sym_difference_overlap(self):
- self.set ^= set((3, 4, 5))
- self.assertEqual(self.set, set([2, 3, 5, 6]))
-
- def test_sym_difference_non_overlap(self):
- self.set ^= set([8])
- self.assertEqual(self.set, set([2, 4, 6, 8]))
-
- def test_sym_difference_method_call(self):
- self.set.symmetric_difference_update(set([3, 4, 5]))
- self.assertEqual(self.set, set([2, 3, 5, 6]))
-
- def test_difference_subset(self):
- self.set -= set((2, 4))
- self.assertEqual(self.set, set([6]))
-
- def test_difference_superset(self):
- self.set -= set((2, 4, 6, 8))
- self.assertEqual(self.set, set([]))
-
- def test_difference_overlap(self):
- self.set -= set((3, 4, 5))
- self.assertEqual(self.set, set([2, 6]))
-
- def test_difference_non_overlap(self):
- self.set -= set([8])
- self.assertEqual(self.set, set([2, 4, 6]))
-
- def test_difference_method_call(self):
- self.set.difference_update(set([3, 4, 5]))
- self.assertEqual(self.set, set([2, 6]))
-
-#==============================================================================
-
-class TestMutate(unittest.TestCase):
- def setUp(self):
- self.values = ["a", "b", "c"]
- self.set = set(self.values)
-
- def test_add_present(self):
- self.set.add("c")
- self.assertEqual(self.set, set("abc"))
-
- def test_add_absent(self):
- self.set.add("d")
- self.assertEqual(self.set, set("abcd"))
-
- def test_add_until_full(self):
- tmp = set()
- expected_len = 0
- for v in self.values:
- tmp.add(v)
- expected_len += 1
- self.assertEqual(len(tmp), expected_len)
- self.assertEqual(tmp, self.set)
-
- def test_remove_present(self):
- self.set.remove("b")
- self.assertEqual(self.set, set("ac"))
-
- def test_remove_absent(self):
- try:
- self.set.remove("d")
- self.fail("Removing missing element should have raised LookupError")
- except LookupError:
- pass
-
- def test_remove_until_empty(self):
- expected_len = len(self.set)
- for v in self.values:
- self.set.remove(v)
- expected_len -= 1
- self.assertEqual(len(self.set), expected_len)
-
- def test_discard_present(self):
- self.set.discard("c")
- self.assertEqual(self.set, set("ab"))
-
- def test_discard_absent(self):
- self.set.discard("d")
- self.assertEqual(self.set, set("abc"))
-
- def test_clear(self):
- self.set.clear()
- self.assertEqual(len(self.set), 0)
-
- def test_pop(self):
- popped = {}
- while self.set:
- popped[self.set.pop()] = None
- self.assertEqual(len(popped), len(self.values))
- for v in self.values:
- self.assertIn(v, popped)
-
- def test_update_empty_tuple(self):
- self.set.update(())
- self.assertEqual(self.set, set(self.values))
-
- def test_update_unit_tuple_overlap(self):
- self.set.update(("a",))
- self.assertEqual(self.set, set(self.values))
-
- def test_update_unit_tuple_non_overlap(self):
- self.set.update(("a", "z"))
- self.assertEqual(self.set, set(self.values + ["z"]))
-
-#==============================================================================
-
-class TestSubsets(unittest.TestCase):
-
- case2method = {"<=": "issubset",
- ">=": "issuperset",
- }
-
- reverse = {"==": "==",
- "!=": "!=",
- "<": ">",
- ">": "<",
- "<=": ">=",
- ">=": "<=",
- }
-
- def test_issubset(self):
- raise NotImplementedError() # eval not supported below
- x = self.left
- y = self.right
- for case in "!=", "==", "<", "<=", ">", ">=":
- expected = case in self.cases
- # Test the binary infix spelling.
- result = None ## eval("x" + case + "y", locals())
- self.assertEqual(result, expected)
- # Test the "friendly" method-name spelling, if one exists.
- if case in TestSubsets.case2method:
- method = getattr(x, TestSubsets.case2method[case])
- result = method(y)
- self.assertEqual(result, expected)
-
- # Now do the same for the operands reversed.
- rcase = TestSubsets.reverse[case]
- result = None ## eval("y" + rcase + "x", locals())
- self.assertEqual(result, expected)
- if rcase in TestSubsets.case2method:
- method = getattr(y, TestSubsets.case2method[rcase])
- result = method(x)
- self.assertEqual(result, expected)
-#------------------------------------------------------------------------------
-
-class TestSubsetEqualEmpty(TestSubsets):
- left = set() # type: Any
- right = set() # type: Any
- name = "both empty"
- cases = "==", "<=", ">="
-
-#------------------------------------------------------------------------------
-
-class TestSubsetEqualNonEmpty(TestSubsets):
- left = set([1, 2])
- right = set([1, 2])
- name = "equal pair"
- cases = "==", "<=", ">="
-
-#------------------------------------------------------------------------------
-
-class TestSubsetEmptyNonEmpty(TestSubsets):
- left = set() # type: Any
- right = set([1, 2])
- name = "one empty, one non-empty"
- cases = "!=", "<", "<="
-
-#------------------------------------------------------------------------------
-
-class TestSubsetPartial(TestSubsets):
- left = set([1])
- right = set([1, 2])
- name = "one a non-empty proper subset of other"
- cases = "!=", "<", "<="
-
-#------------------------------------------------------------------------------
-
-class TestSubsetNonOverlap(TestSubsets):
- left = set([1])
- right = set([2])
- name = "neither empty, neither contains"
- cases = "!="
-
-#==============================================================================
-
-class TestOnlySetsInBinaryOps(unittest.TestCase):
-
- def test_eq_ne(self):
- # Unlike the others, this is testing that == and != *are* allowed.
- self.assertEqual(self.other == self.set, False)
- self.assertEqual(self.set == self.other, False)
- self.assertEqual(self.other != self.set, True)
- self.assertEqual(self.set != self.other, True)
-
- def test_ge_gt_le_lt(self):
- self.assertRaises(TypeError, lambda: self.set < self.other)
- self.assertRaises(TypeError, lambda: self.set <= self.other)
- self.assertRaises(TypeError, lambda: self.set > self.other)
- self.assertRaises(TypeError, lambda: self.set >= self.other)
-
- self.assertRaises(TypeError, lambda: self.other < self.set)
- self.assertRaises(TypeError, lambda: self.other <= self.set)
- self.assertRaises(TypeError, lambda: self.other > self.set)
- self.assertRaises(TypeError, lambda: self.other >= self.set)
-
- def test_update_operator(self):
- try:
- self.set |= self.other
- except TypeError:
- pass
- else:
- self.fail("expected TypeError")
-
- def test_update(self):
- if self.otherIsIterable:
- self.set.update(self.other)
- else:
- self.assertRaises(TypeError, self.set.update, self.other)
-
- def test_union(self):
- self.assertRaises(TypeError, lambda: self.set | self.other)
- self.assertRaises(TypeError, lambda: self.other | self.set)
- if self.otherIsIterable:
- self.set.union(self.other)
- else:
- self.assertRaises(TypeError, self.set.union, self.other)
-
- def test_intersection_update_operator(self):
- try:
- self.set &= self.other
- except TypeError:
- pass
- else:
- self.fail("expected TypeError")
-
- def test_intersection_update(self):
- if self.otherIsIterable:
- self.set.intersection_update(self.other)
- else:
- self.assertRaises(TypeError,
- self.set.intersection_update,
- self.other)
-
- def test_intersection(self):
- self.assertRaises(TypeError, lambda: self.set & self.other)
- self.assertRaises(TypeError, lambda: self.other & self.set)
- if self.otherIsIterable:
- self.set.intersection(self.other)
- else:
- self.assertRaises(TypeError, self.set.intersection, self.other)
-
- def test_sym_difference_update_operator(self):
- try:
- self.set ^= self.other
- except TypeError:
- pass
- else:
- self.fail("expected TypeError")
-
- def test_sym_difference_update(self):
- if self.otherIsIterable:
- self.set.symmetric_difference_update(self.other)
- else:
- self.assertRaises(TypeError,
- self.set.symmetric_difference_update,
- self.other)
-
- def test_sym_difference(self):
- self.assertRaises(TypeError, lambda: self.set ^ self.other)
- self.assertRaises(TypeError, lambda: self.other ^ self.set)
- if self.otherIsIterable:
- self.set.symmetric_difference(self.other)
- else:
- self.assertRaises(TypeError, self.set.symmetric_difference, self.other)
-
- def test_difference_update_operator(self):
- try:
- self.set -= self.other
- except TypeError:
- pass
- else:
- self.fail("expected TypeError")
-
- def test_difference_update(self):
- if self.otherIsIterable:
- self.set.difference_update(self.other)
- else:
- self.assertRaises(TypeError,
- self.set.difference_update,
- self.other)
-
- def test_difference(self):
- self.assertRaises(TypeError, lambda: self.set - self.other)
- self.assertRaises(TypeError, lambda: self.other - self.set)
- if self.otherIsIterable:
- self.set.difference(self.other)
- else:
- self.assertRaises(TypeError, self.set.difference, self.other)
-
-#------------------------------------------------------------------------------
-
-class TestOnlySetsNumeric(TestOnlySetsInBinaryOps):
- def setUp(self):
- self.set = set((1, 2, 3))
- self.other = 19
- self.otherIsIterable = False
-
-#------------------------------------------------------------------------------
-
-class TestOnlySetsDict(TestOnlySetsInBinaryOps):
- def setUp(self):
- self.set = set((1, 2, 3))
- self.other = {1:2, 3:4}
- self.otherIsIterable = True
-
-#------------------------------------------------------------------------------
-
-class TestOnlySetsOperator(TestOnlySetsInBinaryOps):
- def setUp(self):
- self.set = set((1, 2, 3))
- self.other = operator.add
- self.otherIsIterable = False
-
-#------------------------------------------------------------------------------
-
-class TestOnlySetsTuple(TestOnlySetsInBinaryOps):
- def setUp(self):
- self.set = set((1, 2, 3))
- self.other = (2, 4, 6)
- self.otherIsIterable = True
-
-#------------------------------------------------------------------------------
-
-class TestOnlySetsString(TestOnlySetsInBinaryOps):
- def setUp(self):
- self.set = set((1, 2, 3))
- self.other = 'abc'
- self.otherIsIterable = True
-
-#------------------------------------------------------------------------------
-
-class TestOnlySetsGenerator(TestOnlySetsInBinaryOps):
- def setUp(self):
- def gen():
- for i in range(0, 10, 2):
- yield i
- self.set = set((1, 2, 3))
- self.other = gen()
- self.otherIsIterable = True
-
-#==============================================================================
-
-class TestCopying(unittest.TestCase):
-
- def test_copy(self):
- dup = self.set.copy()
- dup_list = sorted(dup, key=repr)
- set_list = sorted(self.set, key=repr)
- self.assertEqual(len(dup_list), len(set_list))
- for i in range(len(dup_list)):
- self.assertTrue(dup_list[i] is set_list[i])
-
- def test_deep_copy(self):
- dup = copy.deepcopy(self.set)
- ##print type(dup), repr(dup)
- dup_list = sorted(dup, key=repr)
- set_list = sorted(self.set, key=repr)
- self.assertEqual(len(dup_list), len(set_list))
- for i in range(len(dup_list)):
- self.assertEqual(dup_list[i], set_list[i])
-
-#------------------------------------------------------------------------------
-
-class TestCopyingEmpty(TestCopying):
- def setUp(self):
- self.set = set()
-
-#------------------------------------------------------------------------------
-
-class TestCopyingSingleton(TestCopying):
- def setUp(self):
- self.set = set(["hello"])
-
-#------------------------------------------------------------------------------
-
-class TestCopyingTriple(TestCopying):
- def setUp(self):
- self.set = set(["zero", 0, None])
-
-#------------------------------------------------------------------------------
-
-class TestCopyingTuple(TestCopying):
- def setUp(self):
- self.set = set([(1, 2)])
-
-#------------------------------------------------------------------------------
-
-class TestCopyingNested(TestCopying):
- def setUp(self):
- self.set = set([((1, 2), (3, 4))])
-
-#==============================================================================
-
-class TestIdentities(unittest.TestCase):
- def setUp(self):
- self.a = set('abracadabra')
- self.b = set('alacazam')
-
- def test_binopsVsSubsets(self):
- a, b = self.a, self.b
- self.assertTrue(a - b < a)
- self.assertTrue(b - a < b)
- self.assertTrue(a & b < a)
- self.assertTrue(a & b < b)
- self.assertTrue(a | b > a)
- self.assertTrue(a | b > b)
- self.assertTrue(a ^ b < a | b)
-
- def test_commutativity(self):
- a, b = self.a, self.b
- self.assertEqual(a&b, b&a)
- self.assertEqual(a|b, b|a)
- self.assertEqual(a^b, b^a)
- if a != b:
- self.assertNotEqual(a-b, b-a)
-
- def test_summations(self):
- # check that sums of parts equal the whole
- a, b = self.a, self.b
- self.assertEqual((a-b)|(a&b)|(b-a), a|b)
- self.assertEqual((a&b)|(a^b), a|b)
- self.assertEqual(a|(b-a), a|b)
- self.assertEqual((a-b)|b, a|b)
- self.assertEqual((a-b)|(a&b), a)
- self.assertEqual((b-a)|(a&b), b)
- self.assertEqual((a-b)|(b-a), a^b)
-
- def test_exclusion(self):
- # check that inverse operations show non-overlap
- a, b, zero = self.a, self.b, set()
- self.assertEqual((a-b)&b, zero)
- self.assertEqual((b-a)&a, zero)
- self.assertEqual((a&b)&(a^b), zero)
-
-# Tests derived from test_itertools.py =======================================
-
-def R(seqn):
- 'Regular generator'
- for i in seqn:
- yield i
-
-class G:
- 'Sequence using __getitem__'
- def __init__(self, seqn):
- self.seqn = seqn
- def __getitem__(self, i):
- return self.seqn[i]
-
-class I:
- 'Sequence using iterator protocol'
- def __init__(self, seqn):
- self.seqn = seqn
- self.i = 0
- def __iter__(self):
- return self
- def __next__(self):
- if self.i >= len(self.seqn): raise StopIteration
- v = self.seqn[self.i]
- self.i += 1
- return v
-
-class Ig:
- 'Sequence using iterator protocol defined with a generator'
- def __init__(self, seqn):
- self.seqn = seqn
- self.i = 0
- def __iter__(self):
- for val in self.seqn:
- yield val
-
-class X:
- 'Missing __getitem__ and __iter__'
- def __init__(self, seqn):
- self.seqn = seqn
- self.i = 0
- def __next__(self):
- if self.i >= len(self.seqn): raise StopIteration
- v = self.seqn[self.i]
- self.i += 1
- return v
-
-class N:
- 'Iterator missing __next__()'
- def __init__(self, seqn):
- self.seqn = seqn
- self.i = 0
- def __iter__(self):
- return self
-
-class E:
- 'Test propagation of exceptions'
- def __init__(self, seqn):
- self.seqn = seqn
- self.i = 0
- def __iter__(self):
- return self
- def __next__(self):
- 3 // 0
-
-class S:
- 'Test immediate stop'
- def __init__(self, seqn):
- pass
- def __iter__(self):
- return self
- def __next__(self):
- raise StopIteration
-
-from itertools import chain
-def L(seqn):
- 'Test multiple tiers of iterators'
- return chain(map(lambda x:x, R(Ig(G(seqn)))))
-
-class TestVariousIteratorArgs(unittest.TestCase):
-
- def test_constructor(self):
- for cons in (set, frozenset):
- for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)):
- for g in (G, I, Ig, S, L, R):
- self.assertEqual(sorted(cons(g(s)), key=repr), sorted(g(s), key=repr))
- self.assertRaises(TypeError, cons, X(s))
- self.assertRaises(TypeError, cons, N(s))
- self.assertRaises(ZeroDivisionError, cons, E(s))
-
- def test_inline_methods(self):
- s = set('november')
- for data in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5), 'december'):
- for meth in (s.union, s.intersection, s.difference, s.symmetric_difference, s.isdisjoint):
- for g in (G, I, Ig, L, R):
- expected = meth(data)
- actual = meth(G(data))
- if isinstance(expected, bool):
- self.assertEqual(actual, expected)
- else:
- self.assertEqual(sorted(actual, key=repr), sorted(expected, key=repr))
- self.assertRaises(TypeError, meth, X(s))
- self.assertRaises(TypeError, meth, N(s))
- self.assertRaises(ZeroDivisionError, meth, E(s))
-
- def test_inplace_methods(self):
- for data in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5), 'december'):
- for methname in ('update', 'intersection_update',
- 'difference_update', 'symmetric_difference_update'):
- for g in (G, I, Ig, S, L, R):
- s = set('january')
- t = s.copy()
- getattr(s, methname)(list(g(data)))
- getattr(t, methname)(g(data))
- self.assertEqual(sorted(s, key=repr), sorted(t, key=repr))
-
- self.assertRaises(TypeError, getattr(set('january'), methname), X(data))
- self.assertRaises(TypeError, getattr(set('january'), methname), N(data))
- self.assertRaises(ZeroDivisionError, getattr(set('january'), methname), E(data))
-
-be_bad = set2 = dict2 = None # type: Any
-
-class bad_eq:
- def __eq__(self, other):
- if be_bad:
- set2.clear()
- raise ZeroDivisionError
- return self is other
- def __hash__(self):
- return 0
-
-class bad_dict_clear:
- def __eq__(self, other):
- if be_bad:
- dict2.clear()
- return self is other
- def __hash__(self):
- return 0
-
-class TestWeirdBugs(unittest.TestCase):
- def test_8420_set_merge(self):
- # This used to segfault
- global be_bad, set2, dict2
- be_bad = False
- set1 = {bad_eq()}
- set2 = {bad_eq() for i in range(75)}
- be_bad = True
- self.assertRaises(ZeroDivisionError, set1.update, set2)
-
- be_bad = False
- set1 = {bad_dict_clear()}
- dict2 = {bad_dict_clear(): None}
- be_bad = True
- set1.symmetric_difference_update(dict2)
-
- # Application tests (based on David Eppstein's graph recipes) ===================================
-
-def powerset(U):
- """Generates all subsets of a set or sequence U."""
- U = iter(U)
- try:
- x = frozenset([next(U)])
- for S in powerset(U):
- yield S
- yield S | x
- except StopIteration:
- yield frozenset()
-
-def cube(n):
- """Graph of n-dimensional hypercube."""
- singletons = [frozenset([x]) for x in range(n)]
- return dict([(x, frozenset([x^s for s in singletons]))
- for x in powerset(range(n))])
-
-def linegraph(G):
- """Graph, the vertices of which are edges of G,
- with two vertices being adjacent iff the corresponding
- edges share a vertex."""
- L = {}
- for x in G:
- for y in G[x]:
- nx = [frozenset([x,z]) for z in G[x] if z != y]
- ny = [frozenset([y,z]) for z in G[y] if z != x]
- L[frozenset([x,y])] = frozenset(nx+ny)
- return L
-
-def faces(G):
- 'Return a set of faces in G, where a face is the set of vertices on that face.'
- # currently limited to triangles, squares, and pentagons
- f = set()
- for v1, edges in G.items():
- for v2 in edges:
- for v3 in G[v2]:
- if v1 == v3:
- continue
- if v1 in G[v3]:
- f.add(frozenset([v1, v2, v3]))
- else:
- for v4 in G[v3]:
- if v4 == v2:
- continue
- if v1 in G[v4]:
- f.add(frozenset([v1, v2, v3, v4]))
- else:
- for v5 in G[v4]:
- if v5 == v3 or v5 == v2:
- continue
- if v1 in G[v5]:
- f.add(frozenset([v1, v2, v3, v4, v5]))
- return f
-
-
-class TestGraphs(unittest.TestCase):
-
- def test_cube(self):
-
- g = cube(3) # vert --> {v1, v2, v3}
- vertices1 = set(g)
- self.assertEqual(len(vertices1), 8) # eight vertices
- for edge in g.values():
- self.assertEqual(len(edge), 3) # each vertex connects to three edges
- vertices2 = set()
- for edges in g.values():
- for v in edges:
- vertices2.add(v)
- self.assertEqual(vertices1, vertices2) # edge vertices in original set
-
- cubefaces = faces(g)
- self.assertEqual(len(cubefaces), 6) # six faces
- for face in cubefaces:
- self.assertEqual(len(face), 4) # each face is a square
-
- def test_cuboctahedron(self):
-
- # http://en.wikipedia.org/wiki/Cuboctahedron
- # 8 triangular faces and 6 square faces
- # 12 identical vertices, each connecting a triangle and a square
-
- g = cube(3)
- cuboctahedron = linegraph(g) # V --> {V1, V2, V3, V4}
- self.assertEqual(len(cuboctahedron), 12) # twelve vertices
-
- vertices = set(cuboctahedron)
- for edges in cuboctahedron.values():
- self.assertEqual(len(edges), 4) # each vertex connects to four other vertices
- othervertices = set(edge for edges in cuboctahedron.values() for edge in edges)
- self.assertEqual(vertices, othervertices) # edge vertices in original set
-
- cubofaces = faces(cuboctahedron)
- facesizes = collections.defaultdict(int)
- for face in cubofaces:
- facesizes[len(face)] += 1
- self.assertEqual(facesizes[3], 8) # eight triangular faces
- self.assertEqual(facesizes[4], 6) # six square faces
-
- for vertex in cuboctahedron:
- edge = vertex # Cuboctahedron vertices are edges in Cube
- self.assertEqual(len(edge), 2) # Two cube vertices define an edge
- for cubevert in edge:
- self.assertIn(cubevert, g)
-
-
-#==============================================================================
-
-def test_main(verbose=None):
- test_classes = (
- TestSet,
- TestSetSubclass,
- TestSetSubclassWithKeywordArgs,
- TestFrozenSet,
- TestFrozenSetSubclass,
- TestSetOfSets,
- TestExceptionPropagation,
- TestBasicOpsEmpty,
- TestBasicOpsSingleton,
- TestBasicOpsTuple,
- TestBasicOpsTriple,
- TestBasicOpsString,
- TestBasicOpsBytes,
- TestBasicOpsMixedStringBytes,
- TestBinaryOps,
- TestUpdateOps,
- TestMutate,
- TestSubsetEqualEmpty,
- TestSubsetEqualNonEmpty,
- TestSubsetEmptyNonEmpty,
- TestSubsetPartial,
- TestSubsetNonOverlap,
- TestOnlySetsNumeric,
- TestOnlySetsDict,
- TestOnlySetsOperator,
- TestOnlySetsTuple,
- TestOnlySetsString,
- TestOnlySetsGenerator,
- TestCopyingEmpty,
- TestCopyingSingleton,
- TestCopyingTriple,
- TestCopyingTuple,
- TestCopyingNested,
- TestIdentities,
- TestVariousIteratorArgs,
- TestGraphs,
- TestWeirdBugs,
- )
-
- support.run_unittest(*test_classes)
-
- # verify reference counting
- if verbose and hasattr(sys, "gettotalrefcount"):
- import gc
- counts = [None] * 5
- for i in range(len(counts)):
- support.run_unittest(*test_classes)
- gc.collect()
- counts[i] = sys.gettotalrefcount()
- print(counts)
-
-if __name__ == "__main__":
- test_main(verbose=True)
diff --git a/test-data/stdlib-samples/3.2/test/test_shutil.py b/test-data/stdlib-samples/3.2/test/test_shutil.py
deleted file mode 100644
index 32e0fd1..0000000
--- a/test-data/stdlib-samples/3.2/test/test_shutil.py
+++ /dev/null
@@ -1,978 +0,0 @@
-# Copyright (C) 2003 Python Software Foundation
-
-import unittest
-import shutil
-import tempfile
-import sys
-import stat
-import os
-import os.path
-import functools
-from test import support
-from test.support import TESTFN
-from os.path import splitdrive
-from distutils.spawn import find_executable, spawn
-from shutil import (_make_tarball, _make_zipfile, make_archive,
- register_archive_format, unregister_archive_format,
- get_archive_formats, Error, unpack_archive,
- register_unpack_format, RegistryError,
- unregister_unpack_format, get_unpack_formats)
-import tarfile
-import warnings
-
-from test import support
-from test.support import check_warnings, captured_stdout
-
-from typing import (
- Any, Callable, Tuple, List, Sequence, BinaryIO, IO, Union, cast
-)
-from types import TracebackType
-
-import bz2
-BZ2_SUPPORTED = True
-
-TESTFN2 = TESTFN + "2"
-
-import grp
-import pwd
-UID_GID_SUPPORT = True
-
-import zlib
-
-import zipfile
-ZIP_SUPPORT = True
-
-def _fake_rename(*args: Any, **kwargs: Any) -> None:
- # Pretend the destination path is on a different filesystem.
- raise OSError()
-
-def mock_rename(func: Any) -> Any:
- @functools.wraps(func)
- def wrap(*args: Any, **kwargs: Any) -> Any:
- try:
- builtin_rename = shutil.rename
- shutil.rename = cast(Any, _fake_rename)
- return func(*args, **kwargs)
- finally:
- shutil.rename = cast(Any, builtin_rename)
- return wrap
-
-class TestShutil(unittest.TestCase):
-
- def setUp(self) -> None:
- super().setUp()
- self.tempdirs = [] # type: List[str]
-
- def tearDown(self) -> None:
- super().tearDown()
- while self.tempdirs:
- d = self.tempdirs.pop()
- shutil.rmtree(d, os.name in ('nt', 'cygwin'))
-
- def write_file(self, path: Union[str, List[str], tuple], content: str = 'xxx') -> None:
- """Writes a file in the given path.
-
-
- path can be a string or a sequence.
- """
- if isinstance(path, list):
- path = os.path.join(*path)
- elif isinstance(path, tuple):
- path = cast(str, os.path.join(*path))
- f = open(path, 'w')
- try:
- f.write(content)
- finally:
- f.close()
-
- def mkdtemp(self) -> str:
- """Create a temporary directory that will be cleaned up.
-
- Returns the path of the directory.
- """
- d = tempfile.mkdtemp()
- self.tempdirs.append(d)
- return d
-
- def test_rmtree_errors(self) -> None:
- # filename is guaranteed not to exist
- filename = tempfile.mktemp()
- self.assertRaises(OSError, shutil.rmtree, filename)
-
- # See bug #1071513 for why we don't run this on cygwin
- # and bug #1076467 for why we don't run this as root.
- if (hasattr(os, 'chmod') and sys.platform[:6] != 'cygwin'
- and not (hasattr(os, 'geteuid') and os.geteuid() == 0)):
- def test_on_error(self) -> None:
- self.errorState = 0
- os.mkdir(TESTFN)
- self.childpath = os.path.join(TESTFN, 'a')
- f = open(self.childpath, 'w')
- f.close()
- old_dir_mode = os.stat(TESTFN).st_mode
- old_child_mode = os.stat(self.childpath).st_mode
- # Make unwritable.
- os.chmod(self.childpath, stat.S_IREAD)
- os.chmod(TESTFN, stat.S_IREAD)
-
- shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror)
- # Test whether onerror has actually been called.
- self.assertEqual(self.errorState, 2,
- "Expected call to onerror function did not happen.")
-
- # Make writable again.
- os.chmod(TESTFN, old_dir_mode)
- os.chmod(self.childpath, old_child_mode)
-
- # Clean up.
- shutil.rmtree(TESTFN)
-
- def check_args_to_onerror(self, func: Callable[[str], Any], arg: str,
- exc: Tuple[type, BaseException,
- TracebackType]) -> None:
- # test_rmtree_errors deliberately runs rmtree
- # on a directory that is chmod 400, which will fail.
- # This function is run when shutil.rmtree fails.
- # 99.9% of the time it initially fails to remove
- # a file in the directory, so the first time through
- # func is os.remove.
- # However, some Linux machines running ZFS on
- # FUSE experienced a failure earlier in the process
- # at os.listdir. The first failure may legally
- # be either.
- if self.errorState == 0:
- if func is os.remove:
- self.assertEqual(arg, self.childpath)
- else:
- self.assertIs(func, os.listdir,
- "func must be either os.remove or os.listdir")
- self.assertEqual(arg, TESTFN)
- self.assertTrue(issubclass(exc[0], OSError))
- self.errorState = 1
- else:
- self.assertEqual(func, os.rmdir)
- self.assertEqual(arg, TESTFN)
- self.assertTrue(issubclass(exc[0], OSError))
- self.errorState = 2
-
- def test_rmtree_dont_delete_file(self) -> None:
- # When called on a file instead of a directory, don't delete it.
- handle, path = tempfile.mkstemp()
- os.fdopen(handle).close()
- self.assertRaises(OSError, shutil.rmtree, path)
- os.remove(path)
-
- def _write_data(self, path: str, data: str) -> None:
- f = open(path, "w")
- f.write(data)
- f.close()
-
- def test_copytree_simple(self) -> None:
-
- def read_data(path: str) -> str:
- f = open(path)
- data = f.read()
- f.close()
- return data
-
- src_dir = tempfile.mkdtemp()
- dst_dir = os.path.join(tempfile.mkdtemp(), 'destination')
- self._write_data(os.path.join(src_dir, 'test.txt'), '123')
- os.mkdir(os.path.join(src_dir, 'test_dir'))
- self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
-
- try:
- shutil.copytree(src_dir, dst_dir)
- self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt')))
- self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir')))
- self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir',
- 'test.txt')))
- actual = read_data(os.path.join(dst_dir, 'test.txt'))
- self.assertEqual(actual, '123')
- actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt'))
- self.assertEqual(actual, '456')
- finally:
- for path in (
- os.path.join(src_dir, 'test.txt'),
- os.path.join(dst_dir, 'test.txt'),
- os.path.join(src_dir, 'test_dir', 'test.txt'),
- os.path.join(dst_dir, 'test_dir', 'test.txt'),
- ):
- if os.path.exists(path):
- os.remove(path)
- for path in (src_dir,
- os.path.dirname(dst_dir)
- ):
- if os.path.exists(path):
- shutil.rmtree(path)
-
- def test_copytree_with_exclude(self) -> None:
-
- def read_data(path: str) -> str:
- f = open(path)
- data = f.read()
- f.close()
- return data
-
- # creating data
- join = os.path.join
- exists = os.path.exists
- src_dir = tempfile.mkdtemp()
- try:
- dst_dir = join(tempfile.mkdtemp(), 'destination')
- self._write_data(join(src_dir, 'test.txt'), '123')
- self._write_data(join(src_dir, 'test.tmp'), '123')
- os.mkdir(join(src_dir, 'test_dir'))
- self._write_data(join(src_dir, 'test_dir', 'test.txt'), '456')
- os.mkdir(join(src_dir, 'test_dir2'))
- self._write_data(join(src_dir, 'test_dir2', 'test.txt'), '456')
- os.mkdir(join(src_dir, 'test_dir2', 'subdir'))
- os.mkdir(join(src_dir, 'test_dir2', 'subdir2'))
- self._write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'),
- '456')
- self._write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'),
- '456')
-
-
- # testing glob-like patterns
- try:
- patterns = shutil.ignore_patterns('*.tmp', 'test_dir2')
- shutil.copytree(src_dir, dst_dir, ignore=patterns)
- # checking the result: some elements should not be copied
- self.assertTrue(exists(join(dst_dir, 'test.txt')))
- self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
- self.assertTrue(not exists(join(dst_dir, 'test_dir2')))
- finally:
- if os.path.exists(dst_dir):
- shutil.rmtree(dst_dir)
- try:
- patterns = shutil.ignore_patterns('*.tmp', 'subdir*')
- shutil.copytree(src_dir, dst_dir, ignore=patterns)
- # checking the result: some elements should not be copied
- self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
- self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2')))
- self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir')))
- finally:
- if os.path.exists(dst_dir):
- shutil.rmtree(dst_dir)
-
- # testing callable-style
- try:
- def _filter(src: str, names: Sequence[str]) -> List[str]:
- res = [] # type: List[str]
- for name in names:
- path = os.path.join(src, name)
-
- if (os.path.isdir(path) and
- path.split()[-1] == 'subdir'):
- res.append(name)
- elif os.path.splitext(path)[-1] in ('.py',):
- res.append(name)
- return res
-
- shutil.copytree(src_dir, dst_dir, ignore=_filter)
-
- # checking the result: some elements should not be copied
- self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2',
- 'test.py')))
- self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir')))
-
- finally:
- if os.path.exists(dst_dir):
- shutil.rmtree(dst_dir)
- finally:
- shutil.rmtree(src_dir)
- shutil.rmtree(os.path.dirname(dst_dir))
-
- @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link')
- def test_dont_copy_file_onto_link_to_itself(self) -> None:
- # Temporarily disable test on Windows.
- if os.name == 'nt':
- return
- # bug 851123.
- os.mkdir(TESTFN)
- src = os.path.join(TESTFN, 'cheese')
- dst = os.path.join(TESTFN, 'shop')
- try:
- with open(src, 'w') as f:
- f.write('cheddar')
- os.link(src, dst)
- self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
- with open(src, 'r') as f:
- self.assertEqual(f.read(), 'cheddar')
- os.remove(dst)
- finally:
- shutil.rmtree(TESTFN, ignore_errors=True)
-
- @support.skip_unless_symlink
- def test_dont_copy_file_onto_symlink_to_itself(self) -> None:
- # bug 851123.
- os.mkdir(TESTFN)
- src = os.path.join(TESTFN, 'cheese')
- dst = os.path.join(TESTFN, 'shop')
- try:
- with open(src, 'w') as f:
- f.write('cheddar')
- # Using `src` here would mean we end up with a symlink pointing
- # to TESTFN/TESTFN/cheese, while it should point at
- # TESTFN/cheese.
- os.symlink('cheese', dst)
- self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
- with open(src, 'r') as f:
- self.assertEqual(f.read(), 'cheddar')
- os.remove(dst)
- finally:
- shutil.rmtree(TESTFN, ignore_errors=True)
-
- @support.skip_unless_symlink
- def test_rmtree_on_symlink(self) -> None:
- # bug 1669.
- os.mkdir(TESTFN)
- try:
- src = os.path.join(TESTFN, 'cheese')
- dst = os.path.join(TESTFN, 'shop')
- os.mkdir(src)
- os.symlink(src, dst)
- self.assertRaises(OSError, shutil.rmtree, dst)
- finally:
- shutil.rmtree(TESTFN, ignore_errors=True)
-
- if hasattr(os, "mkfifo"):
- # Issue #3002: copyfile and copytree block indefinitely on named pipes
- def test_copyfile_named_pipe(self) -> None:
- os.mkfifo(TESTFN)
- try:
- self.assertRaises(shutil.SpecialFileError,
- shutil.copyfile, TESTFN, TESTFN2)
- self.assertRaises(shutil.SpecialFileError,
- shutil.copyfile, __file__, TESTFN)
- finally:
- os.remove(TESTFN)
-
- @support.skip_unless_symlink
- def test_copytree_named_pipe(self) -> None:
- os.mkdir(TESTFN)
- try:
- subdir = os.path.join(TESTFN, "subdir")
- os.mkdir(subdir)
- pipe = os.path.join(subdir, "mypipe")
- os.mkfifo(pipe)
- try:
- shutil.copytree(TESTFN, TESTFN2)
- except shutil.Error as e:
- errors = e.args[0]
- self.assertEqual(len(errors), 1)
- src, dst, error_msg = errors[0]
- self.assertEqual("`%s` is a named pipe" % pipe, error_msg)
- else:
- self.fail("shutil.Error should have been raised")
- finally:
- shutil.rmtree(TESTFN, ignore_errors=True)
- shutil.rmtree(TESTFN2, ignore_errors=True)
-
- def test_copytree_special_func(self) -> None:
-
- src_dir = self.mkdtemp()
- dst_dir = os.path.join(self.mkdtemp(), 'destination')
- self._write_data(os.path.join(src_dir, 'test.txt'), '123')
- os.mkdir(os.path.join(src_dir, 'test_dir'))
- self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
-
- copied = [] # type: List[Tuple[str, str]]
- def _copy(src: str, dst: str) -> None:
- copied.append((src, dst))
-
- shutil.copytree(src_dir, dst_dir, copy_function=_copy)
- self.assertEqual(len(copied), 2)
-
- @support.skip_unless_symlink
- def test_copytree_dangling_symlinks(self) -> None:
-
- # a dangling symlink raises an error at the end
- src_dir = self.mkdtemp()
- dst_dir = os.path.join(self.mkdtemp(), 'destination')
- os.symlink('IDONTEXIST', os.path.join(src_dir, 'test.txt'))
- os.mkdir(os.path.join(src_dir, 'test_dir'))
- self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
- self.assertRaises(Error, shutil.copytree, src_dir, dst_dir)
-
- # a dangling symlink is ignored with the proper flag
- dst_dir = os.path.join(self.mkdtemp(), 'destination2')
- shutil.copytree(src_dir, dst_dir, ignore_dangling_symlinks=True)
- self.assertNotIn('test.txt', os.listdir(dst_dir))
-
- # a dangling symlink is copied if symlinks=True
- dst_dir = os.path.join(self.mkdtemp(), 'destination3')
- shutil.copytree(src_dir, dst_dir, symlinks=True)
- self.assertIn('test.txt', os.listdir(dst_dir))
-
- def _copy_file(self,
- method: Callable[[str, str], None]) -> Tuple[str, str]:
- fname = 'test.txt'
- tmpdir = self.mkdtemp()
- self.write_file([tmpdir, fname])
- file1 = os.path.join(tmpdir, fname)
- tmpdir2 = self.mkdtemp()
- method(file1, tmpdir2)
- file2 = os.path.join(tmpdir2, fname)
- return (file1, file2)
-
- @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
- def test_copy(self) -> None:
- # Ensure that the copied file exists and has the same mode bits.
- file1, file2 = self._copy_file(shutil.copy)
- self.assertTrue(os.path.exists(file2))
- self.assertEqual(os.stat(file1).st_mode, os.stat(file2).st_mode)
-
- @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
- @unittest.skipUnless(hasattr(os, 'utime'), 'requires os.utime')
- def test_copy2(self) -> None:
- # Ensure that the copied file exists and has the same mode and
- # modification time bits.
- file1, file2 = self._copy_file(shutil.copy2)
- self.assertTrue(os.path.exists(file2))
- file1_stat = os.stat(file1)
- file2_stat = os.stat(file2)
- self.assertEqual(file1_stat.st_mode, file2_stat.st_mode)
- for attr in 'st_atime', 'st_mtime':
- # The modification times may be truncated in the new file.
- self.assertLessEqual(getattr(file1_stat, attr),
- getattr(file2_stat, attr) + 1)
- if hasattr(os, 'chflags') and hasattr(file1_stat, 'st_flags'):
- self.assertEqual(getattr(file1_stat, 'st_flags'),
- getattr(file2_stat, 'st_flags'))
-
- @unittest.skipUnless(zlib, "requires zlib")
- def test_make_tarball(self) -> None:
- # creating something to tar
- tmpdir = self.mkdtemp()
- self.write_file([tmpdir, 'file1'], 'xxx')
- self.write_file([tmpdir, 'file2'], 'xxx')
- os.mkdir(os.path.join(tmpdir, 'sub'))
- self.write_file([tmpdir, 'sub', 'file3'], 'xxx')
-
- tmpdir2 = self.mkdtemp()
- # force shutil to create the directory
- os.rmdir(tmpdir2)
- if splitdrive(tmpdir)[0] != splitdrive(tmpdir2)[0]:
- self.skipTest("source and target should be on the same drive")
-
- base_name = os.path.join(tmpdir2, 'archive')
-
- # working with relative paths to avoid tar warnings
- old_dir = os.getcwd()
- os.chdir(tmpdir)
- try:
- _make_tarball(splitdrive(base_name)[1], '.')
- finally:
- os.chdir(old_dir)
-
- # check if the compressed tarball was created
- tarball = base_name + '.tar.gz'
- self.assertTrue(os.path.exists(tarball))
-
- # trying an uncompressed one
- base_name = os.path.join(tmpdir2, 'archive')
- old_dir = os.getcwd()
- os.chdir(tmpdir)
- try:
- _make_tarball(splitdrive(base_name)[1], '.', compress=None)
- finally:
- os.chdir(old_dir)
- tarball = base_name + '.tar'
- self.assertTrue(os.path.exists(tarball))
-
- def _tarinfo(self, path: str) -> tuple:
- tar = tarfile.open(path)
- try:
- names = tar.getnames()
- names.sort()
- return tuple(names)
- finally:
- tar.close()
-
- def _create_files(self) -> Tuple[str, str, str]:
- # creating something to tar
- tmpdir = self.mkdtemp()
- dist = os.path.join(tmpdir, 'dist')
- os.mkdir(dist)
- self.write_file([dist, 'file1'], 'xxx')
- self.write_file([dist, 'file2'], 'xxx')
- os.mkdir(os.path.join(dist, 'sub'))
- self.write_file([dist, 'sub', 'file3'], 'xxx')
- os.mkdir(os.path.join(dist, 'sub2'))
- tmpdir2 = self.mkdtemp()
- base_name = os.path.join(tmpdir2, 'archive')
- return tmpdir, tmpdir2, base_name
-
- @unittest.skipUnless(zlib, "Requires zlib")
- @unittest.skipUnless(find_executable('tar') and find_executable('gzip'),
- 'Need the tar and gzip commands to run')
- def test_tarfile_vs_tar(self) -> None:
- tmpdir, tmpdir2, base_name = self._create_files()
- old_dir = os.getcwd()
- os.chdir(tmpdir)
- try:
- _make_tarball(base_name, 'dist')
- finally:
- os.chdir(old_dir)
-
- # check if the compressed tarball was created
- tarball = base_name + '.tar.gz'
- self.assertTrue(os.path.exists(tarball))
-
- # now create another tarball using `tar`
- tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
- tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
- gzip_cmd = ['gzip', '-f9', 'archive2.tar']
- old_dir = os.getcwd()
- os.chdir(tmpdir)
- try:
- with captured_stdout() as s:
- spawn(tar_cmd)
- spawn(gzip_cmd)
- finally:
- os.chdir(old_dir)
-
- self.assertTrue(os.path.exists(tarball2))
- # let's compare both tarballs
- self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2))
-
- # trying an uncompressed one
- base_name = os.path.join(tmpdir2, 'archive')
- old_dir = os.getcwd()
- os.chdir(tmpdir)
- try:
- _make_tarball(base_name, 'dist', compress=None)
- finally:
- os.chdir(old_dir)
- tarball = base_name + '.tar'
- self.assertTrue(os.path.exists(tarball))
-
- # now for a dry_run
- base_name = os.path.join(tmpdir2, 'archive')
- old_dir = os.getcwd()
- os.chdir(tmpdir)
- try:
- _make_tarball(base_name, 'dist', compress=None, dry_run=True)
- finally:
- os.chdir(old_dir)
- tarball = base_name + '.tar'
- self.assertTrue(os.path.exists(tarball))
-
- @unittest.skipUnless(zlib, "Requires zlib")
- @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
- def test_make_zipfile(self) -> None:
- # creating something to zip
- tmpdir = self.mkdtemp()
- self.write_file([tmpdir, 'file1'], 'xxx')
- self.write_file([tmpdir, 'file2'], 'xxx')
-
- tmpdir2 = self.mkdtemp()
- # force shutil to create the directory
- os.rmdir(tmpdir2)
- base_name = os.path.join(tmpdir2, 'archive')
- _make_zipfile(base_name, tmpdir)
-
- # check if the zip file was created
- zip_filename = base_name + '.zip'
- self.assertTrue(os.path.exists(zip_filename))
-
-
- def test_make_archive(self) -> None:
- tmpdir = self.mkdtemp()
- base_name = os.path.join(tmpdir, 'archive')
- self.assertRaises(ValueError, make_archive, base_name, 'xxx')
-
- @unittest.skipUnless(zlib, "Requires zlib")
- def test_make_archive_owner_group(self) -> None:
- # testing make_archive with owner and group, with various combinations;
- # this works even if there's no gid/uid support
- if UID_GID_SUPPORT:
- group = grp.getgrgid(0).gr_name
- owner = pwd.getpwuid(0).pw_name
- else:
- group = owner = 'root'
-
- base_dir, root_dir, base_name = self._create_files()
- base_name = os.path.join(self.mkdtemp(), 'archive')
- res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner,
- group=group)
- self.assertTrue(os.path.exists(res))
-
- res = make_archive(base_name, 'zip', root_dir, base_dir)
- self.assertTrue(os.path.exists(res))
-
- res = make_archive(base_name, 'tar', root_dir, base_dir,
- owner=owner, group=group)
- self.assertTrue(os.path.exists(res))
-
- res = make_archive(base_name, 'tar', root_dir, base_dir,
- owner='kjhkjhkjg', group='oihohoh')
- self.assertTrue(os.path.exists(res))
-
-
- @unittest.skipUnless(zlib, "Requires zlib")
- @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
- def test_tarfile_root_owner(self) -> None:
- tmpdir, tmpdir2, base_name = self._create_files()
- old_dir = os.getcwd()
- os.chdir(tmpdir)
- group = grp.getgrgid(0).gr_name
- owner = pwd.getpwuid(0).pw_name
- try:
- archive_name = _make_tarball(base_name, 'dist', compress=None,
- owner=owner, group=group)
- finally:
- os.chdir(old_dir)
-
- # check if the compressed tarball was created
- self.assertTrue(os.path.exists(archive_name))
-
- # now check the owner and group of the archive members
- archive = tarfile.open(archive_name)
- try:
- for member in archive.getmembers():
- self.assertEqual(member.uid, 0)
- self.assertEqual(member.gid, 0)
- finally:
- archive.close()
-
- def test_make_archive_cwd(self) -> None:
- current_dir = os.getcwd()
- def _breaks(*args: Any, **kw: Any) -> None:
- raise RuntimeError()
-
- register_archive_format('xxx', _breaks, [], 'xxx file')
- try:
- try:
- make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
- except Exception:
- pass
- self.assertEqual(os.getcwd(), current_dir)
- finally:
- unregister_archive_format('xxx')
-
- def test_register_archive_format(self) -> None:
-
- self.assertRaises(TypeError, register_archive_format, 'xxx', 1)
- self.assertRaises(TypeError, register_archive_format, 'xxx',
- lambda: 1/0,
- 1)
- self.assertRaises(TypeError, register_archive_format, 'xxx',
- lambda: 1/0,
- [(1, 2), (1, 2, 3)])
-
- register_archive_format('xxx', lambda: 1/0, [('x', 2)], 'xxx file')
- formats = [name for name, params in get_archive_formats()]
- self.assertIn('xxx', formats)
-
- unregister_archive_format('xxx')
- formats = [name for name, params in get_archive_formats()]
- self.assertNotIn('xxx', formats)
-
- def _compare_dirs(self, dir1: str, dir2: str) -> List[str]:
- # check that dir1 and dir2 are equivalent,
- # return the diff
- diff = [] # type: List[str]
- for root, dirs, files in os.walk(dir1):
- for file_ in files:
- path = os.path.join(root, file_)
- target_path = os.path.join(dir2, os.path.split(path)[-1])
- if not os.path.exists(target_path):
- diff.append(file_)
- return diff
-
- @unittest.skipUnless(zlib, "Requires zlib")
- def test_unpack_archive(self) -> None:
- formats = ['tar', 'gztar', 'zip']
- if BZ2_SUPPORTED:
- formats.append('bztar')
-
- for format in formats:
- tmpdir = self.mkdtemp()
- base_dir, root_dir, base_name = self._create_files()
- tmpdir2 = self.mkdtemp()
- filename = make_archive(base_name, format, root_dir, base_dir)
-
- # let's try to unpack it now
- unpack_archive(filename, tmpdir2)
- diff = self._compare_dirs(tmpdir, tmpdir2)
- self.assertEqual(diff, [])
-
- # and again, this time with the format specified
- tmpdir3 = self.mkdtemp()
- unpack_archive(filename, tmpdir3, format=format)
- diff = self._compare_dirs(tmpdir, tmpdir3)
- self.assertEqual(diff, [])
- self.assertRaises(shutil.ReadError, unpack_archive, TESTFN)
- self.assertRaises(ValueError, unpack_archive, TESTFN, format='xxx')
-
- def test_unpack_registery(self) -> None:
-
- formats = get_unpack_formats()
-
- def _boo(filename: str, extract_dir: str, extra: int) -> None:
- self.assertEqual(extra, 1)
- self.assertEqual(filename, 'stuff.boo')
- self.assertEqual(extract_dir, 'xx')
-
- register_unpack_format('Boo', ['.boo', '.b2'], _boo, [('extra', 1)])
- unpack_archive('stuff.boo', 'xx')
-
- # trying to register a .boo unpacker again
- self.assertRaises(RegistryError, register_unpack_format, 'Boo2',
- ['.boo'], _boo)
-
- # should work now
- unregister_unpack_format('Boo')
- register_unpack_format('Boo2', ['.boo'], _boo)
- self.assertIn(('Boo2', ['.boo'], ''), get_unpack_formats())
- self.assertNotIn(('Boo', ['.boo'], ''), get_unpack_formats())
-
- # let's leave a clean state
- unregister_unpack_format('Boo2')
- self.assertEqual(get_unpack_formats(), formats)
-
-
-class TestMove(unittest.TestCase):
-
- def setUp(self) -> None:
- filename = "foo"
- self.src_dir = tempfile.mkdtemp()
- self.dst_dir = tempfile.mkdtemp()
- self.src_file = os.path.join(self.src_dir, filename)
- self.dst_file = os.path.join(self.dst_dir, filename)
- with open(self.src_file, "wb") as f:
- f.write(b"spam")
-
- def tearDown(self) -> None:
- for d in (self.src_dir, self.dst_dir):
- try:
- if d:
- shutil.rmtree(d)
- except:
- pass
-
- def _check_move_file(self, src: str, dst: str, real_dst: str) -> None:
- with open(src, "rb") as f:
- contents = f.read()
- shutil.move(src, dst)
- with open(real_dst, "rb") as f:
- self.assertEqual(contents, f.read())
- self.assertFalse(os.path.exists(src))
-
- def _check_move_dir(self, src: str, dst: str, real_dst: str) -> None:
- contents = sorted(os.listdir(src))
- shutil.move(src, dst)
- self.assertEqual(contents, sorted(os.listdir(real_dst)))
- self.assertFalse(os.path.exists(src))
-
- def test_move_file(self) -> None:
- # Move a file to another location on the same filesystem.
- self._check_move_file(self.src_file, self.dst_file, self.dst_file)
-
- def test_move_file_to_dir(self) -> None:
- # Move a file inside an existing dir on the same filesystem.
- self._check_move_file(self.src_file, self.dst_dir, self.dst_file)
-
- @mock_rename
- def test_move_file_other_fs(self) -> None:
- # Move a file to an existing dir on another filesystem.
- self.test_move_file()
-
- @mock_rename
- def test_move_file_to_dir_other_fs(self) -> None:
- # Move a file to another location on another filesystem.
- self.test_move_file_to_dir()
-
- def test_move_dir(self) -> None:
- # Move a dir to another location on the same filesystem.
- dst_dir = tempfile.mktemp()
- try:
- self._check_move_dir(self.src_dir, dst_dir, dst_dir)
- finally:
- try:
- shutil.rmtree(dst_dir)
- except:
- pass
-
- @mock_rename
- def test_move_dir_other_fs(self) -> None:
- # Move a dir to another location on another filesystem.
- self.test_move_dir()
-
- def test_move_dir_to_dir(self) -> None:
- # Move a dir inside an existing dir on the same filesystem.
- self._check_move_dir(self.src_dir, self.dst_dir,
- os.path.join(self.dst_dir, os.path.basename(self.src_dir)))
-
- @mock_rename
- def test_move_dir_to_dir_other_fs(self) -> None:
- # Move a dir inside an existing dir on another filesystem.
- self.test_move_dir_to_dir()
-
- def test_existing_file_inside_dest_dir(self) -> None:
- # A file with the same name inside the destination dir already exists.
- with open(self.dst_file, "wb"):
- pass
- self.assertRaises(shutil.Error, shutil.move, self.src_file, self.dst_dir)
-
- def test_dont_move_dir_in_itself(self) -> None:
- # Moving a dir inside itself raises an Error.
- dst = os.path.join(self.src_dir, "bar")
- self.assertRaises(shutil.Error, shutil.move, self.src_dir, dst)
-
- def test_destinsrc_false_negative(self) -> None:
- os.mkdir(TESTFN)
- try:
- for src, dst in [('srcdir', 'srcdir/dest')]:
- src = os.path.join(TESTFN, src)
- dst = os.path.join(TESTFN, dst)
- self.assertTrue(shutil._destinsrc(src, dst),
- msg='_destinsrc() wrongly concluded that '
- 'dst (%s) is not in src (%s)' % (dst, src))
- finally:
- shutil.rmtree(TESTFN, ignore_errors=True)
-
- def test_destinsrc_false_positive(self) -> None:
- os.mkdir(TESTFN)
- try:
- for src, dst in [('srcdir', 'src/dest'), ('srcdir', 'srcdir.new')]:
- src = os.path.join(TESTFN, src)
- dst = os.path.join(TESTFN, dst)
- self.assertFalse(shutil._destinsrc(src, dst),
- msg='_destinsrc() wrongly concluded that '
- 'dst (%s) is in src (%s)' % (dst, src))
- finally:
- shutil.rmtree(TESTFN, ignore_errors=True)
-
-
-class TestCopyFile(unittest.TestCase):
-
- _delete = False
-
- class Faux(object):
- _entered = False
- _exited_with = None # type: tuple
- _raised = False
- def __init__(self, raise_in_exit: bool = False,
- suppress_at_exit: bool = True) -> None:
- self._raise_in_exit = raise_in_exit
- self._suppress_at_exit = suppress_at_exit
- def read(self, *args: Any) -> str:
- return ''
- def __enter__(self) -> None:
- self._entered = True
- def __exit__(self, exc_type: type, exc_val: BaseException,
- exc_tb: TracebackType) -> bool:
- self._exited_with = exc_type, exc_val, exc_tb
- if self._raise_in_exit:
- self._raised = True
- raise IOError("Cannot close")
- return self._suppress_at_exit
-
- def tearDown(self) -> None:
- shutil.open = open
-
- def _set_shutil_open(self, func: Any) -> None:
- shutil.open = func
- self._delete = True
-
- def test_w_source_open_fails(self) -> None:
- def _open(filename: str, mode: str = 'r') -> BinaryIO:
- if filename == 'srcfile':
- raise IOError('Cannot open "srcfile"')
- assert 0 # shouldn't reach here.
-
- self._set_shutil_open(_open)
-
- self.assertRaises(IOError, shutil.copyfile, 'srcfile', 'destfile')
-
- def test_w_dest_open_fails(self) -> None:
-
- srcfile = TestCopyFile.Faux()
-
- def _open(filename: str, mode: str = 'r') -> TestCopyFile.Faux:
- if filename == 'srcfile':
- return srcfile
- if filename == 'destfile':
- raise IOError('Cannot open "destfile"')
- assert 0 # shouldn't reach here.
-
- self._set_shutil_open(_open)
-
- shutil.copyfile('srcfile', 'destfile')
- self.assertTrue(srcfile._entered)
- self.assertTrue(srcfile._exited_with[0] is IOError)
- self.assertEqual(srcfile._exited_with[1].args,
- ('Cannot open "destfile"',))
-
- def test_w_dest_close_fails(self) -> None:
-
- srcfile = TestCopyFile.Faux()
- destfile = TestCopyFile.Faux(True)
-
- def _open(filename: str, mode: str = 'r') -> TestCopyFile.Faux:
- if filename == 'srcfile':
- return srcfile
- if filename == 'destfile':
- return destfile
- assert 0 # shouldn't reach here.
-
- self._set_shutil_open(_open)
-
- shutil.copyfile('srcfile', 'destfile')
- self.assertTrue(srcfile._entered)
- self.assertTrue(destfile._entered)
- self.assertTrue(destfile._raised)
- self.assertTrue(srcfile._exited_with[0] is IOError)
- self.assertEqual(srcfile._exited_with[1].args,
- ('Cannot close',))
-
- def test_w_source_close_fails(self) -> None:
-
- srcfile = TestCopyFile.Faux(True)
- destfile = TestCopyFile.Faux()
-
- def _open(filename: str, mode: str = 'r') -> TestCopyFile.Faux:
- if filename == 'srcfile':
- return srcfile
- if filename == 'destfile':
- return destfile
- assert 0 # shouldn't reach here.
-
- self._set_shutil_open(_open)
-
- self.assertRaises(IOError,
- shutil.copyfile, 'srcfile', 'destfile')
- self.assertTrue(srcfile._entered)
- self.assertTrue(destfile._entered)
- self.assertFalse(destfile._raised)
- self.assertTrue(srcfile._exited_with[0] is None)
- self.assertTrue(srcfile._raised)
-
- def test_move_dir_caseinsensitive(self) -> None:
- # Renames a folder to the same name
- # but a different case.
-
- self.src_dir = tempfile.mkdtemp()
- dst_dir = os.path.join(
- os.path.dirname(self.src_dir),
- os.path.basename(self.src_dir).upper())
- self.assertNotEqual(self.src_dir, dst_dir)
-
- try:
- shutil.move(self.src_dir, dst_dir)
- self.assertTrue(os.path.isdir(dst_dir))
- finally:
- if os.path.exists(dst_dir):
- os.rmdir(dst_dir)
-
-
-
-def test_main() -> None:
- support.run_unittest(TestShutil, TestMove, TestCopyFile)
-
-if __name__ == '__main__':
- test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_subprocess.py b/test-data/stdlib-samples/3.2/test/test_subprocess.py
deleted file mode 100644
index 772d8cc..0000000
--- a/test-data/stdlib-samples/3.2/test/test_subprocess.py
+++ /dev/null
@@ -1,1764 +0,0 @@
-import unittest
-from test import support
-import subprocess
-import sys
-import signal
-import io
-import os
-import errno
-import tempfile
-import time
-import re
-import sysconfig
-import warnings
-import select
-import shutil
-import gc
-
-import resource
-
-from typing import Any, Dict, Callable, Iterable, List, Set, Tuple, cast
-
-mswindows = (sys.platform == "win32")
-
-#
-# Depends on the following external programs: Python
-#
-
-if mswindows:
- SETBINARY = ('import msvcrt; msvcrt.setmode(sys.stdout.fileno(), '
- 'os.O_BINARY);')
-else:
- SETBINARY = ''
-
-
-try:
- mkstemp = tempfile.mkstemp
-except AttributeError:
- # tempfile.mkstemp is not available
- def _mkstemp() -> Tuple[int, str]:
- """Replacement for mkstemp, calling mktemp."""
- fname = tempfile.mktemp()
- return os.open(fname, os.O_RDWR|os.O_CREAT), fname
- mkstemp = cast(Any, _mkstemp)
-
-
-class BaseTestCase(unittest.TestCase):
- def setUp(self) -> None:
- # Try to minimize the number of children we have so this test
- # doesn't crash on some buildbots (Alphas in particular).
- support.reap_children()
-
- def tearDown(self) -> None:
- for inst in subprocess._active:
- inst.wait()
- subprocess._cleanup()
- self.assertFalse(subprocess._active, "subprocess._active not empty")
-
- def assertStderrEqual(self, stderr: bytes, expected: bytes,
- msg: object = None) -> None:
- # In a debug build, stuff like "[6580 refs]" is printed to stderr at
- # shutdown time. That frustrates tests trying to check stderr produced
- # from a spawned Python process.
- actual = support.strip_python_stderr(stderr)
- self.assertEqual(actual, expected, msg)
-
-
-class ProcessTestCase(BaseTestCase):
-
- def test_call_seq(self) -> None:
- # call() function with sequence argument
- rc = subprocess.call([sys.executable, "-c",
- "import sys; sys.exit(47)"])
- self.assertEqual(rc, 47)
-
- def test_check_call_zero(self) -> None:
- # check_call() function with zero return code
- rc = subprocess.check_call([sys.executable, "-c",
- "import sys; sys.exit(0)"])
- self.assertEqual(rc, 0)
-
- def test_check_call_nonzero(self) -> None:
- # check_call() function with non-zero return code
- with self.assertRaises(subprocess.CalledProcessError) as c:
- subprocess.check_call([sys.executable, "-c",
- "import sys; sys.exit(47)"])
- self.assertEqual(c.exception.returncode, 47)
-
- def test_check_output(self) -> None:
- # check_output() function with zero return code
- output = subprocess.check_output(
- [sys.executable, "-c", "print('BDFL')"])
- self.assertIn(b'BDFL', cast(Any, output)) # see #39
-
- def test_check_output_nonzero(self) -> None:
- # check_output() function with non-zero return code
- with self.assertRaises(subprocess.CalledProcessError) as c:
- subprocess.check_output(
- [sys.executable, "-c", "import sys; sys.exit(5)"])
- self.assertEqual(c.exception.returncode, 5)
-
- def test_check_output_stderr(self) -> None:
- # check_output() function stderr redirected to stdout
- output = subprocess.check_output(
- [sys.executable, "-c", "import sys; sys.stderr.write('BDFL')"],
- stderr=subprocess.STDOUT)
- self.assertIn(b'BDFL', cast(Any, output)) # see #39
-
- def test_check_output_stdout_arg(self) -> None:
- # check_output() raises ValueError when a stdout argument is supplied
- with self.assertRaises(ValueError) as c:
- output = subprocess.check_output(
- [sys.executable, "-c", "print('will not be run')"],
- stdout=sys.stdout)
- self.fail("Expected ValueError when stdout arg supplied.")
- self.assertIn('stdout', c.exception.args[0])
-
- def test_call_kwargs(self) -> None:
- # call() function with keyword args
- newenv = os.environ.copy()
- newenv["FRUIT"] = "banana"
- rc = subprocess.call([sys.executable, "-c",
- 'import sys, os;'
- 'sys.exit(os.getenv("FRUIT")=="banana")'],
- env=newenv)
- self.assertEqual(rc, 1)
-
- def test_invalid_args(self) -> None:
- # Popen() called with invalid arguments should raise TypeError
- # but Popen.__del__ should not complain (issue #12085)
- with support.captured_stderr() as s:
- self.assertRaises(TypeError, subprocess.Popen, invalid_arg_name=1)
- argcount = subprocess.Popen.__init__.__code__.co_argcount
- too_many_args = [0] * (argcount + 1)
- self.assertRaises(TypeError, subprocess.Popen, *too_many_args)
- self.assertEqual(s.getvalue(), '')
-
- def test_stdin_none(self) -> None:
- # .stdin is None when not redirected
- p = subprocess.Popen([sys.executable, "-c", 'print("banana")'],
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- self.addCleanup(p.stdout.close)
- self.addCleanup(p.stderr.close)
- p.wait()
- self.assertEqual(p.stdin, None)
-
- def test_stdout_none(self) -> None:
- # .stdout is None when not redirected
- p = subprocess.Popen([sys.executable, "-c",
- 'print(" this bit of output is from a '
- 'test of stdout in a different '
- 'process ...")'],
- stdin=subprocess.PIPE, stderr=subprocess.PIPE)
- self.addCleanup(p.stdin.close)
- self.addCleanup(p.stderr.close)
- p.wait()
- self.assertEqual(p.stdout, None)
-
- def test_stderr_none(self) -> None:
- # .stderr is None when not redirected
- p = subprocess.Popen([sys.executable, "-c", 'print("banana")'],
- stdin=subprocess.PIPE, stdout=subprocess.PIPE)
- self.addCleanup(p.stdout.close)
- self.addCleanup(p.stdin.close)
- p.wait()
- self.assertEqual(p.stderr, None)
-
- def test_executable_with_cwd(self) -> None:
- python_dir = os.path.dirname(os.path.realpath(sys.executable))
- p = subprocess.Popen(["somethingyoudonthave", "-c",
- "import sys; sys.exit(47)"],
- executable=sys.executable, cwd=python_dir)
- p.wait()
- self.assertEqual(p.returncode, 47)
-
- @unittest.skipIf(sysconfig.is_python_build(),
- "need an installed Python. See #7774")
- def test_executable_without_cwd(self) -> None:
- # For a normal installation, it should work without 'cwd'
- # argument. For test runs in the build directory, see #7774.
- p = subprocess.Popen(["somethingyoudonthave", "-c",
- "import sys; sys.exit(47)"],
- executable=sys.executable)
- p.wait()
- self.assertEqual(p.returncode, 47)
-
- def test_stdin_pipe(self) -> None:
- # stdin redirection
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys; sys.exit(sys.stdin.read() == "pear")'],
- stdin=subprocess.PIPE)
- p.stdin.write(b"pear")
- p.stdin.close()
- p.wait()
- self.assertEqual(p.returncode, 1)
-
- def test_stdin_filedes(self) -> None:
- # stdin is set to open file descriptor
- tf = tempfile.TemporaryFile()
- self.addCleanup(tf.close)
- d = tf.fileno()
- os.write(d, b"pear")
- os.lseek(d, 0, 0)
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys; sys.exit(sys.stdin.read() == "pear")'],
- stdin=d)
- p.wait()
- self.assertEqual(p.returncode, 1)
-
- def test_stdin_fileobj(self) -> None:
- # stdin is set to open file object
- tf = tempfile.TemporaryFile()
- self.addCleanup(tf.close)
- tf.write(b"pear")
- tf.seek(0)
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys; sys.exit(sys.stdin.read() == "pear")'],
- stdin=tf)
- p.wait()
- self.assertEqual(p.returncode, 1)
-
- def test_stdout_pipe(self) -> None:
- # stdout redirection
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys; sys.stdout.write("orange")'],
- stdout=subprocess.PIPE)
- self.addCleanup(p.stdout.close)
- self.assertEqual(p.stdout.read(), b"orange")
-
- def test_stdout_filedes(self) -> None:
- # stdout is set to open file descriptor
- tf = tempfile.TemporaryFile()
- self.addCleanup(tf.close)
- d = tf.fileno()
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys; sys.stdout.write("orange")'],
- stdout=d)
- p.wait()
- os.lseek(d, 0, 0)
- self.assertEqual(os.read(d, 1024), b"orange")
-
- def test_stdout_fileobj(self) -> None:
- # stdout is set to open file object
- tf = tempfile.TemporaryFile()
- self.addCleanup(tf.close)
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys; sys.stdout.write("orange")'],
- stdout=tf)
- p.wait()
- tf.seek(0)
- self.assertEqual(tf.read(), b"orange")
-
- def test_stderr_pipe(self) -> None:
- # stderr redirection
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys; sys.stderr.write("strawberry")'],
- stderr=subprocess.PIPE)
- self.addCleanup(p.stderr.close)
- self.assertStderrEqual(p.stderr.read(), b"strawberry")
-
- def test_stderr_filedes(self) -> None:
- # stderr is set to open file descriptor
- tf = tempfile.TemporaryFile()
- self.addCleanup(tf.close)
- d = tf.fileno()
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys; sys.stderr.write("strawberry")'],
- stderr=d)
- p.wait()
- os.lseek(d, 0, 0)
- self.assertStderrEqual(os.read(d, 1024), b"strawberry")
-
- def test_stderr_fileobj(self) -> None:
- # stderr is set to open file object
- tf = tempfile.TemporaryFile()
- self.addCleanup(tf.close)
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys; sys.stderr.write("strawberry")'],
- stderr=tf)
- p.wait()
- tf.seek(0)
- self.assertStderrEqual(tf.read(), b"strawberry")
-
- def test_stdout_stderr_pipe(self) -> None:
- # capture stdout and stderr to the same pipe
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys;'
- 'sys.stdout.write("apple");'
- 'sys.stdout.flush();'
- 'sys.stderr.write("orange")'],
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- self.addCleanup(p.stdout.close)
- self.assertStderrEqual(p.stdout.read(), b"appleorange")
-
- def test_stdout_stderr_file(self) -> None:
- # capture stdout and stderr to the same open file
- tf = tempfile.TemporaryFile()
- self.addCleanup(tf.close)
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys;'
- 'sys.stdout.write("apple");'
- 'sys.stdout.flush();'
- 'sys.stderr.write("orange")'],
- stdout=tf,
- stderr=tf)
- p.wait()
- tf.seek(0)
- self.assertStderrEqual(tf.read(), b"appleorange")
-
- def test_stdout_filedes_of_stdout(self) -> None:
- # stdout is set to 1 (#1531862).
- cmd = r"import sys, os; sys.exit(os.write(sys.stdout.fileno(), b'.\n'))"
- rc = subprocess.call([sys.executable, "-c", cmd], stdout=1)
- self.assertEqual(rc, 2)
-
- def test_cwd(self) -> None:
- tmpdir = tempfile.gettempdir()
- # We cannot use os.path.realpath to canonicalize the path,
- # since it doesn't expand Tru64 {memb} strings. See bug 1063571.
- cwd = os.getcwd()
- os.chdir(tmpdir)
- tmpdir = os.getcwd()
- os.chdir(cwd)
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys,os;'
- 'sys.stdout.write(os.getcwd())'],
- stdout=subprocess.PIPE,
- cwd=tmpdir)
- self.addCleanup(p.stdout.close)
- normcase = os.path.normcase
- self.assertEqual(normcase(p.stdout.read().decode("utf-8")),
- normcase(tmpdir))
-
- def test_env(self) -> None:
- newenv = os.environ.copy()
- newenv["FRUIT"] = "orange"
- with subprocess.Popen([sys.executable, "-c",
- 'import sys,os;'
- 'sys.stdout.write(os.getenv("FRUIT"))'],
- stdout=subprocess.PIPE,
- env=newenv) as p:
- stdout, stderr = p.communicate()
- self.assertEqual(stdout, b"orange")
-
- # Windows requires at least the SYSTEMROOT environment variable to start
- # Python
- @unittest.skipIf(sys.platform == 'win32',
- 'cannot test an empty env on Windows')
- @unittest.skipIf(sysconfig.get_config_var('Py_ENABLE_SHARED') is not None,
- 'the python library cannot be loaded '
- 'with an empty environment')
- def test_empty_env(self) -> None:
- with subprocess.Popen([sys.executable, "-c",
- 'import os; '
- 'print(list(os.environ.keys()))'],
- stdout=subprocess.PIPE,
- env={}) as p:
- stdout, stderr = p.communicate()
- self.assertIn(stdout.strip(),
- [b"[]",
- # Mac OS X adds __CF_USER_TEXT_ENCODING variable to an empty
- # environment
- b"['__CF_USER_TEXT_ENCODING']"])
-
- def test_communicate_stdin(self) -> None:
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys;'
- 'sys.exit(sys.stdin.read() == "pear")'],
- stdin=subprocess.PIPE)
- p.communicate(b"pear")
- self.assertEqual(p.returncode, 1)
-
- def test_communicate_stdout(self) -> None:
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys; sys.stdout.write("pineapple")'],
- stdout=subprocess.PIPE)
- (stdout, stderr) = p.communicate()
- self.assertEqual(stdout, b"pineapple")
- self.assertEqual(stderr, None)
-
- def test_communicate_stderr(self) -> None:
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys; sys.stderr.write("pineapple")'],
- stderr=subprocess.PIPE)
- (stdout, stderr) = p.communicate()
- self.assertEqual(stdout, None)
- self.assertStderrEqual(stderr, b"pineapple")
-
- def test_communicate(self) -> None:
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys,os;'
- 'sys.stderr.write("pineapple");'
- 'sys.stdout.write(sys.stdin.read())'],
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- self.addCleanup(p.stdout.close)
- self.addCleanup(p.stderr.close)
- self.addCleanup(p.stdin.close)
- (stdout, stderr) = p.communicate(b"banana")
- self.assertEqual(stdout, b"banana")
- self.assertStderrEqual(stderr, b"pineapple")
-
- # Test for the fd leak reported in http://bugs.python.org/issue2791.
- def test_communicate_pipe_fd_leak(self) -> None:
- for stdin_pipe in (False, True):
- for stdout_pipe in (False, True):
- for stderr_pipe in (False, True):
- options = {} # type: Dict[str, Any]
- if stdin_pipe:
- options['stdin'] = subprocess.PIPE
- if stdout_pipe:
- options['stdout'] = subprocess.PIPE
- if stderr_pipe:
- options['stderr'] = subprocess.PIPE
- if not options:
- continue
- p = subprocess.Popen([sys.executable, "-c", "pass"], **options)
- p.communicate()
- if p.stdin is not None:
- self.assertTrue(p.stdin.closed)
- if p.stdout is not None:
- self.assertTrue(p.stdout.closed)
- if p.stderr is not None:
- self.assertTrue(p.stderr.closed)
-
- def test_communicate_returns(self) -> None:
- # communicate() should return None if no redirection is active
- p = subprocess.Popen([sys.executable, "-c",
- "import sys; sys.exit(47)"])
- (stdout, stderr) = p.communicate()
- self.assertEqual(stdout, None)
- self.assertEqual(stderr, None)
-
- def test_communicate_pipe_buf(self) -> None:
- # communicate() with writes larger than pipe_buf
- # This test will probably deadlock rather than fail, if
- # communicate() does not work properly.
- x, y = os.pipe()
- if mswindows:
- pipe_buf = 512
- else:
- pipe_buf = os.fpathconf(x, "PC_PIPE_BUF")
- os.close(x)
- os.close(y)
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys,os;'
- 'sys.stdout.write(sys.stdin.read(47));'
- 'sys.stderr.write("xyz"*%d);'
- 'sys.stdout.write(sys.stdin.read())' % pipe_buf],
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- self.addCleanup(p.stdout.close)
- self.addCleanup(p.stderr.close)
- self.addCleanup(p.stdin.close)
- string_to_write = b"abc"*pipe_buf
- (stdout, stderr) = p.communicate(string_to_write)
- self.assertEqual(stdout, string_to_write)
-
- def test_writes_before_communicate(self) -> None:
- # stdin.write before communicate()
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys,os;'
- 'sys.stdout.write(sys.stdin.read())'],
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- self.addCleanup(p.stdout.close)
- self.addCleanup(p.stderr.close)
- self.addCleanup(p.stdin.close)
- p.stdin.write(b"banana")
- (stdout, stderr) = p.communicate(b"split")
- self.assertEqual(stdout, b"bananasplit")
- self.assertStderrEqual(stderr, b"")
-
- def test_universal_newlines(self) -> None:
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys,os;' + SETBINARY +
- 'sys.stdout.write(sys.stdin.readline());'
- 'sys.stdout.flush();'
- 'sys.stdout.write("line2\\n");'
- 'sys.stdout.flush();'
- 'sys.stdout.write(sys.stdin.read());'
- 'sys.stdout.flush();'
- 'sys.stdout.write("line4\\n");'
- 'sys.stdout.flush();'
- 'sys.stdout.write("line5\\r\\n");'
- 'sys.stdout.flush();'
- 'sys.stdout.write("line6\\r");'
- 'sys.stdout.flush();'
- 'sys.stdout.write("\\nline7");'
- 'sys.stdout.flush();'
- 'sys.stdout.write("\\nline8");'],
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- universal_newlines=1)
- p.stdin.write("line1\n")
- self.assertEqual(p.stdout.readline(), "line1\n")
- p.stdin.write("line3\n")
- p.stdin.close()
- self.addCleanup(p.stdout.close)
- self.assertEqual(p.stdout.readline(),
- "line2\n")
- self.assertEqual(p.stdout.read(6),
- "line3\n")
- self.assertEqual(p.stdout.read(),
- "line4\nline5\nline6\nline7\nline8")
-
- def test_universal_newlines_communicate(self) -> None:
- # universal newlines through communicate()
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys,os;' + SETBINARY +
- 'sys.stdout.write("line2\\n");'
- 'sys.stdout.flush();'
- 'sys.stdout.write("line4\\n");'
- 'sys.stdout.flush();'
- 'sys.stdout.write("line5\\r\\n");'
- 'sys.stdout.flush();'
- 'sys.stdout.write("line6\\r");'
- 'sys.stdout.flush();'
- 'sys.stdout.write("\\nline7");'
- 'sys.stdout.flush();'
- 'sys.stdout.write("\\nline8");'],
- stderr=subprocess.PIPE,
- stdout=subprocess.PIPE,
- universal_newlines=1)
- self.addCleanup(p.stdout.close)
- self.addCleanup(p.stderr.close)
- # BUG: can't give a non-empty stdin because it breaks both the
- # select- and poll-based communicate() implementations.
- (stdout, stderr) = p.communicate()
- self.assertEqual(stdout,
- "line2\nline4\nline5\nline6\nline7\nline8")
-
- def test_universal_newlines_communicate_stdin(self) -> None:
- # universal newlines through communicate(), with only stdin
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys,os;' + SETBINARY + '''\nif True:
- s = sys.stdin.readline()
- assert s == "line1\\n", repr(s)
- s = sys.stdin.read()
- assert s == "line3\\n", repr(s)
- '''],
- stdin=subprocess.PIPE,
- universal_newlines=1)
- (stdout, stderr) = p.communicate("line1\nline3\n")
- self.assertEqual(p.returncode, 0)
-
- def test_no_leaking(self) -> None:
- # Make sure we leak no resources
- if not mswindows:
- max_handles = 1026 # too much for most UNIX systems
- else:
- max_handles = 2050 # too much for (at least some) Windows setups
- handles = [] # type: List[int]
- tmpdir = tempfile.mkdtemp()
- try:
- for i in range(max_handles):
- try:
- tmpfile = os.path.join(tmpdir, support.TESTFN)
- handles.append(os.open(tmpfile, os.O_WRONLY|os.O_CREAT))
- except OSError as e:
- if e.errno != errno.EMFILE:
- raise
- break
- else:
- self.skipTest("failed to reach the file descriptor limit "
- "(tried %d)" % max_handles)
- # Close a couple of them (should be enough for a subprocess)
- for i in range(10):
- os.close(handles.pop())
- # Loop creating some subprocesses. If one of them leaks some fds,
- # the next loop iteration will fail by reaching the max fd limit.
- for i in range(15):
- p = subprocess.Popen([sys.executable, "-c",
- "import sys;"
- "sys.stdout.write(sys.stdin.read())"],
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- data = p.communicate(b"lime")[0]
- self.assertEqual(data, b"lime")
- finally:
- for h in handles:
- os.close(h)
- shutil.rmtree(tmpdir)
-
- def test_list2cmdline(self) -> None:
- self.assertEqual(subprocess.list2cmdline(['a b c', 'd', 'e']),
- '"a b c" d e')
- self.assertEqual(subprocess.list2cmdline(['ab"c', '\\', 'd']),
- 'ab\\"c \\ d')
- self.assertEqual(subprocess.list2cmdline(['ab"c', ' \\', 'd']),
- 'ab\\"c " \\\\" d')
- self.assertEqual(subprocess.list2cmdline(['a\\\\\\b', 'de fg', 'h']),
- 'a\\\\\\b "de fg" h')
- self.assertEqual(subprocess.list2cmdline(['a\\"b', 'c', 'd']),
- 'a\\\\\\"b c d')
- self.assertEqual(subprocess.list2cmdline(['a\\\\b c', 'd', 'e']),
- '"a\\\\b c" d e')
- self.assertEqual(subprocess.list2cmdline(['a\\\\b\\ c', 'd', 'e']),
- '"a\\\\b\\ c" d e')
- self.assertEqual(subprocess.list2cmdline(['ab', '']),
- 'ab ""')
-
-
- def test_poll(self) -> None:
- p = subprocess.Popen([sys.executable,
- "-c", "import time; time.sleep(1)"])
- count = 0
- while p.poll() is None:
- time.sleep(0.1)
- count += 1
- # We expect that the poll loop probably went around about 10 times,
- # but, based on system scheduling we can't control, it's possible
- # poll() never returned None. It "should be" very rare that it
- # didn't go around at least twice.
- self.assertGreaterEqual(count, 2)
- # Subsequent invocations should just return the returncode
- self.assertEqual(p.poll(), 0)
-
-
- def test_wait(self) -> None:
- p = subprocess.Popen([sys.executable,
- "-c", "import time; time.sleep(2)"])
- self.assertEqual(p.wait(), 0)
- # Subsequent invocations should just return the returncode
- self.assertEqual(p.wait(), 0)
-
-
- def test_invalid_bufsize(self) -> None:
- # an invalid type of the bufsize argument should raise
- # TypeError.
- with self.assertRaises(TypeError):
- subprocess.Popen([sys.executable, "-c", "pass"], cast(Any, "orange"))
-
- def test_bufsize_is_none(self) -> None:
- # bufsize=None should be the same as bufsize=0.
- p = subprocess.Popen([sys.executable, "-c", "pass"], None)
- self.assertEqual(p.wait(), 0)
- # Again with keyword arg
- p = subprocess.Popen([sys.executable, "-c", "pass"], bufsize=None)
- self.assertEqual(p.wait(), 0)
-
- def test_leaking_fds_on_error(self) -> None:
- # see bug #5179: Popen leaks file descriptors to PIPEs if
- # the child fails to execute; this will eventually exhaust
- # the maximum number of open fds. 1024 seems a very common
- # value for that limit, but Windows has 2048, so we loop
- # 1024 times (each call leaked two fds).
- for i in range(1024):
- # Windows raises IOError. Others raise OSError.
- with self.assertRaises(EnvironmentError) as c:
- subprocess.Popen(['nonexisting_i_hope'],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- # ignore errors that indicate the command was not found
- if c.exception.errno not in (errno.ENOENT, errno.EACCES):
- raise c.exception
-
- def test_issue8780(self) -> None:
- # Ensure that stdout is inherited from the parent
- # if stdout=PIPE is not used
- code = ';'.join([
- 'import subprocess, sys',
- 'retcode = subprocess.call('
- "[sys.executable, '-c', 'print(\"Hello World!\")'])",
- 'assert retcode == 0'])
- output = subprocess.check_output([sys.executable, '-c', code])
- self.assertTrue(output.startswith(b'Hello World!'), ascii(output))
-
- def test_handles_closed_on_exception(self) -> None:
- # If CreateProcess exits with an error, ensure the
- # duplicate output handles are released
- ifhandle, ifname = mkstemp()
- ofhandle, ofname = mkstemp()
- efhandle, efname = mkstemp()
- try:
- subprocess.Popen (["*"], stdin=ifhandle, stdout=ofhandle,
- stderr=efhandle)
- except OSError:
- os.close(ifhandle)
- os.remove(ifname)
- os.close(ofhandle)
- os.remove(ofname)
- os.close(efhandle)
- os.remove(efname)
- self.assertFalse(os.path.exists(ifname))
- self.assertFalse(os.path.exists(ofname))
- self.assertFalse(os.path.exists(efname))
-
- def test_communicate_epipe(self) -> None:
- # Issue 10963: communicate() should hide EPIPE
- p = subprocess.Popen([sys.executable, "-c", 'pass'],
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- self.addCleanup(p.stdout.close)
- self.addCleanup(p.stderr.close)
- self.addCleanup(p.stdin.close)
- p.communicate(b"x" * 2**20)
-
- def test_communicate_epipe_only_stdin(self) -> None:
- # Issue 10963: communicate() should hide EPIPE
- p = subprocess.Popen([sys.executable, "-c", 'pass'],
- stdin=subprocess.PIPE)
- self.addCleanup(p.stdin.close)
- time.sleep(2)
- p.communicate(b"x" * 2**20)
-
- @unittest.skipUnless(hasattr(signal, 'SIGALRM'),
- "Requires signal.SIGALRM")
- def test_communicate_eintr(self) -> None:
- # Issue #12493: communicate() should handle EINTR
- def handler(signum, frame):
- pass
- old_handler = signal.signal(signal.SIGALRM, handler)
- self.addCleanup(signal.signal, signal.SIGALRM, old_handler)
-
- # the process is running for 2 seconds
- args = [sys.executable, "-c", 'import time; time.sleep(2)']
- for stream in ('stdout', 'stderr'):
- kw = {stream: subprocess.PIPE} # type: Dict[str, Any]
- with subprocess.Popen(args, **kw) as process:
- signal.alarm(1)
- # communicate() will be interrupted by SIGALRM
- process.communicate()
-
-
-# context manager
-class _SuppressCoreFiles(object):
- """Try to prevent core files from being created."""
- old_limit = None # type: Tuple[int, int]
-
- def __enter__(self) -> None:
- """Try to save previous ulimit, then set it to (0, 0)."""
- if resource is not None:
- try:
- self.old_limit = resource.getrlimit(resource.RLIMIT_CORE)
- resource.setrlimit(resource.RLIMIT_CORE, (0, 0))
- except (ValueError, resource.error):
- pass
-
- if sys.platform == 'darwin':
- # Check if the 'Crash Reporter' on OSX was configured
- # in 'Developer' mode and warn that it will get triggered
- # when it is.
- #
- # This assumes that this context manager is used in tests
- # that might trigger the next manager.
- value = subprocess.Popen(['/usr/bin/defaults', 'read',
- 'com.apple.CrashReporter', 'DialogType'],
- stdout=subprocess.PIPE).communicate()[0]
- if value.strip() == b'developer':
- print("this tests triggers the Crash Reporter, "
- "that is intentional", end='')
- sys.stdout.flush()
-
- def __exit__(self, *args: Any) -> None:
- """Return core file behavior to default."""
- if self.old_limit is None:
- return
- if resource is not None:
- try:
- resource.setrlimit(resource.RLIMIT_CORE, self.old_limit)
- except (ValueError, resource.error):
- pass
-
-
- @unittest.skipIf(mswindows, "POSIX specific tests")
-class POSIXProcessTestCase(BaseTestCase):
-
- def test_exceptions(self) -> None:
- nonexistent_dir = "/_this/pa.th/does/not/exist"
- try:
- os.chdir(nonexistent_dir)
- except OSError as e:
- # This avoids hard coding the errno value or the OS perror()
- # string and instead captures the exception that we want to see
- # below for comparison.
- desired_exception = e
- desired_exception.strerror += ': ' + repr(sys.executable)
- else:
- self.fail("chdir to nonexistant directory %s succeeded." %
- nonexistent_dir)
-
- # Error in the child re-raised in the parent.
- try:
- p = subprocess.Popen([sys.executable, "-c", ""],
- cwd=nonexistent_dir)
- except OSError as e:
- # Test that the child process chdir failure actually makes
- # it up to the parent process as the correct exception.
- self.assertEqual(desired_exception.errno, e.errno)
- self.assertEqual(desired_exception.strerror, e.strerror)
- else:
- self.fail("Expected OSError: %s" % desired_exception)
-
- def test_restore_signals(self) -> None:
- # Code coverage for both values of restore_signals to make sure it
- # at least does not blow up.
- # A test for behavior would be complex. Contributions welcome.
- subprocess.call([sys.executable, "-c", ""], restore_signals=True)
- subprocess.call([sys.executable, "-c", ""], restore_signals=False)
-
- def test_start_new_session(self) -> None:
- # For code coverage of calling setsid(). We don't care if we get an
- # EPERM error from it depending on the test execution environment;
- # that still indicates that it was called.
- try:
- output = subprocess.check_output(
- [sys.executable, "-c",
- "import os; print(os.getpgid(os.getpid()))"],
- start_new_session=True)
- except OSError as e:
- if e.errno != errno.EPERM:
- raise
- else:
- parent_pgid = os.getpgid(os.getpid())
- child_pgid = int(output)
- self.assertNotEqual(parent_pgid, child_pgid)
-
- def test_run_abort(self) -> None:
- # returncode handles signal termination
- with _SuppressCoreFiles():
- p = subprocess.Popen([sys.executable, "-c",
- 'import os; os.abort()'])
- p.wait()
- self.assertEqual(-p.returncode, signal.SIGABRT)
-
- def test_preexec(self) -> None:
- # DISCLAIMER: Setting environment variables is *not* a good use
- # of a preexec_fn. This is merely a test.
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys,os;'
- 'sys.stdout.write(os.getenv("FRUIT"))'],
- stdout=subprocess.PIPE,
- preexec_fn=lambda: os.putenv("FRUIT", "apple"))
- self.addCleanup(p.stdout.close)
- self.assertEqual(p.stdout.read(), b"apple")
-
- def test_preexec_exception(self) -> None:
- def raise_it():
- raise ValueError("What if two swallows carried a coconut?")
- try:
- p = subprocess.Popen([sys.executable, "-c", ""],
- preexec_fn=raise_it)
- except RuntimeError as e:
- self.assertTrue(
- subprocess._posixsubprocess,
- "Expected a ValueError from the preexec_fn")
- except ValueError as e2:
- self.assertIn("coconut", e2.args[0])
- else:
- self.fail("Exception raised by preexec_fn did not make it "
- "to the parent process.")
-
- def test_preexec_gc_module_failure(self) -> None:
- # This tests the code that disables garbage collection if the child
- # process will execute any Python.
- def raise_runtime_error():
- raise RuntimeError("this shouldn't escape")
- enabled = gc.isenabled()
- orig_gc_disable = gc.disable
- orig_gc_isenabled = gc.isenabled
- try:
- gc.disable()
- self.assertFalse(gc.isenabled())
- subprocess.call([sys.executable, '-c', ''],
- preexec_fn=lambda: None)
- self.assertFalse(gc.isenabled(),
- "Popen enabled gc when it shouldn't.")
-
- gc.enable()
- self.assertTrue(gc.isenabled())
- subprocess.call([sys.executable, '-c', ''],
- preexec_fn=lambda: None)
- self.assertTrue(gc.isenabled(), "Popen left gc disabled.")
-
- setattr(gc, 'disable', raise_runtime_error)
- self.assertRaises(RuntimeError, subprocess.Popen,
- [sys.executable, '-c', ''],
- preexec_fn=lambda: None)
-
- del gc.isenabled # force an AttributeError
- self.assertRaises(AttributeError, subprocess.Popen,
- [sys.executable, '-c', ''],
- preexec_fn=lambda: None)
- finally:
- setattr(gc, 'disable', orig_gc_disable)
- setattr(gc, 'isenabled', orig_gc_isenabled)
- if not enabled:
- gc.disable()
-
- def test_args_string(self) -> None:
- # args is a string
- fd, fname = mkstemp()
- # reopen in text mode
- with open(fd, "w", errors=cast(Any, "surrogateescape")) as fobj: # see #260
- fobj.write("#!/bin/sh\n")
- fobj.write("exec '%s' -c 'import sys; sys.exit(47)'\n" %
- sys.executable)
- os.chmod(fname, 0o700)
- p = subprocess.Popen(fname)
- p.wait()
- os.remove(fname)
- self.assertEqual(p.returncode, 47)
-
- def test_invalid_args(self) -> None:
- # invalid arguments should raise ValueError
- self.assertRaises(ValueError, subprocess.call,
- [sys.executable, "-c",
- "import sys; sys.exit(47)"],
- startupinfo=47)
- self.assertRaises(ValueError, subprocess.call,
- [sys.executable, "-c",
- "import sys; sys.exit(47)"],
- creationflags=47)
-
- def test_shell_sequence(self) -> None:
- # Run command through the shell (sequence)
- newenv = os.environ.copy()
- newenv["FRUIT"] = "apple"
- p = subprocess.Popen(["echo $FRUIT"], shell=1,
- stdout=subprocess.PIPE,
- env=newenv)
- self.addCleanup(p.stdout.close)
- self.assertEqual(p.stdout.read().strip(b" \t\r\n\f"), b"apple")
-
- def test_shell_string(self) -> None:
- # Run command through the shell (string)
- newenv = os.environ.copy()
- newenv["FRUIT"] = "apple"
- p = subprocess.Popen("echo $FRUIT", shell=1,
- stdout=subprocess.PIPE,
- env=newenv)
- self.addCleanup(p.stdout.close)
- self.assertEqual(p.stdout.read().strip(b" \t\r\n\f"), b"apple")
-
- def test_call_string(self) -> None:
- # call() function with string argument on UNIX
- fd, fname = mkstemp()
- # reopen in text mode
- with open(fd, "w", errors=cast(Any, "surrogateescape")) as fobj: # see #260
- fobj.write("#!/bin/sh\n")
- fobj.write("exec '%s' -c 'import sys; sys.exit(47)'\n" %
- sys.executable)
- os.chmod(fname, 0o700)
- rc = subprocess.call(fname)
- os.remove(fname)
- self.assertEqual(rc, 47)
-
- def test_specific_shell(self) -> None:
- # Issue #9265: Incorrect name passed as arg[0].
- shells = [] # type: List[str]
- for prefix in ['/bin', '/usr/bin/', '/usr/local/bin']:
- for name in ['bash', 'ksh']:
- sh = os.path.join(prefix, name)
- if os.path.isfile(sh):
- shells.append(sh)
- if not shells: # Will probably work for any shell but csh.
- self.skipTest("bash or ksh required for this test")
- sh = '/bin/sh'
- if os.path.isfile(sh) and not os.path.islink(sh):
- # Test will fail if /bin/sh is a symlink to csh.
- shells.append(sh)
- for sh in shells:
- p = subprocess.Popen("echo $0", executable=sh, shell=True,
- stdout=subprocess.PIPE)
- self.addCleanup(p.stdout.close)
- self.assertEqual(p.stdout.read().strip(), bytes(sh, 'ascii'))
-
- def _kill_process(self, method: str, *args: Any) -> subprocess.Popen:
- # Do not inherit file handles from the parent.
- # It should fix failures on some platforms.
- p = subprocess.Popen([sys.executable, "-c", """if 1:
- import sys, time
- sys.stdout.write('x\\n')
- sys.stdout.flush()
- time.sleep(30)
- """],
- close_fds=True,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- # Wait for the interpreter to be completely initialized before
- # sending any signal.
- p.stdout.read(1)
- getattr(p, method)(*args)
- return p
-
- def test_send_signal(self) -> None:
- p = self._kill_process('send_signal', signal.SIGINT)
- _, stderr = p.communicate()
- self.assertIn(b'KeyboardInterrupt', stderr)
- self.assertNotEqual(p.wait(), 0)
-
- def test_kill(self) -> None:
- p = self._kill_process('kill')
- _, stderr = p.communicate()
- self.assertStderrEqual(stderr, b'')
- self.assertEqual(p.wait(), -signal.SIGKILL)
-
- def test_terminate(self) -> None:
- p = self._kill_process('terminate')
- _, stderr = p.communicate()
- self.assertStderrEqual(stderr, b'')
- self.assertEqual(p.wait(), -signal.SIGTERM)
-
- def check_close_std_fds(self, fds: Iterable[int]) -> None:
- # Issue #9905: test that subprocess pipes still work properly with
- # some standard fds closed
- stdin = 0
- newfds = [] # type: List[int]
- for a in fds:
- b = os.dup(a)
- newfds.append(b)
- if a == 0:
- stdin = b
- try:
- for fd in fds:
- os.close(fd)
- out, err = subprocess.Popen([sys.executable, "-c",
- 'import sys;'
- 'sys.stdout.write("apple");'
- 'sys.stdout.flush();'
- 'sys.stderr.write("orange")'],
- stdin=stdin,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE).communicate()
- err = support.strip_python_stderr(err)
- self.assertEqual((out, err), (b'apple', b'orange'))
- finally:
- for b, a in zip(newfds, fds):
- os.dup2(b, a)
- for b in newfds:
- os.close(b)
-
- def test_close_fd_0(self) -> None:
- self.check_close_std_fds([0])
-
- def test_close_fd_1(self) -> None:
- self.check_close_std_fds([1])
-
- def test_close_fd_2(self) -> None:
- self.check_close_std_fds([2])
-
- def test_close_fds_0_1(self) -> None:
- self.check_close_std_fds([0, 1])
-
- def test_close_fds_0_2(self) -> None:
- self.check_close_std_fds([0, 2])
-
- def test_close_fds_1_2(self) -> None:
- self.check_close_std_fds([1, 2])
-
- def test_close_fds_0_1_2(self) -> None:
- # Issue #10806: test that subprocess pipes still work properly with
- # all standard fds closed.
- self.check_close_std_fds([0, 1, 2])
-
- def test_remapping_std_fds(self) -> None:
- # open up some temporary files
- temps = [mkstemp() for i in range(3)]
- try:
- temp_fds = [fd for fd, fname in temps]
-
- # unlink the files -- we won't need to reopen them
- for fd, fname in temps:
- os.unlink(fname)
-
- # write some data to what will become stdin, and rewind
- os.write(temp_fds[1], b"STDIN")
- os.lseek(temp_fds[1], 0, 0)
-
- # move the standard file descriptors out of the way
- saved_fds = [os.dup(fd) for fd in range(3)]
- try:
- # duplicate the file objects over the standard fd's
- for fd, temp_fd in enumerate(temp_fds):
- os.dup2(temp_fd, fd)
-
- # now use those files in the "wrong" order, so that subprocess
- # has to rearrange them in the child
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys; got = sys.stdin.read();'
- 'sys.stdout.write("got %s"%got); sys.stderr.write("err")'],
- stdin=temp_fds[1],
- stdout=temp_fds[2],
- stderr=temp_fds[0])
- p.wait()
- finally:
- # restore the original fd's underneath sys.stdin, etc.
- for std, saved in enumerate(saved_fds):
- os.dup2(saved, std)
- os.close(saved)
-
- for fd in temp_fds:
- os.lseek(fd, 0, 0)
-
- out = os.read(temp_fds[2], 1024)
- err = support.strip_python_stderr(os.read(temp_fds[0], 1024))
- self.assertEqual(out, b"got STDIN")
- self.assertEqual(err, b"err")
-
- finally:
- for fd in temp_fds:
- os.close(fd)
-
- def check_swap_fds(self, stdin_no: int, stdout_no: int,
- stderr_no: int) -> None:
- # open up some temporary files
- temps = [mkstemp() for i in range(3)]
- temp_fds = [fd for fd, fname in temps]
- try:
- # unlink the files -- we won't need to reopen them
- for fd, fname in temps:
- os.unlink(fname)
-
- # save a copy of the standard file descriptors
- saved_fds = [os.dup(fd) for fd in range(3)]
- try:
- # duplicate the temp files over the standard fd's 0, 1, 2
- for fd, temp_fd in enumerate(temp_fds):
- os.dup2(temp_fd, fd)
-
- # write some data to what will become stdin, and rewind
- os.write(stdin_no, b"STDIN")
- os.lseek(stdin_no, 0, 0)
-
- # now use those files in the given order, so that subprocess
- # has to rearrange them in the child
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys; got = sys.stdin.read();'
- 'sys.stdout.write("got %s"%got); sys.stderr.write("err")'],
- stdin=stdin_no,
- stdout=stdout_no,
- stderr=stderr_no)
- p.wait()
-
- for fd in temp_fds:
- os.lseek(fd, 0, 0)
-
- out = os.read(stdout_no, 1024)
- err = support.strip_python_stderr(os.read(stderr_no, 1024))
- finally:
- for std, saved in enumerate(saved_fds):
- os.dup2(saved, std)
- os.close(saved)
-
- self.assertEqual(out, b"got STDIN")
- self.assertEqual(err, b"err")
-
- finally:
- for fd in temp_fds:
- os.close(fd)
-
- # When duping fds, if there arises a situation where one of the fds is
- # either 0, 1 or 2, it is possible that it is overwritten (#12607).
- # This tests all combinations of this.
- def test_swap_fds(self) -> None:
- self.check_swap_fds(0, 1, 2)
- self.check_swap_fds(0, 2, 1)
- self.check_swap_fds(1, 0, 2)
- self.check_swap_fds(1, 2, 0)
- self.check_swap_fds(2, 0, 1)
- self.check_swap_fds(2, 1, 0)
-
- def test_surrogates_error_message(self) -> None:
- def prepare() -> None:
- raise ValueError("surrogate:\uDCff")
-
- try:
- subprocess.call(
- [sys.executable, "-c", "pass"],
- preexec_fn=prepare)
- except ValueError as err:
- # The pure Python implementation keeps the message
- self.assertIsNone(subprocess._posixsubprocess)
- self.assertEqual(str(err), "surrogate:\uDCff")
- except RuntimeError as err2:
- # _posixsubprocess uses a default message
- self.assertIsNotNone(subprocess._posixsubprocess)
- self.assertEqual(str(err2), "Exception occurred in preexec_fn.")
- else:
- self.fail("Expected ValueError or RuntimeError")
-
- def test_undecodable_env(self) -> None:
- for key, value in (('test', 'abc\uDCFF'), ('test\uDCFF', '42')):
- # test str with surrogates
- script = "import os; print(ascii(os.getenv(%s)))" % repr(key)
- env = os.environ.copy()
- env[key] = value
- # Use C locale to get ascii for the locale encoding to force
- # surrogate-escaping of \xFF in the child process; otherwise it can
- # be decoded as-is if the default locale is latin-1.
- env['LC_ALL'] = 'C'
- stdout = subprocess.check_output(
- [sys.executable, "-c", script],
- env=env)
- stdout = stdout.rstrip(b'\n\r')
- self.assertEqual(stdout.decode('ascii'), ascii(value))
-
- # test bytes
- keyb = key.encode("ascii", "surrogateescape")
- valueb = value.encode("ascii", "surrogateescape")
- script = "import os; print(ascii(os.getenvb(%s)))" % repr(keyb)
- envb = dict(os.environ.copy().items()) # type: Dict[Any, Any]
- envb[keyb] = valueb
- stdout = subprocess.check_output(
- [sys.executable, "-c", script],
- env=envb)
- stdout = stdout.rstrip(b'\n\r')
- self.assertEqual(stdout.decode('ascii'), ascii(valueb))
-
- def test_bytes_program(self) -> None:
- abs_program = os.fsencode(sys.executable)
- path, programs = os.path.split(sys.executable)
- program = os.fsencode(programs)
-
- # absolute bytes path
- exitcode = subprocess.call([abs_program, "-c", "pass"])
- self.assertEqual(exitcode, 0)
-
- # bytes program, unicode PATH
- env = os.environ.copy()
- env["PATH"] = path
- exitcode = subprocess.call([program, "-c", "pass"], env=env)
- self.assertEqual(exitcode, 0)
-
- # bytes program, bytes PATH
- envb = os.environb.copy()
- envb[b"PATH"] = os.fsencode(path)
- exitcode = subprocess.call([program, "-c", "pass"], env=envb)
- self.assertEqual(exitcode, 0)
-
- def test_pipe_cloexec(self) -> None:
- sleeper = support.findfile("input_reader.py", subdir="subprocessdata")
- fd_status = support.findfile("fd_status.py", subdir="subprocessdata")
-
- p1 = subprocess.Popen([sys.executable, sleeper],
- stdin=subprocess.PIPE, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE, close_fds=False)
-
- self.addCleanup(p1.communicate, b'')
-
- p2 = subprocess.Popen([sys.executable, fd_status],
- stdout=subprocess.PIPE, close_fds=False)
-
- output, error = p2.communicate()
- result_fds = set(map(int, output.split(b',')))
- unwanted_fds = set([p1.stdin.fileno(), p1.stdout.fileno(),
- p1.stderr.fileno()])
-
- self.assertFalse(result_fds & unwanted_fds,
- "Expected no fds from %r to be open in child, "
- "found %r" %
- (unwanted_fds, result_fds & unwanted_fds))
-
- def test_pipe_cloexec_real_tools(self) -> None:
- qcat = support.findfile("qcat.py", subdir="subprocessdata")
- qgrep = support.findfile("qgrep.py", subdir="subprocessdata")
-
- subdata = b'zxcvbn'
- data = subdata * 4 + b'\n'
-
- p1 = subprocess.Popen([sys.executable, qcat],
- stdin=subprocess.PIPE, stdout=subprocess.PIPE,
- close_fds=False)
-
- p2 = subprocess.Popen([sys.executable, qgrep, subdata],
- stdin=p1.stdout, stdout=subprocess.PIPE,
- close_fds=False)
-
- self.addCleanup(p1.wait)
- self.addCleanup(p2.wait)
- def kill_p1() -> None:
- #try:
- p1.terminate()
- #except ProcessLookupError:
- # pass
- def kill_p2() -> None:
- #try:
- p2.terminate()
- #except ProcessLookupError:
- # pass
- self.addCleanup(kill_p1)
- self.addCleanup(kill_p2)
-
- p1.stdin.write(data)
- p1.stdin.close()
-
- readfiles, ignored1, ignored2 = select.select([p2.stdout], [], [], 10)
-
- self.assertTrue(readfiles, "The child hung")
- self.assertEqual(p2.stdout.read(), data)
-
- p1.stdout.close()
- p2.stdout.close()
-
- def test_close_fds(self) -> None:
- fd_status = support.findfile("fd_status.py", subdir="subprocessdata")
-
- fds = os.pipe()
- self.addCleanup(os.close, fds[0])
- self.addCleanup(os.close, fds[1])
-
- open_fds = set([fds[0], fds[1]])
- # add a bunch more fds
- for _ in range(9):
- fd = os.open("/dev/null", os.O_RDONLY)
- self.addCleanup(os.close, fd)
- open_fds.add(fd)
-
- p = subprocess.Popen([sys.executable, fd_status],
- stdout=subprocess.PIPE, close_fds=False)
- output, ignored = p.communicate()
- remaining_fds = set(map(int, output.split(b',')))
-
- self.assertEqual(remaining_fds & open_fds, open_fds,
- "Some fds were closed")
-
- p = subprocess.Popen([sys.executable, fd_status],
- stdout=subprocess.PIPE, close_fds=True)
- output, ignored = p.communicate()
- remaining_fds = set(map(int, output.split(b',')))
-
- self.assertFalse(remaining_fds & open_fds,
- "Some fds were left open")
- self.assertIn(1, remaining_fds, "Subprocess failed")
-
- # Keep some of the fd's we opened open in the subprocess.
- # This tests _posixsubprocess.c's proper handling of fds_to_keep.
- fds_to_keep = set(open_fds.pop() for _ in range(8))
- p = subprocess.Popen([sys.executable, fd_status],
- stdout=subprocess.PIPE, close_fds=True,
- pass_fds=())
- output, ignored = p.communicate()
- remaining_fds = set(map(int, output.split(b',')))
-
- self.assertFalse(remaining_fds & fds_to_keep & open_fds,
- "Some fds not in pass_fds were left open")
- self.assertIn(1, remaining_fds, "Subprocess failed")
-
- # Mac OS X Tiger (10.4) has a kernel bug: sometimes, the file
- # descriptor of a pipe closed in the parent process is valid in the
- # child process according to fstat(), but the mode of the file
- # descriptor is invalid, and read or write raise an error.
- @support.requires_mac_ver(10, 5)
- def test_pass_fds(self) -> None:
- fd_status = support.findfile("fd_status.py", subdir="subprocessdata")
-
- open_fds = set() # type: Set[int]
-
- for x in range(5):
- fds = os.pipe()
- self.addCleanup(os.close, fds[0])
- self.addCleanup(os.close, fds[1])
- open_fds.update([fds[0], fds[1]])
-
- for fd in open_fds:
- p = subprocess.Popen([sys.executable, fd_status],
- stdout=subprocess.PIPE, close_fds=True,
- pass_fds=(fd, ))
- output, ignored = p.communicate()
-
- remaining_fds = set(map(int, output.split(b',')))
- to_be_closed = open_fds - {fd}
-
- self.assertIn(fd, remaining_fds, "fd to be passed not passed")
- self.assertFalse(remaining_fds & to_be_closed,
- "fd to be closed passed")
-
- # pass_fds overrides close_fds with a warning.
- with self.assertWarns(RuntimeWarning) as context:
- self.assertFalse(subprocess.call(
- [sys.executable, "-c", "import sys; sys.exit(0)"],
- close_fds=False, pass_fds=(fd, )))
- self.assertIn('overriding close_fds', str(context.warning))
-
- def test_stdout_stdin_are_single_inout_fd(self) -> None:
- with io.open(os.devnull, "r+") as inout:
- p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"],
- stdout=inout, stdin=inout)
- p.wait()
-
- def test_stdout_stderr_are_single_inout_fd(self) -> None:
- with io.open(os.devnull, "r+") as inout:
- p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"],
- stdout=inout, stderr=inout)
- p.wait()
-
- def test_stderr_stdin_are_single_inout_fd(self) -> None:
- with io.open(os.devnull, "r+") as inout:
- p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"],
- stderr=inout, stdin=inout)
- p.wait()
-
- def test_wait_when_sigchild_ignored(self) -> None:
- # NOTE: sigchild_ignore.py may not be an effective test on all OSes.
- sigchild_ignore = support.findfile("sigchild_ignore.py",
- subdir="subprocessdata")
- p = subprocess.Popen([sys.executable, sigchild_ignore],
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- stdout, stderr = p.communicate()
- self.assertEqual(0, p.returncode, "sigchild_ignore.py exited"
- " non-zero with this error:\n%s" %
- stderr.decode('utf8'))
-
- def test_select_unbuffered(self) -> None:
- # Issue #11459: bufsize=0 should really set the pipes as
- # unbuffered (and therefore let select() work properly).
- select = support.import_module("select")
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys;'
- 'sys.stdout.write("apple")'],
- stdout=subprocess.PIPE,
- bufsize=0)
- f = p.stdout
- self.addCleanup(f.close)
- try:
- self.assertEqual(f.read(4), b"appl")
- self.assertIn(f, select.select([f], [], [], 0.0)[0])
- finally:
- p.wait()
-
- def test_zombie_fast_process_del(self) -> None:
- # Issue #12650: on Unix, if Popen.__del__() was called before the
- # process exited, it wouldn't be added to subprocess._active, and would
- # remain a zombie.
- # spawn a Popen, and delete its reference before it exits
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys, time;'
- 'time.sleep(0.2)'],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- self.addCleanup(p.stdout.close)
- self.addCleanup(p.stderr.close)
- ident = id(p)
- pid = p.pid
- del p
- # check that p is in the active processes list
- self.assertIn(ident, [id(o) for o in subprocess._active])
-
- def test_leak_fast_process_del_killed(self) -> None:
- # Issue #12650: on Unix, if Popen.__del__() was called before the
- # process exited, and the process got killed by a signal, it would never
- # be removed from subprocess._active, which triggered a FD and memory
- # leak.
- # spawn a Popen, delete its reference and kill it
- p = subprocess.Popen([sys.executable, "-c",
- 'import time;'
- 'time.sleep(3)'],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- self.addCleanup(p.stdout.close)
- self.addCleanup(p.stderr.close)
- ident = id(p)
- pid = p.pid
- del p
- os.kill(pid, signal.SIGKILL)
- # check that p is in the active processes list
- self.assertIn(ident, [id(o) for o in subprocess._active])
-
-        # give the process some time to exit, then create a new Popen: this
- # should trigger the wait() of p
- time.sleep(0.2)
- with self.assertRaises(EnvironmentError) as c:
- with subprocess.Popen(['nonexisting_i_hope'],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE) as proc:
- pass
- # p should have been wait()ed on, and removed from the _active list
- self.assertRaises(OSError, os.waitpid, pid, 0)
- self.assertNotIn(ident, [id(o) for o in subprocess._active])
-
-
-@unittest.skipUnless(mswindows, "Windows specific tests")
-class Win32ProcessTestCase(BaseTestCase):
-
- def test_startupinfo(self) -> None:
- # startupinfo argument
-        # We use hardcoded constants, because we do not want to
- # depend on win32all.
- STARTF_USESHOWWINDOW = 1
- SW_MAXIMIZE = 3
- startupinfo = subprocess.STARTUPINFO()
- startupinfo.dwFlags = STARTF_USESHOWWINDOW
- startupinfo.wShowWindow = SW_MAXIMIZE
- # Since Python is a console process, it won't be affected
- # by wShowWindow, but the argument should be silently
- # ignored
- subprocess.call([sys.executable, "-c", "import sys; sys.exit(0)"],
- startupinfo=startupinfo)
-
- def test_creationflags(self) -> None:
- # creationflags argument
- CREATE_NEW_CONSOLE = 16
- sys.stderr.write(" a DOS box should flash briefly ...\n")
- subprocess.call(sys.executable +
- ' -c "import time; time.sleep(0.25)"',
- creationflags=CREATE_NEW_CONSOLE)
-
- def test_invalid_args(self) -> None:
- # invalid arguments should raise ValueError
- self.assertRaises(ValueError, subprocess.call,
- [sys.executable, "-c",
- "import sys; sys.exit(47)"],
- preexec_fn=lambda: 1)
- self.assertRaises(ValueError, subprocess.call,
- [sys.executable, "-c",
- "import sys; sys.exit(47)"],
- stdout=subprocess.PIPE,
- close_fds=True)
-
- def test_close_fds(self) -> None:
- # close file descriptors
- rc = subprocess.call([sys.executable, "-c",
- "import sys; sys.exit(47)"],
- close_fds=True)
- self.assertEqual(rc, 47)
-
- def test_shell_sequence(self) -> None:
- # Run command through the shell (sequence)
- newenv = os.environ.copy()
- newenv["FRUIT"] = "physalis"
- p = subprocess.Popen(["set"], shell=1,
- stdout=subprocess.PIPE,
- env=newenv)
- self.addCleanup(p.stdout.close)
- self.assertIn(b"physalis", p.stdout.read())
-
- def test_shell_string(self) -> None:
- # Run command through the shell (string)
- newenv = os.environ.copy()
- newenv["FRUIT"] = "physalis"
- p = subprocess.Popen("set", shell=1,
- stdout=subprocess.PIPE,
- env=newenv)
- self.addCleanup(p.stdout.close)
- self.assertIn(b"physalis", p.stdout.read())
-
- def test_call_string(self) -> None:
- # call() function with string argument on Windows
- rc = subprocess.call(sys.executable +
- ' -c "import sys; sys.exit(47)"')
- self.assertEqual(rc, 47)
-
- def _kill_process(self, method: str, *args: Any) -> None:
- # Some win32 buildbot raises EOFError if stdin is inherited
- p = subprocess.Popen([sys.executable, "-c", """if 1:
- import sys, time
- sys.stdout.write('x\\n')
- sys.stdout.flush()
- time.sleep(30)
- """],
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- self.addCleanup(p.stdout.close)
- self.addCleanup(p.stderr.close)
- self.addCleanup(p.stdin.close)
- # Wait for the interpreter to be completely initialized before
- # sending any signal.
- p.stdout.read(1)
- getattr(p, method)(*args)
- _, stderr = p.communicate()
- self.assertStderrEqual(stderr, b'')
- returncode = p.wait()
- self.assertNotEqual(returncode, 0)
-
- def test_send_signal(self) -> None:
- self._kill_process('send_signal', signal.SIGTERM)
-
- def test_kill(self) -> None:
- self._kill_process('kill')
-
- def test_terminate(self) -> None:
- self._kill_process('terminate')
-
-
-# The module says:
-# "NB This only works (and is only relevant) for UNIX."
-#
-# Actually, getoutput should work on any platform with an os.popen, but
-# I'll take the comment as given, and skip this suite.
-@unittest.skipUnless(os.name == 'posix', "only relevant for UNIX")
-class CommandTests(unittest.TestCase):
- def test_getoutput(self) -> None:
- self.assertEqual(subprocess.getoutput('echo xyzzy'), 'xyzzy')
- self.assertEqual(subprocess.getstatusoutput('echo xyzzy'),
- (0, 'xyzzy'))
-
- # we use mkdtemp in the next line to create an empty directory
- # under our exclusive control; from that, we can invent a pathname
- # that we _know_ won't exist. This is guaranteed to fail.
- dir = None # type: str
- try:
- dir = tempfile.mkdtemp()
- name = os.path.join(dir, "foo")
-
- status, output = subprocess.getstatusoutput('cat ' + name)
- self.assertNotEqual(status, 0)
- finally:
- if dir is not None:
- os.rmdir(dir)
-
-
-@unittest.skipUnless(getattr(subprocess, '_has_poll', False),
- "poll system call not supported")
-class ProcessTestCaseNoPoll(ProcessTestCase):
- def setUp(self) -> None:
- subprocess._has_poll = False
- ProcessTestCase.setUp(self)
-
- def tearDown(self) -> None:
- subprocess._has_poll = True
- ProcessTestCase.tearDown(self)
-
-
-#@unittest.skipUnless(getattr(subprocess, '_posixsubprocess', False),
-# "_posixsubprocess extension module not found.")
-#class ProcessTestCasePOSIXPurePython(ProcessTestCase, POSIXProcessTestCase):
-# @classmethod
-# def setUpClass(cls):
-# global subprocess
-# assert subprocess._posixsubprocess
-# # Reimport subprocess while forcing _posixsubprocess to not exist.
-# with support.check_warnings(('.*_posixsubprocess .* not being used.*',
-# RuntimeWarning)):
-# subprocess = support.import_fresh_module(
-# 'subprocess', blocked=['_posixsubprocess'])
-# assert not subprocess._posixsubprocess
-#
-# @classmethod
-# def tearDownClass(cls):
-# global subprocess
-# # Reimport subprocess as it should be, restoring order to the universe#.
-# subprocess = support.import_fresh_module('subprocess')
-# assert subprocess._posixsubprocess
-
-
-class HelperFunctionTests(unittest.TestCase):
- @unittest.skipIf(mswindows, "errno and EINTR make no sense on windows")
- def test_eintr_retry_call(self) -> None:
- record_calls = [] # type: List[Any]
- def fake_os_func(*args: Any) -> tuple:
- record_calls.append(args)
- if len(record_calls) == 2:
- raise OSError(errno.EINTR, "fake interrupted system call")
- return tuple(reversed(args))
-
- self.assertEqual((999, 256),
- subprocess._eintr_retry_call(fake_os_func, 256, 999))
- self.assertEqual([(256, 999)], record_calls)
- # This time there will be an EINTR so it will loop once.
- self.assertEqual((666,),
- subprocess._eintr_retry_call(fake_os_func, 666))
- self.assertEqual([(256, 999), (666,), (666,)], record_calls)
-
-
-@unittest.skipUnless(mswindows, "Windows-specific tests")
-class CommandsWithSpaces (BaseTestCase):
-
- def setUp(self) -> None:
- super().setUp()
- f, fname = mkstemp(".py", "te st")
- self.fname = fname.lower ()
- os.write(f, b"import sys;"
- b"sys.stdout.write('%d %s' % (len(sys.argv), [a.lower () for a in sys.argv]))"
- )
- os.close(f)
-
- def tearDown(self) -> None:
- os.remove(self.fname)
- super().tearDown()
-
- def with_spaces(self, *args: Any, **kwargs: Any) -> None:
- kwargs['stdout'] = subprocess.PIPE
- p = subprocess.Popen(*args, **kwargs)
- self.addCleanup(p.stdout.close)
- self.assertEqual(
- p.stdout.read ().decode("mbcs"),
- "2 [%r, 'ab cd']" % self.fname
- )
-
- def test_shell_string_with_spaces(self) -> None:
- # call() function with string argument with spaces on Windows
- self.with_spaces('"%s" "%s" "%s"' % (sys.executable, self.fname,
- "ab cd"), shell=1)
-
- def test_shell_sequence_with_spaces(self) -> None:
- # call() function with sequence argument with spaces on Windows
- self.with_spaces([sys.executable, self.fname, "ab cd"], shell=1)
-
- def test_noshell_string_with_spaces(self) -> None:
- # call() function with string argument with spaces on Windows
- self.with_spaces('"%s" "%s" "%s"' % (sys.executable, self.fname,
- "ab cd"))
-
- def test_noshell_sequence_with_spaces(self) -> None:
- # call() function with sequence argument with spaces on Windows
- self.with_spaces([sys.executable, self.fname, "ab cd"])
-
-
-class ContextManagerTests(BaseTestCase):
-
- def test_pipe(self) -> None:
- with subprocess.Popen([sys.executable, "-c",
- "import sys;"
- "sys.stdout.write('stdout');"
- "sys.stderr.write('stderr');"],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE) as proc:
- self.assertEqual(proc.stdout.read(), b"stdout")
- self.assertStderrEqual(proc.stderr.read(), b"stderr")
-
- self.assertTrue(proc.stdout.closed)
- self.assertTrue(proc.stderr.closed)
-
- def test_returncode(self) -> None:
- with subprocess.Popen([sys.executable, "-c",
- "import sys; sys.exit(100)"]) as proc:
- pass
- # __exit__ calls wait(), so the returncode should be set
- self.assertEqual(proc.returncode, 100)
-
- def test_communicate_stdin(self) -> None:
- with subprocess.Popen([sys.executable, "-c",
- "import sys;"
- "sys.exit(sys.stdin.read() == 'context')"],
- stdin=subprocess.PIPE) as proc:
- proc.communicate(b"context")
- self.assertEqual(proc.returncode, 1)
-
- def test_invalid_args(self) -> None:
- with self.assertRaises(EnvironmentError) as c:
- with subprocess.Popen(['nonexisting_i_hope'],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE) as proc:
- pass
-
- if c.exception.errno != errno.ENOENT: # ignore "no such file"
- raise c.exception
-
-
-def test_main():
- unit_tests = (ProcessTestCase,
- POSIXProcessTestCase,
- Win32ProcessTestCase,
- #ProcessTestCasePOSIXPurePython,
- CommandTests,
- ProcessTestCaseNoPoll,
- HelperFunctionTests,
- CommandsWithSpaces,
- ContextManagerTests,
- )
-
- support.run_unittest(*unit_tests)
- support.reap_children()
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/test-data/stdlib-samples/3.2/test/test_tempfile.py b/test-data/stdlib-samples/3.2/test/test_tempfile.py
deleted file mode 100644
index 31b0fec..0000000
--- a/test-data/stdlib-samples/3.2/test/test_tempfile.py
+++ /dev/null
@@ -1,1122 +0,0 @@
-# tempfile.py unit tests.
-import tempfile
-import os
-import signal
-import sys
-import re
-import warnings
-
-import unittest
-from test import support
-
-from typing import Any, AnyStr, List, Dict, IO
-
-
-if hasattr(os, 'stat'):
- import stat
- has_stat = 1
-else:
- has_stat = 0
-
-has_textmode = (tempfile._text_openflags != tempfile._bin_openflags)
-has_spawnl = hasattr(os, 'spawnl')
-
-# TEST_FILES may need to be tweaked for systems depending on the maximum
-# number of files that can be opened at one time (see ulimit -n)
-if sys.platform in ('openbsd3', 'openbsd4'):
- TEST_FILES = 48
-else:
- TEST_FILES = 100
-
-# This is organized as one test for each chunk of code in tempfile.py,
-# in order of their appearance in the file. Testing which requires
-# threads is not done here.
-
-# Common functionality.
-class TC(unittest.TestCase):
-
- str_check = re.compile(r"[a-zA-Z0-9_-]{6}$")
-
- def setUp(self) -> None:
- self._warnings_manager = support.check_warnings()
- self._warnings_manager.__enter__()
- warnings.filterwarnings("ignore", category=RuntimeWarning,
- message="mktemp", module=__name__)
-
- def tearDown(self) -> None:
- self._warnings_manager.__exit__(None, None, None)
-
-
- def failOnException(self, what: str, ei: tuple = None) -> None:
- if ei is None:
- ei = sys.exc_info()
- self.fail("%s raised %s: %s" % (what, ei[0], ei[1]))
-
- def nameCheck(self, name: str, dir: str, pre: str, suf: str) -> None:
- (ndir, nbase) = os.path.split(name)
- npre = nbase[:len(pre)]
- nsuf = nbase[len(nbase)-len(suf):]
-
- # check for equality of the absolute paths!
- self.assertEqual(os.path.abspath(ndir), os.path.abspath(dir),
- "file '%s' not in directory '%s'" % (name, dir))
- self.assertEqual(npre, pre,
- "file '%s' does not begin with '%s'" % (nbase, pre))
- self.assertEqual(nsuf, suf,
- "file '%s' does not end with '%s'" % (nbase, suf))
-
- nbase = nbase[len(pre):len(nbase)-len(suf)]
- self.assertTrue(self.str_check.match(nbase),
- "random string '%s' does not match /^[a-zA-Z0-9_-]{6}$/"
- % nbase)
-
-test_classes = [] # type: List[type]
-
-class test_exports(TC):
- def test_exports(self) -> None:
- # There are no surprising symbols in the tempfile module
- dict = tempfile.__dict__
-
- expected = {
- "NamedTemporaryFile" : 1,
- "TemporaryFile" : 1,
- "mkstemp" : 1,
- "mkdtemp" : 1,
- "mktemp" : 1,
- "TMP_MAX" : 1,
- "gettempprefix" : 1,
- "gettempdir" : 1,
- "tempdir" : 1,
- "template" : 1,
- "SpooledTemporaryFile" : 1,
- "TemporaryDirectory" : 1,
- }
-
- unexp = [] # type: List[str]
- for key in dict:
- if key[0] != '_' and key not in expected:
- unexp.append(key)
- self.assertTrue(len(unexp) == 0,
- "unexpected keys: %s" % unexp)
-
-test_classes.append(test_exports)
-
-
-class test__RandomNameSequence(TC):
- """Test the internal iterator object _RandomNameSequence."""
-
- def setUp(self) -> None:
- self.r = tempfile._RandomNameSequence()
- super().setUp()
-
- def test_get_six_char_str(self) -> None:
- # _RandomNameSequence returns a six-character string
- s = next(self.r)
- self.nameCheck(s, '', '', '')
-
- def test_many(self) -> None:
- # _RandomNameSequence returns no duplicate strings (stochastic)
-
- dict = {} # type: Dict[str, int]
- r = self.r
- for i in range(TEST_FILES):
- s = next(r)
- self.nameCheck(s, '', '', '')
- self.assertNotIn(s, dict)
- dict[s] = 1
-
- def supports_iter(self) -> None:
- # _RandomNameSequence supports the iterator protocol
-
- i = 0
- r = self.r
- try:
- for s in r:
- i += 1
- if i == 20:
- break
- except:
- self.failOnException("iteration")
-
- @unittest.skipUnless(hasattr(os, 'fork'),
- "os.fork is required for this test")
- def test_process_awareness(self) -> None:
- # ensure that the random source differs between
- # child and parent.
- read_fd, write_fd = os.pipe()
- pid = None # type: int
- try:
- pid = os.fork()
- if not pid:
- os.close(read_fd)
- os.write(write_fd, next(self.r).encode("ascii"))
- os.close(write_fd)
-                # bypass the normal exit handlers - leave those to
- # the parent.
- os._exit(0)
- parent_value = next(self.r)
- child_value = os.read(read_fd, len(parent_value)).decode("ascii")
- finally:
- if pid:
- # best effort to ensure the process can't bleed out
- # via any bugs above
- try:
- os.kill(pid, signal.SIGKILL)
- except EnvironmentError:
- pass
- os.close(read_fd)
- os.close(write_fd)
- self.assertNotEqual(child_value, parent_value)
-
-
-test_classes.append(test__RandomNameSequence)
-
-
-class test__candidate_tempdir_list(TC):
- """Test the internal function _candidate_tempdir_list."""
-
- def test_nonempty_list(self) -> None:
- # _candidate_tempdir_list returns a nonempty list of strings
-
- cand = tempfile._candidate_tempdir_list()
-
- self.assertFalse(len(cand) == 0)
- for c in cand:
- self.assertIsInstance(c, str)
-
- def test_wanted_dirs(self) -> None:
- # _candidate_tempdir_list contains the expected directories
-
- # Make sure the interesting environment variables are all set.
- with support.EnvironmentVarGuard() as env:
- for envname in 'TMPDIR', 'TEMP', 'TMP':
- dirname = os.getenv(envname)
- if not dirname:
- env[envname] = os.path.abspath(envname)
-
- cand = tempfile._candidate_tempdir_list()
-
- for envname in 'TMPDIR', 'TEMP', 'TMP':
- dirname = os.getenv(envname)
- if not dirname: raise ValueError
- self.assertIn(dirname, cand)
-
- try:
- dirname = os.getcwd()
- except (AttributeError, os.error):
- dirname = os.curdir
-
- self.assertIn(dirname, cand)
-
- # Not practical to try to verify the presence of OS-specific
- # paths in this list.
-
-test_classes.append(test__candidate_tempdir_list)
-
-
-# We test _get_default_tempdir by testing gettempdir.
-
-
-class test__get_candidate_names(TC):
- """Test the internal function _get_candidate_names."""
-
- def test_retval(self) -> None:
- # _get_candidate_names returns a _RandomNameSequence object
- obj = tempfile._get_candidate_names()
- self.assertIsInstance(obj, tempfile._RandomNameSequence)
-
- def test_same_thing(self) -> None:
- # _get_candidate_names always returns the same object
- a = tempfile._get_candidate_names()
- b = tempfile._get_candidate_names()
-
- self.assertTrue(a is b)
-
-test_classes.append(test__get_candidate_names)
-
-
-class test__mkstemp_inner(TC):
- """Test the internal function _mkstemp_inner."""
-
- class mkstemped:
- _bflags = tempfile._bin_openflags
- _tflags = tempfile._text_openflags
-
- def __init__(self, dir: str, pre: str, suf: str, bin: int) -> None:
- if bin: flags = self._bflags
- else: flags = self._tflags
-
- (self.fd, self.name) = tempfile._mkstemp_inner(dir, pre, suf, flags)
-
- self._close = os.close
- self._unlink = os.unlink
-
- def write(self, str: bytes) -> None:
- os.write(self.fd, str)
-
- def __del__(self) -> None:
- self._close(self.fd)
- self._unlink(self.name)
-
- def do_create(self, dir: str = None, pre: str = "", suf: str= "",
- bin: int = 1) -> mkstemped:
- if dir is None:
- dir = tempfile.gettempdir()
- try:
- file = test__mkstemp_inner.mkstemped(dir, pre, suf, bin) # see #259
- except:
- self.failOnException("_mkstemp_inner")
-
- self.nameCheck(file.name, dir, pre, suf)
- return file
-
- def test_basic(self) -> None:
- # _mkstemp_inner can create files
- self.do_create().write(b"blat")
- self.do_create(pre="a").write(b"blat")
- self.do_create(suf="b").write(b"blat")
- self.do_create(pre="a", suf="b").write(b"blat")
- self.do_create(pre="aa", suf=".txt").write(b"blat")
-
- def test_basic_many(self) -> None:
- # _mkstemp_inner can create many files (stochastic)
- extant = list(range(TEST_FILES)) # type: List[Any]
- for i in extant:
- extant[i] = self.do_create(pre="aa")
-
- def test_choose_directory(self) -> None:
- # _mkstemp_inner can create files in a user-selected directory
- dir = tempfile.mkdtemp()
- try:
- self.do_create(dir=dir).write(b"blat")
- finally:
- os.rmdir(dir)
-
- def test_file_mode(self) -> None:
- # _mkstemp_inner creates files with the proper mode
- if not has_stat:
- return # ugh, can't use SkipTest.
-
- file = self.do_create()
- mode = stat.S_IMODE(os.stat(file.name).st_mode)
- expected = 0o600
- if sys.platform in ('win32', 'os2emx'):
- # There's no distinction among 'user', 'group' and 'world';
- # replicate the 'user' bits.
- user = expected >> 6
- expected = user * (1 + 8 + 64)
- self.assertEqual(mode, expected)
-
- def test_noinherit(self) -> None:
- # _mkstemp_inner file handles are not inherited by child processes
- if not has_spawnl:
- return # ugh, can't use SkipTest.
-
- if support.verbose:
- v="v"
- else:
- v="q"
-
- file = self.do_create()
- fd = "%d" % file.fd
-
- try:
- me = __file__ # type: str
- except NameError:
- me = sys.argv[0]
-
- # We have to exec something, so that FD_CLOEXEC will take
- # effect. The core of this test is therefore in
- # tf_inherit_check.py, which see.
- tester = os.path.join(os.path.dirname(os.path.abspath(me)),
- "tf_inherit_check.py")
-
- # On Windows a spawn* /path/ with embedded spaces shouldn't be quoted,
- # but an arg with embedded spaces should be decorated with double
- # quotes on each end
- if sys.platform in ('win32',):
- decorated = '"%s"' % sys.executable
- tester = '"%s"' % tester
- else:
- decorated = sys.executable
-
- retval = os.spawnl(os.P_WAIT, sys.executable, decorated, tester, v, fd)
- self.assertFalse(retval < 0,
- "child process caught fatal signal %d" % -retval)
- self.assertFalse(retval > 0, "child process reports failure %d"%retval)
-
- def test_textmode(self) -> None:
- # _mkstemp_inner can create files in text mode
- if not has_textmode:
- return # ugh, can't use SkipTest.
-
- # A text file is truncated at the first Ctrl+Z byte
- f = self.do_create(bin=0)
- f.write(b"blat\x1a")
- f.write(b"extra\n")
- os.lseek(f.fd, 0, os.SEEK_SET)
- self.assertEqual(os.read(f.fd, 20), b"blat")
-
-test_classes.append(test__mkstemp_inner)
-
-
-class test_gettempprefix(TC):
- """Test gettempprefix()."""
-
- def test_sane_template(self) -> None:
- # gettempprefix returns a nonempty prefix string
- p = tempfile.gettempprefix()
-
- self.assertIsInstance(p, str)
- self.assertTrue(len(p) > 0)
-
- def test_usable_template(self) -> None:
- # gettempprefix returns a usable prefix string
-
- # Create a temp directory, avoiding use of the prefix.
- # Then attempt to create a file whose name is
- # prefix + 'xxxxxx.xxx' in that directory.
- p = tempfile.gettempprefix() + "xxxxxx.xxx"
- d = tempfile.mkdtemp(prefix="")
- try:
- p = os.path.join(d, p)
- try:
- fd = os.open(p, os.O_RDWR | os.O_CREAT)
- except:
- self.failOnException("os.open")
- os.close(fd)
- os.unlink(p)
- finally:
- os.rmdir(d)
-
-test_classes.append(test_gettempprefix)
-
-
-class test_gettempdir(TC):
- """Test gettempdir()."""
-
- def test_directory_exists(self) -> None:
- # gettempdir returns a directory which exists
-
- dir = tempfile.gettempdir()
- self.assertTrue(os.path.isabs(dir) or dir == os.curdir,
- "%s is not an absolute path" % dir)
- self.assertTrue(os.path.isdir(dir),
- "%s is not a directory" % dir)
-
- def test_directory_writable(self) -> None:
- # gettempdir returns a directory writable by the user
-
- # sneaky: just instantiate a NamedTemporaryFile, which
- # defaults to writing into the directory returned by
- # gettempdir.
- try:
- file = tempfile.NamedTemporaryFile()
- file.write(b"blat")
- file.close()
- except:
- self.failOnException("create file in %s" % tempfile.gettempdir())
-
- def test_same_thing(self) -> None:
- # gettempdir always returns the same object
- a = tempfile.gettempdir()
- b = tempfile.gettempdir()
-
- self.assertTrue(a is b)
-
-test_classes.append(test_gettempdir)
-
-
-class test_mkstemp(TC):
- """Test mkstemp()."""
-
- def do_create(self, dir: str = None, pre: str = "", suf: str = "") -> None:
- if dir is None:
- dir = tempfile.gettempdir()
- try:
- (fd, name) = tempfile.mkstemp(dir=dir, prefix=pre, suffix=suf)
- (ndir, nbase) = os.path.split(name)
- adir = os.path.abspath(dir)
- self.assertEqual(adir, ndir,
- "Directory '%s' incorrectly returned as '%s'" % (adir, ndir))
- except:
- self.failOnException("mkstemp")
-
- try:
- self.nameCheck(name, dir, pre, suf)
- finally:
- os.close(fd)
- os.unlink(name)
-
- def test_basic(self) -> None:
- # mkstemp can create files
- self.do_create()
- self.do_create(pre="a")
- self.do_create(suf="b")
- self.do_create(pre="a", suf="b")
- self.do_create(pre="aa", suf=".txt")
- self.do_create(dir=".")
-
- def test_choose_directory(self) -> None:
-        # mkstemp can create files in a user-selected directory
- dir = tempfile.mkdtemp()
- try:
- self.do_create(dir=dir)
- finally:
- os.rmdir(dir)
-
-test_classes.append(test_mkstemp)
-
-
-class test_mkdtemp(TC):
- """Test mkdtemp()."""
-
- def do_create(self, dir: str = None, pre: str = "", suf: str = "") -> str:
- if dir is None:
- dir = tempfile.gettempdir()
- try:
- name = tempfile.mkdtemp(dir=dir, prefix=pre, suffix=suf)
- except:
- self.failOnException("mkdtemp")
-
- try:
- self.nameCheck(name, dir, pre, suf)
- return name
- except:
- os.rmdir(name)
- raise
-
- def test_basic(self) -> None:
- # mkdtemp can create directories
- os.rmdir(self.do_create())
- os.rmdir(self.do_create(pre="a"))
- os.rmdir(self.do_create(suf="b"))
- os.rmdir(self.do_create(pre="a", suf="b"))
- os.rmdir(self.do_create(pre="aa", suf=".txt"))
-
- def test_basic_many(self) -> None:
- # mkdtemp can create many directories (stochastic)
- extant = list(range(TEST_FILES)) # type: List[Any]
- try:
- for i in extant:
- extant[i] = self.do_create(pre="aa")
- finally:
- for i in extant:
- if(isinstance(i, str)):
- os.rmdir(i)
-
- def test_choose_directory(self) -> None:
- # mkdtemp can create directories in a user-selected directory
- dir = tempfile.mkdtemp()
- try:
- os.rmdir(self.do_create(dir=dir))
- finally:
- os.rmdir(dir)
-
- def test_mode(self) -> None:
- # mkdtemp creates directories with the proper mode
- if not has_stat:
- return # ugh, can't use SkipTest.
-
- dir = self.do_create()
- try:
- mode = stat.S_IMODE(os.stat(dir).st_mode)
- mode &= 0o777 # Mask off sticky bits inherited from /tmp
- expected = 0o700
- if sys.platform in ('win32', 'os2emx'):
- # There's no distinction among 'user', 'group' and 'world';
- # replicate the 'user' bits.
- user = expected >> 6
- expected = user * (1 + 8 + 64)
- self.assertEqual(mode, expected)
- finally:
- os.rmdir(dir)
-
-test_classes.append(test_mkdtemp)
-
-
-class test_mktemp(TC):
- """Test mktemp()."""
-
- # For safety, all use of mktemp must occur in a private directory.
- # We must also suppress the RuntimeWarning it generates.
- def setUp(self) -> None:
- self.dir = tempfile.mkdtemp()
- super().setUp()
-
- def tearDown(self) -> None:
- if self.dir:
- os.rmdir(self.dir)
- self.dir = None
- super().tearDown()
-
- class mktemped:
- def _unlink(self, path: str) -> None:
- os.unlink(path)
-
- _bflags = tempfile._bin_openflags
-
- def __init__(self, dir: str, pre: str, suf: str) -> None:
- self.name = tempfile.mktemp(dir=dir, prefix=pre, suffix=suf)
-            # Create the file. This will raise an exception if it has
-            # mysteriously appeared in the meantime.
- os.close(os.open(self.name, self._bflags, 0o600))
-
- def __del__(self) -> None:
- self._unlink(self.name)
-
- def do_create(self, pre: str = "", suf: str = "") -> mktemped:
- try:
- file = test_mktemp.mktemped(self.dir, pre, suf) # see #259
- except:
- self.failOnException("mktemp")
-
- self.nameCheck(file.name, self.dir, pre, suf)
- return file
-
- def test_basic(self) -> None:
- # mktemp can choose usable file names
- self.do_create()
- self.do_create(pre="a")
- self.do_create(suf="b")
- self.do_create(pre="a", suf="b")
- self.do_create(pre="aa", suf=".txt")
-
- def test_many(self) -> None:
- # mktemp can choose many usable file names (stochastic)
- extant = list(range(TEST_FILES)) # type: List[Any]
- for i in extant:
- extant[i] = self.do_create(pre="aa")
-
-## def test_warning(self):
-## # mktemp issues a warning when used
-## warnings.filterwarnings("error",
-## category=RuntimeWarning,
-## message="mktemp")
-## self.assertRaises(RuntimeWarning,
-## tempfile.mktemp, dir=self.dir)
-
-test_classes.append(test_mktemp)
-
-
-# We test _TemporaryFileWrapper by testing NamedTemporaryFile.
-
-
-class test_NamedTemporaryFile(TC):
- """Test NamedTemporaryFile()."""
-
- def do_create(self, dir: str = None, pre: str = "", suf: str = "",
- delete: bool = True) -> IO[Any]:
- if dir is None:
- dir = tempfile.gettempdir()
- try:
- file = tempfile.NamedTemporaryFile(dir=dir, prefix=pre, suffix=suf,
- delete=delete)
- except:
- self.failOnException("NamedTemporaryFile")
-
- self.nameCheck(file.name, dir, pre, suf)
- return file
-
-
- def test_basic(self) -> None:
- # NamedTemporaryFile can create files
- self.do_create()
- self.do_create(pre="a")
- self.do_create(suf="b")
- self.do_create(pre="a", suf="b")
- self.do_create(pre="aa", suf=".txt")
-
- def test_creates_named(self) -> None:
- # NamedTemporaryFile creates files with names
- f = tempfile.NamedTemporaryFile()
- self.assertTrue(os.path.exists(f.name),
- "NamedTemporaryFile %s does not exist" % f.name)
-
- def test_del_on_close(self) -> None:
- # A NamedTemporaryFile is deleted when closed
- dir = tempfile.mkdtemp()
- try:
- f = tempfile.NamedTemporaryFile(dir=dir)
- f.write(b'blat')
- f.close()
- self.assertFalse(os.path.exists(f.name),
- "NamedTemporaryFile %s exists after close" % f.name)
- finally:
- os.rmdir(dir)
-
- def test_dis_del_on_close(self) -> None:
- # Tests that delete-on-close can be disabled
- dir = tempfile.mkdtemp()
- tmp = None # type: str
- try:
- f = tempfile.NamedTemporaryFile(dir=dir, delete=False)
- tmp = f.name
- f.write(b'blat')
- f.close()
- self.assertTrue(os.path.exists(f.name),
- "NamedTemporaryFile %s missing after close" % f.name)
- finally:
- if tmp is not None:
- os.unlink(tmp)
- os.rmdir(dir)
-
- def test_multiple_close(self) -> None:
- # A NamedTemporaryFile can be closed many times without error
- f = tempfile.NamedTemporaryFile()
- f.write(b'abc\n')
- f.close()
- try:
- f.close()
- f.close()
- except:
- self.failOnException("close")
-
- def test_context_manager(self) -> None:
- # A NamedTemporaryFile can be used as a context manager
- with tempfile.NamedTemporaryFile() as f:
- self.assertTrue(os.path.exists(f.name))
- self.assertFalse(os.path.exists(f.name))
- def use_closed():
- with f:
- pass
- self.assertRaises(ValueError, use_closed)
-
- # How to test the mode and bufsize parameters?
-
-test_classes.append(test_NamedTemporaryFile)
-
-class test_SpooledTemporaryFile(TC):
- """Test SpooledTemporaryFile()."""
-
- def do_create(self, max_size: int = 0, dir: str = None, pre: str = "",
- suf: str = "") -> tempfile.SpooledTemporaryFile:
- if dir is None:
- dir = tempfile.gettempdir()
- try:
- file = tempfile.SpooledTemporaryFile(max_size=max_size, dir=dir, prefix=pre, suffix=suf)
- except:
- self.failOnException("SpooledTemporaryFile")
-
- return file
-
-
- def test_basic(self) -> None:
- # SpooledTemporaryFile can create files
- f = self.do_create()
- self.assertFalse(f._rolled)
- f = self.do_create(max_size=100, pre="a", suf=".txt")
- self.assertFalse(f._rolled)
-
- def test_del_on_close(self) -> None:
- # A SpooledTemporaryFile is deleted when closed
- dir = tempfile.mkdtemp()
- try:
- f = tempfile.SpooledTemporaryFile(max_size=10, dir=dir)
- self.assertFalse(f._rolled)
- f.write(b'blat ' * 5)
- self.assertTrue(f._rolled)
- filename = f.name
- f.close()
- self.assertFalse(isinstance(filename, str) and os.path.exists(filename),
- "SpooledTemporaryFile %s exists after close" % filename)
- finally:
- os.rmdir(dir)
-
- def test_rewrite_small(self) -> None:
-        # A SpooledTemporaryFile can be written to multiple times within the max_size
- f = self.do_create(max_size=30)
- self.assertFalse(f._rolled)
- for i in range(5):
- f.seek(0, 0)
- f.write(b'x' * 20)
- self.assertFalse(f._rolled)
-
- def test_write_sequential(self) -> None:
- # A SpooledTemporaryFile should hold exactly max_size bytes, and roll
- # over afterward
- f = self.do_create(max_size=30)
- self.assertFalse(f._rolled)
- f.write(b'x' * 20)
- self.assertFalse(f._rolled)
- f.write(b'x' * 10)
- self.assertFalse(f._rolled)
- f.write(b'x')
- self.assertTrue(f._rolled)
-
- def test_writelines(self) -> None:
- # Verify writelines with a SpooledTemporaryFile
- f = self.do_create()
- f.writelines([b'x', b'y', b'z'])
- f.seek(0)
- buf = f.read()
- self.assertEqual(buf, b'xyz')
-
- def test_writelines_sequential(self) -> None:
- # A SpooledTemporaryFile should hold exactly max_size bytes, and roll
- # over afterward
- f = self.do_create(max_size=35)
- f.writelines([b'x' * 20, b'x' * 10, b'x' * 5])
- self.assertFalse(f._rolled)
- f.write(b'x')
- self.assertTrue(f._rolled)
-
- def test_sparse(self) -> None:
-        # A SpooledTemporaryFile that is written to at a position past its
-        # max_size should roll over to a real file
- f = self.do_create(max_size=30)
- self.assertFalse(f._rolled)
- f.seek(100, 0)
- self.assertFalse(f._rolled)
- f.write(b'x')
- self.assertTrue(f._rolled)
-
- def test_fileno(self) -> None:
- # A SpooledTemporaryFile should roll over to a real file on fileno()
- f = self.do_create(max_size=30)
- self.assertFalse(f._rolled)
- self.assertTrue(f.fileno() > 0)
- self.assertTrue(f._rolled)
-
- def test_multiple_close_before_rollover(self) -> None:
- # A SpooledTemporaryFile can be closed many times without error
- f = tempfile.SpooledTemporaryFile()
- f.write(b'abc\n')
- self.assertFalse(f._rolled)
- f.close()
- try:
- f.close()
- f.close()
- except:
- self.failOnException("close")
-
- def test_multiple_close_after_rollover(self) -> None:
- # A SpooledTemporaryFile can be closed many times without error
- f = tempfile.SpooledTemporaryFile(max_size=1)
- f.write(b'abc\n')
- self.assertTrue(f._rolled)
- f.close()
- try:
- f.close()
- f.close()
- except:
- self.failOnException("close")
-
- def test_bound_methods(self) -> None:
- # It should be OK to steal a bound method from a SpooledTemporaryFile
- # and use it independently; when the file rolls over, those bound
- # methods should continue to function
- f = self.do_create(max_size=30)
- read = f.read
- write = f.write
- seek = f.seek
-
- write(b"a" * 35)
- write(b"b" * 35)
- seek(0, 0)
- self.assertEqual(read(70), b'a'*35 + b'b'*35)
-
- def test_text_mode(self) -> None:
- # Creating a SpooledTemporaryFile with a text mode should produce
- # a file object reading and writing (Unicode) text strings.
- f = tempfile.SpooledTemporaryFile(mode='w+', max_size=10)
- f.write("abc\n")
- f.seek(0)
- self.assertEqual(f.read(), "abc\n")
- f.write("def\n")
- f.seek(0)
- self.assertEqual(f.read(), "abc\ndef\n")
- f.write("xyzzy\n")
- f.seek(0)
- self.assertEqual(f.read(), "abc\ndef\nxyzzy\n")
- # Check that Ctrl+Z doesn't truncate the file
- f.write("foo\x1abar\n")
- f.seek(0)
- self.assertEqual(f.read(), "abc\ndef\nxyzzy\nfoo\x1abar\n")
-
- def test_text_newline_and_encoding(self) -> None:
- f = tempfile.SpooledTemporaryFile(mode='w+', max_size=10,
- newline='', encoding='utf-8')
- f.write("\u039B\r\n")
- f.seek(0)
- self.assertEqual(f.read(), "\u039B\r\n")
- self.assertFalse(f._rolled)
-
- f.write("\u039B" * 20 + "\r\n")
- f.seek(0)
- self.assertEqual(f.read(), "\u039B\r\n" + ("\u039B" * 20) + "\r\n")
- self.assertTrue(f._rolled)
-
- def test_context_manager_before_rollover(self) -> None:
- # A SpooledTemporaryFile can be used as a context manager
- with tempfile.SpooledTemporaryFile(max_size=1) as f:
- self.assertFalse(f._rolled)
- self.assertFalse(f.closed)
- self.assertTrue(f.closed)
- def use_closed():
- with f:
- pass
- self.assertRaises(ValueError, use_closed)
-
- def test_context_manager_during_rollover(self) -> None:
- # A SpooledTemporaryFile can be used as a context manager
- with tempfile.SpooledTemporaryFile(max_size=1) as f:
- self.assertFalse(f._rolled)
- f.write(b'abc\n')
- f.flush()
- self.assertTrue(f._rolled)
- self.assertFalse(f.closed)
- self.assertTrue(f.closed)
- def use_closed():
- with f:
- pass
- self.assertRaises(ValueError, use_closed)
-
- def test_context_manager_after_rollover(self) -> None:
- # A SpooledTemporaryFile can be used as a context manager
- f = tempfile.SpooledTemporaryFile(max_size=1)
- f.write(b'abc\n')
- f.flush()
- self.assertTrue(f._rolled)
- with f:
- self.assertFalse(f.closed)
- self.assertTrue(f.closed)
- def use_closed():
- with f:
- pass
- self.assertRaises(ValueError, use_closed)
-
-
-test_classes.append(test_SpooledTemporaryFile)
-
-
-class test_TemporaryFile(TC):
- """Test TemporaryFile()."""
-
- def test_basic(self) -> None:
- # TemporaryFile can create files
- # No point in testing the name params - the file has no name.
- try:
- tempfile.TemporaryFile()
- except:
- self.failOnException("TemporaryFile")
-
- def test_has_no_name(self) -> None:
- # TemporaryFile creates files with no names (on this system)
- dir = tempfile.mkdtemp()
- f = tempfile.TemporaryFile(dir=dir)
- f.write(b'blat')
-
- # Sneaky: because this file has no name, it should not prevent
- # us from removing the directory it was created in.
- try:
- os.rmdir(dir)
- except:
- ei = sys.exc_info()
- # cleanup
- f.close()
- os.rmdir(dir)
- self.failOnException("rmdir", ei)
-
- def test_multiple_close(self) -> None:
- # A TemporaryFile can be closed many times without error
- f = tempfile.TemporaryFile()
- f.write(b'abc\n')
- f.close()
- try:
- f.close()
- f.close()
- except:
- self.failOnException("close")
-
- # How to test the mode and bufsize parameters?
- def test_mode_and_encoding(self) -> None:
-
- def roundtrip(input: AnyStr, *args: Any, **kwargs: Any) -> None:
- with tempfile.TemporaryFile(*args, **kwargs) as fileobj:
- fileobj.write(input)
- fileobj.seek(0)
- self.assertEqual(input, fileobj.read())
-
- roundtrip(b"1234", "w+b")
- roundtrip("abdc\n", "w+")
- roundtrip("\u039B", "w+", encoding="utf-16")
- roundtrip("foo\r\n", "w+", newline="")
-
-
-if tempfile.NamedTemporaryFile is not tempfile.TemporaryFile:
- test_classes.append(test_TemporaryFile)
-
-
-# Helper for test_del_on_shutdown
-class NulledModules:
- def __init__(self, *modules: Any) -> None:
- self.refs = [mod.__dict__ for mod in modules]
- self.contents = [ref.copy() for ref in self.refs]
-
- def __enter__(self) -> None:
- for d in self.refs:
- for key in d:
- d[key] = None
-
- def __exit__(self, *exc_info: Any) -> None:
- for d, c in zip(self.refs, self.contents):
- d.clear()
- d.update(c)
-
-class test_TemporaryDirectory(TC):
- """Test TemporaryDirectory()."""
-
- def do_create(self, dir: str = None, pre: str = "", suf: str = "",
- recurse: int = 1) -> tempfile.TemporaryDirectory:
- if dir is None:
- dir = tempfile.gettempdir()
- try:
- tmp = tempfile.TemporaryDirectory(dir=dir, prefix=pre, suffix=suf)
- except:
- self.failOnException("TemporaryDirectory")
- self.nameCheck(tmp.name, dir, pre, suf)
- # Create a subdirectory and some files
- if recurse:
- self.do_create(tmp.name, pre, suf, recurse-1)
- with open(os.path.join(tmp.name, "test.txt"), "wb") as f:
- f.write(b"Hello world!")
- return tmp
-
- def test_mkdtemp_failure(self) -> None:
- # Check no additional exception if mkdtemp fails
- # Previously would raise AttributeError instead
- # (noted as part of Issue #10188)
- with tempfile.TemporaryDirectory() as nonexistent:
- pass
- with self.assertRaises(os.error):
- tempfile.TemporaryDirectory(dir=nonexistent)
-
- def test_explicit_cleanup(self) -> None:
- # A TemporaryDirectory is deleted when cleaned up
- dir = tempfile.mkdtemp()
- try:
- d = self.do_create(dir=dir)
- self.assertTrue(os.path.exists(d.name),
- "TemporaryDirectory %s does not exist" % d.name)
- d.cleanup()
- self.assertFalse(os.path.exists(d.name),
- "TemporaryDirectory %s exists after cleanup" % d.name)
- finally:
- os.rmdir(dir)
-
- @support.skip_unless_symlink
- def test_cleanup_with_symlink_to_a_directory(self) -> None:
- # cleanup() should not follow symlinks to directories (issue #12464)
- d1 = self.do_create()
- d2 = self.do_create()
-
- # Symlink d1/foo -> d2
- os.symlink(d2.name, os.path.join(d1.name, "foo"))
-
- # This call to cleanup() should not follow the "foo" symlink
- d1.cleanup()
-
- self.assertFalse(os.path.exists(d1.name),
- "TemporaryDirectory %s exists after cleanup" % d1.name)
- self.assertTrue(os.path.exists(d2.name),
- "Directory pointed to by a symlink was deleted")
- self.assertEqual(os.listdir(d2.name), ['test.txt'],
- "Contents of the directory pointed to by a symlink "
- "were deleted")
- d2.cleanup()
-
- @support.cpython_only
- def test_del_on_collection(self) -> None:
- # A TemporaryDirectory is deleted when garbage collected
- dir = tempfile.mkdtemp()
- try:
- d = self.do_create(dir=dir)
- name = d.name
- del d # Rely on refcounting to invoke __del__
- self.assertFalse(os.path.exists(name),
- "TemporaryDirectory %s exists after __del__" % name)
- finally:
- os.rmdir(dir)
-
- @unittest.expectedFailure # See issue #10188
- def test_del_on_shutdown(self) -> None:
- # A TemporaryDirectory may be cleaned up during shutdown
- # Make sure it works with the relevant modules nulled out
- with self.do_create() as dir:
- d = self.do_create(dir=dir)
- # Mimic the nulling out of modules that
- # occurs during system shutdown
- modules = [os, os.path]
- if has_stat:
- modules.append(stat)
- # Currently broken, so suppress the warning
- # that is otherwise emitted on stdout
- with support.captured_stderr() as err:
- with NulledModules(*modules):
- d.cleanup()
- # Currently broken, so stop spurious exception by
- # indicating the object has already been closed
- d._closed = True
- # And this assert will fail, as expected by the
- # unittest decorator...
- self.assertFalse(os.path.exists(d.name),
- "TemporaryDirectory %s exists after cleanup" % d.name)
-
- def test_warnings_on_cleanup(self) -> None:
- # Two kinds of warning on shutdown
- # Issue 10888: may write to stderr if modules are nulled out
- # ResourceWarning will be triggered by __del__
- with self.do_create() as dir:
- if os.sep != '\\':
- # Embed a backslash in order to make sure string escaping
- # in the displayed error message is dealt with correctly
- suffix = '\\check_backslash_handling'
- else:
- suffix = ''
- d = self.do_create(dir=dir, suf=suffix)
-
- #Check for the Issue 10888 message
- modules = [os, os.path]
- if has_stat:
- modules.append(stat)
- with support.captured_stderr() as err:
- with NulledModules(*modules):
- d.cleanup()
- message = err.getvalue().replace('\\\\', '\\')
- self.assertIn("while cleaning up", message)
- self.assertIn(d.name, message)
-
- # Check for the resource warning
- with support.check_warnings(('Implicitly', ResourceWarning), quiet=False):
- warnings.filterwarnings("always", category=ResourceWarning)
- d.__del__()
- self.assertFalse(os.path.exists(d.name),
- "TemporaryDirectory %s exists after __del__" % d.name)
-
- def test_multiple_close(self) -> None:
- # Can be cleaned-up many times without error
- d = self.do_create()
- d.cleanup()
- try:
- d.cleanup()
- d.cleanup()
- except:
- self.failOnException("cleanup")
-
- def test_context_manager(self) -> None:
- # Can be used as a context manager
- d = self.do_create()
- with d as name:
- self.assertTrue(os.path.exists(name))
- self.assertEqual(name, d.name)
- self.assertFalse(os.path.exists(name))
-
-
-test_classes.append(test_TemporaryDirectory)
-
-def test_main() -> None:
- support.run_unittest(*test_classes)
-
-if __name__ == "__main__":
- test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_textwrap.py b/test-data/stdlib-samples/3.2/test/test_textwrap.py
deleted file mode 100644
index 79d921a..0000000
--- a/test-data/stdlib-samples/3.2/test/test_textwrap.py
+++ /dev/null
@@ -1,601 +0,0 @@
-#
-# Test suite for the textwrap module.
-#
-# Original tests written by Greg Ward <gward at python.net>.
-# Converted to PyUnit by Peter Hansen <peter at engcorp.com>.
-# Currently maintained by Greg Ward.
-#
-# $Id$
-#
-
-import unittest
-from test import support
-
-from typing import Any, List, Sequence
-
-from textwrap import TextWrapper, wrap, fill, dedent
-
-
-class BaseTestCase(unittest.TestCase):
- '''Parent class with utility methods for textwrap tests.'''
-
- wrapper = None # type: TextWrapper
-
- def show(self, textin: Sequence[str]) -> str:
- if isinstance(textin, list):
- results = [] # type: List[str]
- for i in range(len(textin)):
- results.append(" %d: %r" % (i, textin[i]))
- result = '\n'.join(results)
- elif isinstance(textin, str):
- result = " %s\n" % repr(textin)
- return result
-
-
- def check(self, result: Sequence[str], expect: Sequence[str]) -> None:
- self.assertEqual(result, expect,
- 'expected:\n%s\nbut got:\n%s' % (
- self.show(expect), self.show(result)))
-
- def check_wrap(self, text: str, width: int, expect: Sequence[str],
- **kwargs: Any) -> None:
- result = wrap(text, width, **kwargs)
- self.check(result, expect)
-
- def check_split(self, text: str, expect: Sequence[str]) -> None:
- result = self.wrapper._split(text)
- self.assertEqual(result, expect,
- "\nexpected %r\n"
- "but got %r" % (expect, result))
-
-
-class WrapTestCase(BaseTestCase):
-
- def setUp(self) -> None:
- self.wrapper = TextWrapper(width=45)
-
- def test_simple(self) -> None:
- # Simple case: just words, spaces, and a bit of punctuation
-
- text = "Hello there, how are you this fine day? I'm glad to hear it!"
-
- self.check_wrap(text, 12,
- ["Hello there,",
- "how are you",
- "this fine",
- "day? I'm",
- "glad to hear",
- "it!"])
- self.check_wrap(text, 42,
- ["Hello there, how are you this fine day?",
- "I'm glad to hear it!"])
- self.check_wrap(text, 80, [text])
-
-
- def test_whitespace(self) -> None:
- # Whitespace munging and end-of-sentence detection
-
- text = """\
-This is a paragraph that already has
-line breaks. But some of its lines are much longer than the others,
-so it needs to be wrapped.
-Some lines are \ttabbed too.
-What a mess!
-"""
-
- expect = ["This is a paragraph that already has line",
- "breaks. But some of its lines are much",
- "longer than the others, so it needs to be",
- "wrapped. Some lines are tabbed too. What a",
- "mess!"]
-
- wrapper = TextWrapper(45, fix_sentence_endings=True)
- result = wrapper.wrap(text)
- self.check(result, expect)
-
- results = wrapper.fill(text)
- self.check(results, '\n'.join(expect))
-
- def test_fix_sentence_endings(self) -> None:
- wrapper = TextWrapper(60, fix_sentence_endings=True)
-
- # SF #847346: ensure that fix_sentence_endings=True does the
- # right thing even on input short enough that it doesn't need to
- # be wrapped.
- text = "A short line. Note the single space."
- expect = ["A short line. Note the single space."]
- self.check(wrapper.wrap(text), expect)
-
- # Test some of the hairy end cases that _fix_sentence_endings()
- # is supposed to handle (the easy stuff is tested in
- # test_whitespace() above).
- text = "Well, Doctor? What do you think?"
- expect = ["Well, Doctor? What do you think?"]
- self.check(wrapper.wrap(text), expect)
-
- text = "Well, Doctor?\nWhat do you think?"
- self.check(wrapper.wrap(text), expect)
-
- text = 'I say, chaps! Anyone for "tennis?"\nHmmph!'
- expect = ['I say, chaps! Anyone for "tennis?" Hmmph!']
- self.check(wrapper.wrap(text), expect)
-
- wrapper.width = 20
- expect = ['I say, chaps!', 'Anyone for "tennis?"', 'Hmmph!']
- self.check(wrapper.wrap(text), expect)
-
- text = 'And she said, "Go to hell!"\nCan you believe that?'
- expect = ['And she said, "Go to',
- 'hell!" Can you',
- 'believe that?']
- self.check(wrapper.wrap(text), expect)
-
- wrapper.width = 60
- expect = ['And she said, "Go to hell!" Can you believe that?']
- self.check(wrapper.wrap(text), expect)
-
- text = 'File stdio.h is nice.'
- expect = ['File stdio.h is nice.']
- self.check(wrapper.wrap(text), expect)
-
- def test_wrap_short(self) -> None:
- # Wrapping to make short lines longer
-
- text = "This is a\nshort paragraph."
-
- self.check_wrap(text, 20, ["This is a short",
- "paragraph."])
- self.check_wrap(text, 40, ["This is a short paragraph."])
-
-
- def test_wrap_short_1line(self) -> None:
- # Test endcases
-
- text = "This is a short line."
-
- self.check_wrap(text, 30, ["This is a short line."])
- self.check_wrap(text, 30, ["(1) This is a short line."],
- initial_indent="(1) ")
-
-
- def test_hyphenated(self) -> None:
- # Test breaking hyphenated words
-
- text = ("this-is-a-useful-feature-for-"
- "reformatting-posts-from-tim-peters'ly")
-
- self.check_wrap(text, 40,
- ["this-is-a-useful-feature-for-",
- "reformatting-posts-from-tim-peters'ly"])
- self.check_wrap(text, 41,
- ["this-is-a-useful-feature-for-",
- "reformatting-posts-from-tim-peters'ly"])
- self.check_wrap(text, 42,
- ["this-is-a-useful-feature-for-reformatting-",
- "posts-from-tim-peters'ly"])
-
- def test_hyphenated_numbers(self) -> None:
- # Test that hyphenated numbers (eg. dates) are not broken like words.
- text = ("Python 1.0.0 was released on 1994-01-26. Python 1.0.1 was\n"
- "released on 1994-02-15.")
-
- self.check_wrap(text, 30, ['Python 1.0.0 was released on',
- '1994-01-26. Python 1.0.1 was',
- 'released on 1994-02-15.'])
- self.check_wrap(text, 40, ['Python 1.0.0 was released on 1994-01-26.',
- 'Python 1.0.1 was released on 1994-02-15.'])
-
- text = "I do all my shopping at 7-11."
- self.check_wrap(text, 25, ["I do all my shopping at",
- "7-11."])
- self.check_wrap(text, 27, ["I do all my shopping at",
- "7-11."])
- self.check_wrap(text, 29, ["I do all my shopping at 7-11."])
-
- def test_em_dash(self) -> None:
- # Test text with em-dashes
- text = "Em-dashes should be written -- thus."
- self.check_wrap(text, 25,
- ["Em-dashes should be",
- "written -- thus."])
-
- # Probe the boundaries of the properly written em-dash,
- # ie. " -- ".
- self.check_wrap(text, 29,
- ["Em-dashes should be written",
- "-- thus."])
- expect = ["Em-dashes should be written --",
- "thus."]
- self.check_wrap(text, 30, expect)
- self.check_wrap(text, 35, expect)
- self.check_wrap(text, 36,
- ["Em-dashes should be written -- thus."])
-
- # The improperly written em-dash is handled too, because
- # it's adjacent to non-whitespace on both sides.
- text = "You can also do--this or even---this."
- expect = ["You can also do",
- "--this or even",
- "---this."]
- self.check_wrap(text, 15, expect)
- self.check_wrap(text, 16, expect)
- expect = ["You can also do--",
- "this or even---",
- "this."]
- self.check_wrap(text, 17, expect)
- self.check_wrap(text, 19, expect)
- expect = ["You can also do--this or even",
- "---this."]
- self.check_wrap(text, 29, expect)
- self.check_wrap(text, 31, expect)
- expect = ["You can also do--this or even---",
- "this."]
- self.check_wrap(text, 32, expect)
- self.check_wrap(text, 35, expect)
-
- # All of the above behaviour could be deduced by probing the
- # _split() method.
- text = "Here's an -- em-dash and--here's another---and another!"
- expect = ["Here's", " ", "an", " ", "--", " ", "em-", "dash", " ",
- "and", "--", "here's", " ", "another", "---",
- "and", " ", "another!"]
- self.check_split(text, expect)
-
- text = "and then--bam!--he was gone"
- expect = ["and", " ", "then", "--", "bam!", "--",
- "he", " ", "was", " ", "gone"]
- self.check_split(text, expect)
-
-
- def test_unix_options (self) -> None:
- # Test that Unix-style command-line options are wrapped correctly.
- # Both Optik (OptionParser) and Docutils rely on this behaviour!
-
- text = "You should use the -n option, or --dry-run in its long form."
- self.check_wrap(text, 20,
- ["You should use the",
- "-n option, or --dry-",
- "run in its long",
- "form."])
- self.check_wrap(text, 21,
- ["You should use the -n",
- "option, or --dry-run",
- "in its long form."])
- expect = ["You should use the -n option, or",
- "--dry-run in its long form."]
- self.check_wrap(text, 32, expect)
- self.check_wrap(text, 34, expect)
- self.check_wrap(text, 35, expect)
- self.check_wrap(text, 38, expect)
- expect = ["You should use the -n option, or --dry-",
- "run in its long form."]
- self.check_wrap(text, 39, expect)
- self.check_wrap(text, 41, expect)
- expect = ["You should use the -n option, or --dry-run",
- "in its long form."]
- self.check_wrap(text, 42, expect)
-
- # Again, all of the above can be deduced from _split().
- text = "the -n option, or --dry-run or --dryrun"
- expect = ["the", " ", "-n", " ", "option,", " ", "or", " ",
- "--dry-", "run", " ", "or", " ", "--dryrun"]
- self.check_split(text, expect)
-
- def test_funky_hyphens (self) -> None:
- # Screwy edge cases cooked up by David Goodger. All reported
- # in SF bug #596434.
- self.check_split("what the--hey!", ["what", " ", "the", "--", "hey!"])
- self.check_split("what the--", ["what", " ", "the--"])
- self.check_split("what the--.", ["what", " ", "the--."])
- self.check_split("--text--.", ["--text--."])
-
- # When I first read bug #596434, this is what I thought David
- # was talking about. I was wrong; these have always worked
- # fine. The real problem is tested in test_funky_parens()
- # below...
- self.check_split("--option", ["--option"])
- self.check_split("--option-opt", ["--option-", "opt"])
- self.check_split("foo --option-opt bar",
- ["foo", " ", "--option-", "opt", " ", "bar"])
-
- def test_punct_hyphens(self) -> None:
- # Oh bother, SF #965425 found another problem with hyphens --
- # hyphenated words in single quotes weren't handled correctly.
- # In fact, the bug is that *any* punctuation around a hyphenated
- # word was handled incorrectly, except for a leading "--", which
- # was special-cased for Optik and Docutils. So test a variety
- # of styles of punctuation around a hyphenated word.
- # (Actually this is based on an Optik bug report, #813077).
- self.check_split("the 'wibble-wobble' widget",
- ['the', ' ', "'wibble-", "wobble'", ' ', 'widget'])
- self.check_split('the "wibble-wobble" widget',
- ['the', ' ', '"wibble-', 'wobble"', ' ', 'widget'])
- self.check_split("the (wibble-wobble) widget",
- ['the', ' ', "(wibble-", "wobble)", ' ', 'widget'])
- self.check_split("the ['wibble-wobble'] widget",
- ['the', ' ', "['wibble-", "wobble']", ' ', 'widget'])
-
- def test_funky_parens (self) -> None:
- # Second part of SF bug #596434: long option strings inside
- # parentheses.
- self.check_split("foo (--option) bar",
- ["foo", " ", "(--option)", " ", "bar"])
-
- # Related stuff -- make sure parens work in simpler contexts.
- self.check_split("foo (bar) baz",
- ["foo", " ", "(bar)", " ", "baz"])
- self.check_split("blah (ding dong), wubba",
- ["blah", " ", "(ding", " ", "dong),",
- " ", "wubba"])
-
- def test_initial_whitespace(self) -> None:
- # SF bug #622849 reported inconsistent handling of leading
- # whitespace; let's test that a bit, shall we?
- text = " This is a sentence with leading whitespace."
- self.check_wrap(text, 50,
- [" This is a sentence with leading whitespace."])
- self.check_wrap(text, 30,
- [" This is a sentence with", "leading whitespace."])
-
- def test_no_drop_whitespace(self) -> None:
- # SF patch #1581073
- text = " This is a sentence with much whitespace."
- self.check_wrap(text, 10,
- [" This is a", " ", "sentence ",
- "with ", "much white", "space."],
- drop_whitespace=False)
-
- def test_split(self) -> None:
- # Ensure that the standard _split() method works as advertised
- # in the comments
-
- text = "Hello there -- you goof-ball, use the -b option!"
-
- result = self.wrapper._split(text)
- self.check(result,
- ["Hello", " ", "there", " ", "--", " ", "you", " ", "goof-",
- "ball,", " ", "use", " ", "the", " ", "-b", " ", "option!"])
-
- def test_break_on_hyphens(self) -> None:
- # Ensure that the break_on_hyphens attributes work
- text = "yaba daba-doo"
- self.check_wrap(text, 10, ["yaba daba-", "doo"],
- break_on_hyphens=True)
- self.check_wrap(text, 10, ["yaba", "daba-doo"],
- break_on_hyphens=False)
-
- def test_bad_width(self) -> None:
- # Ensure that width <= 0 is caught.
- text = "Whatever, it doesn't matter."
- self.assertRaises(ValueError, wrap, text, 0)
- self.assertRaises(ValueError, wrap, text, -1)
-
- def test_no_split_at_umlaut(self) -> None:
- text = "Die Empf\xe4nger-Auswahl"
- self.check_wrap(text, 13, ["Die", "Empf\xe4nger-", "Auswahl"])
-
- def test_umlaut_followed_by_dash(self) -> None:
- text = "aa \xe4\xe4-\xe4\xe4"
- self.check_wrap(text, 7, ["aa \xe4\xe4-", "\xe4\xe4"])
-
-
-class LongWordTestCase (BaseTestCase):
- def setUp(self) -> None:
- self.wrapper = TextWrapper()
- self.text = '''\
-Did you say "supercalifragilisticexpialidocious?"
-How *do* you spell that odd word, anyways?
-'''
-
- def test_break_long(self) -> None:
- # Wrap text with long words and lots of punctuation
-
- self.check_wrap(self.text, 30,
- ['Did you say "supercalifragilis',
- 'ticexpialidocious?" How *do*',
- 'you spell that odd word,',
- 'anyways?'])
- self.check_wrap(self.text, 50,
- ['Did you say "supercalifragilisticexpialidocious?"',
- 'How *do* you spell that odd word, anyways?'])
-
- # SF bug 797650. Prevent an infinite loop by making sure that at
- # least one character gets split off on every pass.
- self.check_wrap('-'*10+'hello', 10,
- ['----------',
- ' h',
- ' e',
- ' l',
- ' l',
- ' o'],
- subsequent_indent = ' '*15)
-
-        # bug 1146. Prevent a long word from being wrongly wrapped when the
- # preceding word is exactly one character shorter than the width
- self.check_wrap(self.text, 12,
- ['Did you say ',
- '"supercalifr',
- 'agilisticexp',
- 'ialidocious?',
- '" How *do*',
- 'you spell',
- 'that odd',
- 'word,',
- 'anyways?'])
-
- def test_nobreak_long(self) -> None:
- # Test with break_long_words disabled
- self.wrapper.break_long_words = False
- self.wrapper.width = 30
- expect = ['Did you say',
- '"supercalifragilisticexpialidocious?"',
- 'How *do* you spell that odd',
- 'word, anyways?'
- ]
- result = self.wrapper.wrap(self.text)
- self.check(result, expect)
-
- # Same thing with kwargs passed to standalone wrap() function.
- result = wrap(self.text, width=30, break_long_words=0)
- self.check(result, expect)
-
-
-class IndentTestCases(BaseTestCase):
-
- # called before each test method
- def setUp(self) -> None:
- self.text = '''\
-This paragraph will be filled, first without any indentation,
-and then with some (including a hanging indent).'''
-
-
- def test_fill(self) -> None:
- # Test the fill() method
-
- expect = '''\
-This paragraph will be filled, first
-without any indentation, and then with
-some (including a hanging indent).'''
-
- result = fill(self.text, 40)
- self.check(result, expect)
-
-
- def test_initial_indent(self) -> None:
- # Test initial_indent parameter
-
- expect = [" This paragraph will be filled,",
- "first without any indentation, and then",
- "with some (including a hanging indent)."]
- result = wrap(self.text, 40, initial_indent=" ")
- self.check(result, expect)
-
- expects = "\n".join(expect)
- results = fill(self.text, 40, initial_indent=" ")
- self.check(results, expects)
-
-
- def test_subsequent_indent(self) -> None:
- # Test subsequent_indent parameter
-
- expect = '''\
- * This paragraph will be filled, first
- without any indentation, and then
- with some (including a hanging
- indent).'''
-
- result = fill(self.text, 40,
- initial_indent=" * ", subsequent_indent=" ")
- self.check(result, expect)
-
-
-# Despite the similar names, DedentTestCase is *not* the inverse
-# of IndentTestCase!
-class DedentTestCase(unittest.TestCase):
-
- def assertUnchanged(self, text: str) -> None:
- """assert that dedent() has no effect on 'text'"""
- self.assertEqual(text, dedent(text))
-
- def test_dedent_nomargin(self) -> None:
- # No lines indented.
- text = "Hello there.\nHow are you?\nOh good, I'm glad."
- self.assertUnchanged(text)
-
- # Similar, with a blank line.
- text = "Hello there.\n\nBoo!"
- self.assertUnchanged(text)
-
- # Some lines indented, but overall margin is still zero.
- text = "Hello there.\n This is indented."
- self.assertUnchanged(text)
-
- # Again, add a blank line.
- text = "Hello there.\n\n Boo!\n"
- self.assertUnchanged(text)
-
- def test_dedent_even(self) -> None:
- # All lines indented by two spaces.
- text = " Hello there.\n How are ya?\n Oh good."
- expect = "Hello there.\nHow are ya?\nOh good."
- self.assertEqual(expect, dedent(text))
-
- # Same, with blank lines.
- text = " Hello there.\n\n How are ya?\n Oh good.\n"
- expect = "Hello there.\n\nHow are ya?\nOh good.\n"
- self.assertEqual(expect, dedent(text))
-
- # Now indent one of the blank lines.
- text = " Hello there.\n \n How are ya?\n Oh good.\n"
- expect = "Hello there.\n\nHow are ya?\nOh good.\n"
- self.assertEqual(expect, dedent(text))
-
- def test_dedent_uneven(self) -> None:
- # Lines indented unevenly.
- text = '''\
- def foo():
- while 1:
- return foo
- '''
- expect = '''\
-def foo():
- while 1:
- return foo
-'''
- self.assertEqual(expect, dedent(text))
-
- # Uneven indentation with a blank line.
- text = " Foo\n Bar\n\n Baz\n"
- expect = "Foo\n Bar\n\n Baz\n"
- self.assertEqual(expect, dedent(text))
-
- # Uneven indentation with a whitespace-only line.
- text = " Foo\n Bar\n \n Baz\n"
- expect = "Foo\n Bar\n\n Baz\n"
- self.assertEqual(expect, dedent(text))
-
- # dedent() should not mangle internal tabs
- def test_dedent_preserve_internal_tabs(self) -> None:
- text = " hello\tthere\n how are\tyou?"
- expect = "hello\tthere\nhow are\tyou?"
- self.assertEqual(expect, dedent(text))
-
- # make sure that it preserves tabs when it's not making any
- # changes at all
- self.assertEqual(expect, dedent(expect))
-
- # dedent() should not mangle tabs in the margin (i.e.
- # tabs and spaces both count as margin, but are *not*
- # considered equivalent)
- def test_dedent_preserve_margin_tabs(self) -> None:
- text = " hello there\n\thow are you?"
- self.assertUnchanged(text)
-
- # same effect even if we have 8 spaces
- text = " hello there\n\thow are you?"
- self.assertUnchanged(text)
-
- # dedent() only removes whitespace that can be uniformly removed!
- text = "\thello there\n\thow are you?"
- expect = "hello there\nhow are you?"
- self.assertEqual(expect, dedent(text))
-
- text = " \thello there\n \thow are you?"
- self.assertEqual(expect, dedent(text))
-
- text = " \t hello there\n \t how are you?"
- self.assertEqual(expect, dedent(text))
-
- text = " \thello there\n \t how are you?"
- expect = "hello there\n how are you?"
- self.assertEqual(expect, dedent(text))
-
-
-def test_main() -> None:
- support.run_unittest(WrapTestCase,
- LongWordTestCase,
- IndentTestCases,
- DedentTestCase)
-
-if __name__ == '__main__':
- test_main()
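
For orientation, the behaviour exercised by the deleted test suite above is that of the textwrap module itself. A minimal sketch (not part of the upstream diff) that reproduces the test_break_on_hyphens expectations with plain textwrap calls, assuming the standard-library textwrap matches the 3.2 sample removed further below:

    import textwrap

    # break_on_hyphens=True (the default) allows a break right after the hyphen.
    print(textwrap.wrap("yaba daba-doo", width=10, break_on_hyphens=True))
    # per the test case above: ['yaba daba-', 'doo']

    # With break_on_hyphens=False the hyphenated word is kept intact.
    print(textwrap.wrap("yaba daba-doo", width=10, break_on_hyphens=False))
    # per the test case above: ['yaba', 'daba-doo']
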
diff --git a/test-data/stdlib-samples/3.2/test/tf_inherit_check.py b/test-data/stdlib-samples/3.2/test/tf_inherit_check.py
deleted file mode 100644
index 92ebd95..0000000
--- a/test-data/stdlib-samples/3.2/test/tf_inherit_check.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# Helper script for test_tempfile.py. argv[2] is the number of a file
-# descriptor which should _not_ be open. Check this by attempting to
-# write to it -- if we succeed, something is wrong.
-
-import sys
-import os
-
-verbose = (sys.argv[1] == 'v')
-try:
- fd = int(sys.argv[2])
-
- try:
- os.write(fd, b"blat")
- except os.error:
- # Success -- could not write to fd.
- sys.exit(0)
- else:
- if verbose:
- sys.stderr.write("fd %d is open in child" % fd)
- sys.exit(1)
-
-except Exception:
- if verbose:
- raise
- sys.exit(1)
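
The helper above relies on a simple idiom: a descriptor is treated as closed when writing to it raises os.error (an alias of OSError). A minimal standalone sketch of that idiom, independent of test_tempfile.py; the function name is illustrative only:

    import os

    def fd_is_closed(fd: int) -> bool:
        # Probe by writing, exactly as the helper does: failure means the
        # descriptor is not open; success means it is open (and leaves the
        # probe bytes behind, so only use this on descriptors expected to
        # be closed).
        try:
            os.write(fd, b"probe")
        except OSError:
            return True
        return False
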
diff --git a/test-data/stdlib-samples/3.2/textwrap.py b/test-data/stdlib-samples/3.2/textwrap.py
deleted file mode 100644
index a6d0266..0000000
--- a/test-data/stdlib-samples/3.2/textwrap.py
+++ /dev/null
@@ -1,391 +0,0 @@
-"""Text wrapping and filling.
-"""
-
-# Copyright (C) 1999-2001 Gregory P. Ward.
-# Copyright (C) 2002, 2003 Python Software Foundation.
-# Written by Greg Ward <gward at python.net>
-
-import string, re
-
-from typing import Dict, List, Any
-
-__all__ = ['TextWrapper', 'wrap', 'fill', 'dedent']
-
-# Hardcode the recognized whitespace characters to the US-ASCII
-# whitespace characters. The main reason for doing this is that in
-# ISO-8859-1, 0xa0 is non-breaking whitespace, so in certain locales
-# that character winds up in string.whitespace. Respecting
-# string.whitespace in those cases would 1) make textwrap treat 0xa0 the
-# same as any other whitespace char, which is clearly wrong (it's a
-# *non-breaking* space), 2) possibly cause problems with Unicode,
-# since 0xa0 is not in range(128).
-_whitespace = '\t\n\x0b\x0c\r '
-
-class TextWrapper:
- """
- Object for wrapping/filling text. The public interface consists of
- the wrap() and fill() methods; the other methods are just there for
- subclasses to override in order to tweak the default behaviour.
- If you want to completely replace the main wrapping algorithm,
- you'll probably have to override _wrap_chunks().
-
- Several instance attributes control various aspects of wrapping:
- width (default: 70)
- the maximum width of wrapped lines (unless break_long_words
- is false)
- initial_indent (default: "")
- string that will be prepended to the first line of wrapped
- output. Counts towards the line's width.
- subsequent_indent (default: "")
- string that will be prepended to all lines save the first
- of wrapped output; also counts towards each line's width.
- expand_tabs (default: true)
- Expand tabs in input text to spaces before further processing.
- Each tab will become 1 .. 8 spaces, depending on its position in
- its line. If false, each tab is treated as a single character.
- replace_whitespace (default: true)
- Replace all whitespace characters in the input text by spaces
- after tab expansion. Note that if expand_tabs is false and
- replace_whitespace is true, every tab will be converted to a
- single space!
- fix_sentence_endings (default: false)
- Ensure that sentence-ending punctuation is always followed
- by two spaces. Off by default because the algorithm is
- (unavoidably) imperfect.
- break_long_words (default: true)
- Break words longer than 'width'. If false, those words will not
- be broken, and some lines might be longer than 'width'.
- break_on_hyphens (default: true)
- Allow breaking hyphenated words. If true, wrapping will occur
- preferably on whitespaces and right after hyphens part of
- compound words.
- drop_whitespace (default: true)
- Drop leading and trailing whitespace from lines.
- """
-
- unicode_whitespace_trans = {} # type: Dict[int, int]
- uspace = ord(' ')
- for x in _whitespace:
- unicode_whitespace_trans[ord(x)] = uspace
-
- # This funky little regex is just the trick for splitting
- # text up into word-wrappable chunks. E.g.
- # "Hello there -- you goof-ball, use the -b option!"
- # splits into
- # Hello/ /there/ /--/ /you/ /goof-/ball,/ /use/ /the/ /-b/ /option!
- # (after stripping out empty strings).
- wordsep_re = re.compile(
- r'(\s+|' # any whitespace
- r'[^\s\w]*\w+[^0-9\W]-(?=\w+[^0-9\W])|' # hyphenated words
- r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') # em-dash
-
-    # This less funky little regex just splits on recognized spaces. E.g.
- # "Hello there -- you goof-ball, use the -b option!"
- # splits into
- # Hello/ /there/ /--/ /you/ /goof-ball,/ /use/ /the/ /-b/ /option!/
- wordsep_simple_re = re.compile(r'(\s+)')
-
- # XXX this is not locale- or charset-aware -- string.lowercase
- # is US-ASCII only (and therefore English-only)
- sentence_end_re = re.compile(r'[a-z]' # lowercase letter
- r'[\.\!\?]' # sentence-ending punct.
- r'[\"\']?' # optional end-of-quote
- r'\Z') # end of chunk
-
-
- def __init__(self,
- width: int = 70,
- initial_indent: str = "",
- subsequent_indent: str = "",
- expand_tabs: bool = True,
- replace_whitespace: bool = True,
- fix_sentence_endings: bool = False,
- break_long_words: bool = True,
- drop_whitespace: bool = True,
- break_on_hyphens: bool = True) -> None:
- self.width = width
- self.initial_indent = initial_indent
- self.subsequent_indent = subsequent_indent
- self.expand_tabs = expand_tabs
- self.replace_whitespace = replace_whitespace
- self.fix_sentence_endings = fix_sentence_endings
- self.break_long_words = break_long_words
- self.drop_whitespace = drop_whitespace
- self.break_on_hyphens = break_on_hyphens
-
-
- # -- Private methods -----------------------------------------------
- # (possibly useful for subclasses to override)
-
- def _munge_whitespace(self, text: str) -> str:
- """_munge_whitespace(text : string) -> string
-
- Munge whitespace in text: expand tabs and convert all other
- whitespace characters to spaces. Eg. " foo\tbar\n\nbaz"
- becomes " foo bar baz".
- """
- if self.expand_tabs:
- text = text.expandtabs()
- if self.replace_whitespace:
- text = text.translate(self.unicode_whitespace_trans)
- return text
-
-
- def _split(self, text: str) -> List[str]:
- """_split(text : string) -> [string]
-
- Split the text to wrap into indivisible chunks. Chunks are
- not quite the same as words; see _wrap_chunks() for full
- details. As an example, the text
- Look, goof-ball -- use the -b option!
- breaks into the following chunks:
- 'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ',
- 'use', ' ', 'the', ' ', '-b', ' ', 'option!'
- if break_on_hyphens is True, or in:
- 'Look,', ' ', 'goof-ball', ' ', '--', ' ',
-          'use', ' ', 'the', ' ', '-b', ' ', 'option!'
- otherwise.
- """
- if self.break_on_hyphens is True:
- chunks = self.wordsep_re.split(text)
- else:
- chunks = self.wordsep_simple_re.split(text)
- chunks = [c for c in chunks if c]
- return chunks
-
- def _fix_sentence_endings(self, chunks: List[str]) -> None:
- """_fix_sentence_endings(chunks : [string])
-
- Correct for sentence endings buried in 'chunks'. Eg. when the
- original text contains "... foo.\nBar ...", munge_whitespace()
- and split() will convert that to [..., "foo.", " ", "Bar", ...]
- which has one too few spaces; this method simply changes the one
- space to two.
- """
- i = 0
- patsearch = self.sentence_end_re.search
- while i < len(chunks)-1:
- if chunks[i+1] == " " and patsearch(chunks[i]):
- chunks[i+1] = " "
- i += 2
- else:
- i += 1
-
- def _handle_long_word(self, reversed_chunks: List[str],
- cur_line: List[str], cur_len: int,
- width: int) -> None:
- """_handle_long_word(chunks : [string],
- cur_line : [string],
- cur_len : int, width : int)
-
- Handle a chunk of text (most likely a word, not whitespace) that
- is too long to fit in any line.
- """
- # Figure out when indent is larger than the specified width, and make
- # sure at least one character is stripped off on every pass
- if width < 1:
- space_left = 1
- else:
- space_left = width - cur_len
-
- # If we're allowed to break long words, then do so: put as much
- # of the next chunk onto the current line as will fit.
- if self.break_long_words:
- cur_line.append(reversed_chunks[-1][:space_left])
- reversed_chunks[-1] = reversed_chunks[-1][space_left:]
-
- # Otherwise, we have to preserve the long word intact. Only add
- # it to the current line if there's nothing already there --
- # that minimizes how much we violate the width constraint.
- elif not cur_line:
- cur_line.append(reversed_chunks.pop())
-
- # If we're not allowed to break long words, and there's already
- # text on the current line, do nothing. Next time through the
- # main loop of _wrap_chunks(), we'll wind up here again, but
- # cur_len will be zero, so the next line will be entirely
- # devoted to the long word that we can't handle right now.
-
- def _wrap_chunks(self, chunks: List[str]) -> List[str]:
- """_wrap_chunks(chunks : [string]) -> [string]
-
- Wrap a sequence of text chunks and return a list of lines of
- length 'self.width' or less. (If 'break_long_words' is false,
- some lines may be longer than this.) Chunks correspond roughly
- to words and the whitespace between them: each chunk is
- indivisible (modulo 'break_long_words'), but a line break can
- come between any two chunks. Chunks should not have internal
- whitespace; ie. a chunk is either all whitespace or a "word".
- Whitespace chunks will be removed from the beginning and end of
- lines, but apart from that whitespace is preserved.
- """
- lines = [] # type: List[str]
- if self.width <= 0:
- raise ValueError("invalid width %r (must be > 0)" % self.width)
-
- # Arrange in reverse order so items can be efficiently popped
-        # from a stack of chunks.
- chunks.reverse()
-
- while chunks:
-
- # Start the list of chunks that will make up the current line.
- # cur_len is just the length of all the chunks in cur_line.
- cur_line = [] # type: List[str]
- cur_len = 0
-
- # Figure out which static string will prefix this line.
- if lines:
- indent = self.subsequent_indent
- else:
- indent = self.initial_indent
-
- # Maximum width for this line.
- width = self.width - len(indent)
-
- # First chunk on line is whitespace -- drop it, unless this
- # is the very beginning of the text (ie. no lines started yet).
- if self.drop_whitespace and chunks[-1].strip() == '' and lines:
- del chunks[-1]
-
- while chunks:
- l = len(chunks[-1])
-
- # Can at least squeeze this chunk onto the current line.
- if cur_len + l <= width:
- cur_line.append(chunks.pop())
- cur_len += l
-
- # Nope, this line is full.
- else:
- break
-
- # The current line is full, and the next chunk is too big to
- # fit on *any* line (not just this one).
- if chunks and len(chunks[-1]) > width:
- self._handle_long_word(chunks, cur_line, cur_len, width)
-
- # If the last chunk on this line is all whitespace, drop it.
- if self.drop_whitespace and cur_line and cur_line[-1].strip() == '':
- del cur_line[-1]
-
- # Convert current line back to a string and store it in list
- # of all lines (return value).
- if cur_line:
- lines.append(indent + ''.join(cur_line))
-
- return lines
-
-
- # -- Public interface ----------------------------------------------
-
- def wrap(self, text: str) -> List[str]:
- """wrap(text : string) -> [string]
-
- Reformat the single paragraph in 'text' so it fits in lines of
- no more than 'self.width' columns, and return a list of wrapped
- lines. Tabs in 'text' are expanded with string.expandtabs(),
- and all other whitespace characters (including newline) are
- converted to space.
- """
- text = self._munge_whitespace(text)
- chunks = self._split(text)
- if self.fix_sentence_endings:
- self._fix_sentence_endings(chunks)
- return self._wrap_chunks(chunks)
-
- def fill(self, text: str) -> str:
- """fill(text : string) -> string
-
- Reformat the single paragraph in 'text' to fit in lines of no
- more than 'self.width' columns, and return a new string
- containing the entire wrapped paragraph.
- """
- return "\n".join(self.wrap(text))
-
-
-# -- Convenience interface ---------------------------------------------
-
-def wrap(text: str, width: int = 70, **kwargs: Any) -> List[str]:
- """Wrap a single paragraph of text, returning a list of wrapped lines.
-
- Reformat the single paragraph in 'text' so it fits in lines of no
- more than 'width' columns, and return a list of wrapped lines. By
- default, tabs in 'text' are expanded with string.expandtabs(), and
- all other whitespace characters (including newline) are converted to
- space. See TextWrapper class for available keyword args to customize
- wrapping behaviour.
- """
- w = TextWrapper(width=width, **kwargs)
- return w.wrap(text)
-
-def fill(text: str, width: int = 70, **kwargs: Any) -> str:
- """Fill a single paragraph of text, returning a new string.
-
- Reformat the single paragraph in 'text' to fit in lines of no more
- than 'width' columns, and return a new string containing the entire
- wrapped paragraph. As with wrap(), tabs are expanded and other
- whitespace characters converted to space. See TextWrapper class for
- available keyword args to customize wrapping behaviour.
- """
- w = TextWrapper(width=width, **kwargs)
- return w.fill(text)
-
-
-# -- Loosely related functionality -------------------------------------
-
-_whitespace_only_re = re.compile('^[ \t]+$', re.MULTILINE)
-_leading_whitespace_re = re.compile('(^[ \t]*)(?:[^ \t\n])', re.MULTILINE)
-
-def dedent(text: str) -> str:
- """Remove any common leading whitespace from every line in `text`.
-
- This can be used to make triple-quoted strings line up with the left
- edge of the display, while still presenting them in the source code
- in indented form.
-
- Note that tabs and spaces are both treated as whitespace, but they
- are not equal: the lines " hello" and "\thello" are
- considered to have no common leading whitespace. (This behaviour is
- new in Python 2.5; older versions of this module incorrectly
- expanded tabs before searching for common leading whitespace.)
- """
- # Look for the longest leading string of spaces and tabs common to
- # all lines.
- margin = None # type: str
- text = _whitespace_only_re.sub('', text)
- indents = _leading_whitespace_re.findall(text)
- for indent in indents:
- if margin is None:
- margin = indent
-
- # Current line more deeply indented than previous winner:
- # no change (previous winner is still on top).
- elif indent.startswith(margin):
- pass
-
- # Current line consistent with and no deeper than previous winner:
- # it's the new winner.
- elif margin.startswith(indent):
- margin = indent
-
- # Current line and previous winner have no common whitespace:
- # there is no margin.
- else:
- margin = ""
- break
-
- # sanity check (testing/debugging only)
- if 0 and margin:
- for line in text.split("\n"):
- assert not line or line.startswith(margin), \
- "line = %r, margin = %r" % (line, margin)
-
- if margin:
- text = re.sub(r'(?m)^' + margin, '', text)
- return text
-
-if __name__ == "__main__":
- #print dedent("\tfoo\n\tbar")
- #print dedent(" \thello there\n \t how are you?")
- print(dedent("Hello there.\n This is indented."))
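
A minimal usage sketch of the public interface defined above (wrap, fill and dedent), using only the documented defaults; it is illustrative and not taken from the deleted file:

    import textwrap

    paragraph = "This paragraph will be filled, first without any indentation."

    # fill() wraps to at most 40 columns and joins the lines with newlines.
    print(textwrap.fill(paragraph, width=40))

    # dedent() removes the common 4-space margin but keeps the relative
    # extra indentation of the second line.
    print(textwrap.dedent("    Hello there.\n      This is indented.\n"))
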
diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test
deleted file mode 100644
index 1caeabf..0000000
--- a/test-data/unit/check-abstract.test
+++ /dev/null
@@ -1,734 +0,0 @@
--- Type checker test cases for abstract classes.
-
-
--- Subtyping with abstract classes
--- -------------------------------
-
-
-[case testAbstractClassSubclasses]
-
-from abc import abstractmethod, ABCMeta
-
-i = None # type: I
-j = None # type: J
-a = None # type: A
-b = None # type: B
-c = None # type: C
-
-j = c # E: Incompatible types in assignment (expression has type "C", variable has type "J")
-a = i # E: Incompatible types in assignment (expression has type "I", variable has type "A")
-a = j # E: Incompatible types in assignment (expression has type "J", variable has type "A")
-b = i # E: Incompatible types in assignment (expression has type "I", variable has type "B")
-
-i = a
-i = b
-i = c
-j = a
-j = b
-a = b
-
-class I(metaclass=ABCMeta):
- @abstractmethod
- def f(self): pass
-class J(metaclass=ABCMeta):
- @abstractmethod
- def g(self): pass
-class A(I, J): pass
-class B(A): pass
-class C(I): pass
-
-[case testAbstractClassSubtypingViaExtension]
-
-from abc import abstractmethod, ABCMeta
-
-i = None # type: I
-j = None # type: J
-a = None # type: A
-o = None # type: object
-
-j = i # E: Incompatible types in assignment (expression has type "I", variable has type "J")
-a = i # E: Incompatible types in assignment (expression has type "I", variable has type "A")
-a = j # E: Incompatible types in assignment (expression has type "J", variable has type "A")
-i = o # E: Incompatible types in assignment (expression has type "object", variable has type "I")
-j = o # E: Incompatible types in assignment (expression has type "object", variable has type "J")
-
-i = a
-j = a
-i = j
-o = i
-o = j
-
-class I(metaclass=ABCMeta):
- @abstractmethod
- def f(self): pass
-class J(I): pass
-class A(J): pass
-
-[case testInheritingAbstractClassInSubclass]
-
-from abc import abstractmethod, ABCMeta
-
-i = None # type: I
-a = None # type: A
-b = None # type: B
-
-i = a # E: Incompatible types in assignment (expression has type "A", variable has type "I")
-b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-a = b
-i = b
-
-class I(metaclass=ABCMeta):
- @abstractmethod
- def f(self): pass
-class A: pass
-class B(A, I): pass
-
-
--- Abstract class objects
--- ----------------------
-
-
-[case testAbstractClassAsTypeObject]
-
-from abc import abstractmethod, ABCMeta
-
-o = None # type: object
-t = None # type: type
-
-o = I
-t = I
-
-class I(metaclass=ABCMeta):
- @abstractmethod
- def f(self): pass
-
-[case testAbstractClassInCasts]
-from typing import cast
-from abc import abstractmethod, ABCMeta
-
-class I(metaclass=ABCMeta):
- @abstractmethod
- def f(self): pass
-class A(I): pass
-class B: pass
-
-i, a, b = None, None, None # type: (I, A, B)
-o = None # type: object
-
-a = cast(I, o) # E: Incompatible types in assignment (expression has type "I", variable has type "A")
-b = cast(B, i) # Ok; a subclass of B might inherit I
-i = cast(I, b) # Ok; a subclass of B might inherit I
-
-i = cast(I, o)
-i = cast(I, a)
-
-[case testInstantiatingClassThatImplementsAbstractMethod]
-from abc import abstractmethod, ABCMeta
-import typing
-class A(metaclass=ABCMeta):
- @abstractmethod
- def f(self): pass
-class B(A):
- def f(self): pass
-B()
-[out]
-
-[case testInstantiatingAbstractClass]
-from abc import abstractmethod, ABCMeta
-import typing
-class A(metaclass=ABCMeta): pass
-class B(metaclass=ABCMeta):
- @abstractmethod
- def f(self): pass
-A() # OK
-B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'f'
-[out]
-
-[case testInstantiatingClassWithInheritedAbstractMethod]
-from abc import abstractmethod, ABCMeta
-import typing
-class A(metaclass=ABCMeta):
- @abstractmethod
- def f(self): pass
- @abstractmethod
- def g(self): pass
-class B(A): pass
-B()# E: Cannot instantiate abstract class 'B' with abstract attributes 'f' and 'g'
-[out]
-
-[case testInstantiatingClassWithInheritedAbstractMethodAndSuppression]
-from abc import abstractmethod, ABCMeta
-import typing
-class A(metaclass=ABCMeta):
- @abstractmethod
- def a(self): pass
- @abstractmethod
- def b(self): pass
- @abstractmethod
- def c(self): pass
- @abstractmethod
- def d(self): pass
- @abstractmethod
- def e(self): pass
- @abstractmethod
- def f(self): pass
- @abstractmethod
- def g(self): pass
- @abstractmethod
- def h(self): pass
- @abstractmethod
- def i(self): pass
- @abstractmethod
- def j(self): pass
-a = A() # E: Cannot instantiate abstract class 'A' with abstract attributes 'a', 'b', ... and 'j' (7 methods suppressed)
-[out]
-
-
--- Implementing abstract methods
--- -----------------------------
-
-
-[case testImplementingAbstractMethod]
-from abc import abstractmethod, ABCMeta
-import typing
-class A(metaclass=ABCMeta):
- @abstractmethod
- def f(self, x: int) -> int: pass
- @abstractmethod
- def g(self, x: int) -> int: pass
-class B(A):
- def f(self, x: str) -> int: \
- # E: Argument 1 of "f" incompatible with supertype "A"
- pass
- def g(self, x: int) -> int: pass
-[out]
-
-[case testImplementingAbstractMethodWithMultipleBaseClasses]
-from abc import abstractmethod, ABCMeta
-import typing
-class I(metaclass=ABCMeta):
- @abstractmethod
- def f(self, x: int) -> int: pass
-class J(metaclass=ABCMeta):
- @abstractmethod
- def g(self, x: str) -> str: pass
-class A(I, J):
- def f(self, x: str) -> int: pass \
- # E: Argument 1 of "f" incompatible with supertype "I"
- def g(self, x: str) -> int: pass \
- # E: Return type of "g" incompatible with supertype "J"
- def h(self) -> int: pass # Not related to any base class
-[out]
-
-[case testImplementingAbstractMethodWithExtension]
-from abc import abstractmethod, ABCMeta
-import typing
-class J(metaclass=ABCMeta):
- @abstractmethod
- def f(self, x: int) -> int: pass
-class I(J): pass
-class A(I):
- def f(self, x: str) -> int: pass \
- # E: Argument 1 of "f" incompatible with supertype "J"
-[out]
-
-[case testInvalidOverridingAbstractMethod]
-from abc import abstractmethod, ABCMeta
-import typing
-class J(metaclass=ABCMeta):
- @abstractmethod
- def f(self, x: 'J') -> None: pass
-class I(J):
- @abstractmethod
- def f(self, x: 'I') -> None: pass # E: Argument 1 of "f" incompatible with supertype "J"
-[out]
-
-[case testAbstractClassCoAndContraVariance]
-from abc import abstractmethod, ABCMeta
-import typing
-class I(metaclass=ABCMeta):
- @abstractmethod
- def f(self, a: A) -> 'I': pass
- @abstractmethod
- def g(self, a: A) -> 'I': pass
- @abstractmethod
- def h(self, a: 'I') -> A: pass
-class A(I):
- def h(self, a: 'A') -> 'I': # Fail
- pass
- def f(self, a: 'I') -> 'I':
- pass
- def g(self, a: 'A') -> 'A':
- pass
-[out]
-main:11: error: Argument 1 of "h" incompatible with supertype "I"
-main:11: error: Return type of "h" incompatible with supertype "I"
-
-
--- Accessing abstract members
--- --------------------------
-
-
-[case testAccessingAbstractMethod]
-
-from abc import abstractmethod, ABCMeta
-
-class I(metaclass=ABCMeta):
- @abstractmethod
- def f(self, a: int) -> str: pass
-
-i, a, b = None, None, None # type: (I, int, str)
-
-a = i.f(a) # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-b = i.f(b) # E: Argument 1 to "f" of "I" has incompatible type "str"; expected "int"
-i.g() # E: "I" has no attribute "g"
-
-b = i.f(a)
-
-[case testAccessingInheritedAbstractMethod]
-
-from abc import abstractmethod, ABCMeta
-
-class J(metaclass=ABCMeta):
- @abstractmethod
- def f(self, a: int) -> str: pass
-class I(J): pass
-
-i, a, b = None, None, None # type: (I, int, str)
-
-a = i.f(1) # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-b = i.f(1)
-
-
--- Any (dynamic) types
--- -------------------
-
-
-[case testAbstractClassWithAllDynamicTypes]
-from abc import abstractmethod, ABCMeta
-import typing
-class I(metaclass=ABCMeta):
- @abstractmethod
- def f(self, x): pass
- @abstractmethod
- def g(self, x): pass
-class A(I):
- def f(self, x): pass
- def g(self, x, y) -> None: pass \
- # E: Signature of "g" incompatible with supertype "I"
-[out]
-
-[case testAbstractClassWithAllDynamicTypes2]
-from abc import abstractmethod, ABCMeta
-import typing
-class I(metaclass=ABCMeta):
- @abstractmethod
- def f(self, x): pass
- @abstractmethod
- def g(self, x): pass
-class A(I):
- def f(self, x): pass
- def g(self, x, y): pass
-[out]
-
-[case testAbstractClassWithImplementationUsingDynamicTypes]
-from abc import abstractmethod, ABCMeta
-import typing
-class I(metaclass=ABCMeta):
- @abstractmethod
- def f(self, x: int) -> None: pass
- @abstractmethod
- def g(self, x: int) -> None: pass
-class A(I):
- def f(self, x): pass
- def g(self, x, y): pass
-[out]
-
-
--- Special cases
--- -------------
-
-
-[case testMultipleAbstractBases]
-from abc import abstractmethod, ABCMeta
-import typing
-class A(metaclass=ABCMeta):
- @abstractmethod
- def f(self) -> None: pass
-class B(metaclass=ABCMeta):
- @abstractmethod
- def g(self) -> None: pass
-class C(A, B):
- @abstractmethod
- def h(self) -> None: pass
-
-[case testMemberAccessWithMultipleAbstractBaseClasses]
-
-from abc import abstractmethod, ABCMeta
-
-class A(metaclass=ABCMeta):
- @abstractmethod
- def f(self) -> None: pass
-class B(metaclass=ABCMeta):
- @abstractmethod
- def g(self) -> None: pass
-class C(A, B): pass
-x = None # type: C
-x.f()
-x.g()
-x.f(x) # E: Too many arguments for "f" of "A"
-x.g(x) # E: Too many arguments for "g" of "B"
-
-[case testInstantiatingAbstractClassWithMultipleBaseClasses]
-
-from abc import abstractmethod, ABCMeta
-
-class A(metaclass=ABCMeta):
- @abstractmethod
- def f(self) -> None: pass
-class B(metaclass=ABCMeta):
- @abstractmethod
- def g(self) -> None: pass
-class C(A, B):
- def f(self) -> None: pass
-class D(A, B):
- def g(self) -> None: pass
-class E(A, B):
- def f(self) -> None: pass
- def g(self) -> None: pass
-C() # E: Cannot instantiate abstract class 'C' with abstract attribute 'g'
-D() # E: Cannot instantiate abstract class 'D' with abstract attribute 'f'
-E()
-
-[case testInconsistentMro]
-from abc import abstractmethod, ABCMeta
-import typing
-
-class A(metaclass=ABCMeta): pass
-class B(object, A): pass \
- # E: Cannot determine consistent method resolution order (MRO) for "B"
-
-[case testOverloadedAbstractMethod]
-from abc import abstractmethod, ABCMeta
-from typing import overload
-
-class A(metaclass=ABCMeta):
- @abstractmethod
- @overload
- def f(self, x: int) -> int: pass
- @abstractmethod
- @overload
- def f(self, x: str) -> str: pass
-
-class B(A):
- @overload
- def f(self, x: int) -> int: pass
- @overload
- def f(self, x: str) -> str: pass
-A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'f'
-B()
-B().f(1)
-a = B() # type: A
-a.f(1)
-a.f('')
-a.f(B()) # E: No overload variant of "f" of "A" matches argument types [__main__.B]
-
-[case testOverloadedAbstractMethodWithAlternativeDecoratorOrder]
-from abc import abstractmethod, ABCMeta
-from typing import overload
-
-class A(metaclass=ABCMeta):
- @overload
- @abstractmethod
- def f(self, x: int) -> int: pass
- @overload
- @abstractmethod
- def f(self, x: str) -> str: pass
-
-class B(A):
- @overload
- def f(self, x: int) -> int: pass
- @overload
- def f(self, x: str) -> str: pass
-A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'f'
-B()
-B().f(1)
-a = B() # type: A
-a.f(1)
-a.f('')
-a.f(B()) # E: No overload variant of "f" of "A" matches argument types [__main__.B]
-
-[case testOverloadedAbstractMethodVariantMissingDecorator1]
-from abc import abstractmethod, ABCMeta
-from typing import overload
-
-class A(metaclass=ABCMeta):
- @abstractmethod \
- # E: Overloaded method has both abstract and non-abstract variants
- @overload
- def f(self, x: int) -> int: pass
- @overload
- def f(self, x: str) -> str: pass
-[out]
-
-[case testOverloadedAbstractMethodVariantMissingDecorator1]
-from abc import abstractmethod, ABCMeta
-from typing import overload
-
-class A(metaclass=ABCMeta):
- @overload \
- # E: Overloaded method has both abstract and non-abstract variants
- def f(self, x: int) -> int: pass
- @abstractmethod
- @overload
- def f(self, x: str) -> str: pass
-[out]
-
-[case testMultipleInheritanceAndAbstractMethod]
-import typing
-from abc import abstractmethod, ABCMeta
-class A:
- def f(self, x: str) -> None: pass
-class B(metaclass=ABCMeta):
- @abstractmethod
- def f(self, x: str) -> None: pass
-class C(A, B): pass
-
-[case testMultipleInheritanceAndAbstractMethod2]
-import typing
-from abc import abstractmethod, ABCMeta
-class A:
- def f(self, x: str) -> None: pass
-class B(metaclass=ABCMeta):
- @abstractmethod
- def f(self, x: int) -> None: pass
-class C(A, B): pass
-[out]
-main:8: error: Definition of "f" in base class "A" is incompatible with definition in base class "B"
-
-[case testCallAbstractMethodBeforeDefinition]
-import typing
-from abc import abstractmethod, ABCMeta
-class A(metaclass=ABCMeta):
- def f(self) -> None:
- self.g(1) # E: Argument 1 to "g" of "A" has incompatible type "int"; expected "str"
- @abstractmethod
- def g(self, x: str) -> None: pass
-[out]
-
-[case testAbstractOperatorMethods1]
-import typing
-from abc import abstractmethod, ABCMeta
-class A(metaclass=ABCMeta):
- @abstractmethod
- def __lt__(self, other: 'A') -> int: pass
- @abstractmethod
- def __gt__(self, other: 'A') -> int: pass
-
-[case testAbstractOperatorMethods2]
-import typing
-from abc import abstractmethod, ABCMeta
-class A(metaclass=ABCMeta):
- @abstractmethod
- def __radd__(self, other: 'C') -> str: pass # Error
-class B:
- @abstractmethod
- def __add__(self, other: 'A') -> int: pass
-class C:
- def __add__(self, other: int) -> B: pass
-[out]
-
-
--- Abstract properties
--- -------------------
-
-
-[case testReadOnlyAbstractProperty]
-from abc import abstractproperty, ABCMeta
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self) -> int: pass
-def f(a: A) -> None:
- a.x() # E: "int" not callable
- a.x = 1 # E: Property "x" defined in "A" is read-only
-[out]
-
-[case testReadOnlyAbstractPropertyForwardRef]
-from abc import abstractproperty, ABCMeta
-def f(a: A) -> None:
- a.x() # E: "int" not callable
- a.x = 1 # E: Property "x" defined in "A" is read-only
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self) -> int: pass
-[out]
-
-[case testReadWriteAbstractProperty]
-from abc import abstractproperty, ABCMeta
-def f(a: A) -> None:
- a.x.y # E: "int" has no attribute "y"
- a.x = 1
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self) -> int: pass
- @x.setter
- def x(self, x: int) -> None: pass
-[out]
-
-[case testInstantiateClassWithReadOnlyAbstractProperty]
-from abc import abstractproperty, ABCMeta
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self) -> int: pass
-class B(A): pass
-b = B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'x'
-
-[case testInstantiateClassWithReadWriteAbstractProperty]
-from abc import abstractproperty, ABCMeta
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self) -> int: pass
- @x.setter
- def x(self, x: int) -> None: pass
-class B(A): pass
-b = B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'x'
-
-[case testImplementAbstractPropertyViaProperty]
-from abc import abstractproperty, ABCMeta
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self) -> int: pass
-class B(A):
- @property
- def x(self) -> int: pass
-b = B()
-b.x() # E: "int" not callable
-[builtins fixtures/property.pyi]
-
-[case testImplementReradWriteAbstractPropertyViaProperty]
-from abc import abstractproperty, ABCMeta
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self) -> int: pass
- @x.setter
- def x(self, v: int) -> None: pass
-class B(A):
- @property
- def x(self) -> int: pass
- @x.setter
- def x(self, v: int) -> None: pass
-b = B()
-b.x.y # E: "int" has no attribute "y"
-[builtins fixtures/property.pyi]
-
-[case testImplementAbstractPropertyViaPropertyInvalidType]
-from abc import abstractproperty, ABCMeta
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self) -> int: pass
-class B(A):
- @property
- def x(self) -> str: pass # E
-b = B()
-b.x() # E
-[builtins fixtures/property.pyi]
-[out]
-main:7: error: Return type of "x" incompatible with supertype "A"
-main:9: error: "str" not callable
-
-[case testCantImplementAbstractPropertyViaInstanceVariable]
-from abc import abstractproperty, ABCMeta
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self) -> int: pass
-class B(A):
- def __init__(self) -> None:
- self.x = 1 # E
-b = B() # E
-b.x.y # E
-[builtins fixtures/property.pyi]
-[out]
-main:7: error: Property "x" defined in "B" is read-only
-main:8: error: Cannot instantiate abstract class 'B' with abstract attribute 'x'
-main:9: error: "int" has no attribute "y"
-
-[case testSuperWithAbstractProperty]
-from abc import abstractproperty, ABCMeta
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self) -> int: pass
-class B(A):
- @property
- def x(self) -> int:
- return super().x.y # E: "int" has no attribute "y"
-[builtins fixtures/property.pyi]
-[out]
-
-[case testSuperWithReadWriteAbstractProperty]
-from abc import abstractproperty, ABCMeta
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self) -> int: pass
- @x.setter
- def x(self, v: int) -> None: pass
-class B(A):
- @property
- def x(self) -> int:
- return super().x.y # E
- @x.setter
- def x(self, v: int) -> None:
- super().x = '' # E
-[builtins fixtures/property.pyi]
-[out]
-main:10: error: "int" has no attribute "y"
-main:13: error: Invalid assignment target
-
-[case testOnlyImplementGetterOfReadWriteAbstractProperty]
-from abc import abstractproperty, ABCMeta
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self) -> int: pass
- @x.setter
- def x(self, v: int) -> None: pass
-class B(A):
- @property # E
- def x(self) -> int: pass
-b = B()
-b.x.y # E
-[builtins fixtures/property.pyi]
-[out]
-main:8: error: Read-only property cannot override read-write property
-main:11: error: "int" has no attribute "y"
-
-[case testDynamicallyTypedReadOnlyAbstractProperty]
-from abc import abstractproperty, ABCMeta
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self): pass
-def f(a: A) -> None:
- a.x.y
- a.x = 1 # E: Property "x" defined in "A" is read-only
-[out]
-
-[case testDynamicallyTypedReadOnlyAbstractPropertyForwardRef]
-from abc import abstractproperty, ABCMeta
-def f(a: A) -> None:
- a.x.y
- a.x = 1 # E: Property "x" defined in "A" is read-only
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self): pass
-[out]
-
-[case testDynamicallyTypedReadWriteAbstractProperty]
-from abc import abstractproperty, ABCMeta
-def f(a: A) -> None:
- a.x.y
- a.x = 1
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self): pass
- @x.setter
- def x(self, x): pass
-[out]
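
The deleted cases above can still be reproduced against an ordinary source file. A minimal sketch mirroring testInstantiatingAbstractClass; the file name demo_abstract.py is hypothetical, and the trailing comment shows the diagnostic mypy is expected to report:

    # demo_abstract.py (hypothetical name); check with: mypy demo_abstract.py
    from abc import ABCMeta, abstractmethod

    class B(metaclass=ABCMeta):
        @abstractmethod
        def f(self) -> None: pass

    B()  # E: Cannot instantiate abstract class 'B' with abstract attribute 'f'
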
diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test
deleted file mode 100644
index 871091c..0000000
--- a/test-data/unit/check-async-await.test
+++ /dev/null
@@ -1,393 +0,0 @@
--- Tests for async def and await (PEP 492)
--- ---------------------------------------
-
-[case testAsyncDefPass]
-# flags: --fast-parser
-async def f() -> int:
- pass
-[builtins fixtures/async_await.pyi]
-
-[case testAsyncDefReturn]
-# flags: --fast-parser
-async def f() -> int:
- return 0
-reveal_type(f()) # E: Revealed type is 'typing.Awaitable[builtins.int]'
-[builtins fixtures/async_await.pyi]
-
-[case testAsyncDefMissingReturn]
-# flags: --fast-parser --warn-no-return
-async def f() -> int:
- make_this_not_trivial = 1
-[builtins fixtures/async_await.pyi]
-[out]
-main:2: note: Missing return statement
-
-[case testAsyncDefReturnWithoutValue]
-# flags: --fast-parser
-async def f() -> int:
- make_this_not_trivial = 1
- return
-[builtins fixtures/async_await.pyi]
-[out]
-main:4: error: Return value expected
-
-[case testAwaitCoroutine]
-# flags: --fast-parser
-async def f() -> int:
- x = await f()
- reveal_type(x) # E: Revealed type is 'builtins.int*'
- return x
-[builtins fixtures/async_await.pyi]
-[out]
-
-[case testAwaitDefaultContext]
-# flags: --fast-parser
-from typing import TypeVar
-T = TypeVar('T')
-async def f(x: T) -> T:
- y = await f(x)
- reveal_type(y)
- return y
-[out]
-main:6: error: Revealed type is 'T`-1'
-
-[case testAwaitAnyContext]
-# flags: --fast-parser
-from typing import Any, TypeVar
-T = TypeVar('T')
-async def f(x: T) -> T:
- y = await f(x) # type: Any
- reveal_type(y)
- return y
-[out]
-main:6: error: Revealed type is 'Any'
-
-[case testAwaitExplicitContext]
-# flags: --fast-parser
-from typing import TypeVar
-T = TypeVar('T')
-async def f(x: T) -> T:
- y = await f(x) # type: int
- reveal_type(y)
-[out]
-main:5: error: Argument 1 to "f" has incompatible type "T"; expected "int"
-main:6: error: Revealed type is 'builtins.int'
-
-[case testAwaitGeneratorError]
-# flags: --fast-parser
-from typing import Any, Generator
-def g() -> Generator[int, None, str]:
- yield 0
- return ''
-async def f() -> int:
- x = await g()
- return x
-[out]
-main:7: error: Incompatible types in await (actual type Generator[int, None, str], expected type "Awaitable")
-
-[case testAwaitIteratorError]
-# flags: --fast-parser
-from typing import Any, Iterator
-def g() -> Iterator[Any]:
- yield
-async def f() -> int:
- x = await g()
- return x
-[out]
-main:6: error: Incompatible types in await (actual type Iterator[Any], expected type "Awaitable")
-
-[case testAwaitArgumentError]
-# flags: --fast-parser
-def g() -> int:
- return 0
-async def f() -> int:
- x = await g()
- return x
-[builtins fixtures/async_await.pyi]
-[out]
-main:5: error: Incompatible types in await (actual type "int", expected type "Awaitable")
-
-[case testAwaitResultError]
-# flags: --fast-parser
-async def g() -> int:
- return 0
-async def f() -> str:
- x = await g() # type: str
-[builtins fixtures/async_await.pyi]
-[out]
-main:5: error: Incompatible types in assignment (expression has type "int", variable has type "str")
-
-[case testAwaitReturnError]
-# flags: --fast-parser
-async def g() -> int:
- return 0
-async def f() -> str:
- x = await g()
- return x
-[builtins fixtures/async_await.pyi]
-[out]
-main:6: error: Incompatible return value type (got "int", expected "str")
-
-[case testAsyncFor]
-# flags: --fast-parser
-from typing import AsyncIterator
-class C(AsyncIterator[int]):
- async def __anext__(self) -> int: return 0
-async def f() -> None:
- async for x in C():
- reveal_type(x) # E: Revealed type is 'builtins.int*'
-[builtins fixtures/async_await.pyi]
-[out]
-
-[case testAsyncForError]
-# flags: --fast-parser
-from typing import AsyncIterator
-async def f() -> None:
- async for x in [1]:
- pass
-[builtins fixtures/async_await.pyi]
-[out]
-main:4: error: AsyncIterable expected
-main:4: error: List[int] has no attribute "__aiter__"
-
-[case testAsyncWith]
-# flags: --fast-parser
-class C:
- async def __aenter__(self) -> int: pass
- async def __aexit__(self, x, y, z) -> None: pass
-async def f() -> None:
- async with C() as x:
- reveal_type(x) # E: Revealed type is 'builtins.int*'
-[builtins fixtures/async_await.pyi]
-[out]
-
-[case testAsyncWithError]
-# flags: --fast-parser
-class C:
- def __enter__(self) -> int: pass
- def __exit__(self, x, y, z) -> None: pass
-async def f() -> None:
- async with C() as x:
- pass
-[builtins fixtures/async_await.pyi]
-[out]
-main:6: error: "C" has no attribute "__aenter__"; maybe "__enter__"?
-main:6: error: "C" has no attribute "__aexit__"; maybe "__exit__"?
-
-[case testAsyncWithErrorBadAenter]
-# flags: --fast-parser
-class C:
- def __aenter__(self) -> int: pass
- async def __aexit__(self, x, y, z) -> None: pass
-async def f() -> None:
- async with C() as x: # E: Incompatible types in "async with" for __aenter__ (actual type "int", expected type "Awaitable")
- pass
-[builtins fixtures/async_await.pyi]
-[out]
-
-[case testAsyncWithErrorBadAenter2]
-# flags: --fast-parser
-class C:
- def __aenter__(self) -> None: pass
- async def __aexit__(self, x, y, z) -> None: pass
-async def f() -> None:
- async with C() as x: # E: "__aenter__" of "C" does not return a value
- pass
-[builtins fixtures/async_await.pyi]
-[out]
-
-[case testAsyncWithErrorBadAexit]
-# flags: --fast-parser
-class C:
- async def __aenter__(self) -> int: pass
- def __aexit__(self, x, y, z) -> int: pass
-async def f() -> None:
- async with C() as x: # E: Incompatible types in "async with" for __aexit__ (actual type "int", expected type "Awaitable")
- pass
-[builtins fixtures/async_await.pyi]
-[out]
-
-[case testAsyncWithErrorBadAexit2]
-# flags: --fast-parser
-class C:
- async def __aenter__(self) -> int: pass
- def __aexit__(self, x, y, z) -> None: pass
-async def f() -> None:
- async with C() as x: # E: "__aexit__" of "C" does not return a value
- pass
-[builtins fixtures/async_await.pyi]
-[out]
-
-[case testNoYieldInAsyncDef]
-# flags: --fast-parser
-async def f():
- yield None
-async def g():
- yield
-async def h():
- x = yield
-[builtins fixtures/async_await.pyi]
-[out]
-main:3: error: 'yield' in async function
-main:5: error: 'yield' in async function
-main:7: error: 'yield' in async function
-
-[case testNoYieldFromInAsyncDef]
-# flags: --fast-parser
-async def f():
- yield from []
-async def g():
- x = yield from []
-[builtins fixtures/async_await.pyi]
-[out]
-main:3: error: 'yield from' in async function
-main:5: error: 'yield from' in async function
-
-[case testNoAsyncDefInPY2_python2]
-# flags: --fast-parser
-async def f(): # E: invalid syntax
- pass
-
-[case testYieldFromNoAwaitable]
-# flags: --fast-parser
-from typing import Any, Generator
-async def f() -> str:
- return ''
-def g() -> Generator[Any, None, str]:
- x = yield from f()
- return x
-[builtins fixtures/async_await.pyi]
-[out]
-main:6: error: "yield from" can't be applied to Awaitable[str]
-
-[case testAwaitableSubclass]
-# flags: --fast-parser
-from typing import Any, AsyncIterator, Awaitable, Generator
-class A(Awaitable[int]):
- def __await__(self) -> Generator[Any, None, int]:
- yield
- return 0
-class C:
- def __aenter__(self) -> A:
- return A()
- def __aexit__(self, *a) -> A:
- return A()
-class I(AsyncIterator[int]):
- def __aiter__(self) -> 'I':
- return self
- def __anext__(self) -> A:
- return A()
-async def main() -> None:
- x = await A()
- reveal_type(x) # E: Revealed type is 'builtins.int'
- async with C() as y:
- reveal_type(y) # E: Revealed type is 'builtins.int'
- async for z in I():
- reveal_type(z) # E: Revealed type is 'builtins.int'
-[builtins fixtures/async_await.pyi]
-[out]
-
-[case testYieldTypeCheckInDecoratedCoroutine]
-# flags: --fast-parser
-from typing import Generator
-from types import coroutine
-@coroutine
-def f() -> Generator[int, str, int]:
- x = yield 0
- x = yield '' # E: Incompatible types in yield (actual type "str", expected type "int")
- reveal_type(x) # E: Revealed type is 'builtins.str'
- if x:
- return 0
- else:
- return '' # E: Incompatible return value type (got "str", expected "int")
-[builtins fixtures/async_await.pyi]
-[out]
-
-
--- The full matrix of coroutine compatibility
--- ------------------------------------------
-
-[case testFullCoroutineMatrix]
-# flags: --fast-parser
-from typing import Any, AsyncIterator, Awaitable, Generator, Iterator
-from types import coroutine
-
-# The various things you might try to use in `await` or `yield from`.
-
-def plain_generator() -> Generator[str, None, int]:
- yield 'a'
- return 1
-
-async def plain_coroutine() -> int:
- return 1
-
-@coroutine
-def decorated_generator() -> Generator[str, None, int]:
- yield 'a'
- return 1
-
-@coroutine
-async def decorated_coroutine() -> int:
- return 1
-
-class It(Iterator[str]):
- def __iter__(self) -> 'It':
- return self
- def __next__(self) -> str:
- return 'a'
-
-def other_iterator() -> It:
- return It()
-
-class Aw(Awaitable[int]):
- def __await__(self) -> Generator[str, Any, int]:
- yield 'a'
- return 1
-
-def other_coroutine() -> Aw:
- return Aw()
-
-# The various contexts in which `await` or `yield from` might occur.
-
-def plain_host_generator() -> Generator[str, None, None]:
- yield 'a'
- x = 0
- x = yield from plain_generator()
- x = yield from plain_coroutine() # E: "yield from" can't be applied to Awaitable[int]
- x = yield from decorated_generator()
- x = yield from decorated_coroutine() # E: "yield from" can't be applied to AwaitableGenerator[Any, Any, int, Awaitable[int]]
- x = yield from other_iterator()
- x = yield from other_coroutine() # E: "yield from" can't be applied to "Aw"
-
-async def plain_host_coroutine() -> None:
- x = 0
- x = await plain_generator() # E: Incompatible types in await (actual type Generator[str, None, int], expected type "Awaitable")
- x = await plain_coroutine()
- x = await decorated_generator()
- x = await decorated_coroutine()
- x = await other_iterator() # E: Incompatible types in await (actual type "It", expected type "Awaitable")
- x = await other_coroutine()
-
-@coroutine
-def decorated_host_generator() -> Generator[str, None, None]:
- yield 'a'
- x = 0
- x = yield from plain_generator()
- x = yield from plain_coroutine()
- x = yield from decorated_generator()
- x = yield from decorated_coroutine()
- x = yield from other_iterator()
- x = yield from other_coroutine() # E: "yield from" can't be applied to "Aw"
-
-@coroutine
-async def decorated_host_coroutine() -> None:
- x = 0
- x = await plain_generator() # E: Incompatible types in await (actual type Generator[str, None, int], expected type "Awaitable")
- x = await plain_coroutine()
- x = await decorated_generator()
- x = await decorated_coroutine()
- x = await other_iterator() # E: Incompatible types in await (actual type "It", expected type "Awaitable")
- x = await other_coroutine()
-
-[builtins fixtures/async_await.pyi]
-[out]
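As an aside to the coroutine-matrix cases above, a minimal standalone sketch (function names here are illustrative, not taken from the test fixtures) of how @types.coroutine bridges generator-based and native coroutines:

    import types
    from typing import Generator

    @types.coroutine
    def legacy() -> Generator[None, None, int]:
        # Suspends like a plain generator, but becomes awaitable once decorated.
        yield
        return 42

    async def native() -> int:
        # `await legacy()` is accepted here, and `yield from legacy()` is accepted
        # inside another @types.coroutine generator; a bare generator would not be.
        return await legacy()

Driven by an event loop, native() evaluates to 42.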
diff --git a/test-data/unit/check-basic.test b/test-data/unit/check-basic.test
deleted file mode 100644
index 05fa1a9..0000000
--- a/test-data/unit/check-basic.test
+++ /dev/null
@@ -1,310 +0,0 @@
-[case testEmptyFile]
-[out]
-
-[case testAssignmentAndVarDef]
-
-a = None # type: A
-b = None # type: B
-a = a
-a = b # Fail
-class A: pass
-class B: pass
-[out]
-main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testConstructionAndAssignment]
-
-x = None # type: A
-x = A()
-x = B()
-class A:
- def __init__(self): pass
-class B:
- def __init__(self): pass
-[out]
-main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testInheritInitFromObject]
-
-x = None # type: A
-x = A()
-x = B()
-class A(object): pass
-class B(object): pass
-[out]
-main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testImplicitInheritInitFromObject]
-
-x = None # type: A
-o = None # type: object
-x = o # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-x = A()
-o = x
-class A: pass
-class B: pass
-[out]
-
-[case testTooManyConstructorArgs]
-import typing
-object(object())
-[out]
-main:2: error: Too many arguments for "object"
-
-[case testVarDefWithInit]
-import typing
-a = A() # type: A
-b = object() # type: A
-class A: pass
-[out]
-main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A")
-
-[case testInheritanceBasedSubtyping]
-import typing
-x = B() # type: A
-y = A() # type: B # Fail
-class A: pass
-class B(A): pass
-[out]
-main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-[case testDeclaredVariableInParentheses]
-
-(x) = None # type: int
-x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-x = 1
-
-
--- Simple functions and calling
--- ----------------------------
-
-
-[case testFunction]
-import typing
-def f(x: 'A') -> None: pass
-f(A())
-f(B()) # Fail
-class A: pass
-class B: pass
-[out]
-main:4: error: Argument 1 to "f" has incompatible type "B"; expected "A"
-
-[case testNotCallable]
-import typing
-A()()
-class A: pass
-[out]
-main:2: error: "A" not callable
-
-[case testSubtypeArgument]
-import typing
-def f(x: 'A', y: 'B') -> None: pass
-f(B(), A()) # Fail
-f(B(), B())
-
-class A: pass
-class B(A): pass
-[out]
-main:3: error: Argument 2 to "f" has incompatible type "A"; expected "B"
-
-[case testInvalidArgumentCount]
-import typing
-def f(x, y) -> None: pass
-f(object())
-f(object(), object(), object())
-[out]
-main:3: error: Too few arguments for "f"
-main:4: error: Too many arguments for "f"
-
-
--- Locals
--- ------
-
-
-[case testLocalVariables]
-
-def f() -> None:
- x = None # type: A
- y = None # type: B
- x = x
- x = y # Fail
-class A: pass
-class B: pass
-[out]
-main:6: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testLocalVariableScope]
-
-def f() -> None:
- x = None # type: A
- x = A()
-def g() -> None:
- x = None # type: B
- x = A() # Fail
-class A: pass
-class B: pass
-[out]
-main:7: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-[case testFunctionArguments]
-import typing
-def f(x: 'A', y: 'B') -> None:
- x = y # Fail
- x = x
- y = B()
-class A: pass
-class B: pass
-[out]
-main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testLocalVariableInitialization]
-import typing
-def f() -> None:
- a = A() # type: A
- b = B() # type: A # Fail
-class A: pass
-class B: pass
-[out]
-main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testVariableInitializationWithSubtype]
-import typing
-x = B() # type: A
-y = A() # type: B # Fail
-class A: pass
-class B(A): pass
-[out]
-main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-
--- Misc
--- ----
-
-
-[case testInvalidReturn]
-import typing
-def f() -> 'A':
- return B()
-class A: pass
-class B: pass
-[out]
-main:3: error: Incompatible return value type (got "B", expected "A")
-
-[case testTopLevelContextAndInvalidReturn]
-import typing
-def f() -> 'A':
- return B()
-a = B() # type: A
-class A: pass
-class B: pass
-[out]
-main:3: error: Incompatible return value type (got "B", expected "A")
-main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testEmptyReturnInAnyTypedFunction]
-from typing import Any
-def f() -> Any:
- return
-
-[case testEmptyYieldInAnyTypedFunction]
-from typing import Any
-def f() -> Any:
- yield
-
-[case testModule__name__]
-import typing
-x = __name__ # type: str
-a = __name__ # type: A # E: Incompatible types in assignment (expression has type "str", variable has type "A")
-class A: pass
-[builtins fixtures/primitives.pyi]
-
-[case testModule__doc__]
-import typing
-x = __doc__ # type: str
-a = __doc__ # type: A # E: Incompatible types in assignment (expression has type "str", variable has type "A")
-class A: pass
-[builtins fixtures/primitives.pyi]
-
-[case testModule__file__]
-import typing
-x = __file__ # type: str
-a = __file__ # type: A # E: Incompatible types in assignment (expression has type "str", variable has type "A")
-class A: pass
-[builtins fixtures/primitives.pyi]
-
-[case test__package__]
-import typing
-x = __package__ # type: str
-a = __file__ # type: int # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-
--- Scoping and shadowing
--- ---------------------
-
-
-[case testLocalVariableShadowing]
-
-a = None # type: A
-a = B() # Fail
-a = A()
-def f() -> None:
- a = None # type: B
- a = A() # Fail
- a = B()
-a = B() # Fail
-a = A()
-
-class A: pass
-class B: pass
-[out]
-main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-main:7: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-main:9: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testGlobalDefinedInBlockWithType]
-
-class A: pass
-while A:
- a = None # type: A
- a = A()
- a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-
-
--- # type: signatures
--- ------------------
-
-
-[case testFunctionSignatureAsComment]
-def f(x): # type: (int) -> str
- return 1
-f('')
-[out]
-main:2: error: Incompatible return value type (got "int", expected "str")
-main:3: error: Argument 1 to "f" has incompatible type "str"; expected "int"
-
-[case testMethodSignatureAsComment]
-class A:
- def f(self, x):
- # type: (int) -> str
- self.f('') # Fail
- return 1
-A().f('') # Fail
-[out]
-main:4: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-main:5: error: Incompatible return value type (got "int", expected "str")
-main:6: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-
-[case testTrailingCommaParsing-skip]
-x = 1
-x in 1,
-if x in 1, :
- pass
-[out]
-
-[case testInitReturnTypeError]
-class C:
- def __init__(self):
- # type: () -> int
- pass
-[out]
-main:2: error: The return type of "__init__" must be None
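A small self-contained illustration of the comment-based signatures exercised by testFunctionSignatureAsComment and testMethodSignatureAsComment above (the function name is made up for the example):

    def add_one(x):
        # type: (int) -> int
        # mypy reads the comment as the signature, so add_one('') is an error.
        return x + 1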
diff --git a/test-data/unit/check-bound.test b/test-data/unit/check-bound.test
deleted file mode 100644
index ee935ae..0000000
--- a/test-data/unit/check-bound.test
+++ /dev/null
@@ -1,203 +0,0 @@
--- Enforcement of upper bounds
--- ---------------------------
-
-
-[case testBoundOnGenericFunction]
-from typing import TypeVar
-
-class A: pass
-class B(A): pass
-class C(A): pass
-class D: pass
-
-T = TypeVar('T', bound=A)
-U = TypeVar('U')
-def f(x: T) -> T: pass
-def g(x: U) -> U:
- return f(x) # Fail
-
-f(A())
-f(B())
-f(D()) # Fail
-
-b = B()
-b = f(b)
-b = f(C()) # Fail
-[out]
-main:12: error: Type argument 1 of "f" has incompatible value "U"
-main:16: error: Type argument 1 of "f" has incompatible value "D"
-main:20: error: Incompatible types in assignment (expression has type "C", variable has type "B")
-
-
-[case testBoundOnGenericClass]
-from typing import TypeVar, Generic
-
-class A: pass
-class B(A): pass
-T = TypeVar('T', bound=A)
-
-class G(Generic[T]):
- def __init__(self, x: T) -> None: pass
-
-v = None # type: G[A]
-w = None # type: G[B]
-x = None # type: G[str] # E: Type argument "builtins.str" of "G" must be a subtype of "__main__.A"
-y = G('a') # E: Type argument 1 of "G" has incompatible value "str"
-z = G(A())
-z = G(B())
-
-
-[case testBoundVoid]
-from typing import TypeVar, Generic
-T = TypeVar('T', bound=int)
-class C(Generic[T]):
- t = None # type: T
- def get(self) -> T:
- return self.t
-c1 = None # type: C[None]
-c1.get()
-d = c1.get() # E: Function does not return a value
-
-
-[case testBoundAny]
-from typing import TypeVar, Generic
-T = TypeVar('T', bound=int)
-class C(Generic[T]):
- def __init__(self, x: T) -> None: pass
-def f(x: T) -> T:
- return x
-
-def g(): pass
-
-f(g())
-C(g())
-z = None # type: C
-
-
-[case testBoundHigherOrderWithVoid]
-from typing import TypeVar, Callable
-class A: pass
-T = TypeVar('T', bound=A)
-def f(g: Callable[[], T]) -> T:
- return g()
-def h() -> None: pass
-f(h)
-a = f(h) # E: "h" does not return a value
-
-
-[case testBoundInheritance]
-from typing import TypeVar, Generic
-class A: pass
-T = TypeVar('T')
-TA = TypeVar('TA', bound=A)
-
-class C(Generic[TA]): pass
-class D0(C[TA], Generic[TA]): pass
-class D1(C[T], Generic[T]): pass # E: Type argument "T`1" of "C" must be a subtype of "__main__.A"
-class D2(C[A]): pass
-class D3(C[str]): pass # E: Type argument "builtins.str" of "C" must be a subtype of "__main__.A"
-
-
--- Using information from upper bounds
--- -----------------------------------
-
-
-[case testBoundGenericFunctions]
-from typing import TypeVar
-class A: pass
-class B(A): pass
-
-T = TypeVar('T')
-TA = TypeVar('TA', bound=A)
-TB = TypeVar('TB', bound=B)
-
-def f(x: T) -> T:
- return x
-def g(x: TA) -> TA:
- return f(x)
-def h(x: TB) -> TB:
- return g(x)
-def g2(x: TA) -> TA:
- return h(x) # Fail
-
-def j(x: TA) -> A:
- return x
-def k(x: TA) -> B:
- return x # Fail
-[out]
-main:16: error: Type argument 1 of "h" has incompatible value "TA"
-main:21: error: Incompatible return value type (got "TA", expected "B")
-
-
-[case testBoundMethodUsage]
-from typing import TypeVar
-class A0:
- def foo(self) -> None: pass
-class A(A0):
- def bar(self) -> None: pass
- a = 1
- @property
- def b(self) -> int:
- return self.a
-class B(A):
- def baz(self) -> None: pass
-
-T = TypeVar('T', A)
-
-def f(x: T) -> T:
- x.foo()
- x.bar()
- x.baz() # E: "A" has no attribute "baz"
- x.a
- x.b
- return x
-
-b = f(B())
-[builtins fixtures/property.pyi]
-[out]
-
-[case testBoundClassMethod]
-from typing import TypeVar
-class A0:
- @classmethod
- def foo(cls, x: int) -> int: pass
-class A(A0): pass
-
-T = TypeVar('T', bound=A)
-def f(x: T) -> int:
- return x.foo(22)
-[builtins fixtures/classmethod.pyi]
-
-
-[case testBoundStaticMethod]
-from typing import TypeVar
-class A0:
- @staticmethod
- def foo(x: int) -> int: pass
-class A(A0): pass
-
-T = TypeVar('T', bound=A)
-def f(x: T) -> int:
- return x.foo(22)
-[builtins fixtures/staticmethod.pyi]
-
-
-[case testBoundOnDecorator]
-from typing import TypeVar, Callable, Any, cast
-T = TypeVar('T', bound=Callable[..., Any])
-
-def twice(f: T) -> T:
- def result(*args, **kwargs) -> Any:
- f(*args, **kwargs)
- return f(*args, **kwargs)
- return cast(T, result)
-
-@twice
-def foo(x: int) -> int:
- return x
-
-a = 1
-b = foo(a)
-b = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-twice(a) # E: Type argument 1 of "twice" has incompatible value "int"
-[builtins fixtures/args.pyi]
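The pattern behind testBoundOnDecorator above, as a standalone sketch (the decorator and function names are illustrative): bounding a TypeVar by Callable[..., Any] lets a decorator return the same callable type it received, so call sites keep the original signature.

    from typing import Any, Callable, TypeVar, cast

    F = TypeVar('F', bound=Callable[..., Any])

    def logged(func: F) -> F:
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            print('calling', func.__name__)
            return func(*args, **kwargs)
        return cast(F, wrapper)   # the cast keeps the wrapped signature visible

    @logged
    def greet(name: str) -> str:
        return 'hello ' + name

    greet(42)  # still rejected: "greet" expects a str argument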
diff --git a/test-data/unit/check-callable.test b/test-data/unit/check-callable.test
deleted file mode 100644
index 429ad44..0000000
--- a/test-data/unit/check-callable.test
+++ /dev/null
@@ -1,345 +0,0 @@
-[case testCallableDef]
-def f() -> None: pass
-
-if callable(f):
- f()
-else:
- f += 5
-
-[builtins fixtures/callable.pyi]
-
-[case testCallableLambda]
-f = lambda: None
-
-if callable(f):
- f()
-else:
- f += 5
-
-[builtins fixtures/callable.pyi]
-
-[case testCallableNotCallable]
-x = 5
-
-if callable(x):
- x()
-else:
- x += 5
-
-[builtins fixtures/callable.pyi]
-
-[case testUnion]
-from typing import Callable, Union
-
-x = 5 # type: Union[int, Callable[[], str]]
-
-if callable(x):
- y = x() + 'test'
-else:
- z = x + 6
-
-[builtins fixtures/callable.pyi]
-
-[case testUnionMultipleReturnTypes]
-from typing import Callable, Union
-
-x = 5 # type: Union[int, Callable[[], str], Callable[[], int]]
-
-if callable(x):
- y = x() + 2 # E: Unsupported operand types for + (likely involving Union)
-else:
- z = x + 6
-
-[builtins fixtures/callable.pyi]
-
-[case testUnionMultipleNonCallableTypes]
-from typing import Callable, Union
-
-x = 5 # type: Union[int, str, Callable[[], str]]
-
-if callable(x):
- y = x() + 'test'
-else:
- z = x + 6 # E: Unsupported operand types for + (likely involving Union)
-
-[builtins fixtures/callable.pyi]
-
-[case testCallableThenIsinstance]
-from typing import Callable, Union
-
-x = 5 # type: Union[int, str, Callable[[], str], Callable[[], int]]
-
-if callable(x):
- y = x()
- if isinstance(y, int):
- b1 = y + 2
- else:
- b2 = y + 'test'
-else:
- if isinstance(x, int):
- b3 = x + 3
- else:
- b4 = x + 'test2'
-
-[builtins fixtures/callable.pyi]
-
-[case testIsinstanceThenCallable]
-from typing import Callable, Union
-
-x = 5 # type: Union[int, str, Callable[[], str], Callable[[], int]]
-
-if isinstance(x, int):
- b1 = x + 1
-else:
- if callable(x):
- y = x()
- if isinstance(y, int):
- b2 = y + 1
- else:
- b3 = y + 'test'
- else:
- b4 = x + 'test2'
-
-[builtins fixtures/callable.pyi]
-
-[case testCallableWithDifferentArgTypes]
-from typing import Callable, Union
-
-x = 5 # type: Union[int, Callable[[], None], Callable[[int], None]]
-
-if callable(x):
- x() # E: Too few arguments
-
-[builtins fixtures/callable.pyi]
-
-[case testClassInitializer]
-from typing import Callable, Union
-
-class A:
- x = 5
-
-a = A # type: Union[A, Callable[[], A]]
-
-if callable(a):
- a = a()
-
-a.x + 6
-
-[builtins fixtures/callable.pyi]
-
-[case testCallableVariables]
-from typing import Union
-
-class A:
- x = 5
-
-class B:
- x = int
-
-x = A() # type: Union[A, B]
-
-if callable(x.x):
- y = x.x()
-else:
- y = x.x + 5
-
-[builtins fixtures/callable.pyi]
-
-[case testCallableAnd]
-from typing import Union, Callable
-
-x = 5 # type: Union[int, Callable[[], str]]
-
-if callable(x) and x() == 'test':
- x()
-else:
- x + 5 # E: Unsupported left operand type for + (some union)
-
-[builtins fixtures/callable.pyi]
-
-[case testCallableOr]
-from typing import Union, Callable
-
-x = 5 # type: Union[int, Callable[[], str]]
-
-if callable(x) or x() == 'test': # E: "int" not callable
- x() # E: "int" not callable
-else:
- x + 5
-[builtins fixtures/callable.pyi]
-
-[case testCallableOrOtherType]
-from typing import Union, Callable
-
-x = 5 # type: Union[int, Callable[[], str]]
-
-if callable(x) or x == 2:
- pass
-else:
- pass
-[builtins fixtures/callable.pyi]
-
-[case testAnyCallable]
-from typing import Any
-
-x = 5 # type: Any
-
-if callable(x):
- reveal_type(x) # E: Revealed type is 'Any'
-else:
- reveal_type(x) # E: Revealed type is 'Any'
-[builtins fixtures/callable.pyi]
-
-[case testCallableCallableClasses]
-from typing import Union
-
-
-class A:
- pass
-
-
-class B:
- def __call__(self) -> None:
- pass
-
-
-a = A() # type: A
-b = B() # type: B
-c = A() # type: Union[A, B]
-
-if callable(a):
- 5 + 'test'
-
-if not callable(b):
- 5 + 'test'
-
-if callable(c):
- reveal_type(c) # E: Revealed type is '__main__.B'
-else:
- reveal_type(c) # E: Revealed type is '__main__.A'
-
-[builtins fixtures/callable.pyi]
-
-[case testCallableNestedUnions]
-from typing import Callable, Union
-
-T = Union[Union[int, Callable[[], int]], Union[str, Callable[[], str]]]
-
-def f(t: T) -> None:
- if callable(t):
- reveal_type(t()) # E: Revealed type is 'Union[builtins.int, builtins.str]'
- else:
- reveal_type(t) # E: Revealed type is 'Union[builtins.int, builtins.str]'
-
-[builtins fixtures/callable.pyi]
-
-[case testCallableTypeVarEmpty]
-from typing import TypeVar
-
-T = TypeVar('T')
-
-def f(t: T) -> T:
- if callable(t):
- return 5
- else:
- return t
-
-[builtins fixtures/callable.pyi]
-
-[case testCallableTypeVarUnion]
-from typing import Callable, TypeVar, Union
-
-T = TypeVar('T', int, Callable[[], int], Union[str, Callable[[], str]])
-
-def f(t: T) -> None:
- if callable(t):
- reveal_type(t()) # E: Revealed type is 'builtins.int' # E: Revealed type is 'builtins.str'
- else:
- reveal_type(t) # E: Revealed type is 'builtins.int*' # E: Revealed type is 'builtins.str'
-
-[builtins fixtures/callable.pyi]
-
-[case testCallableTypeVarBound]
-from typing import TypeVar
-
-
-class A:
- def __call__(self) -> str:
- return 'hi'
-
-
-T = TypeVar('T', bound=A)
-
-def f(t: T) -> str:
- if callable(t):
- return t()
- else:
- return 5
-
-[builtins fixtures/callable.pyi]
-
-[case testCallableTypeType]
-from typing import Type
-
-
-class A:
- pass
-
-
-T = Type[A]
-
-def f(t: T) -> A:
- if callable(t):
- return t()
- else:
- return 5
-
-[builtins fixtures/callable.pyi]
-
-[case testCallableTypeUnion]
-from abc import ABCMeta, abstractmethod
-from typing import Type, Union
-
-
-class A(metaclass=ABCMeta):
- @abstractmethod
- def f(self) -> None:
- pass
-
-
-class B:
- pass
-
-
-x = B # type: Union[Type[A], Type[B]]
-if callable(x):
- # Abstract classes raise an error when called, but are indeed `callable`
- pass
-else:
- 'test' + 5
-
-[builtins fixtures/callable.pyi]
-
-[case testCallableUnionOfTypes]
-from abc import ABCMeta, abstractmethod
-from typing import Type, Union
-
-
-class A(metaclass=ABCMeta):
- @abstractmethod
- def f(self) -> None:
- pass
-
-
-class B:
- pass
-
-
-x = B # type: Type[Union[A, B]]
-if callable(x):
- # Abstract classes raise an error when called, but are indeed `callable`
- pass
-else:
- 'test' + 5
-
-[builtins fixtures/callable.pyi]
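Outside the fixtures, the callable() narrowing these cases exercise looks like this (function and parameter names are illustrative):

    from typing import Callable, Union

    def describe(x: Union[int, Callable[[], str]]) -> str:
        if callable(x):
            return x()          # narrowed to Callable[[], str]
        else:
            return str(x + 1)   # narrowed to int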
diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test
deleted file mode 100644
index a18532d..0000000
--- a/test-data/unit/check-class-namedtuple.test
+++ /dev/null
@@ -1,378 +0,0 @@
-[case testNewNamedTupleOldPythonVersion]
-# flags: --fast-parser --python-version 3.5
-from typing import NamedTuple
-
-class E(NamedTuple): # E: NamedTuple class syntax is only supported in Python 3.6
- pass
-
-[case testNewNamedTupleNoUnderscoreFields]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class X(NamedTuple):
- x: int
- _y: int # E: NamedTuple field name cannot start with an underscore: _y
- _z: int # E: NamedTuple field name cannot start with an underscore: _z
-
-[case testNewNamedTupleAccessingAttributes]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class X(NamedTuple):
- x: int
- y: str
-
-x: X
-x.x
-x.y
-x.z # E: "X" has no attribute "z"
-
-[case testNewNamedTupleAttributesAreReadOnly]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class X(NamedTuple):
- x: int
-
-x: X
-x.x = 5 # E: Property "x" defined in "X" is read-only
-x.y = 5 # E: "X" has no attribute "y"
-
-class A(X): pass
-a: A
-a.x = 5 # E: Property "x" defined in "A" is read-only
-
-[case testNewNamedTupleCreateWithPositionalArguments]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class X(NamedTuple):
- x: int
- y: str
-
-x = X(1, '2')
-x.x
-x.z # E: "X" has no attribute "z"
-x = X(1) # E: Too few arguments for "X"
-x = X(1, '2', 3) # E: Too many arguments for "X"
-
-[case testNewNamedTupleShouldBeSingleBase]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class A: ...
-class X(NamedTuple, A): # E: NamedTuple should be a single base
- pass
-
-[case testCreateNewNamedTupleWithKeywordArguments]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class X(NamedTuple):
- x: int
- y: str
-
-x = X(x=1, y='x')
-x = X(1, y='x')
-x = X(x=1, z=1) # E: Unexpected keyword argument "z" for "X"
-x = X(y='x') # E: Missing positional argument "x" in call to "X"
-
-[case testNewNamedTupleCreateAndUseAsTuple]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class X(NamedTuple):
- x: int
- y: str
-
-x = X(1, 'x')
-a, b = x
-a, b, c = x # E: Need more than 2 values to unpack (3 expected)
-
-[case testNewNamedTupleWithItemTypes]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class N(NamedTuple):
- a: int
- b: str
-
-n = N(1, 'x')
-s: str = n.a # E: Incompatible types in assignment (expression has type "int", \
- variable has type "str")
-i: int = n.b # E: Incompatible types in assignment (expression has type "str", \
- variable has type "int")
-x, y = n
-x = y # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testNewNamedTupleConstructorArgumentTypes]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class N(NamedTuple):
- a: int
- b: str
-
-n = N('x', 'x') # E: Argument 1 to "N" has incompatible type "str"; expected "int"
-n = N(1, b=2) # E: Argument 2 to "N" has incompatible type "int"; expected "str"
-N(1, 'x')
-N(b='x', a=1)
-
-[case testNewNamedTupleAsBaseClass]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class N(NamedTuple):
- a: int
- b: str
-
-class X(N):
- pass
-x = X(1, 2) # E: Argument 2 to "X" has incompatible type "int"; expected "str"
-s = ''
-i = 0
-s = x.a # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-i, s = x
-s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-
-[case testNewNamedTupleSelfTypeWithNamedTupleAsBase]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class A(NamedTuple):
- a: int
- b: str
-
-class B(A):
- def f(self, x: int) -> None:
- self.f(self.a)
- self.f(self.b) # E: Argument 1 to "f" of "B" has incompatible type "str"; expected "int"
- i = 0
- s = ''
- i, s = self
- i, i = self # E: Incompatible types in assignment (expression has type "str", \
- variable has type "int")
-[out]
-
-[case testNewNamedTupleTypeReferenceToClassDerivedFrom]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class A(NamedTuple):
- a: int
- b: str
-
-class B(A):
- def f(self, x: 'B') -> None:
- i = 0
- s = ''
- self = x
- i, s = x
- i, s = x.a, x.b
- i, s = x.a, x.a # E: Incompatible types in assignment (expression has type "int", \
- variable has type "str")
- i, i = self # E: Incompatible types in assignment (expression has type "str", \
- variable has type "int")
-
-[out]
-
-[case testNewNamedTupleSubtyping]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple, Tuple
-
-class A(NamedTuple):
- a: int
- b: str
-
-class B(A): pass
-a = A(1, '')
-b = B(1, '')
-t: Tuple[int, str]
-b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = t # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "A")
-b = t # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "B")
-t = a
-t = (1, '')
-t = b
-a = b
-
-[case testNewNamedTupleSimpleTypeInference]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple, Tuple
-
-class A(NamedTuple):
- a: int
-
-l = [A(1), A(2)]
-a = A(1)
-a = l[0]
-(i,) = l[0]
-i, i = l[0] # E: Need more than 1 value to unpack (2 expected)
-l = [A(1)]
-a = (1,) # E: Incompatible types in assignment (expression has type "Tuple[int]", \
- variable has type "A")
-[builtins fixtures/list.pyi]
-
-[case testNewNamedTupleMissingClassAttribute]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class MyNamedTuple(NamedTuple):
- a: int
- b: str
-
-MyNamedTuple.x # E: "MyNamedTuple" has no attribute "x"
-
-[case testNewNamedTupleEmptyItems]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class A(NamedTuple):
- ...
-
-[case testNewNamedTupleForwardRef]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class A(NamedTuple):
- b: 'B'
-
-class B: ...
-
-a = A(B())
-a = A(1) # E: Argument 1 to "A" has incompatible type "int"; expected "B"
-
-[case testNewNamedTupleProperty]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class A(NamedTuple):
- a: int
-
-class B(A):
- @property
- def b(self) -> int:
- return self.a
-class C(B): pass
-B(1).b
-C(2).b
-
-[builtins fixtures/property.pyi]
-
-[case testNewNamedTupleAsDict]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple, Any
-
-class X(NamedTuple):
- x: Any
- y: Any
-
-x: X
-reveal_type(x._asdict()) # E: Revealed type is 'builtins.dict[builtins.str, Any]'
-
-[builtins fixtures/dict.pyi]
-
-[case testNewNamedTupleReplaceTyped]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class X(NamedTuple):
- x: int
- y: str
-
-x: X
-reveal_type(x._replace()) # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
-x._replace(x=5)
-x._replace(y=5) # E: Argument 1 to X._replace has incompatible type "int"; expected "str"
-
-[case testNewNamedTupleFields]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class X(NamedTuple):
- x: int
- y: str
-
-reveal_type(X._fields) # E: Revealed type is 'Tuple[builtins.str, builtins.str]'
-
-[case testNewNamedTupleUnit]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class X(NamedTuple):
- pass
-
-x: X = X()
-x._replace()
-x._fields[0] # E: Tuple index out of range
-
-[case testNewNamedTupleJoinNamedTuple]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class X(NamedTuple):
- x: int
- y: str
-class Y(NamedTuple):
- x: int
- y: str
-
-reveal_type([X(3, 'b'), Y(1, 'a')]) # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
-
-[builtins fixtures/list.pyi]
-
-[case testNewNamedTupleJoinTuple]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class X(NamedTuple):
- x: int
- y: str
-
-reveal_type([(3, 'b'), X(1, 'a')]) # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
-reveal_type([X(1, 'a'), (3, 'b')]) # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
-
-[builtins fixtures/list.pyi]
-
-[case testNewNamedTupleWithTooManyArguments]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class X(NamedTuple):
- x: int
- y = z = 2 # E: Invalid statement in NamedTuple definition; expected "field_name: field_type"
- def f(self): pass # E: Invalid statement in NamedTuple definition; expected "field_name: field_type"
-
-[case testNewNamedTupleWithInvalidItems2]
-# flags: --fast-parser --python-version 3.6
-import typing
-
-class X(typing.NamedTuple):
- x: int
- y: str = 'y' # E: Right hand side values are not supported in NamedTuple
- z = None # type: int # E: Invalid statement in NamedTuple definition; expected "field_name: field_type"
- x[0]: int # E: Invalid statement in NamedTuple definition; expected "field_name: field_type"
-
-[builtins fixtures/list.pyi]
-
-[case testNewNamedTupleWithoutTypesSpecified]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple
-
-class X(NamedTuple):
- x: int
- y = 2 # E: Invalid statement in NamedTuple definition; expected "field_name: field_type"
-
-[case testTypeUsingTypeCNamedTuple]
-# flags: --fast-parser --python-version 3.6
-from typing import NamedTuple, Type
-
-class N(NamedTuple):
- x: int
- y: str
-
-def f(a: Type[N]):
- a()
-[builtins fixtures/list.pyi]
-[out]
-main:8: error: Unsupported type Type["N"]
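For reference, the Python 3.6 class syntax these cases cover, written as ordinary user code (the Point class is an invented example):

    from typing import NamedTuple

    class Point(NamedTuple):
        x: int
        y: int

    p = Point(1, 2)
    q = p._replace(x=3)     # typed, as in testNewNamedTupleReplaceTyped
    # p.x = 5               # would be rejected: NamedTuple fields are read-only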
diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test
deleted file mode 100644
index 9e46ef9..0000000
--- a/test-data/unit/check-classes.test
+++ /dev/null
@@ -1,2761 +0,0 @@
--- Methods
--- -------
-
-
-[case testMethodCall]
-
-a = None # type: A
-b = None # type: B
-
-a.foo(B()) # Fail
-a.bar(B(), A()) # Fail
-
-a.foo(A())
-b.bar(B(), A())
-
-class A:
- def foo(self, x: 'A') -> None: pass
-class B:
- def bar(self, x: 'B', y: A) -> None: pass
-[out]
-main:5: error: Argument 1 to "foo" of "A" has incompatible type "B"; expected "A"
-main:6: error: "A" has no attribute "bar"
-
-[case testMethodCallWithSubtype]
-
-a = None # type: A
-a.foo(A())
-a.foo(B())
-a.bar(A()) # Fail
-a.bar(B())
-
-class A:
- def foo(self, x: 'A') -> None: pass
- def bar(self, x: 'B') -> None: pass
-class B(A): pass
-[out]
-main:5: error: Argument 1 to "bar" of "A" has incompatible type "A"; expected "B"
-
-[case testInheritingMethod]
-
-a = None # type: B
-a.foo(A()) # Fail
-a.foo(B())
-
-class A:
- def foo(self, x: 'B') -> None: pass
-class B(A): pass
-[out]
-main:3: error: Argument 1 to "foo" of "A" has incompatible type "A"; expected "B"
-
-[case testMethodCallWithInvalidNumberOfArguments]
-
-a = None # type: A
-a.foo() # Fail
-a.foo(object(), A()) # Fail
-
-class A:
- def foo(self, x: 'A') -> None: pass
-[out]
-main:3: error: Too few arguments for "foo" of "A"
-main:4: error: Too many arguments for "foo" of "A"
-main:4: error: Argument 1 to "foo" of "A" has incompatible type "object"; expected "A"
-
-[case testMethodBody]
-import typing
-class A:
- def f(self) -> None:
- a = object() # type: A # Fail
-[out]
-main:4: error: Incompatible types in assignment (expression has type "object", variable has type "A")
-
-[case testMethodArguments]
-import typing
-class A:
- def f(self, a: 'A', b: 'B') -> None:
- a = B() # Fail
- b = A() # Fail
- a = A()
- b = B()
- a = a
- a = b # Fail
-class B: pass
-[out]
-main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-main:5: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-main:9: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testReturnFromMethod]
-import typing
-class A:
- def f(self) -> 'A':
- return B() # Fail
- return A()
-class B: pass
-[out]
-main:4: error: Incompatible return value type (got "B", expected "A")
-
-[case testSelfArgument]
-import typing
-class A:
- def f(self) -> None:
- o = self # type: B # Fail
- self.g() # Fail
- a = self # type: A
- self.f()
-class B: pass
-[out]
-main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-main:5: error: "A" has no attribute "g"
-
-[case testAssignToMethodViaInstance]
-import typing
-class A:
- def f(self): pass
-A().f = None # E: Cannot assign to a method
-
-
--- Attributes
--- ----------
-
-
-[case testReferToInvalidAttribute]
-
-class A:
- def __init__(self):
- self.x = object()
-a = None # type: A
-a.y
-a.y = object()
-a.x
-a.x = object()
-[out]
-main:6: error: "A" has no attribute "y"
-main:7: error: "A" has no attribute "y"
-
-[case testArgumentTypeInference]
-
-class A:
- def __init__(self, aa: 'A', bb: 'B') -> None:
- self.a = aa
- self.b = bb
-class B: pass
-a = None # type: A
-b = None # type: B
-a.a = b # Fail
-a.b = a # Fail
-b.a # Fail
-a.a = a
-a.b = b
-[out]
-main:9: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-main:10: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-main:11: error: "B" has no attribute "a"
-
-[case testExplicitAttributeInBody]
-
-a = None # type: A
-a.x = object() # Fail
-a.x = A()
-class A:
- x = None # type: A
-[out]
-main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A")
-
-[case testAttributeDefinedInNonInitMethod]
-import typing
-class A:
- def f(self) -> None:
- self.x = 1
- self.y = ''
- self.x = 1
-a = A()
-a.x = 1
-a.y = ''
-a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-a.z = 0 # E: "A" has no attribute "z"
-
-[case testInheritanceAndAttributeAssignment]
-import typing
-class A:
- def f(self) -> None:
- self.x = 0
-class B(A):
- def f(self) -> None:
- self.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-[out]
-
-[case testAssignmentToAttributeInMultipleMethods]
-import typing
-class A:
- def f(self) -> None:
- self.x = 0
- def g(self) -> None:
- self.x = '' # Fail
- def __init__(self) -> None:
- self.x = '' # Fail
-[out]
-main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-main:8: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-
--- Method overriding
--- -----------------
-
-
-[case testMethodOverridingWithIdenticalSignature]
-import typing
-class A:
- def f(self, x: 'A') -> None: pass
- def g(self, x: 'B' , y: object) -> 'A': pass
- def h(self) -> None: pass
-class B(A):
- def f(self, x: A) -> None: pass
- def g(self, x: 'B' , y: object) -> A: pass
- def h(self) -> None: pass
-[out]
-
-[case testMethodOverridingWithCovariantType]
-import typing
-class A:
- def f(self, x: 'A', y: 'B') -> 'A': pass
- def g(self, x: 'A', y: 'B') -> 'A': pass
-class B(A):
- def f(self, x: A, y: 'B') -> 'B': pass
- def g(self, x: A, y: A) -> 'A': pass
-[out]
-
-[case testMethodOverridingWithIncompatibleTypes]
-import typing
-class A:
- def f(self, x: 'A', y: 'B') -> 'A': pass
- def g(self, x: 'A', y: 'B') -> 'A': pass
- def h(self, x: 'A', y: 'B') -> 'A': pass
-class B(A):
- def f(self, x: 'B', y: 'B') -> A: pass # Fail
- def g(self, x: A, y: A) -> A: pass
- def h(self, x: A, y: 'B') -> object: pass # Fail
-[out]
-main:7: error: Argument 1 of "f" incompatible with supertype "A"
-main:9: error: Return type of "h" incompatible with supertype "A"
-
-[case testMethodOverridingWithIncompatibleArgumentCount]
-import typing
-class A:
- def f(self, x: 'A') -> None: pass
- def g(self, x: 'A', y: 'B') -> 'A': pass
-class B(A):
- def f(self, x: A, y: A) -> None: pass # Fail
- def g(self, x: A) -> A: pass # Fail
-[out]
-main:6: error: Signature of "f" incompatible with supertype "A"
-main:7: error: Signature of "g" incompatible with supertype "A"
-
-[case testMethodOverridingAcrossDeepInheritanceHierarchy1]
-import typing
-class A:
- def f(self, x: 'B') -> None: pass
-class B(A): pass
-class C(B): # with gap in implementations
- def f(self, x: 'C') -> None: # Fail
- pass
-[out]
-main:6: error: Argument 1 of "f" incompatible with supertype "A"
-
-[case testMethodOverridingAcrossDeepInheritanceHierarchy2]
-import typing
-class A:
- def f(self) -> 'B': pass
-class B(A):
- def f(self) -> 'C': pass
-class C(B): # with multiple implementations
- def f(self) -> B: # Fail
- pass
-[out]
-main:7: error: Return type of "f" incompatible with supertype "B"
-
-[case testMethodOverridingWithVoidReturnValue]
-import typing
-class A:
- def f(self) -> None: pass
- def g(self) -> 'A': pass
-class B(A):
- def f(self) -> A: pass # Fail
- def g(self) -> None: pass # Fail
-[out]
-main:6: error: Return type of "f" incompatible with supertype "A"
-main:7: error: Return type of "g" incompatible with supertype "A"
-
-[case testOverride__new__WithDifferentSignature]
-class A:
- def __new__(cls, x: int) -> str:
- return ''
-
-class B(A):
- def __new__(cls) -> int:
- return 1
-
-[case testInnerFunctionNotOverriding]
-class A:
- def f(self) -> int: pass
-
-class B(A):
- def g(self) -> None:
- def f(self) -> str: pass
-
-
--- Constructors
--- ------------
-
-
-[case testTrivialConstructor]
-import typing
-a = A() # type: A
-b = A() # type: B # Fail
-class A:
- def __init__(self) -> None: pass
-class B: pass
-[out]
-main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-[case testConstructor]
-import typing
-a = A(B()) # type: A
-aa = A(object()) # type: A # Fail
-b = A(B()) # type: B # Fail
-class A:
- def __init__(self, x: 'B') -> None: pass
-class B: pass
-[out]
-main:3: error: Argument 1 to "A" has incompatible type "object"; expected "B"
-main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-[case testConstructorWithTwoArguments]
-import typing
-a = A(C(), B()) # type: A # Fail
-
-class A:
- def __init__(self, x: 'B', y: 'C') -> None: pass
-class B: pass
-class C(B): pass
-[out]
-main:2: error: Argument 2 to "A" has incompatible type "B"; expected "C"
-
-[case testInheritedConstructor]
-import typing
-b = B(C()) # type: B
-a = B(D()) # type: A # Fail
-class A:
- def __init__(self, x: 'C') -> None: pass
-class B(A): pass
-class C: pass
-class D: pass
-[out]
-main:3: error: Argument 1 to "B" has incompatible type "D"; expected "C"
-
-[case testOverridingWithIncompatibleConstructor]
-import typing
-A() # Fail
-B(C()) # Fail
-A(C())
-B()
-class A:
- def __init__(self, x: 'C') -> None: pass
-class B(A):
- def __init__(self) -> None: pass
-class C: pass
-[out]
-main:2: error: Too few arguments for "A"
-main:3: error: Too many arguments for "B"
-
-[case testConstructorWithReturnValueType]
-import typing
-class A:
- def __init__(self) -> 'A': pass
-[out]
-main:3: error: The return type of "__init__" must be None
-
-[case testConstructorWithImplicitReturnValueType]
-import typing
-class A:
- def __init__(self, x: int): pass
-[out]
-main:3: error: The return type of "__init__" must be None
-
-[case testInitSubclassWithReturnValueType]
-import typing
-class A:
- def __init_subclass__(cls) -> 'A': pass
-[out]
-main:3: error: The return type of "__init_subclass__" must be None
-
-[case testInitSubclassWithImplicitReturnValueType]
-import typing
-class A:
- def __init_subclass__(cls, x: int=1): pass
-[out]
-main:3: error: The return type of "__init_subclass__" must be None
-
-[case testGlobalFunctionInitWithReturnType]
-import typing
-a = __init__() # type: A
-b = __init__() # type: B # Fail
-def __init__() -> 'A': pass
-class A: pass
-class B: pass
-[out]
-main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-[case testAccessingInit]
-from typing import Any, cast
-class A:
- def __init__(self, a: 'A') -> None: pass
-a = None # type: A
-a.__init__(a) # E: Cannot access "__init__" directly
-(cast(Any, a)).__init__(a)
-
-[case testDeepInheritanceHierarchy]
-import typing
-d = C() # type: D # Fail
-d = B() # Fail
-d = A() # Fail
-d = D2() # Fail
-a = D() # type: A
-a = D2()
-b = D() # type: B
-b = D2()
-
-class A: pass
-class B(A): pass
-class C(B): pass
-class D(C): pass
-class D2(C): pass
-[out]
-main:2: error: Incompatible types in assignment (expression has type "C", variable has type "D")
-main:3: error: Incompatible types in assignment (expression has type "B", variable has type "D")
-main:4: error: Incompatible types in assignment (expression has type "A", variable has type "D")
-main:5: error: Incompatible types in assignment (expression has type "D2", variable has type "D")
-
-
--- Attribute access in class body
--- ------------------------------
-
-
-[case testDataAttributeRefInClassBody]
-import typing
-class B: pass
-class A:
- x = B()
- y = x
- b = x # type: B
- b = x
- c = x # type: A # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- c = b # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-[out]
-
-[case testMethodRefInClassBody]
-from typing import Callable
-class B: pass
-class A:
- def f(self) -> None: pass
- g = f
- h = f # type: Callable[[A], None]
- h = f
- g = h
- ff = f # type: Callable[[B], None] # E: Incompatible types in assignment (expression has type Callable[[A], None], variable has type Callable[[B], None])
- g = ff # E: Incompatible types in assignment (expression has type Callable[[B], None], variable has type Callable[[A], None])
-[out]
-
-
--- Arbitrary statements in class body
--- ----------------------------------
-
-
-[case testStatementsInClassBody]
-import typing
-class B: pass
-class A:
- for x in [A()]:
- y = x
- y = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- x = A()
- y = A()
- x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-[builtins fixtures/for.pyi]
-[out]
-
-
--- Class attributes
--- ----------------
-
-
-[case testAccessMethodViaClass]
-import typing
-class A:
- def f(self) -> None: pass
-A.f(A())
-A.f(object()) # E: Argument 1 to "f" of "A" has incompatible type "object"; expected "A"
-A.f() # E: Too few arguments for "f" of "A"
-A.f(None, None) # E: Too many arguments for "f" of "A"
-
-[case testAccessAttributeViaClass]
-import typing
-class B: pass
-class A:
- x = None # type: A
-a = A.x # type: A
-b = A.x # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-[case testAccessingUndefinedAttributeViaClass]
-import typing
-class A: pass
-A.x # E: "A" has no attribute "x"
-
-[case testAccessingUndefinedAttributeViaClassWithOverloadedInit]
-from typing import overload
-class A:
- @overload
- def __init__(self): pass
- @overload
- def __init__(self, x): pass
-A.x # E: "A" has no attribute "x"
-
-[case testAccessMethodOfClassWithOverloadedInit]
-from typing import overload, Any
-class A:
- @overload
- def __init__(self) -> None: pass
- @overload
- def __init__(self, x: Any) -> None: pass
- def f(self) -> None: pass
-A.f(A())
-A.f() # E: Too few arguments for "f" of "A"
-
-[case testAssignmentToClassDataAttribute]
-import typing
-class B: pass
-class A:
- x = None # type: B
-A.x = B()
-A.x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "B")
-
-[case testAssignmentToInferredClassDataAttribute]
-import typing
-class B: pass
-class A:
- x = B()
-A.x = B()
-A.x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-[case testInitMethodUnbound]
-
-class B: pass
-class A:
- def __init__(self, b: B) -> None: pass
-a = None # type: A
-b = None # type: B
-A.__init__(a, b)
-A.__init__(b, b) # E: Argument 1 to "__init__" of "A" has incompatible type "B"; expected "A"
-A.__init__(a, a) # E: Argument 2 to "__init__" of "A" has incompatible type "A"; expected "B"
-
-[case testAssignToMethodViaClass]
-import typing
-class A:
- def f(self): pass
-A.f = None # E: Cannot assign to a method
-
-[case testAssignToNestedClassViaClass]
-import typing
-class A:
- class B: pass
-A.B = None # E: Cannot assign to a type
-
-[case testAccessingClassAttributeWithTypeInferenceIssue]
-x = C.x # E: Cannot determine type of 'x'
-def f() -> int: return 1
-class C:
- x = f()
-[builtins fixtures/list.pyi]
-
-[case testAccessingClassAttributeWithTypeInferenceIssue2]
-class C:
- x = []
-x = C.x
-[builtins fixtures/list.pyi]
-[out]
-main:2: error: Need type annotation for variable
-
-
--- Nested classes
--- --------------
-
-
-[case testClassWithinFunction]
-
-def f() -> None:
- class A:
- def g(self) -> None: pass
- a = None # type: A
- a.g()
- a.g(a) # E: Too many arguments for "g" of "A"
-[out]
-
-[case testConstructNestedClass]
-import typing
-class A:
- class B: pass
- b = B()
- b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- b = B(b) # E: Too many arguments for "B"
-[out]
-
-[case testConstructNestedClassWithCustomInit]
-import typing
-class A:
- def f(self) -> None:
- class B:
- def __init__(self, a: 'A') -> None: pass
- b = B(A())
- b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- b = B() # E: Too few arguments for "B"
-[out]
-
-[case testDeclareVariableWithNestedClassType]
-
-def f() -> None:
- class A: pass
- a = None # type: A
- a = A()
- a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-[out]
-
-[case testExternalReferenceToClassWithinClass]
-
-class A:
- class B: pass
-b = None # type: A.B
-b = A.B()
-b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-b = A.B(b) # E: Too many arguments for "B"
-
-
--- Declaring attribute type in method
--- ----------------------------------
-
-
-[case testDeclareAttributeTypeInInit]
-
-class A:
- def __init__(self):
- self.x = None # type: int
-a = None # type: A
-a.x = 1
-a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testAccessAttributeDeclaredInInitBeforeDeclaration]
-
-a = None # type: A
-a.x = 1
-a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-class A:
- def __init__(self):
- self.x = None # type: int
-
-
--- Special cases
--- -------------
-
-
-[case testMultipleClassDefinition]
-import typing
-A()
-class A: pass
-class A: pass
-[out]
-main:4: error: Name 'A' already defined
-
-[case testDocstringInClass]
-import typing
-class A:
- """Foo"""
-class B:
- 'x'
- y = B()
-[builtins fixtures/primitives.pyi]
-
-[case testErrorMessageInFunctionNestedWithinMethod]
-import typing
-class A:
- def f(self) -> None:
- def g() -> None:
- a = None
- b = None
-[out]
-main:5: error: Need type annotation for variable
-main:6: error: Need type annotation for variable
-
-
--- Static methods
--- --------------
-
-
-[case testSimpleStaticMethod]
-import typing
-class A:
- @staticmethod
- def f(x: int) -> None: pass
-A.f(1)
-A().f(1)
-A.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-[builtins fixtures/staticmethod.pyi]
-
-[case testBuiltinStaticMethod]
-import typing
-int.from_bytes(b'', '')
-int.from_bytes('', '') # E: Argument 1 to "from_bytes" of "int" has incompatible type "str"; expected "bytes"
-[builtins fixtures/staticmethod.pyi]
-
-[case testAssignStaticMethodOnInstance]
-import typing
-class A:
- @staticmethod
- def f(x: int) -> None: pass
-A().f = A.f # E: Cannot assign to a method
-[builtins fixtures/staticmethod.pyi]
-
-
--- Class methods
--- -------------
-
-
-[case testSimpleClassMethod]
-import typing
-class A:
- @classmethod
- def f(cls, x: int) -> None: pass
-A.f(1)
-A().f(1)
-A.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-[builtins fixtures/classmethod.pyi]
-
-[case testBuiltinClassMethod]
-import typing
-int.from_bytes(b'', '')
-int.from_bytes('', '') # E: Argument 1 to "from_bytes" of "int" has incompatible type "str"; expected "bytes"
-[builtins fixtures/classmethod.pyi]
-
-[case testAssignClassMethodOnClass]
-import typing
-class A:
- @classmethod
- def f(cls, x: int) -> None: pass
-A.f = A.f # E: Cannot assign to a method
-[builtins fixtures/classmethod.pyi]
-
-[case testAssignClassMethodOnInstance]
-import typing
-class A:
- @classmethod
- def f(cls, x: int) -> None: pass
-A().f = A.f # E: Cannot assign to a method
-[builtins fixtures/classmethod.pyi]
-
-[case testClassMethodCalledInClassMethod]
-import typing
-class C:
- @classmethod
- def foo(cls) -> None: pass
- @classmethod
- def bar(cls) -> None:
- cls()
- cls(1) # E: Too many arguments for "C"
- cls.bar()
- cls.bar(1) # E: Too many arguments for "bar" of "C"
- cls.bozo() # E: "C" has no attribute "bozo"
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testClassMethodCalledOnClass]
-import typing
-class C:
- @classmethod
- def foo(cls) -> None: pass
-C.foo()
-C.foo(1) # E: Too many arguments for "foo" of "C"
-C.bozo() # E: "C" has no attribute "bozo"
-[builtins fixtures/classmethod.pyi]
-
-[case testClassMethodCalledOnInstance]
-import typing
-class C:
- @classmethod
- def foo(cls) -> None: pass
-C().foo()
-C().foo(1) # E: Too many arguments for "foo" of "C"
-C.bozo() # E: "C" has no attribute "bozo"
-[builtins fixtures/classmethod.pyi]
-
-[case testClassMethodMayCallAbstractMethod]
-from abc import abstractmethod
-import typing
-class C:
- @classmethod
- def foo(cls) -> None:
- cls().bar()
- @abstractmethod
- def bar(self) -> None:
- pass
-[builtins fixtures/classmethod.pyi]
-
-
--- Properties
--- ----------
-
-
-[case testAccessingReadOnlyProperty]
-import typing
-class A:
- @property
- def f(self) -> str: pass
-a = A()
-reveal_type(a.f) # E: Revealed type is 'builtins.str'
-[builtins fixtures/property.pyi]
-
-[case testAssigningToReadOnlyProperty]
-import typing
-class A:
- @property
- def f(self) -> str: pass
-A().f = '' # E: Property "f" defined in "A" is read-only
-[builtins fixtures/property.pyi]
-
-[case testPropertyGetterBody]
-import typing
-class A:
- @property
- def f(self) -> str:
- self.x = 1
- self.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-[builtins fixtures/property.pyi]
-[out]
-
-[case testDynamicallyTypedProperty]
-import typing
-class A:
- @property
- def f(self): pass
-a = A()
-a.f.xx
-a.f = '' # E: Property "f" defined in "A" is read-only
-[builtins fixtures/property.pyi]
-
-[case testPropertyWithSetter]
-import typing
-class A:
- @property
- def f(self) -> int:
- return 1
- @f.setter
- def f(self, x: int) -> None:
- pass
-a = A()
-a.f = a.f
-a.f.x # E: "int" has no attribute "x"
-a.f = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-[builtins fixtures/property.pyi]
-
-[case testPropertyWithDeleterButNoSetter]
-import typing
-class A:
- @property
- def f(self) -> int:
- return 1
- @f.deleter
- def f(self, x) -> None:
- pass
-a = A()
-a.f = a.f # E: Property "f" defined in "A" is read-only
-a.f.x # E: "int" has no attribute "x"
-[builtins fixtures/property.pyi]
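As plain user code, the property behaviour tested above (class and attribute names are invented): a getter-only property is read-only, and adding a setter makes assignments type-checked against the setter's parameter.

    class Account:
        def __init__(self) -> None:
            self._balance = 0

        @property
        def balance(self) -> int:
            return self._balance

        @balance.setter
        def balance(self, value: int) -> None:
            self._balance = value

    acct = Account()
    acct.balance = 10       # ok, matches the setter's int parameter
    # acct.balance = 'x'    # would be rejected: str is not int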
-
--- Descriptors
--- -----------
-
-
-[case testAccessingNonDataDescriptor]
-from typing import Any
-class D:
- def __get__(self, inst: Any, own: Any) -> str: return 's'
-class A:
- f = D()
-a = A()
-reveal_type(a.f) # E: Revealed type is 'builtins.str'
-
-[case testSettingNonDataDescriptor]
-from typing import Any
-class D:
- def __get__(self, inst: Any, own: Any) -> str: return 's'
-class A:
- f = D()
-a = A()
-a.f = 'foo'
-a.f = D() # E: Incompatible types in assignment (expression has type "D", variable has type "str")
-
-[case testSettingDataDescriptor]
-from typing import Any
-class D:
- def __get__(self, inst: Any, own: Any) -> str: return 's'
- def __set__(self, inst: Any, value: str) -> None: pass
-class A:
- f = D()
-a = A()
-a.f = ''
-a.f = 1 # E: Argument 2 to "__set__" of "D" has incompatible type "int"; expected "str"
-
-[case testReadingDescriptorWithoutDunderGet]
-from typing import Union, Any
-class D:
- def __set__(self, inst: Any, value: str) -> None: pass
-class A:
- f = D()
- def __init__(self): self.f = 's'
-a = A()
-reveal_type(a.f) # E: Revealed type is '__main__.D'
-
-[case testAccessingDescriptorFromClass]
-# flags: --strict-optional
-from d import D, Base
-class A(Base):
- f = D()
-reveal_type(A.f) # E: Revealed type is 'd.D'
-reveal_type(A().f) # E: Revealed type is 'builtins.str'
-[file d.pyi]
-from typing import TypeVar, Type, Generic, overload
-class Base: pass
-class D:
- def __init__(self) -> None: pass
- @overload
- def __get__(self, inst: None, own: Type[Base]) -> D: pass
- @overload
- def __get__(self, inst: Base, own: Type[Base]) -> str: pass
-[builtins fixtures/bool.pyi]
-
-[case testAccessingDescriptorFromClassWrongBase]
-# flags: --strict-optional
-from d import D, Base
-class A:
- f = D()
-reveal_type(A.f)
-reveal_type(A().f)
-[file d.pyi]
-from typing import TypeVar, Type, Generic, overload
-class Base: pass
-class D:
- def __init__(self) -> None: pass
- @overload
- def __get__(self, inst: None, own: Type[Base]) -> D: pass
- @overload
- def __get__(self, inst: Base, own: Type[Base]) -> str: pass
-[builtins fixtures/bool.pyi]
-[out]
-main:5: error: Revealed type is 'Any'
-main:5: error: No overload variant of "__get__" of "D" matches argument types [builtins.None, Type[__main__.A]]
-main:6: error: Revealed type is 'Any'
-main:6: error: No overload variant of "__get__" of "D" matches argument types [__main__.A, Type[__main__.A]]
-
-
-[case testAccessingGenericNonDataDescriptor]
-from typing import TypeVar, Type, Generic, Any
-V = TypeVar('V')
-class D(Generic[V]):
- def __init__(self, v: V) -> None: self.v = v
- def __get__(self, inst: Any, own: Type) -> V: return self.v
-class A:
- f = D(10)
- g = D('10')
-a = A()
-reveal_type(a.f) # E: Revealed type is 'builtins.int*'
-reveal_type(a.g) # E: Revealed type is 'builtins.str*'
-
-[case testSettingGenericDataDescriptor]
-from typing import TypeVar, Type, Generic, Any
-V = TypeVar('V')
-class D(Generic[V]):
- def __init__(self, v: V) -> None: self.v = v
- def __get__(self, inst: Any, own: Type) -> V: return self.v
- def __set__(self, inst: Any, v: V) -> None: pass
-class A:
- f = D(10)
- g = D('10')
-a = A()
-a.f = 1
-a.f = '' # E: Argument 2 to "__set__" of "D" has incompatible type "str"; expected "int"
-a.g = ''
-a.g = 1 # E: Argument 2 to "__set__" of "D" has incompatible type "int"; expected "str"
-
-[case testAccessingGenericDescriptorFromClass]
-# flags: --strict-optional
-from d import D
-class A:
- f = D(10) # type: D[A, int]
- g = D('10') # type: D[A, str]
-reveal_type(A.f) # E: Revealed type is 'd.D[__main__.A*, builtins.int*]'
-reveal_type(A.g) # E: Revealed type is 'd.D[__main__.A*, builtins.str*]'
-reveal_type(A().f) # E: Revealed type is 'builtins.int*'
-reveal_type(A().g) # E: Revealed type is 'builtins.str*'
-[file d.pyi]
-from typing import TypeVar, Type, Generic, overload
-T = TypeVar('T')
-V = TypeVar('V')
-class D(Generic[T, V]):
- def __init__(self, v: V) -> None: pass
- @overload
- def __get__(self, inst: None, own: Type[T]) -> 'D[T, V]': pass
- @overload
- def __get__(self, inst: T, own: Type[T]) -> V: pass
-[builtins fixtures/bool.pyi]
-
-[case testAccessingGenericDescriptorFromInferredClass]
-# flags: --strict-optional
-from typing import Type
-from d import D
-class A:
- f = D(10) # type: D[A, int]
- g = D('10') # type: D[A, str]
-def f(some_class: Type[A]):
- reveal_type(some_class.f)
- reveal_type(some_class.g)
-[file d.pyi]
-from typing import TypeVar, Type, Generic, overload
-T = TypeVar('T')
-V = TypeVar('V')
-class D(Generic[T, V]):
- def __init__(self, v: V) -> None: pass
- @overload
- def __get__(self, inst: None, own: Type[T]) -> 'D[T, V]': pass
- @overload
- def __get__(self, inst: T, own: Type[T]) -> V: pass
-[builtins fixtures/bool.pyi]
-[out]
-main:8: error: Revealed type is 'd.D[__main__.A*, builtins.int*]'
-main:9: error: Revealed type is 'd.D[__main__.A*, builtins.str*]'
-
-[case testAccessingGenericDescriptorFromClassBadOverload]
-# flags: --strict-optional
-from d import D
-class A:
- f = D(10) # type: D[A, int]
-reveal_type(A.f)
-[file d.pyi]
-from typing import TypeVar, Type, Generic, overload
-T = TypeVar('T')
-V = TypeVar('V')
-class D(Generic[T, V]):
- def __init__(self, v: V) -> None: pass
- @overload
- def __get__(self, inst: None, own: None) -> 'D[T, V]': pass
- @overload
- def __get__(self, inst: T, own: Type[T]) -> V: pass
-[builtins fixtures/bool.pyi]
-[out]
-main:5: error: Revealed type is 'Any'
-main:5: error: No overload variant of "__get__" of "D" matches argument types [builtins.None, Type[__main__.A]]
-
-[case testAccessingNonDataDescriptorSubclass]
-from typing import Any
-class C:
- def __get__(self, inst: Any, own: Any) -> str: return 's'
-class D(C): pass
-class A:
- f = D()
-a = A()
-reveal_type(a.f) # E: Revealed type is 'builtins.str'
-
-[case testSettingDataDescriptorSubclass]
-from typing import Any
-class C:
- def __get__(self, inst: Any, own: Any) -> str: return 's'
- def __set__(self, inst: Any, v: str) -> None: pass
-class D(C): pass
-class A:
- f = D()
-a = A()
-a.f = ''
-a.f = 1 # E: Argument 2 to "__set__" of "C" has incompatible type "int"; expected "str"
-
-[case testReadingDescriptorSubclassWithoutDunderGet]
-from typing import Union, Any
-class C:
- def __set__(self, inst: Any, v: str) -> None: pass
-class D(C): pass
-class A:
- f = D()
- def __init__(self): self.f = 's'
-a = A()
-reveal_type(a.f) # E: Revealed type is '__main__.D'
-
-[case testAccessingGenericNonDataDescriptorSubclass]
-from typing import TypeVar, Type, Generic, Any
-V = TypeVar('V')
-class C(Generic[V]):
- def __init__(self, v: V) -> None: self.v = v
- def __get__(self, inst: Any, own: Type) -> V: return self.v
-class D(C[V], Generic[V]): pass
-class A:
- f = D(10)
- g = D('10')
-a = A()
-reveal_type(a.f) # E: Revealed type is 'builtins.int*'
-reveal_type(a.g) # E: Revealed type is 'builtins.str*'
-
-[case testSettingGenericDataDescriptorSubclass]
-from typing import TypeVar, Type, Generic
-T = TypeVar('T')
-V = TypeVar('V')
-class C(Generic[T, V]):
- def __init__(self, v: V) -> None: self.v = v
- def __get__(self, inst: T, own: Type[T]) -> V: return self.v
- def __set__(self, inst: T, v: V) -> None: pass
-class D(C[T, V], Generic[T, V]): pass
-class A:
- f = D(10) # type: D[A, int]
- g = D('10') # type: D[A, str]
-a = A()
-a.f = 1
-a.f = '' # E: Argument 2 to "__set__" of "C" has incompatible type "str"; expected "int"
-a.g = ''
-a.g = 1 # E: Argument 2 to "__set__" of "C" has incompatible type "int"; expected "str"
-
-[case testSetDescriptorOnClass]
-from typing import TypeVar, Type, Generic
-T = TypeVar('T')
-V = TypeVar('V')
-class D(Generic[T, V]):
- def __init__(self, v: V) -> None: self.v = v
- def __get__(self, inst: T, own: Type[T]) -> V: return self.v
- def __set__(self, inst: T, v: V) -> None: pass
-class A:
- f = D(10) # type: D[A, int]
-A.f = D(20)
-A.f = D('some string') # E: Argument 1 to "D" has incompatible type "str"; expected "int"
-
-[case testSetDescriptorOnInferredClass]
-from typing import TypeVar, Type, Generic, Any
-V = TypeVar('V')
-class D(Generic[V]):
- def __init__(self, v: V) -> None: self.v = v
- def __get__(self, inst: Any, own: Type) -> V: return self.v
- def __set__(self, inst: Any, v: V) -> None: pass
-class A:
- f = D(10)
-def f(some_class: Type[A]):
- A.f = D(20)
- A.f = D('some string')
-[out]
-main:11: error: Argument 1 to "D" has incompatible type "str"; expected "int"
-
-[case testDescriptorUncallableDunderSet]
-class D:
- __set__ = 's'
-class A:
- f = D()
-A().f = 'x' # E: __main__.D.__set__ is not callable
-
-[case testDescriptorDunderSetTooFewArgs]
-class D:
- def __set__(self, inst): pass
-class A:
- f = D()
-A().f = 'x' # E: Too many arguments for "__set__"
-
-[case testDescriptorDunderSetTooManyArgs]
-class D:
- def __set__(self, inst, v, other): pass
-class A:
- f = D()
-A().f = 'x' # E: Too few arguments for "__set__"
-
-[case testDescriptorDunderSetWrongArgTypes]
-class D:
- def __set__(self, inst: str, v:str) -> None: pass
-class A:
- f = D()
-A().f = 'x' # E: Argument 1 to "__set__" of "D" has incompatible type "A"; expected "str"
-
-[case testDescriptorUncallableDunderGet]
-class D:
- __get__ = 's'
-class A:
- f = D()
-A().f # E: __main__.D.__get__ is not callable
-
-[case testDescriptorDunderGetTooFewArgs]
-class D:
- def __get__(self, inst): pass
-class A:
- f = D()
-A().f # E: Too many arguments for "__get__"
-
-[case testDescriptorDunderGetTooManyArgs]
-class D:
- def __get__(self, inst, own, other): pass
-class A:
- f = D()
-A().f = 'x' # E: Too few arguments for "__get__"
-
-[case testDescriptorDunderGetWrongArgTypeForInstance]
-from typing import Any
-class D:
- def __get__(self, inst: str, own: Any) -> Any: pass
-class A:
- f = D()
-A().f # E: Argument 1 to "__get__" of "D" has incompatible type "A"; expected "str"
-
-[case testDescriptorDunderGetWrongArgTypeForOwner]
-from typing import Any
-class D:
- def __get__(self, inst: Any, own: str) -> Any: pass
-class A:
- f = D()
-A().f # E: Argument 2 to "__get__" of "D" has incompatible type Type[A]; expected "str"
-
-[case testDescriptorGetSetDifferentTypes]
-from typing import Any
-class D:
- def __get__(self, inst: Any, own: Any) -> str: return 's'
- def __set__(self, inst: Any, v: int) -> None: pass
-class A:
- f = D()
-a = A()
-a.f = 1
-reveal_type(a.f) # E: Revealed type is 'builtins.str'
-
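As background for the descriptor cases above, here is a minimal runnable sketch (hypothetical, not taken from any fixture) of a data descriptor whose __get__ and __set__ use different types, mirroring testDescriptorGetSetDifferentTypes:

from typing import Any

class D:
    # Data descriptor: reads produce a str, writes require an int.
    def __get__(self, inst: Any, owner: Any) -> str:
        return 's'
    def __set__(self, inst: Any, value: int) -> None:
        pass  # a real descriptor would normally store the value somewhere

class A:
    f = D()

a = A()
a.f = 1     # fine at runtime and for the checker: __set__ takes an int
print(a.f)  # prints 's': __get__ always returns a str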
-
--- _promote decorators
--- -------------------
-
-
-[case testSimpleDucktypeDecorator]
-from typing import _promote
-class A: pass
- at _promote(A)
-class B: pass
-a = None # type: A
-b = None # type: B
-b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = b
-
-[case testDucktypeTransitivityDecorator]
-from typing import _promote
-class A: pass
- at _promote(A)
-class B: pass
- at _promote(B)
-class C: pass
-a = None # type: A
-c = None # type: C
-c = a # E: Incompatible types in assignment (expression has type "A", variable has type "C")
-a = c
-
-
--- Hard coded type promotions
--- --------------------------
-
-[case testHardCodedTypePromotions]
-import typing
-def f(x: float) -> None: pass
-def g(x: complex) -> None: pass
-f(1)
-g(1)
-g(1.1)
-[builtins fixtures/complex.pyi]
-
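These promotions mirror PEP 484's numeric tower shortcut: int is accepted wherever float or complex is expected, even though int is not a runtime subclass of float. A small hypothetical sketch (not a fixture):

def average(x: float, y: float) -> float:
    return (x + y) / 2

print(average(1, 2))         # 1.5: ints are promoted to float by the checker
print(isinstance(1, float))  # False: the promotion is a checker rule, not a subclass relation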
-
--- Operator methods
--- ----------------
-
-
-[case testOperatorMethodOverrideIntroducingOverloading]
-from typing import overload
-class A:
- def __add__(self, x: int) -> int: pass
-class B(A):
- @overload # E: Signature of "__add__" incompatible with supertype "A"
- def __add__(self, x: int) -> int: pass
- @overload
- def __add__(self, x: str) -> str: pass
-[out]
-
-[case testOperatorMethodOverrideWideningArgumentType]
-import typing
-class A:
- def __add__(self, x: int) -> int: pass
-class B(A):
- def __add__(self, x: object) -> int: pass
-[out]
-
-[case testOperatorMethodOverrideNarrowingReturnType]
-import typing
-class A:
- def __add__(self, x: int) -> 'A': pass
-class B(A):
- def __add__(self, x: int) -> 'B': pass
-
-[case testOperatorMethodOverrideWithDynamicallyTyped]
-import typing
-class A:
- def __add__(self, x: int) -> 'A': pass
-class B(A):
- def __add__(self, x): pass
-
-[case testOperatorMethodOverrideWithIdenticalOverloadedType]
-from typing import overload
-class A:
- @overload
- def __add__(self, x: int) -> 'A': pass
- @overload
- def __add__(self, x: str) -> 'A': pass
-class B(A):
- @overload
- def __add__(self, x: int) -> 'A': pass
- @overload
- def __add__(self, x: str) -> 'A': pass
-
-[case testOverloadedOperatorMethodOverrideWithDynamicallyTypedMethod]
-from typing import overload, Any
-class A:
- @overload
- def __add__(self, x: int) -> 'A': pass
- @overload
- def __add__(self, x: str) -> 'A': pass
-class B(A):
- def __add__(self, x): pass
-class C(A):
- def __add__(self, x: Any) -> A: pass
-
-[case testOverloadedOperatorMethodOverrideWithNewItem]
-from typing import overload, Any
-class A:
- @overload
- def __add__(self, x: int) -> 'A': pass
- @overload
- def __add__(self, x: str) -> 'A': pass
-class B(A):
- @overload
- def __add__(self, x: int) -> A: pass
- @overload
- def __add__(self, x: str) -> A: pass
- @overload
- def __add__(self, x: type) -> A: pass
-[out]
-main:8: error: Signature of "__add__" incompatible with supertype "A"
-
-[case testOverloadedOperatorMethodOverrideWithSwitchedItemOrder]
-from typing import overload, Any
-class A:
- @overload
- def __add__(self, x: 'B') -> 'B': pass
- @overload
- def __add__(self, x: 'A') -> 'A': pass
-class B(A):
- @overload
- def __add__(self, x: 'A') -> 'A': pass
- @overload
- def __add__(self, x: 'B') -> 'B': pass
-[out]
-main:8: error: Signature of "__add__" incompatible with supertype "A"
-
-[case testReverseOperatorMethodArgumentType]
-from typing import Any
-class A: pass
-class B:
- def __radd__(self, x: A) -> int: pass # Error
-class C:
- def __radd__(self, x: A) -> Any: pass
-class D:
- def __radd__(self, x: A) -> object: pass
-[out]
-
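The reverse-operator checks in this group build on Python's binary-operator dispatch; a hypothetical runtime sketch (not a fixture) of the fallback to __radd__:

class A:
    pass

class B:
    def __radd__(self, other: A) -> int:
        return 42

print(A() + B())  # 42: A defines no __add__, so Python falls back to B.__radd__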
-[case testReverseOperatorMethodArgumentType2]
-from typing import Any, Tuple, Callable
-class A:
- def __radd__(self, x: Tuple[int, str]) -> int: pass
-class B:
- def __radd__(self, x: Callable[[], int]) -> int: pass
-class C:
- def __radd__(self, x: Any) -> int: pass
-[out]
-
-[case testReverseOperatorMethodForwardIsAny]
-from typing import Any
-def deco(f: Any) -> Any: return f
-class C:
- @deco
- def __add__(self, other: C) -> C: return C()
- def __radd__(self, other: C) -> C: return C()
-[out]
-
-[case testReverseOperatorMethodForwardIsAny2]
-from typing import Any
-def deco(f: Any) -> Any: return f
-class C:
- __add__ = None # type: Any
- def __radd__(self, other: C) -> C: return C()
-[out]
-
-[case testReverseOperatorMethodForwardIsAny3]
-from typing import Any
-def deco(f: Any) -> Any: return f
-class C:
- __add__ = 42
- def __radd__(self, other: C) -> C: return C()
-[out]
-main:5: error: Forward operator "__add__" is not callable
-
-[case testOverloadedReverseOperatorMethodArgumentType]
-from typing import overload, Any
-class A:
- @overload
- def __radd__(self, x: 'A') -> str: pass # Error
- @overload
- def __radd__(self, x: 'A') -> Any: pass
-[out]
-
-[case testReverseOperatorMethodArgumentTypeAndOverloadedMethod]
-from typing import overload
-class A:
- @overload
- def __add__(self, x: int) -> int: pass
- @overload
- def __add__(self, x: str) -> int: pass
- def __radd__(self, x: 'A') -> str: pass
-
-[case testAbstractReverseOperatorMethod]
-import typing
-from abc import abstractmethod
-class A:
- @abstractmethod
- def __lt__(self, x: 'A') -> int: pass
-class B:
- @abstractmethod
- def __lt__(self, x: 'B') -> int: pass
- @abstractmethod
- def __gt__(self, x: 'B') -> int: pass
-[out]
-
-[case testOperatorMethodsAndOverloadingSpecialCase]
-from typing import overload
-class A:
- @overload
- def __add__(self, x: 'A') -> int: pass
- @overload
- def __add__(self, x: str) -> int: pass
-class B:
- def __radd__(self, x: 'A') -> str: pass
-[out]
-
-[case testUnsafeOverlappingWithOperatorMethodsAndOverloading2]
-from typing import overload
-class A:
- def __add__(self, x: 'A') -> int: pass
-class B:
- @overload
- def __radd__(self, x: 'X') -> str: pass # Error
- @overload
- def __radd__(self, x: A) -> str: pass # Error
-class X:
- def __add__(self, x): pass
-[out]
-main:6: error: Signatures of "__radd__" of "B" and "__add__" of "X" are unsafely overlapping
-
-[case testUnsafeOverlappingWithLineNo]
-from typing import TypeVar
-T = TypeVar('T', Real)
-class Real:
- def __add__(self, other): ...
-class Fraction(Real):
- def __radd__(self, other: T) -> T: ...
-[out]
-main:6: error: Signatures of "__radd__" of "Fraction" and "__add__" of "Real" are unsafely overlapping
-
-[case testOverlappingNormalAndInplaceOperatorMethod]
-import typing
-class A:
- # Incompatible (potential trouble with __radd__)
- def __add__(self, x: 'A') -> int: pass
- def __iadd__(self, x: 'B') -> int: pass
-class B:
- # Safe
- def __add__(self, x: 'C') -> int: pass
- def __iadd__(self, x: A) -> int: pass
-class C(A): pass
-[out]
-main:5: error: Signatures of "__iadd__" and "__add__" are incompatible
-
-[case testOverloadedNormalAndInplaceOperatorMethod]
-from typing import overload
-class A:
- @overload
- def __add__(self, x: int) -> int: pass
- @overload
- def __add__(self, x: str) -> int: pass
- @overload # Error
- def __iadd__(self, x: int) -> int: pass
- @overload
- def __iadd__(self, x: object) -> int: pass
-class B:
- @overload
- def __add__(self, x: int) -> int: pass
- @overload
- def __add__(self, x: str) -> str: pass
- @overload
- def __iadd__(self, x: int) -> int: pass
- @overload
- def __iadd__(self, x: str) -> str: pass
-[out]
-main:7: error: Signatures of "__iadd__" and "__add__" are incompatible
-
-[case testIntroducingInplaceOperatorInSubclass]
-import typing
-class A:
- def __add__(self, x: 'A') -> 'B': pass
-class B(A):
- # __iadd__ effectively partially overrides __add__
- def __iadd__(self, x: 'A') -> 'A': pass # Error
-class C(A):
- def __iadd__(self, x: int) -> 'B': pass # Error
-class D(A):
- def __iadd__(self, x: 'A') -> 'B': pass
-[out]
-main:6: error: Return type of "__iadd__" incompatible with "__add__" of supertype "A"
-main:8: error: Argument 1 of "__iadd__" incompatible with "__add__" of supertype "A"
-main:8: error: Signatures of "__iadd__" and "__add__" are incompatible
-
-[case testGetAttribute]
-
-a, b = None, None # type: A, B
-class A:
- def __getattribute__(self, x: str) -> A:
- return A()
-class B: pass
-
-a = a.foo
-b = a.bar
-[out]
-main:9: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-[case testGetAttributeSignature]
-class A:
- def __getattribute__(self, x: str) -> A: pass
-class B:
- def __getattribute__(self, x: A) -> B: pass
-class C:
- def __getattribute__(self, x: str, y: str) -> C: pass
-class D:
- def __getattribute__(self, x: str) -> None: pass
-[out]
-main:4: error: Invalid signature "def (__main__.B, __main__.A) -> __main__.B"
-main:6: error: Invalid signature "def (__main__.C, builtins.str, builtins.str) -> __main__.C"
-
-[case testGetAttr]
-
-a, b = None, None # type: A, B
-class A:
- def __getattr__(self, x: str) -> A:
- return A()
-class B: pass
-
-a = a.foo
-b = a.bar
-[out]
-main:9: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-
-[case testGetAttrSignature]
-class A:
- def __getattr__(self, x: str) -> A: pass
-class B:
- def __getattr__(self, x: A) -> B: pass
-class C:
- def __getattr__(self, x: str, y: str) -> C: pass
-class D:
- def __getattr__(self, x: str) -> None: pass
-[out]
-main:4: error: Invalid signature "def (__main__.B, __main__.A) -> __main__.B"
-main:6: error: Invalid signature "def (__main__.C, builtins.str, builtins.str) -> __main__.C"
-
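For the __getattribute__/__getattr__ signature checks above, a hypothetical runtime sketch (not a fixture) of how the two hooks differ:

class A:
    x = 1
    def __getattr__(self, name: str) -> str:
        # Called only when normal attribute lookup fails.
        return 'missing: ' + name

class B:
    def __getattribute__(self, name: str) -> str:
        # Intercepts every attribute access, even for attributes that exist.
        return 'intercepted: ' + name

a = A()
print(a.x)        # 1: found normally, __getattr__ never runs
print(a.missing)  # missing: missing
print(B().x)      # intercepted: x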
-
--- CallableType objects
--- ----------------
-
-
-[case testCallableObject]
-import typing
-a = A()
-b = B()
-
-a() # E: Too few arguments for "__call__" of "A"
-a(a, a) # E: Too many arguments for "__call__" of "A"
-a = a(a)
-a = a(b) # E: Argument 1 to "__call__" of "A" has incompatible type "B"; expected "A"
-b = a(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-class A:
- def __call__(self, x: A) -> A:
- pass
-class B: pass
-
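A callable object is simply an instance whose class defines __call__; a hypothetical runtime sketch (not a fixture):

class Adder:
    def __call__(self, x: int) -> int:
        return x + 1

add_one = Adder()
print(add_one(41))  # 42: the instance is invoked like a function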
-
--- __new__
--- --------
-
-
-[case testConstructInstanceWith__new__]
-class C:
- def __new__(cls, foo: int = None) -> 'C':
- obj = object.__new__(cls)
- return obj
-
-x = C(foo=12)
-x.a # E: "C" has no attribute "a"
-C(foo='') # E: Argument 1 to "C" has incompatible type "str"; expected "int"
-[builtins fixtures/__new__.pyi]
-
-[case testConstructInstanceWithDynamicallyTyped__new__]
-class C:
- def __new__(cls, foo):
- obj = object.__new__(cls)
- return obj
-
-x = C(foo=12)
-x = C(foo='x')
-x.a # E: "C" has no attribute "a"
-C(bar='') # E: Unexpected keyword argument "bar" for "C"
-[builtins fixtures/__new__.pyi]
-
-[case testClassWith__new__AndCompatibilityWithType]
-class C:
- def __new__(cls, foo: int = None) -> 'C':
- obj = object.__new__(cls)
- return obj
-def f(x: type) -> None: pass
-def g(x: int) -> None: pass
-f(C)
-g(C) # E: Argument 1 to "g" has incompatible type "C"; expected "int"
-[builtins fixtures/__new__.pyi]
-
-[case testClassWith__new__AndCompatibilityWithType2]
-class C:
- def __new__(cls, foo):
- obj = object.__new__(cls)
- return obj
-def f(x: type) -> None: pass
-def g(x: int) -> None: pass
-f(C)
-g(C) # E: Argument 1 to "g" has incompatible type "C"; expected "int"
-[builtins fixtures/__new__.pyi]
-
-[case testGenericClassWith__new__]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class C(Generic[T]):
- def __new__(cls, foo: T) -> 'C[T]':
- obj = object.__new__(cls)
- return obj
- def set(self, x: T) -> None: pass
-c = C('')
-c.set('')
-c.set(1) # E: Argument 1 to "set" of "C" has incompatible type "int"; expected "str"
-[builtins fixtures/__new__.pyi]
-
-[case testOverloaded__new__]
-from typing import overload
-class C:
- @overload
- def __new__(cls, foo: int) -> 'C':
- obj = object.__new__(cls)
- return obj
- @overload
- def __new__(cls, x: str, y: str) -> 'C':
- obj = object.__new__(cls)
- return obj
-c = C(1)
-c.a # E: "C" has no attribute "a"
-C('', '')
-C('') # E: No overload variant of "C" matches argument types [builtins.str]
-[builtins fixtures/__new__.pyi]
-
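For the __new__ cases above, a hypothetical runtime sketch (not a fixture) of a class whose constructor signature is carried by __new__ rather than __init__:

class C:
    def __new__(cls, foo: int = 0) -> 'C':
        obj = super().__new__(cls)
        obj.foo = foo  # attach the argument to the freshly allocated instance
        return obj

c = C(12)
print(c.foo)  # 12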
-
--- Special cases
--- -------------
-
-
-[case testSubclassInt]
-import typing
-class A(int): pass
-n = 0
-n = A()
-a = A()
-a = 0 # E: Incompatible types in assignment (expression has type "int", variable has type "A")
-
-[case testForwardReferenceToNestedClass]
-def f(o: 'B.C') -> None:
- o.f('') # E: Argument 1 to "f" of "C" has incompatible type "str"; expected "int"
-
-class B:
- class C:
- def f(self, x: int) -> None: pass
-[out]
-
-[case testForwardReferenceToNestedClassDeep]
-def f(o: 'B.C.D') -> None:
- o.f('') # E: Argument 1 to "f" of "D" has incompatible type "str"; expected "int"
-
-class B:
- class C:
- class D:
- def f(self, x: int) -> None: pass
-[out]
-
-[case testForwardReferenceToNestedClassWithinClass]
-class B:
- def f(self, o: 'C.D') -> None:
- o.f('') # E: Argument 1 to "f" of "D" has incompatible type "str"; expected "int"
-
- class C:
- class D:
- def f(self, x: int) -> None: pass
-[out]
-
-[case testClassVsInstanceDisambiguation]
-class A: pass
-def f(x: A) -> None: pass
-f(A) # E: Argument 1 to "f" has incompatible type "A" (type object); expected "A"
-[out]
-
--- TODO
--- attribute inherited from superclass; assign in __init__
--- refer to attribute before type has been inferred (the initialization in
--- __init__ has not been analyzed)
-
-[case testAnyBaseClassUnconstrainedConstructor]
-from typing import Any
-B = None # type: Any
-class C(B): pass
-C(0)
-C(arg=0)
-[out]
-
-[case testErrorMapToSupertype]
-import typing
-class X(Nope): pass # E: Name 'Nope' is not defined
-a, b = X() # Used to crash here (#2244)
-
-
--- Class-valued attributes
--- -----------------------
-
-[case testClassValuedAttributesBasics]
-class A: ...
-class B:
- a = A
- bad = lambda: 42
-
-B().bad() # E: Invalid method type
-reveal_type(B.a) # E: Revealed type is 'def () -> __main__.A'
-reveal_type(B().a) # E: Revealed type is 'def () -> __main__.A'
-reveal_type(B().a()) # E: Revealed type is '__main__.A'
-
-class C:
- a = A
- def __init__(self) -> None:
- self.aa = self.a()
-
-reveal_type(C().aa) # E: Revealed type is '__main__.A'
-[out]
-
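A class-valued attribute such as B.a above acts like a zero-argument constructor at runtime; a hypothetical sketch (not a fixture):

class A:
    pass

class B:
    a = A  # the attribute's value is the class object itself

print(isinstance(B().a(), A))  # True: calling the attribute constructs an A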
-[case testClassValuedAttributesGeneric]
-from typing import Generic, TypeVar
-T = TypeVar('T')
-
-class A(Generic[T]):
- def __init__(self, x: T) -> None:
- self.x = x
-class B(Generic[T]):
- a = A[T]
-
-reveal_type(B[int]().a) # E: Revealed type is 'def (x: builtins.int*) -> __main__.A[builtins.int*]'
-B[int]().a('hi') # E: Argument 1 has incompatible type "str"; expected "int"
-
-class C(Generic[T]):
- a = A
- def __init__(self) -> None:
- self.aa = self.a(42)
-
-reveal_type(C().aa) # E: Revealed type is '__main__.A[builtins.int]'
-[out]
-
-[case testClassValuedAttributesAlias]
-from typing import Generic, TypeVar
-T = TypeVar('T')
-S = TypeVar('S')
-
-class A(Generic[T, S]): ...
-
-SameA = A[T, T]
-
-class B:
- a_any = SameA
- a_int = SameA[int]
-
-reveal_type(B().a_any) # E: Revealed type is 'def () -> __main__.A[Any, Any]'
-reveal_type(B().a_int()) # E: Revealed type is '__main__.A[builtins.int*, builtins.int*]'
-
-class C:
- a_int = SameA[int]
- def __init__(self) -> None:
- self.aa = self.a_int()
-
-reveal_type(C().aa) # E: Revealed type is '__main__.A[builtins.int*, builtins.int*]'
-[out]
-
-
--- Type[C]
--- -------
-
-
-[case testTypeUsingTypeCBasic]
-from typing import Type
-class User: pass
-class ProUser(User): pass
-def new_user(user_class: Type[User]) -> User:
- return user_class()
-reveal_type(new_user(User)) # E: Revealed type is '__main__.User'
-reveal_type(new_user(ProUser)) # E: Revealed type is '__main__.User'
-[out]
-
-[case testTypeUsingTypeCDefaultInit]
-from typing import Type
-class B:
- pass
-def f(A: Type[B]) -> None:
- A(0) # E: Too many arguments for "B"
- A()
-[out]
-
-[case testTypeUsingTypeCInitWithArg]
-from typing import Type
-class B:
- def __init__(self, a: int) -> None: pass
-def f(A: Type[B]) -> None:
- A(0)
- A() # E: Too few arguments for "B"
-[out]
-
-[case testTypeUsingTypeCTypeVar]
-from typing import Type, TypeVar
-class User: pass
-class ProUser(User): pass
-U = TypeVar('U', bound=User)
-def new_user(user_class: Type[U]) -> U:
- user = user_class()
- reveal_type(user)
- return user
-pro_user = new_user(ProUser)
-reveal_type(pro_user)
-[out]
-main:7: error: Revealed type is 'U`-1'
-main:10: error: Revealed type is '__main__.ProUser*'
-
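The Type[U] pattern exercised above boils down to this hypothetical sketch (not a fixture): a bound TypeVar makes the return type follow whichever class object the caller passes in:

from typing import Type, TypeVar

class User:
    pass

class ProUser(User):
    pass

U = TypeVar('U', bound=User)

def new_user(user_class: Type[U]) -> U:
    return user_class()  # the result type tracks the argument, not just User

print(type(new_user(ProUser)).__name__)  # ProUser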
-[case testTypeUsingTypeCTypeVarDefaultInit]
-from typing import Type, TypeVar
-class B:
- pass
-T = TypeVar('T', bound=B)
-def f(A: Type[T]) -> None:
- A()
- A(0) # E: Too many arguments for "B"
-[out]
-
-[case testTypeUsingTypeCTypeVarWithInit]
-from typing import Type, TypeVar
-class B:
- def __init__(self, a: int) -> None: pass
-T = TypeVar('T', bound=B)
-def f(A: Type[T]) -> None:
- A() # E: Too few arguments for "B"
- A(0)
-[out]
-
-[case testTypeUsingTypeCTwoTypeVars]
-from typing import Type, TypeVar
-class User: pass
-class ProUser(User): pass
-class WizUser(ProUser): pass
-U = TypeVar('U', bound=User)
-def new_user(u_c: Type[U]) -> U: pass
-P = TypeVar('P', bound=ProUser)
-def new_pro(pro_c: Type[P]) -> P:
- return new_user(pro_c)
-wiz = new_pro(WizUser)
-reveal_type(wiz)
-def error(u_c: Type[U]) -> P:
- return new_pro(u_c) # Error here, see below
-[out]
-main:11: error: Revealed type is '__main__.WizUser*'
-main:13: error: Incompatible return value type (got "U", expected "P")
-main:13: error: Type argument 1 of "new_pro" has incompatible value "U"
-
-[case testTypeUsingTypeCCovariance]
-from typing import Type, TypeVar
-class User: pass
-class ProUser(User): pass
-def new_user(user_class: Type[User]) -> User:
- return user_class()
-def new_pro_user(user_class: Type[ProUser]):
- new_user(user_class)
-[out]
-
-[case testTypeUsingTypeCErrorCovariance]
-from typing import Type, TypeVar
-class User: pass
-def new_user(user_class: Type[User]):
- return user_class()
-def foo(arg: Type[int]):
- new_user(arg) # E: Argument 1 to "new_user" has incompatible type Type[int]; expected Type[User]
-[out]
-
-[case testTypeUsingTypeCUnionOverload]
-from typing import Type, Union, overload
-class X:
- @overload
- def __init__(self) -> None: pass
- @overload
- def __init__(self, a: int) -> None: pass
-class Y:
- def __init__(self) -> None: pass
-def bar(o: Type[Union[X, Y]]): pass
-bar(X)
-bar(Y)
-[out]
-
-[case testTypeUsingTypeCTypeAny]
-from typing import Type, Any
-def foo(arg: Type[Any]):
- x = arg()
- x = arg(0)
- x = arg('', ())
- reveal_type(x) # E: Revealed type is 'Any'
- x.foo
-class X: pass
-foo(X)
-[out]
-
-[case testTypeUsingTypeCTypeNoArg]
-from typing import Type
-def foo(arg: Type):
- x = arg()
- reveal_type(x) # E: Revealed type is 'Any'
-class X: pass
-foo(X)
-[out]
-
-[case testTypeUsingTypeCBuiltinType]
-from typing import Type
-def foo(arg: type): pass
-class X: pass
-def bar(arg: Type[X]):
- foo(arg)
-foo(X)
-[builtins fixtures/tuple.pyi]
-[out]
-
-[case testTypeUsingTypeCClassMethod]
-from typing import Type
-class User:
- @classmethod
- def foo(cls) -> int: pass
- def bar(self) -> int: pass
-def process(cls: Type[User]):
- reveal_type(cls.foo()) # E: Revealed type is 'builtins.int'
- obj = cls()
- reveal_type(cls.bar(obj)) # E: Revealed type is 'builtins.int'
- cls.mro() # Defined in class type
- cls.error # E: Type[User] has no attribute "error"
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testTypeUsingTypeCClassMethodUnion]
-# Ideally this would work, but not worth the effort; just don't crash
-from typing import Type, Union
-class User:
- @classmethod
- def foo(cls) -> int: pass
- def bar(self) -> int: pass
-class ProUser(User): pass
-class BasicUser(User): pass
-def process(cls: Type[Union[BasicUser, ProUser]]):
- cls.foo() # E: Type[Union[BasicUser, ProUser]] has no attribute "foo"
- obj = cls()
- cls.bar(obj) # E: Type[Union[BasicUser, ProUser]] has no attribute "bar"
- cls.mro() # Defined in class type
- cls.error # E: Type[Union[BasicUser, ProUser]] has no attribute "error"
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testTypeUsingTypeCClassMethodFromTypeVar]
-from typing import Type, TypeVar
-class User:
- @classmethod
- def foo(cls) -> int: pass
- def bar(self) -> int: pass
-U = TypeVar('U', bound=User)
-def process(cls: Type[U]):
- reveal_type(cls.foo()) # E: Revealed type is 'builtins.int'
- obj = cls()
- reveal_type(cls.bar(obj)) # E: Revealed type is 'builtins.int'
- cls.mro() # Defined in class type
- cls.error # E: Type[U] has no attribute "error"
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testTypeUsingTypeCClassMethodFromTypeVarUnionBound]
-# Ideally this would work, but not worth the effort; just don't crash
-from typing import Type, TypeVar, Union
-class User:
- @classmethod
- def foo(cls) -> int: pass
- def bar(self) -> int: pass
-class ProUser(User): pass
-class BasicUser(User): pass
-U = TypeVar('U', bound=Union[ProUser, BasicUser])
-def process(cls: Type[U]):
- cls.foo() # E: Type[U] has no attribute "foo"
- obj = cls()
- cls.bar(obj) # E: Type[U] has no attribute "bar"
- cls.mro() # Defined in class type
- cls.error # E: Type[U] has no attribute "error"
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testTypeUsingTypeCErrorUnsupportedType]
-from typing import Type, Tuple
-def foo(arg: Type[Tuple[int]]): # E: Unsupported type Type["Tuple[int]"]
- arg()
-[builtins fixtures/tuple.pyi]
-[out]
-
-[case testTypeUsingTypeCOverloadedClass]
-from typing import Type, TypeVar, overload
-class User:
- @overload
- def __init__(self) -> None: pass
- @overload
- def __init__(self, arg: int) -> None: pass
- @classmethod
- def foo(cls) -> None: pass
-U = TypeVar('U', bound=User)
-def new(uc: Type[U]) -> U:
- uc.foo()
- u = uc()
- u.foo()
- u = uc(0)
- u.foo()
- u = uc('')
- u.foo(0)
- return uc()
-u = new(User)
-[builtins fixtures/classmethod.pyi]
-[out]
-main:16: error: No overload variant of "User" matches argument types [builtins.str]
-main:17: error: Too many arguments for "foo" of "User"
-
-[case testTypeUsingTypeCInUpperBound]
-from typing import TypeVar, Type
-class B: pass
-T = TypeVar('T', bound=Type[B])
-def f(a: T): pass
-[out]
-
-[case testTypeUsingTypeCTuple]
-from typing import Type, Tuple
-def f(a: Type[Tuple[int, int]]):
- a()
-[out]
-main:2: error: Unsupported type Type["Tuple[int, int]"]
-
-[case testTypeUsingTypeCNamedTuple]
-from typing import Type, NamedTuple
-N = NamedTuple('N', [('x', int), ('y', int)])
-def f(a: Type[N]):
- a()
-[builtins fixtures/list.pyi]
-[out]
-main:3: error: Unsupported type Type["N"]
-
-[case testTypeUsingTypeCJoin]
-from typing import Type
-class B: pass
-class C(B): pass
-class D(B): pass
-def foo(c: Type[C], d: Type[D]) -> None:
- x = [c, d]
- reveal_type(x)
-
-[builtins fixtures/list.pyi]
-[out]
-main:7: error: Revealed type is 'builtins.list[Type[__main__.B]]'
-
-[case testTypeMatchesOverloadedFunctions]
-from typing import Type, overload, Union
-
-class User: pass
-UserType = User # type: Type[User]
-
- at overload
-def f(a: object) -> int: pass
- at overload
-def f(a: int) -> str: pass
-
-reveal_type(f(User)) # E: Revealed type is 'builtins.int'
-reveal_type(f(UserType)) # E: Revealed type is 'builtins.int'
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testTypeMatchesGeneralTypeInOverloadedFunctions]
-from typing import Type, overload
-
-class User: pass
-UserType = User # type: Type[User]
-
- at overload
-def f(a: type) -> int:
- return 1
- at overload
-def f(a: int) -> str:
- return "a"
-
-reveal_type(f(User)) # E: Revealed type is 'builtins.int'
-reveal_type(f(UserType)) # E: Revealed type is 'builtins.int'
-reveal_type(f(1)) # E: Revealed type is 'builtins.str'
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testTypeMatchesSpecificTypeInOverloadedFunctions]
-from typing import Type, overload
-
-class User: pass
-UserType = User # type: Type[User]
-
- at overload
-def f(a: User) -> User:
- return User()
- at overload
-def f(a: Type[User]) -> int:
- return 1
- at overload
-def f(a: int) -> str:
- return "a"
-
-reveal_type(f(User)) # E: Revealed type is 'builtins.int'
-reveal_type(f(UserType)) # E: Revealed type is 'builtins.int'
-reveal_type(f(User())) # E: Revealed type is '__main__.User'
-reveal_type(f(1)) # E: Revealed type is 'builtins.str'
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testMixingTypeTypeInOverloadedFunctions]
-from typing import Type, overload
-
-class User: pass
-
- at overload
-def f(a: User) -> Type[User]:
- return User
- at overload
-def f(a: Type[User]) -> User:
- return a()
- at overload
-def f(a: int) -> Type[User]:
- return User
- at overload
-def f(a: str) -> User:
- return User()
-
-reveal_type(f(User())) # E: Revealed type is 'Type[__main__.User]'
-reveal_type(f(User)) # E: Revealed type is '__main__.User'
-reveal_type(f(3)) # E: Revealed type is 'Type[__main__.User]'
-reveal_type(f("hi")) # E: Revealed type is '__main__.User'
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testGeneralTypeDoesNotMatchSpecificTypeInOverloadedFunctions]
-from typing import Type, overload
-
-class User: pass
-
- at overload
-def f(a: Type[User]) -> None: pass
- at overload
-def f(a: int) -> None: pass
-
-def mock() -> type: return User
-
-f(User)
-f(mock()) # E: No overload variant of "f" matches argument types [builtins.type]
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testNonTypeDoesNotMatchOverloadedFunctions]
-from typing import Type, overload
-
-class User: pass
-
- at overload
-def f(a: Type[User]) -> None: pass
- at overload
-def f(a: type) -> None: pass
-
-f(3) # E: No overload variant of "f" matches argument types [builtins.int]
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testInstancesDoNotMatchTypeInOverloadedFunctions]
-from typing import Type, overload
-
-class User: pass
-
- at overload
-def f(a: Type[User]) -> None: pass
- at overload
-def f(a: int) -> None: pass
-
-f(User)
-f(User()) # E: No overload variant of "f" matches argument types [__main__.User]
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testTypeCovarianceWithOverloadedFunctions]
-from typing import Type, overload
-
-class A: pass
-class B(A): pass
-class C(B): pass
-AType = A # type: Type[A]
-BType = B # type: Type[B]
-CType = C # type: Type[C]
-
- at overload
-def f(a: Type[B]) -> None: pass
- at overload
-def f(a: int) -> None: pass
-
-f(A) # E: No overload variant of "f" matches argument types [def () -> __main__.A]
-f(B)
-f(C)
-f(AType) # E: No overload variant of "f" matches argument types [Type[__main__.A]]
-f(BType)
-f(CType)
-[builtins fixtures/classmethod.pyi]
-[out]
-
-
-[case testOverloadedCovariantTypesFail]
-from typing import Type, overload
-
-class A: pass
-class B(A): pass
-
- at overload
-def f(a: Type[A]) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
- at overload
-def f(a: Type[B]) -> str: pass
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testDistinctOverloadedCovariantTypesSucceed]
-from typing import Type, overload
-
-class A: pass
-class AChild(A): pass
-class B: pass
-class BChild(B): pass
-
- at overload
-def f(a: Type[A]) -> int: pass
- at overload
-def f(a: Type[B]) -> str: pass
- at overload
-def f(a: A) -> A: pass
- at overload
-def f(a: B) -> B: pass
-
-reveal_type(f(A)) # E: Revealed type is 'builtins.int'
-reveal_type(f(AChild)) # E: Revealed type is 'builtins.int'
-reveal_type(f(B)) # E: Revealed type is 'builtins.str'
-reveal_type(f(BChild)) # E: Revealed type is 'builtins.str'
-
-reveal_type(f(A())) # E: Revealed type is '__main__.A'
-reveal_type(f(AChild())) # E: Revealed type is '__main__.A'
-reveal_type(f(B())) # E: Revealed type is '__main__.B'
-reveal_type(f(BChild())) # E: Revealed type is '__main__.B'
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testTypeTypeOverlapsWithObjectAndType]
-from typing import Type, overload
-
-class User: pass
-
- at overload
-def f(a: Type[User]) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
- at overload
-def f(a: object) -> str: pass
-
- at overload
-def g(a: Type[User]) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
- at overload
-def g(a: type) -> str: pass
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testTypeOverlapsWithObject]
-from typing import Type, overload
-
-class User: pass
-
- at overload
-def f(a: type) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
- at overload
-def f(a: object) -> str: pass
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testTypeConstructorReturnsTypeType]
-class User:
- @classmethod
- def test_class_method(cls) -> int: pass
- @staticmethod
- def test_static_method() -> str: pass
- def test_instance_method(self) -> None: pass
-
-u = User()
-
-reveal_type(type(u)) # E: Revealed type is 'Type[__main__.User]'
-reveal_type(type(u).test_class_method()) # E: Revealed type is 'builtins.int'
-reveal_type(type(u).test_static_method()) # E: Revealed type is 'builtins.str'
-type(u).test_instance_method() # E: Too few arguments for "test_instance_method" of "User"
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testObfuscatedTypeConstructorReturnsTypeType]
-from typing import TypeVar
-class User: pass
-
-f1 = type
-
-A = TypeVar('A')
-def f2(func: A) -> A:
- return func
-
-u = User()
-
-reveal_type(f1(u)) # E: Revealed type is 'Type[__main__.User]'
-reveal_type(f2(type)(u)) # E: Revealed type is 'Type[__main__.User]'
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testTypeConstructorLookalikeFails]
-class User: pass
-
-def fake1(a: object) -> type:
- return User
-def fake2(a: int) -> type:
- return User
-
-reveal_type(type(User())) # E: Revealed type is 'Type[__main__.User]'
-reveal_type(fake1(User())) # E: Revealed type is 'builtins.type'
-reveal_type(fake2(3)) # E: Revealed type is 'builtins.type'
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testOtherTypeConstructorsSucceed]
-def foo(self) -> int: return self.attr
-
-User = type('User', (object,), {'foo': foo, 'attr': 3})
-reveal_type(User) # E: Revealed type is 'builtins.type'
-[builtins fixtures/args.pyi]
-[out]
-
-[case testTypeTypeComparisonWorks]
-class User: pass
-
-User == User
-User == type(User())
-type(User()) == User
-type(User()) == type(User())
-
-User != User
-User != type(User())
-type(User()) != User
-type(User()) != type(User())
-
-int == int
-int == type(3)
-type(3) == int
-type(3) == type(3)
-
-int != int
-int != type(3)
-type(3) != int
-type(3) != type(3)
-
-User is User
-User is type(User)
-type(User) is User
-type(User) is type(User)
-
-int is int
-int is type(3)
-type(3) is int
-type(3) is type(3)
-
-int.__eq__(int)
-int.__eq__(3, 4)
-[builtins fixtures/args.pyi]
-[out]
-main:33: error: Too few arguments for "__eq__" of "int"
-main:33: error: Unsupported operand types for == ("int" and "int")
-
-[case testMroSetAfterError]
-class C(str, str):
- foo = 0
- bar = foo
-[out]
-main:1: error: Duplicate base class "str"
-
-[case testCannotDetermineMro]
-class A: pass
-class B(A): pass
-class C(B): pass
-class D(A, B): pass # E: Cannot determine consistent method resolution order (MRO) for "D"
-class E(C, D): pass # E: Cannot determine consistent method resolution order (MRO) for "E"
-
-[case testInconsistentMroLocalRef]
-class A: pass
-class B(object, A): # E: Cannot determine consistent method resolution order (MRO) for "B"
- def readlines(self): pass
- __iter__ = readlines
-
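The MRO cases above reject the same class hierarchies that CPython itself refuses to create; a hypothetical runtime sketch (not a fixture):

class A:
    pass

try:
    # Listing object before its own subclass leaves no consistent C3 linearization.
    class B(object, A):
        pass
except TypeError as exc:
    print(exc)  # Cannot create a consistent method resolution order (MRO) ...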
-[case testDynamicMetaclass]
-# flags: --fast-parser
-class C(metaclass=int()): # E: Dynamic metaclass not supported for 'C'
- pass
-
-[case testVariableSubclass]
-class A:
- a = 1 # type: int
-class B(A):
- a = 1
-[out]
-
-[case testVariableSubclassAssignMismatch]
-class A:
- a = 1 # type: int
-class B(A):
- a = "a"
-[out]
-main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
-
-[case testVariableSubclassAssignment]
-class A:
- a = None # type: int
-class B(A):
- def __init__(self) -> None:
- self.a = "a"
-[out]
-main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testVariableSubclassTypeOverwrite]
-class A:
- a = None # type: int
-class B(A):
- a = None # type: str
-class C(B):
- a = "a"
-[out]
-main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
-
-[case testVariableSubclassTypeOverwriteImplicit]
-class A:
- a = 1
-class B(A):
- a = None # type: str
-[out]
-main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
-
-[case testVariableSuperUsage]
-class A:
- a = [] # type: list
-class B(A):
- a = [1, 2]
-class C(B):
- a = B.a + [3]
-[builtins fixtures/list.pyi]
-[out]
-
-[case testClassAllBases]
-from typing import Union
-class A:
- a = None # type: Union[int, str]
-class B(A):
- a = 1
-class C(B):
- a = "str"
-class D(A):
- a = "str"
-[out]
-main:7: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int")
-
-[case testVariableTypeVar]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]):
- a = None # type: T
-class B(A[int]):
- a = 1
-
-[case testVariableTypeVarInvalid]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]):
- a = None # type: T
-class B(A[int]):
- a = "abc"
-[out]
-main:6: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
-
-[case testVariableTypeVarIndirectly]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]):
- a = None # type: T
-class B(A[int]):
- pass
-class C(B):
- a = "a"
-[out]
-main:8: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
-
-[case testVariableTypeVarList]
-from typing import List, TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]):
- a = None # type: List[T]
- b = None # type: List[T]
-class B(A[int]):
- a = [1]
- b = ['']
-[builtins fixtures/list.pyi]
-[out]
-main:8: error: List item 0 has incompatible type "str"
-
-[case testVariableMethod]
-class A:
- def a(self) -> None: pass
- b = 1
-class B(A):
- a = 1
- def b(self) -> None: pass
-[out]
-main:5: error: Incompatible types in assignment (expression has type "int", base class "A" defined the type as Callable[[A], None])
-main:6: error: Signature of "b" incompatible with supertype "A"
-
-[case testVariableProperty]
-class A:
- @property
- def a(self) -> bool: pass
-class B(A):
- a = None # type: bool
-class C(A):
- a = True
-class D(A):
- a = 1
-[builtins fixtures/property.pyi]
-[out]
-main:9: error: Incompatible types in assignment (expression has type "int", base class "A" defined the type as "bool")
-
-[case testVariableOverwriteAny]
-from typing import Any
-class A:
- a = 1
-class B(A):
- a = 'x' # type: Any
-[out]
-
-[case testInstanceMethodOverwrite]
-class B():
- def n(self, a: int) -> None: pass
-class C(B):
- def m(self, a: int) -> None: pass
- n = m
-[out]
-
-[case testInstanceMethodOverwriteError]
-class B():
- def n(self, a: int) -> None: pass
-class C(B):
- def m(self, a: str) -> None: pass
- n = m
-[out]
-main:5: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "B" defined the type as Callable[[int], None])
-
-[case testInstanceMethodOverwriteTypevar]
-from typing import Generic, TypeVar
-T = TypeVar("T")
-class B(Generic[T]):
- def n(self, a: T) -> None: pass
-class C(B[int]):
- def m(self, a: int) -> None: pass
- n = m
-
-[case testInstanceMethodOverwriteTwice]
-class I:
- def foo(self) -> None: pass
-class A(I):
- def foo(self) -> None: pass
-class B(A):
- def bar(self) -> None: pass
- foo = bar
-class C(B):
- def bar(self) -> None: pass
- foo = bar
-
-[case testClassMethodOverwrite]
-class B():
- @classmethod
- def n(self, a: int) -> None: pass
-class C(B):
- @classmethod
- def m(self, a: int) -> None: pass
- n = m
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testClassMethodOverwriteError]
-class B():
- @classmethod
- def n(self, a: int) -> None: pass
-class C(B):
- @classmethod
- def m(self, a: str) -> None: pass
- n = m
-[builtins fixtures/classmethod.pyi]
-[out]
-main:7: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "B" defined the type as Callable[[int], None])
-
-[case testClassSpec]
-from typing import Callable
-class A():
- b = None # type: Callable[[A, int], int]
-class B(A):
- def c(self, a: int) -> int: pass
- b = c
-
-[case testClassSpecError]
-from typing import Callable
-class A():
- b = None # type: Callable[[A, int], int]
-class B(A):
- def c(self, a: str) -> int: pass
- b = c
-[out]
-main:6: error: Incompatible types in assignment (expression has type Callable[[str], int], base class "A" defined the type as Callable[[int], int])
-
-[case testClassStaticMethod]
-class A():
- @staticmethod
- def a(a: int) -> None: pass
-class B(A):
- @staticmethod
- def b(a: str) -> None: pass
- a = b
-[builtins fixtures/staticmethod.pyi]
-[out]
-main:7: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "A" defined the type as Callable[[int], None])
-
-[case testClassStaticMethodIndirect]
-class A():
- @staticmethod
- def a(a: int) -> None: pass
- c = a
-class B(A):
- @staticmethod
- def b(a: str) -> None: pass
- c = b
-[builtins fixtures/staticmethod.pyi]
-[out]
-main:8: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "A" defined the type as Callable[[int], None])
-
-[case testTempNode]
-class A():
- def a(self) -> None: pass
-class B(A):
- def b(self) -> None: pass
- a = c = b
-
-[case testListObject]
-from typing import List
-class A:
- x = [] # type: List[object]
-class B(A):
- x = [1]
-[builtins fixtures/list.pyi]
-
-[case testClassMemberObject]
-class A:
- x = object()
-class B(A):
- x = 1
-class C(B):
- x = ''
-[out]
-main:6: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int")
-
-[case testSlots]
-class A:
- __slots__ = ("a")
-class B(A):
- __slots__ = ("a", "b")
-
-[case testClassOrderOfError]
-class A:
- x = 1
-class B(A):
- x = "a"
-class C(B):
- x = object()
-[out]
-main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
-main:6: error: Incompatible types in assignment (expression has type "object", base class "B" defined the type as "str")
-
-[case testClassOneErrorPerLine]
-class A:
- x = 1
-class B(A):
- x = ""
- x = 1.0
-[out]
-main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
-main:5: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
-
-[case testClassIgnoreType]
-class A:
- x = 0
-class B(A):
- x = '' # type: ignore
-class C(B):
- x = ''
-[out]
diff --git a/test-data/unit/check-columns.test b/test-data/unit/check-columns.test
deleted file mode 100644
index 7e21ca0..0000000
--- a/test-data/unit/check-columns.test
+++ /dev/null
@@ -1,68 +0,0 @@
-[case testColumnsSyntaxError]
-# flags: --show-column-numbers
-1 +
-[out]
-main:2:4: error: invalid syntax
-
-
-[case testColumnsNestedFunctions]
-# flags: --show-column-numbers
-import typing
-def f() -> 'A':
- def g() -> 'B':
- return A() # fail
- return B() # fail
-class A: pass
-class B: pass
-[out]
-main:5:8: error: Incompatible return value type (got "A", expected "B")
-main:6:4: error: Incompatible return value type (got "B", expected "A")
-
-[case testColumnsNestedFunctionsWithFastParse]
-# flags: --show-column-numbers --fast-parser
-import typing
-def f() -> 'A':
- def g() -> 'B':
- return A() # fail
- return B() # fail
-class A: pass
-class B: pass
-[out]
-main:5:8: error: Incompatible return value type (got "A", expected "B")
-main:6:4: error: Incompatible return value type (got "B", expected "A")
-
-
-[case testColumnsMethodDefaultArgumentsAndSignatureAsComment]
-# flags: --show-column-numbers
-import typing
-class A:
- def f(self, x = 1, y = 'hello'): # type: (int, str) -> str
- pass
-A().f()
-A().f(1)
-A().f('') # E:0: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-A().f(1, 1) # E:0: Argument 2 to "f" of "A" has incompatible type "int"; expected "str"
-A().f(1, 'hello', 'hi') # E:0: Too many arguments for "f" of "A"
-
-[case testColumnsMultipleStatementsPerLine]
-# flags: --show-column-numbers
-x = 1
-y = 'hello'
-x = 2; y = x; y += 1
-[out]
-main:4:7: error: Incompatible types in assignment (expression has type "int", variable has type "str")
-main:4:14: error: Unsupported operand types for + ("str" and "int")
-
-[case testColumnsSimpleIsinstance]
-# flags: --show-column-numbers
-import typing
-def f(x: object, n: int, s: str) -> None:
- n = x # E:4: Incompatible types in assignment (expression has type "object", variable has type "int")
- if isinstance(x, int):
- n = x
- s = x # E:8: Incompatible types in assignment (expression has type "int", variable has type "str")
- n = x # E:4: Incompatible types in assignment (expression has type "object", variable has type "int")
-[builtins fixtures/isinstance.pyi]
-[out]
-
-
diff --git a/test-data/unit/check-dynamic-typing.test b/test-data/unit/check-dynamic-typing.test
deleted file mode 100644
index 27f8bee..0000000
--- a/test-data/unit/check-dynamic-typing.test
+++ /dev/null
@@ -1,676 +0,0 @@
--- Assignment
--- ----------
-
-
-[case testAssignmentWithDynamic]
-from typing import Any
-d = None # type: Any
-a = None # type: A
-
-a = d # Everything ok
-d = a
-d = d
-d.x = a
-d.x = d
-
-class A: pass
-
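The Any cases in this file depend on Any being assignment-compatible in both directions; a hypothetical sketch (not a fixture):

from typing import Any

d = "3"  # type: Any
# Any is assignable to every type and every type is assignable to Any,
# so neither of the next two assignments is reported.
n = d    # type: int
d = [1, 2, 3]
print(n)  # prints 3; the annotation does not change the runtime value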
-[case testMultipleAssignmentWithDynamic]
-from typing import Any
-d = None # type: Any
-a, b = None, None # type: (A, B)
-
-d, a = b, b # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-d, d = d, d, d # E: Too many values to unpack (2 expected, 3 provided)
-
-a, b = d, d
-d, d = a, b
-a, b = d
-s, t = d
-
-class A: pass
-class B: pass
-
-
--- Expressions
--- -----------
-
-
-[case testCallingFunctionWithDynamicArgumentTypes]
-from typing import Any
-a, b = None, None # type: (A, B)
-
-b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-a = f(a)
-a = f(b)
-a = f(None)
-a = f(f)
-
-def f(x: Any) -> 'A':
- pass
-
-class A: pass
-class B: pass
-
-[case testCallingWithDynamicReturnType]
-from typing import Any
-a, b = None, None # type: (A, B)
-
-a = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
-
-a = f(a)
-b = f(a)
-
-def f(x: 'A') -> Any:
- pass
-
-class A: pass
-class B: pass
-
-[case testBinaryOperationsWithDynamicLeftOperand]
-from typing import Any
-d = None # type: Any
-a = None # type: A
-c = None # type: C
-b = None # type: bool
-n = 0
-
-d in a # E: Unsupported right operand type for in ("A")
-d and a
-d or a
-c = d and b # Unintuitive type inference?
-c = d or b # Unintuitive type inference?
-
-c = d + a
-c = d - a
-c = d * a
-c = d / a
-c = d // a
-c = d % a
-c = d ** a
-b = d == a
-b = d != a
-b = d < a
-b = d <= a
-b = d > a
-b = d >= a
-b = d in c
-b = d and b
-b = d or b
-
-class A: pass
-class C:
- def __contains__(self, a: A) -> bool:
- pass
-[file builtins.py]
-class object:
- def __init__(self): pass
-class bool: pass
-class int: pass
-class type: pass
-class function: pass
-class str: pass
-
-[case testBinaryOperationsWithDynamicAsRightOperand]
-from typing import Any
-d = None # type: Any
-a = None # type: A
-c = None # type: C
-b = None # type: bool
-n = 0
-
-a and d
-a or d
-c = a in d
-c = b and d # Unintuitive type inference?
-c = b or d # Unintuitive type inference?
-b = a + d
-b = a / d
-
-c = a + d
-c = a - d
-c = a * d
-c = a / d
-c = a // d
-c = a % d
-c = a ** d
-b = a in d
-b = b and d
-b = b or d
-
-class A:
- def __add__(self, a: 'A') -> 'C':
- pass
- def __sub__(self, a: 'A') -> 'C':
- pass
- def __mul__(self, a: 'A') -> 'C':
- pass
- def __truediv__(self, a: 'A') -> 'C':
- pass
- def __floordiv__(self, a: 'A') -> 'C':
- pass
- def __mod__(self, a: 'A') -> 'C':
- pass
- def __pow__(self, a: 'A') -> 'C':
- pass
- def _lt(self, a: 'A') -> bool:
- pass
- def _gt(self, a: 'A') -> bool:
- pass
-
-class C: pass
-[file builtins.py]
-class object:
- def __init__(self): pass
-class bool: pass
-class int: pass
-class type: pass
-class function: pass
-class str: pass
-
-[case testDynamicWithUnaryExpressions]
-from typing import Any
-d = None # type: Any
-a = None # type: A
-b = None # type: bool
-a = not d # E: Incompatible types in assignment (expression has type "bool", variable has type "A")
-b = not d
-a = -d
-class A: pass
-[builtins fixtures/bool.pyi]
-[out]
-
-[case testDynamicWithMemberAccess]
-from typing import Any
-d = None # type: Any
-a = None # type: A
-
-a = d.foo(a()) # E: "A" not callable
-
-a = d.x
-a = d.foo(a, a)
-d.x = a
-d.x.y.z # E: "A" has no attribute "y"
-
-class A: pass
-[out]
-
-[case testIndexingWithDynamic]
-from typing import Any
-d = None # type: Any
-a = None # type: A
-
-a = d[a()] # E: "A" not callable
-d[a()] = a # E: "A" not callable
-
-a = d[a]
-d[a] = a
-d[a], d[a] = a, a
-
-class A: pass
-
-[case testTupleExpressionsWithDynamic]
-from typing import Tuple, Any
-t2 = None # type: Tuple[A, A]
-d = None # type: Any
-
-t2 = (d, d, d) # E: Incompatible types in assignment (expression has type "Tuple[Any, Any, Any]", variable has type "Tuple[A, A]")
-t2 = (d, d)
-
-class A: pass
-[builtins fixtures/tuple.pyi]
-
-[case testCastsWithDynamicType]
-from typing import Any, cast
-class A: pass
-class B: pass
-d = None # type: Any
-a = None # type: A
-b = None # type: B
-b = cast(A, d) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = cast(A, d)
-b = cast(Any, d)
-a = cast(Any, f())
-def f() -> None: pass
-
-[case testCompatibilityOfDynamicWithOtherTypes]
-from typing import Any, Tuple
-d = None # type: Any
-t = None # type: Tuple[A, A]
-# TODO: callable types, overloaded functions
-
-d = None # All ok
-d = t
-d = g
-d = A
-t = d
-f = d
-
-def g(a: 'A') -> None:
- pass
-
-class A: pass
-class B: pass
-[builtins fixtures/tuple.pyi]
-
-
--- Statements
--- ----------
-
-
-[case testDynamicCondition]
-from typing import Any
-d = None # type: Any
-while d:
- pass
-if d:
- pass
-elif d:
- pass
-[builtins fixtures/bool.pyi]
-
-[case testRaiseWithDynamic]
-from typing import Any
-d = None # type: Any
-raise d
-[builtins fixtures/exception.pyi]
-
-[case testReturnWithDynamic]
-from typing import Any
-d = None # type: Any
-
-def f() -> None:
- return d # Ok
-
-def g() -> 'A':
- return d # Ok
-
-class A: pass
-
-
--- Implicit dynamic types for functions
--- ------------------------------------
-
-
-[case testImplicitGlobalFunctionSignature]
-from typing import Any, Callable
-x = None # type: Any
-a = None # type: A
-g = None # type: Callable[[], None]
-h = None # type: Callable[[A], None]
-
-f() # E: Too few arguments for "f"
-f(x, x) # E: Too many arguments for "f"
-g = f # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[], None])
-f(a)
-f(x)
-a = f(a)
-h = f
-
-def f(x): pass
-
-class A: pass
-
-[case testImplicitGlobalFunctionSignatureWithDifferentArgCounts]
-from typing import Callable
-g0 = None # type: Callable[[], None]
-g1 = None # type: Callable[[A], None]
-g2 = None # type: Callable[[A, A], None]
-a = None # type: A
-
-g1 = f0 # E: Incompatible types in assignment (expression has type Callable[[], Any], variable has type Callable[[A], None])
-g2 = f0 # E: Incompatible types in assignment (expression has type Callable[[], Any], variable has type Callable[[A, A], None])
-g0 = f2 # E: Incompatible types in assignment (expression has type Callable[[Any, Any], Any], variable has type Callable[[], None])
-g1 = f2 # E: Incompatible types in assignment (expression has type Callable[[Any, Any], Any], variable has type Callable[[A], None])
-
-g0 = g0
-g2 = f2
-f0()
-f2(a, a)
-
-def f0(): pass
-
-def f2(x, y): pass
-
-class A: pass
-
-[case testImplicitGlobalFunctionSignatureWithDefaultArgs]
-from typing import Callable
-a, b = None, None # type: (A, B)
-
-g0 = None # type: Callable[[], None]
-g1 = None # type: Callable[[A], None]
-g2 = None # type: Callable[[A, A], None]
-g3 = None # type: Callable[[A, A, A], None]
-g4 = None # type: Callable[[A, A, A, A], None]
-
-f01(a, a) # Fail
-f13() # Fail
-f13(a, a, a, a) # Fail
-g2 = f01 # Fail
-g0 = f13 # Fail
-g4 = f13 # Fail
-
-f01()
-f01(a)
-f13(a)
-f13(a, a)
-f13(a, a, a)
-
-g0 = f01
-g1 = f01
-g1 = f13
-g2 = f13
-g3 = f13
-
-def f01(x = b): pass
-def f13(x, y = b, z = b): pass
-
-class A: pass
-class B: pass
-[out]
-main:10: error: Too many arguments for "f01"
-main:11: error: Too few arguments for "f13"
-main:12: error: Too many arguments for "f13"
-main:13: error: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[A, A], None])
-main:14: error: Incompatible types in assignment (expression has type Callable[[Any, Any, Any], Any], variable has type Callable[[], None])
-main:15: error: Incompatible types in assignment (expression has type Callable[[Any, Any, Any], Any], variable has type Callable[[A, A, A, A], None])
-
-[case testSkipTypeCheckingWithImplicitSignature]
-
-a = None # type: A
-def f():
- a()
-def g(x):
- a()
- a.x
- a + a
- if a():
- a()
-class A: pass
-[builtins fixtures/bool.pyi]
-
-[case testSkipTypeCheckingWithImplicitSignatureAndDefaultArgs]
-
-a = None # type: A
-def f(x=a()):
- a()
-def g(x, y=a, z=a()):
- a()
-class A: pass
-
-[case testImplicitMethodSignature]
-from typing import Callable
-g0 = None # type: Callable[[], None]
-g1 = None # type: Callable[[A], None]
-g2 = None # type: Callable[[A, A], None]
-a = None # type: A
-
-g0 = a.f # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[], None])
-g2 = a.f # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[A, A], None])
-a = a.f # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type "A")
-
-class A:
- def g(self) -> None:
- a = self.f(a)
- def f(self, x): pass
-
-g1 = a.f
-a = a.f(a)
-
-[case testSkipTypeCheckingImplicitMethod]
-
-a = None # type: A
-class A:
- def f(self):
- a()
- def g(self, x, y=a()):
- a()
-
-[case testImplicitInheritedMethod]
-from typing import Callable
-g0 = None # type: Callable[[], None]
-g1 = None # type: Callable[[A], None]
-a = None # type: A
-
-g0 = a.f # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[], None])
-
-g1 = a.f
-a = a.f(a)
-
-class B:
- def f(self, x):
- pass
-class A(B):
- def g(self) -> None:
- a = self.f(a)
-
-[case testEmptyReturnWithImplicitSignature]
-import typing
-def f():
- return
-class A:
- def g(self):
- return
-
-[case testVarArgsWithImplicitSignature]
-from typing import Any
-o = None # type: Any
-def f(x, *a): pass
-f() # E: Too few arguments for "f"
-f(o)
-f(o, o)
-f(o, o, o)
-[builtins fixtures/list.pyi]
-
-
--- Implicit types for constructors
--- -------------------------------
-
-
-[case testInitMethodWithImplicitSignature]
-from typing import Callable
-f1 = None # type: Callable[[A], A]
-f2 = None # type: Callable[[A, A], A]
-a = None # type: A
-
-A(a) # Fail
-f1 = A # Fail
-
-A(a, a)
-f2 = A
-
-class A:
- def __init__(self, a, b): pass
-[out]
-main:6: error: Too few arguments for "A"
-main:7: error: Incompatible types in assignment (expression has type "A", variable has type Callable[[A], A])
-
-[case testUsingImplicitTypeObjectWithIs]
-
-t = None # type: type
-t = A
-t = B
-
-class A: pass
-class B:
- def __init__(self): pass
-
-
--- Type compatibility
--- ------------------
-
-
-[case testTupleTypeCompatibility]
-from typing import Any, Tuple
-t1 = None # type: Tuple[Any, A]
-t2 = None # type: Tuple[A, Any]
-t3 = None # type: Tuple[Any, Any]
-t4 = None # type: Tuple[A, A]
-t5 = None # type: Tuple[Any, Any, Any]
-
-t3 = t5 # E: Incompatible types in assignment (expression has type "Tuple[Any, Any, Any]", variable has type "Tuple[Any, Any]")
-t5 = t4 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[Any, Any, Any]")
-
-t1 = t1
-t1 = t2
-t1 = t3
-t1 = t4
-t2 = t1
-t2 = t3
-t2 = t4
-t3 = t1
-t3 = t2
-t3 = t4
-t4 = t1
-t4 = t2
-t4 = t3
-
-class A: pass
-[builtins fixtures/tuple.pyi]
-
-[case testFunctionTypeCompatibilityAndReturnTypes]
-from typing import Any, Callable
-f1 = None # type: Callable[[], Any]
-f11 = None # type: Callable[[], Any]
-f2 = None # type: Callable[[], A]
-f3 = None # type: Callable[[], None]
-
-f2 = f3 # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type Callable[[], A])
-
-f1 = f2
-f1 = f3
-f2 = f11
-f3 = f11
-
-class A: pass
-
-[case testFunctionTypeCompatibilityAndArgumentTypes]
-from typing import Any, Callable
-f1 = None # type: Callable[[A, Any], None]
-f2 = None # type: Callable[[Any, A], None]
-f3 = None # type: Callable[[A, A], None]
-
-f1 = f1
-f1 = f2
-f1 = f3
-
-f2 = f1
-f2 = f2
-f2 = f3
-
-f3 = f1
-f3 = f2
-f3 = f3
-
-class A: pass
-
-[case testFunctionTypeCompatibilityAndArgumentCounts]
-from typing import Any, Callable
-f1 = None # type: Callable[[Any], None]
-f2 = None # type: Callable[[Any, Any], None]
-
-f1 = f2 # E: Incompatible types in assignment (expression has type Callable[[Any, Any], None], variable has type Callable[[Any], None])
-
-
--- Overriding
--- ----------
-
-
-[case testOverridingMethodWithDynamicTypes]
-from typing import Any
-a, b = None, None # type: (A, B)
-
-b.f(b) # E: Argument 1 to "f" of "B" has incompatible type "B"; expected "A"
-a = a.f(b)
-
-class B:
- def f(self, x: 'A') -> 'B':
- pass
- def g(self, x: 'B') -> None:
- pass
-class A(B):
- def f(self, x: Any) -> Any:
- pass
- def g(self, x: Any) -> None:
- pass
-
-[case testOverridingMethodWithImplicitDynamicTypes]
-
-a, b = None, None # type: (A, B)
-
-b.f(b) # E: Argument 1 to "f" of "B" has incompatible type "B"; expected "A"
-a = a.f(b)
-
-class B:
- def f(self, x: 'A') -> 'B':
- pass
- def g(self, x: 'B') -> None:
- pass
-class A(B):
- def f(self, x):
- pass
- def g(self, x):
- pass
-
-[case testOverridingMethodAcrossHierarchy]
-import typing
-class C:
- def f(self, a: 'A') -> None: pass
-class B(C):
- def f(self, a): pass
-class A(B):
- def f(self, a: 'D') -> None: # E: Argument 1 of "f" incompatible with supertype "C"
- pass
-class D: pass
-[out]
-
-[case testInvalidOverrideArgumentCountWithImplicitSignature1]
-import typing
-class B:
- def f(self, x: A) -> None: pass
-class A(B):
- def f(self, x, y): # dynamic function not type checked
- x()
-[out]
-
-[case testInvalidOverrideArgumentCountWithImplicitSignature2]
-import typing
-class B:
- def f(self, x, y): pass
-class A(B):
- def f(self, x: 'A') -> None: # E: Signature of "f" incompatible with supertype "B"
- pass
-[out]
-
-[case testInvalidOverrideArgumentCountWithImplicitSignature3]
-import typing
-class B:
- def f(self, x: A) -> None: pass
-class A(B):
- def f(self, x, y) -> None: # E: Signature of "f" incompatible with supertype "B"
- x()
-[out]
-
-
--- Don't complain about too few/many arguments in dynamic functions
--- ----------------------------------------------------------------
-
-[case testTooManyArgsInDynamic]
-def f() -> None: pass
-def g():
- f(1) # Silent
-[out]
-
-[case testTooFewArgsInDynamic]
-def f(a: int) -> None: pass
-def g():
- f() # Silent
-[out]
-
-[case testJustRightInDynamic]
-def f(a: int) -> None: pass
-def g():
- f('') # Silent
-[out]
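
These three cases rely on mypy skipping type checking inside unannotated ("dynamic") function bodies: with default flags (no --check-untyped-defs) the bad calls are only reported from annotated code. A minimal sketch with hypothetical names:

def helper(a: int) -> None:
    pass

def untyped_caller():
    helper()       # not reported: this function has no annotations, so its body is skipped

def typed_caller() -> None:
    helper()       # reported: Too few arguments for "helper"
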
diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test
deleted file mode 100644
index 8d22174..0000000
--- a/test-data/unit/check-expressions.test
+++ /dev/null
@@ -1,1652 +0,0 @@
--- Test cases for simple expressions.
---
--- See also:
--- * check-functions.test contains test cases for calls.
--- * check-varargs.test contains test cases for *args.
--- * check-dynamic.test contains test cases related to 'Any' type.
--- * check-generics.test contains test cases for generic values.
-
-
--- None expression
--- ---------------
-
-
-[case testNoneAsRvalue]
-import typing
-a = None # type: A
-class A: pass
-[out]
-
-[case testNoneAsArgument]
-import typing
-def f(x: 'A', y: 'B') -> None: pass
-f(None, None)
-class A: pass
-class B(A): pass
-[out]
-
-
--- Simple expressions
--- ------------------
-
-
-[case testIntLiteral]
-a = 0
-b = None # type: A
-b = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "A")
-a = 1
-class A:
- pass
-
-[case testStrLiteral]
-a = ''
-b = None # type: A
-b = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "A")
-a = 'x'
-a = r"x"
-a = """foo"""
-class A:
- pass
-
-[case testFloatLiteral]
-a = 0.0
-b = None # type: A
-b = 1.1 # E: Incompatible types in assignment (expression has type "float", variable has type "A")
-a = 1.1
-class A:
- pass
-[file builtins.py]
-class object:
- def __init__(self): pass
-class type: pass
-class function: pass
-class float: pass
-class str: pass
-
-[case testComplexLiteral]
-a = 0.0j
-b = None # type: A
-b = 1.1j # E: Incompatible types in assignment (expression has type "complex", variable has type "A")
-a = 1.1j
-class A:
- pass
-[file builtins.py]
-class object:
- def __init__(self): pass
-class type: pass
-class function: pass
-class complex: pass
-class str: pass
-
-[case testBytesLiteral]
-b, a = None, None # type: (bytes, A)
-b = b'foo'
-b = br"foo"
-b = b'''foo'''
-a = b'foo' # E: Incompatible types in assignment (expression has type "bytes", variable has type "A")
-class A: pass
-[file builtins.py]
-class object:
- def __init__(self): pass
-class type: pass
-class tuple: pass
-class function: pass
-class bytes: pass
-class str: pass
-
-[case testUnicodeLiteralInPython3]
-s = None # type: str
-s = u'foo'
-b = None # type: bytes
-b = u'foo' # E: Incompatible types in assignment (expression has type "str", variable has type "bytes")
-[builtins fixtures/primitives.pyi]
-
-
--- Binary operators
--- ----------------
-
-
-[case testAdd]
-
-a, b, c = None, None, None # type: (A, B, C)
-c = a + c # Fail
-a = a + b # Fail
-c = b + a # Fail
-c = a + b
-
-class A:
- def __add__(self, x: 'B') -> 'C':
- pass
-class B:
- pass
-class C:
- pass
-[out]
-main:3: error: Unsupported operand types for + ("A" and "C")
-main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
-main:5: error: Unsupported left operand type for + ("B")
-
-[case testSub]
-
-a, b, c = None, None, None # type: (A, B, C)
-c = a - c # Fail
-a = a - b # Fail
-c = b - a # Fail
-c = a - b
-
-class A:
- def __sub__(self, x: 'B') -> 'C':
- pass
-class B:
- pass
-class C:
- pass
-[out]
-main:3: error: Unsupported operand types for - ("A" and "C")
-main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
-main:5: error: Unsupported left operand type for - ("B")
-
-[case testMul]
-
-a, b, c = None, None, None # type: (A, B, C)
-c = a * c # Fail
-a = a * b # Fail
-c = b * a # Fail
-c = a * b
-
-class A:
- def __mul__(self, x: 'B') -> 'C':
- pass
-class B:
- pass
-class C:
- pass
-[out]
-main:3: error: Unsupported operand types for * ("A" and "C")
-main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
-main:5: error: Unsupported left operand type for * ("B")
-
-[case testMatMul]
-a, b, c = None, None, None # type: (A, B, C)
-c = a @ c # E: Unsupported operand types for @ ("A" and "C")
-a = a @ b # E: Incompatible types in assignment (expression has type "C", variable has type "A")
-c = b @ a # E: Unsupported left operand type for @ ("B")
-c = a @ b
-
-class A:
- def __matmul__(self, x: 'B') -> 'C':
- pass
-class B:
- pass
-class C:
- pass
-
-[case testDiv]
-
-a, b, c = None, None, None # type: (A, B, C)
-c = a / c # Fail
-a = a / b # Fail
-c = b / a # Fail
-c = a / b
-
-class A:
- def __truediv__(self, x: 'B') -> 'C':
- pass
-class B:
- pass
-class C:
- pass
-[out]
-main:3: error: Unsupported operand types for / ("A" and "C")
-main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
-main:5: error: Unsupported left operand type for / ("B")
-
-[case testIntDiv]
-
-a, b, c = None, None, None # type: (A, B, C)
-c = a // c # Fail
-a = a // b # Fail
-c = b // a # Fail
-c = a // b
-
-class A:
- def __floordiv__(self, x: 'B') -> 'C':
- pass
-class B:
- pass
-class C:
- pass
-[out]
-main:3: error: Unsupported operand types for // ("A" and "C")
-main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
-main:5: error: Unsupported left operand type for // ("B")
-
-[case testMod]
-
-a, b, c = None, None, None # type: (A, B, C)
-c = a % c # Fail
-a = a % b # Fail
-c = b % a # Fail
-c = a % b
-
-class A:
- def __mod__(self, x: 'B') -> 'C':
- pass
-class B:
- pass
-class C:
- pass
-[out]
-main:3: error: Unsupported operand types for % ("A" and "C")
-main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
-main:5: error: Unsupported left operand type for % ("B")
-
-[case testPow]
-
-a, b, c = None, None, None # type: (A, B, C)
-c = a ** c # Fail
-a = a ** b # Fail
-c = b ** a # Fail
-c = a ** b
-
-class A:
- def __pow__(self, x: 'B') -> 'C':
- pass
-class B:
- pass
-class C:
- pass
-[out]
-main:3: error: Unsupported operand types for ** ("A" and "C")
-main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
-main:5: error: Unsupported left operand type for ** ("B")
-
-[case testMiscBinaryOperators]
-
-a, b = None, None # type: (A, B)
-b = a & a # Fail
-b = a | b # Fail
-b = a ^ a # Fail
-b = a << b # Fail
-b = a >> a # Fail
-
-b = a & b
-b = a | a
-b = a ^ b
-b = a << a
-b = a >> b
-class A:
- def __and__(self, x: 'B') -> 'B': pass
- def __or__(self, x: 'A') -> 'B': pass
- def __xor__(self, x: 'B') -> 'B': pass
- def __lshift__(self, x: 'A') -> 'B': pass
- def __rshift__(self, x: 'B') -> 'B': pass
-class B: pass
-[out]
-main:3: error: Unsupported operand types for & ("A" and "A")
-main:4: error: Unsupported operand types for | ("A" and "B")
-main:5: error: Unsupported operand types for ^ ("A" and "A")
-main:6: error: Unsupported operand types for << ("A" and "B")
-main:7: error: Unsupported operand types for >> ("A" and "A")
-
-[case testBooleanAndOr]
-
-a, b = None, None # type: (A, bool)
-b = b and b
-b = b or b
-b = b and a # E: Incompatible types in assignment (expression has type "Union[bool, A]", variable has type "bool")
-b = a and b # E: Incompatible types in assignment (expression has type "Union[A, bool]", variable has type "bool")
-b = b or a # E: Incompatible types in assignment (expression has type "Union[bool, A]", variable has type "bool")
-b = a or b # E: Incompatible types in assignment (expression has type "Union[A, bool]", variable has type "bool")
-class A: pass
-
-[builtins fixtures/bool.pyi]
-
-[case testRestrictedTypeAnd]
-
-b = None # type: bool
-i = None # type: str
-j = not b and i
-if j:
- reveal_type(j) # E: Revealed type is 'builtins.str'
-[builtins fixtures/bool.pyi]
-
-[case testRestrictedTypeOr]
-
-b = None # type: bool
-i = None # type: str
-j = b or i
-if not j:
- reveal_type(j) # E: Revealed type is 'builtins.str'
-[builtins fixtures/bool.pyi]
-
-[case testAndOr]
-
-s = ""
-b = bool()
-reveal_type(s and b or b) # E: Revealed type is 'builtins.bool'
-[builtins fixtures/bool.pyi]
-
-[case testNonBooleanOr]
-
-c, d, b = None, None, None # type: (C, D, bool)
-c = c or c
-c = c or d
-c = d or c
-b = c or c # E: Incompatible types in assignment (expression has type "C", variable has type "bool")
-d = c or d # E: Incompatible types in assignment (expression has type "C", variable has type "D")
-d = d or c # E: Incompatible types in assignment (expression has type "C", variable has type "D")
-class C: pass
-class D(C): pass
-[builtins fixtures/bool.pyi]
-
-[case testInOperator]
-from typing import Iterator, Iterable, Any
-a, b, c, d, e = None, None, None, None, None # type: (A, B, bool, D, Any)
-c = c in a # Fail
-a = b in a # Fail
-c = a in b # Fail
-c = b in d # Fail
-c = b in a
-c = a in d
-c = e in d
-c = a in e
-
-class A:
- def __contains__(self, x: 'B') -> bool: pass
-class B: pass
-class D(Iterable[A]):
- def __iter__(self) -> Iterator[A]: pass
-[builtins fixtures/bool.pyi]
-[out]
-main:3: error: Unsupported operand types for in ("bool" and "A")
-main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
-main:5: error: Unsupported right operand type for in ("B")
-main:6: error: Unsupported operand types for in ("B" and "D")
-
-[case testNotInOperator]
-from typing import Iterator, Iterable, Any
-a, b, c, d, e = None, None, None, None, None # type: (A, B, bool, D, Any)
-c = c not in a # Fail
-a = b not in a # Fail
-c = a not in b # Fail
-c = b not in d # Fail
-c = b not in a
-c = a not in d
-c = e in d
-c = a in e
-
-class A:
- def __contains__(self, x: 'B') -> bool: pass
-class B: pass
-class D(Iterable[A]):
- def __iter__(self) -> Iterator[A]: pass
-[builtins fixtures/bool.pyi]
-[out]
-main:3: error: Unsupported operand types for in ("bool" and "A")
-main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
-main:5: error: Unsupported right operand type for in ("B")
-main:6: error: Unsupported operand types for in ("B" and "D")
-
-[case testNonBooleanContainsReturnValue]
-
-a, b = None, None # type: (A, bool)
-b = a not in a
-b = a in a
-
-class A:
- def __contains__(self, x: 'A') -> object: pass
-[builtins fixtures/bool.pyi]
-[out]
-main:4: error: Incompatible types in assignment (expression has type "object", variable has type "bool")
-
-[case testEq]
-
-a, b = None, None # type: (A, bool)
-a = a == b # Fail
-a = a != b # Fail
-b = a == b
-b = a != b
-
-class A:
- def __eq__(self, o: object) -> bool: pass
- def __ne__(self, o: object) -> bool: pass
-[builtins fixtures/bool.pyi]
-[out]
-main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
-main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
-
-[case testLtAndGt]
-
-a, b, bo = None, None, None # type: (A, B, bool)
-a = a < b # Fail
-a = a > b # Fail
-bo = a < b
-bo = a > b
-
-class A:
- def __lt__(self, o: 'B') -> bool: pass
- def __gt__(self, o: 'B') -> bool: pass
-class B:
- def __lt__(self, o: 'B') -> bool: pass
- def __gt__(self, o: 'B') -> bool: pass
-[builtins fixtures/bool.pyi]
-[out]
-main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
-main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
-
-[case testCmp_python2]
-
-a, b, c, bo = None, None, None, None # type: (A, B, C, bool)
-bo = a == a # E: Unsupported operand types for == ("A" and "A")
-bo = a != a # E: Argument 1 to "__cmp__" of "A" has incompatible type "A"; expected "B"
-bo = a < b
-bo = a > b
-bo = b <= b
-bo = b <= c
-bo = b >= c # E: Argument 1 to "__cmp__" of "B" has incompatible type "C"; expected "B"
-bo = a >= b
-bo = c >= b
-bo = c <= b # E: Argument 1 to "__cmp__" of "C" has incompatible type "B"; expected "A"
-bo = a == c
-bo = b == c # E: Unsupported operand types for == ("C" and "B")
-
-class A:
- def __cmp__(self, o):
- # type: ('B') -> bool
- pass
- def __eq__(self, o):
- # type: ('int') -> bool
- pass
-class B:
- def __cmp__(self, o):
- # type: ('B') -> bool
- pass
- def __le__(self, o):
- # type: ('C') -> bool
- pass
-class C:
- def __cmp__(self, o):
- # type: ('A') -> bool
- pass
- def __eq__(self, o):
- # type: ('int') -> bool
- pass
-
-[builtins_py2 fixtures/bool.pyi]
-
-[case cmpIgnoredPy3]
-
-a, b, bo = None, None, None # type: (A, B, bool)
-bo = a <= b # E: Unsupported left operand type for <= ("A")
-
-class A:
- def __cmp__(self, o: 'B') -> bool: pass
-class B:
- pass
-
-[builtins fixtures/bool.pyi]
-
-[case testLeAndGe]
-
-a, b, bo = None, None, None # type: (A, B, bool)
-a = a <= b # Fail
-a = a >= b # Fail
-bo = a <= b
-bo = a >= b
-
-class A:
- def __le__(self, o: 'B') -> bool: pass
- def __ge__(self, o: 'B') -> bool: pass
-class B:
- def __le__(self, o: 'B') -> bool: pass
- def __ge__(self, o: 'B') -> bool: pass
-[builtins fixtures/bool.pyi]
-[out]
-main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
-main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
-
-[case testChainedComp]
-
-a, b, bo = None, None, None # type: (A, B, bool)
-a < a < b < b # Fail
-a < b < b < b
-a < a > a < b # Fail
-
-class A:
- def __lt__(self, o: 'B') -> bool: pass
- def __gt__(self, o: 'B') -> bool: pass
-class B:
- def __lt__(self, o: 'B') -> bool: pass
- def __gt__(self, o: 'B') -> bool: pass
-[builtins fixtures/bool.pyi]
-[out]
-main:3: error: Unsupported operand types for > ("A" and "A")
-main:5: error: Unsupported operand types for > ("A" and "A")
-main:5: error: Unsupported operand types for < ("A" and "A")
-
-
-[case testChainedCompBoolRes]
-
-a, b, bo = None, None, None # type: (A, B, bool)
-bo = a < b < b
-a = a < b < b # Fail
-
-class A:
- def __lt__(self, o: 'B') -> bool: pass
- def __gt__(self, o: 'B') -> bool: pass
-class B:
- def __lt__(self, o: 'B') -> bool: pass
- def __gt__(self, o: 'B') -> bool: pass
-[builtins fixtures/bool.pyi]
-[out]
-main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
-
-
-[case testChainedCompResTyp]
-
-x, y = None, None # type: (X, Y)
-a, b, p, bo = None, None, None, None # type: (A, B, P, bool)
-b = y == y == y
-bo = y == y == y # Fail
-a = x < y
-a = x < y == y # Fail
-p = x < y == y
-
-class P:
- pass
-class A(P):
- pass
-class B(P):
- pass
-
-class X:
- def __lt__(self, o: 'Y') -> A: pass
- def __gt__(self, o: 'Y') -> A: pass
-class Y:
- def __lt__(self, o: 'Y') -> A: pass
- def __gt__(self, o: 'Y') -> A: pass
- def __eq__(self, o: 'Y') -> B: pass
-[builtins fixtures/bool.pyi]
-[out]
-main:5: error: Incompatible types in assignment (expression has type "B", variable has type "bool")
-main:7: error: Incompatible types in assignment (expression has type "P", variable has type "A")
-
-
-[case testIs]
-
-a, b = None, None # type: (A, bool)
-a = a is b # Fail
-b = a is b
-b = b is a
-b = a is None
-class A: pass
-[builtins fixtures/bool.pyi]
-[out]
-main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
-
-[case testIsNot]
-
-a, b = None, None # type: (A, bool)
-a = a is not b # Fail
-b = a is not b
-b = b is not a
-b = a is not None
-class A: pass
-[builtins fixtures/bool.pyi]
-[out]
-main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
-
-[case testReverseBinaryOperator]
-
-class A:
- def __add__(self, x: int) -> int: pass
-class B:
- def __radd__(self, x: A) -> str: pass
-s = None # type: str
-n = None # type: int
-n = A() + 1
-s = A() + B()
-n = A() + B() # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testReverseBinaryOperator2]
-
-class A:
- def __add__(self, x: 'A') -> object: pass
-class B:
- def __radd__(self, x: A) -> str: pass
-s = None # type: str
-n = None # type: int
-s = A() + B()
-n = A() + B() # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testReverseBinaryOperator3]
-
-class N:
- def __add__(self, x: 'N') -> object: pass
-class A:
- def __add__(self, x: N) -> int: pass
-class B:
- def __radd__(self, x: N) -> str: pass
-s = None # type: str
-s = A() + B() # E: Unsupported operand types for + ("A" and "B")
-
-[case testBinaryOperatorWithAnyRightOperand]
-from typing import Any, cast
-class A: pass
-A() + cast(Any, 1)
-
-[case testReverseComparisonOperator]
-
-class C:
- def __gt__(self, x: 'A') -> object: pass
-class A:
- def __lt__(self, x: C) -> int: pass
-class B:
- def __gt__(self, x: A) -> str: pass
-s = None # type: str
-n = None # type: int
-n = A() < C()
-s = A() < B()
-n = A() < B() # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-s = object() < B() # E: Unsupported operand types for > ("B" and "object")
-
-[case testErrorContextAndBinaryOperators]
-import typing
-class A:
- def __getitem__(self, i: str) -> int: pass
-def f() -> None:
- A()[1] # Error
-class B:
- A()[1] # Error
-A()[1] # Error
-[out]
-main:5: error: Invalid index type "int" for "A"; expected type "str"
-main:7: error: Invalid index type "int" for "A"; expected type "str"
-main:8: error: Invalid index type "int" for "A"; expected type "str"
-
-[case testErrorContextAndBinaryOperators2]
-import m
-[file m.py]
-import typing
-class A:
- def __getitem__(self, i: str) -> int: pass
-def f() -> None:
- A()[1] # Error
-class B:
- A()[1] # Error
-A()[1] # Error
-[out]
-tmp/m.py:5: error: Invalid index type "int" for "A"; expected type "str"
-tmp/m.py:7: error: Invalid index type "int" for "A"; expected type "str"
-tmp/m.py:8: error: Invalid index type "int" for "A"; expected type "str"
-
-
--- Unary operators
--- ---------------
-
-
-[case testUnaryMinus]
-
-a, b = None, None # type: (A, B)
-a = -a # Fail
-b = -b # Fail
-b = -a
-
-class A:
- def __neg__(self) -> 'B':
- pass
-class B:
- pass
-[out]
-main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-main:4: error: Unsupported operand type for unary - ("B")
-
-[case testUnaryPlus]
-
-a, b = None, None # type: (A, B)
-a = +a # Fail
-b = +b # Fail
-b = +a
-
-class A:
- def __pos__(self) -> 'B':
- pass
-class B:
- pass
-[out]
-main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-main:4: error: Unsupported operand type for unary + ("B")
-
-[case testUnaryNot]
-
-a, b = None, None # type: (A, bool)
-a = not b # Fail
-b = not a
-b = not b
-class A:
- pass
-[builtins fixtures/bool.pyi]
-[out]
-main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
-
-[case testUnaryBitwiseNeg]
-
-a, b = None, None # type: (A, B)
-a = ~a # Fail
-b = ~b # Fail
-b = ~a
-
-class A:
- def __invert__(self) -> 'B':
- pass
-class B:
- pass
-[out]
-main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-main:4: error: Unsupported operand type for ~ ("B")
-
-
--- Indexing
--- --------
-
-
-[case testIndexing]
-
-a, b, c = None, None, None # type: (A, B, C)
-c = a[c] # Fail
-a = a[b] # Fail
-c = b[a] # Fail
-c = a[b]
-
-class A:
- def __getitem__(self, x: 'B') -> 'C':
- pass
-class B: pass
-class C: pass
-[out]
-main:3: error: Invalid index type "C" for "A"; expected type "B"
-main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
-main:5: error: Value of type "B" is not indexable
-
-[case testIndexingAsLvalue]
-
-a, b, c = None, None, None # type: (A, B, C)
-a[c] = c # Fail
-a[b] = a # Fail
-b[a] = c # Fail
-a[b] = c
-
-class A:
- def __setitem__(self, x: 'B', y: 'C') -> None:
- pass
-class B:
- pass
-class C:
- pass
-[out]
-main:3: error: Invalid index type "C" for "A"; expected type "B"
-main:4: error: Incompatible types in assignment (expression has type "A", target has type "C")
-main:5: error: Unsupported target for indexed assignment
-
-[case testOverloadedIndexing]
-
-from typing import overload
-
-a, b, c = None, None, None # type: (A, B, C)
-a[b]
-a[c]
-a[1] # E: No overload variant of "__getitem__" of "A" matches argument types [builtins.int]
-
-i, s = None, None # type: (int, str)
-i = a[b]
-s = a[b] # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-i = a[c] # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-s = a[c]
-
-class A:
- @overload
- def __getitem__(self, x: 'B') -> int:
- pass
- @overload
- def __getitem__(self, x: 'C') -> str:
- pass
-class B: pass
-class C: pass
-[out]
-
-
--- Cast expression
--- ---------------
-
-
-[case testCastExpressions]
-from typing import cast, Any
-class A: pass
-class B: pass
-class C(A): pass
-a, b, c = None, None, None # type: (A, B, C)
-
-a = cast(A, a()) # E: "A" not callable
-a = cast(Any, a()) # E: "A" not callable
-b = cast(A, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-a = cast(A, b)
-a = cast(A, a)
-c = cast(C, a)
-a = cast(A, c)
-a = cast(Any, b)
-b = cast(Any, a)
-[out]
-
-[case testAnyCast]
-from typing import cast, Any
-a, b = None, None # type: (A, B)
-a = cast(Any, a()) # Fail
-a = cast(Any, b)
-b = cast(Any, a)
-class A: pass
-class B: pass
-[out]
-main:3: error: "A" not callable
-
-
--- None return type
--- ----------------
-
-
-[case testNoneReturnTypeBasics]
-
-a, o = None, None # type: (A, object)
-a = f() # Fail
-o = A().g(a) # Fail
-A().g(f()) # Fail
-x = f() # type: A # Fail
-f()
-A().g(a)
-
-def f() -> None:
- pass
-
-class A:
- def g(self, x: object) -> None:
- pass
-[out]
-main:3: error: "f" does not return a value
-main:4: error: "g" of "A" does not return a value
-main:5: error: "f" does not return a value
-main:6: error: "f" does not return a value
-
-[case testNoneReturnTypeWithStatements]
-import typing
-if f(): # Fail
- pass
-elif f(): # Fail
- pass
-while f(): # Fail
- pass
-def g() -> object:
- return f() # Fail
-raise f() # Fail
-
-def f() -> None: pass
-[builtins fixtures/exception.pyi]
-[out]
-main:2: error: "f" does not return a value
-main:4: error: "f" does not return a value
-main:6: error: "f" does not return a value
-main:9: error: "f" does not return a value
-main:10: error: "f" does not return a value
-
-[case testNoneReturnTypeWithExpressions]
-from typing import cast
-a = None # type: A
-[f()] # E: "f" does not return a value
-f() + a # E: "f" does not return a value
-a + f() # E: "f" does not return a value
-f() == a # E: "f" does not return a value
-a != f() # E: Unsupported left operand type for != ("A")
-cast(A, f()) # E: "f" does not return a value
-f().foo # E: "f" does not return a value
-
-def f() -> None: pass
-class A:
- def __add__(self, x: 'A') -> 'A': pass
-[builtins fixtures/list.pyi]
-
-[case testNoneReturnTypeWithExpressions2]
-
-a, b = None, None # type: (A, bool)
-a < f() # E: Unsupported left operand type for < ("A")
-f() <= a # E: "f" does not return a value
-f() in a # E: Unsupported right operand type for in ("A")
-a in f() # E: "f" does not return a value
--f() # E: "f" does not return a value
-not f() # E: "f" does not return a value
-f() and b # E: "f" does not return a value
-b or f() # E: "f" does not return a value
-
-def f() -> None: pass
-class A:
- def __add__(self, x: 'A') -> 'A':
- pass
-[builtins fixtures/bool.pyi]
-
-
--- Slicing
--- -------
-
-
-[case testGetSlice]
-
-a, b = None, None # type: (A, B)
-a = a[1:2] # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-a = a[1:] # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-a = a[:2] # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-a = a[:] # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-b = a[1:2]
-b = a[1:]
-b = a[:2]
-b = a[:]
-
-class A:
- def __getitem__(self, s: slice) -> 'B': pass
-class B: pass
-[builtins fixtures/slice.pyi]
-
-[case testSlicingWithInvalidBase]
-
-a = None # type: A
-a[1:2] # E: Invalid index type "slice" for "A"; expected type "int"
-a[:] # E: Invalid index type "slice" for "A"; expected type "int"
-class A:
- def __getitem__(self, n: int) -> 'A': pass
-[builtins fixtures/slice.pyi]
-
-[case testSlicingWithNonindexable]
-
-o = None # type: object
-o[1:2] # E: Value of type "object" is not indexable
-o[:] # E: Value of type "object" is not indexable
-[builtins fixtures/slice.pyi]
-
-[case testNonIntSliceBounds]
-from typing import Any
-a, o = None, None # type: (Any, object)
-a[o:1] # E: Slice index must be an integer or None
-a[1:o] # E: Slice index must be an integer or None
-a[o:] # E: Slice index must be an integer or None
-a[:o] # E: Slice index must be an integer or None
-[builtins fixtures/slice.pyi]
-
-[case testNoneSliceBounds]
-from typing import Any
-a = None # type: Any
-a[None:1]
-a[1:None]
-a[None:]
-a[:None]
-[builtins fixtures/slice.pyi]
-
-
--- String interpolation
--- --------------------
-
-
-[case testStringInterpolationType]
-from typing import Tuple
-i, f, s, t = None, None, None, None # type: (int, float, str, Tuple[int])
-'%d' % i
-'%f' % f
-'%s' % s
-'%d' % (f,)
-'%d' % (s,) # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
-'%d' % t
-'%d' % s # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
-'%f' % s # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
-[builtins fixtures/primitives.pyi]
-
-[case testStringInterpolationSAcceptsAnyType]
-from typing import Any
-i, o, s = None, None, None # type: (int, object, str)
-'%s %s %s' % (i, o, s)
-[builtins fixtures/primitives.pyi]
-
-[case testStringInterpolationCount]
-'%d %d' % 1 # E: Not enough arguments for format string
-'%d %d' % (1, 2)
-'%d %d' % (1, 2, 3) # E: Not all arguments converted during string formatting
-t = 1, 's'
-'%d %s' % t
-'%s %d' % t # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
-'%d' % t # E: Not all arguments converted during string formatting
-[builtins fixtures/primitives.pyi]
-
-[case testStringInterpolationWithAnyType]
-from typing import Any
-a = None # type: Any
-'%d %d' % a
-[builtins fixtures/primitives.pyi]
-
-[case testStringInterpolationInvalidPlaceholder]
-'%W' % 1 # E: Unsupported format character 'W'
-
-[case testStringInterpolationWidth]
-'%2f' % 3.14
-'%*f' % 3.14 # E: Not enough arguments for format string
-'%*f' % (4, 3.14)
-'%*f' % (1.1, 3.14) # E: * wants int
-[builtins fixtures/primitives.pyi]
-
-[case testStringInterpolationPrecision]
-'%.2f' % 3.14
-'%.*f' % 3.14 # E: Not enough arguments for format string
-'%.*f' % (4, 3.14)
-'%.*f' % (1.1, 3.14) # E: * wants int
-[builtins fixtures/primitives.pyi]
-
-[case testStringInterpolationWidthAndPrecision]
-'%4.2f' % 3.14
-'%4.*f' % 3.14 # E: Not enough arguments for format string
-'%*.2f' % 3.14 # E: Not enough arguments for format string
-'%*.*f' % 3.14 # E: Not enough arguments for format string
-'%*.*f' % (4, 2, 3.14)
-[builtins fixtures/primitives.pyi]
-
-[case testStringInterpolationFlagsAndLengthModifiers]
-'%04hd' % 1
-'%-.4ld' % 1
-'%+*Ld' % (1, 1)
-'% .*ld' % (1, 1)
-[builtins fixtures/primitives.pyi]
-
-[case testStringInterpolationDoublePercentage]
-'%% %d' % 1
-'%3% %d' % 1
-'%*%' % 1
-'%*% %d' % 1 # E: Not enough arguments for format string
-[builtins fixtures/primitives.pyi]
-
-[case testStringInterpolationC]
-'%c' % 1
-'%c' % 's'
-'%c' % '' # E: %c requires int or char
-'%c' % 'ab' # E: %c requires int or char
-[builtins fixtures/primitives.pyi]
-
-[case testStringInterpolationMappingTypes]
-'%(a)d %(b)s' % {'a': 1, 'b': 's'}
-'%(a)d %(b)s' % {'a': 's', 'b': 1} # E: Incompatible types in string interpolation (expression has type "str", placeholder with key 'a' has type "Union[int, float]")
-[builtins fixtures/primitives.pyi]
-
-[case testStringInterpolationMappingKeys]
-'%()d' % {'': 2}
-'%(a)d' % {'a': 1, 'b': 2, 'c': 3}
-'%(q)d' % {'a': 1, 'b': 2, 'c': 3} # E: Key 'q' not found in mapping
-'%(a)d %%' % {'a': 1}
-
-[builtins fixtures/dict.pyi]
-
-[case testStringInterpolationMappingDictTypes]
-from typing import Any, Dict
-a = None # type: Any
-ds, do, di = None, None, None # type: Dict[str, int], Dict[object, int], Dict[int, int]
-'%(a)' % 1 # E: Format requires a mapping (expression has type "int", expected type for mapping is Dict[Any, Any])
-'%()d' % a
-'%()d' % ds
-'%()d' % do
-[builtins fixtures/dict.pyi]
-
-[case testStringInterpolationMappingInvalidDictTypes-skip]
-from typing import Any, Dict
-di = None # type: Dict[int, int]
-'%()d' % di # E: Format requires a mapping (expression has type Dict[int, int], expected type for mapping is Dict[str, Any])
-[builtins fixtures/dict.pyi]
-
-[case testStringInterpolationMappingInvalidSpecifiers]
-'%(a)d %d' % 1 # E: String interpolation mixes specifier with and without mapping keys
-'%(b)*d' % 1 # E: String interpolation contains both stars and mapping keys
-'%(b).*d' % 1 # E: String interpolation contains both stars and mapping keys
-
-[case testStringInterpolationMappingFlagsAndLengthModifiers]
-'%(a)1d' % {'a': 1}
-'%(a).1d' % {'a': 1}
-'%(a)#1.1ld' % {'a': 1}
-[builtins fixtures/dict.pyi]
-
-[case testStringInterpolationFloatPrecision]
-'%.f' % 1.2
-'%.3f' % 1.2
-'%.f' % 'x'
-'%.3f' % 'x'
-[builtins fixtures/primitives.pyi]
-[out]
-main:3: error: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
-main:4: error: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
-
-[case testStringInterpolationSpaceKey]
-'%( )s' % {' ': 'foo'}
-
-[case testByteByteInterpolation]
-def foo(a: bytes, b: bytes):
- b'%s:%s' % (a, b)
-foo(b'a', b'b') == b'a:b'
-
-[case testBytePercentInterpolationSupported]
-b'%s' % (b'xyz',)
-b'%(name)s' % {'name': 'jane'}
-b'%c' % (123)
-
-[case testUnicodeInterpolation_python2]
-u'%s' % (u'abc',)
-
--- Lambdas
--- -------
-
-
-[case testTrivialLambda]
-from typing import Callable
-f = lambda: 1 # type: Callable[[], int]
-f = lambda: ''.x
-f = lambda: ''
-[out]
-main:3: error: "str" has no attribute "x"
-main:4: error: Incompatible types in assignment (expression has type Callable[[], str], variable has type Callable[[], int])
-main:4: error: Incompatible return value type (got "str", expected "int")
-
-[case testVoidLambda]
-import typing
-def void() -> None:
- pass
-x = lambda: void() # type: typing.Callable[[], None]
-
-
--- List comprehensions
--- -------------------
-
-
-[case testSimpleListComprehension]
-from typing import List
-a = None # type: List[A]
-a = [x for x in a]
-b = [x for x in a] # type: List[B] # E: List comprehension has incompatible type List[A]
-class A: pass
-class B: pass
-[builtins fixtures/for.pyi]
-
-[case testSimpleListComprehensionNestedTuples]
-from typing import List, Tuple
-l = None # type: List[Tuple[A, Tuple[A, B]]]
-a = [a2 for a1, (a2, b1) in l] # type: List[A]
-b = [a2 for a1, (a2, b1) in l] # type: List[B] # E: List comprehension has incompatible type List[A]
-class A: pass
-class B: pass
-[builtins fixtures/for.pyi]
-
-[case testSimpleListComprehensionNestedTuples2]
-from typing import List, Tuple
-l = None # type: List[Tuple[int, Tuple[int, str]]]
-a = [f(d) for d, (i, s) in l]
-b = [f(s) for d, (i, s) in l] # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-
-def f(x: int): pass
-[builtins fixtures/for.pyi]
-
-[case testListComprehensionWithNonDirectMapping]
-from typing import List
-a = None # type: List[A]
-b = None # type: List[B]
-b = [f(x) for x in a]
-a = [f(x) for x in a] # E: List comprehension has incompatible type List[B]
-([f(x) for x in b]) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
-class A: pass
-class B: pass
-def f(a: A) -> B: pass
-[builtins fixtures/for.pyi]
-
-[case testErrorInListComprehensionCondition]
-from typing import List
-a = None # type: List[A]
-a = [x for x in a if x()] # E: "A" not callable
-class A: pass
-[builtins fixtures/for.pyi]
-
-[case testTypeInferenceOfListComprehension]
-from typing import List
-a = None # type: List[A]
-o = [x for x in a] # type: List[object]
-class A: pass
-[builtins fixtures/for.pyi]
-
-[case testSimpleListComprehensionInClassBody]
-from typing import List
-class A:
- a = None # type: List[A]
- a = [x for x in a]
- b = [x for x in a] # type: List[B] # E: List comprehension has incompatible type List[A]
-class B: pass
-[builtins fixtures/for.pyi]
-[out]
-
-
--- Set comprehension
--- -----------------
-
-
-[case testSimpleSetComprehension]
-from typing import Set
-a = None # type: Set[A]
-a = {x for x in a}
-b = {x for x in a} # type: Set[B] # E: Set comprehension has incompatible type Set[A]
-class A: pass
-class B: pass
-[builtins fixtures/set.pyi]
-
-
--- Dictionary comprehension
--- ------------------------
-
-
-[case testSimpleDictionaryComprehension]
-from typing import Dict, List, Tuple
-abd = None # type: Dict[A, B]
-abl = None # type: List[Tuple[A, B]]
-abd = {a: b for a, b in abl}
-x = {a: b for a, b in abl} # type: Dict[B, A]
-y = {a: b for a, b in abl} # type: A
-class A: pass
-class B: pass
-[builtins fixtures/dict.pyi]
-[out]
-main:5: error: Key expression in dictionary comprehension has incompatible type "A"; expected type "B"
-main:5: error: Value expression in dictionary comprehension has incompatible type "B"; expected type "A"
-main:6: error: Incompatible types in assignment (expression has type Dict[A, B], variable has type "A")
-
-
-[case testDictionaryComprehensionWithNonDirectMapping]
-from typing import Dict, List, Tuple
-abd = None # type: Dict[A, B]
-abl = None # type: List[Tuple[A, B]]
-abd = {a: f(b) for a, b in abl}
-class A: pass
-class B: pass
-class C: pass
-def f(b: A) -> C: pass
-[builtins fixtures/dict.pyi]
-[out]
-main:4: error: Value expression in dictionary comprehension has incompatible type "C"; expected type "B"
-main:4: error: Argument 1 to "f" has incompatible type "B"; expected "A"
-
-
--- Generator expressions
--- ---------------------
-
-
-[case testSimpleGeneratorExpression]
-from typing import Iterator
-# The implementation is mostly identical to list comprehensions, so a single
-# test case is ok.
-a = None # type: Iterator[int]
-a = (x for x in a)
-b = None # type: Iterator[str]
-b = (x for x in a) # E: Generator has incompatible item type "int"
-[builtins fixtures/for.pyi]
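
As the comment in this case notes, generator expressions are inferred much like list comprehensions. A small illustrative sketch with hypothetical names:

from typing import Iterator

def doubled(nums: Iterator[int]) -> Iterator[int]:
    # Inferred as a generator over int, which is compatible with Iterator[int].
    return (n * 2 for n in nums)

def mislabeled(nums: Iterator[int]) -> Iterator[str]:
    return (n * 2 for n in nums)   # mypy reports an error here: the item type is int, not str
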
-
-
--- Conditional expressions
--- -----------------------
-
-
-[case testSimpleConditionalExpression]
-import typing
-y = ''
-x = 1 if y else 2
-x = 3
-x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testConditionalExpressionWithEmptyCondition]
-import typing
-def f() -> None: pass
-x = 1 if f() else 2 # E: "f" does not return a value
-
-[case testConditionalExpressionWithSubtyping]
-import typing
-class A: pass
-class B(A): pass
-x = B() if bool() else A()
-x = A()
-x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "A")
-y = A() if bool() else B()
-y = A()
-y = '' # E: Incompatible types in assignment (expression has type "str", variable has type "A")
-[builtins fixtures/bool.pyi]
-
-[case testConditionalExpressionAndTypeContext]
-import typing
-x = [1] if bool() else []
-x = [1]
-x = ['x'] # E: List item 0 has incompatible type "str"
-[builtins fixtures/list.pyi]
-
-
--- Special cases
--- -------------
-
-
-[case testOperationsWithNonInstanceTypes]
-from typing import cast
-class A:
- def __add__(self, a: 'A') -> 'A': pass
-a = None # type: A
-None + a # Fail
-f + a # Fail
-a + f # Fail
-cast(A, f)
-
-def f() -> None:
- pass
-[out]
-main:5: error: Unsupported left operand type for + (None)
-main:6: error: Unsupported left operand type for + (Callable[[], None])
-main:7: error: Unsupported operand types for + ("A" and Callable[[], None])
-
-[case testOperatorMethodWithInvalidArgCount]
-
-a = None # type: A
-a + a # Fail
-
-class A:
- def __add__(self) -> 'A':
- pass
-[out]
-main:3: error: Too many arguments for "__add__" of "A"
-
-[case testOperatorMethodAsVar]
-from typing import Any
-class A:
- def __init__(self, _add: Any) -> None:
- self.__add__ = _add
-a = None # type: A
-a + a
-[out]
-
-[case testOperatorMethodAsVar2]
-
-class A:
- def f(self, x: int) -> str: pass
- __add__ = f
-s = None # type: str
-s = A() + 1
-A() + (A() + 1)
-[out]
-main:7: error: Argument 1 has incompatible type "str"; expected "int"
-
-[case testIndexedLvalueWithSubtypes]
-
-a, b, c = None, None, None # type: (A, B, C)
-a[c] = c
-a[b] = c
-a[c] = b
-
-class A:
- def __setitem__(self, x: 'B', y: 'B') -> None:
- pass
-class B:
- pass
-class C(B):
- pass
-[out]
-
-
--- Ellipsis
--- --------
-
-
-[case testEllipsis]
-
-a = None # type: A
-a = ... # E: Incompatible types in assignment (expression has type "ellipsis", variable has type "A")
-b = ...
-c = ...
-b = c
-....__class__
-....a # E: "ellipsis" has no attribute "a"
-
-class A: pass
-[file builtins.py]
-class object:
- def __init__(self): pass
-class ellipsis:
- def __init__(self): pass
- __class__ = object()
-class type: pass
-class function: pass
-class str: pass
-[out]
-
-
--- Yield expression
--- ----------------
-
-
-[case testYieldExpression]
-def f(x: int) -> None:
- x = yield f('')
- x = 1
-[builtins fixtures/for.pyi]
-[out]
-main:1: error: The return type of a generator function should be "Generator" or one of its supertypes
-main:2: error: Argument 1 to "f" has incompatible type "str"; expected "int"
-
-[case testYieldExpressionWithNone]
-from typing import Iterator
-def f(x: int) -> Iterator[None]:
- (yield)
-[builtins fixtures/for.pyi]
-[out]
-
-
--- Yield from expression
--- ---------------------
-
-
-[case testYieldFromIteratorHasNoValue]
-from typing import Iterator
-def f() -> Iterator[int]:
- yield 5
-def g() -> Iterator[int]:
- a = yield from f()
-[out]
-main:5: error: Function does not return a value
-
-[case testYieldFromGeneratorHasValue]
-from typing import Iterator, Generator
-def f() -> Generator[int, None, str]:
- yield 5
- return "ham"
-def g() -> Iterator[int]:
- a = "string"
- a = yield from f()
-[out]
-
-
--- dict(...)
--- ---------
-
-
--- Note that the stub used in unit tests does not have all overload
--- variants, but it should not matter.
-
-[case testDictWithKeywordArgsOnly]
-from typing import Dict, Any
-d1 = dict(a=1, b=2) # type: Dict[str, int]
-d2 = dict(a=1, b='') # type: Dict[str, int] # E: List item 1 has incompatible type "Tuple[str, str]"
-d3 = dict(a=1) # type: Dict[int, int] # E: List item 0 has incompatible type "Tuple[str, int]"
-d4 = dict(a=1, b=1)
-d4.xyz # E: Dict[str, int] has no attribute "xyz"
-d5 = dict(a=1, b='') # type: Dict[str, Any]
-[builtins fixtures/dict.pyi]
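
This case exercises mypy's special-cased signature for dict() called with keyword arguments: the keys are treated as str and the value type is inferred from the arguments. Roughly:

from typing import Dict

d = dict(a=1, b=2)           # inferred as Dict[str, int]
total = d['a'] + d['b']      # fine: the values are ints

# A mismatched value is reported against the declared type:
bad = dict(a=1, b='two')     # type: Dict[str, int]
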
-
-[case testDictWithoutKeywordArgs]
-from typing import Dict
-d = dict() # E: Need type annotation for variable
-d2 = dict() # type: Dict[int, str]
-dict(undefined) # E: Name 'undefined' is not defined
-[builtins fixtures/dict.pyi]
-
-[case testDictFromList]
-from typing import Dict
-d = dict([(1, 'x'), (2, 'y')])
-d() # E: Dict[int, str] not callable
-d2 = dict([(1, 'x')]) # type: Dict[str, str] # E: List item 0 has incompatible type "Tuple[int, str]"
-[builtins fixtures/dict.pyi]
-
-[case testDictFromIterableAndKeywordArg]
-from typing import Dict
-it = [('x', 1)]
-
-d = dict(it, x=1)
-d() # E: Dict[str, int] not callable
-
-d2 = dict(it, x='') # E: Cannot infer type argument 2 of "dict"
-d2() # E: Dict[Any, Any] not callable
-
-d3 = dict(it, x='') # type: Dict[str, int] # E: Argument 2 to "dict" has incompatible type "str"; expected "int"
-[builtins fixtures/dict.pyi]
-
-[case testDictFromIterableAndKeywordArg2]
-it = [(1, 'x')]
-dict(it, x='y') # E: Keyword argument only valid with "str" key type in call to "dict"
-[builtins fixtures/dict.pyi]
-
-[case testDictFromIterableAndKeywordArg3]
-d = dict([], x=1)
-d() # E: Dict[str, int] not callable
-[builtins fixtures/dict.pyi]
-
-[case testDictFromIterableAndStarStarArgs]
-from typing import Dict
-it = [('x', 1)]
-
-kw = {'x': 1}
-d = dict(it, **kw)
-d() # E: Dict[str, int] not callable
-
-kw2 = {'x': ''}
-d2 = dict(it, **kw2) # E: Cannot infer type argument 2 of "dict"
-d2() # E: Dict[Any, Any] not callable
-
-d3 = dict(it, **kw2) # type: Dict[str, int] # E: Argument 2 to "dict" has incompatible type **Dict[str, str]; expected "int"
-[builtins fixtures/dict.pyi]
-
-[case testDictFromIterableAndStarStarArgs2]
-it = [(1, 'x')]
-kw = {'x': 'y'}
-d = dict(it, **kw) # E: Keyword argument only valid with "str" key type in call to "dict"
-d() # E: Dict[int, str] not callable
-[builtins fixtures/dict.pyi]
-
-[case testUserDefinedClassNamedDict]
-from typing import Generic, TypeVar
-T = TypeVar('T')
-S = TypeVar('S')
-class dict(Generic[T, S]):
- def __init__(self, x: T, **kwargs: T) -> None: pass
-dict(1, y=1)
-[builtins fixtures/dict.pyi]
-
-[case testSpecialSignatureForSubclassOfDict]
-from typing import TypeVar, Dict, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-class D1(dict): pass # Implicit base class Dict[Any, Any]
-D1([(1, 2)], x=1)
-class D2(Dict[T, S], Generic[T, S]): pass
-da = D2([('x', 2)], x=1)
-da() # E: D2[str, int] not callable
-D2([(1, 2)], x=1) # E: Keyword argument only valid with "str" key type in call to "dict"
-db = D2(x=1)
-db() # E: D2[str, int] not callable
-[builtins fixtures/dict.pyi]
-
-[case testSpecialSignatureForSubclassOfDict2]
-from typing import TypeVar, Dict, Generic
-T = TypeVar('T')
-class D(Dict[str, T], Generic[T]): pass
-D([('x', 1)], x=1)
-[builtins fixtures/dict.pyi]
-
-[case testOverridingSpecialSignatureInSubclassOfDict]
-from typing import TypeVar, Dict, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-class D(Dict[T, S], Generic[T, S]):
- def __init__(self, x: S, y: T) -> None: pass
-d = D(1, y='')
-d() # E: D[str, int] not callable
-[builtins fixtures/dict.pyi]
-
-[case testRevealType]
-reveal_type(1) # E: Revealed type is 'builtins.int'
-
-[case testUndefinedRevealType]
-reveal_type(x)
-[out]
-main:1: error: Revealed type is 'Any'
-main:1: error: Name 'x' is not defined
-
-[case testUserDefinedRevealType]
-def reveal_type(x: int) -> None: pass
-reveal_type("foo") # E: Argument 1 to "reveal_type" has incompatible type "str"; expected "int"
-
-[case testRevealTypeVar]
-reveal_type = 1
-1 + "foo" # E: Unsupported operand types for + ("int" and "str")
-
-[case testRevealForward]
-def f() -> None:
- reveal_type(x)
-x = 1 + 1
-[out]
-main:2: error: Revealed type is 'builtins.int'
-
-[case testEqNone]
-None == None
-[builtins fixtures/ops.pyi]
-
-[case testLtNone]
-None < None # E: Unsupported left operand type for < (None)
-[builtins fixtures/ops.pyi]
-
-[case testDictWithStarExpr]
-# flags: --fast-parser
-b = {'z': 26, *a} # E: invalid syntax
-[builtins fixtures/dict.pyi]
-
-[case testDictWithStarStarExpr]
-# flags: --fast-parser
-from typing import Dict
-a = {'a': 1}
-b = {'z': 26, **a}
-c = {**b}
-d = {**a, **b, 'c': 3}
-e = {1: 'a', **a} # E: Argument 1 to "update" of "dict" has incompatible type Dict[str, int]; expected Mapping[int, str]
-f = {**b} # type: Dict[int, int] # E: List item 0 has incompatible type Dict[str, int]
-[builtins fixtures/dict.pyi]
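
For reference, the reveal_type() calls used throughout these cases are a checker-only helper: mypy prints the inferred type of the argument and carries on, but the name does not exist at runtime, so such calls must be removed before executing the code normally. A rough sketch:

x = [1, 2, 3]
reveal_type(x)   # under mypy, prints something like: Revealed type is 'builtins.list[builtins.int]'
                 # (running this with a plain Python interpreter raises NameError)
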
diff --git a/test-data/unit/check-fastparse.test b/test-data/unit/check-fastparse.test
deleted file mode 100644
index f982838..0000000
--- a/test-data/unit/check-fastparse.test
+++ /dev/null
@@ -1,301 +0,0 @@
-[case testFastParseSyntaxError]
-# flags: --fast-parser
-1 + # E: invalid syntax
-
-[case testFastParseTypeCommentSyntaxError]
-# flags: --fast-parser
-x = None # type: a : b # E: syntax error in type comment
-
-[case testFastParseInvalidTypeComment]
-# flags: --fast-parser
-x = None # type: a + b # E: invalid type comment
-
--- Function type comments are attributed to the function def line.
--- This happens in both parsers.
-[case testFastParseFunctionAnnotationSyntaxError]
-# flags: --fast-parser
-def f(): # E: syntax error in type comment
- # type: None -> None
- pass
-
-[case testFastParseInvalidFunctionAnnotation]
-# flags: --fast-parser
-def f(x): # E: invalid type comment
- # type: (a + b) -> None
- pass
-
-[case testFastParseProperty]
-# flags: --fast-parser
-class C:
- @property
- def x(self) -> str: pass
- @x.setter
- def x(self, value: str) -> None: pass
-[builtins fixtures/property.pyi]
-
-[case testFastParseConditionalProperty]
-# flags: --fast-parser
-class C:
- if bool():
- @property
- def x(self) -> str: pass
- @x.setter
- def x(self, value: str) -> None: pass
-[builtins fixtures/property.pyi]
-
-[case testFastParsePerArgumentAnnotations]
-# flags: --fast-parser
-class A: pass
-class B: pass
-class C: pass
-class D: pass
-class E: pass
-class F: pass
-def f(a, # type: A
- b = None, # type: B
- *args, # type: C
- d = None, # type: D
- e, # type: E
- **kwargs # type: F
- ):
- reveal_type(a) # E: Revealed type is '__main__.A'
- reveal_type(b) # E: Revealed type is '__main__.B'
- reveal_type(args) # E: Revealed type is 'builtins.tuple[__main__.C]'
- reveal_type(d) # E: Revealed type is '__main__.D'
- reveal_type(e) # E: Revealed type is '__main__.E'
- reveal_type(kwargs) # E: Revealed type is 'builtins.dict[builtins.str, __main__.F]'
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testFastParsePerArgumentAnnotationsWithReturn]
-# flags: --fast-parser
-class A: pass
-class B: pass
-class C: pass
-class D: pass
-class E: pass
-class F: pass
-def f(a, # type: A
- b = None, # type: B
- *args, # type: C
- d = None, # type: D
- e, # type: E
- **kwargs # type: F
- ):
- # type: (...) -> int
- reveal_type(a) # E: Revealed type is '__main__.A'
- reveal_type(b) # E: Revealed type is '__main__.B'
- reveal_type(args) # E: Revealed type is 'builtins.tuple[__main__.C]'
- reveal_type(d) # E: Revealed type is '__main__.D'
- reveal_type(e) # E: Revealed type is '__main__.E'
- reveal_type(kwargs) # E: Revealed type is 'builtins.dict[builtins.str, __main__.F]'
- return "not an int" # E: Incompatible return value type (got "str", expected "int")
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testFastParsePerArgumentAnnotationsWithAnnotatedBareStar]
-# flags: --fast-parser
-def f(*, # type: int # E: bare * has associated type comment
- x # type: str
- ):
- # type: (...) -> int
- pass
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testFastParsePerArgumentAnnotationsWithReturnAndBareStar]
-# flags: --fast-parser
-def f(*,
- x # type: str
- ):
- # type: (...) -> int
- reveal_type(x) # E: Revealed type is 'builtins.str'
- return "not an int" # E: Incompatible return value type (got "str", expected "int")
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testFastParsePerArgumentAnnotations_python2]
-# flags: --fast-parser
-class A: pass
-class B: pass
-class C: pass
-class D: pass
-def f(a, # type: A
- b = None, # type: B
- *args # type: C
- # kwargs not tested due to lack of 2.7 dict fixtures
- ):
- reveal_type(a) # E: Revealed type is '__main__.A'
- reveal_type(b) # E: Revealed type is '__main__.B'
- reveal_type(args) # E: Revealed type is 'builtins.tuple[__main__.C]'
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testFastParsePerArgumentAnnotationsWithReturn_python2]
-# flags: --fast-parser
-class A: pass
-class B: pass
-class C: pass
-class D: pass
-def f(a, # type: A
- b = None, # type: B
- *args # type: C
- # kwargs not tested due to lack of 2.7 dict fixtures
- ):
- # type: (...) -> int
- reveal_type(a) # E: Revealed type is '__main__.A'
- reveal_type(b) # E: Revealed type is '__main__.B'
- reveal_type(args) # E: Revealed type is 'builtins.tuple[__main__.C]'
- return "not an int" # E: Incompatible return value type (got "str", expected "int")
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testFasterParseTooManyArgumentsAnnotation]
-# flags: --fast-parser
-def f(): # E: Type signature has too many arguments
- # type: (int) -> None
- pass
-
-[case testFasterParseTooFewArgumentsAnnotation]
-# flags: --fast-parser
-def f(x): # E: Type signature has too few arguments
- # type: () -> None
- pass
-
-[case testFasterParseTypeCommentError_python2]
-# flags: --fast-parser
-from typing import Tuple
-def f(a):
- # type: (Tuple(int, int)) -> int
- pass
-[out]
-main:3: error: invalid type comment
-
-[case testFastParseMatMul]
-# flags: --fast-parser
-from typing import Any
-x = None # type: Any
-x @ 1
-x @= 1
-
-[case testIncorrectTypeCommentIndex]
-# flags: --fast-parser
-from typing import Dict
-x = None # type: Dict[x: y]
-[out]
-main:3: error: syntax error in type comment
-
-[case testPrintStatementTrailingCommaFastParser_python2]
-# flags: --fast-parser
-print 0,
-print 1, 2,
-
-[case testFastParserShowsMultipleErrors]
-def f(x): # E: Type signature has too few arguments
- # type: () -> None
- pass
-def g(): # E: Type signature has too many arguments
- # type: (int) -> None
- pass
-
-[case testFastParseMalformedAssert]
-# flags: --fast-parser
-assert 1, 2
-assert (1, 2) # W: Assertion is always true, perhaps remove parentheses?
-assert (1, 2), 3 # W: Assertion is always true, perhaps remove parentheses?
-assert ()
-assert (1,) # W: Assertion is always true, perhaps remove parentheses?
-
-[case testFastParseAssertMessage]
-# flags: --fast-parser
-assert 1
-assert 1, 2
-assert 1, 1+2
-assert 1, 1+'test' # E: Unsupported operand types for + ("int" and "str")
-assert 1, f() # E: Name 'f' is not defined
-
-[case testFastParserConsistentFunctionTypes]
-# flags: --fast-parser
-def f(x, y, z):
- # type: (int, int, int) -> int
- pass
-
-def f(x, # type: int # E: Function has duplicate type signatures
- y, # type: int
- z # type: int
- ):
- # type: (int, int, int) -> int
- pass
-
-def f(x, # type: int
- y, # type: int
- z # type: int
- ):
- # type: (...) -> int
- pass
-
-def f(x, y, z):
- # type: (int, int, int) -> int
- pass
-
-def f(x) -> int: # E: Function has duplicate type signatures
- # type: (int) -> int
- pass
-
-def f(x: int, y: int, z: int):
- # type: (...) -> int
- pass
-
-def f(x: int): # E: Function has duplicate type signatures
- # type: (int) -> int
- pass
-
-[case testFastParserDuplicateNames]
-# flags: --fast-parser
-def f(x, y, z):
- pass
-
-def g(x, y, x): # E: duplicate argument 'x' in function definition
- pass
-
-def h(x, y, *x): # E: duplicate argument 'x' in function definition
- pass
-
-def i(x, y, *z, **z): # E: duplicate argument 'z' in function definition
- pass
-
-def j(x: int, y: int, *, x: int = 3): # E: duplicate argument 'x' in function definition
- pass
-
-def k(*, y, z, y): # E: duplicate argument 'y' in function definition
- pass
-
-lambda x, y, x: ... # E: duplicate argument 'x' in function definition
-
-[case testFastParserDuplicateNames_python2]
-# flags: --fast-parser
-def f(x, y, z):
- pass
-
-def g(x, y, x): # E: duplicate argument 'x' in function definition
- pass
-
-def h(x, y, *x): # E: duplicate argument 'x' in function definition
- pass
-
-def i(x, y, *z, **z): # E: duplicate argument 'z' in function definition
- pass
-
-def j(x, (y, y), z): # E: duplicate argument 'y' in function definition
- pass
-
-def k(x, (y, x)): # E: duplicate argument 'x' in function definition
- pass
-
-def l((x, y), (z, x)): # E: duplicate argument 'x' in function definition
- pass
-
-def m(x, ((x, y), z)): # E: duplicate argument 'x' in function definition
- pass
-
-lambda x, (y, x): None # E: duplicate argument 'x' in function definition
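
The per-argument annotations exercised above use the type comment syntax from PEP 484, which works on both Python 2 and 3 and which --fast-parser handles as shown in these cases. A self-contained sketch with a hypothetical function:

from typing import List

def describe(name,       # type: str
             scores,     # type: List[int]
             *extra      # type: int
             ):
    # type: (...) -> str
    # Each extra positional argument is an int; 'extra' itself is a tuple of ints.
    return name + ': ' + str(sum(scores) + sum(extra))
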
diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test
deleted file mode 100644
index 9d057d8..0000000
--- a/test-data/unit/check-flags.test
+++ /dev/null
@@ -1,305 +0,0 @@
-[case testUnannotatedFunction]
-# flags: --disallow-untyped-defs
-def f(x): pass
-[out]
-main:2: error: Function is missing a type annotation
-
-[case testUnannotatedArgument]
-# flags: --disallow-untyped-defs
-def f(x) -> int: pass
-[out]
-main:2: error: Function is missing a type annotation for one or more arguments
-
-[case testUnannotatedArgumentWithFastParser]
-# flags: --fast-parser --disallow-untyped-defs
-def f(x) -> int: pass
-[out]
-main:2: error: Function is missing a type annotation for one or more arguments
-
-[case testNoArgumentFunction]
-# flags: --disallow-untyped-defs
-def f() -> int: pass
-[out]
-
-[case testUnannotatedReturn]
-# flags: --disallow-untyped-defs
-def f(x: int): pass
-[out]
-main:2: error: Function is missing a return type annotation
-
-[case testUnannotatedReturnWithFastParser]
-# flags: --fast-parser --disallow-untyped-defs
-def f(x: int): pass
-[out]
-main:2: error: Function is missing a return type annotation
-
-[case testLambda]
-# flags: --disallow-untyped-defs
-lambda x: x
-[out]
-
-[case testUntypedDef]
-# flags: --disallow-untyped-defs
-def f():
- 1 + "str"
-[out]
-main:2: error: Function is missing a type annotation
-
-[case testSubclassingAny]
-# flags: --disallow-subclassing-any
-from typing import Any
-FakeClass = None # type: Any
-class Foo(FakeClass): pass # E: Class cannot subclass 'FakeClass' (has type 'Any')
-[out]
-
-[case testSubclassingAnyMultipleBaseClasses]
-# flags: --disallow-subclassing-any
-from typing import Any
-FakeClass = None # type: Any
-class ActualClass: pass
-class Foo(ActualClass, FakeClass): pass # E: Class cannot subclass 'FakeClass' (has type 'Any')
-[out]
-
-[case testSubclassingAnySilentImports]
-# flags: --disallow-subclassing-any --follow-imports=skip
-# cmd: mypy -m main
-
-[file main.py]
-from ignored_module import BaseClass
-class Foo(BaseClass): pass
-
-[file ignored_module.py]
-class BaseClass: pass
-
-[out]
-tmp/main.py:2: error: Class cannot subclass 'BaseClass' (has type 'Any')
-
-[case testSubclassingAnySilentImports2]
-# flags: --disallow-subclassing-any --follow-imports=skip
-# cmd: mypy -m main
-
-[file main.py]
-import ignored_module
-class Foo(ignored_module.BaseClass): pass
-
-[file ignored_module.py]
-class BaseClass: pass
-
-[out]
-tmp/main.py:2: error: Class cannot subclass 'BaseClass' (has type 'Any')
-
-[case testWarnNoReturnIgnoresTrivialFunctions]
-# flags: --warn-no-return
-def f() -> int:
- pass
-def g() -> int:
- ...
-def h() -> int:
- """with docstring"""
- pass
-def i() -> int:
- """with docstring"""
- ...
-def j() -> int:
- u"""with unicode docstring"""
- pass
-def k() -> int:
- """docstring only"""
-
-[case testWarnNoReturnWorksWithAlwaysTrue]
-# flags: --warn-no-return
-PY3 = True
-def f() -> int:
- if PY3:
- return 0
- else:
- return 0
-[builtins fixtures/bool.pyi]
-
-[case testWarnNoReturnWorksWithAlwaysFalse]
-# flags: --warn-no-return
-PY2 = False
-def f() -> int:
- if PY2:
- return 0
- else:
- return 0
-[builtins fixtures/bool.pyi]
-
-[case testWarnNoReturnWorksWithMypyTrue]
-# flags: --warn-no-return
-MYPY = False
-def f() -> int:
- if MYPY:
- return 0
- else:
- return 0
-[builtins fixtures/bool.pyi]
-
-[case testShowErrorContextFunction]
-# flags: --show-error-context
-def f() -> None:
- 0 + ""
-[out]
-main: note: In function "f":
-main:3: error: Unsupported operand types for + ("int" and "str")
-
-[case testShowErrorContextClass]
-# flags: --show-error-context
-class A:
- 0 + ""
-[out]
-main: note: In class "A":
-main:3: error: Unsupported operand types for + ("int" and "str")
-
-[case testShowErrorContextMember]
-# flags: --show-error-context
-class A:
- def f(self, x: int) -> None:
- self.f("")
-[out]
-main: note: In member "f" of class "A":
-main:4: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-
-[case testShowErrorContextModule]
-# flags: --show-error-context
-import m
-[file m.py]
-0 + ""
-[out]
-main:2: note: In module imported here:
-tmp/m.py:1: error: Unsupported operand types for + ("int" and "str")
-
-[case testShowErrorContextTopLevel]
-# flags: --show-error-context
-def f() -> None:
- 0 + ""
-0 + ""
-[out]
-main: note: In function "f":
-main:3: error: Unsupported operand types for + ("int" and "str")
-main: note: At top level:
-main:4: error: Unsupported operand types for + ("int" and "str")
-
-[case testShowErrorContextFromHere]
-# flags: --show-error-context
-import a
-[file a.py]
-import b
-[file b.py]
-0 + ""
-[out]
-tmp/a.py:1: note: In module imported here,
-main:2: note: ... from here:
-tmp/b.py:1: error: Unsupported operand types for + ("int" and "str")
-
-[case testFollowImportsNormal]
-# flags: --follow-imports=normal
-from mod import x
-x + ""
-[file mod.py]
-1 + ""
-x = 0
-[out]
-tmp/mod.py:1: error: Unsupported operand types for + ("int" and "str")
-main:3: error: Unsupported operand types for + ("int" and "str")
-
-[case testFollowImportsSilent]
-# flags: --follow-imports=silent
-from mod import x
-x + "" # E: Unsupported operand types for + ("int" and "str")
-[file mod.py]
-1 + ""
-x = 0
-
-[case testFollowImportsSkip]
-# flags: --follow-imports=skip
-from mod import x
-x + ""
-[file mod.py]
-this deliberate syntax error will not be reported
-[out]
-
-[case testFollowImportsError]
-# flags: --follow-imports=error
-from mod import x
-x + ""
-[file mod.py]
-deliberate syntax error
-[out]
-main:2: note: Import of 'mod' ignored
-main:2: note: (Using --follow-imports=error, module not passed on command line)
-
-[case testIgnoreMissingImportsFalse]
-from mod import x
-[out]
-main:1: error: Cannot find module named 'mod'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-
-[case testIgnoreMissingImportsTrue]
-# flags: --ignore-missing-imports
-from mod import x
-[out]
-
-[case testStrictBoolean]
-# flags: --strict-boolean
-if True:
- pass
-if 'test': # E: Condition must be a boolean
- pass
-elif 1: # E: Condition must be a boolean
- pass
-
-def f() -> bool:
- return True
-
-if f: # E: Condition must be a boolean
- pass
-
-if f():
- pass
-
-class A:
- def __call__(self) -> bool:
- return False
-
-if A: # E: Condition must be a boolean
- pass
-
-if A(): # E: Condition must be a boolean
- pass
-
-if A()():
- pass
-[builtins fixtures/bool.pyi]
-
-[case testStrictBooleanTernary]
-# flags: --strict-boolean
-x = 1 if 'test' else 2 # E: Condition must be a boolean
-y = 1 if not 'test' else 2
-[builtins fixtures/bool.pyi]
-
-[case testStrictBooleanWhile]
-# flags: --strict-boolean
-while 5: # E: Condition must be a boolean
- pass
-
-while False:
- pass
-[builtins fixtures/bool.pyi]
-
-[case testStrictBooleanComplexTypes]
-# flags: --strict-boolean
-from typing import Any, Type, Union
-
-x = True # type: Any
-y = True # type: Union[bool, int]
-z = int # type: Type[int]
-
-if x:
- pass
-if y: # E: Condition must be a boolean
- pass
-if z: # E: Condition must be a boolean
- pass
-[builtins fixtures/bool.pyi]
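
For context on the removed flag tests: the --strict-boolean cases above reject any condition whose type is not bool. Below is a minimal standalone sketch of code that satisfies the flag; the module and names are illustrative, not part of the deleted test data, and the invocation would be along the lines of "mypy --strict-boolean example.py".

    from typing import Optional

    def describe(value: Optional[str]) -> str:
        # Under --strict-boolean a bare "if value:" is rejected with
        # "Condition must be a boolean", so the check is made explicit.
        if value is not None and len(value) > 0:
            return value
        return "<empty>"

    print(describe("hi"))    # hi
    print(describe(None))    # <empty>
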
diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test
deleted file mode 100644
index 5fb8932..0000000
--- a/test-data/unit/check-functions.test
+++ /dev/null
@@ -1,1666 +0,0 @@
--- Test cases for the type checker related to functions, function types and
--- calls.
-
--- See also check-varargs.test.
-
-
--- Callable type basics
--- --------------------
-
-
-[case testCallingVariableWithFunctionType]
-from typing import Callable
-f = None # type: Callable[[A], B]
-a, b = None, None # type: (A, B)
-a = f(a) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-b = f(b) # E: Argument 1 has incompatible type "B"; expected "A"
-b = f() # E: Too few arguments
-b = f(a, a) # E: Too many arguments
-b = f(a)
-
-class A: pass
-class B: pass
-
-[case testKeywordOnlyArgumentOrderInsensitivity]
-import typing
-
-class A(object):
- def f(self, *, a: int, b: str) -> None: pass
-
-class B(A):
- def f(self, *, b: str, a: int) -> None: pass
-
-class C(A):
- def f(self, *, b: int, a: str) -> None: pass # E: Signature of "f" incompatible with supertype "A"
-
-[case testPositionalOverridingArgumentNameInsensitivity]
-import typing
-
-class A(object):
- def f(self, a: int, b: str) -> None: pass
-
-class B(A):
- def f(self, b: str, a: int) -> None: pass # E: Argument 1 of "f" incompatible with supertype "A" # E: Argument 2 of "f" incompatible with supertype "A"
-
-class C(A):
- def f(self, foo: int, bar: str) -> None: pass
-
-
-[case testPositionalOverridingArgumentNamesCheckedWhenMismatchingPos]
-import typing
-
-class A(object):
- def f(self, a: int, b: str) -> None: pass
-
-class B(A):
- def f(self, b: int, a: str) -> None: pass # E: Signature of "f" incompatible with supertype "A"
-
-
-[case testSubtypingFunctionTypes]
-from typing import Callable
-
-class A: pass
-class B(A): pass
-
-f = None # type: Callable[[B], A]
-g = None # type: Callable[[A], A] # subtype of f
-h = None # type: Callable[[B], B] # subtype of f
-g = h # E: Incompatible types in assignment (expression has type Callable[[B], B], variable has type Callable[[A], A])
-h = f # E: Incompatible types in assignment (expression has type Callable[[B], A], variable has type Callable[[B], B])
-h = g # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[B], B])
-g = f # E: Incompatible types in assignment (expression has type Callable[[B], A], variable has type Callable[[A], A])
-f = g
-f = h
-f = f
-g = g
-h = h
-
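The assignments in testSubtypingFunctionTypes above follow the usual callable variance rule: argument types are contravariant and the return type is covariant. A small illustrative sketch (the class and function names here are invented):

    from typing import Callable

    class Animal: pass
    class Dog(Animal): pass

    def groom_any(a: Animal) -> Dog:
        return Dog()

    # A slot typed Callable[[Dog], Animal] accepts groom_any: it takes a
    # wider argument type and returns a narrower result type.
    handler = groom_any  # type: Callable[[Dog], Animal]
    handler(Dog())
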
-[case testSubtypingFunctionsDoubleCorrespondence]
-
-def l(x) -> None: ...
-def r(__, *, x) -> None: ...
-r = l # E: Incompatible types in assignment (expression has type Callable[[Any], None], variable has type Callable[[Any, NamedArg('x', Any)], None])
-
-[case testSubtypingFunctionsRequiredLeftArgNotPresent]
-
-def l(x, y) -> None: ...
-def r(x) -> None: ...
-r = l # E: Incompatible types in assignment (expression has type Callable[[Any, Any], None], variable has type Callable[[Any], None])
-
-[case testSubtypingFunctionsImplicitNames]
-
-def f(a, b): pass
-def g(c: Any, d: Any) -> Any: pass
-
-ff = f
-gg = g
-
-gg = f
-ff = g
-
-[case testSubtypingFunctionsDefaultsNames]
-from typing import Callable
-
-def f(a: int, b: str) -> None: pass
-f_nonames = None # type: Callable[[int, str], None]
-def g(a: int, b: str = "") -> None: pass
-def h(aa: int, b: str = "") -> None: pass
-
-ff_nonames = f_nonames
-ff = f
-gg = g
-hh = h
-
-ff = gg
-ff_nonames = ff
-ff_nonames = f_nonames # reset
-ff = ff_nonames # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[Arg('a', int), Arg('b', str)], None])
-ff = f # reset
-gg = ff # E: Incompatible types in assignment (expression has type Callable[[Arg('a', int), Arg('b', str)], None], variable has type Callable[[Arg('a', int), DefaultArg('b', str)], None])
-gg = hh # E: Incompatible types in assignment (expression has type Callable[[Arg('aa', int), DefaultArg('b', str)], None], variable has type Callable[[Arg('a', int), DefaultArg('b', str)], None])
-
-[case testSubtypingFunctionsArgsKwargs]
-from typing import Any, Callable
-
-def everything(*args: Any, **kwargs: Any) -> None: pass
-everywhere = None # type: Callable[..., None]
-
-def specific_1(a: int, b: str) -> None: pass
-def specific_2(a: int, *, b: str) -> None: pass
-
-ss_1 = specific_1
-ss_2 = specific_2
-ee_def = everything
-ee_var = everywhere
-
-ss_1 = ee_def
-ss_1 = specific_1
-ss_2 = ee_def
-ss_2 = specific_2
-ee_def = everywhere
-ee_def = everything
-ee_var = everything
-ee_var = everywhere
-
-ee_var = specific_1 # The difference between Callable[..., blah] and one with a *args: Any, **kwargs: Any is that the ... goes loosely both ways.
-ee_def = specific_1 # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[StarArg(Any), KwArg(Any)], None])
-
-[builtins fixtures/dict.pyi]
-
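The comment in testSubtypingFunctionsArgsKwargs above notes that Callable[..., X] is deliberately loose in both directions, while a concrete (*args: Any, **kwargs: Any) signature is not. A short sketch of that asymmetry (names are illustrative):

    from typing import Any, Callable

    def specific(a: int, b: str) -> None:
        print(a, b)

    def catch_all(*args: Any, **kwargs: Any) -> None:
        print(args, kwargs)

    anything = catch_all  # type: Callable[..., None]
    anything = specific    # accepted: Callable[..., None] matches any signature
    # catch_all = specific   # rejected: a (*args, **kwargs) callable promises to
    #                        # accept every call, which "specific" does not
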
-[case testLackOfNames]
-def f(__a: int, __b: str) -> None: pass
-def g(a: int, b: str) -> None: pass
-
-ff = f
-gg = g
-
-ff = g
-gg = f # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[Arg('a', int), Arg('b', str)], None])
-
-[case testLackOfNamesFastparse]
-# flags: --fast-parser
-
-def f(__a: int, __b: str) -> None: pass
-def g(a: int, b: str) -> None: pass
-
-ff = f
-gg = g
-
-ff = g
-gg = f # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[Arg('a', int), Arg('b', str)], None])
-
-[case testFunctionTypeCompatibilityWithOtherTypes]
-from typing import Callable
-f = None # type: Callable[[], None]
-a, o = None, None # type: (A, object)
-a = f # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type "A")
-f = a # E: Incompatible types in assignment (expression has type "A", variable has type Callable[[], None])
-f = o # E: Incompatible types in assignment (expression has type "object", variable has type Callable[[], None])
-f = f() # E: Function does not return a value
-
-f = f
-f = None
-o = f
-
-class A: pass
-
-[case testFunctionSubtypingWithVoid]
-from typing import Callable
-f = None # type: Callable[[], None]
-g = None # type: Callable[[], object]
-f = g # E: Incompatible types in assignment (expression has type Callable[[], object], variable has type Callable[[], None])
-g = f # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type Callable[[], object])
-
-f = f
-g = g
-
-[case testFunctionSubtypingWithMultipleArgs]
-from typing import Callable
-f = None # type: Callable[[A, A], None]
-g = None # type: Callable[[A, B], None]
-h = None # type: Callable[[B, B], None]
-f = g # E: Incompatible types in assignment (expression has type Callable[[A, B], None], variable has type Callable[[A, A], None])
-f = h # E: Incompatible types in assignment (expression has type Callable[[B, B], None], variable has type Callable[[A, A], None])
-g = h # E: Incompatible types in assignment (expression has type Callable[[B, B], None], variable has type Callable[[A, B], None])
-g = f
-h = f
-h = g
-f = f
-g = g
-h = h
-
-class A: pass
-class B(A): pass
-
-[case testFunctionTypesWithDifferentArgumentCounts]
-from typing import Callable
-f = None # type: Callable[[], None]
-g = None # type: Callable[[A], None]
-h = None # type: Callable[[A, A], None]
-
-f = g # E: Incompatible types in assignment (expression has type Callable[[A], None], variable has type Callable[[], None])
-f = h # E: Incompatible types in assignment (expression has type Callable[[A, A], None], variable has type Callable[[], None])
-h = f # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type Callable[[A, A], None])
-h = g # E: Incompatible types in assignment (expression has type Callable[[A], None], variable has type Callable[[A, A], None])
-
-f = f
-g = g
-h = h
-
-class A: pass
-[out]
-
-[case testCompatibilityOfSimpleTypeObjectWithStdType]
-
-t = None # type: type
-a = None # type: A
-
-a = A # E: Incompatible types in assignment (expression has type "A" (type object), variable has type "A")
-t = f # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type "type")
-t = A
-
-class A:
- def __init__(self, a: 'A') -> None: pass
-
-def f() -> None: pass
-
-[case testFunctionTypesWithOverloads]
-from typing import Callable, overload
-f = None # type: Callable[[AA], A]
-g = None # type: Callable[[B], B]
-h = None # type: Callable[[A], AA]
-
-h = i # E: Incompatible types in assignment (expression has type overloaded function, variable has type Callable[[A], AA])
-f = j
-
-f = i
-g = i
-g = j
-
-class A: pass
-class AA(A): pass
-
-class B: pass
-
-@overload
-def i(x: AA) -> A:
- pass
-@overload
-def i(x: B) -> B:
- pass
-
-@overload
-def j(x: B) -> B:
- pass
-@overload
-def j(x: A) -> AA:
- pass
-
-[case testOverloadWithThreeItems]
-from typing import Callable, overload
-g1 = None # type: Callable[[A], A]
-g2 = None # type: Callable[[B], B]
-g3 = None # type: Callable[[C], C]
-g4 = None # type: Callable[[A], B]
-a, b, c = None, None, None # type: (A, B, C)
-
-b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-b = f(c) # E: Incompatible types in assignment (expression has type "C", variable has type "B")
-g4 = f # E: Incompatible types in assignment (expression has type overloaded function, variable has type Callable[[A], B])
-
-g1 = f
-g2 = f
-g3 = f
-a = f(a)
-b = f(b)
-c = f(c)
-
-class A: pass
-class B: pass
-class C: pass
-
-@overload
-def f(x: A) -> A: pass
-@overload
-def f(x: B) -> B: pass
-@overload
-def f(x: C) -> C: pass
-
-[case testInferConstraintsUnequalLengths]
-from typing import Any, Callable, List
-def f(fields: List[Callable[[Any], Any]]): pass
-class C: pass
-f([C]) # E: List item 0 has incompatible type
-class D:
- def __init__(self, a, b): pass
-f([D]) # E: List item 0 has incompatible type
-[builtins fixtures/list.pyi]
-
--- Default argument values
--- -----------------------
-
-
-[case testCallingFunctionsWithDefaultArgumentValues]
-
-a, b = None, None # type: (A, B)
-a = f() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-b = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
-b = f(a, a) # E: Too many arguments for "f"
-
-b = f()
-b = f(a)
-b = f(AA())
-
-def f(x: 'A' = None) -> 'B': pass
-
-class A: pass
-class AA(A): pass
-class B: pass
-
-[case testDefaultArgumentExpressions]
-import typing
-def f(x: 'A' = A()) -> None:
- b = x # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- a = x # type: A
-
-class B: pass
-class A: pass
-[out]
-
-[case testDefaultArgumentExpressions2]
-import typing
-def f(x: 'A' = B()) -> None: # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- b = x # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- a = x # type: A
-
-class B: pass
-class A: pass
-[out]
-
-[case testDefaultArgumentsWithSubtypes]
-import typing
-def f(x: 'B' = A()) -> None: # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- pass
-def g(x: 'A' = B()) -> None:
- pass
-
-class A: pass
-class B(A): pass
-[out]
-
-[case testMultipleDefaultArgumentExpressions]
-import typing
-def f(x: 'A' = B(), y: 'B' = B()) -> None: # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- pass
-def h(x: 'A' = A(), y: 'B' = B()) -> None:
- pass
-
-class A: pass
-class B: pass
-[out]
-
-[case testMultipleDefaultArgumentExpressions2]
-import typing
-def g(x: 'A' = A(), y: 'B' = A()) -> None: # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- pass
-
-class A: pass
-class B: pass
-[out]
-
-[case testDefaultArgumentsAndSignatureAsComment]
-import typing
-def f(x = 1): # type: (int) -> str
- pass
-f()
-f(1)
-f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-
-[case testMethodDefaultArgumentsAndSignatureAsComment]
-import typing
-class A:
- def f(self, x = 1): # type: (int) -> str
- pass
-A().f()
-A().f(1)
-A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-
-
--- Access to method defined as a data attribute
--- --------------------------------------------
-
-
-[case testMethodAsDataAttribute]
-from typing import Any, Callable
-class B: pass
-x = None # type: Any
-class A:
- f = x # type: Callable[[A], None]
- g = x # type: Callable[[A, B], None]
-a = None # type: A
-a.f()
-a.g(B())
-a.f(a) # E: Too many arguments
-a.g() # E: Too few arguments
-
-[case testMethodWithInvalidMethodAsDataAttribute]
-from typing import Any, Callable
-class B: pass
-x = None # type: Any
-class A:
- f = x # type: Callable[[], None]
- g = x # type: Callable[[B], None]
-a = None # type: A
-a.f() # E: Invalid method type
-a.g() # E: Invalid method type
-
-[case testMethodWithDynamicallyTypedMethodAsDataAttribute]
-from typing import Any, Callable
-class B: pass
-x = None # type: Any
-class A:
- f = x # type: Callable[[Any], Any]
-a = None # type: A
-a.f()
-a.f(a) # E: Too many arguments
-
-[case testOverloadedMethodAsDataAttribute]
-from typing import overload
-class B: pass
-class A:
- @overload
- def f(self) -> None: pass
- @overload
- def f(self, b: B) -> None: pass
- g = f
-a = None # type: A
-a.g()
-a.g(B())
-a.g(a) # E: No overload variant matches argument types [__main__.A]
-
-[case testMethodAsDataAttributeInferredFromDynamicallyTypedMethod]
-
-class A:
- def f(self, x): pass
- g = f
-a = None # type: A
-a.g(object())
-a.g(a, a) # E: Too many arguments
-a.g() # E: Too few arguments
-
-[case testMethodAsDataAttributeInGenericClass]
-from typing import TypeVar, Generic
-t = TypeVar('t')
-class B: pass
-class A(Generic[t]):
- def f(self, x: t) -> None: pass
- g = f
-a = None # type: A[B]
-a.g(B())
-a.g(a) # E: Argument 1 has incompatible type A[B]; expected "B"
-
-[case testInvalidMethodAsDataAttributeInGenericClass]
-from typing import Any, TypeVar, Generic, Callable
-t = TypeVar('t')
-class B: pass
-class C: pass
-x = None # type: Any
-class A(Generic[t]):
- f = x # type: Callable[[A[B]], None]
-ab = None # type: A[B]
-ac = None # type: A[C]
-ab.f()
-ac.f() # E: Invalid method type
-
-[case testPartiallyTypedSelfInMethodDataAttribute]
-from typing import Any, TypeVar, Generic, Callable
-t = TypeVar('t')
-class B: pass
-class C: pass
-x = None # type: Any
-class A(Generic[t]):
- f = x # type: Callable[[A], None]
-ab = None # type: A[B]
-ac = None # type: A[C]
-ab.f()
-ac.f()
-
-[case testCallableDataAttribute]
-from typing import Callable
-class A:
- g = None # type: Callable[[A], None]
- def __init__(self, f: Callable[[], None]) -> None:
- self.f = f
-a = A(None)
-a.f()
-a.g()
-a.f(a) # E: Too many arguments
-a.g(a) # E: Too many arguments
-
-
--- Nested functions
--- ----------------
-
-
-[case testSimpleNestedFunction]
-import typing
-def f(a: 'A') -> None:
- def g(b: 'B') -> None:
- b = a # fail
- aa = a # type: A # ok
- b = B()
- g(a) # fail
- g(B())
-class A: pass
-class B: pass
-[out]
-main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-main:7: error: Argument 1 to "g" has incompatible type "A"; expected "B"
-
-[case testReturnAndNestedFunction]
-import typing
-def f() -> 'A':
- def g() -> 'B':
- return A() # fail
- return B()
- return B() # fail
- return A()
-class A: pass
-class B: pass
-[out]
-main:4: error: Incompatible return value type (got "A", expected "B")
-main:6: error: Incompatible return value type (got "B", expected "A")
-
-[case testDynamicallyTypedNestedFunction]
-import typing
-def f(x: object) -> None:
- def g(y):
- pass
- g() # E: Too few arguments for "g"
- g(x)
-[out]
-
-[case testNestedFunctionInMethod]
-import typing
-class A:
- def f(self) -> None:
- def g(x: int) -> None:
- y = x # type: int
- a = x # type: A # fail
- g(2)
- g(A()) # fail
-[out]
-main:6: error: Incompatible types in assignment (expression has type "int", variable has type "A")
-main:8: error: Argument 1 to "g" has incompatible type "A"; expected "int"
-
-[case testMutuallyRecursiveNestedFunctions]
-def f() -> None:
- def g() -> None:
- h(1)
- h('') # E
- def h(x: int) -> None:
- g()
- g(1) # E
-[out]
-main:4: error: Argument 1 to "h" has incompatible type "str"; expected "int"
-main:7: error: Too many arguments for "g"
-
-[case testMutuallyRecursiveDecoratedFunctions]
-from typing import Callable, Any
-def dec(f) -> Callable[..., Any]: pass
-def f() -> None:
- @dec
- def g() -> None:
- h()
- h.x # E
- @dec
- def h(x: int) -> None:
- g(1)
- g.x # E
-[out]
-main:7: error: Callable[..., Any] has no attribute "x"
-main:11: error: Callable[..., Any] has no attribute "x"
-
-[case testNestedGenericFunctions]
-from typing import TypeVar
-T = TypeVar('T')
-U = TypeVar('U')
-
-def outer(x: T) -> T:
- def inner(y: U) -> T: ...
- return inner(1)
-
-
--- Casts
--- -----
-
-
-[case testCastsToAndFromFunctionTypes]
-from typing import TypeVar, Callable, Any, cast
-t = TypeVar('t')
-def f(x: t,
- f1: Callable[[], None],
- f2: Callable[[Any], None], o: object) -> None:
- x = cast(t, f1)
- f1 = cast(Callable[[], None], x)
- f1 = cast(Callable[[], None], f2)
- f1 = cast(Callable[[], None], o)
-
-
--- Function decorators
--- -------------------
-
-
-[case testTrivialStaticallyTypedFunctionDecorator]
-from typing import TypeVar
-t = TypeVar('t')
-def dec(f: t) -> t:
- return f
-@dec
-def f(x: int) -> None: pass
-f(1)
-f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-
-[case testTrivialStaticallyTypedMethodDecorator]
-from typing import TypeVar
-t = TypeVar('t')
-def dec(f: t) -> t:
- return f
-class A:
- @dec
- def f(self, x: int) -> None: pass
-A().f(1)
-A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-class B: pass
-
-[case testTrivialDecoratedNestedFunction]
-from typing import TypeVar
-t = TypeVar('t')
-def dec(f: t) -> t:
- return f
-def g() -> None:
- @dec
- def f(x: int) -> None: pass
- f(1)
- f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-[out]
-
-[case testCheckingDecoratedFunction]
-import typing
-def dec(f): pass
-@dec
-def f(x: 'A') -> None:
- a = x # type: A
- x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-class A: pass
-[out]
-
-[case testDecoratorThatSwitchesType]
-from typing import Callable
-def dec(x) -> Callable[[], None]: pass
-@dec
-def f(y): pass
-f()
-f(None) # E: Too many arguments for "f"
-
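testDecoratorThatSwitchesType above checks that a decorated name takes on the decorator's declared return type. A runnable sketch of the same pattern (decorator and function names are invented):

    from typing import Callable

    def to_thunk(func) -> Callable[[], None]:
        def wrapper() -> None:
            func("world")
        return wrapper

    @to_thunk
    def greet(name):
        print("hello", name)

    greet()         # fine: mypy now sees "greet" as Callable[[], None]
    # greet("x")    # mypy: Too many arguments
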
-[case testDecoratorThatSwitchesTypeWithMethod]
-from typing import Any, Callable
-def dec(x) -> Callable[[Any], None]: pass
-class A:
- @dec
- def f(self, a, b, c): pass
-a = None # type: A
-a.f()
-a.f(None) # E: Too many arguments for "f" of "A"
-
-[case testNestedDecorators]
-from typing import Any, Callable
-def dec1(f: Callable[[Any], None]) -> Callable[[], None]: pass
-def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass
-@dec1
-@dec2
-def f(x, y): pass
-f()
-f(None) # E: Too many arguments for "f"
-
-[case testInvalidDecorator1]
-from typing import Any, Callable
-def dec1(f: Callable[[Any], None]) -> Callable[[], None]: pass
-def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass
-@dec1 # E: Argument 1 to "dec2" has incompatible type Callable[[Any], Any]; expected Callable[[Any, Any], None]
-@dec2
-def f(x): pass
-
-[case testInvalidDecorator2]
-from typing import Any, Callable
-def dec1(f: Callable[[Any, Any], None]) -> Callable[[], None]: pass
-def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass
-@dec1 # E: Argument 1 to "dec1" has incompatible type Callable[[Any], None]; expected Callable[[Any, Any], None]
-@dec2
-def f(x, y): pass
-
-[case testNoTypeCheckDecoratorOnMethod1]
-from typing import no_type_check
-
-@no_type_check
-def foo(x: 'bar', y: {'x': 4}) -> 42:
- 1 + 'x'
-
-[case testNoTypeCheckDecoratorOnMethod2]
-import typing
-
-@typing.no_type_check
-def foo(x: 's', y: {'x': 4}) -> 42:
- 1 + 'x'
-
-@typing.no_type_check
-def bar() -> None:
- 1 + 'x'
-
-[case testCallingNoTypeCheckFunction]
-import typing
-
-@typing.no_type_check
-def foo(x: {1:2}) -> [1]:
- 1 + 'x'
-
-foo()
-foo(1, 'b')
-
-[case testCallingNoTypeCheckFunction2]
-import typing
-
-def f() -> None:
- foo()
-
-@typing.no_type_check
-def foo(x: {1:2}) -> [1]:
- 1 + 'x'
-
-[case testNoTypeCheckDecoratorSemanticError]
-import typing
-
-@typing.no_type_check
-def foo(x: {1:2}) -> [1]:
- x = y
-
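The no_type_check cases above confirm that mypy skips both the annotations and the body of a decorated function. A tiny illustrative example (the function is invented):

    from typing import no_type_check

    @no_type_check
    def legacy(x: "not a real type") -> "whatever":
        # Neither the bogus annotations nor the body are type checked.
        return x + 1

    print(legacy(41))  # 42
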
-
--- Forward references to decorated functions
--- -----------------------------------------
-
-
-[case testForwardReferenceToDynamicallyTypedDecorator]
-def f(self) -> None:
- g()
- g(1)
-
-def dec(f):
- return f
-
-@dec
-def g():
- pass
-
-[case testForwardReferenceToDecoratorWithAnyReturn]
-from typing import Any
-
-def f(self) -> None:
- g()
- g(1)
-
-def dec(f) -> Any:
- return f
-
-@dec
-def g():
- pass
-
-[case testForwardReferenceToDecoratorWithIdentityMapping]
-from typing import TypeVar
-
-def f(self) -> None:
- g()
- g(1) # E: Too many arguments for "g"
- h(1).x # E: "str" has no attribute "x"
- h('') # E: Argument 1 to "h" has incompatible type "str"; expected "int"
-
-T = TypeVar('T')
-def dec(f: T) -> T:
- return f
-
-@dec
-def g(): pass
-@dec
-def h(x: int) -> str: pass
-[out]
-
-[case testForwardReferenceToDynamicallyTypedDecoratedMethod]
-def f(self) -> None:
- A().f(1).y
- A().f()
-
-class A:
- @dec
- def f(self, x): pass
-
-def dec(f): return f
-[builtins fixtures/staticmethod.pyi]
-
-[case testForwardReferenceToStaticallyTypedDecoratedMethod]
-from typing import TypeVar
-
-def f(self) -> None:
- A().f(1).y # E: "str" has no attribute "y"
- A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-
-class A:
- @dec
- def f(self, a: int) -> str: return ''
-
-T = TypeVar('T')
-def dec(f: T) -> T: return f
-[builtins fixtures/staticmethod.pyi]
-[out]
-
-[case testForwardReferenceToDynamicallyTypedProperty]
-def f(self) -> None:
- A().x.y
-
-class A:
- @property
- def x(self): pass
-[builtins fixtures/property.pyi]
-
-[case testForwardReferenceToStaticallyTypedProperty]
-def f(self) -> None:
- A().x.y # E: "int" has no attribute "y"
-
-class A:
- @property
- def x(self) -> int: return 1
-[builtins fixtures/property.pyi]
-[out]
-
-[case testForwardReferenceToDynamicallyTypedStaticMethod]
-def f(self) -> None:
- A.x(1).y
- A.x() # E: Too few arguments for "x"
-
-class A:
- @staticmethod
- def x(x): pass
-[builtins fixtures/staticmethod.pyi]
-[out]
-
-[case testForwardReferenceToStaticallyTypedStaticMethod]
-def f(self) -> None:
- A.x(1).y # E: "str" has no attribute "y"
- A.x('') # E: Argument 1 to "x" of "A" has incompatible type "str"; expected "int"
-
-class A:
- @staticmethod
- def x(a: int) -> str: return ''
-[builtins fixtures/staticmethod.pyi]
-[out]
-
-[case testForwardReferenceToDynamicallyTypedClassMethod]
-def f(self) -> None:
- A.x(1).y
- A.x() # E: Too few arguments for "x"
-
-class A:
- @classmethod
- def x(cls, a): pass
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testForwardReferenceToStaticallyTypedClassMethod]
-def f(self) -> None:
- A.x(1).y # E: "str" has no attribute "y"
- A.x('') # E: Argument 1 to "x" of "A" has incompatible type "str"; expected "int"
-
-class A:
- @classmethod
- def x(cls, x: int) -> str: return ''
-[builtins fixtures/classmethod.pyi]
-[out]
-
-[case testForwardReferenceToDecoratedFunctionUsingMemberExpr]
-import m
-
-def f(self) -> None:
- g(1).x # E: "str" has no attribute "x"
-
-@m.dec
-def g(x: int) -> str: pass
-[file m.py]
-from typing import TypeVar
-T = TypeVar('T')
-def dec(f: T) -> T:
- return f
-[out]
-
-[case testForwardReferenceToFunctionWithMultipleDecorators]
-def f(self) -> None:
- g()
- g(1)
-
-def dec(f):
- return f
-
-@dec
-@dec2
-def g():
- pass
-
-def dec2(f):
- return f
-
-[case testForwardReferenceToDynamicallyTypedDecoratedStaticMethod]
-def f(self) -> None:
- A().f(1).y
- A().f()
- A().g(1).y
- A().g()
-
-class A:
- @dec
- @staticmethod
- def f(self, x): pass
- @staticmethod
- @dec
- def g(self, x): pass
-
-def dec(f): return f
-[builtins fixtures/staticmethod.pyi]
-
-[case testForwardReferenceToDecoratedFunctionWithCallExpressionDecorator]
-def f(self) -> None:
- g()
- g(1)
-
-@dec(1)
-def g(): pass
-
-def dec(f): pass
-
-
--- Decorator functions in import cycles
--- ------------------------------------
-
-
-[case testDecoratorWithIdentityTypeInImportCycle]
-import a
-
-[file a.py]
-import b
-from d import dec
-@dec
-def f(x: int) -> None: pass
-b.g(1) # E
-
-[file b.py]
-import a
-from d import dec
-@dec
-def g(x: str) -> None: pass
-a.f('')
-
-[file d.py]
-from typing import TypeVar
-T = TypeVar('T')
-def dec(f: T) -> T: return f
-
-[out]
-tmp/b.py:5: error: Argument 1 to "f" has incompatible type "str"; expected "int"
-tmp/a.py:5: error: Argument 1 to "g" has incompatible type "int"; expected "str"
-
-[case testDecoratorWithNoAnnotationInImportCycle]
-import a
-
-[file a.py]
-import b
-from d import dec
-@dec
-def f(x: int) -> None: pass
-b.g(1, z=4)
-
-[file b.py]
-import a
-from d import dec
-@dec
-def g(x: str) -> None: pass
-a.f('', y=2)
-
-[file d.py]
-def dec(f): return f
-
-[case testDecoratorWithFixedReturnTypeInImportCycle]
-import a
-
-[file a.py]
-import b
-from d import dec
-@dec
-def f(x: int) -> str: pass
-b.g(1)()
-
-[file b.py]
-import a
-from d import dec
-@dec
-def g(x: int) -> str: pass
-a.f(1)()
-
-[file d.py]
-from typing import Callable
-def dec(f: Callable[[int], str]) -> Callable[[int], str]: return f
-
-[out]
-tmp/b.py:5: error: "str" not callable
-tmp/a.py:5: error: "str" not callable
-
-[case testDecoratorWithCallAndFixedReturnTypeInImportCycle]
-import a
-
-[file a.py]
-import b
-from d import dec
-@dec()
-def f(x: int) -> str: pass
-b.g(1)()
-
-[file b.py]
-import a
-from d import dec
-@dec()
-def g(x: int) -> str: pass
-a.f(1)()
-
-[file d.py]
-from typing import Callable
-def dec() -> Callable[[Callable[[int], str]], Callable[[int], str]]: pass
-
-[out]
-tmp/b.py:5: error: "str" not callable
-tmp/a.py:5: error: "str" not callable
-
-[case testDecoratorWithCallAndFixedReturnTypeInImportCycleAndDecoratorArgs]
-import a
-
-[file a.py]
-import b
-from d import dec
-@dec(1)
-def f(x: int) -> str: pass
-b.g(1)()
-
-[file b.py]
-import a
-from d import dec
-@dec(1)
-def g(x: int) -> str: pass
-a.f(1)()
-
-[file d.py]
-from typing import Callable
-def dec(x: str) -> Callable[[Callable[[int], str]], Callable[[int], str]]: pass
-
-[out]
-tmp/b.py:3: error: Argument 1 to "dec" has incompatible type "int"; expected "str"
-tmp/b.py:5: error: "str" not callable
-tmp/a.py:3: error: Argument 1 to "dec" has incompatible type "int"; expected "str"
-tmp/a.py:5: error: "str" not callable
-
-[case testUndefinedDecoratorInImportCycle]
-# cmd: mypy -m foo.base
-[file foo/__init__.py]
-import foo.base
-class Derived(foo.base.Base):
- def method(self) -> None: pass
-[file foo/base.py]
-import foo
-class Base:
- @decorator
- def method(self) -> None: pass
-[out]
-tmp/foo/base.py:3: error: Name 'decorator' is not defined
-
-
--- Conditional function definition
--- -------------------------------
-
-
-[case testTypeCheckBodyOfConditionalFunction]
-from typing import Any
-x = None # type: Any
-if x:
- def f(x: int) -> None:
- x = 1
- x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-[out]
-
-[case testCallConditionalFunction]
-from typing import Any
-x = None # type: Any
-if x:
- def f(x: int) -> None: pass
- f(1)
- f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-f(1)
-f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-
-[case testConditionalFunctionDefinitionWithIfElse]
-from typing import Any
-x = None # type: Any
-if x:
- def f(x: int) -> None:
- x = 'x' # fail
- x = 1
-else:
- def f(x: int) -> None:
- x + 'x' # fail
- x = 1
-f(1)
-f('x') # fail
-[out]
-main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-main:9: error: Unsupported operand types for + ("int" and "str")
-main:12: error: Argument 1 to "f" has incompatible type "str"; expected "int"
-
-[case testNestedConditionalFunctionDefinitionWithIfElse]
-from typing import Any
-x = None # type: Any
-def top() -> None:
- if x:
- def f(x: int) -> None:
- x = 'x' # fail
- x = 1
- else:
- def f(x: int) -> None:
- x + 'x' # fail
- x = 1
- f(1)
- f('x') # fail
-[out]
-main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-main:10: error: Unsupported operand types for + ("int" and "str")
-main:13: error: Argument 1 to "f" has incompatible type "str"; expected "int"
-
-[case testUnconditionalRedefinitionOfConditionalFunction]
-from typing import Any
-x = None # type: Any
-if x:
- def f(): pass
-def f(): pass # E: Name 'f' already defined
-
-[case testIncompatibleConditionalFunctionDefinition]
-from typing import Any
-x = None # type: Any
-if x:
- def f(x: int) -> None: pass
-else:
- def f(x): pass # E: All conditional function variants must have identical signatures
-
-[case testIncompatibleConditionalFunctionDefinition2]
-from typing import Any
-x = None # type: Any
-if x:
- def f(x: int) -> None: pass
-else:
- def f(y: int) -> None: pass # E: All conditional function variants must have identical signatures
-
-[case testIncompatibleConditionalFunctionDefinition3]
-from typing import Any
-x = None # type: Any
-if x:
- def f(x: int) -> None: pass
-else:
- def f(x: int = 0) -> None: pass # E: All conditional function variants must have identical signatures
-
-[case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition1]
-from typing import Any
-def f(x: str) -> None: pass
-x = None # type: Any
-if x:
- def f(x: int) -> None: pass # E: All conditional function variants must have identical signatures
-
-[case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition2]
-from typing import Any
-def f(x: int) -> None: pass # N: "f" defined here
-x = None # type: Any
-if x:
- def f(y: int) -> None: pass # E: All conditional function variants must have identical signatures
-f(x=1) # The first definition takes precedence.
-f(y=1) # E: Unexpected keyword argument "y" for "f"
-
-[case testRedefineFunctionDefinedAsVariable]
-def g(): pass
-f = g
-if g():
- def f(): pass
-f()
-f(1) # E: Too many arguments
-
-[case testRedefineFunctionDefinedAsVariableInitializedToNone]
-def g(): pass
-f = None
-if g():
- def f(): pass
-f()
-f(1) # E: Too many arguments for "f"
-
-[case testRedefineNestedFunctionDefinedAsVariableInitializedToNone]
-def g() -> None:
- f = None
- if object():
- def f(x: int) -> None: pass
- f() # E: Too few arguments for "f"
- f(1)
- f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-[out]
-
-[case testRedefineFunctionDefinedAsVariableWithInvalidSignature]
-def g(): pass
-f = g
-if g():
- def f(x): pass # E: Incompatible redefinition (redefinition with type Callable[[Any], Any], original type Callable[[], Any])
-
-[case testRedefineFunctionDefinedAsVariableWithVariance1]
-class B: pass
-class C(B): pass
-def g(x: C) -> B: pass
-f = g
-if g(C()):
- def f(x: C) -> C: pass
-
-[case testRedefineFunctionDefinedAsVariableWithVariance2]
-class B: pass
-class C(B): pass
-def g(x: C) -> B: pass
-f = g
-if g(C()):
- def f(x: B) -> B: pass
-
-[case testRedefineFunctionDefinedAsVariableInitializedToEmptyList]
-f = [] # E: Need type annotation for variable
-if object():
- def f(): pass # E: Incompatible redefinition
-f()
-f(1)
-[builtins fixtures/list.pyi]
-
-
--- Conditional method definition
--- -----------------------------
-
-
-[case testTypeCheckBodyOfConditionalMethod]
-from typing import Any
-x = None # type: Any
-class A:
- if x:
- def f(self, x: int) -> None:
- x = 1
- x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-[out]
-
-[case testCallConditionalMethodInClassBody]
-from typing import Any
-x = None # type: Any
-class A:
- if x:
- def f(self, x: int) -> None: pass
- f(x, 1)
- f(x, 'x') # E: Argument 2 to "f" of "A" has incompatible type "str"; expected "int"
- f(x, 1)
- f(x, 'x') # E: Argument 2 to "f" of "A" has incompatible type "str"; expected "int"
-[out]
-
-[case testCallConditionalMethodViaInstance]
-from typing import Any
-x = None # type: Any
-class A:
- if x:
- def f(self, x: int) -> None: pass
-A().f(1)
-A().f('x') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-
-[case testConditionalMethodDefinitionWithIfElse]
-from typing import Any
-x = None # type: Any
-class A:
- if x:
- def f(self, x: int) -> None:
- x = 'x' # fail
- x = 1
- else:
- def f(self, x: int) -> None:
- x + 'x' # fail
- x = 1
-A().f(1)
-A().f('x') # fail
-[out]
-main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-main:10: error: Unsupported operand types for + ("int" and "str")
-main:13: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-
-[case testUnconditionalRedefinitionOfConditionalMethod]
-from typing import Any
-x = None # type: Any
-class A:
- if x:
- def f(self): pass
- def f(self): pass # E: Name 'f' already defined
-
-[case testIncompatibleConditionalMethodDefinition]
-from typing import Any
-x = None # type: Any
-class A:
- if x:
- def f(self, x: int) -> None: pass
- else:
- def f(self, x): pass # E: All conditional function variants must have identical signatures
-[out]
-
-[case testConditionalFunctionDefinitionInTry]
-import typing
-try:
- def f(x: int) -> None: pass
-except:
- def g(x: str) -> None: pass
-f(1)
-f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-g('x')
-g(1) # E: Argument 1 to "g" has incompatible type "int"; expected "str"
-
-
--- Callable[..., T]
--- ----------------
-
-
-[case testCallableWithArbitraryArgs]
-from typing import Callable
-def f(x: Callable[..., int]) -> None:
- x()
- x(1)
- x(z=1)
- x() + '' # E: Unsupported operand types for + ("int" and "str")
-[out]
-
-[case testCallableWithArbitraryArgs2]
-from typing import Callable
-def f(x: Callable[..., int]) -> None:
- x(*[1], **{'x': 2})
-[builtins fixtures/dict.pyi]
-
-[case testCastWithCallableAndArbitraryArgs]
-from typing import Callable, cast
-f = cast(Callable[..., int], None)
-f(x=4) + '' # E: Unsupported operand types for + ("int" and "str")
-
-[case testCallableWithArbitraryArgsInErrorMessage]
-from typing import Callable
-def f(x: Callable[..., int]) -> None:
- x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[..., int])
-[out]
-
-[case testCallableWithArbitraryArgsInGenericFunction]
-from typing import Callable, TypeVar
-T = TypeVar('T')
-def f(x: Callable[..., T]) -> T: pass
-def g(*x: int) -> str: pass
-x = f(g)
-x + 1 # E: Unsupported left operand type for + ("str")
-[builtins fixtures/list.pyi]
-
-[case testCallableWithArbitraryArgsSubtyping]
-from typing import Callable
-def f(x: Callable[..., int]) -> None: pass
-def g1(): pass
-def g2(x, y) -> int: pass
-def g3(*, y: str) -> int: pass
-def g4(*, y: int) -> str: pass
-f(g1)
-f(g2)
-f(g3)
-f(g4) # E: Argument 1 to "f" has incompatible type Callable[[NamedArg('y', int)], str]; expected Callable[..., int]
-
-[case testCallableWithArbitraryArgsSubtypingWithGenericFunc]
-from typing import Callable, TypeVar
-T = TypeVar('T')
-def f(x: Callable[..., int]) -> None: pass
-def g1(x: T) -> int: pass
-def g2(*x: T) -> int: pass
-def g3(*x: T) -> T: pass
-f(g1)
-f(g2)
-f(g3)
-
--- (...) -> T
--- ----------------
-[case testEllipsisWithArbitraryArgsOnBareFunction]
-def f(x, y, z): # type: (...) -> None
- pass
-f(1, "hello", [])
-f(x=1, y="hello", z=[])
-[builtins fixtures/dict.pyi]
-
-[case testEllipsisWithArbitraryArgsOnBareFunctionWithDefaults]
-def f(x, y=1, z="hey"): # type: (...) -> None
- pass
-f(1, "hello", [])
-f(x=1, y="hello", z=[])
-[builtins fixtures/dict.pyi]
-
-[case testEllipsisWithArbitraryArgsOnBareFunctionWithKwargs]
-from typing import Dict
-def f(x, **kwargs): # type: (...) -> None
- success_dict_type = kwargs # type: Dict[str, str]
- failure_dict_type = kwargs # type: Dict[int, str] # E: Incompatible types in assignment (expression has type Dict[str, Any], variable has type Dict[int, str])
-f(1, thing_in_kwargs=["hey"])
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testEllipsisWithArbitraryArgsOnBareFunctionWithVarargs]
-from typing import Tuple, Any
-def f(x, *args): # type: (...) -> None
- success_tuple_type = args # type: Tuple[Any, ...]
- fail_tuple_type = args # type: None # E: Incompatible types in assignment (expression has type Tuple[Any, ...], variable has type None)
-f(1, "hello")
-[builtins fixtures/tuple.pyi]
-[out]
-
-[case testEllipsisWithArbitraryArgsOnInstanceMethod]
-class A:
- def f(self, x, y, z): # type: (...) -> None
- pass
-
-[case testEllipsisWithArbitraryArgsOnClassMethod]
-class A:
- @classmethod
- def f(cls, x, y, z): # type: (...) -> None
- pass
-[builtins fixtures/classmethod.pyi]
-
-[case testEllipsisWithArbitraryArgsOnStaticMethod]
-class A:
- @staticmethod
- def f(x, y, z): # type: (...) -> None
- pass
-[builtins fixtures/staticmethod.pyi]
-
-[case testEllipsisWithSomethingAfterItFails]
-def f(x, y, z): # type: (..., int) -> None
- pass
-[out]
-main:1: error: Ellipses cannot accompany other argument types in function type signature.
-
-[case testEllipsisWithSomethingBeforeItFails]
-def f(x, y, z): # type: (int, ...) -> None
- pass
-[out]
-main:1: error: Ellipses cannot accompany other argument types in function type signature.
-
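The two failing cases above show that a bare "..." in a function type comment cannot be mixed with concrete argument types. For contrast, a valid use of the form (function name invented):

    def log(*args, **kwargs):
        # type: (...) -> None
        # With "(...)" the argument types are left as Any; only the
        # return type is checked.
        print(args, kwargs)

    log(1, "two", extra=[3])
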
-[case testRejectCovariantArgument]
-from typing import TypeVar, Generic
-
-t = TypeVar('t', covariant=True)
-class A(Generic[t]):
- def foo(self, x: t) -> None:
- return None
-[builtins fixtures/bool.pyi]
-[out]
-main:5: error: Cannot use a covariant type variable as a parameter
-
-[case testRejectContravariantReturnType]
-from typing import TypeVar, Generic
-
-t = TypeVar('t', contravariant=True)
-class A(Generic[t]):
- def foo(self) -> t:
- return None
-[builtins fixtures/bool.pyi]
-[out]
-main:5: error: Cannot use a contravariant type variable as return type
-
-[case testAcceptCovariantReturnType]
-from typing import TypeVar, Generic
-
-t = TypeVar('t', covariant=True)
-class A(Generic[t]):
- def foo(self) -> t:
- return None
-[builtins fixtures/bool.pyi]
-[case testAcceptContravariantArgument]
-from typing import TypeVar, Generic
-
-t = TypeVar('t', contravariant=True)
-class A(Generic[t]):
- def foo(self, x: t) -> None:
- return None
-[builtins fixtures/bool.pyi]
-
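The four variance cases above encode the rule that a covariant type variable may only appear in return position and a contravariant one only in argument position. A sketch of the read-only container pattern this allows (Box, Fruit, and Apple are invented names):

    from typing import Generic, TypeVar

    T_co = TypeVar('T_co', covariant=True)

    class Box(Generic[T_co]):
        def __init__(self, item: T_co) -> None:
            self._item = item
        def get(self) -> T_co:   # return position: allowed
            return self._item

    class Fruit: pass
    class Apple(Fruit): pass

    apples = Box(Apple())  # type: Box[Apple]
    # Covariance makes Box[Apple] a subtype of Box[Fruit]:
    fruits = apples        # type: Box[Fruit]
    print(fruits.get())
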
-
--- Redefining functions
--- --------------------
-
-
-[case testRedefineFunction]
-def f(x) -> Any: pass
-def g(x, y): pass
-def h(x): pass
-def j(y) -> Any: pass
-f = h
-f = j # E: Incompatible types in assignment (expression has type Callable[[Arg('y', Any)], Any], variable has type Callable[[Arg('x', Any)], Any])
-f = g # E: Incompatible types in assignment (expression has type Callable[[Any, Any], Any], variable has type Callable[[Any], Any])
-
-[case testRedefineFunction2]
-def f() -> None: pass
-def f() -> None: pass # E: Name 'f' already defined
-
-
--- Special cases
--- -------------
-
-
-[case testFunctionDefinitionWithForStatement]
-for _ in [1]:
- def f(): pass
-else:
- def g(): pass
-f()
-g()
-[builtins fixtures/list.pyi]
-
-[case testFunctionDefinitionWithWhileStatement]
-while bool():
- def f(): pass
-else:
- def g(): pass
-f()
-g()
-[builtins fixtures/bool.pyi]
-
-[case testBareCallable]
-from typing import Callable, Any
-
-def foo(f: Callable) -> bool:
- return f()
-
-def f1() -> bool:
- return False
-
-foo(f1)
-[builtins fixtures/bool.pyi]
-
-[case testFunctionNestedWithinWith]
-from typing import Any
-a = 1 # type: Any
-with a:
- def f() -> None:
- pass
- f(1) # E: Too many arguments for "f"
-
-
-[case testNameForDecoratorMethod]
-from typing import Callable
-
-class A:
- def f(self) -> None:
- # In particular, test that the error message contains "g" of "A".
- self.g() # E: Too few arguments for "g" of "A"
- self.g(1)
- @dec
- def g(self, x: str) -> None: pass
-
-def dec(f: Callable[[A, str], None]) -> Callable[[A, int], None]: pass
-[out]
-
-[case testUnknownFunctionNotCallable]
-def f() -> None:
- pass
-def g(x: int) -> None:
- pass
-h = f if bool() else g
-reveal_type(h) # E: Revealed type is 'builtins.function'
-h(7) # E: Cannot call function of unknown type
-[builtins fixtures/bool.pyi]
-
--- Positional-only arguments
--- -------------------------
-
-[case testPositionalOnlyArg]
-def f(__a: int) -> None: pass
-
-f(1)
-f(__a=1) # E: Unexpected keyword argument "__a" for "f"
-
-[builtins fixtures/bool.pyi]
-[out]
-main:1: note: "f" defined here
-
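As the case above shows, a parameter whose name starts with two underscores is treated as positional-only. A minimal illustration (function name invented):

    def scale(__value: float, factor: float = 2.0) -> float:
        # __value is positional-only for mypy, so it cannot be passed by keyword.
        return __value * factor

    print(scale(1.5))        # 3.0
    # scale(__value=1.5)     # mypy: Unexpected keyword argument "__value"
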
-[case testPositionalOnlyArgFastparse]
-# flags: --fast-parser
-
-def f(__a: int) -> None: pass
-
-f(1)
-f(__a=1) # E: Unexpected keyword argument "__a" for "f"
-
-[builtins fixtures/bool.pyi]
-[out]
-main:3: note: "f" defined here
-
-[case testMagicMethodPositionalOnlyArg]
-class A(object):
- def __eq__(self, other) -> bool: return True # We are all equal.
-
-a = A()
-a.__eq__(a)
-a.__eq__(other=a) # E: Unexpected keyword argument "other" for "__eq__" of "A"
-
-[builtins fixtures/bool.pyi]
-
-[case testMagicMethodPositionalOnlyArgFastparse]
-# flags: --fast-parser
-
-class A(object):
- def __eq__(self, other) -> bool: return True # We are all equal.
-
-a = A()
-a.__eq__(a)
-a.__eq__(other=a) # E: Unexpected keyword argument "other" for "__eq__" of "A"
-
-[builtins fixtures/bool.pyi]
-
-[case testTupleArguments]
-# flags: --python-version 2.7
-
-def f(a, (b, c), d): pass
-
-[case testTupleArgumentsFastparse]
-# flags: --fast-parser --python-version 2.7
-
-def f(a, (b, c), d): pass
diff --git a/test-data/unit/check-generic-subtyping.test b/test-data/unit/check-generic-subtyping.test
deleted file mode 100644
index 35cd0f9..0000000
--- a/test-data/unit/check-generic-subtyping.test
+++ /dev/null
@@ -1,746 +0,0 @@
--- Test cases for the type checker related to subtyping and inheritance with
--- generics.
-
-
--- Subtyping + inheritance
--- -----------------------
-
-
-[case testSubtypingAndInheritingNonGenericTypeFromGenericType]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-ac = None # type: A[C]
-ad = None # type: A[D]
-b = None # type: B
-
-b = ad # E: Incompatible types in assignment (expression has type A[D], variable has type "B")
-ad = b # E: Incompatible types in assignment (expression has type "B", variable has type A[D])
-b = ac # E: Incompatible types in assignment (expression has type A[C], variable has type "B")
-
-b = b
-ac = b
-
-class C: pass
-class A(Generic[T]): pass
-class B(A[C]): pass
-class D: pass
-
-[case testSubtypingAndInheritingGenericTypeFromNonGenericType]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-a = None # type: A
-bc = None # type: B[C]
-bd = None # type: B[D]
-
-bc = bd # E: Incompatible types in assignment (expression has type B[D], variable has type B[C])
-bd = bc # E: Incompatible types in assignment (expression has type B[C], variable has type B[D])
-bc = a # E: Incompatible types in assignment (expression has type "A", variable has type B[C])
-bd = a # E: Incompatible types in assignment (expression has type "A", variable has type B[D])
-
-a = bc
-a = bd
-
-class A: pass
-class B(A, Generic[T]): pass
-class C: pass
-class D: pass
-
-[case testSubtypingAndInheritingGenericTypeFromGenericType]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-ac = None # type: A[C]
-ad = None # type: A[D]
-bcc = None # type: B[C, C]
-bdc = None # type: B[D, C]
-
-ad = bcc # E: Incompatible types in assignment (expression has type B[C, C], variable has type A[D])
-ad = bdc # E: Incompatible types in assignment (expression has type B[D, C], variable has type A[D])
-bcc = ac # E: Incompatible types in assignment (expression has type A[C], variable has type B[C, C])
-bdc = ac # E: Incompatible types in assignment (expression has type A[C], variable has type B[D, C])
-
-bcc = bcc
-bdc = bdc
-ac = bcc
-ac = bdc
-
-class A(Generic[T]): pass
-class B(A[S], Generic[T, S]): pass
-class C: pass
-class D: pass
-
-[case testSubtypingAndInheritingGenericTypeFromGenericTypeAcrossHierarchy]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-X = TypeVar('X')
-Y = TypeVar('Y')
-ae = None # type: A[A[E]]
-af = None # type: A[A[F]]
-
-cef = None # type: C[E, F]
-cff = None # type: C[F, F]
-cfe = None # type: C[F, E]
-
-ae = cef # E: Incompatible types in assignment (expression has type C[E, F], variable has type A[A[E]])
-af = cfe # E: Incompatible types in assignment (expression has type C[F, E], variable has type A[A[F]])
-
-ae = cfe
-af = cef
-af = cff
-
-class A(Generic[T]): pass
-class B(A[S], Generic[T, S]): pass
-class C(B[A[X], A[Y]], Generic[X, Y]): pass
-class E: pass
-class F: pass
-
-[case testIncludingBaseClassTwice]
-from typing import TypeVar, Generic
-t = TypeVar('t')
-class I(Generic[t]): pass
-class A(I[C], I[object]): pass # E: Duplicate base class "I"
-class C: pass
-
-
--- Accessing inherited generic members
--- -----------------------------------
-
-
-[case testAccessingMethodInheritedFromGenericType]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-b = None # type: B[C, D]
-c, d = None, None # type: (C, D)
-
-b.f(c) # E: Argument 1 to "f" of "A" has incompatible type "C"; expected "D"
-b.f(d)
-
-class A(Generic[T]):
- def f(self, a: T) -> None:
- pass
-class B(A[S], Generic[T, S]): pass
-class C: pass
-class D: pass
-
-[case testAccessingMethodInheritedFromGenericTypeInNonGenericType]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-b, c, d = None, None, None # type: (B, C, D)
-
-b.f(c) # E: Argument 1 to "f" of "A" has incompatible type "C"; expected "D"
-b.f(d)
-
-class C: pass
-class D: pass
-class A(Generic[T]):
- def f(self, a: T) -> None:
- pass
-class B(A[D]): pass
-
-[case testAccessingMemberVarInheritedFromGenericType]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-class A(Generic[T]):
- def __init__(self, a: T) -> None:
- self.a = a
-
-b = None # type: B[C, D]
-c, d = None, None # type: (C, D)
-
-b.a = c # E: Incompatible types in assignment (expression has type "C", variable has type "D")
-b.a = d
-
-class B(A[S], Generic[T, S]): pass
-class C: pass
-class D: pass
-
-
--- Overriding with generic types
--- -----------------------------
-
-
-[case testOverridingMethodInSimpleTypeInheritingGenericType]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class B(Generic[T]):
- def f(self, a: T) -> None: pass
- def g(self, a: T) -> None: pass
-class C: pass
-class D: pass
-class A(B[C]):
- def f(self, a: D) -> None: pass \
- # E: Argument 1 of "f" incompatible with supertype "B"
- def g(self, a: C) -> None: pass
-[out]
-
-[case testOverridingMethodInGenericTypeInheritingSimpleType]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class C: pass
-class B:
- def f(self, a: C) -> None: pass
- def g(self, a: C) -> None: pass
-class A(B, Generic[T]):
- def f(self, a: T) -> None: pass \
- # E: Argument 1 of "f" incompatible with supertype "B"
- def g(self, a: 'C') -> None: pass
-[out]
-
-[case testOverridingMethodInGenericTypeInheritingGenericType]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-class B(Generic[T]):
- def f(self, a: T) -> None: pass
- def g(self, a: T) -> None: pass
-class A(B[S], Generic[T, S]):
- def f(self, a: T) -> None: pass \
- # E: Argument 1 of "f" incompatible with supertype "B"
- def g(self, a: S) -> None: pass
-[out]
-
-[case testOverridingMethodInMultilevelHierarchyOfGenericTypes]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-U = TypeVar('U')
-V = TypeVar('V')
-
-class D: pass
-class C(Generic[T, U, V]):
- def f(self, a: V) -> None: pass
- def g(self, a: V) -> None: pass
-class B(C[D, D, T], Generic[T]): pass
-class A(B[S], Generic[T, S]):
- def f(self, a: T) -> None: pass \
- # E: Argument 1 of "f" incompatible with supertype "C"
- def g(self, a: S) -> None: pass
-[out]
-
-[case testOverrideGenericMethodInNonGenericClass]
-from typing import TypeVar
-
-T = TypeVar('T')
-S = TypeVar('S')
-
-class A:
- def f(self, x: T, y: S) -> None: pass
-class B(A):
- def f(self, x: S, y: T) -> None: pass
-class C(A):
- # Okay, because T = object allows any type for the arguments.
- def f(self, x: T, y: T) -> None: pass
-
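The comment inside testOverrideGenericMethodInNonGenericClass above relies on the fact that an unconstrained type variable can always be solved as object. A small sketch of a compatible override of that kind (class names invented):

    from typing import TypeVar

    T = TypeVar('T')
    S = TypeVar('S')

    class Base:
        def pair(self, x: T, y: S) -> None:
            print(x, y)

    class Same(Base):
        # Still compatible: with T = object the supertype signature already
        # accepts any combination of argument types.
        def pair(self, x: T, y: T) -> None:
            print(x, y)

    Same().pair(1, "one")
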
-[case testOverrideGenericMethodInNonGenericClassLists]
-from typing import TypeVar, List
-
-T = TypeVar('T')
-S = TypeVar('S')
-
-class A:
- def f(self, x: List[T], y: List[S]) -> None: pass
-class B(A):
- def f(self, x: List[S], y: List[T]) -> None: pass
-class C(A):
- def f(self, x: List[T], y: List[T]) -> None: pass # E: Signature of "f" incompatible with supertype "A"
-[builtins fixtures/list.pyi]
-[out]
-
-[case testOverrideGenericMethodInNonGenericClassGeneralize]
-from typing import TypeVar
-
-T = TypeVar('T')
-T1 = TypeVar('T1', bound=str)
-S = TypeVar('S')
-
-class A:
- def f(self, x: int, y: S) -> None: pass
-class B(A):
- def f(self, x: T, y: S) -> None: pass
-class C(A):
- def f(self, x: T, y: str) -> None: pass
-class D(A):
- def f(self, x: T1, y: S) -> None: pass # TODO: This error could be more specific.
-[out]
-main:12: error: Argument 2 of "f" incompatible with supertype "A"
-main:14: error: Signature of "f" incompatible with supertype "A"
-
-
--- Inheritance from generic types with implicit dynamic supertype
--- --------------------------------------------------------------
-
-
-[case testInheritanceFromGenericWithImplicitDynamicAndSubtyping]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-a = None # type: A
-bc = None # type: B[C]
-bd = None # type: B[D]
-
-a = bc # E: Incompatible types in assignment (expression has type B[C], variable has type "A")
-bc = a
-bd = a
-
-class B(Generic[T]): pass
-class A(B): pass
-class C: pass
-class D: pass
-[out]
-
-[case testInheritanceFromGenericWithImplicitDynamicAndExternalAccess]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-
-class B(Generic[T]):
- def f(self, a: 'B[T]') -> None: pass
- def __init__(self, x: 'B[T]') -> None:
- self.x = x
-class A(B): pass
-class C: pass
-
-a = None # type: A
-c = None # type: C
-bc = None # type: B[C]
-
-a.x = c # E: Incompatible types in assignment (expression has type "C", variable has type B[Any])
-a.f(c) # E: Argument 1 to "f" of "B" has incompatible type "C"; expected B[Any]
-a.x = bc
-a.f(bc)
-[out]
-
-[case testInheritanceFromGenericWithImplicitDynamic]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-a = None # type: A
-c = None # type: C
-bc = None # type: B[C]
-
-class B(Generic[T]):
- def f(self, a: 'B[T]') -> None: pass
- def __init__(self, x: 'B[T]') -> None:
- self.x = x
-
-class A(B):
- def g(self) -> None:
- self.x = c # E: Incompatible types in assignment (expression has type "C", variable has type B[Any])
- self.f(c) # E: Argument 1 to "f" of "B" has incompatible type "C"; expected B[Any]
- self.x = bc
- self.f(bc)
-
-class C: pass
-[out]
-
-[case testInheritanceFromGenericWithImplicitDynamicAndOverriding]
-from typing import TypeVar, Generic, Tuple
-T = TypeVar('T')
-class B(Generic[T]):
- def f(self, a: T, b: 'Tuple[T, B[T]]') -> None:
- pass
-class A(B):
- def f(self, a, b): pass
-[builtins fixtures/tuple.pyi]
-[out]
-
-
--- Inheritance from generic types and super expressions
--- ----------------------------------------------------
-
-
-[case testSuperExpressionsWhenInheritingFromGenericType]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-class B(Generic[T]):
- def f(self, a: T) -> None: pass
-class A(B[S], Generic[T, S]):
- def g(self, t: T, s: S) -> None:
- super().f(t) # E: Argument 1 to "f" of "B" has incompatible type "T"; expected "S"
- super().f(s)
-[out]
-
-[case testSuperExpressionsWhenInheritingFromGenericTypeAndDeepHierarchy]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-U = TypeVar('U')
-V = TypeVar('V')
-class C(Generic[T, U, V]):
- def f(self, a: V) -> None: pass
-class D: pass
-class B(C[D, D, T], Generic[T]): pass
-class A(B[S], Generic[T, S]):
- def g(self, t: T, s: S) -> None:
- super().f(t) # E: Argument 1 to "f" of "C" has incompatible type "T"; expected "S"
- super().f(s)
-[out]
-
-
--- Type of inherited constructor
--- -----------------------------
-
-
-[case testInheritedConstructor]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]):
- def __init__(self, x: T) -> None: pass
-class B(A[T], Generic[T]): pass
-class C(A[int]): pass
-class D(A[A[T]], Generic[T]): pass
-B(1)
-C(1)
-C('a') # E: Argument 1 to "C" has incompatible type "str"; expected "int"
-D(A(1))
-D(1) # E: Argument 1 to "D" has incompatible type "int"; expected A[None]
-
-
-[case testInheritedConstructor2]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-U = TypeVar('U')
-Z = TypeVar('Z')
-class A(Generic[T, U]):
- def __init__(self, x: T, y: U, z: Z) -> None: pass
-class B(A[int, T], Generic[T]): pass
-class C(B[A[T, str]], Generic[T, U]): pass
-# C[T, U] <: B[A[T, str]] <: A[int, A[T, str]]
-C(1, A(1, 'a', 0), 'z')
-C(1, A('1', 'a', 0), 'z')
-C('1', A(1, 'a', 0), 'z') # E: Argument 1 to "C" has incompatible type "str"; expected "int"
-C(1, A(1, 1, 0), 'z') # E: Argument 2 to "A" has incompatible type "int"; expected "str"
-
-
--- Subtyping with a generic abstract base class
--- --------------------------------------------
-
-
-[case testSubtypingWithGenericTypeSubclassingGenericAbstractClass]
-from typing import TypeVar, Generic
-from abc import abstractmethod
-T = TypeVar('T')
-S = TypeVar('S')
-acd = None # type: A[C, D]
-adc = None # type: A[D, C]
-ic = None # type: I[C]
-id = None # type: I[D]
-
-ic = acd # E: Incompatible types in assignment (expression has type A[C, D], variable has type I[C])
-id = adc # E: Incompatible types in assignment (expression has type A[D, C], variable has type I[D])
-adc = ic # E: Incompatible types in assignment (expression has type I[C], variable has type A[D, C])
-
-ic = adc
-id = acd
-
-class I(Generic[T]):
- @abstractmethod
- def f(self): pass
-class A(I[S], Generic[T, S]): pass
-class C: pass
-class D: pass
-
-[case testSubtypingWithTypeImplementingGenericABCViaInheritance]
-from typing import TypeVar, Generic
-S = TypeVar('S')
-a, b = None, None # type: (A, B)
-ic, id, ie = None, None, None # type: (I[C], I[D], I[E])
-
-class I(Generic[S]): pass
-class B(I[C]): pass
-class A(B): pass
-
-ie = a # E: Incompatible types in assignment (expression has type "A", variable has type I[E])
-a = ic # E: Incompatible types in assignment (expression has type I[C], variable has type "A")
-a = id # E: Incompatible types in assignment (expression has type I[D], variable has type "A")
-a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-id = a # E: Incompatible types in assignment (expression has type "A", variable has type I[D])
-
-ic = a
-b = a
-
-class C: pass
-class D: pass
-class E: pass
-[out]
-
-[case testSubtypingWithTypeImplementingGenericABCViaInheritance2-skip]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class I(Generic[T]): pass
-class A(I[C]): pass
-class B(A, I[D]): pass # Fail
-
-class C: pass
-class D: pass
-[out]
-main:5: error: Class "B" has base "I" duplicated inconsistently
-
-[case testSubtypingAndABCExtension]
-from typing import TypeVar, Generic
-from abc import abstractmethod, ABCMeta
-t = TypeVar('t')
-a, i, j = None, None, None # type: (A[object], I[object], J[object])
-(ii, jj) = (i, j)
-ii = a
-jj = a
-jj = i
-a = i # E: Incompatible types in assignment (expression has type I[object], variable has type A[object])
-a = j # E: Incompatible types in assignment (expression has type J[object], variable has type A[object])
-
-class J(Generic[t]): pass
-class X(metaclass=ABCMeta): pass
-class I(X, J[t], Generic[t]): pass
-class A(I[t], Generic[t]): pass
-
-
--- Subclassing a generic ABC
--- -------------------------
-
-
-[case testSubclassingGenericABC1]
-from typing import TypeVar, Generic
-from abc import abstractmethod
-T = TypeVar('T')
-class I(Generic[T]):
- @abstractmethod
- def f(self, a: T) -> None: pass
- @abstractmethod
- def g(self, a: T) -> None: pass
-class A(I[C]):
- def f(self, a: 'D') -> None: pass \
- # E: Argument 1 of "f" incompatible with supertype "I"
- def g(self, a: 'C') -> None: pass
-class C: pass
-class D: pass
-[out]
-
-
--- Extending a generic ABC with deep type hierarchy
--- ------------------------------------------------
-
-
-[case testSubclassingGenericABCWithDeepHierarchy]
-from typing import Any, TypeVar, Generic
-from abc import abstractmethod
-T = TypeVar('T')
-a = None # type: A
-ic, id = None, None # type: (I[C], I[D])
-
-id = a # Fail
-ic = a
-
-class I(Generic[T]):
- @abstractmethod
- def f(self, a: T, b: T) -> None: pass
- @abstractmethod
- def g(self, a: T, b: 'D') -> None: pass
-class B(I[C]):
- def f(self, a: 'C', b: 'C') -> None: pass
- def g(self, a: 'C', b: Any) -> None: pass
-class A(B):
- def g(self, a: 'C', b: 'C') -> None: pass \
- # E: Argument 2 of "g" incompatible with supertype "I"
- def f(self, a: 'C', b: 'C') -> None: pass
-class C: pass
-class D: pass
-[out]
-main:7: error: Incompatible types in assignment (expression has type "A", variable has type I[D])
-
-[case testSubclassingGenericABCWithDeepHierarchy2]
-from typing import Any, TypeVar, Generic
-from abc import abstractmethod
-T = TypeVar('T')
-class I(Generic[T]):
- @abstractmethod
- def f(self, a: T, b: T) -> None: pass
-class B(I[C]):
- def f(self, a: 'C', b: Any) -> None: pass
-class A(B):
- def f(self, a: 'C', b: 'D') -> None: pass \
- # E: Argument 2 of "f" incompatible with supertype "I"
-class C: pass
-class D: pass
-[out]
-
-
--- Implicit Any types and subclassing generic ABC
--- ----------------------------------------------
-
-
-[case testSubclassingGenericABCWithImplicitAny]
-from typing import Any, TypeVar, Generic
-from abc import abstractmethod
-T = TypeVar('T')
-a = None # type: Any
-ic = None # type: I[C]
-id = None # type: I[D]
-
-ic = a
-id = a
-
-class I(Generic[T]):
- @abstractmethod
- def f(self, a: T) -> None: pass
-class A(I):
- def f(self, a): pass
-
-class C: pass
-class D: pass
-
-[case testSubclassingGenericABCWithImplicitAnyAndDeepHierarchy]
-from typing import Any, TypeVar, Generic
-from abc import abstractmethod
-T = TypeVar('T')
-a = None # type: Any
-ic = None # type: I[C]
-id = None # type: I[D]
-
-ic = a
-id = a
-
-class I(Generic[T]):
- @abstractmethod
- def f(self, a: T, b: T) -> None: pass
-class B(I):
- def f(self, a, b): pass
-class A(B):
- def f(self, a: 'C', b: 'D') -> None: pass
-class C: pass
-class D: pass
-
-[case testImplementingGenericABCWithImplicitAnyAndDeepHierarchy2]
-from typing import Any, TypeVar, Generic
-from abc import abstractmethod
-T = TypeVar('T')
-a = None # type: Any
-jc = None # type: J[C]
-jd = None # type: J[D]
-
-jc = a
-jd = a
-
-class J(Generic[T]):
- @abstractmethod
- def f(self, a: T, b: T) -> None: pass
-class I(J):
- @abstractmethod
- def f(self, a, b): pass
-class A(I):
- def f(self, a: 'C', b: 'D') -> None: pass
-
-class C: pass
-class D: pass
-
-
--- Accessing generic ABC members
--- -----------------------------
-
-
-[case testAccessingGenericABCMembers]
-from typing import TypeVar, Generic
-from abc import abstractmethod
-T = TypeVar('T')
-class I(Generic[T]):
- @abstractmethod
- def f(self, a: T) -> None: pass
-class A: pass
-class B: pass
-
-a, b = None, None # type: (A, B)
-ia = None # type: I[A]
-
-ia.f(b) # E: Argument 1 to "f" of "I" has incompatible type "B"; expected "A"
-ia.f(a)
-
-[case testAccessingInheritedGenericABCMembers]
-from typing import TypeVar, Generic
-from abc import abstractmethod
-T = TypeVar('T')
-class J(Generic[T]):
- @abstractmethod
- def f(self, a: T) -> None: pass
-class I(J[T], Generic[T]): pass
-class A: pass
-class B: pass
-a, b = None, None # type: (A, B)
-ia = None # type: I[A]
-
-ia.f(b) # E: Argument 1 to "f" of "J" has incompatible type "B"; expected "A"
-ia.f(a)
-
-
--- Misc
--- ----
-
-
-[case testMultipleAssignmentAndGenericSubtyping]
-from typing import Iterable
-n, s = None, None # type: int, str
-class Nums(Iterable[int]):
- def __iter__(self): pass
- def __next__(self): pass
-n, n = Nums()
-s, s = Nums() # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-[builtins fixtures/for.pyi]
-[out]
-
-
--- Variance
--- --------
-
-
-[case testCovariant]
-from typing import TypeVar, Generic
-T = TypeVar('T', covariant=True)
-
-class G(Generic[T]): pass
-class A: pass
-class B(A): pass
-class C(B): pass
-
-a = None # type: G[A]
-b = None # type: G[B]
-c = None # type: G[C]
-
-b = a # E: Incompatible types in assignment (expression has type G[A], variable has type G[B])
-b = c
-[builtins fixtures/bool.pyi]
-[out]
-
-[case testContravariant]
-from typing import TypeVar, Generic
-T = TypeVar('T', contravariant=True)
-
-class G(Generic[T]): pass
-class A: pass
-class B(A): pass
-class C(B): pass
-
-a = None # type: G[A]
-b = None # type: G[B]
-c = None # type: G[C]
-
-b = a
-b = c # E: Incompatible types in assignment (expression has type G[C], variable has type G[B])
-[builtins fixtures/bool.pyi]
-[out]
-
-[case testInvariant]
-from typing import TypeVar, Generic
-T = TypeVar('T') # invariant (default)
-
-class G(Generic[T]): pass
-class A: pass
-class B(A): pass
-class C(B): pass
-
-a = None # type: G[A]
-b = None # type: G[B]
-c = None # type: G[C]
-
-b = a # E: Incompatible types in assignment (expression has type G[A], variable has type G[B])
-b = c # E: Incompatible types in assignment (expression has type G[C], variable has type G[B])
-[builtins fixtures/bool.pyi]
-[out]
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
deleted file mode 100644
index 81eb74b..0000000
--- a/test-data/unit/check-generics.test
+++ /dev/null
@@ -1,1462 +0,0 @@
--- Simple generic types
--- --------------------
-
-
-[case testGenericMethodReturnType]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-a, b, c = None, None, None # type: (A[B], B, C)
-c = a.f() # Fail
-b = a.f()
-
-class A(Generic[T]):
- def f(self) -> T: pass
-
-class B: pass
-class C: pass
-[out]
-main:4: error: Incompatible types in assignment (expression has type "B", variable has type "C")
-
-[case testGenericMethodArgument]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-a.f(c) # Fail
-a.f(b)
-
-a = None # type: A[B]
-b = None # type: B
-c = None # type: C
-
-class A(Generic[T]):
- def f(self, a: T) -> None: pass
-
-class B: pass
-class C: pass
-[out]
-main:3: error: Argument 1 to "f" of "A" has incompatible type "C"; expected "B"
-
-[case testGenericMemberVariable]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]):
- def __init__(self, v: T) -> None:
- self.v = v
-
-a, b, c = None, None, None # type: (A[B], B, C)
-a.v = c # Fail
-a.v = b
-
-class B: pass
-class C: pass
-[out]
-main:8: error: Incompatible types in assignment (expression has type "C", variable has type "B")
-
-[case testGenericMemberVariable]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-a, b, c = None, None, None # type: (A[B], B, C)
-a.v = c # Fail
-a.v = b
-
-class A(Generic[T]):
- v = None # type: T
-class B: pass
-class C: pass
-[out]
-main:4: error: Incompatible types in assignment (expression has type "C", variable has type "B")
-
-[case testSimpleGenericSubtyping]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-b, bb, c = None, None, None # type: (A[B], A[B], A[C])
-c = b # Fail
-b = c # Fail
-
-b = b
-b = bb
-
-class A(Generic[T]): pass
-class B: pass
-class C(B): pass
-[out]
-main:4: error: Incompatible types in assignment (expression has type A[B], variable has type A[C])
-main:5: error: Incompatible types in assignment (expression has type A[C], variable has type A[B])
-
-[case testGenericTypeCompatibilityWithAny]
-from typing import Any, TypeVar, Generic
-T = TypeVar('T')
-b, c, d = None, None, None # type: (A[B], A[C], A[Any])
-
-b = d
-c = d
-d = b
-d = c
-
-class A(Generic[T]): pass
-class B: pass
-class C(B): pass
-[out]
-
-[case testTypeVariableAsTypeArgument]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-a = None # type: A[B]
-b = None # type: A[B]
-c = None # type: A[C]
-
-a.v = c # Fail
-c = a.v # Fail
-a.v = b
-b = a.v
-
-class A(Generic[T]):
- v = None # type: A[T]
-
-class B: pass
-class C: pass
-[out]
-main:7: error: Incompatible types in assignment (expression has type A[C], variable has type A[B])
-main:8: error: Incompatible types in assignment (expression has type A[B], variable has type A[C])
-
-[case testMultipleGenericTypeParametersWithMemberVars]
-from typing import TypeVar, Generic
-S = TypeVar('S')
-T = TypeVar('T')
-a = None # type: A[B, C]
-s = None # type: B
-t = None # type: C
-
-t = a.s # Fail
-s = a.t # Fail
-
-s = a.s
-t = a.t
-
-class A(Generic[S, T]):
- s = None # type: S
- t = None # type: T
-class B: pass
-class C: pass
-[out]
-main:8: error: Incompatible types in assignment (expression has type "B", variable has type "C")
-main:9: error: Incompatible types in assignment (expression has type "C", variable has type "B")
-
-[case testMultipleGenericTypeParametersWithMethods]
-from typing import TypeVar, Generic
-S = TypeVar('S')
-T = TypeVar('T')
-a = None # type: A[B, C]
-s = None # type: B
-t = None # type: C
-
-a.f(s, s) # Fail
-a.f(t, t) # Fail
-a.f(s, t)
-
-class A(Generic[S, T]):
- def f(self, s: S, t: T) -> None: pass
-class B: pass
-class C: pass
-[out]
-main:8: error: Argument 2 to "f" of "A" has incompatible type "B"; expected "C"
-main:9: error: Argument 1 to "f" of "A" has incompatible type "C"; expected "B"
-
-[case testMultipleGenericTypeParametersAndSubtyping]
-from typing import TypeVar, Generic
-S = TypeVar('S')
-T = TypeVar('T')
-bc = None # type: A[B, C]
-bb = None # type: A[B, B]
-cb = None # type: A[C, B]
-
-bb = bc # Fail
-bb = cb # Fail
-bc = bb # Fail
-
-bb = bb
-bc = bc
-
-class A(Generic[S, T]):
- s = None # type: S
- t = None # type: T
-
-class B: pass
-class C(B):pass
-[out]
-main:8: error: Incompatible types in assignment (expression has type A[B, C], variable has type A[B, B])
-main:9: error: Incompatible types in assignment (expression has type A[C, B], variable has type A[B, B])
-main:10: error: Incompatible types in assignment (expression has type A[B, B], variable has type A[B, C])
-
-
--- Simple generic type bodies
--- --------------------------
-
-
-[case testGenericTypeBody1]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]):
- a = None # type: T
-
- def f(self, b: T) -> T:
- self.f(x) # Fail
- d = self # type: A[B] # Fail
- self.a = self.f(self.a)
- return self.a
- c = self # type: A[T]
-x = None # type: B
-class B: pass
-[out]
-main:7: error: Argument 1 to "f" of "A" has incompatible type "B"; expected "T"
-main:8: error: Incompatible types in assignment (expression has type A[T], variable has type A[B])
-
-[case testGenericTypeBodyWithMultipleVariables]
-from typing import TypeVar, Generic
-S = TypeVar('S')
-T = TypeVar('T')
-class A(Generic[S, T]):
- def f(self) -> None:
- s = None # type: S
- t = None # type: T
- s = t # Fail
- t = s # Fail
- a = self # type: A[S, B] # Fail
- b = self # type: A[T, T] # Fail
- c = self # type: A[S, T]
- t = t
-
-class B: pass
-[out]
-main:8: error: Incompatible types in assignment (expression has type "T", variable has type "S")
-main:9: error: Incompatible types in assignment (expression has type "S", variable has type "T")
-main:10: error: Incompatible types in assignment (expression has type A[S, T], variable has type A[S, B])
-main:11: error: Incompatible types in assignment (expression has type A[S, T], variable has type A[T, T])
-
-[case testCompatibilityOfNoneWithTypeVar]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]):
- def f(self) -> None:
- a = None # type: T
- a = None
-[out]
-
-[case testCompatibilityOfTypeVarWithObject]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]):
- def f(self) -> T:
- a = object() # type: T # Fail
- a = object() # Fail
- b = self.f() # type: object
- b = self.f()
-[out]
-main:5: error: Incompatible types in assignment (expression has type "object", variable has type "T")
-main:6: error: Incompatible types in assignment (expression has type "object", variable has type "T")
-
-
--- Operations with generic types
--- -----------------------------
-
-
-[case testGenericOperations]
-from typing import TypeVar, Generic
-S = TypeVar('S')
-T = TypeVar('T')
-a = None # type: A[B, C]
-b = None # type: B
-c = None # type: C
-
-b = a + b # Fail
-c = a + c # Fail
-c = a[c] # Fail
-b = a[b] # Fail
-
-c = a + b
-b = a[c]
-
-class A(Generic[S, T]):
- def __add__(self, a: S) -> T: pass
- def __getitem__(self, i: T) -> S: pass
-
-class B: pass
-class C: pass
-[out]
-main:8: error: Incompatible types in assignment (expression has type "C", variable has type "B")
-main:9: error: Unsupported operand types for + ("A" and "C")
-main:10: error: Incompatible types in assignment (expression has type "B", variable has type "C")
-main:11: error: Invalid index type "B" for "A"; expected type "C"
-
-[case testOperatorAssignmentWithIndexLvalue1]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-b = None # type: B
-c = None # type: C
-ac = None # type: A[C]
-
-ac[b] += b # Fail
-ac[c] += c # Fail
-ac[b] += c
-ac[b] = ac[b] + c
-
-class A(Generic[T]):
- def __getitem__(self, i: 'B') -> T: pass
- def __setitem__(self, i: 'B', v: T) -> None: pass
-
-class B: pass
-class C:
- def __add__(self, o: 'C') -> 'C': pass
-[out]
-main:7: error: Unsupported operand types for + ("C" and "B")
-main:7: error: Incompatible types in assignment (expression has type "B", target has type "C")
-main:8: error: Invalid index type "C" for "A"; expected type "B"
-
-[case testOperatorAssignmentWithIndexLvalue2]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-b = None # type: B
-c = None # type: C
-ac = None # type: A[C]
-
-ac[b] += c # Fail
-ac[c] += c # Fail
-ac[b] = ac[b] + c # Fail
-
-class A(Generic[T]):
- def __getitem__(self, i: 'B') -> T: pass
- def __setitem__(self, i: 'C', v: T) -> None: pass
-
-class B: pass
-class C:
- def __add__(self, o: 'C') -> 'C': pass
-[out]
-main:7: error: Invalid index type "B" for "A"; expected type "C"
-main:8: error: Invalid index type "C" for "A"; expected type "B"
-main:9: error: Invalid index type "B" for "A"; expected type "C"
-
-
--- Nested generic types
--- --------------------
-
-
-[case testNestedGenericTypes]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-aab = None # type: A[A[B]]
-aac = None # type: A[A[C]]
-ab = None # type: A[B]
-ac = None # type: A[C]
-
-ac = aab.x # Fail
-ac.y = aab # Fail
-
-ab = aab.x
-ac = aac.x
-ab.y = aab
-ac.y = aac
-
-class A(Generic[T]):
- x = None # type: T
- y = None # type: A[A[T]]
-
-class B:
- pass
-class C:
- pass
-[out]
-main:8: error: Incompatible types in assignment (expression has type A[B], variable has type A[C])
-main:9: error: Incompatible types in assignment (expression has type A[A[B]], variable has type A[A[C]])
-
-
--- Generic functions
--- -----------------
-
-
-[case testTypeCheckingGenericFunctionBody]
-from typing import TypeVar, Generic
-S = TypeVar('S')
-T = TypeVar('T')
-class A: pass
-class p(Generic[T, S]):
- def __init__(self, t: T, a: S) -> None: pass
-def f(s: S, t: T) -> p[T, A]:
- a = t # type: S # E: Incompatible types in assignment (expression has type "T", variable has type "S")
- s = t # E: Incompatible types in assignment (expression has type "T", variable has type "S")
- p_s_a = None # type: p[S, A]
- if s:
- return p_s_a # E: Incompatible return value type (got p[S, A], expected p[T, A])
- b = t # type: T
- c = s # type: S
- p_t_a = None # type: p[T, A]
- return p_t_a
-[out]
-
-[case testTypeCheckingGenericMethodBody]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-class p(Generic[T, S]):
- def __init__(self, t: T, a: S) -> None: pass
-class A(Generic[T]):
- def f(self, s: S, t: T) -> p[S, T]:
- s = t # E: Incompatible types in assignment (expression has type "T", variable has type "S")
- p_s_s = None # type: p[S, S]
- if s:
- return p_s_s # E: Incompatible return value type (got p[S, S], expected p[S, T])
- p_t_t = None # type: p[T, T]
- if t:
- return p_t_t # E: Incompatible return value type (got p[T, T], expected p[S, T])
- t = t
- s = s
- p_s_t = None # type: p[S, T]
- return p_s_t
-[out]
-
-[case testProhibitTypeApplicationToGenericFunctions]
-from typing import TypeVar
-T = TypeVar('T')
-def f(x: T) -> T: pass
-
-y = f[int] # E: Type application is only supported for generic classes
-[out]
-
-
--- Generic types in expressions
--- ----------------------------
-
-
-[case testTypeApplicationArgs]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class Node(Generic[T]):
- def __init__(self, x: T) -> None:
- ...
-Node[int]() # E: Too few arguments for "Node"
-Node[int](1, 1, 1) # E: Too many arguments for "Node"
-[out]
-
-[case testTypeApplicationTvars]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-class A(Generic[T, S]): pass
-A[int]() # E: Type application has too few types (2 expected)
-A[int, str, int]() # E: Type application has too many types (2 expected)
-[out]
-
-[case testInvalidTypeApplicationType]
-a = None # type: A
-class A: pass
-a[A]() # E: Value of type "A" is not indexable
-A[A]() # E: Type application targets a non-generic function or class
-[out]
-
-[case testTypeApplicationArgTypes]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class Node(Generic[T]):
- def __init__(self, x: T) -> None:
- ...
-
-Node[int](1)
-Node[int]('a') # E: Argument 1 to "Node" has incompatible type "str"; expected "int"
-
-class Dummy(Generic[T]):
- def meth(self, x: T) -> None:
- ...
- def methout(self) -> T:
- ...
-
-Dummy[int]().meth(1)
-Dummy[int]().meth('a') # E: Argument 1 to "meth" of "Dummy" has incompatible type "str"; expected "int"
-reveal_type(Dummy[int]()) # E: Revealed type is '__main__.Dummy[builtins.int*]'
-reveal_type(Dummy[int]().methout()) # E: Revealed type is 'builtins.int*'
-[out]
-
-[case testTypeApplicationArgTypesSubclasses]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-class C(Generic[T, S]):
- def __init__(self, x: T, y: S) -> None:
- ...
-
-class D(C[int, T], Generic[T]): ...
-
-D[str](1, 'a')
-D[str](1, 1) # E: Argument 2 to "D" has incompatible type "int"; expected "str"
-
-class E(D[str]): ...
-E(1, 'a')
-E(1, 1) # E: Argument 2 to "E" has incompatible type "int"; expected "str"
-[out]
-
-[case testTypeApplicationAlias]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class Node(Generic[T]):
- def __init__(self, x: T) -> None:
- ...
-
-Alias = Node
-Alias[int](1)
-Alias[int]("a") # E: Argument 1 to "Node" has incompatible type "str"; expected "int"
-[out]
-
-[case testTypeApplicationCrash]
-type[int] # this was crashing, see #2302 (comment) # E: Type application targets a non-generic function or class
-[out]
-
-
--- Generic type aliases
--- --------------------
-
-[case testGenericTypeAliasesBasic]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-class Node(Generic[T, S]):
- def __init__(self, x: T, y: S) -> None:
- ...
-
-IntNode = Node[int, S]
-IntIntNode = Node[int, int]
-SameNode = Node[T, T]
-
-n = Node(1, 1) # type: IntIntNode
-n1 = Node(1, 'a') # type: IntIntNode # E: Argument 2 to "Node" has incompatible type "str"; expected "int"
-
-m = Node(1, 1) # type: IntNode
-m1 = Node('x', 1) # type: IntNode # E: Argument 1 to "Node" has incompatible type "str"; expected "int"
-m2 = Node(1, 1) # type: IntNode[str] # E: Argument 2 to "Node" has incompatible type "int"; expected "str"
-
-s = Node(1, 1) # type: SameNode[int]
-reveal_type(s) # E: Revealed type is '__main__.Node[builtins.int, builtins.int]'
-s1 = Node(1, 'x') # type: SameNode[int] # E: Argument 2 to "Node" has incompatible type "str"; expected "int"
-
-[out]
-
-[case testGenericTypeAliasesBasic2]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-class Node(Generic[T, S]):
- def __init__(self, x: T, y: S) -> None:
- ...
-
-IntNode = Node[int, S]
-IntIntNode = Node[int, int]
-SameNode = Node[T, T]
-
-def output_bad() -> IntNode[str]:
- return Node(1, 1) # Error - bad return type, see out
-
-def input(x: IntNode[str]) -> None:
- pass
-input(Node(1, 's'))
-input(Node(1, 1)) # E: Argument 2 to "Node" has incompatible type "int"; expected "str"
-
-def output() -> IntNode[str]:
- return Node(1, 'x')
-reveal_type(output()) # E: Revealed type is '__main__.Node[builtins.int, builtins.str]'
-
-def func(x: IntNode[T]) -> IntNode[T]:
- return x
-reveal_type(func) # E: Revealed type is 'def [T] (x: __main__.Node[builtins.int, T`-1]) -> __main__.Node[builtins.int, T`-1]'
-
-func(1) # E: Argument 1 to "func" has incompatible type "int"; expected Node[int, None]
-func(Node('x', 1)) # E: Argument 1 to "Node" has incompatible type "str"; expected "int"
-reveal_type(func(Node(1, 'x'))) # E: Revealed type is '__main__.Node[builtins.int, builtins.str*]'
-
-def func2(x: SameNode[T]) -> SameNode[T]:
- return x
-reveal_type(func2) # E: Revealed type is 'def [T] (x: __main__.Node[T`-1, T`-1]) -> __main__.Node[T`-1, T`-1]'
-
-func2(Node(1, 'x')) # E: Cannot infer type argument 1 of "func2"
-y = func2(Node('x', 'x'))
-reveal_type(y) # E: Revealed type is '__main__.Node[builtins.str*, builtins.str*]'
-
-def wrap(x: T) -> IntNode[T]:
- return Node(1, x)
-
-z = None # type: str
-reveal_type(wrap(z)) # E: Revealed type is '__main__.Node[builtins.int, builtins.str*]'
-
-[out]
-main:13: error: Argument 2 to "Node" has incompatible type "int"; expected "str"
-
-[case testGenericTypeAliasesWrongAliases]
-# flags: --show-column-numbers --fast-parser --python-version 3.6
-from typing import TypeVar, Generic, List, Callable, Tuple, Union
-T = TypeVar('T')
-S = TypeVar('S')
-class Node(Generic[T, S]):
- def __init__(self, x: T, y: S) -> None:
- ...
-
-A = Node[T] # Error
-B = Node[T, T]
-C = Node[T, T, T] # Error
-D = Node[T, S]
-E = Node[Node[T, T], List[T]]
-
-F = Node[List[T, T], S] # Error
-G = Callable[..., List[T, T]] # Error
-H = Union[int, Tuple[T, Node[T]]] # Error
-h: H # Error
-h1: H[int, str] # Error
-
-x = None # type: D[int, str]
-reveal_type(x)
-y = None # type: E[int]
-reveal_type(y)
-
-X = T # Error
-
-[builtins fixtures/list.pyi]
-[out]
-main:9:4: error: "Node" expects 2 type arguments, but 1 given
-main:11:4: error: "Node" expects 2 type arguments, but 3 given
-main:15:9: error: "list" expects 1 type argument, but 2 given
-main:16:18: error: "list" expects 1 type argument, but 2 given
-main:17:24: error: "Node" expects 2 type arguments, but 1 given
-main:18:3: error: "Node" expects 2 type arguments, but 1 given
-main:19:4: error: Bad number of arguments for type alias, expected: 1, given: 2
-main:22:0: error: Revealed type is '__main__.Node[builtins.int, builtins.str]'
-main:24:0: error: Revealed type is '__main__.Node[__main__.Node[builtins.int, builtins.int], builtins.list[builtins.int]]'
-main:26:4: error: Type variable "__main__.T" is invalid as target for type alias
-
-[case testGenericTypeAliasesForAliases]
-from typing import TypeVar, Generic, List, Union
-T = TypeVar('T')
-S = TypeVar('S')
-
-class Node(Generic[T, S]):
- def __init__(self, x: T, y: S) -> None:
- pass
-
-ListedNode = Node[List[T], List[S]]
-Second = ListedNode[int, T]
-Third = Union[int, Second[str]]
-
-def f2(x: T) -> Second[T]:
- return Node([1], [x])
-reveal_type(f2('a')) # E: Revealed type is '__main__.Node[builtins.list[builtins.int], builtins.list[builtins.str*]]'
-
-def f3() -> Third:
- return Node([1], ['x'])
-reveal_type(f3()) # E: Revealed type is 'Union[builtins.int, __main__.Node[builtins.list[builtins.int], builtins.list[builtins.str]]]'
-
-[builtins fixtures/list.pyi]
-
-[case testGenericTypeAliasesAny]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-class Node(Generic[T, S]):
- def __init__(self, x: T, y: S) -> None:
- self.x = x
- self.y = y
-
-IntNode = Node[int, S]
-AnyNode = Node[S, T]
-
-def output() -> IntNode[str]:
- return Node(1, 'x')
-x = output() # type: IntNode # This is OK (implicit Any)
-
-y = None # type: IntNode
-y.x = 1
-y.x = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-y.y = 1 # Both are OK (implicit Any)
-y.y = 'x'
-
-z = Node(1, 'x') # type: AnyNode
-reveal_type(z) # E: Revealed type is '__main__.Node[Any, Any]'
-
-[out]
-
-[case testGenericTypeAliasesAcessingMethods]
-from typing import TypeVar, Generic, List
-T = TypeVar('T')
-class Node(Generic[T]):
- def __init__(self, x: T) -> None:
- self.x = x
- def meth(self) -> T:
- return self.x
-
-ListedNode = Node[List[T]]
-l = None # type: ListedNode[int]
-l.x.append(1)
-l.meth().append(1)
-reveal_type(l.meth()) # E: Revealed type is 'builtins.list*[builtins.int]'
-l.meth().append('x') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
-
-ListedNode[str]([]).x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type List[str])
-
-[builtins fixtures/list.pyi]
-
-[case testGenericTypeAliasesSubclassing]
-from typing import TypeVar, Generic, Tuple, List
-T = TypeVar('T')
-class Node(Generic[T]):
- def __init__(self, x: T) -> None:
- ...
-
-TupledNode = Node[Tuple[T, T]]
-
-class D(Generic[T], TupledNode[T]):
- ...
-class L(Generic[T], List[TupledNode[T]]):
- ...
-
-def f_bad(x: T) -> D[T]:
- return D(1) # Error, see out
-
-L[int]().append(Node((1, 1)))
-L[int]().append(5) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected Node[Tuple[int, int]]
-
-x = D((1, 1)) # type: D[int]
-y = D(5) # type: D[int] # E: Argument 1 to "D" has incompatible type "int"; expected "Tuple[int, int]"
-
-def f(x: T) -> D[T]:
- return D((x, x))
-reveal_type(f('a')) # E: Revealed type is '__main__.D[builtins.str*]'
-
-[builtins fixtures/list.pyi]
-[out]
-main:15: error: Argument 1 to "D" has incompatible type "int"; expected "Tuple[T, T]"
-
-[case testGenericTypeAliasesSubclassingBad]
-from typing import TypeVar, Generic, Tuple, Union
-T = TypeVar('T')
-class Node(Generic[T]):
- def __init__(self, x: T) -> None:
- ...
-
-TupledNode = Node[Tuple[T, T]]
-UNode = Union[int, Node[T]]
-
-class C(TupledNode): ... # Same as TupledNode[Any]
-class D(TupledNode[T]): ... # E: Invalid type "__main__.T"
-class E(Generic[T], UNode[T]): ... # E: Invalid base class
-
-[builtins fixtures/list.pyi]
-
-[case testGenericTypeAliasesUnion]
-from typing import TypeVar, Generic, Union, Any
-T = TypeVar('T')
-class Node(Generic[T]):
- def __init__(self, x: T) -> None:
- self.x = x
-
-UNode = Union[int, Node[T]]
-x = 1 # type: UNode[int]
-
-x + 1 # E: Unsupported left operand type for + (some union)
-if not isinstance(x, Node):
- x + 1
-
-if not isinstance(x, int):
- x.x = 1
- x.x = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-def f(x: T) -> UNode[T]:
- if 1:
- return Node(x)
- else:
- return 1
-
-reveal_type(f(1)) # E: Revealed type is 'Union[builtins.int, __main__.Node[builtins.int*]]'
-
-TNode = Union[T, Node[int]]
-s = 1 # type: TNode[str] # E: Incompatible types in assignment (expression has type "int", variable has type "Union[str, Node[int]]")
-
-if not isinstance(s, str):
- s.x = 1
-
-z = None # type: TNode # Same as TNode[Any]
-z.x
-z.foo() # Any simplifies Union to Any now. This test should be updated after #2197
-
-[builtins fixtures/isinstance.pyi]
-
-[case testGenericTypeAliasesTuple]
-from typing import TypeVar, Tuple
-T = TypeVar('T')
-
-SameTP = Tuple[T, T]
-IntTP = Tuple[int, T]
-
-def f1(x: T) -> SameTP[T]:
- return x, x
-
-a, b, c = f1(1) # E: Need more than 2 values to unpack (3 expected)
-x, y = f1(1)
-reveal_type(x) # E: Revealed type is 'builtins.int'
-
-def f2(x: IntTP[T]) -> IntTP[T]:
- return x
-
-f2((1, 2, 3)) # E: Argument 1 to "f2" has incompatible type "Tuple[int, int, int]"; expected "Tuple[int, None]"
-reveal_type(f2((1, 'x'))) # E: Revealed type is 'Tuple[builtins.int, builtins.str*]'
-
-[builtins fixtures/for.pyi]
-
-[case testGenericTypeAliasesCallable]
-from typing import TypeVar, Generic, Callable
-T = TypeVar('T')
-class Node(Generic[T]):
- def __init__(self, x: T) -> None:
- ...
-
-BadC = Callable[T] # E: Invalid function type
-
-C = Callable[..., T]
-C2 = Callable[[T, T], Node[T]]
-
-def make_cb(x: T) -> C[T]:
- return lambda *args: x
-
-reveal_type(make_cb(1)) # E: Revealed type is 'def (*Any, **Any) -> builtins.int*'
-
-def use_cb(arg: T, cb: C2[T]) -> Node[T]:
- return cb(arg, arg)
-
-use_cb(1, 1) # E: Argument 2 to "use_cb" has incompatible type "int"; expected Callable[[int, int], Node[int]]
-my_cb = None # type: C2[int]
-use_cb('x', my_cb) # E: Argument 2 to "use_cb" has incompatible type Callable[[int, int], Node[int]]; expected Callable[[str, str], Node[str]]
-reveal_type(use_cb(1, my_cb)) # E: Revealed type is '__main__.Node[builtins.int]'
-
-[out]
-
-[case testGenericTypeAliasesPEPBasedExample]
-from typing import TypeVar, List, Tuple
-T = TypeVar('T', int, bool)
-
-Vec = List[Tuple[T, T]]
-
-vec = [] # type: Vec[bool]
-vec.append('x') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "Tuple[bool, bool]"
-reveal_type(vec[0]) # E: Revealed type is 'Tuple[builtins.bool, builtins.bool]'
-
-def fun1(v: Vec[T]) -> T:
- return v[0][0]
-def fun2(v: Vec[T], scale: T) -> Vec[T]:
- return v
-
-reveal_type(fun1([(1, 1)])) # E: Revealed type is 'builtins.int*'
-fun1(1) # E: Argument 1 to "fun1" has incompatible type "int"; expected List[Tuple[int, int]]
-fun1([(1, 'x')]) # E: Cannot infer type argument 1 of "fun1"
-
-reveal_type(fun2([(1, 1)], 1)) # E: Revealed type is 'builtins.list[Tuple[builtins.int*, builtins.int*]]'
-fun2([('x', 'x')], 'x') # E: Type argument 1 of "fun2" has incompatible value "str"
-
-[builtins fixtures/list.pyi]
-
-[case testGenericTypeAliasesImporting]
-from typing import TypeVar
-from a import Node, TupledNode
-T = TypeVar('T')
-
-n = None # type: TupledNode[int]
-n.x = 1
-n.y = (1, 1)
-n.y = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "Tuple[int, int]")
-
-def f(x: Node[T, T]) -> TupledNode[T]:
- return Node(x.x, (x.x, x.x))
-
-f(1) # E: Argument 1 to "f" has incompatible type "int"; expected Node[None, None]
-f(Node(1, 'x')) # E: Cannot infer type argument 1 of "f"
-reveal_type(Node('x', 'x')) # E: Revealed type is 'a.Node[builtins.str*, builtins.str*]'
-
-[file a.py]
-from typing import TypeVar, Generic, Tuple
-T = TypeVar('T')
-S = TypeVar('S')
-class Node(Generic[T, S]):
- def __init__(self, x: T, y: S) -> None:
- self.x = x
- self.y = y
-
-TupledNode = Node[T, Tuple[T, T]]
-
-[builtins fixtures/list.pyi]
-
-[case testGenericTypeAliasesRuntimeExpressionsInstance]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-class Node(Generic[T, S]):
- def __init__(self, x: T, y: S) -> None:
- ...
-
-IntNode = Node[int, T]
-IntNode[int](1, 1)
-IntNode[int](1, 'a') # E: Argument 2 to "Node" has incompatible type "str"; expected "int"
-
-SameNode = Node[T, T]
-ff = SameNode[T](1, 1) # E: Need type annotation for variable
-a = SameNode(1, 'x')
-reveal_type(a) # E: Revealed type is '__main__.Node[Any, Any]'
-b = SameNode[int](1, 1)
-reveal_type(b) # E: Revealed type is '__main__.Node[builtins.int*, builtins.int*]'
-SameNode[int](1, 'x') # E: Argument 2 to "Node" has incompatible type "str"; expected "int"
-
-[out]
-
-[case testGenericTypeAliasesRuntimeExpressionsOther]
-from typing import TypeVar, Union, Tuple, Callable, Any
-T = TypeVar('T')
-
-CA = Callable[[T], int]
-TA = Tuple[T, int]
-UA = Union[T, int]
-
-cs = CA[str] + 1 # E: Unsupported left operand type for + ("Type alias to Callable")
-reveal_type(cs) # E: Revealed type is 'Any'
-
-ts = TA[str]() # E: "Type alias to Tuple" not callable
-reveal_type(ts) # E: Revealed type is 'Any'
-
-us = UA[str].x # E: "Type alias to Union" has no attribute "x"
-reveal_type(us) # E: Revealed type is 'Any'
-
-[out]
-
-[case testGenericTypeAliasesTypeVarBinding]
-from typing import TypeVar, Generic, List
-T = TypeVar('T')
-S = TypeVar('S')
-
-class A(Generic[T, S]):
- def __init__(self, x: T, y: S) -> None: ...
-
-class B(Generic[T, S]):
- def __init__(self, x: List[T], y: List[S]) -> None: ...
-
-SameA = A[T, T]
-SameB = B[T, T]
-
-class C(Generic[T]):
- a = None # type: SameA[T]
- b = SameB[T]([], [])
-
-reveal_type(C[int]().a) # E: Revealed type is '__main__.A[builtins.int*, builtins.int*]'
-reveal_type(C[str]().b) # E: Revealed type is '__main__.B[builtins.str*, builtins.str*]'
-
-[builtins fixtures/list.pyi]
-
-[case testGenericTypeAliasesTypeVarConstraints]
-# flags: --show-column-numbers
-from typing import TypeVar, Generic
-T = TypeVar('T', int, list)
-S = TypeVar('S', int, list)
-
-class A(Generic[T, S]):
- def __init__(self, x: T, y: S) -> None: ...
-
-BadA = A[str, T] # One error here
-SameA = A[T, T]
-
-x = None # type: SameA[int]
-y = None # type: SameA[str] # Two errors here, for both args of A
-
-[builtins fixtures/list.pyi]
-[out]
-main:9:7: error: Type argument 1 of "A" has incompatible value "str"
-main:13: error: Type argument 1 of "A" has incompatible value "str"
-main:13: error: Type argument 2 of "A" has incompatible value "str"
-
-[case testGenericTypeAliasesIgnoredPotentialAlias]
-class A: ...
-Bad = A[int] # type: ignore
-
-reveal_type(Bad) # E: Revealed type is 'Any'
-[out]
-
-
--- Multiple assignment with lists
--- ------------------------------
-
-
-[case testMultipleAssignmentWithLists]
-from typing import List
-class A: pass
-class B: pass
-class B2(B): pass
-a = None # type: A
-b = None # type: B
-b2 = None # type: B2
-
-list_a = [a]
-list_b = [b]
-list_b2 = [b2]
-
-a, b = list_a # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-b, a = list_a # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-b2, b2 = list_b # E: Incompatible types in assignment (expression has type "B", variable has type "B2")
-
-a, a = list_a
-b, b2, b = list_b2
-[builtins fixtures/for.pyi]
-
-[case testMultipleAssignmentWithListsInInitialization]
-from typing import List
-class A: pass
-list_object = [object()]
-list_a = [A()]
-a, b = list_object # type: (A, object) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-c, d = list_object # type: (object, A) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-e, f = list_a # type: (A, object)
-[builtins fixtures/for.pyi]
-
-[case testMultipleAssignmentWithListAndIndexing]
-from typing import List
-a = None # type: List[A]
-b = None # type: List[int]
-
-a[1], b[1] = a # E: Incompatible types in assignment (expression has type "A", target has type "int")
-a[1], a[2] = a
-
-class A: pass
-[file builtins.py]
-from typing import TypeVar, Generic, Iterable
-T = TypeVar('T')
-class object: pass
-class list(Iterable[T], Generic[T]):
- def __setitem__(self, x: int, v: T) -> None: pass
-class int: pass
-class type: pass
-class tuple: pass
-class function: pass
-class str: pass
-
-[case testMultipleAssignmentWithIterable]
-from typing import Iterable, TypeVar
-a = None # type: int
-b = None # type: str
-T = TypeVar('T')
-
-def f(x: T) -> Iterable[T]: pass
-
-a, b = f(a) # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-b, b = f(a) # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-a, a = f(a)
-b, b = f(b)
-[builtins fixtures/for.pyi]
-
-
--- Error messages
--- --------------
-
-
-[case testErrorWithLongGenericTypeName]
-from typing import TypeVar, Generic
-B = TypeVar('B')
-C = TypeVar('C')
-D = TypeVar('D')
-E = TypeVar('E')
-F = TypeVar('F')
-G = TypeVar('G')
-H = TypeVar('H')
-I = TypeVar('I')
-J = TypeVar('J')
-K = TypeVar('K')
-L = TypeVar('L')
-M = TypeVar('M')
-N = TypeVar('N')
-O = TypeVar('O')
-P = TypeVar('P')
-Q = TypeVar('Q')
-R = TypeVar('R')
-S = TypeVar('S')
-T = TypeVar('T')
-U = TypeVar('U')
-V = TypeVar('V')
-W = TypeVar('W')
-X = TypeVar('X')
-Y = TypeVar('Y')
-Z = TypeVar('Z')
-class OO: pass
-a = None # type: A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object]
-
-f(a) # E: Argument 1 to "f" has incompatible type A[...]; expected "OO"
-
-def f(a: OO) -> None:
- pass
-class A(Generic[B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z]): pass
-
-[case testErrorWithShorterGenericTypeName]
-from typing import TypeVar, Generic
-S = TypeVar('S')
-T = TypeVar('T')
-a = None # type: A[object, B]
-f(a) # E: Argument 1 to "f" has incompatible type A[object, B]; expected "B"
-
-def f(a: 'B') -> None: pass
-class A(Generic[S, T]): pass
-class B: pass
-
-[case testErrorWithShorterGenericTypeName2]
-from typing import Callable, TypeVar, Generic
-S = TypeVar('S')
-T = TypeVar('T')
-a = None # type: A[object, Callable[[], None]]
-f(a) # E: Argument 1 to "f" has incompatible type A[object, Callable[[], None]]; expected "B"
-
-def f(a: 'B') -> None: pass
-class A(Generic[S, T]): pass
-class B: pass
-
-
--- Overloads + generics
--- --------------------
-
-
-[case testGenericArgumentInOverload]
-from typing import overload, List
-class A: pass
-class B: pass
-a, b = None, None # type: (A, B)
-
-@overload
-def f(a: List[A]) -> A: pass
-@overload
-def f(a: B) -> B: pass
-
-b = f([a]) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = f([b]) # E: List item 0 has incompatible type "B"
-a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-a = f([a])
-b = f(b)
-[builtins fixtures/list.pyi]
-
-[case testGenericFunctionAsOverloadItem]
-from typing import overload, TypeVar, List
-T = TypeVar('T')
-class A: pass
-class B: pass
-
-@overload
-def f(a: B) -> B: pass
-@overload
-def f(a: List[T]) -> T: pass
-
-a, b = None, None # type: (A, B)
-
-b = f([a]) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = f([b]) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-a = f([a])
-b = f([b])
-b = f(b)
-[builtins fixtures/list.pyi]
-
-
--- Type variable scoping
--- ---------------------
-
-
-[case testLocalTypeVariable]
-from typing import TypeVar
-def f() -> None:
- T = TypeVar('T')
- def g(x: T) -> T: pass
- a = g(1)
- a = 1
- a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-[out]
-
-[case testClassLevelTypeVariable]
-from typing import TypeVar
-class A:
- T = TypeVar('T')
- def g(self, x: T) -> T: pass
-a = A().g(1)
-a = 1
-a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testGenericClassInnerFunctionTypeVariable]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]):
- def __init__(self, a: T) -> None:
- self.a = a
- def f(self, n: int) -> None:
- def g(a: T):
- self.a = a
- g(self.a)
- g(n) # E: Argument 1 to "g" has incompatible type "int"; expected "T"
-[out]
-
-
--- Callable subtyping with generic functions
--- -----------------------------------------
-
-
-[case testSubtypingWithGenericFunctions]
-from typing import TypeVar
-A = TypeVar('A')
-B = TypeVar('B')
-
-def f1(x: A) -> A: ...
-def f2(x: A) -> B: ...
-def f3(x: B) -> B: ...
-def f4(x: int) -> A: ...
-
-y1 = f1
-y1 = f1
-y1 = f2
-y1 = f3
-y1 = f4 # E: Incompatible types in assignment (expression has type Callable[[int], A], variable has type Callable[[A], A])
-
-y2 = f2
-y2 = f2
-y2 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[A], B])
-y2 = f3 # E: Incompatible types in assignment (expression has type Callable[[B], B], variable has type Callable[[A], B])
-y2 = f4 # E: Incompatible types in assignment (expression has type Callable[[int], A], variable has type Callable[[A], B])
-
-y3 = f3
-y3 = f3
-y3 = f1
-y3 = f2
-y3 = f4 # E: Incompatible types in assignment (expression has type Callable[[int], A], variable has type Callable[[B], B])
-
-y4 = f4
-y4 = f4
-y4 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[int], A])
-y4 = f2
-y4 = f3 # E: Incompatible types in assignment (expression has type Callable[[B], B], variable has type Callable[[int], A])
-
-[case testSubtypingWithGenericInnerFunctions]
-from typing import TypeVar
-A = TypeVar('A')
-B = TypeVar('B')
-T = TypeVar('T')
-def outer(t: T) -> None:
- def f1(x: A) -> A: ...
- def f2(x: A) -> B: ...
- def f3(x: T) -> A: ...
- def f4(x: A) -> T: ...
- def f5(x: T) -> T: ...
-
- y1 = f1
- y1 = f2
- y1 = f3 # E: Incompatible types in assignment (expression has type Callable[[T], A], variable has type Callable[[A], A])
- y1 = f4 # E: Incompatible types in assignment (expression has type Callable[[A], T], variable has type Callable[[A], A])
- y1 = f5 # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[A], A])
-
- y2 = f2
- y2 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[A], B])
-
- y3 = f3
- y3 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[T], A])
- y3 = f2
- y3 = f4 # E: Incompatible types in assignment (expression has type Callable[[A], T], variable has type Callable[[T], A])
- y3 = f5 # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[T], A])
-
- y4 = f4
- y4 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[A], T])
- y4 = f2
- y4 = f3 # E: Incompatible types in assignment (expression has type Callable[[T], A], variable has type Callable[[A], T])
- y4 = f5 # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[A], T])
-
- y5 = f5
- y5 = f1
- y5 = f2
- y5 = f3
- y5 = f4
-[out]
-
-[case testSubtypingWithGenericFunctionUsingTypevarWithValues]
-from typing import TypeVar, Callable
-T = TypeVar('T', int, str)
-def f(x: T) -> T: pass
-def g1(f: Callable[[str], str]) -> None: pass
-g1(f)
-def g2(f: Callable[[int], int]) -> None: pass
-g2(f)
-def g3(f: Callable[[object], object]) -> None: pass
-g3(f) # E: Argument 1 to "g3" has incompatible type Callable[[T], T]; \
- expected Callable[[object], object]
-
-[case testSubtypingWithGenericFunctionUsingTypevarWithValues2-skip]
-from typing import TypeVar, Callable
-T = TypeVar('T', int, str)
-def f(x: T) -> T: pass
-g = f
-g = f
-
-
--- Operations on type variable types
--- ---------------------------------
-
-
-[case testTypeVariableTypeEquality]
-from typing import TypeVar
-T = TypeVar('T')
-def f(a: T, b: T) -> T:
- a.__ne__(b)
- if a == b:
- return a
- else:
- return b
-[builtins fixtures/ops.pyi]
-
-[case testTypeVariableTypeIs]
-from typing import TypeVar
-T = TypeVar('T')
-def f(a: T, b: T) -> T:
- if a is b or a is 1:
- return a
- else:
- return b
-[builtins fixtures/ops.pyi]
-
-[case testTypeVariableTypeLessThan]
-from typing import TypeVar
-T = TypeVar('T')
-def f(a: T, b: T) -> T:
- if a < b:
- return a
- else:
- return b
-[builtins fixtures/ops.pyi]
-[out]
-main:4: error: Unsupported left operand type for < ("T")
-
-
--- Subtyping generic callables
--- ---------------------------
-
-[case testSubtypingGenericTypeObject]
-from typing import Callable, Generic, TypeVar
-T = TypeVar('T')
-class C(Generic[T]):
- def __init__(self) -> None: pass
-x = C # type: Callable[[], C[int]]
-y = C # type: Callable[[], int] # E: Incompatible types in assignment (expression has type C[T], variable has type Callable[[], int])
-
-
--- Special cases
--- -------------
-
-
-[case testIdentityHigherOrderFunction]
-from typing import Callable, TypeVar
-A = TypeVar('A')
-B = TypeVar('B')
-def square(n: int) -> int:
- return n
-def id(f: Callable[[A], B]) -> Callable[[A], B]:
- return f
-g = id(square)
-g(1)
-g('x') # E: Argument 1 has incompatible type "str"; expected "int"
-
-
-[case testIdentityHigherOrderFunction2]
-from typing import Callable, TypeVar
-A = TypeVar('A')
-def voidify(n: int) -> None: pass
-def identity(f: Callable[[A], None]) -> Callable[[A], None]:
- return f
-identity(voidify)(3)
-
-[case testIdentityHigherOrderFunction3]
-from typing import Callable, TypeVar
-A = TypeVar('A')
-B = TypeVar('B')
-def fn(n: B) -> None: pass
-def identity(f: A) -> A:
- return f
-identity(fn)
-identity(fn)('x')
-
-[case testTypeVariableUnionAndCallableInTypeInference]
-from typing import Union, Callable, TypeVar
-T = TypeVar('T')
-def f(x: T, y: Union[T, Callable[[T], None]]) -> None: pass
-f('', '')
-
-[case testGenericFunctionsWithUnalignedIds]
-from typing import TypeVar
-A = TypeVar('A')
-B = TypeVar('B')
-def f1(x: int, y: A) -> A: ...
-def f2(x: int, y: A) -> B: ...
-def f3(x: A, y: B) -> B: ...
-g = f1
-g = f2
-g = f3
-
-[case testTypeVariableWithContainerAndTuple]
-from typing import TypeVar, Container
-T = TypeVar('T')
-def f(x: Container[T]) -> T: ...
-reveal_type(f((1, 2))) # E: Revealed type is 'builtins.int*'
-
-[case testClassMethodInGenericClassWithGenericConstructorArg]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]):
- def __init__(self, a: T) -> None: pass
- @classmethod
- def f(cls) -> None: pass
-[builtins fixtures/classmethod.pyi]
-
-[case testClassMethodInClassWithGenericConstructor]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A:
- def __init__(self, a: T) -> None: pass
- @classmethod
- def f(cls) -> None: pass
-[builtins fixtures/classmethod.pyi]
-
-[case testGenericOperatorMethodOverlapping]
-from typing import TypeVar, Generic, Tuple
-T = TypeVar('T')
-T2 = TypeVar('T2')
-S = TypeVar('S', bound=str)
-S2 = TypeVar('S2', bound=str)
-class G(Generic[T]):
- pass
-class A:
- def __or__(self, x: G[T]) -> G[T]: pass
- def __ior__(self, x: G[T2]) -> G[T2]: pass
-class B:
- def __or__(self, x: G[T]) -> G[T]: pass
- def __ior__(self, x: G[S]) -> G[S]: pass \
- # E: Signatures of "__ior__" and "__or__" are incompatible
-class C:
- def __or__(self, x: G[S]) -> G[S]: pass
- def __ior__(self, x: G[S2]) -> G[S2]: pass
-
-[case testGenericOperatorMethodOverlapping2]
-from typing import TypeVar, Generic, Tuple
-X = TypeVar('X')
-T = TypeVar('T', int, str)
-T2 = TypeVar('T2', int, str)
-S = TypeVar('S', float, str)
-S2 = TypeVar('S2', float, str)
-class G(Generic[X]):
- pass
-class A:
- def __or__(self, x: G[T]) -> G[T]: pass
- def __ior__(self, x: G[T2]) -> G[T2]: pass
-class B:
- def __or__(self, x: G[T]) -> G[T]: pass
- def __ior__(self, x: G[S]) -> G[S]: pass \
- # E: Signatures of "__ior__" and "__or__" are incompatible
-class C:
- def __or__(self, x: G[S]) -> G[S]: pass
- def __ior__(self, x: G[S2]) -> G[S2]: pass
-class D:
- def __or__(self, x: G[X]) -> G[X]: pass
- def __ior__(self, x: G[S2]) -> G[S2]: pass \
- # E: Signatures of "__ior__" and "__or__" are incompatible
diff --git a/test-data/unit/check-ignore.test b/test-data/unit/check-ignore.test
deleted file mode 100644
index b6d127c..0000000
--- a/test-data/unit/check-ignore.test
+++ /dev/null
@@ -1,218 +0,0 @@
-[case testIgnoreTypeError]
-x = 1
-x() # type: ignore
-x() # E: "int" not callable
-
-[case testIgnoreUndefinedName]
-x = 1
-y # type: ignore
-z # E: Name 'z' is not defined
-
-[case testIgnoreImportError]
-import xyz_m # type: ignore
-xyz_m.foo
-1() # E: "int" not callable
-
-[case testIgnoreImportFromError]
-from xyz_m import a, b # type: ignore
-a.foo
-b()
-1() # E: "int" not callable
-
-[case testIgnoreImportFromErrorMultiline]
-from xyz_m import ( # type: ignore
- a, b
-)
-a.foo
-b()
-1() # E: "int" not callable
-
-[case testIgnoreImportAllError]
-from xyz_m import * # type: ignore
-x # E: Name 'x' is not defined
-1() # E: "int" not callable
-
-[case testIgnoreImportBadModule]
-import m # type: ignore
-from m import a # type: ignore
-[file m.py]
-+
-[out]
-tmp/m.py:1: error: invalid syntax
-
-[case testIgnoreAppliesOnlyToMissing]
-import a # type: ignore
-import b # type: ignore
-reveal_type(a.foo) # E: Revealed type is 'Any'
-reveal_type(b.foo) # E: Revealed type is 'builtins.int'
-a.bar()
-b.bar() # E: "module" has no attribute "bar"
-
-[file b.py]
-foo = 3
-
-[builtins fixtures/module_all.pyi]
-[out]
-
-[case testIgnoreImportStarFromBadModule]
-from m import * # type: ignore
-[file m.py]
-+
-[out]
-tmp/m.py:1: error: invalid syntax
-
-[case testIgnoreAssignmentTypeError]
-x = 1
-x = '' # type: ignore
-x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testIgnoreInvalidOverride]
-class A:
- def f(self) -> int: pass
-class B(A):
- def f(self) -> str: pass # type: ignore
-
-[case testIgnoreMissingModuleAttribute]
-import m
-m.x = object # type: ignore
-m.f() # type: ignore
-m.y # E: "module" has no attribute "y"
-[file m.py]
-[builtins fixtures/module.pyi]
-
-[case testIgnoreTypeInferenceError]
-x = [] # type: ignore
-y = x
-x.append(1)
-[builtins fixtures/list.pyi]
-
-[case testIgnoreTypeInferenceError2]
-def f() -> None: pass
-x = f() # type: ignore
-y = x
-x = 1
-[builtins fixtures/list.pyi]
-
-[case testIgnoreTypeInferenceErrorAndMultipleAssignment]
-x, y = [], [] # type: ignore
-z = x
-z = y
-[builtins fixtures/list.pyi]
-
-[case testIgnoreSomeStarImportErrors]
-from m1 import *
-from m2 import * # type: ignore
-# We should still import things that don't conflict.
-y() # E: "str" not callable
-z() # E: "int" not callable
-x() # E: "int" not callable
-[file m1.py]
-x = 1
-y = ''
-[file m2.py]
-x = ''
-z = 1
-
-[case testIgnoredModuleDefinesBaseClass1]
-from m import B # type: ignore
-
-class C(B):
- def f(self) -> None:
- self.f(1) # E: Too many arguments for "f" of "C"
- self.g(1)
-[out]
-
-[case testIgnoredModuleDefinesBaseClass2]
-import m # type: ignore
-
-class C(m.B):
- def f(self) -> None: ...
-
-c = C()
-c.f(1) # E: Too many arguments for "f" of "C"
-c.g(1)
-c.x = 1
-[out]
-
-[case testIgnoredModuleDefinesBaseClassAndClassAttribute]
-import m # type: ignore
-
-class C(m.B):
- @staticmethod
- def f() -> None: pass
-
-C.f(1) # E: Too many arguments for "f" of "C"
-C.g(1)
-C.x = 1
-[builtins fixtures/staticmethod.pyi]
-[out]
-
-[case testIgnoredModuleDefinesBaseClassWithInheritance1]
-from m import B # type: ignore
-
-class C: pass
-class D(C, B):
- def f(self) -> None:
- self.f(1) # E: Too many arguments for "f" of "D"
- self.g(1)
-[out]
-
-[case testIgnoredModuleDefinesBaseClassWithInheritance2]
-from m import B # type: ignore
-
-class C(B): pass
-class D(C):
- def f(self) -> None:
- self.f(1) # E: Too many arguments for "f" of "D"
- self.g(1)
-[out]
-
-[case testIgnoreWithFollowingIndentedComment]
-if 1: # type: ignore
- # blah
- pass
-[out]
-
-[case testIgnoreTooManyTypeArguments]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-U = TypeVar('U')
-
-class Base(Generic[T, U]):
- pass
-
-class PartialBase(Base[T, int], Generic[T]):
- pass
-
-class Child(PartialBase[str, int]): # type: ignore
- pass
-
-
-def foo(x: Base[str, int]) -> None: pass
-foo(Child())
-
-def bar(x: Base[str, str]) -> None: pass
-bar(Child())
-[out]
-main:19: error: Argument 1 to "bar" has incompatible type "Child"; expected Base[str, str]
-
-[case testTypeIgnoreLineNumberWithinFile]
-import m
-pass # type: ignore
-m.f(kw=1)
-[file m.py]
-pass
-def f() -> None: pass
-[out]
-main:3: error: Unexpected keyword argument "kw" for "f"
-tmp/m.py:2: note: "f" defined here
-
-[case testIgnoreUnexpectedKeywordArgument]
-import m
-m.f(kw=1) # type: ignore
-[file m.py]
-def f() -> None: pass
-[out]
-
-[case testCannotIgnoreBlockingError]
-yield # type: ignore # E: 'yield' outside function
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
deleted file mode 100644
index 439d536..0000000
--- a/test-data/unit/check-incremental.test
+++ /dev/null
@@ -1,1780 +0,0 @@
--- Checks for incremental mode (see testcheck.py).
--- Each test is run twice, once with a cold cache, once with a warm cache.
--- Before the tests are run the second time, any *.py.next files are copied to *.py.
---
--- Errors expected in the first run should be in the `[out1]` section, and
--- errors expected in the second run should be in the `[out2]` section. If a
--- section is omitted, it is expected there are no errors on that run.
---
--- Any files that we expect to be rechecked should be annotated in the [rechecked]
--- annotation, and any files expected to be stale (aka have a modified interface)
--- should be annotated in the [stale] annotation. Note that a file that ends up
--- producing an error does not create a new cache file and so is not considered stale.
---
--- The test suite will automatically assume that __main__ is stale and rechecked in
--- all cases so we can avoid constantly having to annotate it. The list of
--- rechecked/stale files can be in any arbitrary order, or can be left empty
--- if no files should be rechecked/stale.
-
-[case testIncrementalEmpty]
-[rechecked]
-[stale]
-
-[case testIncrementalBasics]
-import m
-[file m.py]
-def foo():
- pass
-[file m.py.next]
-def foo() -> None:
- pass
-[rechecked m]
-[stale m]
-
-[case testIncrementalError]
-import m
-[file m.py]
-def foo() -> None:
- pass
-[file m.py.next]
-def foo() -> None:
- bar()
-[rechecked m]
-[stale]
-[out2]
-tmp/m.py:2: error: Name 'bar' is not defined
-
-[case testIncrementalSimpleImportSequence]
-import mod1
-mod1.func1()
-
-[file mod1.py]
-import mod2
-def func1() -> None: mod2.func2()
-
-[file mod2.py]
-import mod3
-def func2() -> None: mod3.func3()
-
-[file mod3.py]
-def func3() -> None: pass
-
-[rechecked]
-[stale]
-
-
-[case testIncrementalInternalChangeOnly]
-import mod1
-mod1.func1()
-
-[file mod1.py]
-import mod2
-def func1() -> None: mod2.func2()
-
-[file mod2.py]
-import mod3
-def func2() -> None: mod3.func3()
-
-[file mod3.py]
-def func3() -> None: pass
-
-[file mod3.py.next]
-def func3() -> None: 3 + 2
-
-[rechecked mod3]
-[stale]
-
-
-[case testIncrementalImportGone]
-import mod1
-
-[file mod1.py]
-from mod2 import A
-def func1() -> A: pass
-
-[file mod2.py]
-class A: pass
-
-[file mod1.py.next]
-def func1() -> A: pass
-
-[rechecked mod1]
-[stale]
-[out2]
-tmp/mod1.py:1: error: Name 'A' is not defined
-
-[case testIncrementalSameNameChange]
-import mod1
-
-[file mod1.py]
-from mod2 import A
-def func1() -> A: pass
-
-[file mod2.py]
-class A: pass
-
-[file mod2.py.next]
-class Parent: pass
-class A(Parent): pass
-
-[rechecked mod1, mod2]
-[stale mod2]
-
-[case testIncrementalPartialInterfaceChange]
-import mod1
-mod1.func1()
-
-[file mod1.py]
-import mod2
-def func1() -> None: mod2.func2()
-
-[file mod2.py]
-import mod3
-def func2() -> None: mod3.func3()
-
-[file mod3.py]
-def func3() -> None: pass
-
-[file mod3.py.next]
-def func3() -> int: return 2
-
-[rechecked mod2, mod3]
-[stale mod3]
-
-[case testIncrementalInternalFunctionDefinitionChange]
-import mod1
-
-[file mod1.py]
-import mod2
-def accepts_int(a: int) -> int: return a
-accepts_int(mod2.foo())
-
-[file mod2.py]
-def foo() -> int:
- def inner() -> int:
- return 42
- return inner()
-
-[file mod2.py.next]
-def foo() -> int:
- def inner2() -> str:
- return "foo"
- return inner2()
-
-[rechecked mod2]
-[stale]
-[out2]
-tmp/mod2.py:4: error: Incompatible return value type (got "str", expected "int")
-
-[case testIncrementalInternalScramble]
-import mod1
-
-[file mod1.py]
-import mod2
-mod2.foo()
-
-[file mod2.py]
-def baz() -> int:
- return 3
-
-def bar() -> int:
- return baz()
-
-def foo() -> int:
- return bar()
-
-[file mod2.py.next]
-def foo() -> int:
- return baz()
-
-def bar() -> int:
- return bar()
-
-def baz() -> int:
- return 42
-[rechecked mod2]
-[stale]
-
-[case testIncrementalMethodInterfaceChange]
-import mod1
-
-[file mod1.py]
-import mod2
-
-[file mod2.py]
-class Foo:
- def bar(self, a: str) -> str:
- return "a"
-
-[file mod2.py.next]
-class Foo:
- def bar(self, a: float) -> str:
- return "a"
-
-[rechecked mod1, mod2]
-[stale mod2]
-
-[case testIncrementalBaseClassChange]
-import mod1
-
-[file mod1.py]
-from mod2 import Child
-Child().good_method()
-
-[file mod2.py]
-class Good:
- def good_method(self) -> int: return 1
-class Bad: pass
-class Child(Good): pass
-
-[file mod2.py.next]
-class Good:
- def good_method(self) -> int: return 1
-class Bad: pass
-class Child(Bad): pass
-
-[rechecked mod1, mod2]
-[stale mod2]
-[out2]
-tmp/mod1.py:2: error: "Child" has no attribute "good_method"
-
-[case testIncrementalCascadingChange]
-import mod1
-
-[file mod1.py]
-from mod2 import A
-def accepts_int(a: int) -> None: pass
-accepts_int(A)
-
-[file mod2.py]
-from mod3 import B
-A = B
-
-[file mod3.py]
-from mod4 import C
-B = C
-
-[file mod4.py]
-C = 3
-
-[file mod4.py.next]
-C = "A"
-
-[rechecked mod1, mod2, mod3, mod4]
-[stale mod2, mod3, mod4]
-[out2]
-tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int"
-
-[case testIncrementalBrokenCascade]
-import mod1
-
-[file mod1.py]
-import mod2
-def accept_int(a: int) -> int: return a
-accept_int(mod2.mod3.mod4.const)
-
-[file mod2.py]
-import mod3
-
-[file mod3.py]
-import mod4
-
-[file mod4.py]
-const = 3
-
-[file mod3.py.next]
-# Import of mod4 is gone!
-
-[rechecked mod1, mod2, mod3]
-[stale mod3]
-[builtins fixtures/module.pyi]
-[out2]
-tmp/mod1.py:3: error: "module" has no attribute "mod4"
-
-[case testIncrementalLongBrokenCascade]
-import mod1
-
-[file mod1.py]
-import mod2
-def accept_int(a: int) -> int: return a
-accept_int(mod2.mod3.mod4.mod5.mod6.mod7.const)
-
-[file mod2.py]
-import mod3
-
-[file mod3.py]
-import mod4
-
-[file mod4.py]
-import mod5
-
-[file mod5.py]
-import mod6
-
-[file mod6.py]
-import mod7
-
-[file mod7.py]
-const = 3
-
-[file mod6.py.next]
-# Import of mod7 is gone!
-
-[rechecked mod1, mod5, mod6]
-[stale mod6]
-[builtins fixtures/module.pyi]
-[out2]
-tmp/mod1.py:3: error: "module" has no attribute "mod7"
-
-[case testIncrementalNestedBrokenCascade]
-import mod1
-
-[file mod1.py]
-import mod2
-def accept_int(a: int) -> int: return a
-accept_int(mod2.mod3.mod4.const)
-
-[file mod2/__init__.py]
-import mod2.mod3 as mod3
-
-[file mod2/mod3/__init__.py]
-import mod2.mod3.mod4 as mod4
-
-[file mod2/mod3/__init__.py.next]
-# Import is gone!
-
-[file mod2/mod3/mod4.py]
-const = 3
-
-[rechecked mod1, mod2, mod2.mod3]
-[stale mod2.mod3]
-[builtins fixtures/module.pyi]
-[out2]
-tmp/mod1.py:3: error: "module" has no attribute "mod4"
-
-[case testIncrementalNestedBrokenCascadeWithType1]
-import mod1, mod2.mod3.mod5
-
-[file mod1.py]
-import mod2
-def accept_int(x: int) -> None: pass
-def produce() -> mod2.CustomType:
- return mod2.CustomType()
-a = produce()
-accept_int(a.foo())
-
-[file mod2/__init__.py]
-from mod2.mod3 import CustomType
-
-[file mod2/mod3/__init__.py]
-from mod2.mod3.mod4 import CustomType
-
-[file mod2/mod3/__init__.py.next]
-# Import a different class that also happens to be called 'CustomType'
-from mod2.mod3.mod5 import CustomType
-def produce() -> CustomType:
- return CustomType()
-
-[file mod2/mod3/mod4.py]
-class CustomType:
- def foo(self) -> int: return 1
-
-[file mod2/mod3/mod5.py]
-class CustomType:
- def foo(self) -> str: return "a"
-
-[rechecked mod1, mod2, mod2.mod3]
-[stale mod2, mod2.mod3]
-[builtins fixtures/module.pyi]
-[out1]
-[out2]
-tmp/mod1.py:6: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
-
-[case testIncrementalNestedBrokenCascadeWithType2]
-import mod1, mod2.mod3.mod5
-
-[file mod1.py]
-from mod2 import produce
-def accept_int(x: int) -> None: pass
-a = produce()
-accept_int(a.foo())
-
-[file mod2/__init__.py]
-from mod2.mod3 import produce
-
-[file mod2/mod3/__init__.py]
-from mod2.mod3.mod4 import CustomType
-def produce() -> CustomType:
- return CustomType()
-
-[file mod2/mod3/__init__.py.next]
-# Import a different class that also happens to be called 'CustomType'
-from mod2.mod3.mod5 import CustomType
-def produce() -> CustomType:
- return CustomType()
-
-[file mod2/mod3/mod4.py]
-class CustomType:
- def foo(self) -> int: return 1
-
-[file mod2/mod3/mod5.py]
-class CustomType:
- def foo(self) -> str: return "a"
-
-[rechecked mod1, mod2, mod2.mod3]
-[stale mod2.mod3]
-[builtins fixtures/module.pyi]
-[out1]
-[out2]
-tmp/mod1.py:4: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
-
-[case testIncrementalRemoteChange]
-import mod1
-
-[file mod1.py]
-import mod2
-def accepts_int(a: int) -> None: pass
-accepts_int(mod2.mod3.mod4.const)
-
-[file mod2.py]
-import mod3
-
-[file mod3.py]
-import mod4
-
-[file mod4.py]
-const = 3
-
-[file mod4.py.next]
-const = "foo"
-
-[rechecked mod1, mod3, mod4]
-[stale mod4]
-[out2]
-tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int"
-
-[case testIncrementalBadChange]
-import mod1
-
-[file mod1.py]
-from mod2 import func2
-
-def func1() -> int:
- return func2()
-
-[file mod2.py]
-def func2() -> int:
- return 1
-
-[file mod2.py.next]
-def func2() -> str:
- return "foo"
-
-[rechecked mod1, mod2]
-[stale mod2]
-[out2]
-tmp/mod1.py:4: error: Incompatible return value type (got "str", expected "int")
-
-[case testIncrementalBadChangeWithSave]
-import mod0
-
-[file mod0.py]
-import mod1
-A = mod1.func2()
-
-[file mod1.py]
-from mod2 import func2
-
-def func1() -> int:
- return func2()
-
-[file mod2.py]
-def func2() -> int:
- return 1
-
-[file mod2.py.next]
-def func2() -> str:
- return "foo"
-
-[rechecked mod0, mod1, mod2]
-[stale mod2]
-[out2]
-tmp/mod1.py:4: error: Incompatible return value type (got "str", expected "int")
-
-[case testIncrementalOkChangeWithSave]
-import mod0
-
-[file mod0.py]
-import mod1
-A = mod1.func2()
-
-[file mod1.py]
-from mod2 import func2
-
-def func1() -> int:
- func2()
- return 1
-
-[file mod2.py]
-def func2() -> int:
- return 1
-
-[file mod2.py.next]
-def func2() -> str:
- return "foo"
-
-[rechecked mod0, mod1, mod2]
-[stale mod0, mod2]
-[out2]
-
-[case testIncrementalWithComplexDictExpression]
-import mod1
-
-[file mod1.py]
-import mod1_private
-
-[file mod1_private.py]
-my_dict = {
- 'a': [1, 2, 3],
- 'b': [4, 5, 6]
-}
-
-[file mod1_private.py.next]
-my_dict = {
- 'a': [1, 2, 3],
- 'b': [4, 5, 'a']
-}
-
-[rechecked mod1, mod1_private]
-[stale mod1_private]
-[builtins fixtures/dict.pyi]
-
-[case testIncrementalWithComplexConstantExpressionNoAnnotation]
-import mod1
-
-[file mod1.py]
-import mod1_private
-
-[file mod1_private.py]
-def foobar() -> int: return 1
-def baz() -> int: return 2
-const = 1 + foobar()
-
-[file mod1_private.py.next]
-def foobar() -> int: return 1
-def baz() -> int: return 2
-const = 1 + baz()
-
-[rechecked mod1_private]
-[stale]
-
-[case testIncrementalWithComplexConstantExpressionWithAnnotation]
-import mod1
-
-[file mod1.py]
-import mod1_private
-
-[file mod1_private.py]
-def foobar() -> int: return 1
-def baz() -> int: return 2
-const = 1 + foobar() # type: int
-
-[file mod1_private.py.next]
-def foobar() -> int: return 1
-def baz() -> int: return 2
-const = 1 + baz() # type: int
-
-[rechecked mod1_private]
-[stale]
-
-[case testIncrementalSmall]
-import mod1
-
-[file mod1.py]
-import mod1_private
-def accepts_int(a: int) -> None: pass
-accepts_int(mod1_private.some_func(12))
-
-[file mod1_private.py]
-def some_func(a: int) -> int:
- return 1
-
-[file mod1_private.py.next]
-def some_func(a: int) -> str:
- return "a"
-
-[rechecked mod1, mod1_private]
-[stale mod1_private]
-[builtins fixtures/ops.pyi]
-[out2]
-tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int"
-
-[case testIncrementalWithDecorators]
-import mod1
-
-[file mod1.py]
-import mod1_private
-def accepts_int(a: int) -> None: pass
-accepts_int(mod1_private.some_func(12))
-
-[file mod1_private.py]
-from typing import Callable
-def multiply(f: Callable[[int], int]) -> Callable[[int], int]:
- return lambda a: f(a) * 10
-
-def stringify(f: Callable[[int], int]) -> Callable[[int], str]:
- return lambda a: str(f(a))
-
-@multiply
-def some_func(a: int) -> int:
- return a + 2
-
-[file mod1_private.py.next]
-from typing import Callable
-def multiply(f: Callable[[int], int]) -> Callable[[int], int]:
- return lambda a: f(a) * 10
-
-def stringify(f: Callable[[int], int]) -> Callable[[int], str]:
- return lambda a: str(f(a))
-
-@stringify
-def some_func(a: int) -> int:
- return a + 2
-[rechecked mod1, mod1_private]
-[stale mod1_private]
-[builtins fixtures/ops.pyi]
-[out2]
-tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int"
-
-[case testIncrementalChangingClassAttributes]
-import mod1
-
-[file mod1.py]
-import mod2
-mod2.Foo.A
-
-[file mod2.py]
-class Foo:
- A = 3
-
-[file mod2.py.next]
-class Foo:
- A = "hello"
-
-[rechecked mod1, mod2]
-[stale mod2]
-
-[case testIncrementalChangingFields]
-import mod1
-
-[file mod1.py]
-import mod2
-f = mod2.Foo()
-f.A
-
-[file mod2.py]
-class Foo:
- def __init__(self) -> None:
- self.A = 3
-
-[file mod2.py.next]
-class Foo:
- def __init__(self) -> None:
- self.A = "hello"
-
-[rechecked mod1, mod2]
-[stale mod2]
-[out2]
-
-[case testIncrementalChangingFieldsWithAssignment]
-import mod1
-
-[file mod1.py]
-import mod2
-f = mod2.Foo()
-B = f.A
-
-[file mod2.py]
-class Foo:
- def __init__(self) -> None:
- self.A = 3
-
-[file mod2.py.next]
-class Foo:
- def __init__(self) -> None:
- self.A = "hello"
-
-[rechecked mod1, mod2]
-[stale mod1, mod2]
-
-[case testIncrementalCheckingChangingFields]
-import mod1
-
-[file mod1.py]
-import mod2
-def accept_int(a: int) -> int: return a
-f = mod2.Foo()
-accept_int(f.A)
-
-[file mod2.py]
-class Foo:
- def __init__(self) -> None:
- self.A = 3
-
-[file mod2.py.next]
-class Foo:
- def __init__(self) -> None:
- self.A = "hello"
-
-[rechecked mod1, mod2]
-[stale mod2]
-[out2]
-tmp/mod1.py:4: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
-
-[case testIncrementalNestedClassDefinition]
-import mod1
-
-[file mod1.py]
-import mod2
-b = mod2.Foo.Bar()
-b.attr
-
-[file mod2.py]
-class Foo:
- class Bar:
- attr = 3
-
-[file mod2.py.next]
-class Foo:
- class Bar:
- attr = "foo"
-
-[rechecked mod1, mod2]
-[stale mod2]
-
-[case testIncrementalSimpleBranchingModules]
-import mod1
-import mod2
-
-[file mod1.py]
-def func() -> None: pass
-
-[file mod2.py]
-def func() -> None: pass
-
-[file mod1.py.next]
-def func() -> int: return 1
-
-[rechecked mod1]
-[stale mod1]
-
-[case testIncrementalSubmoduleImport]
-from parent.childA import Foo
-
-def func1() -> Foo:
- return Foo()
-
-[file parent/__init__.py]
-from parent.childA import Foo
-from parent.childB import Bar
-
-__all__ = ['Foo', 'Bar']
-
-[file parent/childA.py]
-import parent
-
-class Foo:
- def test(self) -> int:
- return parent.Bar().test()
-
-[file parent/childB.py]
-class Bar:
- def test(self) -> int: return 3
-
-[builtins fixtures/module_all.pyi]
-[rechecked]
-[stale]
-
-[case testIncrementalSubmoduleWithAttr]
-import mod.child
-x = mod.child.Foo()
-x.bar()
-
-[file mod/__init__.py]
-
-[file mod/child.py]
-class Foo:
- def bar(self) -> None: pass
-[builtins fixtures/module.pyi]
-[rechecked]
-[stale]
-
-[case testIncrementalNestedSubmoduleImportFromWithAttr]
-from mod1.mod2 import mod3
-def accept_int(a: int) -> None: pass
-
-accept_int(mod3.val3)
-
-[file mod1/__init__.py]
-val1 = 1
-
-[file mod1/mod2/__init__.py]
-val2 = 1
-
-[file mod1/mod2/mod3.py]
-val3 = 1
-
-[builtins fixtures/module.pyi]
-[rechecked]
-[stale]
-
-[case testIncrementalNestedSubmoduleWithAttr]
-import mod1.mod2.mod3
-def accept_int(a: int) -> None: pass
-
-accept_int(mod1.mod2.mod3.val3)
-accept_int(mod1.mod2.val2)
-accept_int(mod1.val1)
-
-[file mod1/__init__.py]
-val1 = 1
-
-[file mod1/mod2/__init__.py]
-val2 = 1
-
-[file mod1/mod2/mod3.py]
-val3 = 1
-
-[builtins fixtures/module.pyi]
-[rechecked]
-[stale]
-
-[case testIncrementalSubmoduleParentWithImportFrom]
-import parent
-
-[file parent/__init__.py]
-from parent import a
-
-[file parent/a.py]
-val = 3
-
-[builtins fixtures/args.pyi]
-[stale]
-
-[case testIncrementalSubmoduleParentBackreference]
-import parent
-
-[file parent/__init__.py]
-from parent import a
-
-[file parent/a.py]
-import parent.b
-
-[file parent/b.py]
-
-[builtins fixtures/args.pyi]
-[stale]
-
-[case testIncrementalSubmoduleParentBackreferenceComplex]
-import parent
-
-[file parent/__init__.py]
-import parent.a
-
-[file parent/a.py]
-import parent.b
-import parent.c
-
-[file parent/b.py]
-import parent.a
-
-[file parent/c.py]
-import parent.a
-
-[builtins fixtures/args.pyi]
-[stale]
-
-[case testIncrementalReferenceNewFileWithImportFrom]
-from parent import a
-
-[file parent/__init__.py]
-
-[file parent/a.py]
-
-[file parent/a.py.next]
-from parent import b
-
-[file parent/b.py.next]
-
-[stale parent, parent.a, parent.b]
-
-[case testIncrementalReferenceExistingFileWithImportFrom]
-from parent import a, b
-
-[file parent/__init__.py]
-
-[file parent/a.py]
-
-[file parent/b.py]
-
-[file parent/a.py.next]
-from parent import b
-
-[stale parent.a]
-
-[case testIncrementalWithTypeIgnoreOnDirectImport]
-import a, b
-
-[file a.py]
-import b # type: ignore
-
-[file b.py]
-import c
-
-[file c.py]
-
-[stale]
-
-[case testIncrementalWithTypeIgnoreOnImportFrom]
-import a, b
-
-[file a.py]
-from b import something # type: ignore
-
-[file b.py]
-import c
-something = 3
-
-[file c.py]
-
-[stale]
-
-[case testIncrementalWithPartialTypeIgnore]
-import a # type: ignore
-import a.b
-
-[file a/__init__.py]
-
-[file a/b.py]
-
-[stale]
-
-[case testIncrementalAnyIsDifferentFromIgnore]
-import b
-
-[file b.py]
-from typing import Any
-import a.b
-
-[file b.py.next]
-from typing import Any
-
-a = 3 # type: Any
-import a.b
-
-[file a/__init__.py]
-
-[file a/b.py]
-
-[rechecked b]
-[stale]
-[out2]
-tmp/b.py:4: error: Name 'a' already defined
-
-[case testIncrementalSilentImportsAndImportsInClass]
-# flags: --ignore-missing-imports
-class MyObject(object):
- from bar import FooBar
-[stale]
-
-[case testIncrementalSameFileSize]
-import m
-
-[file m.py]
-def foo(a: int) -> None: pass
-def bar(a: str) -> None: pass
-
-foo(3)
-
-[file m.py.next]
-def foo(a: int) -> None: pass
-def bar(a: str) -> None: pass
-
-bar(3)
-
-[rechecked m]
-[stale]
-[out2]
-tmp/m.py:4: error: Argument 1 to "bar" has incompatible type "int"; expected "str"
-
-[case testIncrementalUnsilencingModule]
-# cmd: mypy -m main package.subpackage.mod2
-# cmd2: mypy -m main package.subpackage.mod1
-# flags: --follow-imports=skip
-
-[file main.py]
-from package.subpackage.mod1 import Class
-
-def handle(c: Class) -> None:
- c.some_attribute
-
-[file package/__init__.py]
-# empty
-
-[file package/subpackage/__init__.py]
-# empty
-
-[file package/subpackage/mod1.py]
-import collections # Any previously unloaded package works here
-
-class Class: pass
-
-[file package/subpackage/mod2.py]
-# empty
-
-[builtins fixtures/args.pyi]
-[rechecked collections, main, package.subpackage.mod1]
-[stale collections, package.subpackage.mod1]
-[out2]
-tmp/main.py:4: error: "Class" has no attribute "some_attribute"
-
-[case testIncrementalWithIgnores]
-import foo # type: ignore
-
-[builtins fixtures/module.pyi]
-[stale]
-
-[case testIncrementalWithSilentImportsAndIgnore]
-# cmd: mypy -m main b
-# cmd2: mypy -m main c c.submodule
-# flags: --follow-imports=skip
-
-[file main.py]
-import a # type: ignore
-import b
-import c
-
-a.A().foo()
-b.B().foo()
-c.C().foo()
-
-[file b.py]
-class B:
- def foo(self) -> None: pass
-
-[file b.py.next]
-
-[file c/__init__.py]
-class C: pass
-
-[file c/submodule.py]
-val = 3 # type: int
-val = "foo"
-
-[builtins fixtures/module_all.pyi]
-[rechecked main, c, c.submodule]
-[stale]
-[out2]
-tmp/c/submodule.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-tmp/main.py:7: error: "C" has no attribute "foo"
-
-[case testIncrementalRemoteError]
-import m
-m.C().foo().bar()
-[file m.py]
-import n
-class C:
- def foo(self) -> n.A: pass
-[file n.py]
-class A:
- def bar(self): pass
-[file n.py.next]
-class A:
- pass
-[rechecked m, n]
-[stale n]
-[out2]
-main:2: error: "A" has no attribute "bar"
-
-[case testIncrementalRemoteErrorFixed]
-import m
-m.C().foo().bar()
-[file m.py]
-import n
-class C:
- def foo(self) -> n.A: pass
-[file n.py]
-class A:
- pass
-[file n.py.next]
-class A:
- def bar(self): pass
-[rechecked m, n]
-[stale n]
-[out1]
-main:2: error: "A" has no attribute "bar"
-
-[case testIncrementalChangedError]
-import m
-[file m.py]
-import n
-def accept_int(x: int) -> None: pass
-accept_int(n.foo)
-[file n.py]
-foo = "hello"
-reveal_type(foo)
-[file n.py.next]
-foo = 3.14
-reveal_type(foo)
-[rechecked m, n]
-[stale]
-[out1]
-tmp/n.py:2: error: Revealed type is 'builtins.str'
-tmp/m.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
-[out2]
-tmp/n.py:2: error: Revealed type is 'builtins.float'
-tmp/m.py:3: error: Argument 1 to "accept_int" has incompatible type "float"; expected "int"
-
-[case testIncrementalReplacingImports]
-import good, bad, client
-
-[file good.py]
-def foo(a: int) -> None: pass
-
-[file bad.py]
-def foo(a: str) -> None: pass
-
-[file client.py]
-import good
-import bad
-from good import foo
-foo(3)
-
-[file client.py.next]
-import good
-import bad
-from bad import foo
-foo(3)
-
-[rechecked client]
-[stale]
-[out2]
-tmp/client.py:4: error: Argument 1 to "foo" has incompatible type "int"; expected "str"
-
-[case testIncrementalChangingAlias]
-import m1, m2, m3, m4, m5
-
-[file m1.py]
-from m2 import A
-def accepts_int(x: int) -> None: pass
-accepts_int(A())
-
-[file m2.py]
-from m3 import A
-
-[file m3.py]
-from m4 import B
-A = B
-
-[file m3.py.next]
-from m5 import C
-A = C
-
-[file m4.py]
-def B() -> int:
- return 42
-
-[file m5.py]
-def C() -> str:
- return "hello"
-
-[rechecked m1, m2, m3]
-[stale m3]
-[out2]
-tmp/m1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int"
-
-[case testIncrementalSilentImportsWithBlatantError]
-# cmd: mypy -m main
-# flags: --follow-imports=skip
-
-[file main.py]
-from evil import Hello
-
-[file main.py.next]
-from evil import Hello
-reveal_type(Hello())
-
-[file evil.py]
-def accept_int(x: int) -> None: pass
-accept_int("not an int")
-
-[rechecked main]
-[stale]
-[out2]
-tmp/main.py:2: error: Revealed type is 'Any'
-
-[case testIncrementalImportIsNewlySilenced]
-# cmd: mypy -m main foo
-# cmd2: mypy -m main
-# flags: --follow-imports=skip
-
-[file main.py]
-from foo import bar
-def accept_int(x: int) -> None: pass
-accept_int(bar)
-
-[file foo.py]
-bar = 3
-
-[file foo.py.next]
-# Empty!
-
-[rechecked main]
-[stale main]
-
-[case testIncrementalSilencedModuleNoLongerCausesError]
-# cmd: mypy -m main evil
-# cmd2: mypy -m main
-# flags: --follow-imports=skip
-
-[file main.py]
-from evil import bar
-def accept_int(x: int) -> None: pass
-accept_int(bar)
-reveal_type(bar)
-
-[file evil.py]
-bar = "str"
-
-[rechecked main]
-[stale]
-[out1]
-tmp/main.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
-tmp/main.py:4: error: Revealed type is 'builtins.str'
-[out2]
-tmp/main.py:4: error: Revealed type is 'Any'
-
-[case testIncrementalFixedBugCausesPropagation]
-import mod1
-
-[file mod1.py]
-from mod2 import A
-val = A().makeB().makeC().foo()
-reveal_type(val)
-
-[file mod2.py]
-from mod3 import B
-class A:
- def makeB(self) -> B: return B()
-
-[file mod3.py]
-from mod4 import C
-class B:
- def makeC(self) -> C:
- val = 3 # type: int
- val = "str" # deliberately triggering error
- return C()
-
-[file mod3.py.next]
-from mod4 import C
-class B:
- def makeC(self) -> C: return C()
-
-[file mod4.py]
-class C:
- def foo(self) -> int: return 1
-
-[rechecked mod3, mod2, mod1]
-[stale mod3, mod2]
-[out1]
-tmp/mod3.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-tmp/mod1.py:3: error: Revealed type is 'builtins.int'
-
-[out2]
-tmp/mod1.py:3: error: Revealed type is 'builtins.int'
-
-[case testIncrementalIncidentalChangeWithBugCausesPropagation]
-import mod1
-
-[file mod1.py]
-from mod2 import A
-val = A().makeB().makeC().foo()
-reveal_type(val)
-
-[file mod2.py]
-from mod3 import B
-class A:
- def makeB(self) -> B: return B()
-
-[file mod3.py]
-from mod4 import C
-class B:
- def makeC(self) -> C:
- val = 3 # type: int
- val = "str" # deliberately triggering error
- return C()
-
-[file mod4.py]
-class C:
- def foo(self) -> int: return 1
-
-[file mod4.py.next]
-class C:
- def foo(self) -> str: return 'a'
-
-[rechecked mod4, mod3, mod2, mod1]
-[stale mod4]
-[out1]
-tmp/mod3.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-tmp/mod1.py:3: error: Revealed type is 'builtins.int'
-
-[out2]
-tmp/mod3.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-tmp/mod1.py:3: error: Revealed type is 'builtins.str'
-
-[case testIncrementalIncidentalChangeWithBugFixCausesPropagation]
-import mod1
-
-[file mod1.py]
-from mod2 import A
-val = A().makeB().makeC().foo()
-reveal_type(val)
-
-[file mod2.py]
-from mod3 import B
-class A:
- def makeB(self) -> B: return B()
-
-[file mod3.py]
-from mod4 import C
-class B:
- def makeC(self) -> C:
- val = 3 # type: int
- val = "str" # deliberately triggering error
- return C()
-
-[file mod3.py.next]
-from mod4 import C
-class B:
- def makeC(self) -> C: return C()
-
-[file mod4.py]
-class C:
- def foo(self) -> int: return 1
-
-[file mod4.py.next]
-class C:
- def foo(self) -> str: return 'a'
-
-[rechecked mod4, mod3, mod2, mod1]
-[stale mod4, mod3, mod2]
-[out1]
-tmp/mod3.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-tmp/mod1.py:3: error: Revealed type is 'builtins.int'
-
-[out2]
-tmp/mod1.py:3: error: Revealed type is 'builtins.str'
-
-[case testIncrementalSilentImportsWithInnerImports]
-# cmd: mypy -m main foo
-# flags: --ignore-missing-imports
-
-[file main.py]
-from foo import MyClass
-m = MyClass()
-
-[file main.py.next]
-from foo import MyClass
-m = MyClass()
-reveal_type(m.val)
-
-[file foo.py]
-class MyClass:
- def __init__(self) -> None:
- import unrelated
- self.val = unrelated.test()
-
-[rechecked main]
-[stale]
-[out2]
-tmp/main.py:3: error: Revealed type is 'Any'
-
-[case testIncrementalSilentImportsWithInnerImportsAndNewFile]
-# cmd: mypy -m main foo
-# cmd2: mypy -m main foo unrelated
-# flags: --follow-imports=skip
-
-[file main.py]
-from foo import MyClass
-m = MyClass()
-
-[file main.py.next]
-from foo import MyClass
-m = MyClass()
-reveal_type(m.val)
-
-[file foo.py]
-class MyClass:
- def __init__(self) -> None:
- import unrelated
- self.val = unrelated.test()
-
-[file unrelated.py]
-def test() -> str: return "foo"
-
-[rechecked main, foo, unrelated]
-[stale foo, unrelated]
-[out2]
-tmp/main.py:3: error: Revealed type is 'builtins.str'
-
-[case testIncrementalWorksWithNestedClasses]
-import foo
-
-[file foo.py]
-class MyClass:
- class NestedClass:
- pass
-
- class_attr = NestedClass()
-
-[rechecked]
-[stale]
-
-[case testIncrementalWorksWithNamedTuple]
-import foo
-
-[file foo.py]
-from mid import MyTuple
-def accept_int(x: int) -> None: pass
-accept_int(MyTuple(1, "b", "c").a)
-
-[file mid.py]
-from bar import MyTuple
-
-[file bar.py]
-from typing import NamedTuple
-MyTuple = NamedTuple('MyTuple', [
- ('a', int),
- ('b', str),
- ('c', str)
-])
-
-[file bar.py.next]
-from typing import NamedTuple
-MyTuple = NamedTuple('MyTuple', [
- ('b', int), # a and b are swapped
- ('a', str),
- ('c', str)
-])
-
-[rechecked bar, mid, foo]
-[stale bar]
-[out2]
-tmp/foo.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
-
-[case testIncrementalWorksWithNestedNamedTuple]
-import foo
-
-[file foo.py]
-from mid import Outer
-def accept_int(x: int) -> None: pass
-accept_int(Outer.MyTuple(1, "b", "c").a)
-
-[file mid.py]
-from bar import Outer
-
-[file bar.py]
-from typing import NamedTuple
-class Outer:
- MyTuple = NamedTuple('MyTuple', [
- ('a', int),
- ('b', str),
- ('c', str)
- ])
-
-[file bar.py.next]
-from typing import NamedTuple
-class Outer:
- MyTuple = NamedTuple('MyTuple', [
- ('b', int), # a and b are swapped
- ('a', str),
- ('c', str)
- ])
-
-[rechecked bar, mid, foo]
-[stale bar]
-[out2]
-tmp/foo.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
-
-[case testIncrementalPartialSubmoduleUpdate]
-# cmd: mypy -m a
-# cmd2: mypy -m a a.c
-# flags: --follow-imports=skip
-
-[file a/__init__.py]
-from .b import B
-from .c import C
-
-[file a/b.py]
-class B: pass
-
-[file a/c.py]
-class C: pass
-
-[file a/c.py.next]
-class C: pass
-pass
-
-[rechecked a, a.c]
-[stale a, a.c]
-[out]
-
-[case testIncrementalNestedClassRef]
-import top
-
-[file top.py]
-from funcs import callee
-from classes import Outer
-def caller(a: Outer.Inner) -> None:
- callee(a)
-
-[file funcs.py]
-from classes import Outer
-def callee(a: Outer.Inner) -> None:
- pass
-
-[file classes.py]
-class Outer:
- class Inner:
- pass
-
-[file top.py.next]
-from funcs import callee
-from classes import Outer
-def caller(a: Outer.Inner) -> int:
- callee(a)
-
-[case testIncrementalLoadsParentAfterChild]
-# cmd: mypy -m r.s
-
-[file r/__init__.py]
-from . import s
-
-[file r/m.py]
-class R: pass
-
-[file r/s.py]
-from . import m
-R = m.R
-a = None # type: R
-
-[file r/s.py.next]
-from . import m
-R = m.R
-a = None # type: R
-
-[case testIncrementalBaseClassAttributeConflict]
-class A: pass
-class B: pass
-
-class X:
- attr = None # type: A
-class Y:
- attr = None # type: B
-class Z(X, Y): pass
-[stale]
-[out]
-main:8: error: Definition of "attr" in base class "X" is incompatible with definition in base class "Y"
-[out2]
-main:8: error: Definition of "attr" in base class "X" is incompatible with definition in base class "Y"
-
-[case testIncrementalFollowImportsSilent]
-# flags: --follow-imports=silent
-import a
-[file a.py]
-x = 0
-[file a.py.next]
-x = 0
-x + ''
-
-[case testIncrementalFollowImportsSkip]
-# flags: --follow-imports=skip
-import a
-reveal_type(a.x)
-[file a.py]
-/
-[file a.py.next]
-//
-[out]
-main:3: error: Revealed type is 'Any'
-[out2]
-main:3: error: Revealed type is 'Any'
-
-[case testIncrementalFollowImportsError]
-# flags: --follow-imports=error
-import a
-[file a.py]
-/
-[file a.py.next]
-//
-[out1]
-main:2: note: Import of 'a' ignored
-main:2: note: (Using --follow-imports=error, module not passed on command line)
-[out2]
-main:2: note: Import of 'a' ignored
-main:2: note: (Using --follow-imports=error, module not passed on command line)
-
-[case testIncrementalFollowImportsVariable]
-# flags: --config-file tmp/mypy.ini
-import a
-reveal_type(a.x)
-[file a.py]
-x = 0
-[file mypy.ini]
-[[mypy]
-follow_imports = normal
-[file mypy.ini.next]
-[[mypy]
-follow_imports = skip
-[out1]
-main:3: error: Revealed type is 'builtins.int'
-[out2]
-main:3: error: Revealed type is 'Any'
-
-[case testIncrementalNamedTupleInMethod]
-from ntcrash import nope
-[file ntcrash.py]
-from typing import NamedTuple
-class C:
- def f(self) -> None:
- A = NamedTuple('A', [('x', int), ('y', int)])
-[out1]
-main:1: error: Module 'ntcrash' has no attribute 'nope'
-[out2]
-main:1: error: Module 'ntcrash' has no attribute 'nope'
-
-[case testIncrementalNamedTupleInMethod2]
-from ntcrash import nope
-[file ntcrash.py]
-from typing import NamedTuple
-class C:
- class D:
- def f(self) -> None:
- A = NamedTuple('A', [('x', int), ('y', int)])
-[out1]
-main:1: error: Module 'ntcrash' has no attribute 'nope'
-[out2]
-main:1: error: Module 'ntcrash' has no attribute 'nope'
-
-[case testIncrementalNamedTupleInMethod3]
-from ntcrash import nope
-[file ntcrash.py]
-from typing import NamedTuple
-class C:
- def a(self):
- class D:
- def f(self) -> None:
- A = NamedTuple('A', [('x', int), ('y', int)])
-[out1]
-main:1: error: Module 'ntcrash' has no attribute 'nope'
-[out2]
-main:1: error: Module 'ntcrash' has no attribute 'nope'
-
-[case testIncrementalNamedTupleInMethod4]
-from ntcrash import C
-reveal_type(C().a)
-reveal_type(C().b)
-reveal_type(C().c)
-[file ntcrash.py]
-from typing import NamedTuple
-class C:
- def __init__(self) -> None:
- A = NamedTuple('A', [('x', int)])
- self.a = A(0)
- self.b = A(0) # type: A
- self.c = A
-[out1]
-main:2: error: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]'
-main:3: error: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]'
-main:4: error: Revealed type is 'def (x: builtins.int) -> Tuple[builtins.int, fallback=ntcrash.C.A@4]'
-[out2]
-main:2: error: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]'
-main:3: error: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]'
-main:4: error: Revealed type is 'def (x: builtins.int) -> Tuple[builtins.int, fallback=ntcrash.C.A@4]'
-
-[case testIncrementalTypedDictInMethod]
-from tdcrash import nope
-[file tdcrash.py]
-from mypy_extensions import TypedDict
-class C:
- def f(self) -> None:
- A = TypedDict('A', {'x': int, 'y': int})
-[builtins fixtures/dict.pyi]
-[out1]
-main:1: error: Module 'tdcrash' has no attribute 'nope'
-[out2]
-main:1: error: Module 'tdcrash' has no attribute 'nope'
-
-[case testIncrementalTypedDictInMethod2]
-from tdcrash import nope
-[file tdcrash.py]
-from mypy_extensions import TypedDict
-class C:
- class D:
- def f(self) -> None:
- A = TypedDict('A', {'x': int, 'y': int})
-[builtins fixtures/dict.pyi]
-[out1]
-main:1: error: Module 'tdcrash' has no attribute 'nope'
-[out2]
-main:1: error: Module 'tdcrash' has no attribute 'nope'
-
-[case testIncrementalTypedDictInMethod3]
-from tdcrash import nope
-[file tdcrash.py]
-from mypy_extensions import TypedDict
-class C:
- def a(self):
- class D:
- def f(self) -> None:
- A = TypedDict('A', {'x': int, 'y': int})
-[builtins fixtures/dict.pyi]
-[out1]
-main:1: error: Module 'tdcrash' has no attribute 'nope'
-[out2]
-main:1: error: Module 'tdcrash' has no attribute 'nope'
-
-[case testIncrementalTypedDictInMethod4]
-from ntcrash import C
-reveal_type(C().a)
-reveal_type(C().b)
-reveal_type(C().c)
-[file ntcrash.py]
-from mypy_extensions import TypedDict
-class C:
- def __init__(self) -> None:
- A = TypedDict('A', {'x': int})
- self.a = A(x=0)
- self.b = A(x=0) # type: A
- self.c = A
-[builtins fixtures/dict.pyi]
-[out1]
-main:2: error: Revealed type is 'TypedDict(x=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
-main:3: error: Revealed type is 'TypedDict(x=builtins.int, _fallback=ntcrash.C.A@4)'
-main:4: error: Revealed type is 'def () -> ntcrash.C.A@4'
-[out2]
-main:2: error: Revealed type is 'TypedDict(x=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
-main:3: error: Revealed type is 'TypedDict(x=builtins.int, _fallback=ntcrash.C.A@4)'
-main:4: error: Revealed type is 'def () -> ntcrash.C.A@4'
-
-[case testIncrementalPerFileFlags]
-# flags: --config-file tmp/mypy.ini
-import a
-[file a.py]
-pass
-[file mypy.ini]
-[[mypy]
-warn_no_return = False
-[[mypy-a]
-warn_no_return = True
-[rechecked]
diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test
deleted file mode 100644
index f93429e..0000000
--- a/test-data/unit/check-inference-context.test
+++ /dev/null
@@ -1,880 +0,0 @@
-
-
--- Basic test cases
--- ----------------
-
-
-[case testBasicContextInference]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-ab = None # type: A[B]
-ao = None # type: A[object]
-b = None # type: B
-
-ao = f()
-ab = f()
-b = f() # E: Incompatible types in assignment (expression has type A[None], variable has type "B")
-
-def f() -> 'A[T]': pass
-
-class A(Generic[T]): pass
-class B: pass
-
-[case testBasicContextInferenceForConstructor]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-ab = None # type: A[B]
-ao = None # type: A[object]
-b = None # type: B
-
-ao = A()
-ab = A()
-b = A() # E: Incompatible types in assignment (expression has type A[None], variable has type "B")
-
-class A(Generic[T]): pass
-class B: pass
-
-[case testIncompatibleContextInference]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-b = None # type: B
-c = None # type: C
-ab = None # type: A[B]
-ao = None # type: A[object]
-ac = None # type: A[C]
-
-ac = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "C"
-ab = f(c) # E: Argument 1 to "f" has incompatible type "C"; expected "B"
-
-ao = f(b)
-ab = f(b)
-ao = f(c)
-ac = f(c)
-
-def f(a: T) -> 'A[T]':
- pass
-
-class A(Generic[T]): pass
-
-class B: pass
-class C: pass
-
-
--- Local variables
--- ---------------
-
-
-[case testInferGenericLocalVariableTypeWithEmptyContext]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-def g() -> None:
- ao = None # type: A[object]
- ab = None # type: A[B]
- o = None # type: object
- b = None # type: B
-
- x = f(o)
- ab = x # E: Incompatible types in assignment (expression has type A[object], variable has type A[B])
- ao = x
- y = f(b)
- ao = y # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
- ab = y
-
-def f(a: T) -> 'A[T]': pass
-
-class A(Generic[T]): pass
-class B: pass
-[out]
-
-[case testInferLocalVariableTypeWithUnderspecifiedGenericType]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-def g() -> None:
- x = f() # E: Need type annotation for variable
-
-def f() -> 'A[T]': pass
-class A(Generic[T]): pass
-[out]
-
-[case testInferMultipleLocalVariableTypesWithTupleRvalue]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-def g() -> None:
- ao = None # type: A[object]
- ab = None # type: A[B]
- b = None # type: B
- x, y = f(b), f(b)
- ao = x # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
- ao = y # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
- ab = x
- ab = y
-
-def f(a: T) -> 'A[T]': pass
-class A(Generic[T]): pass
-class B: pass
-[out]
-
-[case testInferMultipleLocalVariableTypesWithArrayRvalueAndNesting]
-from typing import TypeVar, List, Generic
-T = TypeVar('T')
-def h() -> None:
- ao = None # type: A[object]
- ab = None # type: A[B]
- b = None # type: B
- x, y = g(f(b))
- ao = x # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
- ao = y # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
- ab = x
- ab = y
-
-def f(a: T) -> 'A[T]': pass
-def g(a: T) -> List[T]: pass
-
-class A(Generic[T]): pass
-class B: pass
-[builtins fixtures/for.pyi]
-[out]
-
-
--- Return types with multiple tvar instances
--- -----------------------------------------
-
-
-[case testInferenceWithTypeVariableTwiceInReturnType]
-from typing import TypeVar, Tuple, Generic
-T = TypeVar('T')
-b = None # type: B
-o = None # type: object
-ab = None # type: A[B]
-ao = None # type: A[object]
-
-ab, ao = f(b) # Fail
-ao, ab = f(b) # Fail
-
-ao, ao = f(b)
-ab, ab = f(b)
-ao, ao = f(o)
-
-def f(a: T) -> 'Tuple[A[T], A[T]]': pass
-
-class A(Generic[T]): pass
-class B: pass
-[builtins fixtures/tuple.pyi]
-[out]
-main:8: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
-main:9: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
-
-[case testInferenceWithTypeVariableTwiceInReturnTypeAndMultipleVariables]
-from typing import TypeVar, Tuple, Generic
-S = TypeVar('S')
-T = TypeVar('T')
-b = None # type: B
-o = None # type: object
-ab = None # type: A[B]
-ao = None # type: A[object]
-
-ao, ao, ab = f(b, b) # Fail
-ao, ab, ao = g(b, b) # Fail
-ao, ab, ab, ab = h(b, b) # Fail
-ab, ab, ao, ab = h(b, b) # Fail
-
-ao, ab, ab = f(b, b)
-ab, ab, ao = g(b, b)
-ab, ab, ab, ab = h(b, b)
-
-def f(a: S, b: T) -> 'Tuple[A[S], A[T], A[T]]': pass
-def g(a: S, b: T) -> 'Tuple[A[S], A[S], A[T]]': pass
-def h(a: S, b: T) -> 'Tuple[A[S], A[S], A[T], A[T]]': pass
-
-class A(Generic[T]): pass
-class B: pass
-[builtins fixtures/tuple.pyi]
-[out]
-main:9: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
-main:10: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
-main:11: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
-main:12: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
-
-
--- Multiple tvar instances in arguments
--- ------------------------------------
-
-
-[case testMultipleTvatInstancesInArgs]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-ac = None # type: A[C]
-ab = None # type: A[B]
-ao = None # type: A[object]
-b = None # type: B
-c = None # type: C
-o = None # type: object
-
-ab = f(b, o) # E: Argument 2 to "f" has incompatible type "object"; expected "B"
-ab = f(o, b) # E: Argument 1 to "f" has incompatible type "object"; expected "B"
-ac = f(b, c) # E: Argument 1 to "f" has incompatible type "B"; expected "C"
-ac = f(c, b) # E: Argument 2 to "f" has incompatible type "B"; expected "C"
-
-ao = f(b, c)
-ao = f(c, b)
-ab = f(c, b)
-
-def f(a: T, b: T) -> 'A[T]': pass
-
-class A(Generic[T]): pass
-class B: pass
-class C(B): pass
-
-
--- Nested generic function calls
--- -----------------------------
-
-
-[case testNestedGenericFunctionCall1]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-aab = None # type: A[A[B]]
-aao = None # type: A[A[object]]
-ao = None # type: A[object]
-b = None # type: B
-o = None # type: object
-
-aab = f(f(o)) # E: Argument 1 to "f" has incompatible type "object"; expected "B"
-
-aab = f(f(b))
-aao = f(f(b))
-ao = f(f(b))
-
-def f(a: T) -> 'A[T]': pass
-
-class A(Generic[T]): pass
-class B: pass
-
-[case testNestedGenericFunctionCall2]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-ab = None # type: A[B]
-ao = None # type: A[object]
-b = None # type: B
-o = None # type: object
-
-ab = f(g(o)) # E: Argument 1 to "g" has incompatible type "object"; expected "B"
-
-ab = f(g(b))
-ao = f(g(b))
-
-def f(a: T) -> T: pass
-
-def g(a: T) -> 'A[T]': pass
-
-class A(Generic[T]): pass
-class B: pass
-
-[case testNestedGenericFunctionCall3]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-ab = None # type: A[B]
-ao = None # type: A[object]
-b = None # type: B
-o = None # type: object
-
-ab = f(g(o), g(b)) # E: Argument 1 to "g" has incompatible type "object"; expected "B"
-ab = f(g(b), g(o)) # E: Argument 1 to "g" has incompatible type "object"; expected "B"
-
-ab = f(g(b), g(b))
-ao = f(g(b), g(o))
-ao = f(g(o), g(b))
-
-def f(a: T, b: T) -> T:
- pass
-
-def g(a: T) -> 'A[T]': pass
-
-class A(Generic[T]): pass
-class B: pass
-
-
--- Method calls
--- ------------
-
-
-[case testMethodCallWithContextInference]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-o = None # type: object
-b = None # type: B
-c = None # type: C
-ao = None # type: A[object]
-ab = None # type: A[B]
-ac = None # type: A[C]
-
-ab.g(f(o)) # E: Argument 1 to "f" has incompatible type "object"; expected "B"
-ac = f(b).g(f(c)) # E: Incompatible types in assignment (expression has type A[B], variable has type A[C])
-ac = f(c).g(f(b)) # E: Argument 1 to "f" has incompatible type "B"; expected "C"
-
-ab = f(b).g(f(c))
-ab.g(f(c))
-
-def f(a: T) -> 'A[T]': pass
-
-class A(Generic[T]):
- def g(self, a: 'A[T]') -> 'A[T]': pass
-
-class B: pass
-class C(B): pass
-
-
--- List expressions
--- ----------------
-
-
-[case testEmptyListExpression]
-from typing import List
-aa = None # type: List[A]
-ao = None # type: List[object]
-a = None # type: A
-
-a = [] # E: Incompatible types in assignment (expression has type List[None], variable has type "A")
-
-aa = []
-ao = []
-
-class A: pass
-[builtins fixtures/list.pyi]
-
-[case testSingleItemListExpressions]
-from typing import List
-aa = None # type: List[A]
-ab = None # type: List[B]
-ao = None # type: List[object]
-a = None # type: A
-b = None # type: B
-
-aa = [b] # E: List item 0 has incompatible type "B"
-ab = [a] # E: List item 0 has incompatible type "A"
-
-aa = [a]
-ab = [b]
-ao = [a]
-aa = [None]
-ao = [None]
-
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-
-[case testMultiItemListExpressions]
-from typing import List
-aa = None # type: List[A]
-ab = None # type: List[B]
-ao = None # type: List[object]
-a = None # type: A
-b = None # type: B
-
-ab = [b, a] # E: List item 1 has incompatible type "A"
-ab = [a, b] # E: List item 0 has incompatible type "A"
-
-aa = [a, b, a]
-ao = [a, b]
-
-class A: pass
-class B(A): pass
-[builtins fixtures/list.pyi]
-
-[case testLocalVariableInferenceFromEmptyList]
-import typing
-def f() -> None:
- a = [] # E: Need type annotation for variable
- b = [None] # E: Need type annotation for variable
- c = [B()]
- c = [object()] # E: List item 0 has incompatible type "object"
- c = [B()]
-class B: pass
-[builtins fixtures/list.pyi]
-[out]
-
-[case testNestedListExpressions]
-from typing import List
-aao = None # type: List[List[object]]
-aab = None # type: List[List[B]]
-ab = None # type: List[B]
-b = None # type: B
-o = None # type: object
-
-aao = [[o], ab] # E: List item 1 has incompatible type List[B]
-aab = [[], [o]] # E: List item 0 has incompatible type "object"
-
-aao = [[None], [b], [], [o]]
-aab = [[None], [b], []]
-aab = [ab, []]
-
-class B: pass
-[builtins fixtures/list.pyi]
-
-
--- Complex context
--- ---------------
-
-
-[case testParenthesesAndContext]
-from typing import List
-l = ([A()]) # type: List[object]
-class A: pass
-[builtins fixtures/list.pyi]
-
-[case testComplexTypeInferenceWithTuple]
-from typing import TypeVar, Tuple, Generic
-k = TypeVar('k')
-t = TypeVar('t')
-v = TypeVar('v')
-def f(x: Tuple[k]) -> 'A[k]': pass
-
-d = f((A(),)) # type: A[A[B]]
-
-class A(Generic[t]): pass
-class B: pass
-class C: pass
-class D(Generic[k, v]): pass
-[builtins fixtures/list.pyi]
-
-
--- Dictionary literals
--- -------------------
-
-
-[case testDictionaryLiteralInContext]
-from typing import Dict, TypeVar, Generic
-t = TypeVar('t')
-class A(Generic[t]): pass
-class B: pass
-class C: pass
-a_b = A() # type: A[B]
-a_c = A() # type: A[C]
-d = {A() : a_c,
- a_b : A()} # type: Dict[A[B], A[C]]
-[builtins fixtures/dict.pyi]
-
-
--- Special cases (regression tests etc.)
--- -------------------------------------
-
-
-[case testInitializationWithInferredGenericType]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-c = f(A()) # type: C[A] # E: Argument 1 to "f" has incompatible type "A"; expected C[A]
-
-def f(x: T) -> T: pass
-class C(Generic[T]): pass
-class A: pass
-
-[case testInferredGenericTypeAsReturnValue]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-def t() -> 'A[B]':
- return f(D()) # E: Argument 1 to "f" has incompatible type "D"; expected "B"
- return A()
- return f(C())
-
-def f(a: T) -> 'A[T]': pass
-class A(Generic[T]): pass
-class B: pass
-class C(B): pass
-class D: pass
-[out]
-
-[case testIntersectionWithInferredGenericArgument]
-from typing import overload, TypeVar, Generic
-T = TypeVar('T')
-f(A())
-
-@overload
-def f(x: 'A[B]') -> None: pass
-@overload
-def f(x: 'B') -> None: pass
-class A(Generic[T]): pass
-class B: pass
-
-[case testInferenceWithAbstractClassContext]
-from typing import TypeVar, Generic
-from abc import abstractmethod, ABCMeta
-t = TypeVar('t')
-x = A() # type: I[int]
-a_object = A() # type: A[object]
-y = a_object # type: I[int] # E: Incompatible types in assignment (expression has type A[object], variable has type I[int])
-
-class I(Generic[t]):
- @abstractmethod
- def f(self): pass
-class A(I[t], Generic[t]):
- def f(self): pass
-
-[case testInferenceWithAbstractClassContext2]
-from typing import TypeVar, Generic
-from abc import abstractmethod, ABCMeta
-t = TypeVar('t')
-a = f(A()) # type: A[int]
-a_int = A() # type: A[int]
-aa = f(a_int)
-class I(Generic[t]): pass
-class A(I[t], Generic[t]): pass
-def f(i: I[t]) -> A[t]: pass
-
-[case testInferenceWithAbstractClassContext3]
-from typing import TypeVar, Generic, Iterable
-t = TypeVar('t')
-class set(Generic[t]):
- def __init__(self, iterable: Iterable[t]) -> None: pass
-b = bool()
-l = set([b])
-l = set([object()]) # E: List item 0 has incompatible type "object"
-[builtins fixtures/for.pyi]
-
-
--- Infer generic type in 'Any' context
--- -----------------------------------
-
-
-[case testInferGenericTypeInAnyContext]
-from typing import Any, TypeVar, Generic
-s = TypeVar('s')
-t = TypeVar('t')
-x = [] # type: Any
-y = C() # type: Any
-class C(Generic[s, t]): pass
-[builtins fixtures/list.pyi]
-
-
--- Lambdas
--- -------
-
-
-[case testInferLambdaArgumentTypeUsingContext]
-from typing import Callable
-f = None # type: Callable[[B], A]
-f = lambda x: x.o
-f = lambda x: x.x # E: "B" has no attribute "x"
-class A: pass
-class B:
- o = None # type: A
-
-[case testInferLambdaReturnTypeUsingContext]
-from typing import List, Callable
-f = None # type: Callable[[], List[A]]
-f = lambda: []
-f = lambda: [B()] # E: List item 0 has incompatible type "B"
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-
-[case testInferLambdaAsGenericFunctionArgument]
-from typing import TypeVar, List, Any, Callable
-t = TypeVar('t')
-class A:
- x = None # type: A
-def f(a: List[t], fn: Callable[[t], Any]) -> None: pass
-list_a = [] # type: List[A]
-f(list_a, lambda a: a.x)
-[builtins fixtures/list.pyi]
-
-[case testLambdaWithoutContext]
-reveal_type(lambda x: x) # E: Revealed type is 'def (x: Any) -> Any'
-reveal_type(lambda x: 1) # E: Revealed type is 'def (x: Any) -> builtins.int'
-
-[case testLambdaContextVararg]
-from typing import Callable
-def f(t: Callable[[str], str]) -> str: ''
-f(lambda *_: '')
-
-[case testInvalidContextForLambda]
-from typing import Callable
-f = lambda x: A() # type: Callable[[], A]
-f2 = lambda: A() # type: Callable[[A], A]
-class A: pass
-[out]
-main:2: error: Incompatible types in assignment (expression has type Callable[[Any], A], variable has type Callable[[], A])
-main:2: error: Cannot infer type of lambda
-main:3: error: Incompatible types in assignment (expression has type Callable[[], A], variable has type Callable[[A], A])
-main:3: error: Cannot infer type of lambda
-
-[case testEllipsisContextForLambda]
-from typing import Callable
-f1 = lambda x: 1 # type: Callable[..., int]
-f2 = lambda: 1 # type: Callable[..., int]
-f3 = lambda *args, **kwargs: 1 # type: Callable[..., int]
-f4 = lambda x: x # type: Callable[..., int]
-g = lambda x: 1 # type: Callable[..., str]
-[builtins fixtures/dict.pyi]
-[out]
-main:6: error: Incompatible types in assignment (expression has type Callable[[Any], int], variable has type Callable[..., str])
-main:6: error: Incompatible return value type (got "int", expected "str")
-
-[case testEllipsisContextForLambda2]
-from typing import TypeVar, Callable
-T = TypeVar('T')
-def foo(arg: Callable[..., T]) -> None: pass
-foo(lambda: 1)
-
-[case testLambdaNoneInContext]
-from typing import Callable
-def f(x: Callable[[], None]) -> None: pass
-def g(x: Callable[[], int]) -> None: pass
-f(lambda: None)
-g(lambda: None)
-
-[case testIsinstanceInInferredLambda]
-from typing import TypeVar, Callable
-T = TypeVar('T')
-S = TypeVar('S')
-class A: pass
-class B(A): pass
-class C(A): pass
-def f(func: Callable[[T], S], *z: T, r: S = None) -> S: pass
-f(lambda x: 0 if isinstance(x, B) else 1) # E: Cannot infer type argument 1 of "f"
-f(lambda x: 0 if isinstance(x, B) else 1, A())() # E: "int" not callable
-f(lambda x: x if isinstance(x, B) else B(), A(), r=B())() # E: "B" not callable
-f( # E: Argument 1 to "f" has incompatible type Callable[[A], A]; expected Callable[[A], B]
- lambda x: B() if isinstance(x, B) else x, # E: Incompatible return value type (got "A", expected "B")
- A(), r=B())
-[builtins fixtures/isinstance.pyi]
-
-
--- Overloads + generic functions
--- -----------------------------
-
-
-[case testMapWithOverloadedFunc]
-from typing import TypeVar, Callable, List, overload, Any
-t = TypeVar('t')
-s = TypeVar('s')
-def map(f: Callable[[t], s], seq: List[t]) -> List[s]: pass
-
-@overload
-def g(o: object) -> 'B': pass
-@overload
-def g(o: 'A', x: Any = None) -> 'B': pass
-
-class A: pass
-class B: pass
-
-m = map(g, [A()])
-b = m # type: List[B]
-a = m # type: List[A] # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
-[builtins fixtures/list.pyi]
-
-
--- Boolean operators
--- -----------------
-
-
-[case testOrOperationInferredFromContext]
-from typing import List
-a, b, c = None, None, None # type: (List[A], List[B], List[C])
-a = a or []
-a = [] or a
-b = b or [C()]
-a = a or b # E: Incompatible types in assignment (expression has type "Union[List[A], List[B]]", variable has type List[A])
-b = b or c # E: Incompatible types in assignment (expression has type "Union[List[B], List[C]]", variable has type List[B])
-
-class A: pass
-class B: pass
-class C(B): pass
-[builtins fixtures/list.pyi]
-
-
--- Special cases
--- -------------
-
-
-[case testSomeTypeVarsInferredFromContext]
-from typing import List, TypeVar
-t = TypeVar('t')
-s = TypeVar('s')
-# Some type variables can be inferred using context, but not all of them.
-a = None # type: List[A]
-a = f(A(), B())
-a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
-def f(a: s, b: t) -> List[s]: pass
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-
-[case testSomeTypeVarsInferredFromContext2]
-from typing import List, TypeVar
-s = TypeVar('s')
-t = TypeVar('t')
-# Like testSomeTypeVarsInferredFromContext, but tvars in different order.
-a = None # type: List[A]
-a = f(A(), B())
-a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
-def f(a: s, b: t) -> List[s]: pass
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-
-[case testLambdaInListAndHigherOrderFunction]
-from typing import TypeVar, Callable, List
-t = TypeVar('t')
-s = TypeVar('s')
-map(
- [lambda x: x], [])
-def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass
-class A: pass
-[builtins fixtures/list.pyi]
-[out]
-
-[case testChainedAssignmentInferenceContexts]
-from typing import List
-i = None # type: List[int]
-s = None # type: List[str]
-i = i = []
-i = s = [] # E: Incompatible types in assignment (expression has type List[str], variable has type List[int])
-[builtins fixtures/list.pyi]
-
-[case testContextForAttributeDeclaredInInit]
-from typing import List
-class A:
- def __init__(self):
- self.x = [] # type: List[int]
-a = A()
-a.x = []
-a.x = [1]
-a.x = [''] # E: List item 0 has incompatible type "str"
-[builtins fixtures/list.pyi]
-
-[case testListMultiplyInContext]
-from typing import List
-a = None # type: List[int]
-a = [None] * 3
-a = [''] * 3 # E: List item 0 has incompatible type "str"
-[builtins fixtures/list.pyi]
-
-[case testUnionTypeContext]
-from typing import Union, List, TypeVar
-T = TypeVar('T')
-def f(x: Union[List[T], str]) -> None: pass
-f([1])
-f('')
-f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Union[List[None], str]"
-[builtins fixtures/isinstancelist.pyi]
-
-[case testIgnoringInferenceContext]
-from typing import TypeVar, List
-T = TypeVar('T')
-def f(x: List[T]) -> T: pass
-def g(y: object) -> None: pass
-a = [1]
-g(f(a))
-[builtins fixtures/list.pyi]
-
-[case testStar2Context]
-from typing import Any, Dict, Tuple, Iterable
-def f1(iterable: Iterable[Tuple[str, Any]] = None) -> None:
- f2(**dict(iterable))
-def f2(iterable: Iterable[Tuple[str, Any]], **kw: Any) -> None:
- pass
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testInferenceInGenericFunction]
-from typing import TypeVar, List
-T = TypeVar('T')
-def f(a: T) -> None:
- l = [] # type: List[T]
- l.append(a)
- l.append(1) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "T"
-[builtins fixtures/list.pyi]
-[out]
-
-[case testInferenceInGenericClass]
-from typing import TypeVar, Generic, List
-S = TypeVar('S')
-T = TypeVar('T')
-class A(Generic[S]):
- def f(self, a: T, b: S) -> None:
- l = [] # type: List[T]
- l.append(a)
- l.append(b) # E: Argument 1 to "append" of "list" has incompatible type "S"; expected "T"
-[builtins fixtures/list.pyi]
-[out]
-
-[case testLambdaInGenericFunction]
-from typing import TypeVar, Callable
-T = TypeVar('T')
-S = TypeVar('S')
-def f(a: T, b: S) -> None:
- c = lambda x: x # type: Callable[[T], S]
-[out]
-main:5: error: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[T], S])
-main:5: error: Incompatible return value type (got "T", expected "S")
-
-[case testLambdaInGenericClass]
-from typing import TypeVar, Callable, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-class A(Generic[T]):
- def f(self, b: S) -> None:
- c = lambda x: x # type: Callable[[T], S]
-[out]
-main:6: error: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[T], S])
-main:6: error: Incompatible return value type (got "T", expected "S")
-
-[case testRevealTypeContext]
-from typing import TypeVar, Callable, Generic
-T = TypeVar('T')
-class A(Generic[T]):
- pass
-reveal_type(A()) # E: Revealed type is '__main__.A[builtins.None]'
-b = reveal_type(A()) # type: A[int] # E: Revealed type is '__main__.A[builtins.int]'
-
-[case testUnionWithGenericTypeItemContext]
-from typing import TypeVar, Union, List
-
-T = TypeVar('T')
-
-def f(x: Union[T, List[int]]) -> Union[T, List[int]]: pass
-reveal_type(f(1)) # E: Revealed type is 'Union[builtins.int*, builtins.list[builtins.int]]'
-reveal_type(f([])) # E: Revealed type is 'builtins.list[builtins.int]'
-reveal_type(f(None)) # E: Revealed type is 'builtins.list[builtins.int]'
-[builtins fixtures/list.pyi]
-
-[case testUnionWithGenericTypeItemContextAndStrictOptional]
-# flags: --strict-optional
-from typing import TypeVar, Union, List
-
-T = TypeVar('T')
-
-def f(x: Union[T, List[int]]) -> Union[T, List[int]]: pass
-reveal_type(f(1)) # E: Revealed type is 'Union[builtins.int*, builtins.list[builtins.int]]'
-reveal_type(f([])) # E: Revealed type is 'builtins.list[builtins.int]'
-reveal_type(f(None)) # E: Revealed type is 'Union[builtins.None, builtins.list[builtins.int]]'
-[builtins fixtures/list.pyi]
-
-[case testUnionWithGenericTypeItemContextInMethod]
-from typing import TypeVar, Union, List, Generic
-
-T = TypeVar('T')
-S = TypeVar('S')
-
-class C(Generic[T]):
- def f(self, x: Union[T, S]) -> Union[T, S]: pass
-
-c = C[List[int]]()
-reveal_type(c.f('')) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.str*]'
-reveal_type(c.f([1])) # E: Revealed type is 'builtins.list[builtins.int]'
-reveal_type(c.f([])) # E: Revealed type is 'builtins.list[builtins.int]'
-reveal_type(c.f(None)) # E: Revealed type is 'builtins.list[builtins.int]'
-[builtins fixtures/list.pyi]
-
-[case testGenericMethodCalledInGenericContext]
-from typing import TypeVar, Generic
-
-_KT = TypeVar('_KT')
-_VT = TypeVar('_VT')
-_T = TypeVar('_T')
-
-class M(Generic[_KT, _VT]):
- def get(self, k: _KT, default: _T) -> _T: ...
-
-def f(d: M[_KT, _VT], k: _KT) -> _VT:
- return d.get(k, None)
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
deleted file mode 100644
index 799fb35..0000000
--- a/test-data/unit/check-inference.test
+++ /dev/null
@@ -1,1765 +0,0 @@
--- Inferring locals/globals with simple types
--- ------------------------------------------
-
-
-[case testInferSimpleGvarType]
-import typing
-x = A()
-y = B()
-x = B() # Fail
-x = A()
-x = y # Fail
-x = x
-class A: pass
-class B: pass
-[out]
-main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-main:6: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testInferSimpleLvarType]
-import typing
-def f() -> None:
- x = A()
- y = B()
- x = B() # Fail
- x = A()
- x = y # Fail
- x = x
-class A: pass
-class B: pass
-[out]
-main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-main:7: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testLvarInitializedToVoid]
-import typing
-def f() -> None:
- a = g() # E: "g" does not return a value
- #b, c = g() # "g" does not return a value TODO
-
-def g() -> None: pass
-[out]
-
-[case testInferringLvarTypeFromArgument]
-import typing
-def f(a: 'A') -> None:
- b = a
- b = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- b = a
- a = b
-
-class A: pass
-class B: pass
-[out]
-
-[case testInferringLvarTypeFromGvar]
-
-g = None # type: B
-
-def f() -> None:
- a = g
- a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- a = B()
-
-class A: pass
-class B: pass
-[out]
-
-[case testInferringImplicitDynamicTypeForLvar]
-import typing
-def f() -> None:
- a = g()
- None(a) # E: None not callable
- a.x()
-
-def g(): pass
-[out]
-
-[case testInferringExplicitDynamicTypeForLvar]
-from typing import Any
-g = None # type: Any
-
-def f(a: Any) -> None:
- b = g
- None(b) # E: None not callable
- a.x()
-[out]
-
-
--- Inferring types of local variables with complex types
--- -----------------------------------------------------
-
-
-[case testInferringTupleTypeForLvar]
-
-def f() -> None:
- a = A(), B()
- aa = None # type: A
- bb = None # type: B
- bb = a[0] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- aa = a[1] # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- aa = a[0]
- bb = a[1]
-
-class A: pass
-class B: pass
-[builtins fixtures/tuple.pyi]
-[out]
-
-[case testInferringTupleTypeForLvarWithNones]
-import typing
-def f() -> None:
- a = A(), None # E: Need type annotation for variable
- b = None, A() # E: Need type annotation for variable
-
-class A: pass
-[builtins fixtures/tuple.pyi]
-[out]
-
-[case testInferringGenericTypeForLvar]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]): pass
-a_i = None # type: A[int]
-a_s = None # type: A[str]
-
-def f() -> None:
- a_int = A() # type: A[int]
- a = a_int
- a = a_s # E: Incompatible types in assignment (expression has type A[str], variable has type A[int])
- a = a_i
-[builtins fixtures/tuple.pyi]
-[out]
-
-[case testInferringFunctionTypeForLvar]
-import typing
-def f() -> None:
- a = g
- a(B()) # E: Argument 1 has incompatible type "B"; expected "A"
- a(A())
-
-def g(a: 'A') -> None: pass
-
-class A: pass
-class B: pass
-[out]
-
-[case testInferringFunctionTypeForLvarFromTypeObject]
-import typing
-def f() -> None:
- a = A
- a(A()) # E: Too many arguments
- a()
- t = a # type: type
-
-class A: pass
-[out]
-
-
--- Inferring variable types in multiple definition
--- -----------------------------------------------
-
-
-[case testInferringLvarTypesInMultiDef]
-import typing
-def f() -> None:
- a, b = A(), B()
- a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
- a = A()
- b = B()
-
-class A: pass
-class B: pass
-[out]
-
-[case testInferringLvarTypesInTupleAssignment]
-from typing import Tuple
-def f() -> None:
- t = None # type: Tuple[A, B]
- a, b = t
- a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
- a = A()
- b = B()
-
-class A: pass
-class B: pass
-[out]
-
-[case testInferringLvarTypesInNestedTupleAssignment1]
-from typing import Tuple
-def f() -> None:
- t = None # type: Tuple[A, B]
- a1, (a, b) = A(), t
- a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
- a = A()
- b = B()
-
-class A: pass
-class B: pass
-[out]
-
-[case testInferringLvarTypesInNestedTupleAssignment2]
-import typing
-def f() -> None:
- a, (b, c) = A(), (B(), C())
- a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- c = A() # E: Incompatible types in assignment (expression has type "A", variable has type "C")
-
- a = A()
- b = B()
- c = C()
-
-class A: pass
-class B: pass
-class C: pass
-[out]
-
-[case testInferringLvarTypesInNestedListAssignment]
-import typing
-def f() -> None:
- a, (b, c) = A(), [B(), C()]
- a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- c = A() # E: Incompatible types in assignment (expression has type "A", variable has type "C")
-
- a = A()
- b = B()
- c = C()
-
-class A: pass
-class B: pass
-class C: pass
-[out]
-
-[case testInferringLvarTypesInMultiDefWithNoneTypes]
-import typing
-def f() -> None:
- a, b = A(), None # E: Need type annotation for variable
- c, d = None, A() # E: Need type annotation for variable
-
-class A: pass
-[out]
-
-[case testInferringLvarTypesInNestedTupleAssignmentWithNoneTypes]
-import typing
-def f() -> None:
- a1, (a2, b) = A(), (A(), None) # E: Need type annotation for variable
-
-class A: pass
-[out]
-
-[case testInferringLvarTypesInMultiDefWithInvalidTuple]
-from typing import Tuple
-t = None # type: Tuple[object, object, object]
-
-def f() -> None:
- a, b = t # Fail
- c, d, e, f = t # Fail
- g, h, i = t
-[builtins fixtures/tuple.pyi]
-[out]
-main:5: error: Too many values to unpack (2 expected, 3 provided)
-main:6: error: Need more than 3 values to unpack (4 expected)
-
-[case testInvalidRvalueTypeInInferredMultipleLvarDefinition]
-import typing
-def f() -> None:
- a, b = f # E: 'def ()' object is not iterable
- c, d = A() # E: '__main__.A' object is not iterable
-class A: pass
-[builtins fixtures/for.pyi]
-[out]
-
-[case testInvalidRvalueTypeInInferredNestedTupleAssignment]
-import typing
-def f() -> None:
- a1, (a2, b) = A(), f # E: 'def ()' object is not iterable
- a3, (c, d) = A(), A() # E: '__main__.A' object is not iterable
-class A: pass
-[builtins fixtures/for.pyi]
-[out]
-
-[case testInferringMultipleLvarDefinitionWithListRvalue]
-from typing import List
-
-class C: pass
-class D: pass
-
-def f() -> None:
- list_c = [C()]
- list_d = [D()]
- a, b = list_c
- c, d, e = list_d
- a = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C")
- b = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C")
- c = C() # E: Incompatible types in assignment (expression has type "C", variable has type "D")
- b = c # E: Incompatible types in assignment (expression has type "D", variable has type "C")
-
- a = C()
- b = C()
- c = D()
- d = D()
- e = D()
-
- a = b
- c = d
- d = e
-[builtins fixtures/for.pyi]
-[out]
-
-[case testInferringNestedTupleAssignmentWithListRvalue]
-from typing import List
-
-class C: pass
-class D: pass
-
-def f() -> None:
- list_c = [C()]
- list_d = [D()]
- c1, (a, b) = C(), list_c
- c2, (c, d, e) = C(), list_d
- a = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C")
- b = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C")
- c = C() # E: Incompatible types in assignment (expression has type "C", variable has type "D")
- b = c # E: Incompatible types in assignment (expression has type "D", variable has type "C")
-
- a = C()
- b = C()
- c = D()
- d = D()
- e = D()
-
- a = b
- c = d
- d = e
-[builtins fixtures/for.pyi]
-[out]
-
-[case testInferringMultipleLvarDefinitionWithImplicitDynamicRvalue]
-import typing
-def f() -> None:
- a, b = g()
- a.x
- b.x
-def g(): pass
-
-[case testInferringMultipleLvarDefinitionWithExplicitDynamicRvalue]
-from typing import Any
-def f(d: Any) -> None:
- a, b = d
- a.x
- b.x
-
-[case testInferringTypesFromIterable]
-from typing import Iterable
-class Nums(Iterable[int]):
- def __iter__(self): pass
- def __next__(self): pass
-a, b = Nums()
-a = b = 1
-a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-b = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-[builtins fixtures/for.pyi]
-
-
--- Type variable inference for generic functions
--- ---------------------------------------------
-
-
-[case testInferSimpleGenericFunction]
-from typing import Tuple, TypeVar
-T = TypeVar('T')
-a = None # type: A
-b = None # type: B
-c = None # type: Tuple[A, object]
-
-b = id(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = id(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-a = id(c) # E: Incompatible types in assignment (expression has type "Tuple[A, object]", variable has type "A")
-
-a = id(a)
-b = id(b)
-c = id(c)
-
-def id(a: T) -> T: pass
-
-class A: pass
-class B: pass
-[builtins fixtures/tuple.pyi]
-
-[case testInferringGenericFunctionTypeForLvar]
-from typing import TypeVar
-T = TypeVar('T')
-def f() -> None:
- a = id
- b = None # type: int
- c = None # type: str
- b = a(c) # E: Incompatible types in assignment (expression has type "str", variable has type "int")
- b = a(b)
- c = a(c)
-def id(x: T) -> T:
- return x
-[out]
-
-[case testUnderspecifiedInferenceResult]
-from typing import TypeVar
-T = TypeVar('T')
-class A: pass
-a = None # type: A
-
-def ff() -> None:
- x = f() # E: Need type annotation for variable
-
-g(None) # Ok
-f() # Ok because not used to infer local variable type
-g(a)
-
-def f() -> T: pass
-def g(a: T) -> None: pass
-[out]
-
-[case testUnsolvableInferenceResult]
-from typing import TypeVar
-T = TypeVar('T')
-f(A(), g()) # Fail
-f(A(), A())
-
-def f(a: T, b: T) -> None: pass
-def g() -> None: pass
-class A: pass
-[out]
-main:3: error: Cannot infer type argument 1 of "f"
-main:3: error: "g" does not return a value
-
-[case testInferenceWithMultipleConstraints]
-from typing import TypeVar
-T = TypeVar('T')
-a = None # type: A
-b = None # type: B
-
-b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-b = f(b, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = f(a, b)
-a = f(b, a)
-
-def f(a: T, b: T) -> T: pass
-
-class A: pass
-class B(A): pass
-
-[case testInferenceWithMultipleVariables]
-from typing import Tuple, TypeVar
-T = TypeVar('T')
-S = TypeVar('S')
-a, b = None, None # type: (A, B)
-taa = None # type: Tuple[A, A]
-tab = None # type: Tuple[A, B]
-tba = None # type: Tuple[B, A]
-
-taa = f(a, b) # Fail
-taa = f(b, a) # Fail
-tba = f(a, b) # Fail
-
-tab = f(a, b)
-tba = f(b, a)
-
-def f(a: T, b: S) -> Tuple[T, S]: pass
-
-class A: pass
-class B: pass
-[builtins fixtures/tuple.pyi]
-[out]
-main:9: error: Argument 2 to "f" has incompatible type "B"; expected "A"
-main:10: error: Argument 1 to "f" has incompatible type "B"; expected "A"
-main:11: error: Argument 1 to "f" has incompatible type "A"; expected "B"
-main:11: error: Argument 2 to "f" has incompatible type "B"; expected "A"
-
-[case testConstraintSolvingWithSimpleGenerics]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-ao = None # type: A[object]
-ab = None # type: A[B]
-ac = None # type: A[C]
-
-ab = f(ao) # E: Argument 1 to "f" has incompatible type A[object]; expected A[B]
-ao = f(ab) # E: Argument 1 to "f" has incompatible type A[B]; expected A[object]
-ab = f(ac) # E: Argument 1 to "f" has incompatible type A[C]; expected A[B]
-ab = g(ao) # E: Argument 1 to "g" has incompatible type A[object]; expected A[B]
-ao = g(ab) # E: Argument 1 to "g" has incompatible type A[B]; expected A[object]
-
-ab = f(ab)
-ac = f(ac)
-ao = f(ao)
-
-ab = g(ab)
-ao = g(ao)
-
-def f(a: 'A[T]') -> 'A[T]': pass
-
-def g(a: T) -> T: pass
-
-class A(Generic[T]): pass
-class B: pass
-class C: pass
-
-[case testConstraintSolvingFailureWithSimpleGenerics]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-ao = None # type: A[object]
-ab = None # type: A[B]
-
-f(ao, ab) # E: Cannot infer type argument 1 of "f"
-f(ab, ao) # E: Cannot infer type argument 1 of "f"
-f(ao, ao)
-f(ab, ab)
-
-def f(a: 'A[T]', b: 'A[T]') -> None: pass
-
-class A(Generic[T]): pass
-class B: pass
-
-[case testTypeInferenceWithCalleeDefaultArgs]
-from typing import TypeVar
-T = TypeVar('T')
-a = None # type: A
-o = None # type: object
-
-a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-a = g(a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-
-o = f()
-o = f(o)
-a = f(a)
-a = g(a)
-
-def f(a: T = None) -> T: pass
-def g(a: T, b: T = None) -> T: pass
-
-class A: pass
-
-
--- Generic function inference with multiple inheritance
--- ----------------------------------------------------
-
-
-[case testGenericFunctionInferenceWithMultipleInheritance]
-from typing import TypeVar
-
-class I: pass
-class J: pass
-
-class A(I, J): pass
-class B(I, J): pass
-class C(I): pass
-class D(J): pass
-
-T = TypeVar('T')
-def f(a: T, b: T) -> T: pass
-def g(x: I) -> None: pass
-
-a = f(A(), C())
-g(a)
-b = f(A(), B())
-g(b)
-c = f(A(), D())
-g(c) # E: Argument 1 to "g" has incompatible type "J"; expected "I"
-d = f(D(), A())
-g(d) # E: Argument 1 to "g" has incompatible type "J"; expected "I"
-e = f(D(), C())
-g(e) # E: Argument 1 to "g" has incompatible type "object"; expected "I"
-
-[case testGenericFunctionInferenceWithMultipleInheritance2]
-from typing import TypeVar
-
-class I: pass
-class J: pass
-
-class A(I): pass
-class B(A, J): pass
-class C(I, J): pass
-
-T = TypeVar('T')
-def f(a: T, b: T) -> T: pass
-def g(x: I) -> None: pass
-def h(x: J) -> None: pass
-
-a = f(B(), C())
-g(a)
-h(a) # E: Argument 1 to "h" has incompatible type "I"; expected "J"
-b = f(C(), B())
-g(b)
-h(b) # E: Argument 1 to "h" has incompatible type "I"; expected "J"
-c = f(A(), B())
-g(a)
-h(b) # E: Argument 1 to "h" has incompatible type "I"; expected "J"
-
-[case testGenericFunctionInferenceWithMultipleInheritance3]
-from typing import TypeVar
-
-class I: pass
-class J: pass
-class K(J): pass
-
-class A(K): pass
-class B(A, I): pass
-class C(I, J): pass
-
-T = TypeVar('T')
-def f(a: T, b: T) -> T: pass
-def g(x: K) -> None: pass
-
-a = f(B(), C())
-g(a) # E: Argument 1 to "g" has incompatible type "J"; expected "K"
-b = f(A(), C())
-g(b) # E: Argument 1 to "g" has incompatible type "J"; expected "K"
-c = f(A(), B())
-g(c)
-
-[case testPrecedenceOfFirstBaseAsInferenceResult]
-from typing import TypeVar
-from abc import abstractmethod, ABCMeta
-T = TypeVar('T')
-a, i, j = None, None, None # type: (A, I, J)
-
-a = f(B(), C())
-
-class I(metaclass=ABCMeta): pass
-class J(metaclass=ABCMeta): pass
-
-def f(a: T, b: T) -> T: pass
-
-class A: pass
-class B(A, I, J): pass
-class C(A, I, J): pass
-
-
--- Generic function inference with function arguments
--- --------------------------------------------------
-
-
-[case testNonOverloadedMapInference]
-from typing import TypeVar, Callable, List
-t = TypeVar('t')
-s = TypeVar('s')
-class A: pass
-b = bool()
-def f(x: bool) -> A: pass
-def mymap(f: Callable[[t], s], a: List[t]) -> List[s]: pass
-
-l = mymap(f, [b])
-l = [A()]
-lb = [b]
-l = lb # E: Incompatible types in assignment (expression has type List[bool], variable has type List[A])
-[builtins fixtures/for.pyi]
-
-
--- Generic function inference with unions
--- --------------------------------------
-
-
-[case testUnionInference]
-from typing import TypeVar, Union, List
-T = TypeVar('T')
-U = TypeVar('U')
-def f(x: Union[T, int], y: T) -> T: pass
-f(1, 'a')() # E: "str" not callable
-f('a', 1)() # E: "object" not callable
-f('a', 'a')() # E: "str" not callable
-f(1, 1)() # E: "int" not callable
-
-def g(x: Union[T, List[T]]) -> List[T]: pass
-def h(x: List[str]) -> None: pass
-g('a')() # E: List[str] not callable
-
-# The next line is a case where there are multiple ways to satisfy a constraint
-# involving a Union. Either T = List[str] or T = str would turn out to be valid,
-# but mypy doesn't know how to branch on these two options (and potentially have
-# to backtrack later) and defaults to T = None. The result is an awkward error
-# message. Either a better error message, or simply accepting the call, would be
-# preferable here.
-g(['a']) # E: Argument 1 to "g" has incompatible type List[str]; expected List[None]
-
-h(g(['a']))
-
-def i(x: Union[List[T], List[U]], y: List[T], z: List[U]) -> None: pass
-a = [1]
-b = ['b']
-i(a, a, b)
-i(b, a, b)
-i(a, b, b) # E: Argument 1 to "i" has incompatible type List[int]; expected List[str]
-[builtins fixtures/list.pyi]
-
-
-[case testUnionInferenceWithTypeVarValues]
-from typing import TypeVar, Union
-AnyStr = TypeVar('AnyStr', bytes, str)
-def f(x: Union[AnyStr, int], *a: AnyStr) -> None: pass
-f('foo')
-f('foo', 'bar')
-f('foo', b'bar') # E: Type argument 1 of "f" has incompatible value "object"
-f(1)
-f(1, 'foo')
-f(1, 'foo', b'bar') # E: Type argument 1 of "f" has incompatible value "object"
-[builtins fixtures/primitives.pyi]
-
-
-[case testUnionTwoPassInference-skip]
-from typing import TypeVar, Union, List
-T = TypeVar('T')
-U = TypeVar('U')
-def j(x: Union[List[T], List[U]], y: List[T]) -> List[U]: pass
-
-a = [1]
-b = ['b']
-# We could infer: Since List[str] <: List[T], we must have T = str.
-# Then since List[int] <: Union[List[str], List[U]], and List[int] is
-# not a subtype of List[str], we must have U = int.
-# This is not currently implemented.
-j(a, b)
-[builtins fixtures/list.pyi]
-
-
-[case testUnionContext]
-from typing import TypeVar, Union, List
-T = TypeVar('T')
-def f() -> List[T]: pass
-d1 = f() # type: Union[List[int], str]
-d2 = f() # type: Union[int, str] # E: Incompatible types in assignment (expression has type List[None], variable has type "Union[int, str]")
-def g(x: T) -> List[T]: pass
-d3 = g(1) # type: Union[List[int], List[str]]
-[builtins fixtures/list.pyi]
-
-
-[case testGenericFunctionSubtypingWithUnions]
-from typing import TypeVar, Union, List
-T = TypeVar('T')
-S = TypeVar('S')
-def k1(x: int, y: List[T]) -> List[Union[T, int]]: pass
-def k2(x: S, y: List[T]) -> List[Union[T, int]]: pass
-a = k2
-a = k2
-a = k1 # E: Incompatible types in assignment (expression has type Callable[[int, List[T]], List[Union[T, int]]], variable has type Callable[[S, List[T]], List[Union[T, int]]])
-b = k1
-b = k1
-b = k2
-[builtins fixtures/list.pyi]
-
-
--- Literal expressions
--- -------------------
-
-
-[case testDictLiteral]
-from typing import Dict
-class A: pass
-class B: pass
-def d_ab() -> Dict[A, B]: return {}
-def d_aa() -> Dict[A, A]: return {}
-a, b = None, None # type: (A, B)
-d = {a:b}
-d = d_ab()
-d = d_aa() # E: Incompatible types in assignment (expression has type Dict[A, A], variable has type Dict[A, B])
-[builtins fixtures/dict.pyi]
-
-[case testSetLiteral]
-from typing import Any, Set
-a, x = None, None # type: (int, Any)
-def s_i() -> Set[int]: return set()
-def s_s() -> Set[str]: return set()
-s = {a}
-s = {x}
-s = s_i()
-s = s_s() # E: Incompatible types in assignment (expression has type Set[str], variable has type Set[int])
-[builtins fixtures/set.pyi]
-
-[case testSetWithStarExpr]
-# flags: --fast-parser
-s = {1, 2, *(3, 4)}
-t = {1, 2, *s}
-reveal_type(s) # E: Revealed type is 'builtins.set[builtins.int*]'
-reveal_type(t) # E: Revealed type is 'builtins.set[builtins.int*]'
-[builtins fixtures/set.pyi]
-
-
--- For statements
--- --------------
-
-
-[case testInferenceOfFor1]
-a, b = None, None # type: (A, B)
-
-for x in [A()]:
- b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- a = x
-
-for y in []: # E: Need type annotation for variable
- a = y
-
-class A: pass
-class B: pass
-[builtins fixtures/for.pyi]
-
-[case testInferenceOfFor2]
-
-a, b, c = None, None, None # type: (A, B, C)
-for x, (y, z) in [(A(), (B(), C()))]:
- b = x # Fail
- c = y # Fail
- a = z # Fail
- a = x
- b = y
- c = z
-for xx, yy, zz in [(A(), B())]: # Fail
- pass
-for xx, (yy, zz) in [(A(), B())]: # Fail
- pass
-for xxx, yyy in [(None, None)]: # Fail
- pass
-
-class A: pass
-class B: pass
-class C: pass
-[builtins fixtures/for.pyi]
-[out]
-main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-main:5: error: Incompatible types in assignment (expression has type "B", variable has type "C")
-main:6: error: Incompatible types in assignment (expression has type "C", variable has type "A")
-main:10: error: Need more than 2 values to unpack (3 expected)
-main:12: error: '__main__.B' object is not iterable
-main:14: error: Need type annotation for variable
-
-[case testInferenceOfFor3]
-
-a, b = None, None # type: (A, B)
-
-for x, y in [[A()]]:
- b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- b = y # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- a = x
- a = y
-
-for e, f in [[]]: # E: Need type annotation for variable
- pass
-
-class A: pass
-class B: pass
-[builtins fixtures/for.pyi]
-
-[case testForStatementInferenceWithVoid]
-import typing
-for x in f(): # E: "f" does not return a value
- pass
-def f() -> None: pass
-[builtins fixtures/for.pyi]
-
-[case testReusingInferredForIndex]
-import typing
-for a in [A()]: pass
-a = A()
-a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-for a in []: pass
-a = A()
-a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-class A: pass
-class B: pass
-[builtins fixtures/for.pyi]
-
-[case testReusingInferredForIndex2]
-import typing
-def f() -> None:
- for a in [A()]: pass
- a = A()
- a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- for a in []: pass
- a = A()
- a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-class A: pass
-class B: pass
-[builtins fixtures/for.pyi]
-[out]
-
-
--- Regression tests
--- ----------------
-
-
-[case testMultipleAssignmentWithPartialDefinition]
-
-a = None # type: A
-x, a = a, a
-x = a
-a = x
-x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-class A: pass
-
-[case testMultipleAssignmentWithPartialDefinition2]
-
-a = None # type: A
-a, x = [a, a]
-x = a
-a = x
-x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-class A: pass
-[builtins fixtures/for.pyi]
-
-[case testMultipleAssignmentWithPartialDefinition3]
-from typing import Any, cast
-a = None # type: A
-x, a = cast(Any, a)
-x = a
-a = x
-x = object()
-a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-class A: pass
-
-[case testInferGlobalDefinedInBlock]
-import typing
-if A:
- a = A()
- a = A()
- a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-class A: pass
-class B: pass
-
-
--- Inferring attribute types
--- -------------------------
-
-
-[case testInferAttributeType]
-import typing
-class A:
- a = B()
-class B: pass
-
-A().a = B()
-A().a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-[case testInferAttributeTypeAndAssignInInit]
-import typing
-class A:
- a = B()
- def __init__(self) -> None:
- self.a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- self.a = B()
-class B: pass
-[out]
-
-[case testInferAttributeInInit]
-import typing
-class B: pass
-class A:
- def __init__(self) -> None:
- self.a = A()
- self.b = B()
-a = A()
-a.a = A()
-a.b = B()
-a.a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-a.b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-[case testInferAttributeInInitUsingChainedAssignment]
-import typing
-class B: pass
-class A:
- def __init__(self) -> None:
- self.a = self.b = A()
-a = A()
-a.a = A()
-a.b = A()
-a.a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-a.b = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-
--- Lambdas
--- -------
-
-
-[case testInferLambdaType]
-from typing import List, Callable
-li = [1]
-l = lambda: li
-f1 = l # type: Callable[[], List[int]]
-f2 = l # type: Callable[[], List[str]] # E: Incompatible types in assignment (expression has type Callable[[], List[int]], variable has type Callable[[], List[str]])
-[builtins fixtures/list.pyi]
-
-[case testInferLambdaType2]
-from typing import List, Callable
-l = lambda: [B()]
-f1 = l # type: Callable[[], List[B]]
-f2 = l # type: Callable[[], List[A]] # E: Incompatible types in assignment (expression has type Callable[[], List[B]], variable has type Callable[[], List[A]])
-
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-
-[case testUninferableLambda]
-from typing import TypeVar, Callable
-X = TypeVar('X')
-def f(x: Callable[[X], X]) -> X: pass
-y = f(lambda x: x) # E: Cannot infer type argument 1 of "f"
-
-[case testUninferableLambdaWithTypeError]
-from typing import TypeVar, Callable
-X = TypeVar('X')
-def f(x: Callable[[X], X], y: str) -> X: pass
-y = f(lambda x: x, 1) # Fail
-[out]
-main:4: error: Cannot infer type argument 1 of "f"
-main:4: error: Argument 2 to "f" has incompatible type "int"; expected "str"
-
-[case testInferLambdaNone]
-from typing import Callable
-def f(x: Callable[[], None]) -> None: pass
-def g(x: Callable[[], int]) -> None: pass
-a = lambda: None
-f(a)
-g(a) # E: Argument 1 to "g" has incompatible type Callable[[], None]; expected Callable[[], int]
-b = lambda: None # type: Callable[[], None]
-f(b)
-g(b) # E: Argument 1 to "g" has incompatible type Callable[[], None]; expected Callable[[], int]
-
-
--- Boolean operators
--- -----------------
-
-
-[case testOrOperationWithGenericOperands]
-from typing import List
-a = None # type: List[A]
-o = None # type: List[object]
-a2 = a or []
-a = a2
-a2 = o # E: Incompatible types in assignment (expression has type List[object], variable has type List[A])
-class A: pass
-[builtins fixtures/list.pyi]
-
-
--- Accessing variable before its type has been inferred
--- ----------------------------------------------------
-
-
-[case testAccessGlobalVarBeforeItsTypeIsAvailable]
-import typing
-x.y # E: Cannot determine type of 'x'
-x = object()
-x.y # E: "object" has no attribute "y"
-
-[case testAccessDataAttributeBeforeItsTypeIsAvailable]
-
-a = None # type: A
-a.x.y # E: Cannot determine type of 'x'
-class A:
- def __init__(self) -> None:
- self.x = object()
-a.x.y # E: "object" has no attribute "y"
-
-
--- Ducktype declarations
--- ---------------------
-
-
-[case testListWithDucktypeCompatibility]
-from typing import List, _promote
-class A: pass
-@_promote(A)
-class B: pass
-a = None # type: List[A]
-x1 = [A(), B()]
-x2 = [B(), A()]
-x3 = [B(), B()]
-a = x1
-a = x2
-a = x3 # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
-[builtins fixtures/list.pyi]
-
-[case testListWithDucktypeCompatibilityAndTransitivity]
-from typing import List, _promote
-class A: pass
-@_promote(A)
-class B: pass
-@_promote(B)
-class C: pass
-a = None # type: List[A]
-x1 = [A(), C()]
-x2 = [C(), A()]
-x3 = [B(), C()]
-a = x1
-a = x2
-a = x3 # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
-[builtins fixtures/list.pyi]
-
-
--- Inferring type of variable when initialized to an empty collection
--- ------------------------------------------------------------------
-
-
-[case testInferListInitializedToEmpty]
-a = []
-a.append(1)
-a.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
-[builtins fixtures/list.pyi]
-[out]
-
-[case testInferListInitializedToEmptyUsingUpdate]
-a = []
-a.extend([''])
-a.append(0) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "str"
-[builtins fixtures/list.pyi]
-[out]
-
-[case testInferListInitializedToEmptyAndNotAnnotated]
-a = [] # E: Need type annotation for variable
-[builtins fixtures/list.pyi]
-[out]
-
-[case testInferListInitializedToEmptyAndReadBeforeAppend]
-a = [] # E: Need type annotation for variable
-if a: pass
-a.xyz
-a.append('')
-[builtins fixtures/list.pyi]
-[out]
-
-[case testInferListInitializedToEmptyAndIncompleteTypeInAppend]
-a = [] # E: Need type annotation for variable
-a.append([])
-a()
-[builtins fixtures/list.pyi]
-[out]
-
-[case testInferListInitializedToEmptyAndMultipleAssignment]
-a, b = [], []
-a.append(1)
-b.append('')
-a() # E: List[int] not callable
-b() # E: List[str] not callable
-[builtins fixtures/list.pyi]
-[out]
-
-[case testInferListInitializedToEmptyInFunction]
-def f() -> None:
- a = []
- a.append(1)
- a.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
-[builtins fixtures/list.pyi]
-[out]
-
-[case testInferListInitializedToEmptyAndNotAnnotatedInFunction]
-def f() -> None:
- a = [] # E: Need type annotation for variable
-
-def g() -> None: pass
-
-a = []
-a.append(1)
-[builtins fixtures/list.pyi]
-[out]
-
-[case testInferListInitializedToEmptyAndReadBeforeAppendInFunction]
-def f() -> None:
- a = [] # E: Need type annotation for variable
- if a: pass
- a.xyz
- a.append('')
-[builtins fixtures/list.pyi]
-[out]
-
-[case testInferListInitializedToEmptyInClassBody]
-class A:
- a = []
- a.append(1)
- a.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
-[builtins fixtures/list.pyi]
-[out]
-
-[case testInferListInitializedToEmptyAndNotAnnotatedInClassBody]
-class A:
- a = [] # E: Need type annotation for variable
-
-class B:
- a = []
- a.append(1)
-[builtins fixtures/list.pyi]
-[out]
-
-[case testInferListInitializedToEmptyInMethod]
-class A:
- def f(self) -> None:
- a = []
- a.append(1)
- a.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
-[builtins fixtures/list.pyi]
-[out]
-
-[case testInferListInitializedToEmptyAndNotAnnotatedInMethod]
-class A:
- def f(self) -> None:
- a = [] # E: Need type annotation for variable
-[builtins fixtures/list.pyi]
-[out]
-
-[case testInferListInitializedToEmptyInMethodViaAttribute]
-class A:
- def f(self) -> None:
- # Attributes aren't supported right now.
- self.a = [] # E: Need type annotation for variable
- self.a.append(1) # E: Cannot determine type of 'a'
- self.a.append('') # E: Cannot determine type of 'a'
-[builtins fixtures/list.pyi]
-[out]
-
-[case testInferSetInitializedToEmpty]
-a = set()
-a.add(1)
-a.add('') # E: Argument 1 to "add" of "set" has incompatible type "str"; expected "int"
-[builtins fixtures/set.pyi]
-[out]
-
-[case testInferSetInitializedToEmptyUsingDiscard]
-a = set()
-a.discard('')
-a.add(0) # E: Argument 1 to "add" of "set" has incompatible type "int"; expected "str"
-[builtins fixtures/set.pyi]
-[out]
-
-[case testInferSetInitializedToEmptyUsingUpdate]
-a = set()
-a.update({0})
-a.add('') # E: Argument 1 to "add" of "set" has incompatible type "str"; expected "int"
-[builtins fixtures/set.pyi]
-[out]
-
-[case testInferDictInitializedToEmpty]
-a = {}
-a[1] = ''
-a() # E: Dict[int, str] not callable
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testInferDictInitializedToEmptyUsingUpdate]
-a = {}
-a.update({'': 42})
-a() # E: Dict[str, int] not callable
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testInferDictInitializedToEmptyUsingUpdateError]
-a = {} # E: Need type annotation for variable
-a.update([1, 2])
-a()
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testInferDictInitializedToEmptyAndIncompleteTypeInUpdate]
-a = {} # E: Need type annotation for variable
-a[1] = {}
-b = {} # E: Need type annotation for variable
-b[{}] = 1
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testInferDictInitializedToEmptyAndUpdatedFromMethod]
-map = {}
-def add():
- map[1] = 2
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testSpecialCaseEmptyListInitialization]
-def f(blocks: Any): # E: Name 'Any' is not defined
- to_process = [] # E: Need type annotation for variable
- to_process = list(blocks)
-[builtins fixtures/list.pyi]
-[out]
-
-[case testSpecialCaseEmptyListInitialization2]
-def f(blocks: object):
- to_process = [] # E: Need type annotation for variable
- to_process = list(blocks) # E: No overload variant of "list" matches argument types [builtins.object]
-[builtins fixtures/list.pyi]
-[out]
-
-
--- Inferring types of variables first initialized to None (partial types)
--- ----------------------------------------------------------------------
-
-
-[case testLocalVariablePartiallyInitializedToNone]
-def f() -> None:
- if object():
- x = None
- else:
- x = 1
- x() # E: "int" not callable
-[out]
-
-[case testLocalVariablePartiallyTwiceInitializedToNone]
-def f() -> None:
- if object():
- x = None
- elif object():
- x = None
- else:
- x = 1
- x() # E: "int" not callable
-[out]
-
-[case testLvarInitializedToNoneWithoutType]
-import typing
-def f() -> None:
- a = None # E: Need type annotation for variable
- a.x() # E: None has no attribute "x"
-[out]
-
-[case testGvarPartiallyInitializedToNone]
-x = None
-if object():
- x = 1
-x() # E: "int" not callable
-
-[case testPartiallyInitializedToNoneAndThenToPartialList]
-x = None
-if object():
- # Promote from partial None to partial list.
- x = []
- x.append(1)
-x.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
-[builtins fixtures/list.pyi]
-
-[case testPartiallyInitializedToNoneAndThenReadPartialList]
-x = None
-if object():
- # Promote from partial None to partial list.
- x = [] # E: Need type annotation for variable
- x
-[builtins fixtures/list.pyi]
-
-[case testPartiallyInitializedToNoneAndPartialListAndLeftPartial]
-def f() -> None:
- x = None
- if object():
- # Promote from partial None to partial list.
- x = [] # E: Need type annotation for variable
-[builtins fixtures/list.pyi]
-[out]
-
-[case testPartiallyInitializedToNoneAndThenToIncompleteType]
-from typing import TypeVar, Dict
-T = TypeVar('T')
-def f(*x: T) -> Dict[int, T]: pass
-x = None # E: Need type annotation for variable
-if object():
- x = f()
-[builtins fixtures/dict.pyi]
-
-[case testPartiallyInitializedVariableDoesNotEscapeScope1]
-def f() -> None:
- x = None # E: Need type annotation for variable
-x = 1
-[out]
-
-[case testPartiallyInitializedVariableDoesNotEscapeScope2]
-x = None # E: Need type annotation for variable
-def f() -> None:
- x = None
- x = 1
-x() # E: None not callable
-
-[case testAttributePartiallyInitializedToNone]
-class A:
- def f(self) -> None:
- self.x = None
- self.x = 1
- self.x() # E: "int" not callable
-[out]
-
-[case testAttributePartiallyInitializedToNoneWithMissingAnnotation]
-class A:
- def f(self) -> None:
- self.x = None
-
- def g(self) -> None:
- self.x = 1
- self.x()
-[out]
-main:3: error: Need type annotation for variable
-main:7: error: "int" not callable
-
-[case testGlobalInitializedToNoneSetFromFunction]
-a = None
-def f():
- global a
- a = 42
-[out]
-
-[case testGlobalInitializedToNoneSetFromMethod]
-a = None
-class C:
- def m(self):
- global a
- a = 42
-[out]
-
--- More partial type errors
--- ------------------------
-
-[case testPartialTypeErrorSpecialCase1]
-# This used to crash.
-class A:
- x = None
- def f(self) -> None:
- for a in self.x:
- pass
-[builtins fixtures/for.pyi]
-[out]
-main:3: error: Need type annotation for variable
-main:5: error: None has no attribute "__iter__"
-
-[case testPartialTypeErrorSpecialCase2]
-# This used to crash.
-class A:
- x = []
- def f(self) -> None:
- for a in self.x:
- pass
-[builtins fixtures/for.pyi]
-[out]
-main:3: error: Need type annotation for variable
-
-[case testPartialTypeErrorSpecialCase3]
-class A:
- x = None
- def f(self) -> None:
- for a in A.x:
- pass
-[builtins fixtures/for.pyi]
-[out]
-main:2: error: Need type annotation for variable
-main:4: error: None has no attribute "__iter__"
-
-
--- Multipass
--- ---------
-
-
-[case testMultipassAndAccessVariableBeforeDefinition]
-def f() -> None:
- y = x
- y() # E: "int" not callable
-x = 1
-[out]
-
-[case testMultipassAndAccessInstanceVariableBeforeDefinition]
-class A:
- def f(self) -> None:
- y = self.x
- y() # E: "int" not callable
-
- def g(self) -> None:
- self.x = 1
-[out]
-
-[case testMultipassAndTopLevelVariable]
-y = x # E: Cannot determine type of 'x'
-y()
-x = 1+0
-[out]
-
-[case testMultipassAndDecoratedMethod]
-from typing import Callable, TypeVar
-
-T = TypeVar('T')
-
-class A:
- def f(self) -> None:
- self.g() # E: Too few arguments for "g" of "A"
- self.g(1)
- @dec
- def g(self, x: str) -> None: pass
-
-def dec(f: Callable[[A, str], T]) -> Callable[[A, int], T]: pass
-[out]
-
-[case testMultipassAndDefineAttributeBasedOnNotReadyAttribute]
-class A:
- def f(self) -> None:
- self.y = self.x
-
- def g(self) -> None:
- self.x = 1
-
- def h(self) -> None:
- self.y() # E: "int" not callable
-[out]
-
-[case testMultipassAndDefineAttributeBasedOnNotReadyAttribute2]
-class A:
- def f(self) -> None:
- self.y = self.x
- self.z = self.y
- self.z() # E
- self.y() # E
-
- def g(self) -> None:
- self.x = 1
-
- def h(self) -> None:
- self.y() # E
-[out]
-main:5: error: "int" not callable
-main:6: error: "int" not callable
-main:12: error: "int" not callable
-
-[case testMultipassAndPartialTypes]
-def f() -> None:
- x = []
- y
- x.append(1)
- x.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
- x.append(y) # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
-y = ''
-[builtins fixtures/list.pyi]
-[out]
-
-[case testMultipassAndPartialTypes2]
-s = ''
-n = 0
-def f() -> None:
- global s, n
- x = []
- x.append(y)
- s = x[0]
- n = x[0] # E: Incompatible types in assignment (expression has type "str", variable has type "int")
- x.append(1) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "str"
-y = ''
-[builtins fixtures/list.pyi]
-[out]
-
-[case testMultipassAndPartialTypes3]
-from typing import Dict
-def g(d: Dict[str, int]) -> None: pass
-def f() -> None:
- x = {}
- x[1] = y
- g(x) # E: Argument 1 to "g" has incompatible type Dict[int, str]; expected Dict[str, int]
- x[1] = 1 # E: Incompatible types in assignment (expression has type "int", target has type "str")
- x[1] = ''
-y = ''
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testMultipassAndPartialTypes4]
-from typing import Dict
-def g(d: Dict[str, int]) -> None: pass
-def f() -> None:
- x = {}
- y
- x[1] = 1
- g(x) # E: Argument 1 to "g" has incompatible type Dict[int, int]; expected Dict[str, int]
-y = ''
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testMultipassAndCircularDependency]
-class A:
- def f(self) -> None:
- self.x = self.y # E: Cannot determine type of 'y'
-
- def g(self) -> None:
- self.y = self.x
-[out]
-
-[case testMultipassAndPartialTypesSpecialCase1]
-def f() -> None:
- y = o
- x = []
- x.append(y)
- x() # E: List[int] not callable
-o = 1
-[builtins fixtures/list.pyi]
-[out]
-
-[case testMultipassAndPartialTypesSpecialCase2]
-def f() -> None:
- y = o
- x = {}
- x[''] = y
- x() # E: Dict[str, int] not callable
-o = 1
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testMultipassAndPartialTypesSpecialCase3]
-def f() -> None:
- x = {} # E: Need type annotation for variable
- y = o
- z = {} # E: Need type annotation for variable
-o = 1
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testMultipassAndPartialTypesSpecialCase4]
-def f() -> None:
- y = o
- x = None
- x = y
- x() # E: "int" not callable
-o = 1
-[out]
-
-[case testMultipassAndPartialTypesSpecialCase5]
-def f() -> None:
- x = None
- y = o
- x = y
- x() # E: "int" not callable
-o = 1
-[out]
-
-[case testMultipassAndClassAttribute]
-class S:
- def foo(self) -> int:
- return R.X
-
-class R:
- X = 2
-
-[case testMultipassAndMultipleFiles]
-import m
-def f() -> None:
- x()
-x = 0
-[file m.py]
-def g() -> None:
- y()
-y = 0
-[out]
-tmp/m.py:2: error: "int" not callable
-main:3: error: "int" not callable
-
-
--- Tests for special cases of unification
--- --------------------------------------
-
-[case testUnificationRedundantUnion]
-from typing import Union
-a = None # type: Union[int, str]
-b = None # type: Union[str, tuple]
-def f(): pass
-def g(x: Union[int, str]): pass
-c = a if f() else b
-g(c) # E: Argument 1 to "g" has incompatible type "Union[int, str, tuple]"; expected "Union[int, str]"
-
-[case testUnificationMultipleInheritance]
-class A: pass
-class B:
- def foo(self): pass
-class C(A, B): pass
-def f(): pass
-a1 = B() if f() else C()
-a1.foo()
-a2 = C() if f() else B()
-a2.foo()
-
-[case testUnificationMultipleInheritanceAmbiguous]
-# Show that join_instances_via_supertype() breaks ties using the first base class.
-class A1: pass
-class B1:
- def foo1(self): pass
-class C1(A1, B1): pass
-
-class A2: pass
-class B2:
- def foo2(self): pass
-class C2(A2, B2): pass
-
-class D1(C1, C2): pass
-class D2(C2, C1): pass
-
-def f(): pass
-
-a1 = D1() if f() else D2()
-a1.foo1()
-a2 = D2() if f() else D1()
-a2.foo2()
-
-[case testUnificationEmptyListLeft]
-def f(): pass
-a = [] if f() else [0]
-a() # E: List[int] not callable
-[builtins fixtures/list.pyi]
-
-[case testUnificationEmptyListRight]
-def f(): pass
-a = [0] if f() else []
-a() # E: List[int] not callable
-[builtins fixtures/list.pyi]
-
-[case testUnificationEmptyListLeftInContext]
-from typing import List
-def f(): pass
-a = [] if f() else [0] # type: List[int]
-a() # E: List[int] not callable
-[builtins fixtures/list.pyi]
-
-[case testUnificationEmptyListRightInContext]
-# TODO Find an example that really needs the context
-from typing import List
-def f(): pass
-a = [0] if f() else [] # type: List[int]
-a() # E: List[int] not callable
-[builtins fixtures/list.pyi]
-
-[case testUnificationEmptySetLeft]
-def f(): pass
-a = set() if f() else {0}
-a() # E: Set[int] not callable
-[builtins fixtures/set.pyi]
-
-[case testUnificationEmptyDictLeft]
-def f(): pass
-a = {} if f() else {0: 0}
-a() # E: Dict[int, int] not callable
-[builtins fixtures/dict.pyi]
-
-[case testUnificationEmptyDictRight]
-def f(): pass
-a = {0: 0} if f() else {}
-a() # E: Dict[int, int] not callable
-[builtins fixtures/dict.pyi]
-
-[case testUnificationDictWithEmptyListLeft]
-def f(): pass
-a = {0: []} if f() else {0: [0]}
-a() # E: Dict[int, List[int]] not callable
-[builtins fixtures/dict.pyi]
-
-[case testUnificationDictWithEmptyListRight]
-def f(): pass
-a = {0: [0]} if f() else {0: []}
-a() # E: Dict[int, List[int]] not callable
-[builtins fixtures/dict.pyi]
-
-[case testMisguidedSetItem]
-from typing import Generic, Sequence, TypeVar
-T = TypeVar('T')
-class C(Sequence[T], Generic[T]): pass
-C[0] = 0
-[out]
-main:4: error: Type expected within [...]
-main:4: error: Unsupported target for indexed assignment
diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test
deleted file mode 100644
index d6041cc..0000000
--- a/test-data/unit/check-isinstance.test
+++ /dev/null
@@ -1,1330 +0,0 @@
-[case testForcedAssignment]
-x = 1 # type: object
-y = 1
-y = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
-x = 2
-y = x
-[case testJoinAny]
-from typing import List, Any
-
-x = None # type: List[Any]
-
-def foo() -> List[int]: pass
-def bar() -> List[str]: pass
-
-if bool():
- x = foo()
-else:
- x = bar()
-
-x * 2
-[builtins fixtures/list.pyi]
-
-[case testGeneratorExpressionTypes]
-
-class A: y = 1
-x = [A()]
-y = [x]
-
-z = [1,2]
-z = [a.y for b in y for a in b]
-
-[builtins fixtures/list.pyi]
-
-[case testClassAttributeInitialization-skip]
-class A:
- x = None # type: int
- def __init__(self) -> None:
- self.y = None # type: int
- z = self.x
- w = self.y
-[case testAssignmentSubtypes-skip]
-from typing import Union
-
-def foo(x: Union[str, int]):
- if isinstance(x, int):
- x = 'a'
- x + 'a' # Works in the current code
- z = x # We probably want this to be of type str.
- y = [x] # But what type should this be?
- y[0] + 'a' # (1) Should this work?
- y + [1] # (2) Or this?
- z = 1 # Also, is this valid?
-
-x = None # type: int
-y = [x]
-[builtins fixtures/isinstancelist.pyi]
-[out]
-
-[case testFunctionDefaultArgs]
-
-class A: pass
-class B(A): y = 1
-
-x = A()
-
-def foo(x: A = B()):
- x.y # E: "A" has no attribute "y"
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testIsinstanceFancyConditionals]
-
-class A: pass
-class B(A): y = 1
-
-x = A()
-
-if isinstance(x, B):
- x.y
-
-while isinstance(x, B):
- x.y
-
-while isinstance(x, B):
- x.y
- x = B()
-
-[builtins fixtures/isinstance.pyi]
-
-
-[case testSubtypingWithAny]
-
-class A: y = 1
-class B(A): z = 1
-
-def foo(): pass
-
-x = A()
-x = B()
-x.z
-x = foo()
-x.z # E: "A" has no attribute "z"
-x.y
-
-[case testSingleMultiAssignment-skip]
-
-x = 'a'
-(x, ) = ('a',)
-
-[case testUnionMultiAssignment]
-from typing import Union
-
-x = None # type: Union[int, str]
-x = 1
-x = 'a'
-x + 1 # E: Unsupported operand types for + ("str" and "int")
-x = 1
-(x, y) = ('a', 1)
-x + 1 # E: Unsupported operand types for + ("str" and "int")
-
-[builtins fixtures/isinstancelist.pyi]
-
-[case testUnionIfZigzag]
-from typing import Union
-
-def f(x: Union[int, str]) -> None:
- x = 1
- if x:
- x = 'a'
- x = 1
- x + 1
-[builtins fixtures/isinstancelist.pyi]
-
-
-[case testTwoLoopsUnion]
-from typing import Union
-
-def foo() -> Union[int, str]: pass
-
-def bar() -> None:
- x = foo()
- if isinstance(x, int):
- return
- while bool():
- x + 'a'
- while bool():
- x = foo()
- if bool():
- return
- x = 'a'
- x + 'a'
-
-[builtins fixtures/isinstancelist.pyi]
-
-[case testComplicatedBlocks]
-from typing import Union
-
-def foo() -> Union[int, str]: pass
-
-def bar() -> None:
- x = foo()
- if isinstance(x, int):
- return
- while bool():
- x + 'a'
- while bool():
- x = foo()
- if bool():
- return
- x = 'a'
- x + 'a'
-
- x = foo()
- if isinstance(x, int):
- return
- while bool():
- x + 'a'
- while bool():
- x + 'a' # E: Unsupported operand types for + (likely involving Union)
- x = foo()
- if bool():
- continue
- x = 'a'
- x = 'a'
- x + 'a'
-[builtins fixtures/isinstancelist.pyi]
-[out]
-
-[case testUnionTryExcept]
-
-class A: y = A()
-class B(A): z = 1
-
-x = A()
-x = B()
-x.z
-try:
- x.z
- x = A()
- x = B()
- x.z
-except:
- pass
-x.z # E: "A" has no attribute "z"
-
-
-[case testUnionTryExcept2]
-
-class A: y = A()
-class B(A): z = 1
-
-x = A()
-try:
- x.z # E: "A" has no attribute "z"
- x = A()
- x = B()
- x.z
-except:
- x.z # E: "A" has no attribute "z"
- x = B()
- x.z
-else:
- x = B()
-x.z
-
-[case testUnionTryExcept3]
-class A: y = A()
-class B(A): z = 1
-x = A()
-x = B()
-try:
- raise BaseException()
- x = A()
-except:
- pass
-x.z
-x = B()
-try:
- x = A()
- raise BaseException()
-except:
- pass
-x.z # E: "A" has no attribute "z"
-x = B()
-try:
- pass
-except:
- x = A()
- raise BaseException()
-x.z
-try:
- x = A()
-except:
- pass
-x.z # E: "A" has no attribute "z"
-x = B()
-try:
- pass
-except:
- x = A()
-x.z # E: "A" has no attribute "z"
-[builtins fixtures/exception.pyi]
-[case testUnionTryExcept4]
-
-class A: pass
-class B(A): z = 1
-
-x = A()
-while bool():
- try:
- x.z # E: "A" has no attribute "z"
- x = A()
- except:
- x = B()
- else:
- x = B()
- x.z
-[builtins fixtures/exception.pyi]
-[case testUnionTryFinally]
-class A: pass
-class B(A): b = 1
-
-x = A()
-x = B()
-try:
- x = A()
- x.b # E: "A" has no attribute "b"
- x = B()
-finally:
- x.b # E: "A" has no attribute "b"
-x.b
-[case testUnionTryFinally2]
-class A: pass
-class B(A): b = 1
-
-x = A()
-x = B()
-try:
- x = A()
- x = B()
-except:
- pass
-finally:
- pass
-x.b # E: "A" has no attribute "b"
-[case testUnionTryFinally3]
-class A: pass
-class B(A): b = 1
-
-x = A()
-x = B()
-try:
- x = A()
- x = B()
-except:
- pass
-finally:
- x = B()
-x.b
-[case testUnionTryFinally4]
-class A: pass
-class B(A): b = 1
-
-while 2:
- x = A()
- x = B()
- try:
- x = A()
- x = B()
- except:
- pass
- finally:
- x.b # E: "A" has no attribute "b"
- if not isinstance(x, B):
- break
- x.b
-[builtins fixtures/isinstancelist.pyi]
-[case testUnionTryFinally5]
-class A: pass
-class B(A): b = 1
-
-while 2:
- x = A()
- try:
- x = A()
- x = B()
- finally:
- x.b # E: "A" has no attribute "b"
- break
- x.b
- x.b
-[case testUnionTryFinally6]
-class A: pass
-class B(A): b = 1
-
-def f() -> int:
- x = B() # type: A
- try:
- x = B()
- except:
- x = A()
- # An exception could occur here
- x = B()
- finally:
- return x.b # E: "A" has no attribute "b"
-[out]
-[case testUnionListIsinstance]
-
-from typing import Union, List
-
-class A: y = A()
-class B(A): z = C()
-
-class C: pass
-class D(C): d = 1
-
-
-def f(x: Union[List[int], List[str], int]) -> None:
- if isinstance(x, list):
- a = x[0]
- if isinstance(a, int):
- a + 1
- a + 'x' # E: Unsupported operand types for + ("int" and "str")
-
- # type of a?
- x + 1 # E: Unsupported operand types for + (likely involving Union)
- else:
- x[0] # E: Value of type "int" is not indexable
- x + 1
- x[0] # E: Value of type "Union[List[int], List[str], int]" is not indexable
- x + 1 # E: Unsupported operand types for + (likely involving Union)
-[builtins fixtures/isinstancelist.pyi]
-[out]
-
-[case testUnionListIsinstance2]
-
-from typing import Union, List
-class A: a = 1
-class B: pass
-class C: pass
-
-def g(x: Union[A, B]) -> A: pass
-def h(x: C) -> A: pass
-
-def f(x: Union[A, B, C]) -> None:
- if isinstance(x, C):
- x = h(x)
- else:
- x = g(x)
- x.a
-[builtins fixtures/isinstancelist.pyi]
-
-[case testUnionStrictDefnBasic]
-from typing import Union
-
-def foo() -> Union[int, str]: pass
-
-x = foo()
-x = 1
-x = x + 1
-x = foo()
-x = x + 1 # E: Unsupported operand types for + (likely involving Union)
-if isinstance(x, str):
- x = x + 1 # E: Unsupported operand types for + ("str" and "int")
- x = 1
- x = x + 1
-
-[builtins fixtures/isinstancelist.pyi]
-
-[case testSubtypeRedefinitionBasic]
-from typing import Union
-
-class A: pass
-class B(A): y = 1
-
-x = A()
-x.y # E: "A" has no attribute "y"
-x = B()
-x.y # OK: x is known to be a B
-
-[builtins fixtures/isinstancelist.pyi]
-
-[case testIsInstanceBasic]
-from typing import Union
-
-x = None # type: Union[int, str]
-
-if isinstance(x, str):
- x = x + 1 # E: Unsupported operand types for + ("str" and "int")
- x = x + 'a'
-else:
- x = x + 'a' # E: Unsupported operand types for + ("int" and "str")
- x = x + 1
-[builtins fixtures/isinstancelist.pyi]
-
-[case testIsInstanceIndexing]
-from typing import Union
-
-x = None # type: Union[int, str]
-j = [x]
-
-if isinstance(j[0], str):
- j[0] = j[0] + 'a'
- j[0] = j[0] + 1 # E: Unsupported operand types for + ("str" and "int")
-else:
- j[0] = j[0] + 'a' # E: Unsupported operand types for + ("int" and "str")
- j[0] = j[0] + 1
-[builtins fixtures/isinstancelist.pyi]
-
-[case testIsInstanceSubClassMember]
-from typing import Union
-
-class Animal:
- pass
-
-class Dog(Animal):
- paws = 4 # type: Union[int, str]
-
- def bark(self): pass
-
-class House:
- pet = None # type: Animal
-
-h = House()
-h.pet = Dog()
-
-while bool():
- if isinstance(h.pet, Dog):
- if isinstance(h.pet.paws, str):
- x = h.pet.paws + 'a'
- y = h.pet.paws + 1 # E: Unsupported operand types for + (likely involving Union)
- z = h.pet.paws + 'a' # E: Unsupported operand types for + (likely involving Union)
- if isinstance(h.pet.paws, str):
- x = h.pet.paws + 'a'
- break
- y = h.pet.paws + 1
- z = h.pet.paws + 'a' # E: Unsupported operand types for + ("int" and "str")
-[builtins fixtures/isinstancelist.pyi]
-[case testIsInstanceSubClassReset]
-class A: pass
-class B(A): b=1
-
-class C:
- a = A()
-
-x = C()
-x.a.b # E: "A" has no attribute "b"
-if isinstance(x.a, B):
- x.a.b
- x = C()
- x.a.b # E: "A" has no attribute "b"
-[builtins fixtures/isinstance.pyi]
-[case testIsinstanceTuple]
-from typing import Union
-
-class A:
- pass
-
-class B:
- def method2(self, arg: int):
- return 123
-
-class C:
- def method2(self, arg: int):
- return 456
-
- def method3(self, arg: str):
- return 'abc'
-
-v = A() # type: Union[A, B, C]
-
-if isinstance(v, (B, C)):
- v.method2(123)
- v.method3('xyz') # E: Some element of union has no attribute "method3"
-[builtins fixtures/isinstance.pyi]
-
-[case testMemberAssignmentChanges-skip]
-from typing import Union
-
-class Dog:
- paws = 1 # type: Union[int, str]
-
-pet = Dog()
-
-pet.paws + 'a' # E: moo
-pet.paws = 'a'
-pet.paws + 'a'
-pet.paws = 1
-pet.paws + 1
-
-[builtins fixtures/isinstancelist.pyi]
-
-[case testIsInstanceSubClassMemberHard-skip]
-from typing import Union
-
-class Animal:
- pass
-
-class Dog(Animal):
- paws = 4 # type: Union[int, str]
-
- def bark(self): pass
-
-class House:
- pet = None # type: Animal
-
-h = House()
-h.pet = Dog()
-
-if isinstance(h.pet, Dog):
- if isinstance(h.pet.paws, str):
- for i in [1]:
- h.pet.paws + 'a'
- if bool():
- break
- h.pet.paws = 1
- h.pet.paws + 1
-
- if isinstance(h.pet.paws, str):
- h.pet.paws + 'a'
- else:
- h.pet.paws + 1
-
-[builtins fixtures/isinstancelist.pyi]
-
-[case testIsInstanceReturn]
-from typing import Union
-
-def foo() -> None:
- x = 1 # type: Union[int, str]
- if isinstance(x, int):
- return
- y = x + 'asdad'
-
-def bar() -> None:
- x = 1 # type: Union[int, str]
- if isinstance(x, int):
- return
- else:
- pass
- y = x + 'asdad'
-
-foo()
-[builtins fixtures/isinstancelist.pyi]
-[case testIsInstanceBadBreak]
-from typing import Union
-
-def foo() -> None:
- x = None # type: Union[int, str]
- if isinstance(x, int):
- for z in [1,2]:
- break
- else:
- pass
- y = x + 'asdad' # E: Unsupported operand types for + (likely involving Union)
-
-foo()
-[builtins fixtures/isinstancelist.pyi]
-[out]
-[case testIsInstanceThreeUnion]
-from typing import Union, List
-
-x = None # type: Union[int, str, List[int]]
-
-while bool():
- if isinstance(x, int):
- x + 1
- elif isinstance(x, str):
- x + 'a'
- else:
- x + [1]
- x + 'a' # E: Unsupported operand types for + (likely involving Union)
-x + [1] # E: Unsupported operand types for + (likely involving Union)
-
-[builtins fixtures/isinstancelist.pyi]
-[case testIsInstanceThreeUnion2]
-from typing import Union, List
-
-x = None # type: Union[int, str, List[int]]
-
-while bool():
- if isinstance(x, int):
- x + 1
- break
- elif isinstance(x, str):
- x + 'a'
- break
- x + [1]
- x + 'a' # E: Unsupported operand types for + ("list" and "str")
-x + [1] # E: Unsupported operand types for + (likely involving Union)
-
-[builtins fixtures/isinstancelist.pyi]
-
-[case testIsInstanceThreeUnion3]
-from typing import Union, List
-
-while bool():
- x = None # type: Union[int, str, List[int]]
- x = 1
- if isinstance(x, int):
- x + 1
- break
- elif isinstance(x, str):
- x + 'a'
- break
- x + [1] # These lines aren't reached because x was an int
- x + 'a'
-x + [1] # E: Unsupported operand types for + (likely involving Union)
-
-[builtins fixtures/isinstancelist.pyi]
-
-[case testRemovingTypeRepeatedly]
-from typing import Union
-
-def foo() -> Union[int, str]: pass
-
-for i in [1, 2]:
- x = foo()
- x + 'a' # E: Unsupported operand types for + (likely involving Union)
- if isinstance(x, int):
- break
- x + 'a'
-
- x = foo()
- x + 'a' # E: Unsupported operand types for + (likely involving Union)
- if isinstance(x, int):
- break
- x + 'a'
-
- x = foo()
- x + 'a' # E: Unsupported operand types for + (likely involving Union)
- if isinstance(x, int):
- break
- x + 'a'
-
-x + 'a' # E: Unsupported operand types for + (likely involving Union)
-
-[builtins fixtures/isinstancelist.pyi]
-
-
-
-[case testModifyRepeatedly]
-from typing import Union
-
-def foo() -> Union[int, str]: pass
-
-x = foo()
-x + 1 # E: Unsupported operand types for + (likely involving Union)
-x + 'a' # E: Unsupported operand types for + (likely involving Union)
-
-x = 1
-x + 1
-x + 'a' # E: Unsupported operand types for + ("int" and "str")
-
-x = 'a'
-x + 1 # E: Unsupported operand types for + ("str" and "int")
-x + 'a'
-
-x = foo()
-x + 1 # E: Unsupported operand types for + (likely involving Union)
-x + 'a' # E: Unsupported operand types for + (likely involving Union)
-
-[builtins fixtures/isinstancelist.pyi]
-
-[case testModifyLoop]
-from typing import Union
-
-def foo() -> Union[int, str]: pass
-
-x = foo()
-x + 1 # E: Unsupported operand types for + (likely involving Union)
-x = 'a'
-x + 1 # E: Unsupported operand types for + ("str" and "int")
-x = 1
-x + 1
-
-while bool():
- x + 1 # E: Unsupported operand types for + (likely involving Union)
- x = 'a'
-[builtins fixtures/isinstancelist.pyi]
-
-[case testModifyLoop2]
-from typing import Union
-
-def foo() -> Union[int, str]: pass
-
-x = foo()
-x + 1 # E: Unsupported operand types for + (likely involving Union)
-x = 'a'
-x + 1 # E: Unsupported operand types for + ("str" and "int")
-x = 1
-x + 1
-
-for i in [1]:
- x = 'a'
-
-x + 1 # E: Unsupported operand types for + (likely involving Union)
-
-[builtins fixtures/isinstancelist.pyi]
-
-
-[case testModifyLoop3]
-from typing import Union
-
-def foo() -> Union[int, str]: pass
-
-x = foo()
-x = 1
-
-while bool():
- x + 1
- x = 'a'
- break
-else:
- x + 1
-x + 1 # E: Unsupported operand types for + (likely involving Union)
-x = 1
-for y in [1]:
- x + 1
- x = 'a'
- break
-else:
- x + 1
-x + 1 # E: Unsupported operand types for + (likely involving Union)
-[builtins fixtures/isinstancelist.pyi]
-
-[case testModifyLoopWhile4]
-from typing import Union
-
-def foo() -> Union[int, str]: pass
-
-x = foo()
-x = 1
-
-while bool():
- x + 1
- if bool():
- x = 'a'
- break
-else:
- x + 1
- x = 'a'
-x + 'a'
-x = 1
-while bool():
- x + 1 # E: Unsupported operand types for + (likely involving Union)
- if bool():
- x = 'a'
- continue
-else:
- x + 1 # E: Unsupported operand types for + (likely involving Union)
- x = 'a'
-x + 'a'
-[builtins fixtures/isinstancelist.pyi]
-[case testModifyLoopFor4]
-from typing import Union
-
-def foo() -> Union[int, str]: pass
-
-x = foo()
-x = 1
-
-for y in [1]:
- x + 1
- if bool():
- x = 'a'
- break
-else:
- x + 1
- x = 'a'
-x + 'a'
-x = 1
-for y in [1]:
- x + 1 # E: Unsupported operand types for + (likely involving Union)
- if bool():
- x = 'a'
- continue
-else:
- x + 1 # E: Unsupported operand types for + (likely involving Union)
- x = 'a'
-x + 'a'
-[builtins fixtures/isinstancelist.pyi]
-[case testModifyNestedLoop]
-from typing import Union
-
-def foo() -> Union[int, str]: pass
-
-x = foo()
-x = 1
-
-for y in [1]:
- for z in [1]:
- break
- else:
- x = 'a'
- break
-else:
- x + 1
-x + 1 # E: Unsupported operand types for + (likely involving Union)
-x = 1
-while bool():
- while bool():
- break
- else:
- x = 'a'
- break
-else:
- x + 1
-x + 1 # E: Unsupported operand types for + (likely involving Union)
-[builtins fixtures/isinstancelist.pyi]
-
-[case testModifyLoopLong]
-from typing import Union
-
-class A: a = 1
-
-def foo() -> Union[int, str, A]: pass
-
-def bar() -> None:
- x = foo()
- x + 1 # E: Unsupported left operand type for + (some union)
- if isinstance(x, A):
- x.a
- else:
- if isinstance(x, int):
- x + 1
- x + 'a' # E: Unsupported operand types for + ("int" and "str")
- else:
- x + 'a'
- x.a # E: "str" has no attribute "a"
- x = A()
-
- if isinstance(x, str):
- x + 'a'
- else:
- while bool():
- if isinstance(x, int):
- x + 1
- else:
- x.a
- break
- while bool():
- if isinstance(x, int):
- x + 1
- else:
- x.a
- continue
-
- #for i in [1]:
- while bool():
- if isinstance(x, int):
- x + 1
- else:
- x.a # E: Some element of union has no attribute "a"
- x = 'a'
-
-[builtins fixtures/isinstancelist.pyi]
-[out]
-
-[case testWhileExitCondition1]
-from typing import Union
-x = 1 # type: Union[int, str]
-while isinstance(x, int):
- if bool():
- continue
- x = 'a'
-else:
- reveal_type(x) # E: Revealed type is 'builtins.str'
-reveal_type(x) # E: Revealed type is 'builtins.str'
-[builtins fixtures/isinstance.pyi]
-
-[case testWhileExitCondition2]
-from typing import Union
-x = 1 # type: Union[int, str]
-while isinstance(x, int):
- if bool():
- break
- x = 'a'
-else:
- reveal_type(x) # E: Revealed type is 'builtins.str'
-reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]'
-[builtins fixtures/isinstance.pyi]
-
-[case testWhileLinkedList]
-from typing import Union
-LinkedList = Union['Cons', 'Nil']
-class Nil: pass
-class Cons:
- tail = None # type: LinkedList
-def last(x: LinkedList) -> Nil:
- while isinstance(x, Cons):
- x = x.tail
- return x
-[builtins fixtures/isinstance.pyi]
-
-[case testReturnAndFlow]
-def foo() -> int:
- return 1 and 2
- return 'a'
-[case testCastIsinstance]
-from typing import Union
-
-def foo() -> Union[int, str]: pass
-
-x = foo()
-y = 1 # type: int
-
-if isinstance(x, str):
- x = y
-x + 1
-x + 'a' # E: Unsupported operand types for + ("int" and "str")
-
-[builtins fixtures/isinstancelist.pyi]
-
-
-[case testUnreachableCode]
-x = 1 # type: int
-
-while bool():
- x = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
- break
- x = 'a' # Note: no error because unreachable code
-[builtins fixtures/isinstancelist.pyi]
-
-[case testUnreachableCode2]
-x = 1
-while bool():
- try:
- pass
- except:
- continue
- else:
- continue
- x + 'a'
-[builtins fixtures/isinstance.pyi]
-
-[case testUnreachableWhileTrue]
-def f(x: int) -> None:
- while True:
- if x:
- return
- 1()
-[builtins fixtures/bool.pyi]
-
-[case testUnreachableAssertFalse]
-def f() -> None:
- assert False
- 1()
-[builtins fixtures/bool.pyi]
-
-[case testUnreachableAssertFalse2]
-# flags: --fast-parser
-def f() -> None:
- # The old parser doesn't understand the syntax below
- assert False, "hi"
- 1()
-[builtins fixtures/bool.pyi]
-
-[case testUnreachableReturnOrAssertFalse]
-def f(x: int) -> int:
- if x:
- return x
- else:
- assert False
- 1()
-[builtins fixtures/bool.pyi]
-
-[case testUnreachableTryExcept]
-def f() -> None:
- try:
- f()
- return
- except BaseException:
- return
- 1()
-[builtins fixtures/exception.pyi]
-
-[case testUnreachableTryExceptElse]
-def f() -> None:
- try:
- f()
- except BaseException:
- return
- else:
- return
- 1()
-[builtins fixtures/exception.pyi]
-
-[case testUnreachableTryReturnFinally1]
-def f() -> None:
- try:
- return
- finally:
- pass
- 1()
-
-[case testUnreachableTryReturnFinally2]
-def f() -> None:
- try:
- pass
- finally:
- return
- 1()
-
-[case testUnreachableTryReturnExceptRaise]
-def f() -> None:
- try:
- return
- except:
- raise
- 1()
-
-[case testUnreachableReturnLambda]
-from typing import Callable
-def g(t: Callable[[int], int]) -> int: pass
-def f() -> int:
- return g(lambda x: x)
- 1()
-
-[case testIsinstanceAnd]
-class A:
- pass
-
-class B(A):
- flag = 1
-
-x = B() # type: A
-
-if isinstance(x, B) and 1:
- x.flag
-[builtins fixtures/isinstancelist.pyi]
-[case testIsinstanceShortcircuit]
-class A:
- pass
-
-class B(A):
- flag = 1
-
-x = B() # type: A
-
-if isinstance(x, B) and x.flag:
- pass
-if isinstance(x, B) or x.flag: # E: "A" has no attribute "flag"
- pass
-if not isinstance(x, B) or x.flag:
- pass
-if not isinstance(x, B) and x.flag: # E: "A" has no attribute "flag"
- pass
-[builtins fixtures/isinstancelist.pyi]
-[case testIsinstanceExpression]
-class A:
- pass
-
-class B(A):
- flag = 1
-
-x = B() # type: A
-
-x.flag if isinstance(x, B) else 0
-0 if not isinstance(x, B) else x.flag
-0 if isinstance(x, B) else x.flag # E: "A" has no attribute "flag"
-[builtins fixtures/isinstancelist.pyi]
-
-[case testIsinstanceMultiAnd]
-class A:
- pass
-
-class B(A):
- flag = 1
-
-class C(A):
- glaf = 1
-
-x = B() # type: A
-y = C() # type: A
-
-if isinstance(x, B) and isinstance(y, C):
- x.flag += 1
- y.glaf += 1
- x() # E: "B" not callable
- y() # E: "C" not callable
-else:
- x() # E: "A" not callable
- y() # E: "A" not callable
-[builtins fixtures/isinstancelist.pyi]
-
-[case testIsinstanceMultiAndSpecialCase]
-class A:
- pass
-
-class B(A):
- flag = 1
-
-class C(A):
- glaf = 1
-
-x = B() # type: A
-y = C() # type: A
-
-if isinstance(x, B) and isinstance(y, int):
- 1() # type checking skipped
-if isinstance(y, int) and isinstance(x, B):
- 1() # type checking skipped
-[builtins fixtures/isinstancelist.pyi]
-
-[case testReturnWithCallExprAndIsinstance]
-
-from typing import Union
-def f(x: Union[int, str]) -> None:
- if not isinstance(x, int):
- return foo()
- x() # E: "int" not callable
-def foo(): pass
-[builtins fixtures/isinstancelist.pyi]
-[out]
-
-[case testIsinstanceOr1]
-from typing import Optional
-def f(a: bool, x: object) -> Optional[int]:
- if a or not isinstance(x, int):
- return None
- reveal_type(x) # E: Revealed type is 'builtins.int'
- return x
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testIsinstanceOr2]
-from typing import Optional
-def g(a: bool, x: object) -> Optional[int]:
- if not isinstance(x, int) or a:
- return None
- reveal_type(x) # E: Revealed type is 'builtins.int'
- return x
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testIsinstanceOr3]
-from typing import Optional
-def h(a: bool, x: object) -> Optional[int]:
- if a or isinstance(x, int):
- return None
- return x # E: Incompatible return value type (got "object", expected "int")
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testIsinstanceWithOverlappingUnionType]
-from typing import Union
-def f(x: Union[float, int]) -> None:
- if isinstance(x, float):
- pass
- if not isinstance(x, int):
- f(x)
-[builtins fixtures/isinstance.pyi]
-
-[case testIsinstanceWithOverlappingUnionType2]
-from typing import Union
-class A: pass
-class B(A): pass
-def f(x: Union[A, B]) -> None:
- if isinstance(x, A):
- pass
- if not isinstance(x, B):
- f(x)
-[builtins fixtures/isinstance.pyi]
-
-[case testIsinstanceOfSuperclass]
-class A: pass
-class B(A): pass
-x = B()
-if isinstance(x, A):
- reveal_type(x) # E: Revealed type is '__main__.B'
-if not isinstance(x, A):
- reveal_type(x) # unreachable
- x = A()
-reveal_type(x) # E: Revealed type is '__main__.B'
-[builtins fixtures/isinstance.pyi]
-
-[case testIsinstanceOfNonoverlapping]
-class A: pass
-class B: pass
-x = B()
-if isinstance(x, A):
- reveal_type(x) # unreachable
-else:
- reveal_type(x) # E: Revealed type is '__main__.B'
-[builtins fixtures/isinstance.pyi]
-
-[case testAssertIsinstance]
-def f(x: object):
- assert isinstance(x, int)
- y = 0 # type: int
- y = x
-[builtins fixtures/isinstance.pyi]
-
-[case testUnionAssertIsinstance]
-from typing import Union
-def f(x: Union[str, int]):
- assert isinstance(x, int)
- y = 0 # type: int
- y = x
-[builtins fixtures/isinstance.pyi]
-
-[case testAnyAssertIsinstance]
-from typing import Any
-def f(x: Any):
- assert isinstance(x, int) # this should narrow x to type int
- x + "foo" # E: Unsupported operand types for + ("int" and "str")
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testIsinstanceOfGenericClassRetainsParameters]
-from typing import List, Union
-def f(x: Union[List[int], str]) -> None:
- if isinstance(x, list):
- x[0]()
-[builtins fixtures/isinstancelist.pyi]
-[out]
-main:4: error: "int" not callable
-
-[case testIsinstanceOrIsinstance]
-class A: pass
-class B(A):
- flag = 1
-class C(A):
- flag = 2
-x1 = A()
-if isinstance(x1, B) or isinstance(x1, C):
- reveal_type(x1) # E: Revealed type is 'Union[__main__.B, __main__.C]'
- f = x1.flag # type: int
-else:
- reveal_type(x1) # E: Revealed type is '__main__.A'
- f = 0
-x2 = A()
-if isinstance(x2, A) or isinstance(x2, C):
- reveal_type(x2) # E: Revealed type is '__main__.A'
- f = x2.flag # E: "A" has no attribute "flag"
-else:
- # unreachable
- 1()
-[builtins fixtures/isinstance.pyi]
-[out]
-[case testComprehensionIsInstance]
-from typing import List, Union
-a = [] # type: List[Union[int, str]]
-l = [x for x in a if isinstance(x, int)]
-g = (x for x in a if isinstance(x, int))
-d = {0: x for x in a if isinstance(x, int)}
-reveal_type(l) # E: Revealed type is 'builtins.list[builtins.int*]'
-reveal_type(g) # E: Revealed type is 'typing.Iterator[builtins.int*]'
-reveal_type(d) # E: Revealed type is 'builtins.dict[builtins.int*, builtins.int*]'
-[builtins fixtures/isinstancelist.pyi]
-
-[case testIsinstanceInWrongOrderInBooleanOp]
-class A:
- m = 1
-def f(x: object) -> None:
- if x.m and isinstance(x, A) or False: # E: "object" has no attribute "m"
- pass
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testIsinstanceAndOr]
-class A:
- a = None # type: A
-
-def f(x: object) -> None:
- b = isinstance(x, A) and x.a or A()
- reveal_type(b) # E: Revealed type is '__main__.A'
-[builtins fixtures/isinstance.pyi]
-[out]
diff --git a/test-data/unit/check-kwargs.test b/test-data/unit/check-kwargs.test
deleted file mode 100644
index ac8f388..0000000
--- a/test-data/unit/check-kwargs.test
+++ /dev/null
@@ -1,339 +0,0 @@
--- Test cases for keyword arguments.
-
-
-[case testTypeErrorInKeywordArgument]
-import typing
-def f(o: object) -> None: pass
-f(o=None()) # E: None not callable
-
-[case testSimpleKeywordArgument]
-import typing
-def f(a: 'A') -> None: pass
-f(a=A())
-f(a=object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A"
-class A: pass
-
-[case testTwoKeywordArgumentsNotInOrder]
-import typing
-def f(a: 'A', b: 'B') -> None: pass
-f(b=A(), a=A()) # E: Argument 1 to "f" has incompatible type "A"; expected "B"
-f(b=B(), a=B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A"
-f(a=A(), b=B())
-f(b=B(), a=A())
-class A: pass
-class B: pass
-
-[case testOneOfSeveralOptionalKeywordArguments]
-import typing
-def f(a: 'A' = None, b: 'B' = None, c: 'C' = None) -> None: pass
-f(a=A())
-f(b=B())
-f(c=C())
-f(b=B(), c=C())
-f(a=B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
-f(b=A()) # E: Argument 1 to "f" has incompatible type "A"; expected "B"
-f(c=B()) # E: Argument 1 to "f" has incompatible type "B"; expected "C"
-f(b=B(), c=A()) # E: Argument 2 to "f" has incompatible type "A"; expected "C"
-class A: pass
-class B: pass
-class C: pass
-
-[case testBothPositionalAndKeywordArguments]
-import typing
-def f(a: 'A', b: 'B') -> None: pass
-f(A(), b=A()) # E: Argument 2 to "f" has incompatible type "A"; expected "B"
-f(A(), b=B())
-class A: pass
-class B: pass
-
-[case testContextSensitiveTypeInferenceForKeywordArg]
-from typing import List
-def f(a: 'A', b: 'List[A]') -> None: pass
-f(b=[], a=A())
-class A: pass
-[builtins fixtures/list.pyi]
-
-[case testGivingSameKeywordArgumentTwice]
-import typing
-def f(a: 'A', b: 'B') -> None: pass
-f(a=A(), b=B(), a=A()) # E: keyword argument repeated
-class A: pass
-class B: pass
-
-[case testGivingArgumentAsPositionalAndKeywordArg]
-import typing
-def f(a: 'A', b: 'B' = None) -> None: pass
-f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a"
-class A: pass
-class B: pass
-
-[case testGivingArgumentAsPositionalAndKeywordArg2]
-import typing
-def f(a: 'A' = None, b: 'B' = None) -> None: pass
-f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a"
-class A: pass
-class B: pass
-
-[case testPositionalAndKeywordForSameArg]
-# This used to crash in check_argument_count(). See #1095.
-def f(a: int): pass
-def g(): f(0, a=1)
-[out]
-
-[case testInvalidKeywordArgument]
-import typing
-def f(a: 'A') -> None: pass # N: "f" defined here
-f(b=object()) # E: Unexpected keyword argument "b" for "f"
-class A: pass
-
-[case testKeywordArgumentsWithDynamicallyTypedCallable]
-from typing import Any
-f = None # type: Any
-f(x=f(), z=None()) # E: None not callable
-f(f, zz=None()) # E: None not callable
-f(x=None)
-
-[case testKeywordArgumentWithFunctionObject]
-from typing import Callable
-f = None # type: Callable[[A, B], None]
-f(a=A(), b=B())
-f(A(), b=B())
-class A: pass
-class B: pass
-[out]
-main:3: error: Unexpected keyword argument "a"
-main:3: error: Unexpected keyword argument "b"
-main:4: error: Unexpected keyword argument "b"
-
-[case testKeywordOnlyArguments]
-import typing
-def f(a: 'A', *, b: 'B' = None) -> None: pass
-def g(a: 'A', *, b: 'B') -> None: pass
-def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass
-def i(a: 'A', *, b: 'B', aa: 'A' = None) -> None: pass
-f(A(), b=B())
-f(b=B(), a=A())
-f(A())
-f(A(), B()) # E: Too many positional arguments for "f"
-g(A(), b=B())
-g(b=B(), a=A())
-g(A()) # E: Missing named argument "b" for function "g"
-g(A(), B()) # E: Too many positional arguments for "g"
-h(A()) # E: Missing named argument "b" for function "h" # E: Missing named argument "aa" for function "h"
-h(A(), b=B()) # E: Missing named argument "aa" for function "h"
-h(A(), aa=A()) # E: Missing named argument "b" for function "h"
-h(A(), b=B(), aa=A())
-h(A(), aa=A(), b=B())
-i(A()) # E: Missing named argument "b" for function "i"
-i(A(), b=B())
-i(A(), aa=A()) # E: Missing named argument "b" for function "i"
-i(A(), b=B(), aa=A())
-i(A(), aa=A(), b=B())
-
-class A: pass
-class B: pass
-
-[case testKeywordOnlyArgumentsFastparse]
-# flags: --fast-parser
-import typing
-def f(a: 'A', *, b: 'B' = None) -> None: pass
-def g(a: 'A', *, b: 'B') -> None: pass
-def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass
-def i(a: 'A', *, b: 'B', aa: 'A' = None) -> None: pass
-f(A(), b=B())
-f(b=B(), a=A())
-f(A())
-f(A(), B()) # E: Too many positional arguments for "f"
-g(A(), b=B())
-g(b=B(), a=A())
-g(A()) # E: Missing named argument "b" for function "g"
-g(A(), B()) # E: Too many positional arguments for "g"
-h(A()) # E: Missing named argument "b" for function "h" # E: Missing named argument "aa" for function "h"
-h(A(), b=B()) # E: Missing named argument "aa" for function "h"
-h(A(), aa=A()) # E: Missing named argument "b" for function "h"
-h(A(), b=B(), aa=A())
-h(A(), aa=A(), b=B())
-i(A()) # E: Missing named argument "b" for function "i"
-i(A(), b=B())
-i(A(), aa=A()) # E: Missing named argument "b" for function "i"
-i(A(), b=B(), aa=A())
-i(A(), aa=A(), b=B())
-
-class A: pass
-class B: pass
-
-[case testKwargsAfterBareArgs]
-from typing import Tuple, Any
-def f(a, *, b=None) -> None: pass
-a = None # type: Any
-b = None # type: Any
-f(a, **b)
-
-[builtins fixtures/dict.pyi]
-
-[case testKeywordArgAfterVarArgs]
-import typing
-def f(*a: 'A', b: 'B' = None) -> None: pass
-f()
-f(A())
-f(A(), A())
-f(b=B())
-f(A(), b=B())
-f(A(), A(), b=B())
-f(B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
-f(A(), B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A"
-f(b=A()) # E: Argument 1 to "f" has incompatible type "A"; expected "B"
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-
-[case testKeywordArgAfterVarArgsWithBothCallerAndCalleeVarArgs]
-from typing import List
-def f(*a: 'A', b: 'B' = None) -> None: pass
-a = None # type: List[A]
-f(*a)
-f(A(), *a)
-f(b=B())
-f(*a, b=B())
-f(A(), *a, b=B())
-f(A(), B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A"
-f(A(), b=A()) # E: Argument 2 to "f" has incompatible type "A"; expected "B"
-f(*a, b=A()) # E: Argument 2 to "f" has incompatible type "A"; expected "B"
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-
-[case testCallingDynamicallyTypedFunctionWithKeywordArgs]
-import typing
-def f(x, y=A()): pass
-f(x=A(), y=A())
-f(y=A(), x=A())
-f(y=A()) # E: Missing positional argument "x" in call to "f"
-f(A(), z=A()) # E: Unexpected keyword argument "z" for "f"
-class A: pass
-
-[case testKwargsArgumentInFunctionBody]
-from typing import Dict, Any
-def f( **kwargs: 'A') -> None:
- d1 = kwargs # type: Dict[str, A]
- d2 = kwargs # type: Dict[A, Any] # E: Incompatible types in assignment (expression has type Dict[str, A], variable has type Dict[A, Any])
- d3 = kwargs # type: Dict[Any, str] # E: Incompatible types in assignment (expression has type Dict[str, A], variable has type Dict[Any, str])
-class A: pass
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testKwargsArgumentInFunctionBodyWithImplicitAny]
-from typing import Dict, Any
-def f(**kwargs) -> None:
- d1 = kwargs # type: Dict[str, A]
- d2 = kwargs # type: Dict[str, str]
- d3 = kwargs # type: Dict[A, Any] # E: Incompatible types in assignment (expression has type Dict[str, Any], variable has type Dict[A, Any])
-class A: pass
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testCallingFunctionThatAcceptsVarKwargs]
-import typing
-def f( **kwargs: 'A') -> None: pass
-f()
-f(x=A())
-f(y=A(), z=A())
-f(x=B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
-f(A()) # E: Too many arguments for "f"
-# Perhaps a better message would be "Too many *positional* arguments..."
-class A: pass
-class B: pass
-[builtins fixtures/dict.pyi]
-
-[case testCallingFunctionWithKeywordVarArgs]
-from typing import Dict
-def f( **kwargs: 'A') -> None: pass
-d = None # type: Dict[str, A]
-f(**d)
-f(x=A(), **d)
-d2 = None # type: Dict[str, B]
-f(**d2) # E: Argument 1 to "f" has incompatible type **Dict[str, B]; expected "A"
-f(x=A(), **d2) # E: Argument 2 to "f" has incompatible type **Dict[str, B]; expected "A"
-class A: pass
-class B: pass
-[builtins fixtures/dict.pyi]
-
-[case testInvalidTypeForKeywordVarArg]
-from typing import Dict
-def f( **kwargs: 'A') -> None: pass
-d = None # type: Dict[A, A]
-f(**d) # E: Keywords must be strings
-f(**A()) # E: Argument after ** must be a dictionary
-class A: pass
-[builtins fixtures/dict.pyi]
-
-[case testPassingKeywordVarArgsToNonVarArgsFunction]
-from typing import Any, Dict
-def f(a: 'A', b: 'B') -> None: pass
-d = None # type: Dict[str, Any]
-f(**d)
-d2 = None # type: Dict[str, A]
-f(**d2) # E: Argument 1 to "f" has incompatible type **Dict[str, A]; expected "B"
-class A: pass
-class B: pass
-[builtins fixtures/dict.pyi]
-
-[case testBothKindsOfVarArgs]
-from typing import Any, List, Dict
-def f(a: 'A', b: 'A') -> None: pass
-l = None # type: List[Any]
-d = None # type: Dict[Any, Any]
-f(*l, **d)
-class A: pass
-[builtins fixtures/dict.pyi]
-
-[case testKeywordArgumentAndCommentSignature]
-import typing
-def f(x): # type: (int) -> str # N: "f" defined here
- pass
-f(x='') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-f(x=0)
-f(y=0) # E: Unexpected keyword argument "y" for "f"
-
-[case testKeywordArgumentAndCommentSignature2]
-import typing
-class A:
- def f(self, x): # type: (int) -> str
- pass
-A().f(x='') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-A().f(x=0)
-A().f(y=0) # E: Unexpected keyword argument "y" for "f" of "A"
-
-[case testKeywordVarArgsAndCommentSignature]
-import typing
-def f(**kwargs): # type: (**int) -> None
- pass
-f(z=1)
-f(x=1, y=1)
-f(x='', y=1) # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-f(x=1, y='') # E: Argument 2 to "f" has incompatible type "str"; expected "int"
-[builtins fixtures/dict.pyi]
-
-[case testCallsWithStars]
-def f(a: int) -> None:
- pass
-
-s = ('',)
-f(*s) # E: Argument 1 to "f" has incompatible type *"Tuple[str]"; expected "int"
-
-a = {'': 0}
-f(a) # E: Argument 1 to "f" has incompatible type Dict[str, int]; expected "int"
-f(**a) # okay
-
-b = {'': ''}
-f(b) # E: Argument 1 to "f" has incompatible type Dict[str, str]; expected "int"
-f(**b) # E: Argument 1 to "f" has incompatible type **Dict[str, str]; expected "int"
-
-c = {0: 0}
-f(**c) # E: Keywords must be strings
-[builtins fixtures/dict.pyi]
-
-[case testCallStar2WithStar]
-def f(**k): pass
-f(*(1, 2)) # E: Too many arguments for "f"
-[builtins fixtures/dict.pyi]
diff --git a/test-data/unit/check-lists.test b/test-data/unit/check-lists.test
deleted file mode 100644
index c9c67e8..0000000
--- a/test-data/unit/check-lists.test
+++ /dev/null
@@ -1,72 +0,0 @@
--- Nested list assignment
--- -----------------------------
-
-[case testNestedListAssignment]
-from typing import List
-a1, b1, c1 = None, None, None # type: (A, B, C)
-a2, b2, c2 = None, None, None # type: (A, B, C)
-
-a1, [b1, c1] = a2, [b2, c2]
-a1, [a1, [b1, c1]] = a2, [a2, [b2, c2]]
-a1, [a1, [a1, b1]] = a1, [a1, [a1, c1]] # E: Incompatible types in assignment (expression has type "C", variable has type "B")
-
-class A: pass
-class B: pass
-class C: pass
-[builtins fixtures/list.pyi]
-[out]
-
-[case testNestedListAssignmentToTuple]
-from typing import List
-a, b, c = None, None, None # type: (A, B, C)
-
-a, b = [a, b]
-a, b = [a] # E: Need more than 1 value to unpack (2 expected)
-a, b = [a, b, c] # E: Too many values to unpack (2 expected, 3 provided)
-
-class A: pass
-class B: pass
-class C: pass
-[builtins fixtures/list.pyi]
-[out]
-
-[case testListAssignmentFromTuple]
-from typing import List
-a, b, c = None, None, None # type: (A, B, C)
-t = a, b
-
-[a, b], c = t, c
-[a, c], c = t, c # E: Incompatible types in assignment (expression has type "B", variable has type "C")
-[a, a, a], c = t, c # E: Need more than 2 values to unpack (3 expected)
-[a], c = t, c # E: Too many values to unpack (1 expected, 2 provided)
-
-class A: pass
-class B: pass
-class C: pass
-[builtins fixtures/list.pyi]
-[out]
-
-[case testListAssignmentUnequalAmountToUnpack]
-from typing import List
-a, b, c = None, None, None # type: (A, B, C)
-
-def f() -> None: # needed because test parser tries to parse [a, b] as section header
- [a, b] = [a, b]
- [a, b] = [a] # E: Need more than 1 value to unpack (2 expected)
- [a, b] = [a, b, c] # E: Too many values to unpack (2 expected, 3 provided)
-
-class A: pass
-class B: pass
-class C: pass
-[builtins fixtures/list.pyi]
-[out]
-
-[case testListWithStarExpr]
-(x, *a) = [1, 2, 3]
-a = [1, *[2, 3]]
-reveal_type(a) # E: Revealed type is 'builtins.list[builtins.int]'
-b = [0, *a]
-reveal_type(b) # E: Revealed type is 'builtins.list[builtins.int*]'
-c = [*a, 0]
-reveal_type(c) # E: Revealed type is 'builtins.list[builtins.int*]'
-[builtins fixtures/list.pyi]
diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test
deleted file mode 100644
index 2444ce7..0000000
--- a/test-data/unit/check-modules.test
+++ /dev/null
@@ -1,1408 +0,0 @@
--- Type checker test cases dealing with modules and imports.
-
-[case testAccessImportedDefinitions]
-import m
-import typing
-m.f() # E: Too few arguments for "f"
-m.f(object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A"
-m.x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-m.f(m.A())
-m.x = m.A()
-[file m.py]
-class A: pass
-def f(a: A) -> None: pass
-x = A()
-
-[case testAccessImportedDefinitions]
-import m
-import typing
-m.f(object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A"
-m.f(m.A())
-[file m.py]
-class A: pass
-def f(a: A) -> None: pass
-
-[case testAccessImportedDefinitions2]
-from m import f, A
-import typing
-f(object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A"
-f(A())
-[file m.py]
-class A: pass
-def f(a: A) -> None: pass
-
-[case testImportedExceptionType]
-import m
-import typing
-try:
- pass
-except m.Err:
- pass
-except m.Bad: # E: Exception type must be derived from BaseException
- pass
-[file m.py]
-class Err(BaseException): pass
-class Bad: pass
-[builtins fixtures/exception.pyi]
-
-[case testImportedExceptionType2]
-from m import Err, Bad
-import typing
-try:
- pass
-except Err:
- pass
-except Bad: # E: Exception type must be derived from BaseException
- pass
-[file m.py]
-class Err(BaseException): pass
-class Bad: pass
-[builtins fixtures/exception.pyi]
-
-[case testImportWithinBlock]
-import typing
-if 1:
- import m
- m.a = m.b # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- m.a = m.a
- m.f()
- m.f(m.a) # E: Too many arguments for "f"
- m.a = m.A()
- m.a = m.B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-[file m.py]
-class A: pass
-class B: pass
-a = A()
-b = B()
-def f() -> None: pass
-
-[case testImportWithinFunction]
-import typing
-def f() -> None:
- from m import a, b, f, A, B
- a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- a = a
- f()
- f(a) # E: Too many arguments for "f"
- a = A()
- a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-[file m.py]
-class A: pass
-class B: pass
-a = A()
-b = B()
-def f() -> None: pass
-[out]
-
-[case testImportWithinMethod]
-import typing
-class C:
- def f(self) -> None:
- from m import *
- a = b # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- a = a
- f()
- f(a) # E: Too many arguments for "f"
- a = A()
- a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-[file m.py]
-class A: pass
-class B: pass
-a = A()
-b = B()
-def f() -> None: pass
-[out]
-
-[case testImportWithinClassBody]
-import typing
-class C:
- import m
- m.f()
- m.f(C) # E: Too many arguments for "f"
-[file m.py]
-def f() -> None: pass
-[out]
-
-[case testImportWithinClassBody2]
-import typing
-class C:
- from m import f
- f()
- f(C) # E: Too many arguments for "f"
-[file m.py]
-def f() -> None: pass
-[out]
-
-[case testImportWithStub]
-import _m
-_m.f("hola")
-[file _m.pyi]
-def f(c:str) -> None: pass
-[out]
-
-[case testImportWithStubIncompatibleType]
-import _m
-_m.f("hola")
-_m.f(12) # E: Argument 1 to "f" has incompatible type "int"; expected "str"
-[file _m.py]
-def f(c):
- print(c)
-[file _m.pyi]
-def f(c:str) -> None: pass
-
-[case testInvalidOperationsOnModules]
-import m
-import typing
-class A: pass
-m() # E: "module" not callable
-a = m # type: A # E: Incompatible types in assignment (expression has type "module", variable has type "A")
-m + None # E: Unsupported left operand type for + ("module")
-[file m.py]
-[builtins fixtures/module.pyi]
-
-[case testNameDefinedInDifferentModule]
-import m, n
-import typing
-m.x # E: "module" has no attribute "x"
-[file m.py]
-y = object()
-[file n.py]
-x = object()
-[builtins fixtures/module.pyi]
-
-[case testChainedAssignmentAndImports]
-import m
-
-i, s = None, None # type: (int, str)
-i = m.x
-i = m.y
-s = m.x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-s = m.y # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-[file m.py]
-x = y = 1
-[builtins fixtures/primitives.pyi]
-
-[case testConditionalFunctionDefinitionAndImports]
-import m
-import typing
-m.f(1)
-m.f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-[file m.py]
-x = object()
-if x:
- def f(x: int) -> None: pass
-else:
- def f(x: int) -> None: pass
-
-[case testTypeCheckWithUnknownModule]
-import nonexistent
-None + ''
-[out]
-main:1: error: Cannot find module named 'nonexistent'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Unsupported left operand type for + (None)
-
-[case testTypeCheckWithUnknownModule2]
-import m, nonexistent
-None + ''
-m.x = 1
-m.x = ''
-[file m.py]
-x = 1
-[out]
-main:1: error: Cannot find module named 'nonexistent'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Unsupported left operand type for + (None)
-main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testTypeCheckWithUnknownModule3]
-import nonexistent, m
-None + ''
-m.x = 1
-m.x = ''
-[file m.py]
-x = 1
-[out]
-main:1: error: Cannot find module named 'nonexistent'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Unsupported left operand type for + (None)
-main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testTypeCheckWithUnknownModule4]
-import nonexistent, another
-None + ''
-[out]
-main:1: error: Cannot find module named 'nonexistent'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:1: error: Cannot find module named 'another'
-main:2: error: Unsupported left operand type for + (None)
-
-[case testTypeCheckWithUnknownModule5]
-import nonexistent as x
-None + ''
-[out]
-main:1: error: Cannot find module named 'nonexistent'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Unsupported left operand type for + (None)
-
-[case testTypeCheckWithUnknownModuleUsingFromImport]
-from nonexistent import x
-None + ''
-[out]
-main:1: error: Cannot find module named 'nonexistent'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Unsupported left operand type for + (None)
-
-[case testTypeCheckWithUnknownModuleUsingImportStar]
-from nonexistent import *
-None + ''
-[out]
-main:1: error: Cannot find module named 'nonexistent'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Unsupported left operand type for + (None)
-
-[case testAccessingUnknownModule]
-import xyz
-xyz.foo()
-xyz()
-[out]
-main:1: error: Cannot find module named 'xyz'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-
-[case testAccessingUnknownModule2]
-import xyz, bar
-xyz.foo()
-bar()
-[out]
-main:1: error: Cannot find module named 'xyz'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:1: error: Cannot find module named 'bar'
-
-[case testAccessingUnknownModule3]
-import xyz as z
-xyz.foo()
-z()
-[out]
-main:1: error: Cannot find module named 'xyz'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Name 'xyz' is not defined
-
-[case testAccessingNameImportedFromUnknownModule]
-from xyz import y, z
-y.foo()
-z()
-[out]
-main:1: error: Cannot find module named 'xyz'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-
-[case testAccessingNameImportedFromUnknownModule2]
-from xyz import *
-y
-[out]
-main:1: error: Cannot find module named 'xyz'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Name 'y' is not defined
-
-[case testAccessingNameImportedFromUnknownModule3]
-from xyz import y as z
-y
-z
-[out]
-main:1: error: Cannot find module named 'xyz'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Name 'y' is not defined
-
-[case testUnknownModuleRedefinition]
-import xab
-def xab(): pass
-[out]
-main:1: error: Cannot find module named 'xab'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-
-[case testAccessingUnknownModuleFromOtherModule]
-import x
-x.nonexistent.foo
-x.z
-[file x.py]
-import nonexistent
-[builtins fixtures/module.pyi]
-[out]
-tmp/x.py:1: error: Cannot find module named 'nonexistent'
-tmp/x.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:3: error: "module" has no attribute "z"
-
-[case testUnknownModuleImportedWithinFunction]
-def f():
- import foobar
-def foobar(): pass
-foobar('')
-[out]
-main:2: error: Cannot find module named 'foobar'
-main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:4: error: Too many arguments for "foobar"
-
-[case testUnknownModuleImportedWithinFunction2]
-def f():
- from foobar import x
-def x(): pass
-x('')
-[out]
-main:2: error: Cannot find module named 'foobar'
-main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:4: error: Too many arguments for "x"
-
-[case testRelativeImports]
-import typing
-import m.a
-m.a.x = m.a.y # Error
-[file m/__init__.py]
-[file m/a.py]
-import typing
-from .b import A, B, x, y
-z = x
-z = y # Error
-[file m/b.py]
-import typing
-class A: pass
-class B: pass
-x = A()
-y = B()
-[out]
-tmp/m/a.py:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testRelativeImports2]
-import typing
-import m.a
-m.a.x = m.a.y # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-[file m/__init__.py]
-[file m/a.py]
-import typing
-from .b import A, B, x, y
-[file m/b.py]
-import typing
-class A: pass
-class B: pass
-x = A()
-y = B()
-
-[case testExportedValuesInImportAll]
-import typing
-from m import *
-_ = a
-_ = b
-_ = c
-_ = d
-_ = e
-_ = f # E: Name 'f' is not defined
-_ = _g # E: Name '_g' is not defined
-[file m.py]
-__all__ = ['a']
-__all__ += ('b',)
-__all__.append('c')
-__all__.extend(('d', 'e'))
-
-a = b = c = d = e = f = _g = 1
-[builtins fixtures/module_all.pyi]
-
-[case testAllMustBeSequenceStr]
-import typing
-__all__ = [1, 2, 3]
-[builtins fixtures/module_all.pyi]
-[out]
-main:2: error: Type of __all__ must be Sequence[str], not List[int]
-
-[case testAllMustBeSequenceStr_python2]
-import typing
-__all__ = [1, 2, 3]
-[builtins_py2 fixtures/module_all_python2.pyi]
-[out]
-main:2: error: Type of __all__ must be Sequence[unicode], not List[int]
-
-[case testAllUnicodeSequenceOK_python2]
-import typing
-__all__ = [u'a', u'b', u'c']
-[builtins_py2 fixtures/module_all_python2.pyi]
-
-[out]
-
-[case testEllipsisInitializerInStubFileWithType]
-import m
-m.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-[file m.pyi]
-x = ... # type: int
-
-[case testEllipsisInitializerInStubFileWithoutType]
-import m
-m.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "ellipsis")
-[file m.pyi]
-# Ellipsis is only special with a # type: comment (not sure though if this is great)
-x = ...
-
-[case testEllipsisInitializerInModule]
-x = ... # type: int # E: Incompatible types in assignment (expression has type "ellipsis", variable has type "int")
-
-[case testEllipsisDefaultArgValueInStub]
-import m
-m.f(1)
-m.f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-[file m.pyi]
-def f(x: int = ...) -> None: pass
-
-[case testEllipsisDefaultArgValueInStub2]
-import m
-def f(x: int = ...) -> None: pass
-[file m.pyi]
-def g(x: int = '') -> None: pass
-[out]
-tmp/m.pyi:1: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-main:2: error: Incompatible types in assignment (expression has type "ellipsis", variable has type "int")
-
-[case testEllipsisDefaultArgValueInNonStub]
-def f(x: int = ...) -> None: pass # E: Incompatible types in assignment (expression has type "ellipsis", variable has type "int")
-[out]
-
-[case testStarImportOverlapping]
-from m1 import *
-from m2 import *
-j = ''
-[file m1.py]
-x = 1
-[file m2.py]
-x = 1
-
-[case testStarImportOverlappingMismatch]
-from m1 import *
-from m2 import * # E: Incompatible import of "x" (imported name has type "int", local name has type "str")
-j = ''
-[file m1.py]
-x = ''
-[file m2.py]
-x = 1
-
-[case testStarImportOverridingLocalImports-skip]
-from m1 import *
-from m2 import *
-x = '' # E: TODO (cannot assign str to int)
-[file m1.py]
-x = 1
-[file m2.py]
-x = 1
-
-[case testAssignToFuncDefViaImport]
-from m import * # E: Incompatible import of "x" (imported name has type "int", local name has type "str")
-f = None # E: Need type annotation for variable
-x = ''
-[file m.py]
-def f(): pass
-x = 1+0
-[out]
-
-
--- Conditional definitions and function redefinitions via module object
--- --------------------------------------------------------------------
-
-
-[case testConditionalImportAndAssign]
-try:
- from m import x
-except:
- x = None
-try:
- from m import x as y
-except:
- y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-[file m.py]
-x = ''
-
-[case testAssignAndConditionalImport]
-x = ''
-try:
- from m import x
-except:
- pass
-y = 1
-try:
- from m import x as y # E: Incompatible import of "y" (imported name has type "str", local name has type "int")
-except:
- pass
-[file m.py]
-x = ''
-
-[case testAssignAndConditionalStarImport]
-x = ''
-y = 1
-try:
- from m import * # E: Incompatible import of "y" (imported name has type "str", local name has type "int")
-except:
- pass
-[file m.py]
-x = ''
-y = ''
-
-[case testRedefineImportedFunctionViaImport]
-try:
- from m import f, g
-except:
- def f(x): pass
- def g(x): pass # E: All conditional function variants must have identical signatures
-[file m.py]
-def f(x): pass
-def g(x, y): pass
-
-[case testImportedVariableViaImport]
-try:
- from m import x
-except:
- from n import x # E: Incompatible import of "x" (imported name has type "str", local name has type "int")
-[file m.py]
-x = 1
-[file n.py]
-x = ''
-
-[case testRedefineFunctionViaImport]
-def f(x): pass
-def g(x): pass
-try:
- from m import f, g # E: Incompatible import of "g" (imported name has type Callable[[Any, Any], Any], local name has type Callable[[Any], Any])
-except:
- pass
-[file m.py]
-def f(x): pass
-def g(x, y): pass
-
-[case testImportVariableAndAssignNone]
-try:
- from m import x
-except:
- x = None
-[file m.py]
-x = 1
-
-[case testImportFunctionAndAssignNone]
-try:
- from m import f
-except:
- f = None
-[file m.py]
-def f(): pass
-
-[case testImportFunctionAndAssignFunction]
-def g(x): pass
-try:
- from m import f
-except:
- f = g
-[file m.py]
-def f(x): pass
-
-[case testImportFunctionAndAssignIncompatible]
-try:
- from m import f
-except:
- f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
-[file m.py]
-def f(): pass
-
-[case testAssignToFuncDefViaGlobalDecl2]
-import typing
-from m import f
-def g() -> None:
- global f
- f = None
- f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
-[file m.py]
-def f(): pass
-[out]
-
-[case testAssignToFuncDefViaNestedModules]
-import m.n
-m.n.f = None
-m.n.f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
-[file m/__init__.py]
-[file m/n.py]
-def f(): pass
-[out]
-
-[case testAssignToFuncDefViaModule]
-import m
-m.f = None
-m.f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
-[file m.py]
-def f(): pass
-[out]
-
-[case testConditionalImportAndAssignNoneToModule]
-if object():
- import m
-else:
- m = None
-m.f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str"
-[file m.py]
-def f(x: str) -> None: pass
-[builtins fixtures/module.pyi]
-[out]
-
-[case testConditionalImportAndAssignInvalidToModule]
-if object():
- import m
-else:
- m = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "module")
-[file m.py]
-[builtins fixtures/module.pyi]
-[out]
-
-[case testImportAndAssignToModule]
-import m
-m = None
-m.f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str"
-[file m.py]
-def f(x: str) -> None: pass
-[builtins fixtures/module.pyi]
-[out]
-
-
--- Test cases that simulate 'mypy -m modname'
---
--- The module name to import is encoded in a comment.
-
-[case testTypeCheckNamedModule]
-# cmd: mypy -m m.a
-[file m/__init__.py]
-None + 1
-[file m/a.py]
-[out]
-tmp/m/__init__.py:1: error: Unsupported left operand type for + (None)
-
-[case testTypeCheckNamedModule2]
-# cmd: mypy -m m.a
-[file m/__init__.py]
-[file m/a.py]
-None + 1
-[out]
-tmp/m/a.py:1: error: Unsupported left operand type for + (None)
-
-[case testTypeCheckNamedModule3]
-# cmd: mypy -m m
-[file m/__init__.py]
-None + 1
-[file m/a.py]
-[out]
-tmp/m/__init__.py:1: error: Unsupported left operand type for + (None)
-
-[case testTypeCheckNamedModule4]
-# cmd: mypy -m m
-[file m/__init__.py]
-[file m/a.py]
-None + 1 # Not analyzed.
-[out]
-
-[case testTypeCheckNamedModule5]
-# cmd: mypy -m m
-None + '' # Not analyzed.
-[file m.py]
-None + 1
-[out]
-tmp/m.py:1: error: Unsupported left operand type for + (None)
-
-[case testTypeCheckNamedModuleWithImportCycle]
-# cmd: mypy -m m.a
-None + 1 # Does not generate error, as this file won't be analyzed.
-[file m/__init__.py]
-import m.a
-[file m/a.py]
-[out]
-
-
--- Checks dealing with submodules and different kinds of imports
--- -------------------------------------------------------------
-
-[case testSubmoduleRegularImportAddsAllParents]
-import a.b.c
-reveal_type(a.value) # E: Revealed type is 'builtins.int'
-reveal_type(a.b.value) # E: Revealed type is 'builtins.str'
-reveal_type(a.b.c.value) # E: Revealed type is 'builtins.float'
-b.value # E: Name 'b' is not defined
-c.value # E: Name 'c' is not defined
-
-[file a/__init__.py]
-value = 3
-[file a/b/__init__.py]
-value = "a"
-[file a/b/c.py]
-value = 3.2
-[out]
-
-[case testSubmoduleImportAsDoesNotAddParents]
-import a.b.c as foo
-reveal_type(foo.value) # E: Revealed type is 'builtins.float'
-a.value # E: Name 'a' is not defined
-b.value # E: Name 'b' is not defined
-c.value # E: Name 'c' is not defined
-
-[file a/__init__.py]
-value = 3
-[file a/b/__init__.py]
-value = "a"
-[file a/b/c.py]
-value = 3.2
-[out]
-
-[case testSubmoduleImportFromDoesNotAddParents]
-from a import b
-reveal_type(b.value) # E: Revealed type is 'builtins.str'
-b.c.value # E: "module" has no attribute "c"
-a.value # E: Name 'a' is not defined
-
-[file a/__init__.py]
-value = 3
-[file a/b/__init__.py]
-value = "a"
-[file a/b/c.py]
-value = 3.2
-[builtins fixtures/module.pyi]
-[out]
-
-[case testSubmoduleImportFromDoesNotAddParents2]
-from a.b import c
-reveal_type(c.value) # E: Revealed type is 'builtins.float'
-a.value # E: Name 'a' is not defined
-b.value # E: Name 'b' is not defined
-
-[file a/__init__.py]
-value = 3
-[file a/b/__init__.py]
-value = "a"
-[file a/b/c.py]
-value = 3.2
-[out]
-
-[case testSubmoduleRegularImportNotDirectlyAddedToParent]
-import a.b.c
-def accept_float(x: float) -> None: pass
-accept_float(a.b.c.value)
-
-[file a/__init__.py]
-value = 3
-b.value
-a.b.value
-
-[file a/b/__init__.py]
-value = "a"
-c.value
-a.b.c.value
-
-[file a/b/c.py]
-value = 3.2
-[out]
-tmp/a/b/__init__.py:2: error: Name 'c' is not defined
-tmp/a/b/__init__.py:3: error: Name 'a' is not defined
-tmp/a/__init__.py:2: error: Name 'b' is not defined
-tmp/a/__init__.py:3: error: Name 'a' is not defined
-
-[case testSubmoduleMixingLocalAndQualifiedNames]
-from a.b import MyClass
-val1 = None # type: a.b.MyClass # E: Name 'a' is not defined
-val2 = None # type: MyClass
-
-[file a/__init__.py]
-[file a/b.py]
-class MyClass: pass
-[out]
-
-[case testSubmoduleMixingImportFrom]
-import parent.child
-
-[file parent/__init__.py]
-
-[file parent/common.py]
-class SomeClass: pass
-
-[file parent/child.py]
-from parent.common import SomeClass
-from parent import common
-foo = parent.common.SomeClass()
-
-[builtins fixtures/module.pyi]
-[out]
-tmp/parent/child.py:3: error: Name 'parent' is not defined
-
-[case testSubmoduleMixingImportFromAndImport]
-import parent.child
-
-[file parent/__init__.py]
-
-[file parent/common.py]
-class SomeClass: pass
-
-[file parent/unrelated.py]
-class ShouldNotLoad: pass
-
-[file parent/child.py]
-from parent.common import SomeClass
-import parent
-
-# Note, since this might be unintuitive -- when `parent.common` is loaded in any way,
-# shape, or form, it's added to `parent`'s namespace, which is why the below line
-# succeeds.
-foo = parent.common.SomeClass()
-reveal_type(foo)
-bar = parent.unrelated.ShouldNotLoad()
-
-[builtins fixtures/module.pyi]
-[out]
-tmp/parent/child.py:8: error: Revealed type is 'parent.common.SomeClass'
-tmp/parent/child.py:9: error: "module" has no attribute "unrelated"
-
-[case testSubmoduleMixingImportFromAndImport2]
-import parent.child
-
-[file parent/__init__.py]
-
-[file parent/common.py]
-class SomeClass: pass
-
-[file parent/child.py]
-from parent import common
-import parent
-foo = parent.common.SomeClass()
-reveal_type(foo)
-
-[builtins fixtures/module.pyi]
-[out]
-tmp/parent/child.py:4: error: Revealed type is 'parent.common.SomeClass'
-
--- Tests repeated imports
-
-[case testIdenticalImportFromTwice]
-from a import x, y, z
-from b import x, y, z
-[file a.py]
-from common import x, y, z
-[file b.py]
-from common import x, y, z
-[file common.py]
-x = 3
-def y() -> int: return 3
-class z: pass
-[out]
-
-[case testIdenticalImportStarTwice]
-from a import *
-from b import *
-[file a.py]
-from common import x, y, z
-[file b.py]
-from common import x, y, z
-[file common.py]
-x = 3
-def y() -> int: return 3
-class z: pass
-[out]
-
-[case testDifferentImportSameNameTwice]
-from a import x, y, z
-from b import x, y, z
-[file a.py]
-x = 3
-def y() -> int: return 1
-class z: pass
-[file b.py]
-x = "foo"
-def y() -> str: return "foo"
-class z: pass
-[out]
-main:2: error: Incompatible import of "x" (imported name has type "str", local name has type "int")
-main:2: error: Incompatible import of "y" (imported name has type Callable[[], str], local name has type Callable[[], int])
-main:2: error: Incompatible import of "z" (imported name has type "z" (type object), local name has type "z" (type object))
-
--- Misc
-
-[case testInheritFromBadImport]
-# cmd: mypy -m bar
-[file foo.py]
-pass
-[file bar.py]
-from foo import B
-class C(B):
- pass
-[out]
-tmp/bar.py:1: error: Module 'foo' has no attribute 'B'
-
-[case testImportSuppressedWhileAlmostSilent]
-# cmd: mypy -m main
-# flags: --follow-imports=error
-[file main.py]
-import mod
-[file mod.py]
-[builtins fixtures/module.pyi]
-[out]
-tmp/main.py:1: note: Import of 'mod' ignored
-tmp/main.py:1: note: (Using --follow-imports=error, module not passed on command line)
-
-[case testAncestorSuppressedWhileAlmostSilent]
-# cmd: mypy -m foo.bar
-# flags: --follow-imports=error
-[file foo/bar.py]
-[file foo/__init__.py]
-[builtins fixtures/module.pyi]
-[out]
-tmp/foo/bar.py: note: Ancestor package 'foo' ignored
-tmp/foo/bar.py: note: (Using --follow-imports=error, submodule passed on command line)
-
-[case testStubImportNonStubWhileSilent]
-# cmd: mypy -m main
-# flags: --follow-imports=skip
-[file main.py]
-from stub import x # Permitted
-from other import y # Disallowed
-x + '' # Error here
-y + '' # But not here
-[file stub.pyi]
-from non_stub import x
-[file non_stub.py]
-x = 42
-[file other.py]
-y = 42
-[builtins fixtures/module.pyi]
-[out]
-tmp/main.py:3: error: Unsupported left operand type for + ("int")
-
-[case testSilentSubmoduleImport]
-# cmd: mypy -m foo
-# flags: --follow-imports=skip
-[file foo/__init__.py]
-from foo import bar
-[file foo/bar.py]
-pass
-
-[case testSuperclassInImportCycle]
-import a
-import d
-a.A().f(d.D())
-[file a.py]
-if 0:
- import d
-class B: pass
-class C(B): pass
-class A:
- def f(self, x: B) -> None: pass
-[file d.py]
-import a
-class D(a.C): pass
-
-[case testSuperclassInImportCycleReversedImports]
-import d
-import a
-a.A().f(d.D())
-[file a.py]
-if 0:
- import d
-class B: pass
-class C(B): pass
-class A:
- def f(self, x: B) -> None: pass
-[file d.py]
-import a
-class D(a.C): pass
-
-[case testPreferPackageOverFile]
-import a
-[file a.py]
-/ # intentional syntax error -- this file shouldn't be parsed
-[file a/__init__.py]
-pass
-[out]
-
-[case testPreferPackageOverFile2]
-from a import x
-[file a.py]
-/ # intentional syntax error -- this file shouldn't be parsed
-[file a/__init__.py]
-x = 0
-[out]
-
-[case testImportInClass]
-class C:
- import foo
-reveal_type(C.foo.bar) # E: Revealed type is 'builtins.int'
-[file foo.py]
-bar = 0
-[builtins fixtures/module.pyi]
-[out]
-
-[case testIfFalseImport]
-if False:
- import a
-def f(x: 'a.A') -> int:
- return x.f()
-[file a.py]
-class A:
- def f(self) -> int:
- return 0
-[builtins fixtures/bool.pyi]
-
-
--- Test stability under import cycles
--- ----------------------------------
-
--- The first two tests are identical except that one main module has 'import x'
--- and the other has 'import y'. Previously (before build.order_ascc()
--- was added) one of these would fail because the imports were
--- processed in the (reverse) order in which the files were
--- encountered.
-
-[case testImportCycleStability1]
-import x
-[file x.py]
-def f() -> str: return ''
-class Base:
- attr = f()
-def foo():
- import y
-[file y.py]
-import x
-class Sub(x.Base):
- attr = x.Base.attr
-[out]
-
-[case testImportCycleStability2]
-import y
-[file x.py]
-def f() -> str: return ''
-class Base:
- attr = f()
-def foo():
- import y
-[file y.py]
-import x
-class Sub(x.Base):
- attr = x.Base.attr
-[out]
-
--- This case isn't fixed by order_ascc(), but is fixed by the
--- lightweight type inference added to semanal.py
--- (analyze_simple_literal_type()).
-
-[case testImportCycleStability3]
-import y
-[file x.py]
-class Base:
- pass
-def foo() -> int:
- import y
- reveal_type(y.Sub.attr)
- return y.Sub.attr
-[file y.py]
-import x
-class Sub(x.Base):
- attr = 0
-[out]
-tmp/x.py:5: error: Revealed type is 'builtins.int'
-
--- This case has a symmetrical cycle, so it doesn't matter in what
--- order the files are processed. It depends on the lightweight type
--- inference.
-
-[case testImportCycleStability4]
-import x
-[file x.py]
-import y
-class C:
- attr = ''
-def foo() -> int:
- return y.D.attr
-[file y.py]
-import x
-class D:
- attr = 0
-def bar() -> str:
- return x.C.attr
-
--- These cases test all supported literal types.
-
-[case testImportCycleStability5]
-import y
-[file x.py]
-class Base:
- pass
-def foo() -> None:
- import y
- i = y.Sub.iattr # type: int
- f = y.Sub.fattr # type: float
- s = y.Sub.sattr # type: str
- b = y.Sub.battr # type: bytes
-[file y.py]
-import x
-class Sub(x.Base):
- iattr = 0
- fattr = 0.0
- sattr = ''
- battr = b''
-[out]
-
-[case testImportCycleStability6_python2]
-import y
-[file x.py]
-class Base:
- pass
-def foo():
- # type: () -> None
- import y
- i = y.Sub.iattr # type: int
- f = y.Sub.fattr # type: float
- s = y.Sub.sattr # type: str
- u = y.Sub.uattr # type: unicode
-[file y.py]
-import x
-class Sub(x.Base):
- iattr = 0
- fattr = 0.0
- sattr = ''
- uattr = u''
-[out]
-
--- This case tests module-level variables.
-
-[case testImportCycleStability7]
-import x
-[file x.py]
-def foo() -> int:
- import y
- reveal_type(y.value)
- return y.value
-[file y.py]
-import x
-value = 12
-[out]
-tmp/x.py:3: error: Revealed type is 'builtins.int'
-
--- This is not really cycle-related but still about the lightweight
--- type checker.
-
-[case testImportCycleStability8]
-x = 1 # type: str
-reveal_type(x)
-[out]
-main:1: error: Incompatible types in assignment (expression has type "int", variable has type "str")
-main:2: error: Revealed type is 'builtins.str'
-
--- Tests for cross-module second_pass checking.
-
-[case testSymmetricImportCycle1]
-import a
-[file a.py]
-import b
-def f() -> int:
- return b.x
-y = 0 + 0
-[file b.py]
-import a
-def g() -> int:
- reveal_type(a.y)
- return a.y
-x = 1 + 1
-[out]
-tmp/b.py:3: error: Revealed type is 'builtins.int'
-
-[case testSymmetricImportCycle2]
-import b
-[file a.py]
-import b
-def f() -> int:
- reveal_type(b.x)
- return b.x
-y = 0 + 0
-[file b.py]
-import a
-def g() -> int:
- return a.y
-x = 1 + 1
-[out]
-tmp/a.py:3: error: Revealed type is 'builtins.int'
-
-[case testThreePassesRequired]
-import b
-[file a.py]
-import b
-class C:
- def f1(self) -> None:
- self.x2
- def f2(self) -> None:
- self.x2 = b.b
-[file b.py]
-import a
-b = 1 + 1
-[out]
-tmp/a.py:4: error: Cannot determine type of 'x2'
-
-[case testErrorInPassTwo1]
-import b
-[file a.py]
-import b
-def f() -> None:
- a = b.x + 1
- a + ''
-[file b.py]
-import a
-x = 1 + 1
-[out]
-tmp/a.py:4: error: Unsupported operand types for + ("int" and "str")
-
-[case testErrorInPassTwo2]
-import a
-[file a.py]
-import b
-def f() -> None:
- a = b.x + 1
- a + ''
-[file b.py]
-import a
-x = 1 + 1
-[out]
-tmp/a.py:4: error: Unsupported operand types for + ("int" and "str")
-
-[case testDeferredDecorator]
-import a
-[file a.py]
-import b
-def g() -> None:
- f('')
-@b.deco
-def f(a: str) -> int: pass
-reveal_type(f)
-x = 1 + 1
-[file b.py]
-from typing import Callable, TypeVar
-import a
-T = TypeVar('T')
-def deco(f: Callable[[T], int]) -> Callable[[T], int]:
- a.x
- return f
-[out]
-tmp/a.py:6: error: Revealed type is 'def (builtins.str*) -> builtins.int'
-
-[case testDeferredClassContext]
-class A:
- def f(self) -> str: return 'foo'
-class B(A):
- def f(self) -> str: return self.x
- def initialize(self): self.x = 'bar'
-[out]
-
-
--- Scripts and __main__
-
-[case testScriptsAreModules]
-# flags: --scripts-are-modules
-[file a]
-pass
-[file b]
-pass
-
-[case testScriptsAreNotModules]
-# cmd: mypy a b
-[file a]
-pass
-[file b]
-pass
-[out]
-
-[case testTypeCheckPrio]
-# cmd: mypy -m part1 part2 part3 part4
-
-[file part1.py]
-from part3 import Thing
-class FirstThing: pass
-
-[file part2.py]
-from part4 import part4_thing as Thing
-
-[file part3.py]
-from part2 import Thing
-reveal_type(Thing)
-
-[file part4.py]
-from typing import TYPE_CHECKING
-if TYPE_CHECKING:
- from part1 import FirstThing
-def part4_thing(a: int) -> str: pass
-
-[builtins fixtures/bool.pyi]
-[out]
-tmp/part3.py:2: error: Revealed type is 'def (a: builtins.int) -> builtins.str'
-
-[case testImportStarAliasAnyList]
-import bar
-
-[file bar.py]
-from foo import *
-def bar(y: AnyAlias) -> None: pass
-
-l = None # type: ListAlias[int]
-reveal_type(l)
-
-[file foo.py]
-from typing import Any, List
-AnyAlias = Any
-ListAlias = List
-[builtins fixtures/list.pyi]
-[out]
-tmp/bar.py:5: error: Revealed type is 'builtins.list[builtins.int]'
-
-[case testImportStarAliasSimpleGeneric]
-from ex2a import *
-
-def do_something(dic: Row) -> None:
- pass
-
-def do_another() -> Row:
- return {}
-
-do_something({'good': 'bad'}) # E: List item 0 has incompatible type "Tuple[str, str]"
-reveal_type(do_another()) # E: Revealed type is 'builtins.dict[builtins.str, builtins.int]'
-
-[file ex2a.py]
-from typing import Dict
-Row = Dict[str, int]
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testImportStarAliasGeneric]
-from y import *
-notes = None # type: G[X]
-another = G[X]()
-second = XT[str]()
-last = XT[G]()
-
-reveal_type(notes) # E: Revealed type is 'y.G[y.G[builtins.int]]'
-reveal_type(another) # E: Revealed type is 'y.G[y.G*[builtins.int]]'
-reveal_type(second) # E: Revealed type is 'y.G[builtins.str*]'
-reveal_type(last) # E: Revealed type is 'y.G[y.G*]'
-
-[file y.py]
-from typing import Generic, TypeVar
-
-T = TypeVar('T')
-
-class G(Generic[T]):
- pass
-
-X = G[int]
-XT = G[T]
-[out]
-
-[case testImportStarAliasCallable]
-from foo import *
-from typing import Any
-
-def bar(x: Any, y: AnyCallable) -> Any:
- return 'foo'
-
-cb = None # type: AnyCallable
-reveal_type(cb) # E: Revealed type is 'def (*Any, **Any) -> Any'
-
-[file foo.py]
-from typing import Callable, Any
-AnyCallable = Callable[..., Any]
-[out]
diff --git a/test-data/unit/check-multiple-inheritance.test b/test-data/unit/check-multiple-inheritance.test
deleted file mode 100644
index 678ccad..0000000
--- a/test-data/unit/check-multiple-inheritance.test
+++ /dev/null
@@ -1,242 +0,0 @@
--- Test cases for multiple inheritance.
---
--- Related: check-abstract.test
-
-
--- No name collisions
--- ------------------
-
-
-[case testSimpleMultipleInheritanceAndMethods]
-import typing
-class A:
- def f(self, x: int) -> None: pass
-class B:
- def g(self, x: str) -> None: pass
-class C(A, B): pass
-c = C()
-c.f(1)
-c.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-c.g('')
-c.g(1) # E: Argument 1 to "g" of "B" has incompatible type "int"; expected "str"
-
-[case testSimpleMultipleInheritanceAndMethods2]
-import typing
-class A:
- def f(self, x: int) -> None: pass
-class B:
- def g(self, x): pass
-class C(A, B): pass
-c = C()
-c.f(1)
-c.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-c.g('')
-c.g(1)
-
-[case testSimpleMultipleInheritanceAndInstanceVariables]
-import typing
-class A:
- def f(self) -> None:
- self.x = 1
-class B:
- def g(self) -> None:
- self.y = ''
-class C(A, B): pass
-c = C()
-c.x = 1
-c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-c.y = ''
-c.y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-
-[case testSimpleMultipleInheritanceAndInstanceVariableInClassBody]
-import typing
-class A:
- x = 1
-class B:
- y = ''
-class C(A, B): pass
-c = C()
-c.x = 1
-c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-c.y = ''
-c.y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-
-[case testSimpleMultipleInheritanceAndClassVariable]
-import typing
-class A:
- x = 1
-class B:
- y = ''
-class C(A, B): pass
-C.x = 1
-C.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-C.y = ''
-C.y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-
-
--- Name collisions
--- ---------------
-
-
-[case testMethodNameCollisionInMultipleInheritanceWithValidSigs]
-import typing
-class A:
- def f(self, x: int) -> None: pass
-class B:
- def f(self, x: int) -> None: pass
-class C(A, B): pass
-c = C()
-c.f(1)
-c.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-
-[case testInstanceVarNameOverlapInMultipleInheritanceWithCompatibleTypes]
-import typing
-class A:
- def f(self) -> None:
- self.x = 1
-class B:
- def g(self) -> None:
- self.x = 1
-class C(A, B): pass
-c = C()
-c.x = 1
-c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testClassVarNameOverlapInMultipleInheritanceWithCompatibleTypes]
-import typing
-class A:
- x = 1
-class B:
- x = 1
-class C(A, B): pass
-c = C()
-c.x = 1
-c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-C.x = 1
-C.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testMethodNameCollisionInMultipleInheritanceWithIncompatibleSigs]
-import typing
-class A:
- def f(self, x: int) -> None: pass
-class B:
- def f(self, x: str) -> None: pass
-class C(A, B): pass
-[out]
-main:6: error: Definition of "f" in base class "A" is incompatible with definition in base class "B"
-
-[case testMethodNameCollisionInMultipleInheritanceWithIncompatibleSigs2]
-import typing
-class A:
- def f(self, x: int) -> None: pass
-class B:
- def f(self, x, y): pass
-class C(A, B): pass
-class D(B, A): pass
-[out]
-main:6: error: Definition of "f" in base class "A" is incompatible with definition in base class "B"
-main:7: error: Definition of "f" in base class "B" is incompatible with definition in base class "A"
-
-
-[case testMethodOverridingWithBothDynamicallyAndStaticallyTypedMethods]
-class A:
- def f(self) -> int: pass
-class B:
- def f(self): pass
-class C(B, A): pass
-class D(A, B): pass
-[out]
-
-[case testInstanceVarNameOverlapInMultipleInheritanceWithInvalidTypes]
-import typing
-class A:
- def f(self) -> None:
- self.x = 1
-class B:
- def g(self) -> None:
- self.x = ''
-class C(A, B): pass
-[out]
-main:8: error: Definition of "x" in base class "A" is incompatible with definition in base class "B"
-
-[case testClassVarNameOverlapInMultipleInheritanceWithInvalidTypes]
-import typing
-class A:
- x = 1
-class B:
- x = ''
-class C(A, B): pass
-[out]
-main:6: error: Definition of "x" in base class "A" is incompatible with definition in base class "B"
-
-[case testMethodOverlapsWithClassVariableInMultipleInheritance]
-from typing import Callable
-class A:
- def f(self) -> None: pass
-class B:
- f = ''
-class C(A, B): pass
-[out]
-main:6: error: Definition of "f" in base class "A" is incompatible with definition in base class "B"
-
-[case testMethodOverlapsWithInstanceVariableInMultipleInheritance]
-from typing import Callable
-class A:
- def f(self) -> None: pass
-class B:
- def g(self) -> None:
- self.f = ''
-class C(A, B): pass
-[out]
-main:7: error: Definition of "f" in base class "A" is incompatible with definition in base class "B"
-
-[case testMultipleInheritanceAndInit]
-import typing
-class A:
- def __init__(self, x: int) -> None: pass
-class B:
- def __init__(self) -> None: pass
-class C(A, B): pass
-
-[case testMultipleInheritanceAndDifferentButCompatibleSignatures]
-class A:
- def clear(self): pass
-
-class B:
- def clear(self, x=None): pass
-
-class C(B, A): pass
-class D(A, B): pass
-[out]
-main:8: error: Definition of "clear" in base class "A" is incompatible with definition in base class "B"
-
-
--- Special cases
--- -------------
-
-
-[case testGenericInheritanceAndOverridingWithMultipleInheritance]
-from typing import Generic, TypeVar
-T = TypeVar('T')
-class G(Generic[T]):
- def f(self, s: int) -> 'G[T]': pass
-class A(G[int]):
- def f(self, s: int) -> 'A': pass
-class B(A, int): pass
-
-[case testCannotDetermineTypeInMultipleInheritance]
-from typing import Callable, TypeVar
-T = TypeVar('T')
-class A(B, C):
- def f(self): pass
-class B:
- @dec
- def f(self): pass
-class C:
- @dec
- def f(self): pass
-def dec(f: Callable[..., T]) -> Callable[..., T]:
- return f
-[out]
-main:3: error: Cannot determine type of 'f' in base class 'B'
-main:3: error: Cannot determine type of 'f' in base class 'C'
diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test
deleted file mode 100644
index 71a058b..0000000
--- a/test-data/unit/check-namedtuple.test
+++ /dev/null
@@ -1,429 +0,0 @@
-[case testNamedTupleUsedAsTuple]
-from collections import namedtuple
-
-X = namedtuple('X', ['x', 'y'])
-x = None # type: X
-a, b = x
-b = x[0]
-a = x[1]
-a, b, c = x # E: Need more than 2 values to unpack (3 expected)
-x[2] # E: Tuple index out of range
-
-[case testNamedTupleWithTupleFieldNamesUsedAsTuple]
-from collections import namedtuple
-
-X = namedtuple('X', ('x', 'y'))
-x = None # type: X
-a, b = x
-b = x[0]
-a = x[1]
-a, b, c = x # E: Need more than 2 values to unpack (3 expected)
-x[2] # E: Tuple index out of range
-
-[case testNamedTupleNoUnderscoreFields]
-from collections import namedtuple
-
-X = namedtuple('X', 'x, _y, _z') # E: namedtuple() field names cannot start with an underscore: _y, _z
-
-[case testNamedTupleAccessingAttributes]
-from collections import namedtuple
-
-X = namedtuple('X', ['x', 'y'])
-x = None # type: X
-x.x
-x.y
-x.z # E: "X" has no attribute "z"
-
-
-[case testNamedTupleAttributesAreReadOnly]
-from collections import namedtuple
-
-X = namedtuple('X', ['x', 'y'])
-x = None # type: X
-x.x = 5 # E: Property "x" defined in "X" is read-only
-x.y = 5 # E: Property "y" defined in "X" is read-only
-x.z = 5 # E: "X" has no attribute "z"
-
-class A(X): pass
-a = None # type: A
-a.x = 5 # E: Property "x" defined in "A" is read-only
-a.y = 5 # E: Property "y" defined in "A" is read-only
--- a.z = 5 # not supported yet
-
-
-[case testNamedTupleCreateWithPositionalArguments]
-from collections import namedtuple
-
-X = namedtuple('X', ['x', 'y'])
-x = X(1, 'x')
-x.x
-x.z # E: "X" has no attribute "z"
-x = X(1) # E: Too few arguments for "X"
-x = X(1, 2, 3) # E: Too many arguments for "X"
-
-[case testCreateNamedTupleWithKeywordArguments]
-from collections import namedtuple
-
-X = namedtuple('X', ['x', 'y'])
-x = X(x=1, y='x')
-x = X(1, y='x')
-x = X(x=1, z=1) # E: Unexpected keyword argument "z" for "X"
-x = X(y=1) # E: Missing positional argument "x" in call to "X"
-
-
-[case testNamedTupleCreateAndUseAsTuple]
-from collections import namedtuple
-
-X = namedtuple('X', ['x', 'y'])
-x = X(1, 'x')
-a, b = x
-a, b, c = x # E: Need more than 2 values to unpack (3 expected)
-
-
-[case testNamedTupleWithItemTypes]
-from typing import NamedTuple
-N = NamedTuple('N', [('a', int),
- ('b', str)])
-n = N(1, 'x')
-s = n.a # type: str # E: Incompatible types in assignment (expression has type "int", \
- variable has type "str")
-i = n.b # type: int # E: Incompatible types in assignment (expression has type "str", \
- variable has type "int")
-x, y = n
-x = y # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-
-[case testNamedTupleWithTupleFieldNamesWithItemTypes]
-from typing import NamedTuple
-N = NamedTuple('N', (('a', int),
- ('b', str)))
-n = N(1, 'x')
-s = n.a # type: str # E: Incompatible types in assignment (expression has type "int", \
- variable has type "str")
-i = n.b # type: int # E: Incompatible types in assignment (expression has type "str", \
- variable has type "int")
-x, y = n
-x = y # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-
-[case testNamedTupleConstructorArgumentTypes]
-from typing import NamedTuple
-N = NamedTuple('N', [('a', int),
- ('b', str)])
-n = N('x', 'x') # E: Argument 1 to "N" has incompatible type "str"; expected "int"
-n = N(1, b=2) # E: Argument 2 to "N" has incompatible type "int"; expected "str"
-N(1, 'x')
-N(b='x', a=1)
-
-[case testNamedTupleAsBaseClass]
-from typing import NamedTuple
-N = NamedTuple('N', [('a', int),
- ('b', str)])
-class X(N):
- pass
-x = X(1, 2) # E: Argument 2 to "X" has incompatible type "int"; expected "str"
-s = ''
-i = 0
-s = x.a # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-i, s = x
-s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-
-[case testNamedTupleAsBaseClass2]
-from typing import NamedTuple
-class X(NamedTuple('N', [('a', int),
- ('b', str)])):
- pass
-x = X(1, 2) # E: Argument 2 to "X" has incompatible type "int"; expected "str"
-s = ''
-i = 0
-s = x.a # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-i, s = x
-s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-
-
-[case testNamedTuplesTwoAsBaseClasses]
-from typing import NamedTuple
-A = NamedTuple('A', [('a', int)])
-B = NamedTuple('B', [('a', int)])
-class X(A, B): # E: Class has two incompatible bases derived from tuple
- pass
-
-
-[case testNamedTuplesTwoAsBaseClasses2]
-from typing import NamedTuple
-A = NamedTuple('A', [('a', int)])
-class X(A, NamedTuple('B', [('a', int)])): # E: Class has two incompatible bases derived from tuple
- pass
-
-
-[case testNamedTupleSelfTypeWithNamedTupleAsBase]
-from typing import NamedTuple
-A = NamedTuple('A', [('a', int), ('b', str)])
-class B(A):
- def f(self, x: int) -> None:
- self.f(self.a)
- self.f(self.b) # E: Argument 1 to "f" of "B" has incompatible type "str"; expected "int"
- i = 0
- s = ''
- i, s = self
- i, i = self # E: Incompatible types in assignment (expression has type "str", \
- variable has type "int")
-
-
-[out]
-
-[case testNamedTupleTypeReferenceToClassDerivedFrom]
-from typing import NamedTuple
-A = NamedTuple('A', [('a', int), ('b', str)])
-class B(A):
- def f(self, x: 'B') -> None:
- i = 0
- s = ''
- self = x
- i, s = x
- i, s = x.a, x.b
- i, s = x.a, x.a # E: Incompatible types in assignment (expression has type "int", \
- variable has type "str")
- i, i = self # E: Incompatible types in assignment (expression has type "str", \
- variable has type "int")
-
-[out]
-
-[case testNamedTupleSubtyping]
-from typing import NamedTuple, Tuple
-A = NamedTuple('A', [('a', int), ('b', str)])
-class B(A): pass
-a = A(1, '')
-b = B(1, '')
-t = None # type: Tuple[int, str]
-b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = t # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "A")
-b = t # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "B")
-t = a
-t = (1, '')
-t = b
-a = b
-
-
-[case testNamedTupleSimpleTypeInference]
-from typing import NamedTuple, Tuple
-A = NamedTuple('A', [('a', int)])
-l = [A(1), A(2)]
-a = A(1)
-a = l[0]
-(i,) = l[0]
-i, i = l[0] # E: Need more than 1 value to unpack (2 expected)
-l = [A(1)]
-a = (1,) # E: Incompatible types in assignment (expression has type "Tuple[int]", \
- variable has type "A")
-[builtins fixtures/list.pyi]
-
-[case testNamedTupleMissingClassAttribute]
-import collections
-MyNamedTuple = collections.namedtuple('MyNamedTuple', ['spam', 'eggs'])
-MyNamedTuple.x # E: "MyNamedTuple" has no attribute "x"
-
-
-[case testNamedTupleEmptyItems]
-from typing import NamedTuple
-A = NamedTuple('A', [])
-
-
-[case testNamedTupleProperty]
-from typing import NamedTuple
-A = NamedTuple('A', [('a', int)])
-class B(A):
- @property
- def b(self) -> int:
- return self.a
-class C(B): pass
-B(1).b
-C(2).b
-
-[builtins fixtures/property.pyi]
-
-[case testNamedTupleAsDict]
-from collections import namedtuple
-
-X = namedtuple('X', ['x', 'y'])
-x = None # type: X
-reveal_type(x._asdict()) # E: Revealed type is 'builtins.dict[builtins.str, Any]'
-
-[builtins fixtures/dict.pyi]
-
-[case testNamedTupleReplace]
-from collections import namedtuple
-
-X = namedtuple('X', ['x', 'y'])
-x = None # type: X
-reveal_type(x._replace()) # E: Revealed type is 'Tuple[Any, Any, fallback=__main__.X]'
-x._replace(y=5)
-x._replace(x=3)
-x._replace(x=3, y=5)
-x._replace(z=5) # E: Unexpected keyword argument "z" for X._replace
-x._replace(5) # E: Too many positional arguments for X._replace
-
-[case testNamedTupleReplaceAsClass]
-from collections import namedtuple
-
-X = namedtuple('X', ['x', 'y'])
-x = None # type: X
-X._replace(x, x=1, y=2)
-X._replace(x=1, y=2) # E: Missing positional argument "self" in call to X._replace
-
-
-[case testNamedTupleReplaceTyped]
-from typing import NamedTuple
-
-X = NamedTuple('X', [('x', int), ('y', str)])
-x = None # type: X
-reveal_type(x._replace()) # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
-x._replace(x=5)
-x._replace(y=5) # E: Argument 1 to X._replace has incompatible type "int"; expected "str"
-
-[case testNamedTupleMake]
-from typing import NamedTuple
-
-X = NamedTuple('X', [('x', int), ('y', str)])
-reveal_type(X._make([5, 'a'])) # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
-X._make('a b') # E: Argument 1 to X._make has incompatible type "str"; expected Iterable[Any]
-
--- # FIX: not a proper class method
--- x = None # type: X
--- reveal_type(x._make([5, 'a'])) # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
--- x._make('a b') # E: Argument 1 to X._make has incompatible type "str"; expected Iterable[Any]
-
-[builtins fixtures/list.pyi]
-
-[case testNamedTupleFields]
-from typing import NamedTuple
-
-X = NamedTuple('X', [('x', int), ('y', str)])
-reveal_type(X._fields) # E: Revealed type is 'Tuple[builtins.str, builtins.str]'
-
-[case testNamedTupleSource]
-from typing import NamedTuple
-
-X = NamedTuple('X', [('x', int), ('y', str)])
-reveal_type(X._source) # E: Revealed type is 'builtins.str'
-x = None # type: X
-reveal_type(x._source) # E: Revealed type is 'builtins.str'
-
-[case testNamedTupleUnit]
-from typing import NamedTuple
-
-X = NamedTuple('X', [])
-x = X() # type: X
-x._replace()
-x._fields[0] # E: Tuple index out of range
-
-[case testNamedTupleJoinNamedTuple]
-from typing import NamedTuple
-
-X = NamedTuple('X', [('x', int), ('y', str)])
-Y = NamedTuple('Y', [('x', int), ('y', str)])
-reveal_type([X(3, 'b'), Y(1, 'a')]) # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
-
-[builtins fixtures/list.pyi]
-
-[case testNamedTupleJoinTuple]
-from typing import NamedTuple, Tuple
-
-X = NamedTuple('X', [('x', int), ('y', str)])
-reveal_type([(3, 'b'), X(1, 'a')]) # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
-reveal_type([X(1, 'a'), (3, 'b')]) # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
-
-[builtins fixtures/list.pyi]
-
-[case testNamedTupleFieldTypes]
-from typing import NamedTuple
-
-X = NamedTuple('X', [('x', int), ('y', str)])
-reveal_type(X._field_types) # E: Revealed type is 'builtins.dict[builtins.str, Any]'
-x = None # type: X
-reveal_type(x._field_types) # E: Revealed type is 'builtins.dict[builtins.str, Any]'
-
-[builtins fixtures/dict.pyi]
-
-[case testNamedTupleAndOtherSuperclass]
-from typing import NamedTuple
-
-class A: pass
-def f(x: A) -> None: pass
-
-class B(NamedTuple('B', []), A): pass
-f(B())
-x = None # type: A
-x = B()
-
-# Sanity check: fail if the base class does not match
-class C: pass
-def g(x: C) -> None: pass
-class D(NamedTuple('D', []), A): pass
-
-g(D()) # E: Argument 1 to "g" has incompatible type "D"; expected "C"
-y = None # type: C
-y = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C")
-
-[case testNamedTupleSelfTypeMethod]
-from typing import TypeVar, NamedTuple
-
-T = TypeVar('T', bound='A')
-
-class A(NamedTuple('A', [('x', str)])):
- def member(self: T) -> T:
- return self
-
-class B(A):
- pass
-
-a = None # type: A
-a = A('').member()
-b = None # type: B
-b = B('').member()
-a = B('')
-a = B('').member()
-
-[case testNamedTupleSelfTypeReplace]
-from typing import NamedTuple, TypeVar
-A = NamedTuple('A', [('x', str)])
-reveal_type(A('hello')._replace(x='')) # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.A]'
-a = None # type: A
-a = A('hello')._replace(x='')
-
-class B(A):
- pass
-
-reveal_type(B('hello')._replace(x='')) # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.B]'
-b = None # type: B
-b = B('hello')._replace(x='')
-
-[case testNamedTupleSelfTypeMake]
-from typing import NamedTuple, TypeVar
-A = NamedTuple('A', [('x', str)])
-reveal_type(A._make([''])) # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.A]'
-a = A._make(['']) # type: A
-
-class B(A):
- pass
-
-reveal_type(B._make([''])) # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.B]'
-b = B._make(['']) # type: B
-
-[builtins fixtures/list.pyi]
-
-[case testNamedTupleInClassNamespace]
-# https://github.com/python/mypy/pull/2553#issuecomment-266474341
-from typing import NamedTuple
-class C:
- def f(self):
- A = NamedTuple('A', [('x', int)])
- def g(self):
- A = NamedTuple('A', [('y', int)])
-C.A # E: "C" has no attribute "A"
-
-[case testNamedTupleInFunction]
-from typing import NamedTuple
-def f() -> None:
- A = NamedTuple('A', [('x', int)])
-A # E: Name 'A' is not defined
diff --git a/test-data/unit/check-newsyntax.test b/test-data/unit/check-newsyntax.test
deleted file mode 100644
index bfbaade..0000000
--- a/test-data/unit/check-newsyntax.test
+++ /dev/null
@@ -1,100 +0,0 @@
-[case testNewSyntaxRequire36]
-# flags: --fast-parser --python-version 3.5
-x: int = 5 # E: Variable annotation syntax is only supported in Python 3.6, use type comment instead
-[out]
-
-[case testNewSyntaxSyntaxError]
-# flags: --fast-parser --python-version 3.6
-x: int: int # E: invalid syntax
-[out]
-
-[case testNewSyntaxBasics]
-# flags: --fast-parser --python-version 3.6
-x: int
-x = 5
-y: int = 5
-
-a: str
-a = 5 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-b: str = 5 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-
-zzz: int
-zzz: str # E: Name 'zzz' already defined
-[out]
-
-[case testNewSyntaxWithDict]
-# flags: --fast-parser --python-version 3.6
-from typing import Dict, Any
-
-d: Dict[int, str] = {}
-d[42] = 'ab'
-d[42] = 42 # E: Incompatible types in assignment (expression has type "int", target has type "str")
-d['ab'] = 'ab' # E: Invalid index type "str" for "dict"; expected type "int"
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testNewSyntaxWithRevealType]
-# flags: --fast-parser --python-version 3.6
-from typing import Dict
-
-def tst_local(dct: Dict[int, T]) -> Dict[T, int]:
- ret: Dict[T, int] = {}
- return ret
-
-reveal_type(tst_local({1: 'a'})) # E: Revealed type is 'builtins.dict[builtins.str*, builtins.int]'
-[builtins fixtures/dict.pyi]
-[out]
-
-[case testNewSyntaxWithInstanceVars]
-# flags: --fast-parser --python-version 3.6
-class TstInstance:
- a: str
- def __init__(self) -> None:
- self.x: int
-
-TstInstance().x = 5
-TstInstance().x = 'ab' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-TstInstance().a = 5 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-TstInstance().a = 'ab'
-[out]
-
-[case testNewSyntaxWithClassVars]
-# flags: --fast-parser --strict-optional --python-version 3.6
-class CCC:
- a: str = None # E: Incompatible types in assignment (expression has type None, variable has type "str")
-[out]
-
-[case testNewSyntaxWithStrictOptional]
-# flags: --fast-parser --strict-optional --python-version 3.6
-strict: int
-strict = None # E: Incompatible types in assignment (expression has type None, variable has type "int")
-strict2: int = None # E: Incompatible types in assignment (expression has type None, variable has type "int")
-[out]
-
-[case testNewSyntaxWithStrictOptionalFunctions]
-# flags: --fast-parser --strict-optional --python-version 3.6
-def f() -> None:
- x: int
- x = None # E: Incompatible types in assignment (expression has type None, variable has type "int")
-[out]
-
-[case testNewSyntaxWithStrictOptionalClasses]
-# flags: --fast-parser --strict-optional --python-version 3.6
-class C:
- def meth(self) -> None:
- x: int = None # E: Incompatible types in assignment (expression has type None, variable has type "int")
- self.x: int = None # E: Incompatible types in assignment (expression has type None, variable has type "int")
-[out]
-
-[case testNewSyntaxSpecialAssign]
-# flags: --fast-parser --python-version 3.6
-class X:
- x: str
- x[0]: int
- x.x: int
-
-[out]
-main:4: error: Unexpected type declaration
-main:4: error: Unsupported target for indexed assignment
-main:5: error: Type cannot be declared in assignment to non-self attribute
-main:5: error: "str" has no attribute "x"
diff --git a/test-data/unit/check-newtype.test b/test-data/unit/check-newtype.test
deleted file mode 100644
index 7ebe696..0000000
--- a/test-data/unit/check-newtype.test
+++ /dev/null
@@ -1,324 +0,0 @@
--- Checks NewType(...)
-
--- Checks for basic functionality
-
-[case testNewTypePEP484Example1]
-from typing import NewType
-
-UserId = NewType('UserId', int)
-
-def name_by_id(user_id: UserId) -> str:
- return "foo"
-
-UserId('user') # E: Argument 1 to "UserId" has incompatible type "str"; expected "int"
-name_by_id(42) # E: Argument 1 to "name_by_id" has incompatible type "int"; expected "UserId"
-name_by_id(UserId(42))
-
-id = UserId(5)
-num = id + 1
-
-reveal_type(id) # E: Revealed type is '__main__.UserId'
-reveal_type(num) # E: Revealed type is 'builtins.int'
-[out]
-
-[case testNewTypePEP484Example2]
-from typing import NewType
-
-class PacketId:
- def __init__(self, major: int, minor: int) -> None:
- self._major = major
- self._minor = minor
-
-TcpPacketId = NewType('TcpPacketId', PacketId)
-
-packet = PacketId(100, 100)
-tcp_packet = TcpPacketId(packet)
-tcp_packet = TcpPacketId(127, 0)
-
-[out]
-main:12: error: Too many arguments for "TcpPacketId"
-main:12: error: Argument 1 to "TcpPacketId" has incompatible type "int"; expected "PacketId"
-
-[case testNewTypeWithTuples]
-from typing import NewType, Tuple
-TwoTuple = NewType('TwoTuple', Tuple[int, str])
-a = TwoTuple((3, "a"))
-b = TwoTuple(("a", 3)) # E: Argument 1 to "TwoTuple" has incompatible type "Tuple[str, int]"; expected "Tuple[int, str]"
-
-reveal_type(a[0]) # E: Revealed type is 'builtins.int'
-reveal_type(a[1]) # E: Revealed type is 'builtins.str'
-[builtins fixtures/tuple.pyi]
-[out]
-
-[case testNewTypeWithLists]
-from typing import NewType, List
-UserId = NewType('UserId', int)
-IdList = NewType('IdList', List[UserId])
-
-bad1 = IdList([1]) # E: List item 0 has incompatible type "int"
-
-foo = IdList([])
-foo.append(3) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "UserId"
-foo.append(UserId(3))
-foo.extend([UserId(1), UserId(2), UserId(3)])
-foo.extend(IdList([UserId(1), UserId(2), UserId(3)]))
-bar = IdList([UserId(2)])
-
-baz = foo + bar
-reveal_type(foo) # E: Revealed type is '__main__.IdList'
-reveal_type(bar) # E: Revealed type is '__main__.IdList'
-reveal_type(baz) # E: Revealed type is 'builtins.list[__main__.UserId*]'
-
-[builtins fixtures/list.pyi]
-[out]
-
-[case testNewTypeWithGenerics]
-from typing import TypeVar, Generic, NewType, Any
-
-T = TypeVar('T')
-
-class Base(Generic[T]):
- def __init__(self, item: T) -> None:
- self.item = item
-
- def getter(self) -> T:
- return self.item
-
-Derived1 = NewType('Derived1', Base[str])
-Derived2 = NewType('Derived2', Base) # Implicit 'Any'
-Derived3 = NewType('Derived3', Base[Any]) # Explicit 'Any'
-
-Derived1(Base(1)) # E: Argument 1 to "Base" has incompatible type "int"; expected "str"
-Derived1(Base('a'))
-Derived2(Base(1))
-Derived2(Base('a'))
-Derived3(Base(1))
-Derived3(Base('a'))
-
-reveal_type(Derived1(Base('a')).getter()) # E: Revealed type is 'builtins.str*'
-reveal_type(Derived3(Base('a')).getter()) # E: Revealed type is 'Any'
-[out]
-
-[case testNewTypeWithNamedTuple]
-from collections import namedtuple
-from typing import NewType, NamedTuple
-
-Vector1 = namedtuple('Vector1', ['x', 'y'])
-Point1 = NewType('Point1', Vector1)
-p1 = Point1(Vector1(1, 2))
-reveal_type(p1.x) # E: Revealed type is 'Any'
-reveal_type(p1.y) # E: Revealed type is 'Any'
-
-Vector2 = NamedTuple('Vector2', [('x', int), ('y', int)])
-Point2 = NewType('Point2', Vector2)
-p2 = Point2(Vector2(1, 2))
-reveal_type(p2.x) # E: Revealed type is 'builtins.int'
-reveal_type(p2.y) # E: Revealed type is 'builtins.int'
-
-class Vector3:
- def __init__(self, x: int, y: int) -> None:
- self.x = x
- self.y = y
-Point3 = NewType('Point3', Vector3)
-p3 = Point3(Vector3(1, 3))
-reveal_type(p3.x) # E: Revealed type is 'builtins.int'
-reveal_type(p3.y) # E: Revealed type is 'builtins.int'
-[out]
-
-[case testNewTypeWithCasts]
-from typing import NewType, cast
-UserId = NewType('UserId', int)
-foo = UserId(3)
-foo = cast(UserId, 3)
-foo = cast(UserId, "foo")
-foo = cast(UserId, UserId(4))
-[out]
-
-[case testNewTypeWithTypeAliases]
-from typing import NewType
-Foo = int
-Bar = NewType('Bar', Foo)
-Bar2 = Bar
-
-def func1(x: Foo) -> Bar:
- return Bar(x)
-
-def func2(x: int) -> Bar:
- return Bar(x)
-
-def func3(x: Bar2) -> Bar:
- return x
-
-x = Bar(42)
-y = Bar2(42)
-
-y = func3(x)
-[out]
-
-
--- Make sure NewType works as expected in a variety of different scopes/across files
-
-[case testNewTypeInLocalScope]
-from typing import NewType
-A = NewType('A', int)
-a = A(3)
-
-def func() -> None:
- A = NewType('A', str)
- B = NewType('B', str)
-
- a = A(3) # E: Argument 1 to "A" has incompatible type "int"; expected "str"
- a = A('xyz')
- b = B('xyz')
-
-class MyClass:
- C = NewType('C', float)
-
- def foo(self) -> 'MyClass.C':
- return MyClass.C(3.2)
-
-b = A(3)
-c = MyClass.C(3.5)
-[out]
-
-[case testNewTypeInMultipleFiles]
-import a
-import b
-list1 = [a.UserId(1), a.UserId(2)]
-list1.append(b.UserId(3)) # E: Argument 1 to "append" of "list" has incompatible type "b.UserId"; expected "a.UserId"
-
-[file a.py]
-from typing import NewType
-UserId = NewType('UserId', int)
-
-[file b.py]
-from typing import NewType
-UserId = NewType('UserId', int)
-
-[builtins fixtures/list.pyi]
-[out]
-
-[case testNewTypeWithIncremental]
-import m
-
-[file m.py]
-from typing import NewType
-
-UserId = NewType('UserId', int)
-
-def name_by_id(user_id: UserId) -> str:
- return "foo"
-
-name_by_id(UserId(42))
-
-id = UserId(5)
-num = id + 1
-
-[file m.py.next]
-from typing import NewType
-
-UserId = NewType('UserId', int)
-
-def name_by_id(user_id: UserId) -> str:
- return "foo"
-
-name_by_id(UserId(42))
-
-id = UserId(5)
-num = id + 1
-
-reveal_type(id)
-reveal_type(num)
-[rechecked m]
-[stale]
-[out1]
-[out2]
-tmp/m.py:13: error: Revealed type is 'm.UserId'
-tmp/m.py:14: error: Revealed type is 'builtins.int'
-
-
--- Check misuses of NewType fail
-
-[case testNewTypeBadInitializationFails]
-from typing import NewType
-
-a = NewType('b', int) # E: String argument 1 'b' to NewType(...) does not match variable name 'a'
-b = NewType('b', 3) # E: Argument 2 to NewType(...) must be a valid type
-c = NewType(2, int) # E: Argument 1 to NewType(...) must be a string literal
-foo = "d"
-d = NewType(foo, int) # E: Argument 1 to NewType(...) must be a string literal
-e = NewType(name='e', tp=int) # E: NewType(...) expects exactly two positional arguments
-f = NewType('f', tp=int) # E: NewType(...) expects exactly two positional arguments
-[out]
-
-[case testNewTypeWithAnyFails]
-from typing import NewType, Any
-A = NewType('A', Any) # E: Argument 2 to NewType(...) must be subclassable (got Any)
-[out]
-
-[case testNewTypeWithUnionsFails]
-from typing import NewType, Union
-Foo = NewType('Foo', Union[int, float]) # E: Argument 2 to NewType(...) must be subclassable (got Union[builtins.int, builtins.float])
-[out]
-
-[case testNewTypeWithTypeTypeFails]
-from typing import NewType, Type
-Foo = NewType('Foo', Type[int]) # E: Argument 2 to NewType(...) must be subclassable (got Type[builtins.int])
-a = Foo(type(3))
-[builtins fixtures/args.pyi]
-[out]
-
-[case testNewTypeWithTypeVarsFails]
-from typing import NewType, TypeVar, List
-T = TypeVar('T')
-A = NewType('A', T)
-B = NewType('B', List[T])
-[builtins fixtures/list.pyi]
-[out]
-main:3: error: Argument 2 to NewType(...) must be subclassable (got T?)
-main:3: error: Invalid type "__main__.T"
-main:4: error: Invalid type "__main__.T"
-
-[case testNewTypeWithNewTypeFails]
-from typing import NewType
-A = NewType('A', int)
-B = NewType('B', A) # E: Argument 2 to NewType(...) cannot be another NewType
-C = A
-D = C
-E = NewType('E', D) # E: Argument 2 to NewType(...) cannot be another NewType
-[out]
-
-[case testNewTypeRedefiningVariablesFails]
-from typing import NewType
-
-a = 3
-a = NewType('a', int)
-
-b = NewType('b', int)
-b = NewType('b', float) # this line throws two errors
-
-c = NewType('c', str) # type: str
-[out]
-main:4: error: Cannot redefine 'a' as a NewType
-main:7: error: Invalid assignment target
-main:7: error: Cannot redefine 'b' as a NewType
-main:9: error: Cannot declare the type of a NewType declaration
-
-[case testNewTypeAddingExplicitTypesFails]
-from typing import NewType
-UserId = NewType('UserId', int)
-
-a = 3 # type: UserId # E: Incompatible types in assignment (expression has type "int", variable has type "UserId")
-[out]
-
-[case testNewTypeTestSubclassingFails]
-from typing import NewType
-class A: pass
-B = NewType('B', A)
-class C(B): pass # E: Cannot subclass NewType
-[out]
-
-[case testNewTypeAny]
-from typing import NewType
-Any = NewType('Any', int)
-Any(5)
diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test
deleted file mode 100644
index 09fb4b5..0000000
--- a/test-data/unit/check-optional.test
+++ /dev/null
@@ -1,546 +0,0 @@
--- Tests for strict Optional behavior
-
-[case testImplicitNoneType]
-x = None
-x() # E: None not callable
-
-[case testExplicitNoneType]
-x = None # type: None
-x() # E: None not callable
-
-[case testNoneMemberOfOptional]
-from typing import Optional
-x = None # type: Optional[int]
-
-[case testTypeMemberOfOptional]
-from typing import Optional
-x = 0 # type: Optional[int]
-
-[case testNoneNotMemberOfType]
-x = None # type: int
-[out]
-main:1: error: Incompatible types in assignment (expression has type None, variable has type "int")
-
-[case testTypeNotMemberOfNone]
-x = 0 # type: None
-[out]
-main:1: error: Incompatible types in assignment (expression has type "int", variable has type None)
-
-[case testOptionalNotMemberOfType]
-from typing import Optional
-def f(a: int) -> None: pass
-x = None # type: Optional[int]
-f(x) # E: Argument 1 to "f" has incompatible type "Optional[int]"; expected "int"
-
-[case testIsinstanceCases]
-from typing import Optional
-x = None # type: Optional[int]
-if isinstance(x, int):
- reveal_type(x) # E: Revealed type is 'builtins.int'
-else:
- reveal_type(x) # E: Revealed type is 'builtins.None'
-[builtins fixtures/isinstance.pyi]
-
-[case testIfCases]
-from typing import Optional
-x = None # type: Optional[int]
-if x:
- reveal_type(x) # E: Revealed type is 'builtins.int'
-else:
- reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.None]'
-[builtins fixtures/bool.pyi]
-
-[case testIfNotCases]
-from typing import Optional
-x = None # type: Optional[int]
-if not x:
- reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.None]'
-else:
- reveal_type(x) # E: Revealed type is 'builtins.int'
-[builtins fixtures/bool.pyi]
-
-[case testIsNotNoneCases]
-from typing import Optional
-x = None # type: Optional[int]
-if x is not None:
- reveal_type(x) # E: Revealed type is 'builtins.int'
-else:
- reveal_type(x) # E: Revealed type is 'builtins.None'
-[builtins fixtures/bool.pyi]
-
-[case testIsNoneCases]
-from typing import Optional
-x = None # type: Optional[int]
-if x is None:
- reveal_type(x) # E: Revealed type is 'builtins.None'
-else:
- reveal_type(x) # E: Revealed type is 'builtins.int'
-[builtins fixtures/bool.pyi]
-
-[case testOrCases]
-from typing import Optional
-x = None # type: Optional[str]
-y1 = x or 'a'
-reveal_type(y1) # E: Revealed type is 'builtins.str'
-y2 = x or 1
-reveal_type(y2) # E: Revealed type is 'Union[builtins.str, builtins.int]'
-z1 = 'a' or x
-reveal_type(z1) # E: Revealed type is 'Union[builtins.str, builtins.None]'
-z2 = int() or x
-reveal_type(z2) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.None]'
-
-[case testAndCases]
-from typing import Optional
-x = None # type: Optional[str]
-y1 = x and 'b'
-reveal_type(y1) # E: Revealed type is 'Union[builtins.str, builtins.None]'
-y2 = x and 1 # x could be '', so...
-reveal_type(y2) # E: Revealed type is 'Union[builtins.str, builtins.None, builtins.int]'
-z1 = 'b' and x
-reveal_type(z1) # E: Revealed type is 'Union[builtins.str, builtins.None]'
-z2 = int() and x
-reveal_type(z2) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.None]'
-
-[case testLambdaReturningNone]
-f = lambda: None
-x = f() # E: Function does not return a value
-
-[case testNoneArgumentType]
-def f(x: None) -> None: pass
-f(None)
-
-[case testInferOptionalFromDefaultNone]
-def f(x: int = None) -> None:
- x + 1 # E: Unsupported left operand type for + (some union)
-f(None)
-[out]
-
-[case testInferOptionalFromDefaultNoneWithFastParser]
-# flags: --fast-parser
-def f(x: int = None) -> None:
- x + 1 # E: Unsupported left operand type for + (some union)
-f(None)
-[out]
-
-[case testInferOptionalFromDefaultNoneComment]
-def f(x=None):
- # type: (int) -> None
- x + 1 # E: Unsupported left operand type for + (some union)
-f(None)
-[out]
-
-[case testInferOptionalFromDefaultNoneCommentWithFastParser]
-# flags: --fast-parser
-def f(x=None):
- # type: (int) -> None
- x + 1 # E: Unsupported left operand type for + (some union)
-f(None)
-[out]
-
-[case testInferOptionalType]
-x = None
-if bool():
- # scope limit assignment
- x = 1
- # in scope of the assignment, x is an int
- reveal_type(x) # E: Revealed type is 'builtins.int'
-# out of scope of the assignment, it's an Optional[int]
-reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.None]'
-[builtins fixtures/bool.pyi]
-
-[case testInferOptionalTypeLocallyBound]
-x = None
-x = 1
-reveal_type(x) # E: Revealed type is 'builtins.int'
-
-
-[case testInferOptionalTypeFromOptional]
-from typing import Optional
-y = None # type: Optional[int]
-x = None
-x = y
-reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.None]'
-
-[case testInferOptionalListType]
-x = [None]
-x.append(1) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected None
-[builtins fixtures/list.pyi]
-
-[case testInferNonOptionalListType]
-x = []
-x.append(1)
-x() # E: List[int] not callable
-[builtins fixtures/list.pyi]
-
-[case testInferOptionalDictKeyValueTypes]
-x = {None: None}
-x["bar"] = 1
-[builtins fixtures/dict.pyi]
-[out]
-main:2: error: Invalid index type "str" for "dict"; expected type None
-main:2: error: Incompatible types in assignment (expression has type "int", target has type None)
-
-[case testInferNonOptionalDictType]
-x = {}
-x["bar"] = 1
-x() # E: Dict[str, int] not callable
-[builtins fixtures/dict.pyi]
-
-[case testNoneClassVariable]
-from typing import Optional
-class C:
- x = None # type: int
- def __init__(self) -> None:
- self.x = 0
-
-[case testNoneClassVariableInInit]
-from typing import Optional
-class C:
- x = None # type: int
- def __init__(self) -> None:
- self.x = None # E: Incompatible types in assignment (expression has type None, variable has type "int")
-[out]
-
-[case testMultipleAssignmentNoneClassVariableInInit]
-from typing import Optional
-class C:
- x, y = None, None # type: int, str
- def __init__(self) -> None:
- self.x = None # E: Incompatible types in assignment (expression has type None, variable has type "int")
- self.y = None # E: Incompatible types in assignment (expression has type None, variable has type "str")
-[out]
-
-[case testOverloadWithNone]
-from typing import overload
-@overload
-def f(x: None) -> str: pass
-@overload
-def f(x: int) -> int: pass
-reveal_type(f(None)) # E: Revealed type is 'builtins.str'
-reveal_type(f(0)) # E: Revealed type is 'builtins.int'
-
-[case testOptionalTypeOrTypePlain]
-from typing import Optional
-def f(a: Optional[int]) -> int:
- return a or 0
-[out]
-
-[case testOptionalTypeOrTypeTypeVar]
-from typing import Optional, TypeVar
-T = TypeVar('T')
-def f(a: Optional[T], b: T) -> T:
- return a or b
-[out]
-
-[case testOptionalTypeOrTypeBothOptional]
-from typing import Optional
-def f(a: Optional[int], b: Optional[int]) -> None:
- reveal_type(a or b)
-def g(a: int, b: Optional[int]) -> None:
- reveal_type(a or b)
-[out]
-main:3: error: Revealed type is 'Union[builtins.int, builtins.None]'
-main:5: error: Revealed type is 'Union[builtins.int, builtins.None]'
-
-[case testOptionalTypeOrTypeComplexUnion]
-from typing import Union
-def f(a: Union[int, str, None]) -> None:
- reveal_type(a or 'default')
-[out]
-main:3: error: Revealed type is 'Union[builtins.int, builtins.str]'
-
-[case testOptionalTypeOrTypeNoTriggerPlain]
-from typing import Optional
-def f(a: Optional[int], b: int) -> int:
- return b or a
-[out]
-main:3: error: Incompatible return value type (got "Optional[int]", expected "int")
-
-[case testOptionalTypeOrTypeNoTriggerTypeVar]
-from typing import Optional, TypeVar
-T = TypeVar('T')
-def f(a: Optional[T], b: T) -> T:
- return b or a
-[out]
-main:4: error: Incompatible return value type (got "Optional[T]", expected "T")
-
-[case testNoneOrStringIsString]
-def f() -> str:
- a = None
- b = ''
- return a or b
-[out]
-
-[case testNoneOrTypeVarIsTypeVar]
-from typing import TypeVar
-T = TypeVar('T')
-def f(b: T) -> T:
- a = None
- return a or b
-[out]
-
-[case testYieldNothingInFunctionReturningGenerator]
-from typing import Generator
-def f() -> Generator[None, None, None]:
- yield
-[out]
-
-[case testNoneAndStringIsNone]
-a = None
-b = "foo"
-reveal_type(a and b) # E: Revealed type is 'builtins.None'
-
-[case testNoneMatchesObjectInOverload]
-import a
-a.f(None)
-
-[file a.pyi]
-from typing import overload
-@overload
-def f() -> None: ...
-@overload
-def f(o: object) -> None: ...
-
-[case testGenericSubclassReturningNone]
-from typing import Generic, TypeVar
-
-T = TypeVar('T')
-
-class Base(Generic[T]):
- def f(self) -> T:
- pass
-
-class SubNone(Base[None]):
- def f(self) -> None:
- pass
-
-class SubInt(Base[int]):
- def f(self) -> int:
- return 1
-
-[case testUseOfNoneReturningFunction]
-from typing import Optional
-def f() -> None:
- pass
-
-def g(x: Optional[int]) -> int:
- pass
-
-x = f() # E: Function does not return a value
-f() + 1 # E: Function does not return a value
-g(f()) # E: Function does not return a value
-
-[case testEmptyReturn]
-def f() -> None:
- return
-
-[case testReturnNone]
-def f() -> None:
- return None
-
-[case testNoneCallable]
-from typing import Callable
-def f() -> None: pass
-x = f # type: Callable[[], None]
-
-[case testOptionalCallable]
-from typing import Callable, Optional
-T = Optional[Callable[..., None]]
-
-[case testAnyTypeInPartialTypeList]
-# flags: --check-untyped-defs
-def f(): ...
-
-def lookup_field(name, obj):
- try:
- pass
- except:
- attr = f()
- else:
- attr = None
-
-[case testTernaryWithNone]
-reveal_type(None if bool() else 0) # E: Revealed type is 'Union[builtins.int, builtins.None]'
-[builtins fixtures/bool.pyi]
-
-[case testListWithNone]
-reveal_type([0, None, 0]) # E: Revealed type is 'builtins.list[Union[builtins.int, builtins.None]]'
-[builtins fixtures/list.pyi]
-
-[case testOptionalWhitelistSuppressesOptionalErrors]
-# flags: --strict-optional-whitelist
-import a
-import b
-[file a.py]
-from typing import Optional
-x = None # type: Optional[str]
-x + "foo"
-
-[file b.py]
-from typing import Optional
-x = None # type: Optional[int]
-x + 1
-
-[case testOptionalWhitelistPermitsOtherErrors]
-# flags: --strict-optional-whitelist
-import a
-import b
-[file a.py]
-from typing import Optional
-x = None # type: Optional[str]
-x + "foo"
-
-[file b.py]
-from typing import Optional
-x = None # type: Optional[int]
-x + 1
-1 + "foo"
-[out]
-tmp/b.py:4: error: Unsupported operand types for + ("int" and "str")
-
-[case testOptionalWhitelistPermitsWhitelistedFiles]
-# flags: --strict-optional-whitelist **/a.py
-import a
-import b
-[file a.py]
-from typing import Optional
-x = None # type: Optional[str]
-x + "foo"
-
-[file b.py]
-from typing import Optional
-x = None # type: Optional[int]
-x + 1
-[out]
-tmp/a.py:3: error: Unsupported left operand type for + (some union)
-
-[case testNoneContextInference]
-from typing import Dict, List
-def f() -> List[None]:
- return []
-def g() -> Dict[None, None]:
- return {}
-[builtins fixtures/dict.pyi]
-
-[case testRaiseFromNone]
-raise BaseException from None
-[builtins fixtures/exception.pyi]
-
-[case testOptionalNonPartialTypeWithNone]
-from typing import Generator
-def f() -> Generator[str, None, None]: pass
-x = f()
-reveal_type(x) # E: Revealed type is 'typing.Generator[builtins.str, builtins.None, builtins.None]'
-l = [f()]
-reveal_type(l) # E: Revealed type is 'builtins.list[typing.Generator*[builtins.str, builtins.None, builtins.None]]'
-[builtins fixtures/list.pyi]
-
-[case testNoneListTernary]
-x = [None] if "" else [1] # E: List item 0 has incompatible type "int"
-[builtins fixtures/list.pyi]
-
-[case testInferEqualsNotOptional]
-from typing import Optional
-x = '' # type: Optional[str]
-if x == '<string>':
- reveal_type(x) # E: Revealed type is 'builtins.str'
-else:
- reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.None]'
-[builtins fixtures/ops.pyi]
-
-[case testInferEqualsNotOptionalWithUnion]
-from typing import Union
-x = '' # type: Union[str, int, None]
-if x == '<string>':
- reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.int]'
-else:
- reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]'
-[builtins fixtures/ops.pyi]
-
-[case testInferEqualsNotOptionalWithOverlap]
-from typing import Union
-x = '' # type: Union[str, int, None]
-if x == object():
- reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.int]'
-else:
- reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]'
-[builtins fixtures/ops.pyi]
-
-[case testInferEqualsStillOptionalWithNoOverlap]
-from typing import Optional
-x = '' # type: Optional[str]
-if x == 0:
- reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.None]'
-else:
- reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.None]'
-[builtins fixtures/ops.pyi]
-
-[case testInferEqualsStillOptionalWithBothOptional]
-from typing import Union
-x = '' # type: Union[str, int, None]
-y = '' # type: Union[str, None]
-if x == y:
- reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]'
-else:
- reveal_type(x) # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]'
-[builtins fixtures/ops.pyi]
-
-[case testWarnNoReturnWorksWithStrictOptional]
-# flags: --warn-no-return
-def f() -> None:
- 1 + 1 # no error
-
-def g() -> int:
- 1 + 1 #
-[out]
-main:5: note: Missing return statement
-
-[case testGenericTypeAliasesOptional]
-from typing import TypeVar, Generic, Optional
-T = TypeVar('T')
-class Node(Generic[T]):
- def __init__(self, x: T) -> None:
- self.x = x
-
-ONode = Optional[Node[T]]
-def f(x: T) -> ONode[T]:
- if 1 > 0:
- return Node(x)
- else:
- return None
-
-x = None # type: ONode[int]
-x = f(1)
-x = f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-
-x.x = 1 # E: Some element of union has no attribute "x"
-if x is not None:
- x.x = 1 # OK here
-
-[builtins fixtures/ops.pyi]
-
-[case testOptionalLambdaInference]
-from typing import Optional, Callable
-f = None # type: Optional[Callable[[int], None]]
-f = lambda x: None
-f(0)
-[builtins fixtures/function.pyi]
-
-[case testUnionSimplificationWithStrictOptional]
-from typing import Any, TypeVar, Union
-class C(Any): pass
-T = TypeVar('T')
-S = TypeVar('S')
-def u(x: T, y: S) -> Union[S, T]: pass
-a = None # type: Any
-
-# Test both orders
-reveal_type(u(C(), None)) # E: Revealed type is 'Union[builtins.None, __main__.C*]'
-reveal_type(u(None, C())) # E: Revealed type is 'Union[__main__.C*, builtins.None]'
-
-# This will be fixed later
-reveal_type(u(a, None)) # E: Revealed type is 'Any'
-reveal_type(u(None, a)) # E: Revealed type is 'Any'
-
-reveal_type(u(1, None)) # E: Revealed type is 'Union[builtins.None, builtins.int*]'
-reveal_type(u(None, 1)) # E: Revealed type is 'Union[builtins.int*, builtins.None]'
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
deleted file mode 100644
index e173fb2..0000000
--- a/test-data/unit/check-overloading.test
+++ /dev/null
@@ -1,759 +0,0 @@
--- Test cases for function overloading
-
-
-[case testTypeCheckOverloadedFunctionBody]
-from typing import overload
-@overload
-def f(x: 'A'):
- x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- x = A()
-@overload
-def f(x: 'B'):
- x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- x = B()
-class A: pass
-class B: pass
-[out]
-
-[case testTypeCheckOverloadedMethodBody]
-from typing import overload
-class A:
- @overload
- def f(self, x: 'A'):
- x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- x = A()
- @overload
- def f(self, x: 'B'):
- x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
- x = B()
-class B: pass
-[out]
-
-[case testCallToOverloadedFunction]
-from typing import overload
-f(C()) # E: No overload variant of "f" matches argument types [__main__.C]
-f(A())
-f(B())
-
-@overload
-def f(x: 'A') -> None: pass
-@overload
-def f(x: 'B') -> None: pass
-
-class A: pass
-class B: pass
-class C: pass
-
-[case testOverloadedFunctionReturnValue]
-from typing import overload
-a, b = None, None # type: (A, B)
-b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-a = f(a)
-b = f(b)
-
-@overload
-def f(x: 'A') -> 'A': pass
-@overload
-def f(x: 'B') -> 'B': pass
-class A: pass
-class B: pass
-
-[case testCallToOverloadedMethod]
-from typing import overload
-A().f(C()) # E: No overload variant of "f" of "A" matches argument types [__main__.C]
-A().f(A())
-A().f(B())
-
-class A:
- @overload
- def f(self, x: 'A') -> None: pass
- @overload
- def f(self, x: 'B') -> None: pass
-
-class B: pass
-class C: pass
-
-[case testOverloadedMethodReturnValue]
-from typing import overload
-a, b = None, None # type: (A, B)
-b = a.f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = a.f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-a = a.f(a)
-b = a.f(b)
-
-class A:
- @overload
- def f(self, x: 'A') -> 'A': pass
- @overload
- def f(self, x: 'B') -> 'B': pass
-class B: pass
-
-[case testOverloadsWithDifferentArgumentCounts]
-from typing import overload
-a, b = None, None # type: (A, B)
-a = f(a)
-b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-f(b) # E: No overload variant of "f" matches argument types [__main__.B]
-b = f(b, a)
-a = f(b, a) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-f(a, a) # E: No overload variant of "f" matches argument types [__main__.A, __main__.A]
-f(b, b) # E: No overload variant of "f" matches argument types [__main__.B, __main__.B]
-
-@overload
-def f(x: 'A') -> 'A': pass
-@overload
-def f(x: 'B', y: 'A') -> 'B': pass
-class A: pass
-class B: pass
-
-[case testGenericOverloadVariant]
-from typing import overload, TypeVar, Generic
-t = TypeVar('t')
-ab, ac, b, c = None, None, None, None # type: (A[B], A[C], B, C)
-b = f(ab)
-c = f(ac)
-b = f(ac) # E: Incompatible types in assignment (expression has type "C", variable has type "B")
-b = f(b)
-c = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "C")
-@overload
-def f(x: 'A[t]') -> t: pass
-@overload
-def f(x: 'B') -> 'B': pass
-class A(Generic[t]): pass
-class B: pass
-class C: pass
-
-[case testOverloadedInit]
-from typing import overload
-a, b = None, None # type: (A, B)
-a = A(a)
-a = A(b)
-a = A(object()) # E: No overload variant of "A" matches argument types [builtins.object]
-
-class A:
- @overload
- def __init__(self, a: 'A') -> None: pass
- @overload
- def __init__(self, b: 'B') -> None: pass
-class B: pass
-
-[case testIntersectionTypeCompatibility]
-from typing import overload, Callable
-o = None # type: object
-a = None # type: A
-
-a = f # E: Incompatible types in assignment (expression has type overloaded function, variable has type "A")
-o = f
-
-@overload
-def f(a: 'A') -> None: pass
-@overload
-def f(a: Callable[[], None]) -> None: pass
-class A: pass
-
-[case testCompatibilityOfIntersectionTypeObjectWithStdType]
-from typing import overload
-t, a = None, None # type: (type, A)
-
-a = A # E: Incompatible types in assignment (expression has type "A" (type object), variable has type "A")
-t = A
-
-class A:
- @overload
- def __init__(self, a: 'A') -> None: pass
- @overload
- def __init__(self, a: 'B') -> None: pass
-class B: pass
-
-[case testOverloadedGetitem]
-from typing import overload
-a, b = None, None # type: int, str
-a = A()[a]
-b = A()[a] # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-b = A()[b]
-a = A()[b] # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-class A:
- @overload
- def __getitem__(self, a: int) -> int: pass
- @overload
- def __getitem__(self, b: str) -> str: pass
-
-[case testOverloadedGetitemWithGenerics]
-from typing import TypeVar, Generic, overload
-t = TypeVar('t')
-a, b, c = None, None, None # type: (A, B, C[A])
-a = c[a]
-b = c[a] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = c[b]
-b = c[b] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-class C(Generic[t]):
- @overload
- def __getitem__(self, a: 'A') -> t: pass
- @overload
- def __getitem__(self, b: 'B') -> t: pass
-class A: pass
-class B: pass
-
-[case testImplementingOverloadedMethod]
-from typing import overload
-from abc import abstractmethod, ABCMeta
-
-class I(metaclass=ABCMeta):
- @overload
- @abstractmethod
- def f(self) -> None: pass
- @overload
- @abstractmethod
- def f(self, a: 'A') -> None: pass
-class A(I):
- @overload
- def f(self) -> None: pass
- @overload
- def f(self, a: 'A') -> None: pass
-
-[case testOverloadWithFunctionType]
-from typing import overload, Callable
-class A: pass
-@overload
-def f(x: A) -> None: pass
-@overload
-def f(x: Callable[[], None]) -> None: pass
-
-f(A())
-[builtins fixtures/function.pyi]
-
-[case testVarArgsOverload]
-from typing import overload, Any
-@overload
-def f(x: 'A', *more: Any) -> 'A': pass
-@overload
-def f(x: 'B', *more: Any) -> 'A': pass
-f(A())
-f(A(), A, A)
-f(B())
-f(B(), B)
-f(B(), B, B)
-f(object()) # E: No overload variant of "f" matches argument types [builtins.object]
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-
-[case testVarArgsOverload2]
-from typing import overload
-@overload
-def f(x: 'A', *more: 'B') -> 'A': pass
-@overload
-def f(x: 'B', *more: 'A') -> 'A': pass
-f(A(), B())
-f(A(), B(), B())
-f(A(), A(), B()) # E: No overload variant of "f" matches argument types [__main__.A, __main__.A, __main__.B]
-f(A(), B(), A()) # E: No overload variant of "f" matches argument types [__main__.A, __main__.B, __main__.A]
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-
-[case testOverloadWithTypeObject]
-from typing import overload
-@overload
-def f(a: 'A', t: type) -> None: pass
-@overload
-def f(a: 'B', t: type) -> None: pass
-f(A(), B)
-f(B(), A)
-class A: pass
-class B: pass
-[builtins fixtures/function.pyi]
-
-[case testOverloadedInitAndTypeObjectInOverload]
-from typing import overload
-@overload
-def f(t: type) -> 'A': pass
-@overload
-def f(t: 'A') -> 'B': pass
-a, b = None, None # type: (A, B)
-a = f(A)
-b = f(a)
-b = f(A) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = f(a) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-class A:
- @overload
- def __init__(self) -> None: pass
- @overload
- def __init__(self, a: 'A') -> None: pass
-class B:
- pass
-
-[case testOverlappingErasedSignatures]
-from typing import overload, List
-@overload
-def f(a: List[int]) -> int: pass
-@overload
-def f(a: List[str]) -> int: pass
-list_int = [] # type: List[int]
-list_str = [] # type: List[str]
-list_object = [] # type: List[object]
-n = f(list_int)
-m = f(list_str)
-n = 1
-m = 1
-n = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-m = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-f(list_object) # E: Argument 1 to "f" has incompatible type List[object]; expected List[int]
-[builtins fixtures/list.pyi]
-
-[case testOverlappingOverloadSignatures]
-from typing import overload
-class A: pass
-class B(A): pass
-@overload
-def f(x: B) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
-@overload
-def f(x: A) -> str: pass
-
-[case testContravariantOverlappingOverloadSignatures]
-from typing import overload
-class A: pass
-class B(A): pass
-@overload
-def f(x: A) -> A: pass
-@overload
-def f(x: B) -> B: pass # This is more specific than the first item, and thus
- # will never be called.
-
-[case testPartiallyCovariantOverlappingOverloadSignatures]
-from typing import overload
-class A: pass
-class B(A): pass
-@overload
-def f(x: B) -> A: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
-@overload
-def f(x: A) -> B: pass
-
-[case testPartiallyContravariantOverloadSignatures]
-from typing import overload
-class A: pass
-class B(A): pass
-@overload
-def g(x: A) -> int: pass # Fine, since A is a supertype of B.
-@overload
-def g(x: B) -> str: pass
-
-[case testCovariantOverlappingOverloadSignatures]
-from typing import overload
-class A: pass
-class B(A): pass
-@overload
-def g(x: B) -> B: pass
-@overload
-def g(x: A) -> A: pass
-
-[case testCovariantOverlappingOverloadSignaturesWithSomeSameArgTypes]
-from typing import overload
-class A: pass
-class B(A): pass
-@overload
-def g(x: int, y: B) -> B: pass
-@overload
-def g(x: int, y: A) -> A: pass
-
-[case testCovariantOverlappingOverloadSignaturesWithAnyType]
-from typing import Any, overload
-@overload
-def g(x: int) -> int: pass
-@overload
-def g(x: Any) -> Any: pass
-
-[case testContravariantOverlappingOverloadSignaturesWithAnyType]
-from typing import Any, overload
-@overload
-def g(x: Any) -> Any: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
-@overload
-def g(x: int) -> int: pass
-
-[case testOverloadedLtAndGtMethods]
-from typing import overload
-class A:
- def __lt__(self, x: A) -> int: pass
- def __gt__(self, x: A) -> int: pass
-class B:
- @overload
- def __lt__(self, x: B) -> int: pass
- @overload
- def __lt__(self, x: A) -> int: pass
- @overload
- def __gt__(self, x: B) -> int: pass
- @overload
- def __gt__(self, x: A) -> int: pass
-A() < A()
-A() < B()
-B() < A()
-B() < B()
-A() < object() # E: Unsupported operand types for < ("A" and "object")
-B() < object() # E: No overload variant of "__lt__" of "B" matches argument types [builtins.object]
-
-[case testOverloadedForwardMethodAndCallingReverseMethod]
-from typing import overload
-class A:
- @overload
- def __add__(self, x: 'A') -> int: pass
- @overload
- def __add__(self, x: int) -> int: pass
-class B:
- def __radd__(self, x: A) -> int: pass
-A() + A()
-A() + 1
-A() + B()
-A() + '' # E: No overload variant of "__add__" of "A" matches argument types [builtins.str]
-
-[case testOverrideOverloadedMethodWithMoreGeneralArgumentTypes]
-from typing import overload
-
-class IntSub(int): pass
-
-class StrSub(str): pass
-class A:
- @overload
- def f(self, x: IntSub) -> int: return 0
- @overload
- def f(self, x: StrSub) -> str: return ''
-class B(A):
- @overload
- def f(self, x: int) -> int: return 0
- @overload
- def f(self, x: str) -> str: return ''
-[out]
-
-[case testOverrideOverloadedMethodWithMoreSpecificArgumentTypes]
-from typing import overload
-
-class IntSub(int): pass
-
-class StrSub(str): pass
-class A:
- @overload
- def f(self, x: int) -> int: return 0
- @overload
- def f(self, x: str) -> str: return ''
-class B(A):
- @overload
- def f(self, x: IntSub) -> int: return 0
- @overload
- def f(self, x: str) -> str: return ''
-class C(A):
- @overload
- def f(self, x: int) -> int: return 0
- @overload
- def f(self, x: StrSub) -> str: return ''
-class D(A):
- @overload
- def f(self, x: int) -> int: return 0
- @overload
- def f(self, x: str) -> str: return ''
-[out]
-main:12: error: Signature of "f" incompatible with supertype "A"
-main:17: error: Signature of "f" incompatible with supertype "A"
-
-[case testOverloadingAndDucktypeCompatibility]
-from typing import overload, _promote
-
-class A: pass
-
-@_promote(A)
-class B: pass
-
-@overload
-def f(n: B) -> B:
- return n
-@overload
-def f(n: A) -> A:
- return n
-
-f(B()) + 'x' # E: Unsupported left operand type for + ("B")
-f(A()) + 'x' # E: Unsupported left operand type for + ("A")
-
-[case testOverloadingAndIntFloatSubtyping]
-from typing import overload
-@overload
-def f(x: float) -> None: pass
-@overload
-def f(x: str) -> None: pass
-f(1.1)
-f('')
-f(1)
-f(()) # E: No overload variant of "f" matches argument types [Tuple[]]
-[builtins fixtures/primitives.pyi]
-[out]
-
-[case testOverloadingVariableInputs]
-from typing import overload
-@overload
-def f(x: int, y: int) -> None: pass
-@overload
-def f(x: int) -> None: pass
-f(1)
-f(1, 2)
-z = (1, 2)
-f(*z)
-[builtins fixtures/primitives.pyi]
-[out]
-
-[case testTypeInferenceSpecialCaseWithOverloading]
-from typing import overload
-
-class A:
- def __add__(self, x: A) -> A: pass
-class B:
- def __radd__(self, x: A) -> B: pass
-
-@overload
-def f(x: A) -> A: pass
-@overload
-def f(x: B) -> B: pass
-
-f(A() + B())() # E: "B" not callable
-
-[case testKeywordArgOverload]
-from typing import overload
-@overload
-def f(x: int, y: str) -> int: pass
-@overload
-def f(x: str, y: int) -> str: pass
-f(x=1, y='')() # E: "int" not callable
-f(y=1, x='')() # E: "str" not callable
-
-[case testIgnoreOverloadVariantBasedOnKeywordArg]
-from typing import overload
-@overload
-def f(x: int) -> int: pass
-@overload
-def f(y: int) -> str: pass
-f(x=1)() # E: "int" not callable
-f(y=1)() # E: "str" not callable
-
-[case testOverloadWithTupleVarArg]
-from typing import overload
-@overload
-def f(x: int, y: str) -> int: pass
-@overload
-def f(*x: str) -> str: pass
-f(*(1,))() # E: No overload variant of "f" matches argument types [Tuple[builtins.int]]
-f(*('',))() # E: "str" not callable
-f(*(1, ''))() # E: "int" not callable
-f(*(1, '', 1))() # E: No overload variant of "f" matches argument types [Tuple[builtins.int, builtins.str, builtins.int]]
-
-[case testPreferExactSignatureMatchInOverload]
-from typing import overload, List
-@overload
-def f(x: int, y: List[int] = None) -> int: pass
-@overload
-def f(x: int, y: List[str] = None) -> int: pass
-f(y=[1], x=0)() # E: "int" not callable
-f(y=[''], x=0)() # E: "int" not callable
-a = f(y=[['']], x=0) # E: List item 0 has incompatible type List[str]
-a() # E: "int" not callable
-[builtins fixtures/list.pyi]
-
-[case testOverloadWithDerivedFromAny]
-from typing import Any, overload
-Base = None # type: Any
-
-class C:
- @overload
- def __init__(self, a: str) -> None: pass
- @overload
- def __init__(self, a: int) -> None: pass
-
-class Derived(Base):
- def to_dict(self) -> C:
- return C(self) # fails without the fix for #1363
-C(Derived()) # fails without the hack
-C(Base()) # Always ok
-
-[case testOverloadWithBoundedTypeVar]
-from typing import overload, TypeVar
-T = TypeVar('T', bound=str)
-@overload
-def f(x: T) -> T: pass
-@overload
-def f(x: int) -> bool: pass
-class mystr(str): pass
-
-f('x')() # E: "str" not callable
-f(1)() # E: "bool" not callable
-f(1.1) # E: No overload variant of "f" matches argument types [builtins.float]
-f(mystr())() # E: "mystr" not callable
-[builtins fixtures/primitives.pyi]
-
-[case testOverloadedCallWithVariableTypes]
-from typing import overload, TypeVar, List
-T = TypeVar('T', bound=str)
-@overload
-def f(x: T) -> T: pass
-@overload
-def f(x: List[T]) -> None: pass
-class mystr(str): pass
-
-U = TypeVar('U', bound=mystr)
-V = TypeVar('V')
-def g(x: U, y: V) -> None:
- f(x)() # E: "mystr" not callable
- f(y) # E: No overload variant of "f" matches argument types [V`-2]
- a = f([x]) # E: "f" does not return a value
- f([y]) # E: Type argument 1 of "f" has incompatible value "V"
- f([x, y]) # E: Type argument 1 of "f" has incompatible value "object"
-[builtins fixtures/list.pyi]
-[out]
-
-[case testOverlapWithTypeVars]
-from typing import overload, TypeVar, Sequence
-T = TypeVar('T', bound=str)
-@overload
-def f(x: Sequence[T]) -> None: pass
-@overload
-def f(x: Sequence[int]) -> int: pass
-# These are considered overlapping despite the bound on T due to runtime type erasure.
-[out]
-main:4: error: Overloaded function signatures 1 and 2 overlap with incompatible return types
-
-[case testOverlapWithTypeVarsWithValues]
-from typing import overload, TypeVar
-AnyStr = TypeVar('AnyStr', bytes, str)
-
-@overload
-def f(x: int) -> int: pass
-@overload
-def f(x: AnyStr) -> str: pass
-
-f(1)() # E: "int" not callable
-f('1')() # E: "str" not callable
-f(b'1')() # E: "str" not callable
-f(1.0) # E: No overload variant of "f" matches argument types [builtins.float]
-
-@overload
-def g(x: AnyStr, *a: AnyStr) -> None: pass
-@overload
-def g(x: int, *a: AnyStr) -> None: pass
-
-g('foo')
-g('foo', 'bar')
-g('foo', b'bar') # E: Type argument 1 of "g" has incompatible value "object"
-g(1)
-g(1, 'foo')
-g(1, 'foo', b'bar') # E: Type argument 1 of "g" has incompatible value "object"
-[builtins fixtures/primitives.pyi]
-
-[case testBadOverlapWithTypeVarsWithValues]
-from typing import overload, TypeVar
-AnyStr = TypeVar('AnyStr', bytes, str)
-
-@overload
-def f(x: AnyStr) -> None: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
-@overload
-def f(x: str) -> bool: pass
-[builtins fixtures/primitives.pyi]
-
-[case testOverlappingOverloadCounting]
-from typing import overload
-class A: pass
-class B(A): pass
-@overload
-def f(x: int) -> None: pass
-@overload
-def f(x: B) -> str: pass # E: Overloaded function signatures 2 and 3 overlap with incompatible return types
-@overload
-def f(x: A) -> int: pass
-
-[case testOverloadWithTupleMatchingTypeVar]
-from typing import TypeVar, Generic, Tuple, overload
-
-T = TypeVar('T')
-
-class A(Generic[T]):
- @overload
- def f(self, arg: T) -> None:
- pass
- @overload
- def f(self, arg: T, default: int) -> None:
- pass
-
-b = A() # type: A[Tuple[int, int]]
-b.f((0, 0))
-b.f((0, '')) # E: Argument 1 to "f" of "A" has incompatible type "Tuple[int, str]"; expected "Tuple[int, int]"
-
-[case testSingleOverload]
-from typing import overload
-@overload
-def f(a: int) -> None: pass
-def f(a: str) -> None: pass
-[out]
-main:2: error: Single overload definition, multiple required
-main:4: error: Name 'f' already defined
-
-[case testSingleOverload2]
-from typing import overload
-def f(a: int) -> None: pass
-@overload
-def f(a: str) -> None: pass
-[out]
-main:3: error: Name 'f' already defined
-main:3: error: Single overload definition, multiple required
-
-[case testNonconsecutiveOverloads]
-from typing import overload
-@overload
-def f(a: int) -> None: pass
-1
-@overload
-def f(a: str) -> None: pass
-[out]
-main:2: error: Single overload definition, multiple required
-main:5: error: Name 'f' already defined
-main:5: error: Single overload definition, multiple required
-
-[case testNonconsecutiveOverloadsMissingFirstOverload]
-from typing import overload
-def f(a: int) -> None: pass
-1
-@overload
-def f(a: str) -> None: pass
-[out]
-main:4: error: Name 'f' already defined
-main:4: error: Single overload definition, multiple required
-
-[case testNonconsecutiveOverloadsMissingLaterOverload]
-from typing import overload
-@overload
-def f(a: int) -> None: pass
-1
-def f(a: str) -> None: pass
-[out]
-main:2: error: Single overload definition, multiple required
-main:5: error: Name 'f' already defined
-
-[case testOverloadTuple]
-from typing import overload, Tuple
-@overload
-def f(x: int, y: Tuple[str, ...]) -> None: pass
-@overload
-def f(x: int, y: str) -> None: pass
-f(1, ('2', '3'))
-f(1, (2, '3')) # E: Argument 2 to "f" has incompatible type "Tuple[int, str]"; expected Tuple[str, ...]
-f(1, ('2',))
-f(1, '2')
-f(1, (2, 3)) # E: Argument 2 to "f" has incompatible type "Tuple[int, int]"; expected Tuple[str, ...]
-x = ('2', '3') # type: Tuple[str, ...]
-f(1, x)
-y = (2, 3) # type: Tuple[int, ...]
-f(1, y) # E: Argument 2 to "f" has incompatible type Tuple[int, ...]; expected Tuple[str, ...]
-[builtins fixtures/tuple.pyi]
-
-[case testCallableSpecificOverload]
-from typing import overload, Callable
-@overload
-def f(a: Callable[[], int]) -> None: pass
-@overload
-def f(a: str) -> None: pass
-f(0) # E: No overload variant of "f" matches argument types [builtins.int]
diff --git a/test-data/unit/check-python2.test b/test-data/unit/check-python2.test
deleted file mode 100644
index 7a9446b..0000000
--- a/test-data/unit/check-python2.test
+++ /dev/null
@@ -1,242 +0,0 @@
--- Type checker test cases for Python 2.x mode.
-
-
-[case testUnicode]
-u = u'foo'
-u = unicode()
-s = ''
-s = u'foo' # E: Incompatible types in assignment (expression has type "unicode", variable has type "str")
-s = b'foo'
-[builtins_py2 fixtures/python2.pyi]
-
-[case testTypeVariableUnicode]
-from typing import TypeVar
-T = TypeVar(u'T')
-
-[case testNamedTupleUnicode]
-from typing import NamedTuple
-from collections import namedtuple
-N = NamedTuple(u'N', [(u'x', int)])
-n = namedtuple(u'n', u'x y')
-
-[builtins fixtures/dict.pyi]
-
-[case testPrintStatement]
-print ''() # E: "str" not callable
-print 1, 1() # E: "int" not callable
-
-[case testPrintStatementWithTarget]
-class A:
- def write(self, s):
- # type: (str) -> None
- pass
-
-print >>A(), ''
-print >>None, ''
-print >>1, '' # E: "int" has no attribute "write"
-print >>(None + ''), None # E: Unsupported left operand type for + (None)
-
-[case testDivision]
-class A:
- def __div__(self, x):
- # type: (int) -> str
- pass
-s = A() / 1
-s = ''
-s = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-
-[case testStrUnicodeCompatibility]
-import typing
-def f(x):
- # type: (unicode) -> None
- pass
-f('')
-f(u'')
-f(b'')
-[builtins_py2 fixtures/python2.pyi]
-
-[case testStaticMethodWithCommentSignature]
-class A:
- @staticmethod
- def f(x): # type: (int) -> str
- return ''
-A.f(1)
-A.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-[builtins_py2 fixtures/staticmethod.pyi]
-
-[case testRaiseTuple]
-import typing
-raise BaseException, "a"
-raise BaseException, "a", None
-[builtins_py2 fixtures/exception.pyi]
-
-[case testTryExceptWithTuple]
-try:
- None
-except BaseException, e:
- e() # E: "BaseException" not callable
-[builtins_py2 fixtures/exception.pyi]
-
-[case testAlternateNameSuggestions]
-class Foo(object):
- def say_hello(self):
- pass
- def say_hell(self):
- pass
- def say_hullo(self):
- pass
- def say_goodbye(self):
- pass
- def go_away(self):
- pass
- def go_around(self):
- pass
- def append(self):
- pass
- def extend(self):
- pass
- def _add(self):
- pass
-
-f = Foo()
-f.say_hallo() # E: "Foo" has no attribute "say_hallo"; maybe "say_hullo", "say_hello", or "say_hell"?
-f.go_array() # E: "Foo" has no attribute "go_array"; maybe "go_away"?
-f.add() # E: "Foo" has no attribute "add"; maybe "append", "extend", or "_add"?
-
-[case testTupleArgListDynamicallyTyped]
-def f(x, (y, z)):
- x = y + z
-f(1, 1)
-f(1, (1, 2))
-
-[case testTupleArgListAnnotated]
-from typing import Tuple
-def f(x, (y, z)): # type: (object, Tuple[int, str]) -> None
- x() # E
- y() # E
- z() # E
-f(object(), (1, ''))
-f(1, 1) # E
-[builtins_py2 fixtures/tuple.pyi]
-[out]
-main:3: error: "object" not callable
-main:4: error: "int" not callable
-main:5: error: "str" not callable
-main:7: error: Argument 2 to "f" has incompatible type "int"; expected "Tuple[int, str]"
-
-[case testNestedTupleArgListAnnotated]
-from typing import Tuple
-def f(x, (y, (a, b))): # type: (object, Tuple[int, Tuple[str, int]]) -> None
- x() # E
- y() # E
- a() # E
- b() # E
-f(object(), (1, ('', 2)))
-f(1, 1) # E
-[builtins fixtures/tuple.pyi]
-[out]
-main:3: error: "object" not callable
-main:4: error: "int" not callable
-main:5: error: "str" not callable
-main:6: error: "int" not callable
-main:8: error: Argument 2 to "f" has incompatible type "int"; expected "Tuple[int, Tuple[str, int]]"
-
-[case testBackquoteExpr]
-`1`.x # E: "str" has no attribute "x"
-
-[case testPython2OnlyStdLibModuleWithoutStub]
-import asyncio
-import Bastion
-[out]
-main:1: error: Cannot find module named 'asyncio'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: No library stub file for standard library module 'Bastion'
-main:2: note: (Stub files are from https://github.com/python/typeshed)
-
-[case testImportFromPython2Builtin]
-from __builtin__ import int as i
-x = 1 # type: i
-y = '' # type: i # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testImportPython2Builtin]
-import __builtin__
-x = 1 # type: __builtin__.int
-y = '' # type: __builtin__.int # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testImportAsPython2Builtin]
-import __builtin__ as bi
-x = 1 # type: bi.int
-y = '' # type: bi.int # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testImportFromPython2BuiltinOverridingDefault]
-from __builtin__ import int
-x = 1 # type: int
-y = '' # type: int # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
--- Copied from check-functions.test
-[case testEllipsisWithArbitraryArgsOnBareFunctionInPython2]
-def f(x, y, z): # type: (...) -> None
- pass
-
--- Copied from check-functions.test
-[case testEllipsisWithSomethingAfterItFailsInPython2]
-def f(x, y, z): # type: (..., int) -> None
- pass
-[out]
-main:1: error: Ellipses cannot accompany other argument types in function type signature.
-
-[case testLambdaTupleArgInPython2]
-f = lambda (x, y): x + y
-f((0, 0))
-[out]
-
-[case testLambdaSingletonTupleArgInPython2]
-f = lambda (x,): x + 1
-f((0,))
-[out]
-
-[case testLambdaNoTupleArgInPython2]
-f = lambda (x): x + 1
-f(0)
-[out]
-
-[case testDefTupleEdgeCasesPython2]
-def f((x,)): return x
-def g((x)): return x
-f(0) + g(0)
-[out]
-
-[case testLambdaAsSortKeyForTuplePython2]
-from typing import Any, Tuple, Callable
-def bar(key):
- # type: (Callable[[Tuple[int, int]], int]) -> int
- pass
-def foo():
- # type: () -> int
- return bar(key=lambda (a, b): a)
-[out]
-
-[case testImportBuiltins]
-# flags: --fast-parser
-import __builtin__
-__builtin__.str
-
-[case testUnicodeAlias]
-from typing import List
-Alias = List[u'Foo']
-class Foo: pass
-[builtins_py2 fixtures/python2.pyi]
-
-[case testExec]
-exec('print 1 + 1')
-
-[case testUnicodeDocStrings]
-# flags: --python-version=2.7
-__doc__ = u"unicode"
-
-class A:
- u"unicode"
-
-def f():
- # type: () -> None
- u"unicode"
diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test
deleted file mode 100644
index 98bcfa1..0000000
--- a/test-data/unit/check-selftype.test
+++ /dev/null
@@ -1,358 +0,0 @@
-[case testSelfTypeInstance]
-from typing import TypeVar
-
-T = TypeVar('T', bound='A', covariant=True)
-
-class A:
- def copy(self: T) -> T: pass
-
-class B(A):
- pass
-
-reveal_type(A().copy) # E: Revealed type is 'def () -> __main__.A*'
-reveal_type(B().copy) # E: Revealed type is 'def () -> __main__.B*'
-reveal_type(A().copy()) # E: Revealed type is '__main__.A*'
-reveal_type(B().copy()) # E: Revealed type is '__main__.B*'
-
-[builtins fixtures/bool.pyi]
-
-[case testSelfTypeStaticAccess]
-from typing import TypeVar
-
-T = TypeVar('T', bound='A', covariant=True)
-class A:
- def copy(self: T) -> T: pass
-
-class B(A):
- pass
-
-# Erased instances appear on reveal_type; unrelated to self type
-def f(a: A) -> None: pass
-f(A.copy(A()))
-f(A.copy(B()))
-f(B.copy(B()))
-
-# TODO: make it an error
-# f(B.copy(A()))
-
-def g(a: B) -> None: pass
-g(A.copy(A())) # E: Argument 1 to "g" has incompatible type "A"; expected "B"
-g(A.copy(B()))
-g(B.copy(B()))
-
-[builtins fixtures/bool.pyi]
-
-[case testSelfTypeReturn]
-from typing import TypeVar, Type
-
-R = TypeVar('R')
-def _type(self: R) -> Type[R]: pass
-
-T = TypeVar('T', bound='A', covariant=True)
-class A:
- def copy(self: T) -> T:
- if B():
- return A() # E: Incompatible return value type (got "A", expected "T")
- elif A():
- return B() # E: Incompatible return value type (got "B", expected "T")
- reveal_type(_type(self)) # E: Revealed type is 'Type[T`-1]'
- return reveal_type(_type(self)()) # E: Revealed type is 'T`-1'
-
-class B(A):
- pass
-
-Q = TypeVar('Q', bound='C', covariant=True)
-class C:
- def __init__(self, a: int) -> None: pass
-
- def copy(self: Q) -> Q:
- if self:
- return reveal_type(_type(self)(1)) # E: Revealed type is 'Q`-1'
- else:
- return _type(self)() # E: Too few arguments for "C"
-
-
-[builtins fixtures/bool.pyi]
-
-[case testSelfTypeClass]
-from typing import TypeVar, Type
-
-T = TypeVar('T', bound='A')
-
-class A:
- @classmethod
- def new(cls: Type[T]) -> T:
- return reveal_type(cls()) # E: Revealed type is 'T`-1'
-
-class B(A):
- pass
-
-Q = TypeVar('Q', bound='C', covariant=True)
-class C:
- def __init__(self, a: int) -> None: pass
-
- @classmethod
- def new(cls: Type[Q]) -> Q:
- if cls:
- return cls(1)
- else:
- return cls() # E: Too few arguments for "C"
-
-
-reveal_type(A.new) # E: Revealed type is 'def () -> __main__.A*'
-reveal_type(B.new) # E: Revealed type is 'def () -> __main__.B*'
-reveal_type(A.new()) # E: Revealed type is '__main__.A*'
-reveal_type(B.new()) # E: Revealed type is '__main__.B*'
-
-[builtins fixtures/classmethod.pyi]
-
-[case testSelfTypeOverride]
-from typing import TypeVar, cast
-
-T = TypeVar('T', bound='A', covariant=True)
-
-class A:
- def copy(self: T) -> T: pass
-
-class B(A):
- pass
-
-Q = TypeVar('Q', bound='C', covariant=True)
-class C(A):
- def copy(self: Q) -> Q: pass
-
-reveal_type(C().copy) # E: Revealed type is 'def () -> __main__.C*'
-reveal_type(C().copy()) # E: Revealed type is '__main__.C*'
-reveal_type(cast(A, C()).copy) # E: Revealed type is 'def () -> __main__.A*'
-reveal_type(cast(A, C()).copy()) # E: Revealed type is '__main__.A*'
-
-[builtins fixtures/bool.pyi]
-
-[case testSelfTypeSuper]
-from typing import TypeVar, cast
-
-T = TypeVar('T', bound='A', covariant=True)
-
-class A:
- def copy(self: T) -> T: pass
-
-Q = TypeVar('Q', bound='B', covariant=True)
-class B(A):
- def copy(self: Q) -> Q:
- reveal_type(self) # E: Revealed type is 'Q`-1'
- reveal_type(super().copy) # E: Revealed type is 'def () -> Q`-1'
- return super().copy()
-
-[builtins fixtures/bool.pyi]
-
-[case testSelfTypeRecursiveBinding]
-from typing import TypeVar, Callable, Type
-
-T = TypeVar('T', bound='A', covariant=True)
-class A:
- # TODO: This is potentially unsafe, as we use T in an argument type
- def copy(self: T, factory: Callable[[T], T]) -> T:
- return factory(self)
-
- @classmethod
- def new(cls: Type[T], factory: Callable[[T], T]) -> T:
- reveal_type(cls) # E: Revealed type is 'Type[T`-1]'
- reveal_type(cls()) # E: Revealed type is 'T`-1'
- cls(2) # E: Too many arguments for "A"
- return cls()
-
-class B(A):
- pass
-
-reveal_type(A().copy) # E: Revealed type is 'def (factory: def (__main__.A*) -> __main__.A*) -> __main__.A*'
-reveal_type(B().copy) # E: Revealed type is 'def (factory: def (__main__.B*) -> __main__.B*) -> __main__.B*'
-reveal_type(A.new) # E: Revealed type is 'def (factory: def (__main__.A*) -> __main__.A*) -> __main__.A*'
-reveal_type(B.new) # E: Revealed type is 'def (factory: def (__main__.B*) -> __main__.B*) -> __main__.B*'
-
-[builtins fixtures/classmethod.pyi]
-
-[case testSelfTypeBound]
-from typing import TypeVar, Callable, cast
-
-TA = TypeVar('TA', bound='A', covariant=True)
-
-class A:
- def copy(self: TA) -> TA:
- pass
-
-class C(A):
- def copy(self: C) -> C:
- pass
-
-class D(A):
- def copy(self: A) -> A: # E: Return type of "copy" incompatible with supertype "A"
- pass
-
-TB = TypeVar('TB', bound='B', covariant=True)
-class B(A):
- x = 1
- def copy(self: TB) -> TB:
- reveal_type(self.x) # E: Revealed type is 'builtins.int'
- return cast(TB, None)
-
-[builtins fixtures/bool.pyi]
-
--- # TODO: fail for this
--- [case testSelfTypeBare]
--- from typing import TypeVar, Type
---
--- T = TypeVar('T', bound='E')
---
--- class E:
--- def copy(self: T, other: T) -> T: pass
-
-[case testSelfTypeClone]
-from typing import TypeVar, Type
-
-T = TypeVar('T', bound='C')
-
-class C:
- def copy(self: T) -> T:
- return self
-
- @classmethod
- def new(cls: Type[T]) -> T:
- return cls()
-
-def clone(arg: T) -> T:
- reveal_type(arg.copy) # E: Revealed type is 'def () -> T`-1'
- return arg.copy()
-
-
-def make(cls: Type[T]) -> T:
- reveal_type(cls.new) # E: Revealed type is 'def () -> T`-1'
- return cls.new()
-
-[builtins fixtures/classmethod.pyi]
-
-[case testSelfTypeGeneric]
-from typing import TypeVar
-
-T = TypeVar('T', int, str)
-
-class A:
- pass
-
-class B(A):
- def __init__(self, arg: T) -> None:
- super(B, self).__init__()
-
-[case testSelfTypeNonsensical]
-from typing import TypeVar, Type
-
-T = TypeVar('T', bound=str)
-class A:
- def foo(self: T) -> T: # E: The erased type of self 'builtins.str' is not a supertype of its class '__main__.A'
- return self
-
- @classmethod
- def cfoo(cls: Type[T]) -> T: # E: The erased type of self 'Type[builtins.str]' is not a supertype of its class 'Type[__main__.A]'
- return cls()
-
-Q = TypeVar('Q', bound='B')
-class B:
- def foo(self: Q) -> Q:
- return self
-
- @classmethod
- def cfoo(cls: Type[Q]) -> Q:
- return cls()
-
-class C:
- def foo(self: C) -> C: return self
-
- @classmethod
- def cfoo(cls: Type[C]) -> C:
- return cls()
-
-class D:
- def foo(self: str) -> str: # E: The erased type of self 'builtins.str' is not a supertype of its class '__main__.D'
- return self
-
- @staticmethod
- def bar(self: str) -> str:
- return self
-
- @classmethod
- def cfoo(cls: Type[str]) -> str: # E: The erased type of self 'Type[builtins.str]' is not a supertype of its class 'Type[__main__.D]'
- return cls()
-
-[builtins fixtures/classmethod.pyi]
-
-[case testSelfTypeLambdaDefault]
-from typing import Callable
-class C:
- @classmethod
- def foo(cls,
- arg: Callable[[int], str] = lambda a: ''
- ) -> None:
- pass
-
- def bar(self,
- arg: Callable[[int], str] = lambda a: ''
- ) -> None:
- pass
-[builtins fixtures/classmethod.pyi]
-
-[case testSelfTypeNew]
-from typing import TypeVar, Type
-
-T = TypeVar('T', bound=A)
-class A:
- def __new__(cls: Type[T]) -> T:
- return cls()
-
- def __init_subclass__(cls: Type[T]) -> None:
- pass
-
-class B:
- def __new__(cls: Type[T]) -> T: # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class 'Type[__main__.B]'
- return cls()
-
- def __init_subclass__(cls: Type[T]) -> None: # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class 'Type[__main__.B]'
- pass
-
-class C:
- def __new__(cls: Type[C]) -> C:
- return cls()
-
- def __init_subclass__(cls: Type[C]) -> None:
- pass
-
-class D:
- def __new__(cls: D) -> D: # E: The erased type of self '__main__.D' is not a supertype of its class 'Type[__main__.D]'
- return cls
-
- def __init_subclass__(cls: D) -> None: # E: The erased type of self '__main__.D' is not a supertype of its class 'Type[__main__.D]'
- pass
-
-class E:
- def __new__(cls) -> E:
- reveal_type(cls) # E: Revealed type is 'def () -> __main__.E'
- return cls()
-
- def __init_subclass__(cls) -> None:
- reveal_type(cls) # E: Revealed type is 'def () -> __main__.E'
-
-[case testSelfTypeProperty]
-from typing import TypeVar
-
-T = TypeVar('T', bound='A')
-
-class A:
- @property
- def member(self: T) -> T:
- pass
-
-class B(A):
- pass
-
-reveal_type(A().member) # E: Revealed type is '__main__.A*'
-reveal_type(B().member) # E: Revealed type is '__main__.B*'
-
-[builtins fixtures/property.pyi]
diff --git a/test-data/unit/check-semanal-error.test b/test-data/unit/check-semanal-error.test
deleted file mode 100644
index 0a07829..0000000
--- a/test-data/unit/check-semanal-error.test
+++ /dev/null
@@ -1,81 +0,0 @@
--- Type checking after an error during semantic analysis
--- -----------------------------------------------------
---
--- This tests both the semantic analyzer (that it does not generate
--- corrupt state on error) and the type checker (that it can deal with
--- whatever state the semantic analyzer sets up).
-
--- TODO:
--- - invalid type in annotation
--- - invalid function comment type annotation
--- - invalid multiple assignment type annotation
--- - using a type variable as a value
--- - using special names defined in typing as values
-
-[case testMissingModuleImport1]
-import m # E
-m.foo()
-m.x = m.y
-1() # E
-[out]
-main:1: error: Cannot find module named 'm'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:4: error: "int" not callable
-
-[case testMissingModuleImport2]
-from m import x # E
-x.foo()
-x.a = x.b
-1() # E
-[out]
-main:1: error: Cannot find module named 'm'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:4: error: "int" not callable
-
-[case testMissingModuleImport3]
-from m import * # E
-x # E
-1() # E
-[out]
-main:1: error: Cannot find module named 'm'
-main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Name 'x' is not defined
-main:3: error: "int" not callable
-
-[case testInvalidBaseClass1]
-class A(X): # E: Name 'X' is not defined
- x = 1
-A().foo(1)
-A().x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testInvalidBaseClass2]
-X = 1
-class A(X): # E
- x = 1
-A().foo(1)
-A().x = '' # E
-[out]
-main:2: error: Invalid type "__main__.X"
-main:2: error: Invalid base class
-main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-
-[case testInvalidNumberOfTypeArgs]
-from typing import TypeVar
-T = TypeVar('T')
-class C: # Forgot to add type params here
- def __init__(self, t: T) -> None: pass
-c = C(t=3) # type: C[int] # E: "C" expects no type arguments, but 1 given
-
-[case testBreakOutsideLoop]
-break # E: 'break' outside loop
-
-[case testContinueOutsideLoop]
-continue # E: 'continue' outside loop
-
-[case testYieldOutsideFunction]
-yield # E: 'yield' outside function
-
-[case testYieldFromOutsideFunction]
-x = 1
-yield from x # E: 'yield from' outside function
diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test
deleted file mode 100644
index 1fa0cdc..0000000
--- a/test-data/unit/check-statements.test
+++ /dev/null
@@ -1,1451 +0,0 @@
--- Return statement
--- ----------------
-
-
-[case testReturnValue]
-import typing
-def f() -> 'A':
- return A()
-def g() -> 'B':
- return A()
-class A:
- pass
-class B:
- pass
-[out]
-main:5: error: Incompatible return value type (got "A", expected "B")
-
-[case testReturnSubtype]
-import typing
-def f() -> 'B':
- return A()
-def g() -> 'A':
- return B()
-class A:
- pass
-class B(A):
- pass
-[out]
-main:3: error: Incompatible return value type (got "A", expected "B")
-
-[case testReturnWithoutAValue]
-import typing
-def f() -> 'A':
- return
-def g() -> None:
- return
-class A:
- pass
-[out]
-main:3: error: Return value expected
-
-[case testReturnNoneInFunctionReturningNone]
-import typing
-def f() -> None:
- return None
-def g() -> None:
- return f() # E: No return value expected
-[out]
-
-[case testReturnInGenerator]
-from typing import Generator
-def f() -> Generator[int, None, str]:
- yield 1
- return "foo"
-[out]
-
-[case testEmptyReturnInGenerator]
-from typing import Generator
-def f() -> Generator[int, None, str]:
- yield 1
- return # E: Return value expected
-[out]
-
-[case testEmptyReturnInNoneTypedGenerator]
-from typing import Generator
-def f() -> Generator[int, None, None]:
- yield 1
- return
-[out]
-
-[case testNonEmptyReturnInNoneTypedGenerator]
-from typing import Generator
-def f() -> Generator[int, None, None]:
- yield 1
- return 42 # E: No return value expected
-[out]
-
-[case testReturnInIterator]
-from typing import Iterator
-def f() -> Iterator[int]:
- yield 1
- return "foo"
-[out]
-
-
--- If statement
--- ------------
-
-
-[case testIfStatement]
-
-a = None # type: A
-a2 = None # type: A
-a3 = None # type: A
-b = None # type: bool
-if a:
- a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A")
-elif a2:
- a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A")
-elif a3:
- a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A")
-else:
- a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A")
-if b:
- pass
-elif b:
- pass
-if b:
- pass
-
-class A: pass
-[builtins fixtures/bool.pyi]
-
-
--- Loops
--- -----
-
-
-[case testWhileStatement]
-
-a = None # type: A
-b = None # type: bool
-while a:
- a = b # Fail
-else:
- a = b # Fail
-while b:
- b = b
-
-class A: pass
-[builtins fixtures/bool.pyi]
-[out]
-main:5: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
-main:7: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
-
-[case testForStatement]
-
-a = None # type: A
-b = None # type: object
-for a in [A()]:
- a = b # Fail
-else:
- a = b # Fail
-
-class A: pass
-[builtins fixtures/list.pyi]
-[out]
-main:5: error: Incompatible types in assignment (expression has type "object", variable has type "A")
-main:7: error: Incompatible types in assignment (expression has type "object", variable has type "A")
-
-[case testBreakStatement]
-import typing
-while None:
- break
-[builtins fixtures/bool.pyi]
-[out]
-
-[case testContinueStatement]
-import typing
-while None:
- continue
-[builtins fixtures/bool.pyi]
-[out]
-
-[case testForStatementTypeComments]
-# flags: --fast-parser
-from typing import List, Union
-x = [] # type: List[int]
-
-for y in x: # type: str # E: Incompatible types in assignment (expression has type "int", variable has type "str")
- pass
-
-for z in x: # type: int
- pass
-
-for w in x: # type: Union[int, str]
- reveal_type(w) # E: Revealed type is 'Union[builtins.int, builtins.str]'
-
-for v in x: # type: int, int # E: Invalid tuple literal type
- pass
-[builtins fixtures/list.pyi]
-
-[case testForStatementMultipleTypeComments]
-# flags: --fast-parser
-from typing import List, Tuple
-x = [] # type: List[Tuple[int, int]]
-
-for y in x: # type: int, int # E: Invalid tuple literal type
- pass
-
-for z in x: # type: Tuple[int, int]
- pass
-
-for w,v in x: # type: int, str # E: Incompatible types in assignment (expression has type "int", variable has type "str")
- pass
-
-for a, b in x: # type: int, int, int # E: Incompatible number of tuple items
- pass
-[builtins fixtures/list.pyi]
-
-
--- Operator assignment
--- -------------------
-
-
-[case testPlusAssign]
-
-a, b, c = None, None, None # type: (A, B, C)
-a += b # Fail
-b += a # Fail
-c += a # Fail
-a += c
-
-class A:
- def __add__(self, x: 'C') -> 'A': pass
-
-class B:
- def __add__(self, x: A) -> 'C': pass
-
-class C: pass
-[out]
-main:3: error: Unsupported operand types for + ("A" and "B")
-main:4: error: Result type of + incompatible in assignment
-main:5: error: Unsupported left operand type for + ("C")
-
-[case testMinusAssign]
-
-a, b, c = None, None, None # type: (A, B, C)
-a -= b # Fail
-b -= a # Fail
-c -= a # Fail
-a -= c
-
-class A:
- def __sub__(self, x: 'C') -> 'A': pass
-
-class B:
- def __sub__(self, x: A) -> 'C': pass
-
-class C: pass
-[out]
-main:3: error: Unsupported operand types for - ("A" and "B")
-main:4: error: Result type of - incompatible in assignment
-main:5: error: Unsupported left operand type for - ("C")
-
-[case testMulAssign]
-
-a, c = None, None # type: (A, C)
-a *= a # Fail
-c *= a # Fail
-a *= c
-
-class A:
- def __mul__(self, x: 'C') -> 'A': pass
-
-class C: pass
-[out]
-main:3: error: Unsupported operand types for * ("A" and "A")
-main:4: error: Unsupported left operand type for * ("C")
-
-[case testMatMulAssign]
-a, c = None, None # type: (A, C)
-a @= a # E: Unsupported operand types for @ ("A" and "A")
-c @= a # E: Unsupported left operand type for @ ("C")
-a @= c
-
-class A:
- def __matmul__(self, x: 'C') -> 'A': pass
-
-class C: pass
-
-[case testDivAssign]
-
-a, c = None, None # type: (A, C)
-a /= a # Fail
-c /= a # Fail
-a /= c
-
-class A:
- def __truediv__(self, x: 'C') -> 'A': pass
-
-class C: pass
-[out]
-main:3: error: Unsupported operand types for / ("A" and "A")
-main:4: error: Unsupported left operand type for / ("C")
-
-[case testPowAssign]
-
-a, c = None, None # type: (A, C)
-a **= a # Fail
-c **= a # Fail
-a **= c
-
-class A:
- def __pow__(self, x: 'C') -> 'A': pass
-
-class C: pass
-[out]
-main:3: error: Unsupported operand types for ** ("A" and "A")
-main:4: error: Unsupported left operand type for ** ("C")
-
-[case testSubtypesInOperatorAssignment]
-
-a, b = None, None # type: (A, B)
-b += b
-b += a
-a += b
-
-class A:
- def __add__(self, x: 'A') -> 'B': pass
-
-class B(A): pass
-[out]
-
-[case testAdditionalOperatorsInOpAssign]
-
-a, c = None, None # type: (A, C)
-a &= a # Fail
-a >>= a # Fail
-a //= a # Fail
-a &= c
-a >>= c
-a //= c
-class A:
- def __and__(self, x: 'C') -> 'A': pass
- def __rshift__(self, x: 'C') -> 'A': pass
- def __floordiv__(self, x: 'C') -> 'A': pass
-class C: pass
-[out]
-main:3: error: Unsupported operand types for & ("A" and "A")
-main:4: error: Unsupported operand types for >> ("A" and "A")
-main:5: error: Unsupported operand types for // ("A" and "A")
-
-[case testInplaceOperatorMethods]
-import typing
-class A:
- def __iadd__(self, x: int) -> 'A': pass
- def __imul__(self, x: str) -> 'A': pass
- def __imatmul__(self, x: str) -> 'A': pass
-a = A()
-a += 1
-a *= ''
-a @= ''
-a += '' # E: Argument 1 to "__iadd__" of "A" has incompatible type "str"; expected "int"
-a *= 1 # E: Argument 1 to "__imul__" of "A" has incompatible type "int"; expected "str"
-a @= 1 # E: Argument 1 to "__imatmul__" of "A" has incompatible type "int"; expected "str"
-
-[case testInplaceSetitem]
-class A(object):
- def __init__(self):
- self.a = 0
-
- def __iadd__(self, a):
- # type: (int) -> A
- self.a += 1
- return self
-
-a = A()
-b = [a]
-b[0] += 1
-[builtins fixtures/list.pyi]
-[out]
-
-
--- Assert statement
--- ----------------
-
-
-[case testAssert]
-import typing
-assert None + None # Fail
-assert None
-[out]
-main:2: error: Unsupported left operand type for + (None)
-
-
--- Exception handling
--- ------------------
-
-
-[case testRaiseStatement]
-
-e = None # type: BaseException
-f = None # type: MyError
-a = None # type: A
-raise a # Fail
-raise e
-raise f
-class A: pass
-class MyError(BaseException): pass
-[builtins fixtures/exception.pyi]
-[out]
-main:5: error: Exception must be derived from BaseException
-
-[case testRaiseClassobject]
-import typing
-class A: pass
-class MyError(BaseException): pass
-def f(): pass
-raise BaseException
-raise MyError
-raise A # E: Exception must be derived from BaseException
-raise object # E: Exception must be derived from BaseException
-raise f # E: Exception must be derived from BaseException
-[builtins fixtures/exception.pyi]
-
-[case testRaiseFromStatement]
-
-e = None # type: BaseException
-f = None # type: MyError
-a = None # type: A
-raise e from a # E: Exception must be derived from BaseException
-raise e from e
-raise e from f
-class A: pass
-class MyError(BaseException): pass
-[builtins fixtures/exception.pyi]
-
-[case testRaiseFromClassobject]
-import typing
-class A: pass
-class MyError(BaseException): pass
-def f(): pass
-raise BaseException from BaseException
-raise BaseException from MyError
-raise BaseException from A # E: Exception must be derived from BaseException
-raise BaseException from object # E: Exception must be derived from BaseException
-raise BaseException from f # E: Exception must be derived from BaseException
-[builtins fixtures/exception.pyi]
-
-[case testTryFinallyStatement]
-import typing
-try:
- b = object() # type: A # Fail
-finally:
- c = object() # type: A # Fail
-class A: pass
-[out]
-main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A")
-main:5: error: Incompatible types in assignment (expression has type "object", variable has type "A")
-
-[case testSimpleTryExcept]
-
-try:
- pass
-except BaseException as e:
- a, o = None, None # type: (BaseException, object)
- e = a
- e = o # Fail
-class A: pass
-class B: pass
-[builtins fixtures/exception.pyi]
-[out]
-main:7: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException")
-
-[case testTypeErrorInBlock]
-
-while object:
- x = None # type: A
- x = object()
- x = B()
-class A: pass
-class B: pass
-[out]
-main:4: error: Incompatible types in assignment (expression has type "object", variable has type "A")
-main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testTypeErrorInvolvingBaseException]
-
-x, a = None, None # type: (BaseException, A)
-a = BaseException() # Fail
-a = object() # Fail
-x = object() # Fail
-x = A() # Fail
-x = BaseException()
-class A: pass
-[builtins fixtures/exception.pyi]
-[out]
-main:3: error: Incompatible types in assignment (expression has type "BaseException", variable has type "A")
-main:4: error: Incompatible types in assignment (expression has type "object", variable has type "A")
-main:5: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException")
-main:6: error: Incompatible types in assignment (expression has type "A", variable has type "BaseException")
-
-[case testSimpleTryExcept2]
-import typing
-try:
- pass
-except BaseException as e:
- e = object() # Fail
- e = BaseException()
-[builtins fixtures/exception.pyi]
-[out]
-main:5: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException")
-
-[case testBaseClassAsExceptionTypeInExcept]
-import typing
-try:
- pass
-except Err as e:
- e = BaseException() # Fail
- e = Err()
-class Err(BaseException): pass
-[builtins fixtures/exception.pyi]
-[out]
-main:5: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err")
-
-[case testMultipleExceptHandlers]
-import typing
-try:
- pass
-except BaseException as e:
- pass
-except Err as f:
- f = BaseException() # Fail
- f = Err()
-class Err(BaseException): pass
-[builtins fixtures/exception.pyi]
-[out]
-main:7: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err")
-
-[case testTryExceptStatement]
-import typing
-try:
- a = B() # type: A # Fail
-except BaseException as e:
- e = A() # Fail
- e = Err()
-except Err as f:
- f = BaseException() # Fail
- f = Err()
-class A: pass
-class B: pass
-class Err(BaseException): pass
-[builtins fixtures/exception.pyi]
-[out]
-main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-main:5: error: Incompatible types in assignment (expression has type "A", variable has type "BaseException")
-main:8: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err")
-
-[case testTryExceptWithinFunction]
-import typing
-def f() -> None:
- try: pass
- except BaseException as e:
- e = object() # Fail
- e = BaseException()
- except Err as f:
- f = BaseException() # Fail
- f = Err()
-class Err(BaseException): pass
-[builtins fixtures/exception.pyi]
-[out]
-main:5: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException")
-main:8: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err")
-
-[case testTryExceptFlow]
-def f() -> None:
- x = 1
- try:
- pass
- except:
- raise
- x + 'a' # E: Unsupported left operand type for + ("int")
-[builtins fixtures/exception.pyi]
-[out]
-
-[case testTryWithElse]
-import typing
-try: pass
-except BaseException: pass
-else:
- object(None) # E: Too many arguments for "object"
-[builtins fixtures/exception.pyi]
-
-[case testRedefinedFunctionInTryWithElse]
-def f() -> None: pass
-try:
- pass
-except BaseException:
- f2 = f
-else:
- def f2() -> str: pass
-try:
- pass
-except BaseException:
- f3 = f
-else:
- def f3() -> None: pass
-[builtins fixtures/exception.pyi]
-[out]
-main:7: error: Incompatible redefinition (redefinition with type Callable[[], str], original type Callable[[], None])
-
-[case testExceptWithoutType]
-import typing
-try:
- -None # E: Unsupported operand type for unary - (None)
-except:
- ~None # E: Unsupported operand type for ~ (None)
-[builtins fixtures/exception.pyi]
-
-[case testRaiseWithoutArgument]
-import typing
-try:
- None
-except:
- raise
-[builtins fixtures/exception.pyi]
-
-[case testExceptWithMultipleTypes]
-import typing
-class E1(BaseException): pass
-class E2(E1): pass
-try:
- pass
-except (E1, E2): pass
-except (E1, object): pass # E: Exception type must be derived from BaseException
-except (object, E2): pass # E: Exception type must be derived from BaseException
-except (E1, (E2,)): pass # E: Exception type must be derived from BaseException
-
-except (E1, E2): pass
-except ((E1, E2)): pass
-except (((E1, E2))): pass
-[builtins fixtures/exception.pyi]
-
-[case testExceptWithMultipleTypes2]
-import typing
-class E1(BaseException): pass
-class E2(E1): pass
-try:
- pass
-except (E1, E2) as e1:
- x = e1 # type: E1
- y = e1 # type: E2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E2")
-except (E2, E1) as e2:
- a = e2 # type: E1
- b = e2 # type: E2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E2")
-except (E1, E2, int) as e3: # E: Exception type must be derived from BaseException
- pass
-[builtins fixtures/exception.pyi]
-
-[case testExceptWithMultipleTypes3]
-import typing
-class E1(BaseException): pass
-class E1_1(E1): pass
-class E1_2(E1): pass
-try: pass
-except (E1, E1_1, E1_2) as e1:
- x = e1 # type: E1
- y = e1 # type: E1_1 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_1")
- z = e1 # type: E1_2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_2")
-except (E1_1, E1_2) as e2:
- a = e2 # type: E1
- b = e2 # type: E1_1 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_1")
- c = e2 # type: E1_2 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_2")
-[builtins fixtures/exception.pyi]
-
-[case testExceptWithAnyTypes]
-from typing import Any
-
-E1 = None # type: Any
-class E2(BaseException): pass
-class NotBaseDerived: pass
-
-try:
- pass
-except BaseException as e1:
- reveal_type(e1) # E: Revealed type is 'builtins.BaseException'
-except (E1, BaseException) as e2:
- reveal_type(e2) # E: Revealed type is 'Any'
-except (E1, E2) as e3:
- reveal_type(e3) # E: Revealed type is 'Any'
-except (E1, E2, BaseException) as e4:
- reveal_type(e4) # E: Revealed type is 'Any'
-
-try: pass
-except E1 as e1:
- reveal_type(e1) # E: Revealed type is 'Any'
-except E2 as e2:
- reveal_type(e2) # E: Revealed type is '__main__.E2'
-except NotBaseDerived as e3: # E: Exception type must be derived from BaseException
- pass
-except (NotBaseDerived, E1) as e4: # E: Exception type must be derived from BaseException
- pass
-except (NotBaseDerived, E2) as e5: # E: Exception type must be derived from BaseException
- pass
-except (NotBaseDerived, E1, E2) as e6: # E: Exception type must be derived from BaseException
- pass
-except (E1, E2, NotBaseDerived) as e6: # E: Exception type must be derived from BaseException
- pass
-[builtins fixtures/exception.pyi]
-
-[case testReuseTryExceptionVariable]
-import typing
-class E1(BaseException): pass
-class E2(BaseException): pass
-try: pass
-except E1 as e: pass
-try: pass
-except E1 as e: pass
-try: pass
-except E2 as e: pass
-e + 1 # E: Trying to read deleted variable 'e'
-e = E1() # E: Assignment to variable 'e' outside except: block
-[builtins fixtures/exception.pyi]
-
-[case testReuseDefinedTryExceptionVariable]
-import typing
-class E1(BaseException): pass
-class E2(BaseException): pass
-e = 1
-e = 1
-try: pass
-except E1 as e: pass
-e = 1 # E: Assignment to variable 'e' outside except: block
-e = E1() # E: Assignment to variable 'e' outside except: block
-[builtins fixtures/exception.pyi]
-
-[case testExceptionVariableReuseInDeferredNode1]
-def f(*a: BaseException) -> int:
- x
- try: pass
- except BaseException as err: pass
- try: pass
- except BaseException as err: f(err)
-x = f()
-[builtins fixtures/exception.pyi]
-
-[case testExceptionVariableReuseInDeferredNode2]
-def f(*a: BaseException) -> int:
- try: pass
- except BaseException as err: pass
- x
- try: pass
- except BaseException as err: f(err)
-x = f()
-[builtins fixtures/exception.pyi]
-
-[case testExceptionVariableReuseInDeferredNode3]
-def f(*a: BaseException) -> int:
- try: pass
- except BaseException as err: pass
- try: pass
- except BaseException as err: f(err)
- x
-x = f()
-[builtins fixtures/exception.pyi]
-
-[case testExceptionVariableReuseInDeferredNode4]
-class EA(BaseException):
- a = None # type: int
-class EB(BaseException):
- b = None # type: str
-def f(*arg: BaseException) -> int:
- x
- try: pass
- except EA as err:
- f(err)
- a = err.a
- reveal_type(a)
- try: pass
- except EB as err:
- f(err)
- b = err.b
- reveal_type(b)
-x = f()
-[builtins fixtures/exception.pyi]
-[out]
-main:11: error: Revealed type is 'builtins.int'
-main:16: error: Revealed type is 'builtins.str'
-
-[case testExceptionVariableReuseInDeferredNode5]
-class EA(BaseException):
- a = None # type: int
-class EB(BaseException):
- b = None # type: str
-def f(*arg: BaseException) -> int:
- try: pass
- except EA as err:
- f(err)
- a = err.a
- reveal_type(a)
- x
- try: pass
- except EB as err:
- f(err)
- b = err.b
- reveal_type(b)
-x = f()
-[builtins fixtures/exception.pyi]
-[out]
-main:10: error: Revealed type is 'builtins.int'
-main:16: error: Revealed type is 'builtins.str'
-
-[case testExceptionVariableReuseInDeferredNode6]
-class EA(BaseException):
- a = None # type: int
-class EB(BaseException):
- b = None # type: str
-def f(*arg: BaseException) -> int:
- try: pass
- except EA as err:
- f(err)
- a = err.a
- reveal_type(a)
- try: pass
- except EB as err:
- f(err)
- b = err.b
- reveal_type(b)
- x
-x = f()
-[builtins fixtures/exception.pyi]
-[out]
-main:10: error: Revealed type is 'builtins.int'
-main:15: error: Revealed type is 'builtins.str'
-
-[case testArbitraryExpressionAsExceptionType]
-import typing
-a = BaseException
-try: pass
-except a as b:
- b = BaseException()
- b = object() # E: Incompatible types in assignment (expression has type "object", variable has type "BaseException")
-[builtins fixtures/exception.pyi]
-
-[case testInvalidExceptionCallable]
-import typing
-def exc() -> BaseException: pass
-try: pass
-except exc as e: pass # E: Exception type must be derived from BaseException
-except BaseException() as b: pass # E: Exception type must be derived from BaseException
-[builtins fixtures/exception.pyi]
-
-[case testTupleValueAsExceptionType]
-import typing
-def exc() -> BaseException: pass
-class E1(BaseException): pass
-class E1_1(E1): pass
-class E1_2(E1): pass
-
-exs1 = (E1, E1_1, E1_2)
-try: pass
-except exs1 as e1:
- x = e1 # type: E1
- y = e1 # type: E1_1 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_1")
- z = e1 # type: E1_2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_2")
-
-exs2 = (E1_1, E1_2)
-try: pass
-except exs2 as e2:
- a = e2 # type: E1
- b = e2 # type: E1_1 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_1")
- c = e2 # type: E1_2 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_2")
-
-exs3 = (E1, (E1_1, (E1_2,)))
-try: pass
-except exs3 as e3: pass # E: Exception type must be derived from BaseException
-[builtins fixtures/exception.pyi]
-
-[case testInvalidTupleValueAsExceptionType]
-import typing
-def exc() -> BaseException: pass
-class E1(BaseException): pass
-class E2(E1): pass
-
-exs1 = (E1, E2, int)
-try: pass
-except exs1 as e: pass # E: Exception type must be derived from BaseException
-[builtins fixtures/exception.pyi]
-
-[case testOverloadedExceptionType]
-from typing import overload
-class E(BaseException):
- @overload
- def __init__(self) -> None: pass
- @overload
- def __init__(self, x) -> None: pass
-try:
- pass
-except E as e:
- e = E()
- e = BaseException() # E: Incompatible types in assignment (expression has type "BaseException", variable has type "E")
-[builtins fixtures/exception.pyi]
-
-[case testExceptionWithAnyBaseClass]
-from typing import Any
-E = None # type: Any
-class EE(E): pass
-raise EE()
-raise EE
-[builtins fixtures/exception.pyi]
-
-[case testExceptionIsType]
-from typing import Type
-class B(BaseException): pass
-def f(e: Type[B]):
- try: pass
- except e: pass
-def g(e: Type[BaseException]):
- try: pass
- except e as err:
- reveal_type(err)
-def h(e: Type[int]):
- try: pass
- except e: pass
-[builtins fixtures/exception.pyi]
-[out]
-main:9: error: Revealed type is 'builtins.BaseException'
-main:12: error: Exception type must be derived from BaseException
-
-
--- Del statement
--- -------------
-
-
-[case testDelStmtWithIndex]
-a, b = None, None # type: (A, B)
-del b[a]
-del b[b] # E: Argument 1 to "__delitem__" of "B" has incompatible type "B"; expected "A"
-del a[a] # E: "A" has no attribute "__delitem__"
-del a[b] # E: "A" has no attribute "__delitem__"
-class B:
- def __delitem__(self, index: 'A'): pass
-class A: pass
-
-[case testDelStmtWithAttribute]
-class A:
- def f(self): pass
- x = 0
-a = A()
-del a.f
-del a.x
-del a.z # E: "A" has no attribute "z"
-
-[case testDelStatementWithTuple]
-class A:
- x = 0
-a = A()
-del a.x, a.y # E: "A" has no attribute "y"
-
-
-[case testDelStatementWithAssignmentSimple]
-a = 1
-a + 1
-del a
-a + 1 # E: Trying to read deleted variable 'a'
-[builtins fixtures/ops.pyi]
-
-[case testDelStatementWithAssignmentTuple]
-a = 1
-b = 1
-del (a, b)
-b + 1 # E: Trying to read deleted variable 'b'
-[builtins fixtures/ops.pyi]
-
-[case testDelStatementWithAssignmentClass]
-class C:
- a = 1
-
-c = C()
-c.a = 1
-c.a + 1
-del c.a
-c.a + 1
-[builtins fixtures/ops.pyi]
-
-[case testDelStatementWithConditions]
-x = 5
-del x
-if x: ... # E: Trying to read deleted variable 'x'
-
-def f(x):
- return x
-
-if 0: ...
-elif f(x): ... # E: Trying to read deleted variable 'x'
-
-while x == 5: ... # E: Trying to read deleted variable 'x'
-
--- Yield statement
--- ---------------
-
-
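Illustrative sketch (not taken from the diff) of the rule the yield cases below check: each value yielded by a generator must match the element type declared in its return type.

    from typing import Iterator

    def count_up(n: int) -> Iterator[int]:
        i = 0
        while i < n:
            yield i      # each yielded value must be an int
            i += 1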
-[case testSimpleYield]
-from typing import Iterator
-def f() -> Iterator[int]:
- yield 1
- yield '' # E: Incompatible types in yield (actual type "str", expected type "int")
-[builtins fixtures/for.pyi]
-[out]
-
-[case testYieldInFunctionReturningGenerator]
-from typing import Generator
-def f() -> Generator[int, None, None]:
- yield 1
-[builtins fixtures/for.pyi]
-[out]
-
-[case testYieldInFunctionReturningIterable]
-from typing import Iterable
-def f() -> Iterable[int]:
- yield 1
-[builtins fixtures/for.pyi]
-[out]
-
-[case testYieldInFunctionReturningObject]
-def f() -> object:
- yield 1
-[builtins fixtures/for.pyi]
-[out]
-
-[case testYieldInFunctionReturningAny]
-from typing import Any
-def f() -> Any:
- yield object()
-[out]
-
-[case testYieldInFunctionReturningFunction]
-from typing import Callable
-def f() -> Callable[[], None]: # E: The return type of a generator function should be "Generator" or one of its supertypes
- yield object()
-[out]
-
-[case testYieldInDynamicallyTypedFunction]
-import typing
-def f():
- yield f
-
-[case testWithInvalidInstanceReturnType]
-import typing
-def f() -> int: # E: The return type of a generator function should be "Generator" or one of its supertypes
- yield 1
-[builtins fixtures/for.pyi]
-[out]
-
-[case testTypeInferenceContextAndYield]
-from typing import List, Iterator
-def f() -> 'Iterator[List[int]]':
- yield []
- yield [object()] # E: List item 0 has incompatible type "object"
-[builtins fixtures/for.pyi]
-[out]
-
-[case testYieldAndReturnWithoutValue]
-from typing import Iterator
-def f() -> Iterator[int]:
- yield 1
- return
-[builtins fixtures/for.pyi]
-
-[case testYieldWithNoValue]
-from typing import Iterator
-def f() -> Iterator[None]:
- yield
-[builtins fixtures/for.pyi]
-
-[case testYieldWithNoValueWhenValueRequired]
-from typing import Iterator
-def f() -> Iterator[int]:
- yield # E: Yield value expected
-[builtins fixtures/for.pyi]
-[out]
-
-[case testYieldWithExplicitNone]
-from typing import Iterator
-def f() -> Iterator[None]:
- yield None # E: Incompatible types in yield (actual type None, expected type None)
-[builtins fixtures/for.pyi]
-[out]
-
-
--- Yield from statement
--- --------------------
-
--- Iterables
--- ----------
-
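A small sketch (illustrative only, assuming standard typing semantics) of the delegation rule the yield-from cases check: the element type of the delegated iterable must be compatible with the outer generator's declared element type.

    from typing import Iterator

    def inner() -> Iterator[int]:
        yield 1

    def outer() -> Iterator[int]:
        yield from inner()       # ok: int matches int
        # yield from ['a', 'b']  # would be rejected: str is not int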
-[case testSimpleYieldFromWithIterator]
-from typing import Iterator
-def g() -> Iterator[str]:
- yield '42'
-def h() -> Iterator[int]:
- yield 42
-def f() -> Iterator[str]:
- yield from g()
- yield from h() # E: Incompatible types in "yield from" (actual type "int", expected type "str")
-[out]
-
-[case testYieldFromAppliedToAny]
-from typing import Any
-def g() -> Any:
- yield object()
-def f() -> Any:
- yield from g()
-[out]
-
-[case testYieldFromInFunctionReturningFunction]
-from typing import Iterator, Callable
-def g() -> Iterator[int]:
- yield 42
-def f() -> Callable[[], None]: # E: The return type of a generator function should be "Generator" or one of its supertypes
- yield from g()
-[out]
-
-[case testYieldFromNotIterableReturnType]
-from typing import Iterator
-def g() -> Iterator[int]:
- yield 42
-def f() -> int: # E: The return type of a generator function should be "Generator" or one of its supertypes
- yield from g()
-[out]
-
-[case testYieldFromNotAppliedIterator]
-from typing import Iterator
-def g() -> int:
- return 42
-def f() -> Iterator[int]:
- yield from g() # E: "yield from" can't be applied to "int"
-[out]
-
-[case testYieldFromCheckIncompatibleTypesTwoIterables]
-from typing import List, Iterator
-def g() -> Iterator[List[int]]:
- yield [2, 3, 4]
-def f() -> Iterator[List[int]]:
- yield from g()
- yield from [1, 2, 3] # E: Incompatible types in "yield from" (actual type "int", expected type List[int])
-[builtins fixtures/for.pyi]
-[out]
-
-[case testYieldFromNotAppliedToNothing]
-def h():
- yield from # E: invalid syntax
-[out]
-
-[case testYieldFromAndYieldTogether]
-from typing import Iterator
-def f() -> Iterator[str]:
- yield "g1 ham"
- yield from g()
- yield "g1 eggs"
-def g() -> Iterator[str]:
- yield "g2 spam"
- yield "g2 more spam"
-[out]
-
-[case testYieldFromAny]
-from typing import Iterator
-def f(a):
- b = yield from a
- return b
-[out]
-
--- With statement
--- --------------
-
-
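Illustrative sketch of the protocol the with-statement cases exercise (hypothetical Resource class, not from the file): the context manager must define __enter__ and __exit__, and the as-target takes the return type of __enter__.

    class Resource:
        def __enter__(self) -> int:
            return 42
        def __exit__(self, exc_type, exc, tb) -> None:
            return None

    with Resource() as r:
        r + 1   # r is an int, the return type of __enter__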
-[case testSimpleWith]
-import typing
-class A:
- def __enter__(self) -> None: pass
- def __exit__(self, x, y, z) -> None: pass
-with A():
- object(A) # E: Too many arguments for "object"
-
-[case testWithStmtAndInvalidExit]
-import typing
-class A:
- def __enter__(self) -> None: pass
- def __exit__(self, x, y) -> None: pass
-with A(): # E: Too many arguments for "__exit__" of "A"
- pass
-
-[case testWithStmtAndMissingExit]
-import typing
-class A:
- def __enter__(self) -> None: pass
-with A(): # E: "A" has no attribute "__exit__"
- pass
-
-[case testWithStmtAndInvalidEnter]
-import typing
-class A:
- def __enter__(self, x) -> None: pass
- def __exit__(self, x, y, z) -> None: pass
-with A(): # E: Too few arguments for "__enter__" of "A"
- pass
-
-[case testWithStmtAndMissingEnter]
-import typing
-class A:
- def __exit__(self, x, y, z) -> None: pass
-with A(): # E: "A" has no attribute "__enter__"
- pass
-
-[case testWithStmtAndMultipleExprs]
-import typing
-class A:
- def __enter__(self) -> None: pass
- def __exit__(self, x, y, z) -> None: pass
-class B:
- def __enter__(self) -> None: pass
-with A(), B(): # E: "B" has no attribute "__exit__"
- pass
-with B(), A(): # E: "B" has no attribute "__exit__"
- pass
-
-[case testWithStmtAndResult]
-import typing
-class B: pass
-class A:
- def __enter__(self) -> B: pass
- def __exit__(self, x, y, z): pass
-with A() as b:
- b = B()
- b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-[case testWithStmtAndMultipleResults]
-from typing import TypeVar, Generic
-t = TypeVar('t')
-class B: pass
-class C: pass
-class A(Generic[t]):
- def __enter__(self) -> t: pass
- def __exit__(self, x, y, z): pass
-a_b = A() # type: A[B]
-a_c = A() # type: A[C]
-with a_b as b, a_c as c:
- b = B()
- c = C()
- b = c # E: Incompatible types in assignment (expression has type "C", variable has type "B")
- c = b # E: Incompatible types in assignment (expression has type "B", variable has type "C")
-
-[case testWithStmtAndComplexTarget]
-from typing import Tuple
-class A:
- def __enter__(self) -> Tuple[int, str]: pass
- def __exit__(self, x, y, z): pass
-with A() as (a, b):
- a = 1
- b = ''
- a = b # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-[builtins fixtures/tuple.pyi]
-
-[case testWithStmtTypeComment]
-# flags: --fast-parser
-from typing import Union
-class A:
- def __enter__(self) -> int: pass
- def __exit__(self, x, y, z): pass
-
-with A(): # type: int # E: Invalid type comment
- pass
-
-with A() as a: # type: int
- pass
-
-with A() as b: # type: str # E: Incompatible types in assignment (expression has type "int", variable has type "str")
- pass
-
-with A() as c: # type: int, int # E: Invalid tuple literal type
- pass
-
-with A() as d: # type: Union[int, str]
- reveal_type(d) # E: Revealed type is 'Union[builtins.int, builtins.str]'
-
-[case testWithStmtTupleTypeComment]
-# flags: --fast-parser
-from typing import Tuple
-class A:
- def __enter__(self) -> Tuple[int, int]: pass
- def __exit__(self, x, y, z): pass
-
-with A():
- pass
-
-with A() as a: # type: Tuple[int, int]
- pass
-
-with A() as b: # type: Tuple[int, str] # E: Incompatible types in assignment (expression has type "Tuple[int, int]", variable has type "Tuple[int, str]")
- pass
-
-with A() as (c, d): # type: int, int
- pass
-
-with A() as (e, f): # type: Tuple[int, int]
- pass
-
-with A() as (g, h): # type: int # E: Tuple type expected for multiple variables
- pass
-
-with A() as (i, j): # type: int, int, str # E: Incompatible number of tuple items
- pass
-
-with A() as (k, l): # type: int, str # E: Incompatible types in assignment (expression has type "int", variable has type "str")
- pass
-[builtins fixtures/tuple.pyi]
-
-[case testWithStmtComplexTypeComment]
-# flags: --fast-parser
-from typing import Tuple
-class A:
- def __enter__(self) -> Tuple[int, int]: pass
- def __exit__(self, x, y, z): pass
-
-class B:
- def __enter__(self) -> str: pass
- def __exit__(self, x, y, z): pass
-
-with A() as a, A() as (b, c), B() as d: # type: Tuple[int, int], (int, int), str
- pass
-
-with A() as e, A() as (f, g), B() as h: # type: Tuple[int, int], Tuple[int, int], str
- pass
-
-with A() as i, A() as (j, k), B() as l: # type: (int, int), (int, int), str # E: Invalid tuple literal type
- pass
-
-with A(), A(), B() as m, A() as n, B(), B() as o: # type: int, Tuple[int, int] # E: Incompatible number of types for `with` targets
- pass
-
-with A(), B(), B() as p, A(), A(): # type: str
- pass
-[builtins fixtures/tuple.pyi]
-
--- Chained assignment
--- ------------------
-
-
-[case testChainedAssignment]
-import typing
-class A: pass
-class B: pass
-x = y = A()
-x = A()
-y = A()
-x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-y = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testChainedAssignment2]
-import typing
-def f() -> None:
- x = 1
- y = 'x'
- x = y = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
- x = y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-[builtins fixtures/primitives.pyi]
-[out]
-
-[case testChainedAssignmentWithType]
-
-x = y = None # type: int
-x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-y = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-x = 1
-y = 1
-
-
--- Star assignment
--- ---------------
-
-
-[case testAssignListToStarExpr]
-from typing import List
-bs, cs = None, None # type: List[A], List[B]
-*bs, b = bs
-*bs, c = cs # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
-*ns, c = cs
-nc = cs
-
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-
-
--- Type aliases
--- ------------
-
-
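A minimal sketch (illustrative, not from the deleted file) of the aliasing the following cases check: binding a type to a name makes that name usable in annotations, and arguments are checked against the aliased type.

    from typing import Union

    MaybeInt = Union[int, None]

    def f(x: MaybeInt) -> None: ...

    f(1)
    f(None)
    # f('x')   # would be rejected: str is not part of the alias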
-[case testSimpleTypeAlias]
-import typing
-foo = int
-def f(x: foo) -> None: pass
-f(1)
-f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-
-[case testTypeAliasDefinedInAModule]
-import typing
-import m
-def f(x: m.foo) -> None: pass
-f(1)
-f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-[file m.py]
-import typing
-foo = int
-
-[case testTypeAliasDefinedInAModule2]
-import typing
-from m import foo
-def f(x: foo) -> None: pass
-f(1)
-f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-[file m.py]
-import typing
-foo = int
-
-
--- nonlocal and global
--- -------------------
-
-
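Sketch (illustrative only) of the rule behind the global and nonlocal cases: an assignment made through a global or nonlocal declaration is checked against the type already inferred for the outer variable.

    counter = 0

    def bump() -> None:
        global counter
        counter = counter + 1   # ok: int
        # counter = 'x'         # would be rejected: str is not int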
-[case testTypeOfGlobalUsed]
-import typing
-g = A()
-def f() -> None:
- global g
- g = B()
-
-class A(): pass
-class B(): pass
-[out]
-main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testTypeOfNonlocalUsed]
-import typing
-def f() -> None:
- a = A()
- def g() -> None:
- nonlocal a
- a = B()
-
-class A(): pass
-class B(): pass
-[out]
-main:6: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-[case testTypeOfOuterMostNonlocalUsed]
-import typing
-def f() -> None:
- a = A()
- def g() -> None:
- a = B()
- def h() -> None:
- nonlocal a
- a = A()
- a = B()
-
-class A(): pass
-class B(): pass
-[out]
-main:8: error: Incompatible types in assignment (expression has type "A", variable has type "B")
diff --git a/test-data/unit/check-super.test b/test-data/unit/check-super.test
deleted file mode 100644
index 2993113..0000000
--- a/test-data/unit/check-super.test
+++ /dev/null
@@ -1,109 +0,0 @@
--- Test cases for type checker related to super().
-
-
--- Supertype member reference
--- --------------------------
-
-
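Minimal sketch (not part of the deleted file) of the behaviour the super() cases exercise: member access through super() is resolved in the base class and checked against the base class's signatures.

    class Base:
        def f(self) -> int:
            return 0

    class Derived(Base):
        def f(self) -> int:
            return super().f() + 1   # checked against Base.f, which returns int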
-[case testAccessingSupertypeMethod]
-
-class B:
- def f(self) -> 'B': pass
-class A(B):
- def f(self) -> 'A':
- a, b = None, None # type: (A, B)
- a = super().f() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
- a = super().g() # E: "g" undefined in superclass
- b = super().f()
-[out]
-
-[case testAccessingSuperTypeMethodWithArgs]
-from typing import Any
-class B:
- def f(self, y: 'A') -> None: pass
-class A(B):
- def f(self, y: Any) -> None:
- a, b = None, None # type: (A, B)
- super().f(b) # E: Argument 1 to "f" of "B" has incompatible type "B"; expected "A"
- super().f(a)
- self.f(b)
- self.f(a)
-[out]
-
-[case testAccessingSuperInit]
-import typing
-class B:
- def __init__(self, x: A) -> None: pass
-class A(B):
- def __init__(self) -> None:
- super().__init__(B(None)) # E: Argument 1 to "__init__" of "B" has incompatible type "B"; expected "A"
- super().__init__() # E: Too few arguments for "__init__" of "B"
- super().__init__(A())
-[out]
-
-[case testAccessingSuperMemberWithDeepHierarchy]
-import typing
-class C:
- def f(self) -> None: pass
-class B(C): pass
-class A(B):
- def f(self) -> None:
- super().g() # E: "g" undefined in superclass
- super().f()
-[out]
-
-[case testAssignToBaseClassMethod]
-import typing
-class A:
- def f(self) -> None: pass
-class B(A):
- def g(self) -> None:
- super().f = None
-[out]
-main:6: error: Invalid assignment target
-
-[case testSuperWithMultipleInheritance]
-import typing
-class A:
- def f(self) -> None: pass
-class B:
- def g(self, x: int) -> None: pass
-class C(A, B):
- def f(self) -> None:
- super().f()
- super().g(1)
- super().f(1) # E: Too many arguments for "f" of "A"
- super().g() # E: Too few arguments for "g" of "B"
- super().not_there() # E: "not_there" undefined in superclass
-[out]
-
-[case testSuperWithNew]
-class A:
- def __new__(cls, x: int) -> 'A':
- return object.__new__(cls)
-
-class B(A):
- def __new__(cls, x: int, y: str = '') -> 'A':
- super().__new__(cls, 1)
- super().__new__(cls, 1, '') # E: Too many arguments for "__new__" of "A"
-B('') # E: Argument 1 to "B" has incompatible type "str"; expected "int"
-B(1)
-B(1, 'x')
-[builtins fixtures/__new__.pyi]
-
-[case testSuperWithUnknownBase]
-from typing import Any
-B = None # type: Any
-class C(B):
- def __init__(self, arg=0):
- super(C, self).__init__(arg, arg=arg)
-[out]
-
-[case testSuperSilentInDynamicFunction]
-class A:
- pass
-
-class B(A):
- def foo(self):
- super(B, self).foo() # Not an error
-[out]
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
deleted file mode 100644
index a506bce..0000000
--- a/test-data/unit/check-tuples.test
+++ /dev/null
@@ -1,927 +0,0 @@
--- Normal assignment and subtyping
--- -------------------------------
-
-
-[case testTupleAssignmentWithTupleTypes]
-from typing import Tuple
-t1 = None # type: Tuple[A]
-t2 = None # type: Tuple[B]
-t3 = None # type: Tuple[A, A]
-t4 = None # type: Tuple[A, B]
-t5 = None # type: Tuple[B, A]
-
-t1 = t2 # E: Incompatible types in assignment (expression has type "Tuple[B]", variable has type "Tuple[A]")
-t1 = t3 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A]")
-t3 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A]", variable has type "Tuple[A, A]")
-t3 = t4 # E: Incompatible types in assignment (expression has type "Tuple[A, B]", variable has type "Tuple[A, A]")
-t3 = t5 # E: Incompatible types in assignment (expression has type "Tuple[B, A]", variable has type "Tuple[A, A]")
-
-# Ok
-t1 = t1
-t2 = t2
-t3 = t3
-t4 = t4
-t5 = t5
-
-class A: pass
-class B: pass
-[builtins fixtures/tuple.pyi]
-
-[case testTupleSubtyping]
-from typing import Tuple
-t1 = None # type: Tuple[A, A]
-t2 = None # type: Tuple[A, B]
-t3 = None # type: Tuple[B, A]
-
-t2 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A, B]")
-t2 = t3 # E: Incompatible types in assignment (expression has type "Tuple[B, A]", variable has type "Tuple[A, B]")
-t3 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[B, A]")
-t3 = t2 # E: Incompatible types in assignment (expression has type "Tuple[A, B]", variable has type "Tuple[B, A]")
-
-t1 = t2
-t1 = t3
-
-class A: pass
-class B(A): pass
-[builtins fixtures/tuple.pyi]
-
-[case testTupleCompatibilityWithOtherTypes]
-from typing import Tuple
-a, o = None, None # type: (A, object)
-t = None # type: Tuple[A, A]
-
-a = t # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "A")
-t = o # E: Incompatible types in assignment (expression has type "object", variable has type "Tuple[A, A]")
-t = a # E: Incompatible types in assignment (expression has type "A", variable has type "Tuple[A, A]")
-# TODO: callable types + tuples
-
-# Ok
-o = t
-t = None
-
-class A: pass
-[builtins fixtures/tuple.pyi]
-
-[case testNestedTupleTypes]
-from typing import Tuple
-t1 = None # type: Tuple[A, Tuple[A, A]]
-t2 = None # type: Tuple[B, Tuple[B, B]]
-
-t2 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, Tuple[A, A]]", variable has type "Tuple[B, Tuple[B, B]]")
-t1 = t2
-
-class A: pass
-class B(A): pass
-[builtins fixtures/tuple.pyi]
-
-[case testNestedTupleTypes2]
-from typing import Tuple
-t1 = None # type: Tuple[A, Tuple[A, A]]
-t2 = None # type: Tuple[B, Tuple[B, B]]
-
-t2 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, Tuple[A, A]]", variable has type "Tuple[B, Tuple[B, B]]")
-t1 = t2
-
-class A: pass
-class B(A): pass
-[builtins fixtures/tuple.pyi]
-
-[case testSubtypingWithNamedTupleType]
-from typing import Tuple
-t1 = None # type: Tuple[A, A]
-t2 = None # type: tuple
-
-t1 = t2 # E: Incompatible types in assignment (expression has type Tuple[Any, ...], variable has type "Tuple[A, A]")
-t2 = t1
-
-class A: pass
-[builtins fixtures/tuple.pyi]
-
-[case testTupleInitializationWithNone]
-from typing import Tuple
-t = None # type: Tuple[A, A]
-t = None
-class A: pass
-[builtins fixtures/tuple.pyi]
-
-
--- Tuple expressions
--- -----------------
-
-
-[case testTupleExpressions]
-from typing import Tuple
-t1 = None # type: tuple
-t2 = None # type: Tuple[A]
-t3 = None # type: Tuple[A, B]
-
-a, b, c = None, None, None # type: (A, B, C)
-
-t2 = () # E: Incompatible types in assignment (expression has type "Tuple[]", variable has type "Tuple[A]")
-t2 = (a, a) # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A]")
-t3 = (a, a) # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A, B]")
-t3 = (b, b) # E: Incompatible types in assignment (expression has type "Tuple[B, B]", variable has type "Tuple[A, B]")
-t3 = (a, b, a) # E: Incompatible types in assignment (expression has type "Tuple[A, B, A]", variable has type "Tuple[A, B]")
-
-t1 = ()
-t1 = (a,)
-t2 = (a,)
-t3 = (a, b)
-t3 = (a, c)
-t3 = (None, None)
-
-class A: pass
-class B: pass
-class C(B): pass
-[builtins fixtures/tuple.pyi]
-
-[case testVoidValueInTuple]
-import typing
-(None, f()) # E: "f" does not return a value
-(f(), None) # E: "f" does not return a value
-
-def f() -> None: pass
-[builtins fixtures/tuple.pyi]
-
-
--- Indexing
--- --------
-
-
-[case testIndexingTuples]
-from typing import Tuple
-t1 = None # type: Tuple[A, B]
-t2 = None # type: Tuple[A]
-t3 = None # type: Tuple[A, B, C, D, E]
-a, b = None, None # type: (A, B)
-x = None # type: Tuple[A, B, C]
-y = None # type: Tuple[A, C, E]
-n = 0
-
-a = t1[1] # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-b = t1[0] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-t1[2] # E: Tuple index out of range
-t1[3] # E: Tuple index out of range
-t2[1] # E: Tuple index out of range
-t1[n] # E: Tuple index must be an integer literal
-t3[n:] # E: Tuple slice must be an integer literal
-b = t1[(0)] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-a = t1[0]
-b = t1[1]
-b = t1[-1]
-a = t1[(0)]
-x = t3[0:3] # type (A, B, C)
-y = t3[0:5:2] # type (A, C, E)
-x = t3[:-2] # type (A, B, C)
-
-class A: pass
-class B: pass
-class C: pass
-class D: pass
-class E: pass
-[builtins fixtures/tuple.pyi]
-
-[case testIndexingTuplesWithNegativeIntegers]
-from typing import Tuple
-t1 = None # type: Tuple[A, B]
-t2 = None # type: Tuple[A]
-a, b = None, None # type: A, B
-
-a = t1[-1] # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-b = t1[-2] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-t1[-3] # E: Tuple index out of range
-t1[-4] # E: Tuple index out of range
-b = t2[(-1)] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-a = t1[-2]
-b = t1[-1]
-a = t2[(-1)]
-
-class A: pass
-class B: pass
-[builtins fixtures/tuple.pyi]
-
-[case testAssigningToTupleItems]
-from typing import Tuple
-t = None # type: Tuple[A, B]
-n = 0
-
-t[0] = A() # E: Unsupported target for indexed assignment
-t[2] = A() # E: Unsupported target for indexed assignment
-t[n] = A() # E: Unsupported target for indexed assignment
-
-class A: pass
-class B: pass
-[builtins fixtures/tuple.pyi]
-
-
--- Multiple assignment
--- -------------------
-
-
-[case testMultipleAssignmentWithTuples]
-from typing import Tuple
-t1 = None # type: Tuple[A, B]
-t2 = None # type: Tuple[A, B, A]
-a, b = None, None # type: (A, B)
-
-a, a = t1 # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-b, b = t1 # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a, b, b = t2 # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-a, b = t1
-a, b, a = t2
-
-class A: pass
-class B: pass
-[builtins fixtures/tuple.pyi]
-
-[case testMultipleAssignmentWithInvalidNumberOfValues]
-from typing import Tuple
-t1 = None # type: Tuple[A, A, A]
-a = None # type: A
-
-a, a = t1 # E: Too many values to unpack (2 expected, 3 provided)
-a, a, a, a = t1 # E: Need more than 3 values to unpack (4 expected)
-
-a, a, a = t1
-
-class A: pass
-[builtins fixtures/tuple.pyi]
-
-[case testMultipleAssignmentWithTupleExpressionRvalue]
-
-a, b = None, None # type: (A, B)
-
-a, b = a, a # Fail
-a, b = b, a # Fail
-
-a, b = a, b
-a, a = a, a
-
-class A: pass
-class B: pass
-[builtins fixtures/tuple.pyi]
-[out]
-main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A")
-main:5: error: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-[case testSubtypingInMultipleAssignment]
-
-a, b = None, None # type: (A, B)
-
-b, b = a, b # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-b, b = b, a # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-a, b = b, b
-b, a = b, b
-
-class A: pass
-class B(A): pass
-[builtins fixtures/tuple.pyi]
-
-[case testInitializationWithMultipleValues]
-
-a, b = None, None # type: (A, B)
-
-a1, b1 = a, a # type: (A, B) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a2, b2 = b, b # type: (A, B) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-a3, b3 = a # type: (A, B) # E: '__main__.A' object is not iterable
-a4, b4 = None # type: (A, B) # E: 'builtins.None' object is not iterable
-a5, b5 = a, b, a # type: (A, B) # E: Too many values to unpack (2 expected, 3 provided)
-
-ax, bx = a, b # type: (A, B)
-
-class A: pass
-class B: pass
-[builtins fixtures/tuple.pyi]
-
-[case testMultipleAssignmentWithNonTupleRvalue]
-
-a, b = None, None # type: (A, B)
-def f(): pass
-
-a, b = None # E: 'builtins.None' object is not iterable
-a, b = a # E: '__main__.A' object is not iterable
-a, b = f # E: 'def () -> Any' object is not iterable
-
-class A: pass
-class B: pass
-[builtins fixtures/tuple.pyi]
-
-[case testMultipleAssignmentWithIndexedLvalues]
-
-a, b = None, None # type: (A, B)
-aa, bb = None, None # type: (AA, BB)
-
-a[a], b[b] = a, bb # E: Incompatible types in assignment (expression has type "A", target has type "AA")
-a[a], b[b] = aa, b # E: Incompatible types in assignment (expression has type "B", target has type "BB")
-a[aa], b[b] = aa, bb # E: Invalid index type "AA" for "A"; expected type "A"
-a[a], b[bb] = aa, bb # E: Invalid index type "BB" for "B"; expected type "B"
-a[a], b[b] = aa, bb
-
-class A:
- def __setitem__(self, x: 'A', y: 'AA') -> None: pass
-class B:
- def __setitem__(self, x: 'B', y: 'BB') -> None: pass
-
-class AA: pass
-class BB: pass
-[builtins fixtures/tuple.pyi]
-
-[case testMultipleDeclarationWithParentheses]
-
-(a, b) = (None, None) # type: int, str
-a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-b = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-a = 1
-b = ''
-
-[case testMultipleAssignmentWithExtraParentheses]
-
-a, b = None, None # type: (A, B)
-
-(a, b) = (a, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-(a, b) = (b, b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-((a), (b)) = ((a), (a)) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-((a), (b)) = ((b), (b)) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-[a, b] = a, a # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-[a, b] = b, b # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-(a, b) = (a, b)
-((a), (b)) = ((a), (b))
-[a, b] = a, b
-
-class A: pass
-class B: pass
-[builtins fixtures/tuple.pyi]
-
-[case testMultipleAssignmentUsingSingleTupleType]
-from typing import Tuple
-a, b = None, None # type: Tuple[int, str]
-a = 1
-b = ''
-a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-b = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-
-[case testMultipleAssignmentWithMixedVariables]
-a = b, c = 1, 1
-x, y = p, q = 1, 1
-u, v, w = r, s = 1, 1 # E: Need more than 2 values to unpack (3 expected)
-d, e = f, g, h = 1, 1 # E: Need more than 2 values to unpack (3 expected)
-
-
--- Assignment to starred expressions
--- ---------------------------------
-
-
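Illustrative sketch of the star-target inference covered by the cases below: the starred name collects the remaining items as a list of the joined item type.

    first, *rest = 1, 2, 3
    # first is inferred as int, rest as List[int]
    rest.append(4)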
-[case testAssignmentToStarMissingAnnotation]
-from typing import List
-t = 1, 2
-a, b, *c = 1, 2 # E: Need type annotation for variable
-aa, bb, *cc = t # E: Need type annotation for variable
-[builtins fixtures/list.pyi]
-
-[case testAssignmentToStarAnnotation]
-from typing import List
-li, lo = None, None # type: List[int], List[object]
-a, b, *c = 1, 2 # type: int, int, List[int]
-c = lo # E: Incompatible types in assignment (expression has type List[object], variable has type List[int])
-c = li
-[builtins fixtures/list.pyi]
-
-[case testAssignmentToStarCount1]
-from typing import List
-ca = None # type: List[int]
-c = [1]
-a, b, *c = 1, # E: Need more than 1 value to unpack (2 expected)
-a, b, *c = 1, 2
-a, b, *c = 1, 2, 3
-a, b, *c = 1, 2, 3, 4
-[builtins fixtures/list.pyi]
-
-[case testAssignmentToStarCount2]
-from typing import List
-ca = None # type: List[int]
-t1 = 1,
-t2 = 1, 2
-t3 = 1, 2, 3
-t4 = 1, 2, 3, 4
-c = [1]
-a, b, *c = t1 # E: Need more than 1 value to unpack (2 expected)
-a, b, *c = t2
-a, b, *c = t3
-a, b, *c = t4
-[builtins fixtures/list.pyi]
-
-[case testAssignmentToStarFromAny]
-from typing import Any, cast
-a, c = cast(Any, 1), C()
-p, *q = a
-c = a
-c = q
-
-class C: pass
-
-[case testAssignmentToComplexStar]
-from typing import List
-li = None # type: List[int]
-a, *(li) = 1,
-a, *(b, c) = 1, 2 # E: Need more than 1 value to unpack (2 expected)
-a, *(b, c) = 1, 2, 3
-a, *(b, c) = 1, 2, 3, 4 # E: Too many values to unpack (2 expected, 3 provided)
-[builtins fixtures/list.pyi]
-
-[case testAssignmentToStarFromTupleType]
-from typing import List, Tuple
-li = None # type: List[int]
-la = None # type: List[A]
-ta = None # type: Tuple[A, A, A]
-a, *la = ta
-a, *li = ta # E
-a, *na = ta
-na = la
-na = a # E
-
-class A: pass
-[builtins fixtures/list.pyi]
-[out]
-main:6: error: List item 0 has incompatible type "A"
-main:6: error: List item 1 has incompatible type "A"
-main:9: error: Incompatible types in assignment (expression has type "A", variable has type List[A])
-
-[case testAssignmentToStarFromTupleInference]
-from typing import List
-li = None # type: List[int]
-la = None # type: List[A]
-a, *l = A(), A()
-l = li # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
-l = la
-
-class A: pass
-[builtins fixtures/list.pyi]
-[out]
-
-[case testAssignmentToStarFromListInference]
-from typing import List
-li = None # type: List[int]
-la = None # type: List[A]
-a, *l = [A(), A()]
-l = li # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
-l = la
-
-class A: pass
-[builtins fixtures/list.pyi]
-[out]
-
-[case testAssignmentToStarFromTupleTypeInference]
-from typing import List, Tuple
-li = None # type: List[int]
-la = None # type: List[A]
-ta = None # type: Tuple[A, A, A]
-a, *l = ta
-l = li # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
-l = la
-
-class A: pass
-[builtins fixtures/list.pyi]
-[out]
-
-[case testAssignmentToStarFromListTypeInference]
-from typing import List
-li = None # type: List[int]
-la = None # type: List[A]
-a, *l = la
-l = li # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
-l = la
-
-class A: pass
-[builtins fixtures/list.pyi]
-[out]
-
-
--- Nested tuple assignment
--- ----------------------------
-
-
-[case testNestedTupleAssignment1]
-
-a1, b1, c1 = None, None, None # type: (A, B, C)
-a2, b2, c2 = None, None, None # type: (A, B, C)
-
-a1, (b1, c1) = a2, (b2, c2)
-a1, (a1, (b1, c1)) = a2, (a2, (b2, c2))
-a1, (a1, (a1, b1)) = a1, (a1, (a1, c1)) # Fail
-
-class A: pass
-class B: pass
-class C: pass
-[out]
-main:7: error: Incompatible types in assignment (expression has type "C", variable has type "B")
-
-[case testNestedTupleAssignment2]
-
-a1, b1, c1 = None, None, None # type: (A, B, C)
-a2, b2, c2 = None, None, None # type: (A, B, C)
-t = a1, b1
-
-a2, b2 = t
-(a2, b2), c2 = t, c1
-(a2, c2), c2 = t, c1 # Fail
-t, c2 = (a2, b2), c2
-t, c2 = (a2, a2), c2 # Fail
-t = a1, a1, a1 # Fail
-t = a1 # Fail
-a2, a2, a2 = t # Fail
-a2, = t # Fail
-a2 = t # Fail
-
-class A: pass
-class B: pass
-class C: pass
-[out]
-main:8: error: Incompatible types in assignment (expression has type "B", variable has type "C")
-main:10: error: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A, B]")
-main:11: error: Incompatible types in assignment (expression has type "Tuple[A, A, A]", variable has type "Tuple[A, B]")
-main:12: error: Incompatible types in assignment (expression has type "A", variable has type "Tuple[A, B]")
-main:13: error: Need more than 2 values to unpack (3 expected)
-main:14: error: Too many values to unpack (1 expected, 2 provided)
-main:15: error: Incompatible types in assignment (expression has type "Tuple[A, B]", variable has type "A")
-
-
--- Error messages
--- --------------
-
-
-[case testTupleErrorMessages]
-
-a = None # type: A
-
-(a, a) + a # E: Unsupported left operand type for + ("Tuple[A, A]")
-a + (a, a) # E: Unsupported operand types for + ("A" and "Tuple[A, A]")
-f((a, a)) # E: Argument 1 to "f" has incompatible type "Tuple[A, A]"; expected "A"
-(a, a).foo # E: "Tuple[A, A]" has no attribute "foo"
-
-def f(x: 'A') -> None: pass
-
-class A:
- def __add__(self, x: 'A') -> 'A': pass
-[builtins fixtures/tuple.pyi]
-
-[case testLargeTuplesInErrorMessages]
-
-a = None # type: LongTypeName
-a + (a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a) # Fail
-
-class LongTypeName:
- def __add__(self, x: 'LongTypeName') -> 'LongTypeName': pass
-[builtins fixtures/tuple.pyi]
-[out]
-main:3: error: Unsupported operand types for + ("LongTypeName" and tuple(length 50))
-
-
--- Tuple methods
--- -------------
-
-
-[case testTupleMethods]
-from typing import Tuple
-t = None # type: Tuple[int, str]
-i = 0
-s = ''
-b = bool()
-
-s = t.__len__() # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-i = t.__str__() # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-i = s in t # E: Incompatible types in assignment (expression has type "bool", variable has type "int")
-t.foo # E: "Tuple[int, str]" has no attribute "foo"
-
-i = t.__len__()
-s = t.__str__()
-b = s in t
-
-[file builtins.py]
-from typing import TypeVar, Generic
-_T = TypeVar('_T')
-class object:
- def __init__(self) -> None: pass
-class tuple(Generic[_T]):
- def __len__(self) -> int: pass
- def __str__(self) -> str: pass
- def __contains__(self, o: object) -> bool: pass
-class int: pass
-class str: pass
-class bool: pass
-class type: pass
-class function: pass
-
-
--- For loop over tuple
--- -------------------
-
-
-[case testForLoopOverTuple]
-import typing
-t = 1, 2
-for x in t:
- x = 1
- x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-[builtins fixtures/for.pyi]
-
-[case testForLoopOverEmptyTuple]
-import typing
-t = ()
-for x in t: pass # E: Need type annotation for variable
-[builtins fixtures/for.pyi]
-
-[case testForLoopOverNoneValuedTuple]
-import typing
-t = ()
-for x in None, None: pass # E: Need type annotation for variable
-[builtins fixtures/for.pyi]
-
-[case testForLoopOverTupleAndSubtyping]
-import typing
-class A: pass
-class B(A): pass
-for x in B(), A():
- x = A()
- x = B()
- x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "A")
-[builtins fixtures/for.pyi]
-
-[case testTupleIterable]
-y = 'a'
-x = sum((1,2))
-y = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-[builtins fixtures/tuple.pyi]
-
-
--- Tuple as a base type
--- --------------------
-
-
-[case testTupleBaseClass]
-import m
-[file m.pyi]
-from typing import Tuple
-class A(Tuple[int, str]):
- def f(self, x: int) -> None:
- a, b = 1, ''
- a, b = self
- b, a = self # Error
- self.f('') # Error
-[builtins fixtures/tuple.pyi]
-[out]
-tmp/m.pyi:6: error: Incompatible types in assignment (expression has type "int", variable has type "str")
-tmp/m.pyi:6: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-tmp/m.pyi:7: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-
-[case testValidTupleBaseClass2]
-from typing import Tuple
-class A(Tuple[int, str]): pass
-
-x, y = A()
-reveal_type(x) # E: Revealed type is 'builtins.int'
-reveal_type(y) # E: Revealed type is 'builtins.str'
-
-x1 = A()[0] # type: int
-x2 = A()[1] # type: int # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-A()[2] # E: Tuple index out of range
-
-class B(Tuple[int, ...]): pass
-
-z1 = B()[0] # type: int
-z2 = B()[1] # type: str # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-B()[100]
-[builtins fixtures/tuple.pyi]
-[out]
-
-[case testValidTupleBaseClass]
-from typing import Tuple
-class A(tuple): pass
-[out]
-
-[case testTupleBaseClass2-skip]
-import m
-[file m.pyi]
-# This doesn't work correctly -- no errors are reported (#867)
-from typing import Tuple
-a = None # type: A
-class A(Tuple[int, str]): pass
-x, y = a
-x() # Expected: "int" not callable
-y() # Expected: "str" not callable
-[out]
-(should fail)
-
-[case testGenericClassWithTupleBaseClass]
-from typing import TypeVar, Generic, Tuple
-T = TypeVar('T')
-class Test(Generic[T], Tuple[T]): pass
-x = Test() # type: Test[int]
-[builtins fixtures/tuple.pyi]
-[out]
-main:4: error: Generic tuple types not supported
-
-
--- Variable-length tuples (Tuple[t, ...] with literal '...')
--- ---------------------------------------------------------
-
-
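A short sketch (illustrative, assuming standard typing semantics) of the variadic form these cases cover: Tuple[T, ...] describes a homogeneous tuple of arbitrary length, and fixed-length tuples with compatible items are subtypes of it.

    from typing import Tuple

    def total(xs: Tuple[int, ...]) -> int:
        return sum(xs)

    total((1, 2, 3))     # ok: any length, all items int
    # total((1, 'x'))    # would be rejected: str is not int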
-[case testIndexingVariableLengthTuple]
-from typing import Tuple
-x = () # type: Tuple[str, ...]
-n = 5
-x[n]() # E: "str" not callable
-x[3]() # E: "str" not callable
-[builtins fixtures/tuple.pyi]
-
-[case testSubtypingVariableLengthTuple]
-from typing import Tuple
-class A: pass
-class B(A): pass
-def fa(t: Tuple[A, ...]) -> None: pass
-def fb(t: Tuple[B, ...]) -> None: pass
-ta = () # type: Tuple[A, ...]
-tb = () # type: Tuple[B, ...]
-fa(ta)
-fa(tb)
-fb(tb)
-fb(ta) # E: Argument 1 to "fb" has incompatible type Tuple[A, ...]; expected Tuple[B, ...]
-[builtins fixtures/tuple.pyi]
-
-[case testSubtypingFixedAndVariableLengthTuples]
-from typing import Tuple
-class A: pass
-class B(A): pass
-def fa(t: Tuple[A, ...]) -> None: pass
-def fb(t: Tuple[B, ...]) -> None: pass
-aa = (A(), A())
-ab = (A(), B())
-bb = (B(), B())
-fa(aa)
-fa(ab)
-fa(bb)
-fb(bb)
-fb(ab) # E: Argument 1 to "fb" has incompatible type "Tuple[A, B]"; expected Tuple[B, ...]
-fb(aa) # E: Argument 1 to "fb" has incompatible type "Tuple[A, A]"; expected Tuple[B, ...]
-[builtins fixtures/tuple.pyi]
-
-[case testSubtypingTupleIsContainer]
-from typing import Container
-a = None # type: Container[str]
-a = ()
-
-[case testSubtypingTupleIsSized]
-from typing import Sized
-a = None # type: Sized
-a = ()
-
-[case testTupleWithStarExpr1]
-# flags: --fast-parser
-a = (1, 2)
-b = (*a, '')
-reveal_type(b) # E: Revealed type is 'Tuple[builtins.int, builtins.int, builtins.str]'
-
-[case testTupleWithStarExpr2]
-a = [1]
-b = (0, *a)
-reveal_type(b) # E: Revealed type is 'builtins.tuple[builtins.int*]'
-[builtins fixtures/tuple.pyi]
-
-[case testTupleWithStarExpr3]
-a = ['']
-b = (0, *a)
-reveal_type(b) # E: Revealed type is 'builtins.tuple[builtins.object*]'
-c = (*a, '')
-reveal_type(c) # E: Revealed type is 'builtins.tuple[builtins.str*]'
-[builtins fixtures/tuple.pyi]
-
-[case testTupleWithStarExpr4]
-a = (1, 1, 'x', 'x')
-b = (1, 'x')
-a = (0, *b, '')
-[builtins fixtures/tuple.pyi]
-
-[case testTupleMeetTupleAny]
-from typing import Union, Tuple
-class A: pass
-class B: pass
-
-def f(x: Union[B, Tuple[A, A]]) -> None:
- if isinstance(x, tuple):
- reveal_type(x) # E: Revealed type is 'Tuple[__main__.A, __main__.A]'
- else:
- reveal_type(x) # E: Revealed type is '__main__.B'
-
-def g(x: Union[str, Tuple[str, str]]) -> None:
- if isinstance(x, tuple):
- reveal_type(x) # E: Revealed type is 'Tuple[builtins.str, builtins.str]'
- else:
- reveal_type(x) # E: Revealed type is 'builtins.str'
-
-[builtins fixtures/tuple.pyi]
-[out]
-
-[case testTupleMeetTUpleAnyComplex]
-from typing import Tuple, Union
-
-Pair = Tuple[int, int]
-Variant = Union[int, Pair]
-def tuplify(v: Variant) -> None:
- reveal_type(v) # E: Revealed type is 'Union[builtins.int, Tuple[builtins.int, builtins.int]]'
- if not isinstance(v, tuple):
- reveal_type(v) # E: Revealed type is 'builtins.int'
- v = (v, v)
- reveal_type(v) # E: Revealed type is 'Tuple[builtins.int, builtins.int]'
- reveal_type(v) # E: Revealed type is 'Tuple[builtins.int, builtins.int]'
- reveal_type(v[0]) # E: Revealed type is 'builtins.int'
-
-Pair2 = Tuple[int, str]
-Variant2 = Union[int, Pair2]
-def tuplify2(v: Variant2) -> None:
- if isinstance(v, tuple):
- reveal_type(v) # E: Revealed type is 'Tuple[builtins.int, builtins.str]'
- else:
- reveal_type(v) # E: Revealed type is 'builtins.int'
-[builtins fixtures/tuple.pyi]
-[out]
-
-[case testTupleMeetTupleAnyAfter]
-from typing import Tuple, Union
-
-def good(blah: Union[Tuple[int, int], int]) -> None:
- reveal_type(blah) # E: Revealed type is 'Union[Tuple[builtins.int, builtins.int], builtins.int]'
- if isinstance(blah, tuple):
- reveal_type(blah) # E: Revealed type is 'Tuple[builtins.int, builtins.int]'
- reveal_type(blah) # E: Revealed type is 'Union[Tuple[builtins.int, builtins.int], builtins.int]'
-[builtins fixtures/tuple.pyi]
-[out]
-
-[case testTupleMeetTupleVariable]
-from typing import Tuple, TypeVar, Generic, Union
-T = TypeVar('T')
-
-class A: pass
-class B1(A): pass
-class B2(A): pass
-class C: pass
-
-x = None # type: Tuple[A, ...]
-y = None # type: Tuple[Union[B1, C], Union[B2, C]]
-
-def g(x: T) -> Tuple[T, T]:
- return (x, x)
-
-z = 1
-x, y = g(z) # E: Argument 1 to "g" has incompatible type "int"; expected "Tuple[B1, B2]"
-[builtins fixtures/tuple.pyi]
-[out]
-
-[case testTupleWithUndersizedContext]
-a = ([1], 'x')
-a = ([], 'x', 1) # E: Incompatible types in assignment (expression has type "Tuple[List[int], str, int]", variable has type "Tuple[List[int], str]")
-[builtins fixtures/tuple.pyi]
-
-[case testTupleWithOversizedContext]
-a = (1, [1], 'x')
-a = (1, []) # E: Incompatible types in assignment (expression has type "Tuple[int, List[int]]", variable has type "Tuple[int, List[int], str]")
-[builtins fixtures/tuple.pyi]
-
-[case testTupleWithoutContext]
-a = (1, []) # E: Need type annotation for variable
-[builtins fixtures/tuple.pyi]
-
-[case testTupleWithUnionContext]
-from typing import List, Union, Tuple
-def f() -> Union[int, Tuple[List[str]]]:
- return ([],)
-[builtins fixtures/tuple.pyi]
-
-[case testTupleWithVariableSizedTupleContext]
-from typing import List, Tuple
-def f() -> Tuple[List[str], ...]:
- return ([],)
-[builtins fixtures/tuple.pyi]
-
-[case testTupleWithoutArgs]
-from typing import Tuple
-def f(a: Tuple) -> None: pass
-f(())
-f((1,))
-f(('', ''))
-f(0) # E: Argument 1 to "f" has incompatible type "int"; expected Tuple[Any, ...]
-[builtins fixtures/tuple.pyi]
-
-[case testTupleSingleton]
-# flags: --fast-parser
-from typing import Tuple
-def f(a: Tuple[()]) -> None: pass
-f(())
-f((1,)) # E: Argument 1 to "f" has incompatible type "Tuple[int]"; expected "Tuple[]"
-f(('', '')) # E: Argument 1 to "f" has incompatible type "Tuple[str, str]"; expected "Tuple[]"
-f(0) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[]"
-[builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test
deleted file mode 100644
index 20022cc..0000000
--- a/test-data/unit/check-type-aliases.test
+++ /dev/null
@@ -1,74 +0,0 @@
-[case testSimpleTypeAlias]
-import typing
-i = int
-def f(x: i) -> None: pass
-f(1)
-f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-
-[case testUnionTypeAlias]
-from typing import Union
-U = Union[int, str]
-def f(x: U) -> None: pass
-f(1)
-f('')
-f(()) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Union[int, str]"
-
-[case testTupleTypeAlias]
-from typing import Tuple
-T = Tuple[int, str]
-def f(x: T) -> None: pass
-f((1, 'x'))
-f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[int, str]"
-
-[case testCallableTypeAlias]
-from typing import Callable
-A = Callable[[int], None]
-f = None # type: A
-f(1)
-f('') # E: Argument 1 has incompatible type "str"; expected "int"
-
-[case testListTypeAlias]
-from typing import List
-A = List[int]
-def f(x: A) -> None: pass
-f([1])
-f(['x']) # E: List item 0 has incompatible type "str"
-[builtins fixtures/list.pyi]
-[out]
-
-[case testAnyTypeAlias]
-from typing import Any
-A = Any
-def f(x: A) -> None:
- x.foo()
-f(1)
-f('x')
-
-[case testImportUnionAlias]
-import typing
-from _m import U
-def f(x: U) -> None: pass
-f(1)
-f('x')
-f(()) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Union[int, str]"
-[file _m.py]
-from typing import Union
-U = Union[int, str]
-[builtins fixtures/tuple.pyi]
-
-[case testTypeAliasInBuiltins]
-def f(x: bytes): pass
-bytes
-f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str"
-[builtins fixtures/alias.pyi]
-
-[case testEmptyTupleTypeAlias]
-from typing import Tuple, Callable
-EmptyTuple = Tuple[()]
-x = None # type: EmptyTuple
-reveal_type(x) # E: Revealed type is 'Tuple[]'
-
-EmptyTupleCallable = Callable[[Tuple[()]], None]
-f = None # type: EmptyTupleCallable
-reveal_type(f) # E: Revealed type is 'def (Tuple[])'
-[builtins fixtures/list.pyi]
diff --git a/test-data/unit/check-type-checks.test b/test-data/unit/check-type-checks.test
deleted file mode 100644
index c4905a7..0000000
--- a/test-data/unit/check-type-checks.test
+++ /dev/null
@@ -1,113 +0,0 @@
--- Conditional type checks.
-
-
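Minimal sketch (not from the file) of the narrowing these cases test: inside an isinstance branch the variable's type is narrowed to the checked class, and it reverts outside the branch.

    def describe(x: object) -> str:
        if isinstance(x, int):
            return str(x + 1)   # x is an int here
        return repr(x)          # x is still object here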
-[case testSimpleIsinstance]
-
-x = None # type: object
-n = None # type: int
-s = None # type: str
-n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
-if isinstance(x, int):
- n = x
- s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
-[builtins fixtures/isinstance.pyi]
-
-[case testSimpleIsinstance2]
-import typing
-def f(x: object, n: int, s: str) -> None:
- n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
- if isinstance(x, int):
- n = x
- s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
- n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testSimpleIsinstance3]
-
-class A:
- x = None # type: object
- n = None # type: int
- s = None # type: str
- n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
- if isinstance(x, int):
- n = x
- s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
- else:
- n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testMultipleIsinstanceTests]
-import typing
-class A: pass
-class B(A): pass
-def f(x: object, a: A, b: B, c: int) -> None:
- if isinstance(x, A):
- if isinstance(x, B):
- b = x
- x = a
- a = x
- c = x # E: Incompatible types in assignment (expression has type "A", variable has type "int")
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testMultipleIsinstanceTests2]
-import typing
-class A: pass
-class B(A): pass
-def f(x: object, y: object, n: int, s: str) -> None:
- if isinstance(x, int):
- if isinstance(y, str):
- n = x
- s = y
- s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
- n = y # E: Incompatible types in assignment (expression has type "str", variable has type "int")
- s = y # E: Incompatible types in assignment (expression has type "object", variable has type "str")
- n = y # E: Incompatible types in assignment (expression has type "object", variable has type "int")
- n = x
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testIsinstanceAndElif]
-import typing
-def f(x: object, n: int, s: str) -> None:
- n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
- if isinstance(x, int):
- n = x
- s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
- elif isinstance(x, str):
- s = x
- n = x # E: Incompatible types in assignment (expression has type "str", variable has type "int")
- else:
- n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
- s = x # E: Incompatible types in assignment (expression has type "object", variable has type "str")
- n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testIsinstanceAndAnyType]
-from typing import Any
-def f(x: Any, n: int, s: str) -> None:
- s = x
- if isinstance(x, int):
- n = x
- s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
- s = x
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testIsinstanceAndGenericType]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class C(Generic[T]):
- def f(self, x: T) -> None: pass
-def f(x: object) -> None:
- if isinstance(x, C):
- x.f(1)
- x.f('')
- x.g() # E: C[Any] has no attribute "g"
- x.g() # E: "object" has no attribute "g"
-[builtins fixtures/isinstance.pyi]
-[out]
diff --git a/test-data/unit/check-type-promotion.test b/test-data/unit/check-type-promotion.test
deleted file mode 100644
index 0a39996..0000000
--- a/test-data/unit/check-type-promotion.test
+++ /dev/null
@@ -1,39 +0,0 @@
--- Test cases for type promotion (e.g. int -> float).
-
-
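Sketch (illustrative only) of the promotions exercised below: int is accepted where float or complex is expected, and bytearray where bytes is expected, but not the other way around.

    def takes_float(x: float) -> None: ...
    def takes_complex(x: complex) -> None: ...

    takes_float(1)       # ok: int promotes to float
    takes_complex(1.5)   # ok: float promotes to complex
    # the reverse (passing 1.5 where int is expected) would be rejected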
-[case testPromoteIntToFloat]
-def f(x: float) -> None: pass
-f(1)
-[builtins fixtures/primitives.pyi]
-
-[case testCantPromoteFloatToInt]
-def f(x: int) -> None: pass
-f(1.1) # E: Argument 1 to "f" has incompatible type "float"; expected "int"
-[builtins fixtures/primitives.pyi]
-
-[case testPromoteFloatToComplex]
-def f(x: complex) -> None: pass
-f(1)
-[builtins fixtures/primitives.pyi]
-
-[case testPromoteIntToComplex]
-def f(x: complex) -> None: pass
-f(1)
-[builtins fixtures/primitives.pyi]
-
-[case testPromoteBytearrayToByte]
-def f(x: bytes) -> None: pass
-f(bytearray())
-[builtins fixtures/primitives.pyi]
-
-[case testNarrowingDownFromPromoteTargetType]
-y = 0.0
-y = 1
-y() # E: "int" not callable
-[builtins fixtures/primitives.pyi]
-
-[case testNarrowingDownFromPromoteTargetType2]
-y = 0.0
-y = 1
-y.x # E: "int" has no attribute "x"
-[builtins fixtures/primitives.pyi]
diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test
deleted file mode 100644
index 424c8b2..0000000
--- a/test-data/unit/check-typeddict.test
+++ /dev/null
@@ -1,462 +0,0 @@
--- Create Instance
-
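Minimal sketch (assuming the mypy_extensions package, as the cases below do) of TypedDict construction: keyword arguments are checked against the declared items.

    from mypy_extensions import TypedDict

    Point = TypedDict('Point', {'x': int, 'y': int})
    p = Point(x=1, y=2)    # ok
    # Point(x='1', y=2)    # would be rejected: 'x' must be int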
-[case testCanCreateTypedDictInstanceWithKeywordArguments]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-p = Point(x=42, y=1337)
-reveal_type(p) # E: Revealed type is 'TypedDict(x=builtins.int, y=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
-[builtins fixtures/dict.pyi]
-
-[case testCanCreateTypedDictInstanceWithDictCall]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-p = Point(dict(x=42, y=1337))
-reveal_type(p) # E: Revealed type is 'TypedDict(x=builtins.int, y=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
-[builtins fixtures/dict.pyi]
-
-[case testCanCreateTypedDictInstanceWithDictLiteral]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-p = Point({'x': 42, 'y': 1337})
-reveal_type(p) # E: Revealed type is 'TypedDict(x=builtins.int, y=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
-[builtins fixtures/dict.pyi]
-
-[case testCanCreateTypedDictInstanceWithNoArguments]
-from mypy_extensions import TypedDict
-EmptyDict = TypedDict('EmptyDict', {})
-p = EmptyDict()
-reveal_type(p) # E: Revealed type is 'TypedDict(_fallback=typing.Mapping[builtins.str, builtins.None])'
-[builtins fixtures/dict.pyi]
-
-
--- Create Instance (Errors)
-
-[case testCannotCreateTypedDictInstanceWithUnknownArgumentPattern]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-p = Point(42, 1337) # E: Expected keyword arguments, {...}, or dict(...) in TypedDict constructor
-[builtins fixtures/dict.pyi]
-
-[case testCannotCreateTypedDictInstanceNonLiteralItemName]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-x = 'x'
-p = Point({x: 42, 'y': 1337}) # E: Expected TypedDict item name to be string literal
-[builtins fixtures/dict.pyi]
-
-[case testCannotCreateTypedDictInstanceWithExtraItems]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-p = Point(x=42, y=1337, z=666) # E: Expected items ['x', 'y'] but found ['x', 'y', 'z'].
-[builtins fixtures/dict.pyi]
-
-[case testCannotCreateTypedDictInstanceWithMissingItems]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-p = Point(x=42) # E: Expected items ['x', 'y'] but found ['x'].
-[builtins fixtures/dict.pyi]
-
-[case testCannotCreateTypedDictInstanceWithIncompatibleItemType]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-p = Point(x='meaning_of_life', y=1337) # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int")
-[builtins fixtures/dict.pyi]
-
-
--- Subtyping
-
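Sketch (illustrative, mirroring the cases below) of TypedDict subtyping: a TypedDict with extra keys is accepted where a narrower TypedDict with the same item types is expected, but the item types themselves are invariant.

    from mypy_extensions import TypedDict

    Point = TypedDict('Point', {'x': int, 'y': int})
    Point1D = TypedDict('Point1D', {'x': int})

    def narrow(p: Point) -> Point1D:
        return p   # ok: Point has every item Point1D requires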
-[case testCanConvertTypedDictToItself]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-def identity(p: Point) -> Point:
- return p
-[builtins fixtures/dict.pyi]
-
-[case testCanConvertTypedDictToEquivalentTypedDict]
-from mypy_extensions import TypedDict
-PointA = TypedDict('PointA', {'x': int, 'y': int})
-PointB = TypedDict('PointB', {'x': int, 'y': int})
-def identity(p: PointA) -> PointB:
- return p
-[builtins fixtures/dict.pyi]
-
-[case testCannotConvertTypedDictToSimilarTypedDictWithNarrowerItemTypes]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-ObjectPoint = TypedDict('ObjectPoint', {'x': object, 'y': object})
-def convert(op: ObjectPoint) -> Point:
- return op # E: Incompatible return value type (got "ObjectPoint", expected "Point")
-[builtins fixtures/dict.pyi]
-
-[case testCannotConvertTypedDictToSimilarTypedDictWithWiderItemTypes]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-ObjectPoint = TypedDict('ObjectPoint', {'x': object, 'y': object})
-def convert(p: Point) -> ObjectPoint:
- return p # E: Incompatible return value type (got "Point", expected "ObjectPoint")
-[builtins fixtures/dict.pyi]
-
-[case testCannotConvertTypedDictToSimilarTypedDictWithIncompatibleItemTypes]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-Chameleon = TypedDict('Chameleon', {'x': str, 'y': str})
-def convert(p: Point) -> Chameleon:
- return p # E: Incompatible return value type (got "Point", expected "Chameleon")
-[builtins fixtures/dict.pyi]
-
-[case testCanConvertTypedDictToNarrowerTypedDict]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-Point1D = TypedDict('Point1D', {'x': int})
-def narrow(p: Point) -> Point1D:
- return p
-[builtins fixtures/dict.pyi]
-
-[case testCannotConvertTypedDictToWiderTypedDict]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-Point3D = TypedDict('Point3D', {'x': int, 'y': int, 'z': int})
-def widen(p: Point) -> Point3D:
- return p # E: Incompatible return value type (got "Point", expected "Point3D")
-[builtins fixtures/dict.pyi]
-
-[case testCanConvertTypedDictToCompatibleMapping]
-from mypy_extensions import TypedDict
-from typing import Mapping
-Point = TypedDict('Point', {'x': int, 'y': int})
-def as_mapping(p: Point) -> Mapping[str, int]:
- return p
-[builtins fixtures/dict.pyi]
-
-[case testCannotConvertTypedDictToCompatibleMapping]
-from mypy_extensions import TypedDict
-from typing import Mapping
-Point = TypedDict('Point', {'x': int, 'y': int})
-def as_mapping(p: Point) -> Mapping[str, str]:
- return p # E: Incompatible return value type (got "Point", expected Mapping[str, str])
-[builtins fixtures/dict.pyi]
-
--- TODO: Fix mypy stubs so that the following passes in the test suite
---[case testCanConvertTypedDictToAnySuperclassOfMapping]
---from mypy_extensions import TypedDict
---from typing import Sized, Iterable, Container
---Point = TypedDict('Point', {'x': int, 'y': int})
---def as_sized(p: Point) -> Sized:
--- return p
---def as_iterable(p: Point) -> Iterable[str]:
--- return p
---def as_container(p: Point) -> Container[str]:
--- return p
---def as_object(p: Point) -> object:
--- return p
---[builtins fixtures/dict.pyi]
-
-[case testCannotConvertTypedDictToDictOrMutableMapping]
-from mypy_extensions import TypedDict
-from typing import Dict, MutableMapping
-Point = TypedDict('Point', {'x': int, 'y': int})
-def as_dict(p: Point) -> Dict[str, int]:
- return p # E: Incompatible return value type (got "Point", expected Dict[str, int])
-def as_mutable_mapping(p: Point) -> MutableMapping[str, int]:
- return p # E: Incompatible return value type (got "Point", expected MutableMapping[str, int])
-[builtins fixtures/dict.pyi]
-
-[case testCanConvertTypedDictToAny]
-from mypy_extensions import TypedDict
-from typing import Any
-Point = TypedDict('Point', {'x': int, 'y': int})
-def unprotect(p: Point) -> Any:
- return p
-[builtins fixtures/dict.pyi]
-
-
--- Join
-
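Sketch (illustrative only) of the join behaviour tested below: combining two TypedDicts keeps only the keys on which they agree.

    from mypy_extensions import TypedDict

    TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
    Point3D = TypedDict('Point3D', {'x': int, 'y': int, 'z': int})

    def pick(flag: bool, a: TaggedPoint, b: Point3D):
        # The joined type keeps only 'x' and 'y', the shared items.
        return a if flag else b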
-[case testJoinOfTypedDictHasOnlyCommonKeysAndNewFallback]
-from mypy_extensions import TypedDict
-TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
-Point3D = TypedDict('Point3D', {'x': int, 'y': int, 'z': int})
-p1 = TaggedPoint(type='2d', x=0, y=0)
-p2 = Point3D(x=1, y=1, z=1)
-joined_points = [p1, p2]
-reveal_type(p1) # E: Revealed type is 'TypedDict(type=builtins.str, x=builtins.int, y=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.object])'
-reveal_type(p2) # E: Revealed type is 'TypedDict(x=builtins.int, y=builtins.int, z=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
-reveal_type(joined_points) # E: Revealed type is 'builtins.list[TypedDict(x=builtins.int, y=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])]'
-[builtins fixtures/dict.pyi]
-
-[case testJoinOfTypedDictRemovesNonequivalentKeys]
-from mypy_extensions import TypedDict
-CellWithInt = TypedDict('CellWithInt', {'value': object, 'meta': int})
-CellWithObject = TypedDict('CellWithObject', {'value': object, 'meta': object})
-c1 = CellWithInt(value=1, meta=42)
-c2 = CellWithObject(value=2, meta='turtle doves')
-joined_cells = [c1, c2]
-reveal_type(c1) # E: Revealed type is 'TypedDict(value=builtins.int, meta=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
-reveal_type(c2) # E: Revealed type is 'TypedDict(value=builtins.int, meta=builtins.str, _fallback=typing.Mapping[builtins.str, builtins.object])'
-reveal_type(joined_cells) # E: Revealed type is 'builtins.list[TypedDict(value=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])]'
-[builtins fixtures/dict.pyi]
-
-[case testJoinOfDisjointTypedDictsIsEmptyTypedDict]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-Cell = TypedDict('Cell', {'value': object})
-d1 = Point(x=0, y=0)
-d2 = Cell(value='pear tree')
-joined_dicts = [d1, d2]
-reveal_type(d1) # E: Revealed type is 'TypedDict(x=builtins.int, y=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
-reveal_type(d2) # E: Revealed type is 'TypedDict(value=builtins.str, _fallback=typing.Mapping[builtins.str, builtins.str])'
-reveal_type(joined_dicts) # E: Revealed type is 'builtins.list[TypedDict(_fallback=typing.Mapping[builtins.str, builtins.None])]'
-[builtins fixtures/dict.pyi]
-
-[case testJoinOfTypedDictWithCompatibleMappingIsMapping]
-from mypy_extensions import TypedDict
-from typing import Mapping
-Cell = TypedDict('Cell', {'value': int})
-left = Cell(value=42)
-right = {'score': 999} # type: Mapping[str, int]
-joined1 = [left, right]
-joined2 = [right, left]
-reveal_type(joined1) # E: Revealed type is 'builtins.list[typing.Mapping*[builtins.str, builtins.int]]'
-reveal_type(joined2) # E: Revealed type is 'builtins.list[typing.Mapping*[builtins.str, builtins.int]]'
-[builtins fixtures/dict.pyi]
-
--- TODO: Fix mypy stubs so that the following passes in the test suite
---[case testJoinOfTypedDictWithCompatibleMappingSupertypeIsSupertype]
---from mypy_extensions import TypedDict
---from typing import Sized
---Cell = TypedDict('Cell', {'value': int})
---left = Cell(value=42)
---right = {'score': 999} # type: Sized
---joined1 = [left, right]
---joined2 = [right, left]
---reveal_type(joined1) # E: Revealed type is 'builtins.list[typing.Sized*]'
---reveal_type(joined2) # E: Revealed type is 'builtins.list[typing.Sized*]'
---[builtins fixtures/dict.pyi]
-
-[case testJoinOfTypedDictWithIncompatibleMappingIsObject]
-from mypy_extensions import TypedDict
-from typing import Mapping
-Cell = TypedDict('Cell', {'value': int})
-left = Cell(value=42)
-right = {'score': 'zero'} # type: Mapping[str, str]
-joined1 = [left, right]
-joined2 = [right, left]
-reveal_type(joined1) # E: Revealed type is 'builtins.list[builtins.object*]'
-reveal_type(joined2) # E: Revealed type is 'builtins.list[builtins.object*]'
-[builtins fixtures/dict.pyi]
-
-[case testJoinOfTypedDictWithIncompatibleTypeIsObject]
-from mypy_extensions import TypedDict
-from typing import Mapping
-Cell = TypedDict('Cell', {'value': int})
-left = Cell(value=42)
-right = 42
-joined1 = [left, right]
-joined2 = [right, left]
-reveal_type(joined1) # E: Revealed type is 'builtins.list[builtins.object*]'
-reveal_type(joined2) # E: Revealed type is 'builtins.list[builtins.object*]'
-[builtins fixtures/dict.pyi]
-
-
--- Meet
-
-[case testMeetOfTypedDictsWithCompatibleCommonKeysHasAllKeysAndNewFallback]
-from mypy_extensions import TypedDict
-from typing import TypeVar, Callable
-XY = TypedDict('XY', {'x': int, 'y': int})
-YZ = TypedDict('YZ', {'y': int, 'z': int})
-T = TypeVar('T')
-def f(x: Callable[[T, T], None]) -> T: pass
-def g(x: XY, y: YZ) -> None: pass
-reveal_type(f(g)) # E: Revealed type is 'TypedDict(x=builtins.int, y=builtins.int, z=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
-[builtins fixtures/dict.pyi]
-
-[case testMeetOfTypedDictsWithIncompatibleCommonKeysIsUninhabited]
-# flags: --strict-optional
-from mypy_extensions import TypedDict
-from typing import TypeVar, Callable
-XYa = TypedDict('XYa', {'x': int, 'y': int})
-YbZ = TypedDict('YbZ', {'y': object, 'z': int})
-T = TypeVar('T')
-def f(x: Callable[[T, T], None]) -> T: pass
-def g(x: XYa, y: YbZ) -> None: pass
-reveal_type(f(g)) # E: Revealed type is '<uninhabited>'
-[builtins fixtures/dict.pyi]
-
-[case testMeetOfTypedDictsWithNoCommonKeysHasAllKeysAndNewFallback]
-from mypy_extensions import TypedDict
-from typing import TypeVar, Callable
-X = TypedDict('X', {'x': int})
-Z = TypedDict('Z', {'z': int})
-T = TypeVar('T')
-def f(x: Callable[[T, T], None]) -> T: pass
-def g(x: X, y: Z) -> None: pass
-reveal_type(f(g)) # E: Revealed type is 'TypedDict(x=builtins.int, z=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
-[builtins fixtures/dict.pyi]
-
-# TODO: It would be more accurate for the meet to be TypedDict instead.
-[case testMeetOfTypedDictWithCompatibleMappingIsUninhabitedForNow]
-# flags: --strict-optional
-from mypy_extensions import TypedDict
-from typing import TypeVar, Callable, Mapping
-X = TypedDict('X', {'x': int})
-M = Mapping[str, int]
-T = TypeVar('T')
-def f(x: Callable[[T, T], None]) -> T: pass
-def g(x: X, y: M) -> None: pass
-reveal_type(f(g)) # E: Revealed type is '<uninhabited>'
-[builtins fixtures/dict.pyi]
-
-[case testMeetOfTypedDictWithIncompatibleMappingIsUninhabited]
-# flags: --strict-optional
-from mypy_extensions import TypedDict
-from typing import TypeVar, Callable, Mapping
-X = TypedDict('X', {'x': int})
-M = Mapping[str, str]
-T = TypeVar('T')
-def f(x: Callable[[T, T], None]) -> T: pass
-def g(x: X, y: M) -> None: pass
-reveal_type(f(g)) # E: Revealed type is '<uninhabited>'
-[builtins fixtures/dict.pyi]
-
-# TODO: It would be more accurate for the meet to be TypedDict instead.
-[case testMeetOfTypedDictWithCompatibleMappingSuperclassIsUninhabitedForNow]
-# flags: --strict-optional
-from mypy_extensions import TypedDict
-from typing import TypeVar, Callable, Iterable
-X = TypedDict('X', {'x': int})
-I = Iterable[str]
-T = TypeVar('T')
-def f(x: Callable[[T, T], None]) -> T: pass
-def g(x: X, y: I) -> None: pass
-reveal_type(f(g)) # E: Revealed type is '<uninhabited>'
-[builtins fixtures/dict.pyi]
-
-
--- Constraint Solver
-
--- TODO: Figure out some way to trigger the ConstraintBuilderVisitor.visit_typeddict_type() path.
-
-
--- Methods
-
--- TODO: iter() doesn't accept TypedDictType as an argument type. Figure out why.
---[case testCanCallMappingMethodsOnTypedDict]
---from mypy_extensions import TypedDict
---Cell = TypedDict('Cell', {'value': int})
---c = Cell(value=42)
---c['value']
---iter(c)
---len(c)
---'value' in c
---c.keys()
---c.items()
---c.values()
---c.get('value')
---c == c
---c != c
---[builtins fixtures/dict.pyi]
-
-
--- Special Method: __getitem__
-
-[case testCanGetItemOfTypedDictWithValidStringLiteralKey]
-from mypy_extensions import TypedDict
-TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
-p = TaggedPoint(type='2d', x=42, y=1337)
-reveal_type(p['type']) # E: Revealed type is 'builtins.str'
-reveal_type(p['x']) # E: Revealed type is 'builtins.int'
-reveal_type(p['y']) # E: Revealed type is 'builtins.int'
-[builtins fixtures/dict.pyi]
-
-[case testCanGetItemOfTypedDictWithValidBytesOrUnicodeLiteralKey]
-# flags: --python-version 2.7
-from mypy_extensions import TypedDict
-Cell = TypedDict('Cell', {'value': int})
-c = Cell(value=42)
-reveal_type(c['value']) # E: Revealed type is 'builtins.int'
-reveal_type(c[u'value']) # E: Revealed type is 'builtins.int'
-[builtins_py2 fixtures/dict.pyi]
-
-[case testCannotGetItemOfTypedDictWithInvalidStringLiteralKey]
-from mypy_extensions import TypedDict
-TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
-p = TaggedPoint(type='2d', x=42, y=1337)
-p['z'] # E: 'z' is not a valid item name; expected one of ['type', 'x', 'y']
-[builtins fixtures/dict.pyi]
-
-[case testCannotGetItemOfTypedDictWithNonLiteralKey]
-from mypy_extensions import TypedDict
-from typing import Union
-TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
-p = TaggedPoint(type='2d', x=42, y=1337)
-def get_coordinate(p: TaggedPoint, key: str) -> Union[str, int]:
- return p[key] # E: Cannot prove expression is a valid item name; expected one of ['type', 'x', 'y']
-[builtins fixtures/dict.pyi]
-
-
--- Special Method: __setitem__
-
-[case testCanSetItemOfTypedDictWithValidStringLiteralKeyAndCompatibleValueType]
-from mypy_extensions import TypedDict
-TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
-p = TaggedPoint(type='2d', x=42, y=1337)
-p['type'] = 'two_d'
-p['x'] = 1
-[builtins fixtures/dict.pyi]
-
-[case testCannotSetItemOfTypedDictWithIncompatibleValueType]
-from mypy_extensions import TypedDict
-TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
-p = TaggedPoint(type='2d', x=42, y=1337)
-p['x'] = 'y' # E: Argument 2 has incompatible type "str"; expected "int"
-[builtins fixtures/dict.pyi]
-
-[case testCannotSetItemOfTypedDictWithInvalidStringLiteralKey]
-from mypy_extensions import TypedDict
-TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
-p = TaggedPoint(type='2d', x=42, y=1337)
-p['z'] = 1 # E: 'z' is not a valid item name; expected one of ['type', 'x', 'y']
-[builtins fixtures/dict.pyi]
-
-[case testCannotSetItemOfTypedDictWithNonLiteralKey]
-from mypy_extensions import TypedDict
-from typing import Union
-TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
-p = TaggedPoint(type='2d', x=42, y=1337)
-def set_coordinate(p: TaggedPoint, key: str, value: int) -> None:
- p[key] = value # E: Cannot prove expression is a valid item name; expected one of ['type', 'x', 'y']
-[builtins fixtures/dict.pyi]
-
-
--- Special Method: get
-
--- TODO: Implement support for these cases:
---[case testGetOfTypedDictWithValidStringLiteralKeyReturnsPreciseType]
---[case testGetOfTypedDictWithInvalidStringLiteralKeyIsError]
---[case testGetOfTypedDictWithNonLiteralKeyReturnsImpreciseType]
-
-
--- isinstance
-
--- TODO: Implement support for this case.
---[case testCannotIsInstanceTypedDictType]
-
--- scoping
-[case testTypedDictInClassNamespace]
-# https://github.com/python/mypy/pull/2553#issuecomment-266474341
-from mypy_extensions import TypedDict
-class C:
- def f(self):
- A = TypedDict('A', {'x': int})
- def g(self):
- A = TypedDict('A', {'y': int})
-C.A # E: "C" has no attribute "A"
-[builtins fixtures/dict.pyi]
-
-[case testTypedDictInFunction]
-from mypy_extensions import TypedDict
-def f() -> None:
- A = TypedDict('A', {'x': int})
-A # E: Name 'A' is not defined
-[builtins fixtures/dict.pyi]
diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test
deleted file mode 100644
index 5ae96f9..0000000
--- a/test-data/unit/check-typevar-values.test
+++ /dev/null
@@ -1,505 +0,0 @@
--- Test cases for type variables with values restriction.
-
-
-[case testCallGenericFunctionWithTypeVarValueRestriction]
-from typing import TypeVar
-T = TypeVar('T', int, str)
-def f(x: T) -> None: pass
-f(1)
-f('x')
-f(object()) # E: Type argument 1 of "f" has incompatible value "object"
-
-[case testCallGenericFunctionWithTypeVarValueRestrictionUsingContext]
-from typing import TypeVar, List
-T = TypeVar('T', int, str)
-def f(x: T) -> List[T]: pass
-i = [1]
-s = ['x']
-o = [object()]
-i = f(1)
-s = f('')
-o = f(1) # E: Type argument 1 of "f" has incompatible value "object"
-[builtins fixtures/list.pyi]
-
-[case testCallGenericFunctionWithTypeVarValueRestrictionAndAnyArgs]
-from typing import TypeVar, Any, cast
-T = TypeVar('T', int, str)
-def f(x: T) -> None: pass
-f(cast(Any, object()))
-[out]
-
-[case testCallGenericFunctionWithTypeVarValueRestrictionInDynamicFunc]
-from typing import TypeVar, Any
-T = TypeVar('T', int, str)
-def f(x: T) -> None: pass
-def g():
- f(object())
-[out]
-
-[case testCallGenericFunctionWithTypeVarValueRestrictionUsingSubtype]
-from typing import TypeVar
-T = TypeVar('T', int, str)
-def f(x: T) -> None: pass
-class S(str): pass
-f(S())
-[out]
-
-[case testCheckGenericFunctionBodyWithTypeVarValues]
-from typing import TypeVar
-class A:
- def f(self, x: int) -> A: return self
-class B:
- def f(self, x: int) -> B: return self
-AB = TypeVar('AB', A, B)
-def f(x: AB) -> AB:
- x = x.f(1)
- return x.f(1)
-
-[case testCheckGenericFunctionBodyWithTypeVarValues2]
-from typing import TypeVar
-class A:
- def f(self) -> A: return A()
- def g(self) -> B: return B()
-class B:
- def f(self) -> A: return A()
- def g(self) -> B: return B()
-AB = TypeVar('AB', A, B)
-def f(x: AB) -> AB:
- return x.f() # Error
-def g(x: AB) -> AB:
- return x.g() # Error
-[out]
-main:10: error: Incompatible return value type (got "A", expected "B")
-main:12: error: Incompatible return value type (got "B", expected "A")
-
-[case testTypeInferenceAndTypeVarValues]
-from typing import TypeVar
-class A:
- def f(self) -> A: return self
- def g(self) -> B: return B()
-class B:
- def f(self) -> B: return self
- def g(self) -> B: return B()
-AB = TypeVar('AB', A, B)
-def f(x: AB) -> AB:
- y = x
- if y:
- return y.f()
- else:
- return y.g() # E: Incompatible return value type (got "B", expected "A")
-[out]
-
-[case testTypeDeclaredBasedOnTypeVarWithValues]
-from typing import TypeVar
-T = TypeVar('T', int, str)
-def f(x: T) -> T:
- a = None # type: T
- b = None # type: T
- a = x
- b = x
- a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
- b = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-[out]
-
-[case testIsinstanceAndTypeVarValues]
-from typing import TypeVar
-T = TypeVar('T', int, str)
-def f(x: T) -> T:
- if isinstance(x, int):
- return 2
-def g(x: T) -> T:
- if isinstance(x, str):
- return ''
-def h(x: T) -> T:
- if isinstance(x, int):
- return '' # E: Incompatible return value type (got "str", expected "int")
- return x
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testIsinstanceAndTypeVarValues2]
-from typing import TypeVar
-T = TypeVar('T', int, str)
-def f(x: T) -> T:
- if isinstance(x, int):
- return 2
- else:
- return ''
-def g(x: T) -> T:
- if isinstance(x, int):
- return '' # E: Incompatible return value type (got "str", expected "int")
- else:
- return 2 # E: Incompatible return value type (got "int", expected "str")
- return x
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testIsinstanceAndTypeVarValues3]
-from typing import TypeVar
-T = TypeVar('T', int, str)
-def f(x: T) -> T:
- if isinstance(x, int):
- y = 1
- else:
- y = ''
- return y
-[builtins fixtures/isinstance.pyi]
-
-[case testIsinstanceAndTypeVarValues4]
-from typing import TypeVar
-T = TypeVar('T', int, str)
-def f(x: T) -> T:
- if isinstance(x, int):
- y = 1
- else:
- y = object()
- return y # E: Incompatible return value type (got "object", expected "str")
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testIsinstanceAndTypeVarValues5]
-from typing import TypeVar
-T = TypeVar('T', int, str)
-def f(x: T) -> T:
- if isinstance(x, int):
- y = object()
- else:
- y = ''
- return y # E: Incompatible return value type (got "object", expected "int")
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testIsinstanceWithUserDefinedTypeAndTypeVarValues]
-from typing import TypeVar
-class A: pass
-class B: pass
-T = TypeVar('T', A, B)
-def f(x: T) -> None:
- y = x
- if isinstance(x, A):
- # This is only checked when x is A, since A and B are not considered overlapping.
- x = y
- x = A()
- else:
- x = B()
- x = y
- x.foo() # E: "B" has no attribute "foo"
-S = TypeVar('S', int, str)
-def g(x: S) -> None:
- y = x
- if isinstance(x, int):
- x = y
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testIsinstanceWithUserDefinedTypeAndTypeVarValues2]
-from typing import TypeVar
-class S(str): pass
-T = TypeVar('T', S, int)
-def f(x: T) -> None:
- y = x
- if isinstance(x, S):
- # This is checked only when type of x is str.
- x = y
- x = S()
- x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "S")
- else:
- x = y
- x = 1
- x = S() # E: Incompatible types in assignment (expression has type "S", variable has type "int")
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testTypeVarValuesAndNestedCalls]
-from typing import TypeVar
-T = TypeVar('T', int, str)
-def f(m: T) -> int: pass
-def h(x: int) -> int: pass
-def g(a: T) -> None:
- h(f(a))
-[out]
-
-[case testGenericTypeWithTypevarValues]
-from typing import TypeVar, Generic, Any
-X = TypeVar('X', int, str)
-class A(Generic[X]): pass
-a = None # type: A[int]
-b = None # type: A[str]
-d = None # type: A[object] # E: Type argument 1 of "A" has incompatible value "object"
-c = None # type: A[Any]
-
-[case testConstructGenericTypeWithTypevarValuesAndTypeInference]
-from typing import TypeVar, Generic, Any, cast
-X = TypeVar('X', int, str)
-class A(Generic[X]):
- def __init__(self, x: X) -> None: pass
-A(1)
-A('x')
-A(cast(Any, object()))
-A(object()) # E: Type argument 1 of "A" has incompatible value "object"
-
-[case testGenericTypeWithTypevarValuesAndTypevarArgument]
-from typing import TypeVar, Generic
-class C: pass
-X = TypeVar('X', int, str)
-Y = TypeVar('Y', int, C)
-Z = TypeVar('Z')
-class D(Generic[X]):
- def __init__(self, x: X) -> None: pass
-def f(x: X) -> None:
- a = None # type: D[X]
-def g(x: Y) -> None:
- a = None # type: D[Y]
-def h(x: Z) -> None:
- a = None # type: D[Z]
-[out]
-main:11: error: Invalid type argument value for "D"
-main:13: error: Type variable "Z" not valid as type argument value for "D"
-
-[case testGenericTypeWithTypevarValuesAndSubtypePromotion]
-from typing import TypeVar, Generic
-X = TypeVar('X', int, str)
-class S(str): pass
-class C(Generic[X]):
- def __init__(self, x: X) -> None: pass
-x = None # type: C[str]
-y = C(S())
-x = y
-y = x
-c_int = C(1) # type: C[int]
-y = c_int # E: Incompatible types in assignment (expression has type C[int], variable has type C[str])
-
-[case testGenericTypeBodyWithTypevarValues]
-from typing import TypeVar, Generic
-class A:
- def f(self, x: int) -> None: pass
- def g(self, x: int) -> None: pass
- def h(self, x: str) -> None: pass
-class B:
- def f(self, x: int) -> None: pass
- def g(self, x: str) -> None: pass
- def h(self, x: int) -> None: pass
-X = TypeVar('X', A, B)
-class C(Generic[X]):
- def f(self, x: X) -> None:
- x.f(1)
- x.g(1) # E: Argument 1 to "g" of "B" has incompatible type "int"; expected "str"
- x.h(1) # E: Argument 1 to "h" of "A" has incompatible type "int"; expected "str"
-[out]
-
-[case testAttributeInGenericTypeWithTypevarValues1]
-from typing import TypeVar, Generic
-X = TypeVar('X', int, str)
-class C(Generic[X]):
- x = None # type: X
- def f(self, x: X) -> None:
- self.x = x
- self.x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-[out]
-
-[case testAttributeInGenericTypeWithTypevarValues2]
-from typing import TypeVar, Generic
-X = TypeVar('X', int, str)
-class C(Generic[X]):
- x = None # type: X
-cn = C() # type: C[int]
-cn.x = 1
-cn.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-cs = C() # type: C[str]
-cs.x = ''
-cs.x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-
-[case testInferredAttributeInGenericClassBodyWithTypevarValues]
-from typing import TypeVar, Generic
-X = TypeVar('X', int, str)
-class C(Generic[X]):
- x = 1
-C.x = 1
-C.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testMultipleClassTypevarsWithValues1]
-from typing import TypeVar, Generic
-class A:
- def f(self, x: int) -> None: pass
-class B:
- def f(self, x: str) -> None: pass
-X = TypeVar('X', A, B)
-Y = TypeVar('Y', int, str)
-class C(Generic[X, Y]):
- def f(self, x: X, y: Y) -> None:
- x.f(y)
-[out]
-main:10: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
-main:10: error: Argument 1 to "f" of "B" has incompatible type "int"; expected "str"
-
-[case testMultipleClassTypevarsWithValues2]
-from typing import TypeVar, Generic
-class A: pass
-class B: pass
-X = TypeVar('X', A, B)
-Y = TypeVar('Y', int, str)
-class C(Generic[X, Y]): pass
-a = None # type: C[A, int]
-b = None # type: C[B, str]
-c = None # type: C[int, int] # E: Type argument 1 of "C" has incompatible value "int"
-d = None # type: C[A, A] # E: Type argument 2 of "C" has incompatible value "A"
-
-[case testCallGenericFunctionUsingMultipleTypevarsWithValues]
-from typing import TypeVar
-class A: pass
-class B: pass
-X = TypeVar('X', A, B)
-Y = TypeVar('Y', int, str)
-def f(x: X, y: Y) -> None: pass
-f(A(), '')
-f(B(), 1)
-f(A(), A()) # E: Type argument 2 of "f" has incompatible value "A"
-f(1, 1) # E: Type argument 1 of "f" has incompatible value "int"
-
-[case testGenericFunctionWithNormalAndRestrictedTypevar]
-from typing import TypeVar, Generic
-X = TypeVar('X')
-Y = TypeVar('Y', int, str)
-class C(Generic[Y]):
- def __init__(self, y: Y) -> None: pass
-def f(x: X, y: Y, z: int) -> None:
- C(y)
- C(x) # Error
- z = x # Error
- z = y # Error
- y.foo # Error
-[out]
-main:8: error: Type argument 1 of "C" has incompatible value "X"
-main:9: error: Incompatible types in assignment (expression has type "X", variable has type "int")
-main:10: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-main:11: error: "int" has no attribute "foo"
-main:11: error: "str" has no attribute "foo"
-
-[case testTypeVarWithValueInferredFromObjectReturnTypeContext]
-from typing import TypeVar
-T = TypeVar('T', int, str)
-def c1(x: object) -> None: pass
-def c2(x: int) -> None: pass
-def c3(x: str) -> None: pass
-def g(x: T) -> T: pass
-c1(g(''))
-c2(g(1))
-c3(g(''))
-c2(g('')) # E: Argument 1 to "c2" has incompatible type "str"; expected "int"
-c3(g(1)) # E: Argument 1 to "c3" has incompatible type "int"; expected "str"
-
-[case testTypeVarWithValueInferredFromObjectReturnTypeContext2]
-from typing import TypeVar
-T = TypeVar('T', int, str)
-class ss(str): pass
-def c(x: ss) -> None: pass
-def g(x: T) -> T: pass
-c(g(''))
-c(g(1))
-[out]
-main:6: error: Argument 1 to "c" has incompatible type "str"; expected "ss"
-main:7: error: Argument 1 to "c" has incompatible type "int"; expected "ss"
-
-
--- Special cases
--- -------------
-
-
-[case testTypevarValuesSpecialCase1]
-from typing import TypeVar, Generic
-from abc import abstractmethod
-T = TypeVar('T', int, str)
-class A(Generic[T]):
- @abstractmethod
- def f(self) -> 'A[T]': pass
-class B(A[str]):
- @abstractmethod
- def f(self) -> 'B': pass
-class C(A[str]):
- @abstractmethod
- def f(self) -> int: # E: Return type of "f" incompatible with supertype "A"
- pass
-[out]
-
-[case testDefaultArgumentValueInGenericClassWithTypevarValues]
-from typing import TypeVar, Generic
-T = TypeVar('T', int, str)
-class C(Generic[T]):
- def f(self, x: int = None) -> None: pass
-
-[case testTypevarValuesWithOverloadedFunctionSpecialCase]
-from typing import TypeVar, overload, Callable
-
-T = TypeVar('T', int, str)
-def f(x: T) -> None:
- y = m(g, x)
- x = y
- y = object()
-
-A = TypeVar('A')
-R = TypeVar('R')
-def m(f: Callable[[A], R], it: A) -> A: pass
-
-@overload
-def g(x: int) -> int: return x
-@overload
-def g(x: str) -> str: return x
-[out]
-main:7: error: Incompatible types in assignment (expression has type "object", variable has type "int")
-main:7: error: Incompatible types in assignment (expression has type "object", variable has type "str")
-
-[case testGenericFunctionSubtypingWithTypevarValues]
-from typing import TypeVar
-class A: pass
-T = TypeVar('T', int, str)
-U = TypeVar('U', str, A, int)
-def f(x: T) -> T: pass
-def g(x: U) -> U: pass
-a = f
-a = f
-a = g
-b = g
-b = g
-b = f # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[U], U])
-
-[case testInnerFunctionWithTypevarValues]
-from typing import TypeVar
-T = TypeVar('T', int, str)
-U = TypeVar('U', int, str)
-def outer(x: T) -> T:
- def inner(y: T) -> T:
- return x
- def inner2(y: U) -> U:
- return y
- inner(x)
- inner(3) # E: Argument 1 to "inner" has incompatible type "int"; expected "str"
- inner2(x)
- inner2(3)
- outer(3)
- return x
-[out]
-
-[case testInnerFunctionMutualRecursionWithTypevarValues]
-from typing import TypeVar
-T = TypeVar('T', int, str)
-def outer(x: T) -> T:
- def inner1(y: T) -> T:
- return inner2(y)
- def inner2(y: T) -> T:
- return inner1('a') # E: Argument 1 to "inner1" has incompatible type "str"; expected "int"
- return inner1(x)
-[out]
-
-[case testClassMemberTypeVarInFunctionBody]
-from typing import TypeVar
-class C:
- T = TypeVar('T', int)
- def f(self, x: T) -> T:
- A = C.T
- return x
-
-[case testParameterLessGenericAsRestriction]
-from typing import Sequence, Iterable, TypeVar
-S = TypeVar('S', Sequence, Iterable)
-def my_len(s: S) -> None: pass
-def crash() -> None: my_len((0,))
diff --git a/test-data/unit/check-underscores.test b/test-data/unit/check-underscores.test
deleted file mode 100644
index a1d88cb..0000000
--- a/test-data/unit/check-underscores.test
+++ /dev/null
@@ -1,16 +0,0 @@
-[case testUnderscoresRequire36]
-# flags: --fast-parser --python-version 3.5
-x = 1000_000 # E: Underscores in numeric literals are only supported in Python 3.6
-[out]
-
-[case testUnderscoresSyntaxError]
-# flags: --fast-parser --python-version 3.6
-x = 1000_000_ # E: invalid token
-[out]
-
-[case testUnderscoresBasics]
-# flags: --fast-parser --python-version 3.6
-x: int
-x = 1000_000
-x = 0x_FF_FF_FF_FF
-y: str = 1000_000.000_001 # E: Incompatible types in assignment (expression has type "float", variable has type "str")
diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test
deleted file mode 100644
index c51cb06..0000000
--- a/test-data/unit/check-unions.test
+++ /dev/null
@@ -1,219 +0,0 @@
--- Type checking of union types
-
-[case testUnion1]
-from typing import Union
-def f(x: Union[int, str]) -> None:
- if isinstance(x, int):
- y = 1
- y = x
- elif isinstance(x, str):
- z = 'a'
- z = x
-[builtins fixtures/isinstance.pyi]
-
-[case testUnion2]
-from typing import Union
-def f(x: Union[int, str]) -> None:
- if isinstance(x, int):
- y = 1
- y = x
- else:
- z = 'a'
- z = x
-[builtins fixtures/isinstance.pyi]
-
-[case testUnion3]
-from typing import Union
-def f(x: Union[int, str]) -> None:
- if isinstance(x, int):
- y = 1
- y = x
- else:
- z = 2
- z = x # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testUnionAnyIsInstance]
-from typing import Any, Union
-
-def func(v:Union[int, Any]) -> None:
- if isinstance(v, int):
- reveal_type(v) # E: Revealed type is 'builtins.int'
- else:
- reveal_type(v) # E: Revealed type is 'Any'
-[builtins fixtures/isinstance.pyi]
-[out]
-
-[case testUnionAttributeAccess]
-from typing import Union
-
-class A: y = 1
-class B: y = 2
-class C: pass
-
-w = None # type: Union[A, B]
-x = None # type: Union[A, C]
-y = None # type: int
-z = None # type: str
-
-y = w.y
-z = w.y # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-w.y = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-y = x.y # E: Some element of union has no attribute "y"
-z = x.y # E: Some element of union has no attribute "y"
-
-[builtins fixtures/isinstance.pyi]
-
-[case testUnionMethodCalls]
-from typing import Union
-
-class A:
- def foo(self) -> int: pass
-class B:
- def foo(self) -> int: pass
-class C:
- def foo(self) -> str: pass
-
-x = None # type: Union[A, B]
-y = None # type: Union[A, C]
-i = None # type: int
-
-x.foo()
-y.foo()
-i = x.foo()
-i = y.foo() # E: Incompatible types in assignment (expression has type "Union[int, str]", variable has type "int")
-
-[builtins fixtures/isinstance.pyi]
-
-[case testUnionIndexing]
-from typing import Union, List
-x = None # type: Union[List[int], str]
-x[2]
-x[2] + 1 # E: Unsupported operand types for + (likely involving Union)
-[builtins fixtures/isinstancelist.pyi]
-
-[case testUnionAsOverloadArg]
-from typing import Union, overload
-@overload
-def f(x: Union[int, str]) -> int: pass
-@overload
-def f(x: type) -> str: pass
-x = 0
-x = f(1)
-x = f('')
-s = ''
-s = f(int)
-s = f(1) # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-x = f(int) # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testUnionWithNoneItem]
-from typing import Union
-def f() -> Union[int, None]: pass
-x = 1
-x = f()
-
-[case testOptional]
-from typing import Optional
-def f(x: Optional[int]) -> None: pass
-f(1)
-f(None)
-f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-
-[case testUnionSimplificationGenericFunction]
-from typing import TypeVar, Union, List
-T = TypeVar('T')
-def f(x: List[T]) -> Union[T, int]: pass
-def g(y: str) -> None: pass
-a = f([1])
-g(a) # E: Argument 1 to "g" has incompatible type "int"; expected "str"
-[builtins fixtures/list.pyi]
-
-[case testUnionSimplificationGenericClass]
-from typing import TypeVar, Union, Generic
-T = TypeVar('T')
-U = TypeVar('U')
-class C(Generic[T, U]):
- def f(self, x: str) -> Union[T, U]: pass
-a = C() # type: C[int, int]
-b = a.f('a')
-a.f(b) # E: Argument 1 to "f" of "C" has incompatible type "int"; expected "str"
-
-[case testUnionOrderEquivalence]
-from typing import Union
-
-def foo(): pass
-
-S = str
-T = int
-
-if foo():
- def f(x: Union[int, str]) -> None: pass
-elif foo():
- def f(x: Union[str, int]) -> None: pass
-elif foo():
- def f(x: Union[int, str, int, int, str]) -> None: pass
-elif foo():
- def f(x: Union[int, str, float]) -> None: pass # E: All conditional function variants must have identical signatures
-elif foo():
- def f(x: Union[S, T]) -> None: pass
-elif foo():
- def f(x: Union[str]) -> None: pass # E: All conditional function variants must have identical signatures
-else:
- def f(x: Union[Union[int, T], Union[S, T], str]) -> None: pass
-
-# Checks bidirectionality of testing. The first definition of g is consistent with
-# the second, but not vice-versa.
-if foo():
- def g(x: Union[int, str, bytes]) -> None: pass
-else:
- def g(x: Union[int, str]) -> None: pass # E: All conditional function variants must have identical signatures
-
-[case testUnionSimplificationSpecialCases]
-from typing import Any, TypeVar, Union
-
-class C(Any): pass
-
-T = TypeVar('T')
-S = TypeVar('S')
-def u(x: T, y: S) -> Union[S, T]: pass
-
-a = None # type: Any
-
-reveal_type(u(C(), None)) # E: Revealed type is '__main__.C*'
-reveal_type(u(None, C())) # E: Revealed type is '__main__.C*'
-
-# This will be fixed later
-reveal_type(u(C(), a)) # E: Revealed type is 'Any'
-reveal_type(u(a, C())) # E: Revealed type is 'Any'
-
-reveal_type(u(C(), C())) # E: Revealed type is '__main__.C*'
-reveal_type(u(a, a)) # E: Revealed type is 'Any'
-
-[case testUnionSimplificationSpecialCase2]
-from typing import Any, TypeVar, Union
-
-class C(Any): pass
-
-T = TypeVar('T')
-S = TypeVar('S')
-def u(x: T, y: S) -> Union[S, T]: pass
-
-def f(x: T) -> None:
- reveal_type(u(C(), x)) # E: Revealed type is 'Union[T`-1, __main__.C*]'
- reveal_type(u(x, C())) # E: Revealed type is 'Union[__main__.C*, T`-1]'
-
-[case testUnionSimplificationSpecialCase3]
-from typing import Any, TypeVar, Generic, Union
-
-class C(Any): pass
-
-V = TypeVar('V')
-T = TypeVar('T')
-
-class M(Generic[V]):
- def get(self, default: T) -> Union[V, T]: ...
-
-def f(x: M[C]) -> None:
- y = x.get(None)
- reveal_type(y) # E: Revealed type is '__main__.C'
diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test
deleted file mode 100644
index a18a42b..0000000
--- a/test-data/unit/check-unreachable-code.test
+++ /dev/null
@@ -1,459 +0,0 @@
--- Type checker test cases for conditional checks that result in some
--- blocks classified as unreachable (they are not type checked or semantically
--- analyzed).
---
--- For example, we skip blocks that will not be executed on the active
--- Python version.
-
-[case testConditionalTypeAliasPY3]
-import typing
-def f(): pass
-PY3 = f()
-if PY3:
- t = int
- x = object() + 'x' # E: Unsupported left operand type for + ("object")
-else:
- t = str
- y = 'x' / 1
-x
-z = 1 # type: t
-
-[case testConditionalTypeAliasPY3_python2]
-import typing
-def f(): pass
-PY3 = f()
-if PY3:
- t = int
- x = object() + 'x'
-else:
- t = str
- y = 'x' / 1 # E: "str" has no attribute "__div__"
-y
-z = '' # type: t
-
-[case testConditionalAssignmentPY2]
-import typing
-def f(): pass
-PY2 = f()
-if PY2:
- x = object() + 'x'
-else:
- y = 'x' / 1 # E: Unsupported left operand type for / ("str")
-y
-
-[case testConditionalAssignmentPY2_python2]
-import typing
-def f(): pass
-PY2 = f()
-if PY2:
- x = object() + 'x' # E: Unsupported left operand type for + ("object")
-else:
- y = 'x' / 1
-x
-
-[case testConditionalImport]
-import typing
-def f(): pass
-PY2 = f()
-if PY2:
- import fuzzybar
- from barbar import *
- from pawwaw import a, bc
-else:
- import m
-[file m.py]
-import typing
-x = 1
-x = 'a'
-[out]
-tmp/m.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testNegatedMypyConditional]
-import typing
-MYPY = 0
-if not MYPY:
- import xyz753
-else:
- import pow123 # E
-[builtins fixtures/bool.pyi]
-[out]
-main:6: error: Cannot find module named 'pow123'
-main:6: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-
-[case testMypyConditional]
-import typing
-MYPY = 0
-if MYPY:
- None + 1 # E: Unsupported left operand type for + (None)
-else:
- None + ''
-[builtins fixtures/bool.pyi]
-
-[case testTypeCheckingConditional]
-import typing
-if typing.TYPE_CHECKING:
- import pow123 # E
-else:
- import xyz753
-[out]
-main:3: error: Cannot find module named 'pow123'
-main:3: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-
-[case testTypeCheckingConditionalFromImport]
-from typing import TYPE_CHECKING
-if TYPE_CHECKING:
- import pow123 # E
-else:
- import xyz753
-[out]
-main:3: error: Cannot find module named 'pow123'
-main:3: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-
-[case testNegatedTypeCheckingConditional]
-import typing
-if not typing.TYPE_CHECKING:
- import pow123 # E
-else:
- import xyz753
-[builtins fixtures/bool.pyi]
-[out]
-main:5: error: Cannot find module named 'xyz753'
-main:5: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-
-[case testUndefinedTypeCheckingConditional]
-if not TYPE_CHECKING: # E
- import pow123
-else:
- import xyz753
-[builtins fixtures/bool.pyi]
-[out]
-main:1: error: Name 'TYPE_CHECKING' is not defined
-main:4: error: Cannot find module named 'xyz753'
-main:4: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-
-[case testConditionalClassDefPY3]
-def f(): pass
-PY3 = f()
-if PY3:
- pass
-else:
- class X(object):
- pass
-
-[case testUnreachabilityAndElifPY3]
-def f(): pass
-PY3 = f()
-if PY3:
- pass
-elif bool():
- import nonexistent
- 1 + ''
-else:
- import bad_name
- 1 + ''
-[builtins fixtures/bool.pyi]
-[out]
-
-[case testSysVersionInfo_python2]
-import sys
-if sys.version_info[0] >= 3:
- def foo():
- # type: () -> int
- return 0
-else:
- def foo():
- # type: () -> str
- return ''
-reveal_type(foo()) # E: Revealed type is 'builtins.str'
-[builtins_py2 fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfo]
-import sys
-if sys.version_info[0] >= 3:
- def foo() -> int: return 0
-else:
- def foo() -> str: return ''
-reveal_type(foo()) # E: Revealed type is 'builtins.int'
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoNegated_python2]
-import sys
-if not (sys.version_info[0] < 3):
- def foo():
- # type: () -> int
- return 0
-else:
- def foo():
- # type: () -> str
- return ''
-reveal_type(foo()) # E: Revealed type is 'builtins.str'
-[builtins_py2 fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoNegated]
-import sys
-if not (sys.version_info[0] < 3):
- def foo() -> int: return 0
-else:
- def foo() -> str: return ''
-reveal_type(foo()) # E: Revealed type is 'builtins.int'
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoSliced1]
-import sys
-if sys.version_info[:1] >= (3,):
- def foo() -> int: return 0
-else:
- def foo() -> str: return ''
-foo() + 0
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoSliced2]
-import sys
-if sys.version_info[:2] >= (3, 0):
- def foo() -> int: return 0
-else:
- def foo() -> str: return ''
-foo() + 0
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoSliced3]
-import sys
-if sys.version_info[:] >= (3, 0):
- def foo() -> int: return 0
-else:
- def foo() -> str: return ''
-foo() + 0
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoSliced4]
-import sys
-if sys.version_info[0:2] >= (3, 0):
- def foo() -> int: return 0
-else:
- def foo() -> str: return ''
-foo() + 0
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoSliced5]
-import sys
-if sys.version_info[0:] >= (3,):
- def foo() -> int: return 0
-else:
- def foo() -> str: return ''
-foo() + 0
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoSliced6]
-import sys
-if sys.version_info[1:] >= (5,):
- def foo() -> int: return 0
-else:
- def foo() -> str: return ''
-foo() + 0
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoSliced7]
-import sys
-if sys.version_info >= (3, 5):
- def foo() -> int: return 0
-else:
- def foo() -> str: return ''
-foo() + 0
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoSliced8]
-# Our pyversion only has (major, minor),
-# so testing for (major, minor, bugfix) is unsupported.
-import sys
-if sys.version_info >= (3, 5, 0):
- def foo() -> int: return 0
-else:
- def foo() -> str: return '' # E: All conditional function variants must have identical signatures
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoSliced9]
-# Our pyversion only has (major, minor),
-# so testing for (minor, bugfix) is unsupported (also it's silly :-).
-import sys
-if sys.version_info[1:] >= (5, 0):
- def foo() -> int: return 0
-else:
- def foo() -> str: return '' # E: All conditional function variants must have identical signatures
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysPlatform1]
-import sys
-if sys.platform == 'fictional':
- def foo() -> int: return 0
-else:
- def foo() -> str: return ''
-foo() + ''
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysPlatform2]
-import sys
-if sys.platform != 'fictional':
- def foo() -> int: return 0
-else:
- def foo() -> str: return ''
-foo() + 0
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysPlatformNegated]
-import sys
-if not (sys.platform == 'fictional'):
- def foo() -> int: return 0
-else:
- def foo() -> str: return ''
-foo() + 0
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoClass]
-import sys
-if sys.version_info < (3, 5):
- class C:
- pass
-else:
- class C:
- def foo(self) -> int: return 0
-C().foo() + 0
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoImport]
-import sys
-if sys.version_info >= (3, 5):
- import collections
-else:
- collections = None
-Pt = collections.namedtuple('Pt', 'x y z')
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoVariable]
-import sys
-if sys.version_info >= (3, 5):
- x = ''
-else:
- x = 0
-x + ''
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoInClass]
-import sys
-class C:
- if sys.version_info >= (3, 5):
- def foo(self) -> int: return 0
- else:
- def foo(self) -> str: return ''
-reveal_type(C().foo()) # E: Revealed type is 'builtins.int'
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysVersionInfoInFunction]
-import sys
-def foo() -> None:
- if sys.version_info >= (3, 5):
- x = ''
- else:
- x = 0
- reveal_type(x) # E: Revealed type is 'builtins.str'
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysPlatformInMethod]
-import sys
-class C:
- def foo(self) -> None:
- if sys.platform != 'fictional':
- x = ''
- else:
- x = 0
- reveal_type(x) # E: Revealed type is 'builtins.str'
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testSysPlatformInFunctionImport]
-import sys
-def foo() -> None:
- if sys.platform != 'fictional':
- import a
- else:
- import b as a
- a.x
-[file a.py]
-x = 1
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testCustomSysVersionInfo]
-# flags: --python-version 3.2
-import sys
-if sys.version_info == (3, 2):
- x = "foo"
-else:
- x = 3
-reveal_type(x) # E: Revealed type is 'builtins.str'
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testCustomSysVersionInfo2]
-# flags: --python-version 3.1
-import sys
-if sys.version_info == (3, 2):
- x = "foo"
-else:
- x = 3
-reveal_type(x) # E: Revealed type is 'builtins.int'
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testCustomSysPlatform]
-# flags: --platform linux
-import sys
-if sys.platform == 'linux':
- x = "foo"
-else:
- x = 3
-reveal_type(x) # E: Revealed type is 'builtins.str'
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testCustomSysPlatform2]
-# flags: --platform win32
-import sys
-if sys.platform == 'linux':
- x = "foo"
-else:
- x = 3
-reveal_type(x) # E: Revealed type is 'builtins.int'
-[builtins fixtures/ops.pyi]
-[out]
-
-[case testCustomSysPlatformStartsWith]
-# flags: --platform win32
-import sys
-if sys.platform.startswith('win'):
- x = "foo"
-else:
- x = 3
-reveal_type(x) # E: Revealed type is 'builtins.str'
-[builtins fixtures/ops.pyi]
-[out]
diff --git a/test-data/unit/check-unsupported.test b/test-data/unit/check-unsupported.test
deleted file mode 100644
index 7f36e69..0000000
--- a/test-data/unit/check-unsupported.test
+++ /dev/null
@@ -1,15 +0,0 @@
--- Tests for unsupported features
-
-
-[case testDecorateOverloadedFunction]
-# The error messages are not the most informative ever.
-def d(x): pass
-@d
-def f(): pass
-def f(x): pass # E
-def g(): pass
-@d # E
-def g(x): pass
-[out]
-main:5: error: Name 'f' already defined
-main:7: error: Name 'g' already defined
diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test
deleted file mode 100644
index cad1dad..0000000
--- a/test-data/unit/check-varargs.test
+++ /dev/null
@@ -1,592 +0,0 @@
--- Test cases for the type checker related to varargs.
-
-
--- Varargs within body
--- -------------------
-
-
-[case testVarArgsWithinFunction]
-from typing import Tuple
-def f( *b: 'B') -> None:
- ab = None # type: Tuple[B, ...]
- ac = None # type: Tuple[C, ...]
- b = ac # E: Incompatible types in assignment (expression has type Tuple[C, ...], variable has type Tuple[B, ...])
- ac = b # E: Incompatible types in assignment (expression has type Tuple[B, ...], variable has type Tuple[C, ...])
- b = ab
- ab = b
-
-class B: pass
-class C: pass
-[builtins fixtures/tuple.pyi]
-[out]
-
-
-[case testVarArgsAreTuple]
-from typing import Tuple, Sequence
-def want_tuple(types: Tuple[type, ...]): pass
-def want_sequence(types: Sequence[type]): pass
-def test(*t: type) -> None:
- want_tuple(t)
- want_sequence(t)
-[builtins fixtures/tuple.pyi]
-[out]
-
-
--- Calling varargs function
--- ------------------------
-
-
-[case testCallingVarArgsFunction]
-
-a = None # type: A
-b = None # type: B
-c = None # type: C
-
-f(c) # E: Argument 1 to "f" has incompatible type "C"; expected "A"
-f(a, b, c) # E: Argument 3 to "f" has incompatible type "C"; expected "A"
-f(g()) # E: "g" does not return a value
-f(a, g()) # E: "g" does not return a value
-f()
-f(a)
-f(b)
-f(a, b, a, b)
-
-def f( *a: 'A') -> None: pass
-
-def g() -> None: pass
-
-class A: pass
-class B(A): pass
-class C: pass
-[builtins fixtures/list.pyi]
-
-[case testCallingVarArgsFunctionWithAlsoNormalArgs]
-
-a = None # type: A
-b = None # type: B
-c = None # type: C
-
-f(a) # E: Argument 1 to "f" has incompatible type "A"; expected "C"
-f(c, c) # E: Argument 2 to "f" has incompatible type "C"; expected "A"
-f(c, a, b, c) # E: Argument 4 to "f" has incompatible type "C"; expected "A"
-f(c)
-f(c, a)
-f(c, b, b, a, b)
-
-def f(a: 'C', *b: 'A') -> None: pass
-
-class A: pass
-class B(A): pass
-class C: pass
-[builtins fixtures/list.pyi]
-
-[case testCallingVarArgsFunctionWithDefaultArgs]
-
-a = None # type: A
-b = None # type: B
-c = None # type: C
-
-f(a) # E: Argument 1 to "f" has incompatible type "A"; expected "C"
-f(c, c) # E: Argument 2 to "f" has incompatible type "C"; expected "A"
-f(c, a, b, c) # E: Argument 4 to "f" has incompatible type "C"; expected "A"
-f()
-f(c)
-f(c, a)
-f(c, b, b, a, b)
-
-def f(a: 'C' = None, *b: 'A') -> None:
- pass
-
-class A: pass
-class B(A): pass
-class C: pass
-[builtins fixtures/list.pyi]
-
-[case testCallVarargsFunctionWithIterable]
-from typing import Iterable
-it1 = None # type: Iterable[int]
-it2 = None # type: Iterable[str]
-def f(*x: int) -> None: pass
-f(*it1)
-f(*it2) # E: Argument 1 to "f" has incompatible type *Iterable[str]; expected "int"
-[builtins fixtures/for.pyi]
-
-[case testCallVarargsFunctionWithIterableAndPositional]
-# flags: --fast-parser
-from typing import Iterable
-it1 = None # type: Iterable[int]
-def f(*x: int) -> None: pass
-f(*it1, 1, 2)
-f(*it1, 1, *it1, 2)
-f(*it1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int"
-[builtins fixtures/for.pyi]
-
-[case testCallVarargsFunctionWithTupleAndPositional]
-# flags: --fast-parser
-def f(*x: int) -> None: pass
-it1 = (1, 2)
-f(*it1, 1, 2)
-f(*it1, 1, *it1, 2)
-f(*it1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int"
-[builtins fixtures/for.pyi]
-
-
--- Calling varargs function + type inference
--- -----------------------------------------
-
-
-[case testTypeInferenceWithCalleeVarArgs]
-from typing import TypeVar
-T = TypeVar('T')
-a = None # type: A
-b = None # type: B
-c = None # type: C
-o = None # type: object
-
-a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-b = f(b, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-
-o = f()
-a = f(a)
-a = f(b)
-a = f(a, b, a)
-o = f(a, b, o)
-c = f(c)
-
-def f( *a: T) -> T:
- pass
-
-class A: pass
-class B(A): pass
-class C: pass
-[builtins fixtures/list.pyi]
-
-[case testTypeInferenceWithCalleeVarArgsAndDefaultArgs]
-from typing import TypeVar
-T = TypeVar('T')
-a = None # type: A
-o = None # type: object
-
-a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-a = f(a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-a = f(a, a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-a = f(a, a, a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
-
-a = f(a)
-a = f(a, a)
-a = f(a, a, a)
-
-def f(a: T, b: T = None, *c: T) -> T:
- pass
-
-class A: pass
-[builtins fixtures/list.pyi]
-
-
--- Calling normal function with varargs
--- ------------------------------------
-
-
-[case testCallingWithListVarArgs]
-from typing import List, Any, cast
-aa = None # type: List[A]
-ab = None # type: List[B]
-a = None # type: A
-b = None # type: B
-
-f(*aa) # Fail
-f(a, *ab) # Ok
-f(a, b)
-(cast(Any, f))(*aa) # IDEA: Move to check-dynamic?
-(cast(Any, f))(a, *ab) # IDEA: Move to check-dynamic?
-
-def f(a: 'A', b: 'B') -> None:
- pass
-
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-[out]
-main:7: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
-
-[case testCallingWithTupleVarArgs]
-
-a = None # type: A
-b = None # type: B
-c = None # type: C
-cc = None # type: CC
-
-f(*(a, b, b)) # E: Argument 1 to "f" has incompatible type *"Tuple[A, B, B]"; expected "C"
-f(*(b, b, c)) # E: Argument 1 to "f" has incompatible type *"Tuple[B, B, C]"; expected "A"
-f(a, *(b, b)) # E: Argument 2 to "f" has incompatible type *"Tuple[B, B]"; expected "C"
-f(b, *(b, c)) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
-f(*(a, b)) # E: Too few arguments for "f"
-f(*(a, b, c, c)) # E: Too many arguments for "f"
-f(a, *(b, c, c)) # E: Too many arguments for "f"
-f(*(a, b, c))
-f(a, *(b, c))
-f(a, b, *(c,))
-f(a, *(b, cc))
-
-def f(a: 'A', b: 'B', c: 'C') -> None: pass
-
-class A: pass
-class B: pass
-class C: pass
-class CC(C): pass
-[builtins fixtures/tuple.pyi]
-
-[case testInvalidVarArg]
-
-a = None # type: A
-
-f(*None)
-f(*a) # E: List or tuple expected as variable arguments
-f(*(a,))
-
-def f(a: 'A') -> None:
- pass
-
-class A: pass
-[builtins fixtures/tuple.pyi]
-
-
--- Calling varargs function with varargs
--- -------------------------------------
-
-
-[case testCallingVarArgsFunctionWithListVarArgs]
-from typing import List
-aa, ab, a, b = None, None, None, None # type: (List[A], List[B], A, B)
-f(*aa) # Fail
-f(a, *aa) # Fail
-f(b, *ab) # Fail
-f(a, a, *ab) # Fail
-f(a, b, *aa) # Fail
-f(b, b, *ab) # Fail
-g(*ab) # Fail
-f(a, *ab)
-f(a, b, *ab)
-f(a, b, b, *ab)
-g(*aa)
-
-def f(a: 'A', *b: 'B') -> None: pass
-def g(a: 'A', *b: 'A') -> None: pass
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-[out]
-main:3: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
-main:4: error: Argument 2 to "f" has incompatible type *List[A]; expected "B"
-main:5: error: Argument 1 to "f" has incompatible type "B"; expected "A"
-main:6: error: Argument 2 to "f" has incompatible type "A"; expected "B"
-main:7: error: Argument 3 to "f" has incompatible type *List[A]; expected "B"
-main:8: error: Argument 1 to "f" has incompatible type "B"; expected "A"
-main:9: error: Argument 1 to "g" has incompatible type *List[B]; expected "A"
-
-[case testCallingVarArgsFunctionWithTupleVarArgs]
-
-a, b, c, cc = None, None, None, None # type: (A, B, C, CC)
-
-f(*(b, b, b)) # E: Argument 1 to "f" has incompatible type *"Tuple[B, B, B]"; expected "A"
-f(*(a, a, b)) # E: Argument 1 to "f" has incompatible type *"Tuple[A, A, B]"; expected "B"
-f(*(a, b, a)) # E: Argument 1 to "f" has incompatible type *"Tuple[A, B, A]"; expected "B"
-f(a, *(a, b)) # E: Argument 2 to "f" has incompatible type *"Tuple[A, B]"; expected "B"
-f(b, *(b, b)) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
-f(b, b, *(b,)) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
-f(a, a, *(b,)) # E: Argument 2 to "f" has incompatible type "A"; expected "B"
-f(a, b, *(a,)) # E: Argument 3 to "f" has incompatible type *"Tuple[A]"; expected "B"
-f(*()) # E: Too few arguments for "f"
-f(*(a, b, b))
-f(a, *(b, b))
-f(a, b, *(b,))
-
-def f(a: 'A', *b: 'B') -> None:
- pass
-
-class A: pass
-class B: pass
-class C: pass
-class CC(C): pass
-[builtins fixtures/list.pyi]
-
-
--- Varargs special cases
--- ---------------------
-
-
-[case testDynamicVarArg]
-from typing import Any
-d, a = None, None # type: (Any, A)
-f(a, a, *d) # Fail
-f(a, *d) # Fail
-f(*d) # Ok
-
-g(*d)
-g(a, *d)
-g(a, a, *d)
-
-def f(a: 'A') -> None: pass
-def g(a: 'A', *b: 'A') -> None: pass
-class A: pass
-[builtins fixtures/list.pyi]
-[out]
-main:3: error: Too many arguments for "f"
-main:4: error: Too many arguments for "f"
-
-[case testListVarArgsAndSubtyping]
-from typing import List
-aa = None # type: List[A]
-ab = None # type: List[B]
-
-g(*aa) # E: Argument 1 to "g" has incompatible type *List[A]; expected "B"
-f(*aa)
-f(*ab)
-g(*ab)
-
-def f( *a: 'A') -> None:
- pass
-
-def g( *a: 'B') -> None:
- pass
-
-class A: pass
-class B(A): pass
-[builtins fixtures/list.pyi]
-
-[case testCallerVarArgsAndDefaultArgs]
-
-a, b = None, None # type: (A, B)
-f(*()) # Fail
-f(a, *[a]) # Fail
-f(a, b, *[a]) # Fail
-f(*(a, a, b)) # Fail
-f(*(a,))
-f(*(a, b))
-f(*(a, b, b, b))
-f(a, *[])
-f(a, *[b])
-f(a, *[b, b])
-
-def f(a: 'A', b: 'B' = None, *c: 'B') -> None:
- pass
-
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-[out]
-main:3: error: Too few arguments for "f"
-main:4: error: Argument 2 to "f" has incompatible type *List[A]; expected "B"
-main:5: error: Argument 3 to "f" has incompatible type *List[A]; expected "B"
-main:6: error: Argument 1 to "f" has incompatible type *"Tuple[A, A, B]"; expected "B"
-
-[case testVarArgsAfterKeywordArgInCall1-skip]
-# see: mypy issue #2729
-def f(x: int, y: str) -> None: pass
-f(x=1, *[2])
-[builtins fixtures/list.pyi]
-[out]
-main:2: error: "f" gets multiple values for keyword argument "x"
-main:2: error: Argument 2 to "f" has incompatible type *List[int]; expected "str"
-
-[case testVarArgsAfterKeywordArgInCall2-skip]
-# see: mypy issue #2729
-def f(x: int, y: str) -> None: pass
-f(y='x', *[1])
-[builtins fixtures/list.pyi]
-[out]
-main:2: error: "f" gets multiple values for keyword argument "y"
-main:2: error: Argument 2 to "f" has incompatible type *List[int]; expected "str"
-
-[case testVarArgsAfterKeywordArgInCall3]
-def f(x: int, y: str) -> None: pass
-f(y='x', *(1,))
-[builtins fixtures/list.pyi]
-
-[case testVarArgsAfterKeywordArgInCall4]
-def f(x: int, *, y: str) -> None: pass
-f(y='x', *[1])
-[builtins fixtures/list.pyi]
-
-[case testVarArgsAfterKeywordArgInCall5]
-def f(x: int, *, y: str) -> None: pass
-f(y='x', *(1,))
-[builtins fixtures/list.pyi]
-
-
--- Overloads + varargs
--- -------------------
-
-
-[case testIntersectionTypesAndVarArgs]
-from typing import overload
-a, b = None, None # type: (A, B)
-
-b = f() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-a = f(b, b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-b = f(a, *[b]) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-b = f(*()) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-b = f(*(a,)) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-b = f(*(a, b)) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = f(*(b,)) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-a = f(*(b, b)) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-a = f(*[b]) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-
-a = f()
-a = f(a)
-a = f(a, b)
-b = f(b)
-b = f(b, b)
-a = f(a, *[b])
-a = f(*())
-a = f(*(a,))
-a = f(*(a, b))
-b = f(*(b,))
-b = f(*(b, b))
-b = f(*[b])
-
-class A: pass
-class B: pass
-
-@overload
-def f(a: A = None, *b: B) -> A: pass
-
-@overload
-def f(a: B, *b: B) -> B: pass
-[builtins fixtures/list.pyi]
-
-
--- Caller varargs + type inference
--- -------------------------------
-
-
-[case testCallerVarArgsListWithTypeInference]
-from typing import List, TypeVar, Tuple
-S = TypeVar('S')
-T = TypeVar('T')
-a, b, aa = None, None, None # type: (A, B, List[A])
-
-a, b = f(*aa) # Fail
-b, b = f(*aa) # Fail
-a, a = f(b, *aa) # Fail
-b, b = f(b, *aa) # Fail
-b, b = f(b, b, *aa) # Fail
-a, b = f(a, *a) # Fail
-a, b = f(*a) # Fail
-
-a, a = f(*aa)
-b, a = f(b, *aa)
-b, a = f(b, a, *aa)
-
-def f(a: S, *b: T) -> Tuple[S, T]:
- pass
-
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-[out]
-main:6: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
-main:7: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
-main:8: error: Argument 1 to "f" has incompatible type "B"; expected "A"
-main:9: error: Argument 2 to "f" has incompatible type *List[A]; expected "B"
-main:10: error: Argument 3 to "f" has incompatible type *List[A]; expected "B"
-main:11: error: List or tuple expected as variable arguments
-main:12: error: List or tuple expected as variable arguments
-
-[case testCallerVarArgsTupleWithTypeInference]
-from typing import TypeVar, Tuple
-S = TypeVar('S')
-T = TypeVar('T')
-a, b = None, None # type: (A, B)
-
-a, a = f(*(a, b)) # E: Argument 1 to "f" has incompatible type *"Tuple[A, B]"; expected "A"
-b, b = f(a, *(b,)) # E: Argument 1 to "f" has incompatible type "A"; expected "B"
-a, a = f(*(a, b)) # E: Argument 1 to "f" has incompatible type *"Tuple[A, B]"; expected "A"
-b, b = f(a, *(b,)) # E: Argument 1 to "f" has incompatible type "A"; expected "B"
-a, b = f(*(a, b, b)) # E: Too many arguments for "f"
-
-a, b = f(*(a, b))
-a, b = f(a, *(b,))
-
-def f(a: S, b: T) -> Tuple[S, T]: pass
-
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-
-[case testCallerVarargsAndComplexTypeInference]
-from typing import List, TypeVar, Generic, Tuple
-T = TypeVar('T')
-S = TypeVar('S')
-a, b = None, None # type: (A, B)
-ao = None # type: List[object]
-aa = None # type: List[A]
-ab = None # type: List[B]
-
-a, aa = G().f(*[a]) # Fail
-aa, a = G().f(*[a]) # Fail
-ab, aa = G().f(*[a]) # Fail
-
-ao, ao = G().f(*[a]) # E: Incompatible types in assignment (expression has type List[None], variable has type List[object])
-aa, aa = G().f(*[a]) # E: Incompatible types in assignment (expression has type List[None], variable has type List[A])
-
-class G(Generic[T]):
- def f(self, *a: S) -> Tuple[List[S], List[T]]:
- pass
-
-class A: pass
-class B: pass
-[builtins fixtures/list.pyi]
-[out]
-main:9: error: Incompatible types in assignment (expression has type List[A], variable has type "A")
-main:9: error: Incompatible types in assignment (expression has type List[None], variable has type List[A])
-main:10: error: Incompatible types in assignment (expression has type List[None], variable has type "A")
-main:11: error: Incompatible types in assignment (expression has type List[None], variable has type List[A])
-main:11: error: Argument 1 to "f" of "G" has incompatible type *List[A]; expected "B"
-
-
--- Comment signatures
--- ------------------
-
-
-[case testVarArgsAndCommentSignature]
-import typing
-def f(*x): # type: (*int) -> None
- pass
-f(1)
-f(1, 2)
-f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
-f(1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int"
-[builtins fixtures/list.pyi]
-
-
--- Subtyping
--- ---------
-
-
-[case testVarArgsFunctionSubtyping]
-from typing import Callable
-x = None # type: Callable[[int], None]
-def f(*x: int) -> None: pass
-def g(*x: str) -> None: pass
-x = f
-x = g # E: Incompatible types in assignment (expression has type Callable[[StarArg(str)], None], variable has type Callable[[int], None])
-[builtins fixtures/list.pyi]
-[out]
-
-
--- Decorated method where self is implied by *args
--- -----------------------------------------------
-
-[case testVarArgsCallableSelf]
-from typing import Callable
-def cm(func) -> Callable[..., None]: pass
-class C:
- @cm
- def foo(self) -> None: pass
-C().foo()
-C().foo(1) # The decorator's return type says this should be okay
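
For orientation, the caller-side varargs inference exercised above can be reproduced outside the test DSL with ordinary Python. A minimal sketch (function and variable names are illustrative only); the comments describe how mypy treats each call:

    from typing import Tuple, TypeVar

    S = TypeVar('S')
    T = TypeVar('T')

    def pair(a: S, *rest: T) -> Tuple[S, T]: ...

    p = pair(1, *['x', 'y'])   # S=int, T=str inferred from the unpacked list
    q = pair(1, *(2.0,))       # S=int, T=float inferred from the unpacked tuple
    r = pair(1, *1)            # rejected: list or tuple expected as variable arguments
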
diff --git a/test-data/unit/check-warnings.test b/test-data/unit/check-warnings.test
deleted file mode 100644
index ab5f66b..0000000
--- a/test-data/unit/check-warnings.test
+++ /dev/null
@@ -1,132 +0,0 @@
--- Test cases for warning generation.
-
--- Redundant casts
--- ---------------
-
-[case testRedundantCast]
-# flags: --warn-redundant-casts
-from typing import cast
-a = 1
-b = cast(str, a)
-c = cast(int, a)
-[out]
-main:5: note: Redundant cast to "int"
-
-[case testRedundantCastWithIsinstance]
-# flags: --warn-redundant-casts
-from typing import cast, Union
-x = 1 # type: Union[int, str]
-if isinstance(x, str):
- cast(str, x)
-[builtins fixtures/isinstance.pyi]
-[out]
-main:5: note: Redundant cast to "str"
-
-[case testCastToSuperclassNotRedundant]
-# flags: --warn-redundant-casts
-from typing import cast, TypeVar, List
-T = TypeVar('T')
-def add(xs: List[T], ys: List[T]) -> List[T]: pass
-class A: pass
-class B(A): pass
-a = A()
-b = B()
-# Without the cast, the following line would fail to type check.
-c = add([cast(A, b)], [a])
-[builtins fixtures/list.pyi]
-
-
--- Unused 'type: ignore' comments
--- ------------------------------
-
-[case testUnusedTypeIgnore]
-# flags: --warn-unused-ignores
-a = 1
-a = 'a' # type: ignore
-a = 2 # type: ignore # N: unused 'type: ignore' comment
-a = 'b' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-
-[case testUnusedTypeIgnoreImport]
-# flags: --warn-unused-ignores
-import banana # type: ignore
-import m # type: ignore
-from m import * # type: ignore
-[file m.py]
-pass
-[out]
-main:3: note: unused 'type: ignore' comment
-main:4: note: unused 'type: ignore' comment
-
-
--- No return
--- ---------
-
-[case testNoReturn]
-# flags: --warn-no-return
-def f() -> int:
- pass
-
-def g() -> int:
- if bool():
- return 1
-[builtins fixtures/list.pyi]
-[out]
-main:5: note: Missing return statement
-
-[case testNoReturnWhile]
-# flags: --warn-no-return
-def h() -> int:
- while True:
- if bool():
- return 1
-
-def i() -> int:
- while 1:
- if bool():
- return 1
- if bool():
- break
-
-def j() -> int:
- while 1:
- if bool():
- return 1
- if bool():
- continue
-[builtins fixtures/list.pyi]
-[out]
-main:7: note: Missing return statement
-
-[case testNoReturnExcept]
-# flags: --warn-no-return
-def f() -> int:
- try:
- return 1
- except:
- pass
-def g() -> int:
- try:
- pass
- except:
- return 1
- else:
- return 1
-def h() -> int:
- try:
- pass
- except:
- pass
- else:
- pass
- finally:
- return 1
-[builtins fixtures/exception.pyi]
-[out]
-main:2: note: Missing return statement
-
-[case testNoReturnEmptyBodyWithDocstring]
-def f() -> int:
- """Return the number of peppers."""
- # This might be an @abstractmethod, for example
- pass
-[out]
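
Outside the test harness, the three families of warnings covered by this file map to the --warn-redundant-casts, --warn-unused-ignores and --warn-no-return command-line flags. A minimal sketch that trips all three (the file name and function are illustrative only):

    # check with: mypy --warn-redundant-casts --warn-unused-ignores --warn-no-return demo.py
    from typing import cast

    x = cast(int, 1)          # note: Redundant cast to "int"
    y = 1   # type: ignore    # note: unused 'type: ignore' comment

    def f() -> int:           # note: Missing return statement
        if bool():
            return 1
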
diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test
deleted file mode 100644
index 9a17285..0000000
--- a/test-data/unit/cmdline.test
+++ /dev/null
@@ -1,479 +0,0 @@
--- Tests for command line parsing
--- ------------------------------
---
--- The initial line specifies the command line, in the format
---
--- # cmd: mypy <options>
-
-
--- Directories/packages on the command line
--- ----------------------------------------
-
-[case testCmdlinePackage]
-# cmd: mypy pkg
-[file pkg/__init__.py]
-[file pkg/a.py]
-undef
-[file pkg/subpkg/__init__.py]
-[file pkg/subpkg/a.py]
-undef
-import pkg.subpkg.a
-[out]
-pkg/a.py:1: error: Name 'undef' is not defined
-pkg/subpkg/a.py:1: error: Name 'undef' is not defined
-
-[case testCmdlinePackageSlash]
-# cmd: mypy pkg/
-[file pkg/__init__.py]
-[file pkg/a.py]
-undef
-[file pkg/subpkg/__init__.py]
-[file pkg/subpkg/a.py]
-undef
-import pkg.subpkg.a
-[out]
-pkg/a.py:1: error: Name 'undef' is not defined
-pkg/subpkg/a.py:1: error: Name 'undef' is not defined
-
-[case testCmdlineNonPackage]
-# cmd: mypy dir
-[file dir/a.py]
-undef
-[file dir/subdir/a.py]
-undef
-[out]
-dir/a.py:1: error: Name 'undef' is not defined
-
-[case testCmdlineNonPackageSlash]
-# cmd: mypy dir/
-[file dir/a.py]
-undef
-[file dir/subdir/a.py]
-undef
-[out]
-dir/a.py:1: error: Name 'undef' is not defined
-
-[case testCmdlinePackageContainingSubdir]
-# cmd: mypy pkg
-[file pkg/__init__.py]
-[file pkg/a.py]
-undef
-[file pkg/subdir/a.py]
-undef
-[out]
-pkg/a.py:1: error: Name 'undef' is not defined
-
-[case testCmdlineNonPackageContainingPackage]
-# cmd: mypy dir
-[file dir/a.py]
-undef
-import subpkg.a
-[file dir/subpkg/__init__.py]
-[file dir/subpkg/a.py]
-undef
-[out]
-dir/subpkg/a.py:1: error: Name 'undef' is not defined
-dir/a.py:1: error: Name 'undef' is not defined
-
-[case testBadFileEncoding]
-# cmd: mypy a.py
-[file a.py]
-# coding: uft-8
-[out]
-mypy: can't decode file 'a.py': unknown encoding: uft-8
-
-[case testCannotIgnoreDuplicateModule]
-# cmd: mypy one/mod/__init__.py two/mod/__init__.py
-[file one/mod/__init__.py]
-# type: ignore
-[file two/mod/__init__.py]
-# type: ignore
-[out]
-two/mod/__init__.py: error: Duplicate module named 'mod'
-
-[case testFlagsFile]
-# cmd: mypy @flagsfile
-[file flagsfile]
--2
-main.py
-[file main.py]
-def f():
- try:
- 1/0
- except ZeroDivisionError, err:
- print err
-
-[case testConfigFile]
-# cmd: mypy main.py
-[file mypy.ini]
-[[mypy]
-python_version = 2.7
-[file main.py]
-def f():
- try:
- 1/0
- except ZeroDivisionError, err:
- print err
-
-[case testAltConfigFile]
-# cmd: mypy --config-file config.ini main.py
-[file config.ini]
-[[mypy]
-python_version = 2.7
-[file main.py]
-def f():
- try:
- 1/0
- except ZeroDivisionError, err:
- print err
-
-[case testPerFileConfigSection]
-# cmd: mypy x.py y.py z.py
-[file mypy.ini]
-[[mypy]
-hide_error_context = True
-disallow_untyped_defs = True
-[[mypy-y*]
-disallow_untyped_defs = False
-[[mypy-z*]
-disallow_untyped_calls = True
-[file x.py]
-def f(a):
- pass
-def g(a: int) -> int:
- return f(a)
-[file y.py]
-def f(a):
- pass
-def g(a: int) -> int:
- return f(a)
-[file z.py]
-def f(a):
- pass
-def g(a: int) -> int:
- return f(a)
-[out]
-z.py:1: error: Function is missing a type annotation
-z.py:4: error: Call to untyped function "f" in typed context
-x.py:1: error: Function is missing a type annotation
-
-[case testPerFileConfigSectionMultipleMatches]
-# cmd: mypy xx.py xy.py yx.py yy.py
-[file mypy.ini]
-[[mypy]
-hide_error_context = True
-[[mypy-*x*]
-disallow_untyped_defs = True
-[[mypy-*y*]
-disallow_untyped_calls = True
-[file xx.py]
-def f(a): pass
-def g(a: int) -> int: return f(a)
-[file xy.py]
-def f(a): pass
-def g(a: int) -> int: return f(a)
-[file yx.py]
-def f(a): pass
-def g(a: int) -> int: return f(a)
-[file yy.py]
-def f(a): pass
-def g(a: int) -> int: return f(a)
-[out]
-yy.py:2: error: Call to untyped function "f" in typed context
-yx.py:1: error: Function is missing a type annotation
-yx.py:2: error: Call to untyped function "f" in typed context
-xy.py:1: error: Function is missing a type annotation
-xy.py:2: error: Call to untyped function "f" in typed context
-xx.py:1: error: Function is missing a type annotation
-
-[case testMultipleGlobConfigSection]
-# cmd: mypy x.py y.py z.py
-[file mypy.ini]
-[[mypy]
-hide_error_context = True
-[[mypy-x*,z*]
-disallow_untyped_defs = True
-[file x.py]
-def f(a): pass
-[file y.py]
-def f(a): pass
-[file z.py]
-def f(a): pass
-[out]
-z.py:1: error: Function is missing a type annotation
-x.py:1: error: Function is missing a type annotation
-
-[case testConfigErrorNoSection]
-# cmd: mypy -c pass
-[file mypy.ini]
-[out]
-mypy.ini: No [mypy] section in config file
-
-[case testConfigErrorUnknownFlag]
-# cmd: mypy -c pass
-[file mypy.ini]
-[[mypy]
-bad = 0
-[out]
-mypy.ini: [mypy]: Unrecognized option: bad = 0
-
-[case testConfigErrorUnknownReport]
-# cmd: mypy -c pass
-[file mypy.ini]
-[[mypy]
-bad_report = .
-[out]
-mypy.ini: [mypy]: Unrecognized report type: bad_report
-
-[case testConfigErrorBadBoolean]
-# cmd: mypy -c pass
-[file mypy.ini]
-[[mypy]
-ignore_missing_imports = nah
-[out]
-mypy.ini: [mypy]: ignore_missing_imports: Not a boolean: nah
-
-[case testConfigErrorNotPerFile]
-# cmd: mypy -c pass
-[file mypy.ini]
-[[mypy]
-[[mypy-*]
-strict_optional = True
-[out]
-mypy.ini: [mypy-*]: Per-module sections should only specify per-module flags (strict_optional)
-
-[case testCoberturaParser]
-# cmd: mypy --cobertura-xml-report build pkg
-[file pkg/__init__.py]
-[file pkg/a.py]
-from typing import Dict
-
-def foo() -> Dict:
- z = {'hello': 'world'}
- return z
-[file pkg/subpkg/__init__.py]
-[file pkg/subpkg/a.py]
-def bar() -> str:
- return 'world'
-def untyped_function():
- return 42
-[outfile build/cobertura.xml]
-<coverage timestamp="$TIMESTAMP" version="$VERSION" line-rate="0.8000" branch-rate="0">
- <sources>
- <source>$PWD</source>
- </sources>
- <packages>
- <package complexity="1.0" name="pkg" branch-rate="0" line-rate="1.0000">
- <classes>
- <class complexity="1.0" filename="pkg/__init__.py" name="__init__.py" branch-rate="0" line-rate="1.0">
- <methods/>
- <lines/>
- </class>
- <class complexity="1.0" filename="pkg/a.py" name="a.py" branch-rate="0" line-rate="1.0000">
- <methods/>
- <lines>
- <line branch="true" hits="1" number="3" precision="imprecise" condition-coverage="50% (1/2)"/>
- <line branch="false" hits="1" number="4" precision="precise"/>
- <line branch="false" hits="1" number="5" precision="precise"/>
- </lines>
- </class>
- </classes>
- </package>
- <package complexity="1.0" name="pkg.subpkg" branch-rate="0" line-rate="0.5000">
- <classes>
- <class complexity="1.0" filename="pkg/subpkg/__init__.py" name="__init__.py" branch-rate="0" line-rate="1.0">
- <methods/>
- <lines/>
- </class>
- <class complexity="1.0" filename="pkg/subpkg/a.py" name="a.py" branch-rate="0" line-rate="0.5000">
- <methods/>
- <lines>
- <line branch="false" hits="1" number="1" precision="precise"/>
- <line branch="false" hits="0" number="3" precision="any"/>
- </lines>
- </class>
- </classes>
- </package>
- </packages>
-</coverage>
-
-[case testConfigMypyPath]
-# cmd: mypy file.py
-[file mypy.ini]
-[[mypy]
-mypy_path =
- foo:bar
- , baz
-[file foo/foo.pyi]
-def foo(x: int) -> str: ...
-[file bar/bar.pyi]
-def bar(x: str) -> list: ...
-[file baz/baz.pyi]
-def baz(x: list) -> dict: ...
-[file file.py]
-import no_stubs
-from foo import foo
-from bar import bar
-from baz import baz
-baz(bar(foo(42)))
-baz(bar(foo('oof')))
-[out]
-file.py:1: error: Cannot find module named 'no_stubs'
-file.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-file.py:6: error: Argument 1 to "foo" has incompatible type "str"; expected "int"
-
-[case testIgnoreErrorsConfig]
-# cmd: mypy x.py y.py
-[file mypy.ini]
-[[mypy]
-[[mypy-x]
-ignore_errors = True
-[file x.py]
-"" + 0
-[file y.py]
-"" + 0
-[out]
-y.py:1: error: Unsupported operand types for + ("str" and "int")
-
-[case testConfigFollowImportsNormal]
-# cmd: mypy main.py
-[file main.py]
-from a import x
-x + 0
-x + '' # E
-import a
-a.x + 0
-a.x + '' # E
-a.y # E
-a + 0 # E
-[file mypy.ini]
-[[mypy]
-follow_imports = normal
-[file a.py]
-x = 0
-x += '' # Error reported here
-[out]
-a.py:2: error: Unsupported operand types for + ("int" and "str")
-main.py:3: error: Unsupported operand types for + ("int" and "str")
-main.py:6: error: Unsupported operand types for + ("int" and "str")
-main.py:7: error: "module" has no attribute "y"
-main.py:8: error: Unsupported operand types for + ("module" and "int")
-
-[case testConfigFollowImportsSilent]
-# cmd: mypy main.py
-[file main.py]
-from a import x
-x + ''
-import a
-a.x + ''
-a.y
-a + 0
-[file mypy.ini]
-[[mypy]
-follow_imports = silent
-[file a.py]
-x = 0
-x += '' # No error reported
-[out]
-main.py:2: error: Unsupported operand types for + ("int" and "str")
-main.py:4: error: Unsupported operand types for + ("int" and "str")
-main.py:5: error: "module" has no attribute "y"
-main.py:6: error: Unsupported operand types for + ("module" and "int")
-
-[case testConfigFollowImportsSkip]
-# cmd: mypy main.py
-[file main.py]
-from a import x
-reveal_type(x) # Expect Any
-import a
-reveal_type(a.x) # Expect Any
-[file mypy.ini]
-[[mypy]
-follow_imports = skip
-[file a.py]
-/ # No error reported
-[out]
-main.py:2: error: Revealed type is 'Any'
-main.py:4: error: Revealed type is 'Any'
-
-[case testConfigFollowImportsError]
-# cmd: mypy main.py
-[file main.py]
-from a import x
-reveal_type(x) # Expect Any
-import a # Error reported here
-reveal_type(a.x) # Expect Any
-[file mypy.ini]
-[[mypy]
-follow_imports = error
-[file a.py]
-/ # No error reported
-[out]
-main.py:1: note: Import of 'a' ignored
-main.py:1: note: (Using --follow-imports=error, module not passed on command line)
-main.py:2: error: Revealed type is 'Any'
-main.py:4: error: Revealed type is 'Any'
-
-[case testConfigFollowImportsSelective]
-# cmd: mypy main.py
-[file mypy.ini]
-[[mypy]
-[[mypy-normal]
-follow_imports = normal
-[[mypy-silent]
-follow_imports = silent
-[[mypy-skip]
-follow_imports = skip
-[[mypy-error]
-follow_imports = error
-[file main.py]
-import normal
-import silent
-import skip
-import error
-reveal_type(normal.x)
-reveal_type(silent.x)
-reveal_type(skip)
-reveal_type(error)
-[file normal.py]
-x = 0
-x += ''
-[file silent.py]
-x = 0
-x += ''
-[file skip.py]
-bla bla
-[file error.py]
-bla bla
-[out]
-main.py:4: note: Import of 'error' ignored
-main.py:4: note: (Using --follow-imports=error, module not passed on command line)
-normal.py:2: error: Unsupported operand types for + ("int" and "str")
-main.py:5: error: Revealed type is 'builtins.int'
-main.py:6: error: Revealed type is 'builtins.int'
-main.py:7: error: Revealed type is 'Any'
-main.py:8: error: Revealed type is 'Any'
-
-[case testConfigSilentMissingImportsOff]
-# cmd: mypy main.py
-[file main.py]
-import missing # Expect error here
-reveal_type(missing.x) # Expect Any
-[file mypy.ini]
-[[mypy]
-ignore_missing_imports = False
-[out]
-main.py:1: error: Cannot find module named 'missing'
-main.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main.py:2: error: Revealed type is 'Any'
-
-[case testConfigSilentMissingImportsOn]
-# cmd: mypy main.py
-[file main.py]
-import missing # No error here
-reveal_type(missing.x) # Expect Any
-[file mypy.ini]
-[[mypy]
-ignore_missing_imports = True
-[out]
-main.py:2: error: Revealed type is 'Any'
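
Taken together, the config-file cases above cover a required [mypy] section, per-module [mypy-<pattern>] sections, follow_imports (normal, silent, skip or error) and ignore_missing_imports. Note that the doubled '[[' in the inline mypy.ini files is only the test DSL's escape for a literal '[' at the start of a line; a real configuration in the same style would look roughly like this (the module pattern is illustrative):

    [mypy]
    python_version = 2.7
    follow_imports = normal
    ignore_missing_imports = False

    [mypy-somevendor.*]
    ignore_errors = True
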
diff --git a/test-data/unit/fixtures/__new__.pyi b/test-data/unit/fixtures/__new__.pyi
deleted file mode 100644
index 4e2cc57..0000000
--- a/test-data/unit/fixtures/__new__.pyi
+++ /dev/null
@@ -1,14 +0,0 @@
-# builtins stub with object.__new__
-
-class object:
- def __init__(self) -> None: pass
-
- def __new__(cls): pass
-
-class type:
- def __init__(self, x) -> None: pass
-
-class int: pass
-class bool: pass
-class str: pass
-class function: pass
diff --git a/test-data/unit/fixtures/alias.pyi b/test-data/unit/fixtures/alias.pyi
deleted file mode 100644
index 5909cb6..0000000
--- a/test-data/unit/fixtures/alias.pyi
+++ /dev/null
@@ -1,12 +0,0 @@
-# Builtins test fixture with a type alias 'bytes'
-
-class object:
- def __init__(self) -> None: pass
-class type:
- def __init__(self, x) -> None: pass
-
-class int: pass
-class str: pass
-class function: pass
-
-bytes = str
diff --git a/test-data/unit/fixtures/args.pyi b/test-data/unit/fixtures/args.pyi
deleted file mode 100644
index e4a6ffe..0000000
--- a/test-data/unit/fixtures/args.pyi
+++ /dev/null
@@ -1,29 +0,0 @@
-# Builtins stub used to support *args, **kwargs.
-
-from typing import TypeVar, Generic, Iterable, Tuple, Dict, Any, overload
-
-Tco = TypeVar('Tco', covariant=True)
-T = TypeVar('T')
-S = TypeVar('S')
-
-class object:
- def __init__(self) -> None: pass
- def __eq__(self, o: object) -> bool: pass
- def __ne__(self, o: object) -> bool: pass
-
-class type:
- @overload
- def __init__(self, o: object) -> None: pass
- @overload
- def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any]) -> None: pass
- def __call__(self, *args: Any, **kwargs: Any) -> Any: pass
-
-class tuple(Iterable[Tco], Generic[Tco]): pass
-class dict(Generic[T, S]): pass
-
-class int:
- def __eq__(self, o: object) -> bool: pass
-class str: pass
-class bool: pass
-class function: pass
-class module: pass
diff --git a/test-data/unit/fixtures/async_await.pyi b/test-data/unit/fixtures/async_await.pyi
deleted file mode 100644
index 7a166a0..0000000
--- a/test-data/unit/fixtures/async_await.pyi
+++ /dev/null
@@ -1,9 +0,0 @@
-import typing
-class object:
- def __init__(self): pass
-class type: pass
-class function: pass
-class int: pass
-class str: pass
-class list: pass
-class tuple: pass
diff --git a/test-data/unit/fixtures/bool.pyi b/test-data/unit/fixtures/bool.pyi
deleted file mode 100644
index b463b02..0000000
--- a/test-data/unit/fixtures/bool.pyi
+++ /dev/null
@@ -1,15 +0,0 @@
-# builtins stub used in boolean-related test cases.
-
-from typing import builtinclass
-
-@builtinclass
-class object:
- def __init__(self) -> None: pass
-
-class type: pass
-class tuple: pass
-class function: pass
-class bool: pass
-class int: pass
-class str: pass
-class unicode: pass
diff --git a/test-data/unit/fixtures/callable.pyi b/test-data/unit/fixtures/callable.pyi
deleted file mode 100644
index ae58648..0000000
--- a/test-data/unit/fixtures/callable.pyi
+++ /dev/null
@@ -1,26 +0,0 @@
-from typing import Generic, Tuple, TypeVar, Union
-
-T = TypeVar('T')
-
-class object:
- def __init__(self) -> None: pass
-
-class type:
- def __init__(self, x) -> None: pass
-
-class tuple(Generic[T]): pass
-
-class function: pass
-
-def isinstance(x: object, t: Union[type, Tuple[type, ...]]) -> bool: pass
-
-def callable(x: object) -> bool: pass
-
-class int:
- def __add__(self, other: 'int') -> 'int': pass
- def __eq__(self, other: 'int') -> 'bool': pass
-class float: pass
-class bool(int): pass
-class str:
- def __add__(self, other: 'str') -> 'str': pass
- def __eq__(self, other: 'str') -> bool: pass
diff --git a/test-data/unit/fixtures/classmethod.pyi b/test-data/unit/fixtures/classmethod.pyi
deleted file mode 100644
index 282839d..0000000
--- a/test-data/unit/fixtures/classmethod.pyi
+++ /dev/null
@@ -1,22 +0,0 @@
-import typing
-
-class object:
- def __init__(self) -> None: pass
-
-class type:
- def __init__(self, x) -> None: pass
- def mro(self) -> typing.Any: pass
-
-class function: pass
-
-# Dummy definitions.
-classmethod = object()
-staticmethod = object()
-
-class int:
- @classmethod
- def from_bytes(cls, bytes: bytes, byteorder: str) -> int: pass
-
-class str: pass
-class bytes: pass
-class bool: pass
diff --git a/test-data/unit/fixtures/complex.pyi b/test-data/unit/fixtures/complex.pyi
deleted file mode 100644
index d4135be..0000000
--- a/test-data/unit/fixtures/complex.pyi
+++ /dev/null
@@ -1,11 +0,0 @@
-# Builtins stub used for some float/complex test cases.
-
-class object:
- def __init__(self): pass
-
-class type: pass
-class function: pass
-class int: pass
-class float: pass
-class complex: pass
-class str: pass
diff --git a/test-data/unit/fixtures/dict.pyi b/test-data/unit/fixtures/dict.pyi
deleted file mode 100644
index 18f3ebe..0000000
--- a/test-data/unit/fixtures/dict.pyi
+++ /dev/null
@@ -1,35 +0,0 @@
-# Builtins stub used in dictionary-related test cases.
-
-from typing import TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload
-
-T = TypeVar('T')
-KT = TypeVar('KT')
-VT = TypeVar('VT')
-
-class object:
- def __init__(self) -> None: pass
-
-class type: pass
-
-class dict(Iterable[KT], Mapping[KT, VT], Generic[KT, VT]):
- @overload
- def __init__(self, **kwargs: VT) -> None: pass
- @overload
- def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass
- def __setitem__(self, k: KT, v: VT) -> None: pass
- def __iter__(self) -> Iterator[KT]: pass
- def update(self, a: Mapping[KT, VT]) -> None: pass
-
-class int: # for convenience
- def __add__(self, x: int) -> int: pass
-
-class str: pass # for keyword argument key type
-class unicode: pass # needed for py2 docstrings
-
-class list(Iterable[T], Generic[T]): # needed by some test cases
- def __iter__(self) -> Iterator[T]: pass
- def __mul__(self, x: int) -> list[T]: pass
-
-class tuple: pass
-class function: pass
-class float: pass
diff --git a/test-data/unit/fixtures/exception.pyi b/test-data/unit/fixtures/exception.pyi
deleted file mode 100644
index 5a2482d..0000000
--- a/test-data/unit/fixtures/exception.pyi
+++ /dev/null
@@ -1,13 +0,0 @@
-
-class object:
- def __init__(self): pass
-
-class type: pass
-class tuple: pass
-class function: pass
-class int: pass
-class str: pass
-class unicode: pass
-class bool: pass
-
-class BaseException: pass
diff --git a/test-data/unit/fixtures/for.pyi b/test-data/unit/fixtures/for.pyi
deleted file mode 100644
index 4762806..0000000
--- a/test-data/unit/fixtures/for.pyi
+++ /dev/null
@@ -1,19 +0,0 @@
-# builtins stub used in for statement test cases
-
-from typing import TypeVar, Generic, Iterable, Iterator, Generator
-from abc import abstractmethod, ABCMeta
-
-t = TypeVar('t')
-
-class object:
- def __init__(self) -> None: pass
-
-class type: pass
-class tuple: pass
-class function: pass
-class bool: pass
-class int: pass # for convenience
-class str: pass # for convenience
-
-class list(Iterable[t], Generic[t]):
- def __iter__(self) -> Iterator[t]: pass
diff --git a/test-data/unit/fixtures/function.pyi b/test-data/unit/fixtures/function.pyi
deleted file mode 100644
index 768ca90..0000000
--- a/test-data/unit/fixtures/function.pyi
+++ /dev/null
@@ -1,10 +0,0 @@
-from typing import builtinclass
-
-@builtinclass
-class object:
- def __init__(self): pass
-
-class type: pass
-class function: pass
-class int: pass
-class str: pass
diff --git a/test-data/unit/fixtures/isinstance.pyi b/test-data/unit/fixtures/isinstance.pyi
deleted file mode 100644
index c155a97..0000000
--- a/test-data/unit/fixtures/isinstance.pyi
+++ /dev/null
@@ -1,22 +0,0 @@
-from typing import builtinclass, Tuple, TypeVar, Generic, Union
-
-T = TypeVar('T')
-
-class object:
- def __init__(self) -> None: pass
-
-class type:
- def __init__(self, x) -> None: pass
-
-class tuple(Generic[T]): pass
-
-class function: pass
-
-def isinstance(x: object, t: Union[type, Tuple[type, ...]]) -> bool: pass
-
-class int:
- def __add__(self, other: 'int') -> 'int': pass
-class float: pass
-class bool(int): pass
-class str:
- def __add__(self, other: 'str') -> 'str': pass
diff --git a/test-data/unit/fixtures/isinstancelist.pyi b/test-data/unit/fixtures/isinstancelist.pyi
deleted file mode 100644
index 4b35698..0000000
--- a/test-data/unit/fixtures/isinstancelist.pyi
+++ /dev/null
@@ -1,44 +0,0 @@
-from typing import builtinclass, Iterable, Iterator, Generic, TypeVar, List, Mapping, overload, Tuple
-
-@builtinclass
-class object:
- def __init__(self) -> None: pass
-
-@builtinclass
-class type:
- def __init__(self, x) -> None: pass
-
-class tuple: pass
-class function: pass
-
-def isinstance(x: object, t: type) -> bool: pass
-
-@builtinclass
-class int:
- def __add__(self, x: int) -> int: pass
-@builtinclass
-class bool(int): pass
-@builtinclass
-class str:
- def __add__(self, x: str) -> str: pass
- def __getitem__(self, x: int) -> str: pass
-
-T = TypeVar('T')
-KT = TypeVar('KT')
-VT = TypeVar('VT')
-
-class list(Iterable[T], Generic[T]):
- def __iter__(self) -> Iterator[T]: pass
- def __mul__(self, x: int) -> list[T]: pass
- def __setitem__(self, x: int, v: T) -> None: pass
- def __getitem__(self, x: int) -> T: pass
- def __add__(self, x: List[T]) -> T: pass
-
-class dict(Iterable[KT], Mapping[KT, VT], Generic[KT, VT]):
- @overload
- def __init__(self, **kwargs: VT) -> None: pass
- @overload
- def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass
- def __setitem__(self, k: KT, v: VT) -> None: pass
- def __iter__(self) -> Iterator[KT]: pass
- def update(self, a: Mapping[KT, VT]) -> None: pass
diff --git a/test-data/unit/fixtures/list.pyi b/test-data/unit/fixtures/list.pyi
deleted file mode 100644
index 9413cf7..0000000
--- a/test-data/unit/fixtures/list.pyi
+++ /dev/null
@@ -1,30 +0,0 @@
-# Builtins stub used in list-related test cases.
-
-from typing import TypeVar, Generic, builtinclass, Iterable, Iterator, overload
-
-T = TypeVar('T')
-
-@builtinclass
-class object:
- def __init__(self): pass
-
-class type: pass
-class ellipsis: pass
-
-class list(Iterable[T], Generic[T]):
- @overload
- def __init__(self) -> None: pass
- @overload
- def __init__(self, x: Iterable[T]) -> None: pass
- def __iter__(self) -> Iterator[T]: pass
- def __add__(self, x: list[T]) -> list[T]: pass
- def __mul__(self, x: int) -> list[T]: pass
- def __getitem__(self, x: int) -> T: pass
- def append(self, x: T) -> None: pass
- def extend(self, x: Iterable[T]) -> None: pass
-
-class tuple: pass
-class function: pass
-class int: pass
-class str: pass
-class bool: pass
diff --git a/test-data/unit/fixtures/module.pyi b/test-data/unit/fixtures/module.pyi
deleted file mode 100644
index fb2a4c2..0000000
--- a/test-data/unit/fixtures/module.pyi
+++ /dev/null
@@ -1,18 +0,0 @@
-from typing import Any, Dict, Generic, TypeVar
-
-T = TypeVar('T')
-S = TypeVar('S')
-
-class object:
- def __init__(self) -> None: pass
-class module:
- __name__ = ... # type: str
- __file__ = ... # type: str
- __dict__ = ... # type: Dict[str, Any]
-class type: pass
-class function: pass
-class int: pass
-class str: pass
-class bool: pass
-class tuple: pass
-class dict(Generic[T, S]): pass
diff --git a/test-data/unit/fixtures/module_all.pyi b/test-data/unit/fixtures/module_all.pyi
deleted file mode 100644
index cc1b552..0000000
--- a/test-data/unit/fixtures/module_all.pyi
+++ /dev/null
@@ -1,15 +0,0 @@
-from typing import Generic, Sequence, TypeVar
-_T = TypeVar('_T')
-
-class object:
- def __init__(self) -> None: pass
-class module: pass
-class type: pass
-class function: pass
-class int: pass
-class str: pass
-class list(Generic[_T], Sequence[_T]):
- def append(self, x: _T): pass
- def extend(self, x: Sequence[_T]): pass
- def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass
-class tuple: pass
diff --git a/test-data/unit/fixtures/module_all_python2.pyi b/test-data/unit/fixtures/module_all_python2.pyi
deleted file mode 100644
index ed17d4d..0000000
--- a/test-data/unit/fixtures/module_all_python2.pyi
+++ /dev/null
@@ -1,16 +0,0 @@
-from typing import Generic, Sequence, TypeVar
-_T = TypeVar('_T')
-
-class object:
- def __init__(self) -> None: pass
-class module: pass
-class type: pass
-class function: pass
-class int: pass
-class str: pass
-class unicode: pass
-class list(Generic[_T], Sequence[_T]):
- def append(self, x: _T): pass
- def extend(self, x: Sequence[_T]): pass
- def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass
-class tuple: pass
diff --git a/test-data/unit/fixtures/ops.pyi b/test-data/unit/fixtures/ops.pyi
deleted file mode 100644
index 8956b79..0000000
--- a/test-data/unit/fixtures/ops.pyi
+++ /dev/null
@@ -1,58 +0,0 @@
-from typing import builtinclass, overload, Any, Generic, Sequence, Tuple, TypeVar
-
-Tco = TypeVar('Tco', covariant=True)
-
-# This is an extension of transform builtins with additional operations.
-
-@builtinclass
-class object:
- def __init__(self) -> None: pass
- def __eq__(self, o: 'object') -> 'bool': pass
- def __ne__(self, o: 'object') -> 'bool': pass
-
-class type: pass
-
-class slice: pass
-
-class tuple(Sequence[Tco], Generic[Tco]):
- def __getitem__(self, x: int) -> Tco: pass
- def __eq__(self, x: object) -> bool: pass
- def __ne__(self, x: object) -> bool: pass
- def __lt__(self, x: 'tuple') -> bool: pass
- def __le__(self, x: 'tuple') -> bool: pass
- def __gt__(self, x: 'tuple') -> bool: pass
- def __ge__(self, x: 'tuple') -> bool: pass
-
-class function: pass
-
-class bool: pass
-
-class str:
- def __init__(self, x: 'int') -> None: pass
- def __add__(self, x: 'str') -> 'str': pass
- def startswith(self, x: 'str') -> bool: pass
-
-class unicode: pass
-
-class int:
- def __add__(self, x: 'int') -> 'int': pass
- def __sub__(self, x: 'int') -> 'int': pass
- def __mul__(self, x: 'int') -> 'int': pass
- def __mod__(self, x: 'int') -> 'int': pass
- def __floordiv__(self, x: 'int') -> 'int': pass
- def __pos__(self) -> 'int': pass
- def __neg__(self) -> 'int': pass
- def __eq__(self, x: object) -> bool: pass
- def __ne__(self, x: object) -> bool: pass
- def __lt__(self, x: 'int') -> bool: pass
- def __le__(self, x: 'int') -> bool: pass
- def __gt__(self, x: 'int') -> bool: pass
- def __ge__(self, x: 'int') -> bool: pass
-
-class float: pass
-
-class BaseException: pass
-
-def __print(a1=None, a2=None, a3=None, a4=None): pass
-
-class module: pass
diff --git a/test-data/unit/fixtures/primitives.pyi b/test-data/unit/fixtures/primitives.pyi
deleted file mode 100644
index b6ec4d4..0000000
--- a/test-data/unit/fixtures/primitives.pyi
+++ /dev/null
@@ -1,17 +0,0 @@
-# builtins stub with non-generic primitive types
-
-class object:
- def __init__(self) -> None: pass
-
-class type:
- def __init__(self, x) -> None: pass
-
-class int: pass
-class float: pass
-class complex: pass
-class bool: pass
-class str: pass
-class bytes: pass
-class bytearray: pass
-class tuple: pass
-class function: pass
diff --git a/test-data/unit/fixtures/property.pyi b/test-data/unit/fixtures/property.pyi
deleted file mode 100644
index b2e747b..0000000
--- a/test-data/unit/fixtures/property.pyi
+++ /dev/null
@@ -1,17 +0,0 @@
-import typing
-
-class object:
- def __init__(self) -> None: pass
-
-class type:
- def __init__(self, x) -> None: pass
-
-class function: pass
-
-property = object() # Dummy definition.
-
-class int: pass
-class str: pass
-class bytes: pass
-class tuple: pass
-class bool: pass
diff --git a/test-data/unit/fixtures/python2.pyi b/test-data/unit/fixtures/python2.pyi
deleted file mode 100644
index 61e48be..0000000
--- a/test-data/unit/fixtures/python2.pyi
+++ /dev/null
@@ -1,18 +0,0 @@
-from typing import Generic, Iterable, TypeVar
-
-class object:
- def __init__(self) -> None: pass
-
-class type:
- def __init__(self, x) -> None: pass
-
-class function: pass
-
-class int: pass
-class str: pass
-class unicode: pass
-
-T = TypeVar('T')
-class list(Iterable[T], Generic[T]): pass
-
-# Definition of None is implicit
diff --git a/test-data/unit/fixtures/set.pyi b/test-data/unit/fixtures/set.pyi
deleted file mode 100644
index cb8bbcf..0000000
--- a/test-data/unit/fixtures/set.pyi
+++ /dev/null
@@ -1,21 +0,0 @@
-# Builtins stub used in set-related test cases.
-
-from typing import TypeVar, Generic, Iterator, Iterable, Set
-
-T = TypeVar('T')
-
-class object:
- def __init__(self) -> None: pass
-
-class type: pass
-class tuple: pass
-class function: pass
-
-class int: pass
-class str: pass
-
-class set(Iterable[T], Generic[T]):
- def __iter__(self) -> Iterator[T]: pass
- def add(self, x: T) -> None: pass
- def discard(self, x: T) -> None: pass
- def update(self, x: Set[T]) -> None: pass
diff --git a/test-data/unit/fixtures/slice.pyi b/test-data/unit/fixtures/slice.pyi
deleted file mode 100644
index c01ffbb..0000000
--- a/test-data/unit/fixtures/slice.pyi
+++ /dev/null
@@ -1,13 +0,0 @@
-# Builtins stub used in slicing test cases.
-
-class object:
- def __init__(self): pass
-
-class type: pass
-class tuple: pass
-class function: pass
-
-class int: pass
-class str: pass
-
-class slice: pass
diff --git a/test-data/unit/fixtures/staticmethod.pyi b/test-data/unit/fixtures/staticmethod.pyi
deleted file mode 100644
index 5f1013f..0000000
--- a/test-data/unit/fixtures/staticmethod.pyi
+++ /dev/null
@@ -1,19 +0,0 @@
-import typing
-
-class object:
- def __init__(self) -> None: pass
-
-class type:
- def __init__(self, x) -> None: pass
-
-class function: pass
-
-staticmethod = object() # Dummy definition.
-
-class int:
- @staticmethod
- def from_bytes(bytes: bytes, byteorder: str) -> int: pass
-
-class str: pass
-class unicode: pass
-class bytes: pass
diff --git a/test-data/unit/fixtures/transform.pyi b/test-data/unit/fixtures/transform.pyi
deleted file mode 100644
index afdc2bf..0000000
--- a/test-data/unit/fixtures/transform.pyi
+++ /dev/null
@@ -1,30 +0,0 @@
-# Builtins stubs used implicitly in program transformation test cases.
-
-class object:
- def __init__(self) -> None: pass
-
-class type: pass
-
-# str is handy for debugging; allows outputting messages.
-class str: pass
-
-# Primitive types int/float have special coercion behaviour (they may have
-# a different representation from ordinary values).
-
-class int: pass
-
-class float: pass
-
-
-# The functions below are special functions used in test cases; their
-# implementations are actually in the __dynchk module, but they are defined
-# here so that the semantic analyzer and the type checker are happy without
-# having to analyze the entire __dynchk module all the time.
-#
-# The transformation implementation has special case handling for these
-# functions; it's a bit ugly but it works for now.
-
-def __print(a1=None, a2=None, a3=None, a4=None):
- # Do not use *args since this would require list and break many test
- # cases.
- pass
diff --git a/test-data/unit/fixtures/tuple-simple.pyi b/test-data/unit/fixtures/tuple-simple.pyi
deleted file mode 100644
index b195dfa..0000000
--- a/test-data/unit/fixtures/tuple-simple.pyi
+++ /dev/null
@@ -1,20 +0,0 @@
-# Builtins stub used in some tuple-related test cases.
-#
-# This is a simpler version of tuple.py which is useful
-# and makes some test cases easier to write/debug.
-
-from typing import Iterable, TypeVar, Generic
-
-T = TypeVar('T')
-
-class object:
- def __init__(self): pass
-
-class type: pass
-class tuple(Generic[T]):
- def __getitem__(self, x: int) -> T: pass
-class function: pass
-
-# We need int for indexing tuples.
-class int: pass
-class str: pass # For convenience
diff --git a/test-data/unit/fixtures/tuple.pyi b/test-data/unit/fixtures/tuple.pyi
deleted file mode 100644
index 2300cf4..0000000
--- a/test-data/unit/fixtures/tuple.pyi
+++ /dev/null
@@ -1,29 +0,0 @@
-# Builtins stub used in tuple-related test cases.
-
-from typing import Iterable, Iterator, TypeVar, Generic, Sequence
-
-Tco = TypeVar('Tco', covariant=True)
-
-class object:
- def __init__(self): pass
-
-class type:
- def __init__(self, *a) -> None: pass
- def __call__(self, *a) -> object: pass
-class tuple(Sequence[Tco], Generic[Tco]):
- def __iter__(self) -> Iterator[Tco]: pass
- def __getitem__(self, x: int) -> Tco: pass
-class function: pass
-
-# We need int for indexing tuples.
-class int: pass
-class bool: pass
-class str: pass # For convenience
-class unicode: pass
-
-T = TypeVar('T')
-
-class list(Sequence[T], Generic[T]): pass
-def isinstance(x: object, t: type) -> bool: pass
-
-def sum(iterable: Iterable[T], start: T = None) -> T: pass
diff --git a/test-data/unit/fixtures/union.pyi b/test-data/unit/fixtures/union.pyi
deleted file mode 100644
index 78a41f9..0000000
--- a/test-data/unit/fixtures/union.pyi
+++ /dev/null
@@ -1,18 +0,0 @@
-# Builtins stub used in union-related test cases.
-
-from isinstance import isinstance
-from typing import Iterable, TypeVar
-
-class object:
- def __init__(self): pass
-
-class type: pass
-class function: pass
-
-# Current tuple types get special treatment in the type checker, thus there
-# is no need for type arguments here.
-class tuple: pass
-
-# We need int for indexing tuples.
-class int: pass
-class str: pass # For convenience
diff --git a/test-data/unit/lib-stub/__builtin__.pyi b/test-data/unit/lib-stub/__builtin__.pyi
deleted file mode 100644
index eec6228..0000000
--- a/test-data/unit/lib-stub/__builtin__.pyi
+++ /dev/null
@@ -1,27 +0,0 @@
-class Any: pass
-
-class object:
- def __init__(self):
- # type: () -> None
- pass
-
-class type:
- def __init__(self, x):
- # type: (Any) -> None
- pass
-
-# These are provided here for convenience.
-class int: pass
-class float: pass
-
-class str: pass
-class unicode: pass
-
-class tuple: pass
-class function: pass
-
-class ellipsis: pass
-
-def print(*args, end=''): pass
-
-# Definition of None is implicit
diff --git a/test-data/unit/lib-stub/abc.pyi b/test-data/unit/lib-stub/abc.pyi
deleted file mode 100644
index 4afe734..0000000
--- a/test-data/unit/lib-stub/abc.pyi
+++ /dev/null
@@ -1,3 +0,0 @@
-class ABCMeta: pass
-abstractmethod = object()
-abstractproperty = object()
diff --git a/test-data/unit/lib-stub/builtins.pyi b/test-data/unit/lib-stub/builtins.pyi
deleted file mode 100644
index 9a636bf..0000000
--- a/test-data/unit/lib-stub/builtins.pyi
+++ /dev/null
@@ -1,23 +0,0 @@
-class Any: pass
-
-class object:
- def __init__(self) -> None: pass
-
-class type:
- def __init__(self, x: Any) -> None: pass
-
-# These are provided here for convenience.
-class int:
- def __add__(self, other: 'int') -> 'int': pass
-class float: pass
-
-class str:
- def __add__(self, other: 'str') -> 'str': pass
-class bytes: pass
-
-class tuple: pass
-class function: pass
-
-class ellipsis: pass
-
-# Definition of None is implicit
diff --git a/test-data/unit/lib-stub/collections.pyi b/test-data/unit/lib-stub/collections.pyi
deleted file mode 100644
index 00b7cea..0000000
--- a/test-data/unit/lib-stub/collections.pyi
+++ /dev/null
@@ -1,3 +0,0 @@
-import typing
-
-namedtuple = object()
diff --git a/test-data/unit/lib-stub/mypy_extensions.pyi b/test-data/unit/lib-stub/mypy_extensions.pyi
deleted file mode 100644
index 2bfc072..0000000
--- a/test-data/unit/lib-stub/mypy_extensions.pyi
+++ /dev/null
@@ -1,6 +0,0 @@
-from typing import Dict, Type, TypeVar
-
-T = TypeVar('T')
-
-
-def TypedDict(typename: str, fields: Dict[str, Type[T]]) -> Type[dict]: pass
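
For context, the functional TypedDict form stubbed here is used roughly like this (a sketch only; the real mypy_extensions module offers more than this minimal stub):

    from mypy_extensions import TypedDict

    Movie = TypedDict('Movie', {'name': str, 'year': int})

    m = {'name': 'Blade Runner', 'year': 1982}  # type: Movie
    m['year'] = '1982'  # mypy flags this: "str" is not compatible with "int"
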
diff --git a/test-data/unit/lib-stub/sys.pyi b/test-data/unit/lib-stub/sys.pyi
deleted file mode 100644
index 3959cb0..0000000
--- a/test-data/unit/lib-stub/sys.pyi
+++ /dev/null
@@ -1,2 +0,0 @@
-version_info = (0, 0, 0, '', 0)
-platform = ''
diff --git a/test-data/unit/lib-stub/types.pyi b/test-data/unit/lib-stub/types.pyi
deleted file mode 100644
index aa0a19f..0000000
--- a/test-data/unit/lib-stub/types.pyi
+++ /dev/null
@@ -1,4 +0,0 @@
-from typing import TypeVar
-T = TypeVar('T')
-def coroutine(func: T) -> T:
- return func
diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi
deleted file mode 100644
index 77a7b34..0000000
--- a/test-data/unit/lib-stub/typing.pyi
+++ /dev/null
@@ -1,90 +0,0 @@
-# Stub for typing module. Many of the definitions have special handling in
-# the type checker, so they can just be initialized to anything.
-
-from abc import abstractmethod
-
-cast = 0
-overload = 0
-Any = 0
-Union = 0
-Optional = 0
-TypeVar = 0
-Generic = 0
-Tuple = 0
-Callable = 0
-builtinclass = 0
-_promote = 0
-NamedTuple = 0
-Type = 0
-no_type_check = 0
-
-# Type aliases.
-List = 0
-Dict = 0
-Set = 0
-
-T = TypeVar('T')
-U = TypeVar('U')
-V = TypeVar('V')
-S = TypeVar('S')
-
-class Container(Generic[T]):
- @abstractmethod
- # Use int because bool isn't in the default test builtins
- def __contains__(self, arg: T) -> int: pass
-
-class Sized:
- @abstractmethod
- def __len__(self) -> int: pass
-
-class Iterable(Generic[T]):
- @abstractmethod
- def __iter__(self) -> 'Iterator[T]': pass
-
-class Iterator(Iterable[T], Generic[T]):
- @abstractmethod
- def __next__(self) -> T: pass
-
-class Generator(Iterator[T], Generic[T, U, V]):
- @abstractmethod
- def send(self, value: U) -> T: pass
-
- @abstractmethod
- def throw(self, typ: Any, val: Any=None, tb=None) -> None: pass
-
- @abstractmethod
- def close(self) -> None: pass
-
- @abstractmethod
- def __iter__(self) -> 'Generator[T, U, V]': pass
-
-class Awaitable(Generic[T]):
- @abstractmethod
- def __await__(self) -> Generator[Any, Any, T]: pass
-
-class AwaitableGenerator(Generator[T, U, V], Awaitable[V], Generic[T, U, V, S]):
- pass
-
-class AsyncIterable(Generic[T]):
- @abstractmethod
- def __aiter__(self) -> 'AsyncIterator[T]': pass
-
-class AsyncIterator(AsyncIterable[T], Generic[T]):
- def __aiter__(self) -> 'AsyncIterator[T]': return self
- @abstractmethod
- def __anext__(self) -> Awaitable[T]: pass
-
-class Sequence(Iterable[T], Generic[T]):
- @abstractmethod
- def __getitem__(self, n: Any) -> T: pass
-
-class Mapping(Generic[T, U]): pass
-
-class MutableMapping(Generic[T, U]): pass
-
-def NewType(name: str, tp: Type[T]) -> Callable[[T], T]:
- def new_type(x):
- return x
- return new_type
-
-TYPE_CHECKING = 1
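
The NewType helper at the end of this stub mirrors typing.NewType: at runtime it returns a plain identity function, while mypy treats the result as a distinct type. A small usage sketch (names are illustrative):

    from typing import NewType

    UserId = NewType('UserId', int)

    def lookup(user_id: UserId) -> str:
        return 'user %d' % user_id

    lookup(UserId(42))   # accepted
    lookup(42)           # rejected by mypy: "int" is not "UserId"
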
diff --git a/test-data/unit/parse-errors.test b/test-data/unit/parse-errors.test
deleted file mode 100644
index 22a3c5c..0000000
--- a/test-data/unit/parse-errors.test
+++ /dev/null
@@ -1,448 +0,0 @@
--- Test cases for parser errors. Each test case consists of two sections.
--- The first section contains [case NAME] followed by the input code, while
--- the second section contains [out] followed by the output from the parser.
---
--- The input file name in errors is "file".
---
--- Comments starting with "--" in this file will be ignored, except for lines
--- starting with "----" that are not ignored. The first two dashes of these
--- lines are interpreted as escapes and removed.
-
-[case testInvalidFunction]
-def f()
- pass
-[out]
-file:1: error: invalid syntax
-
-[case testMissingIndent]
-if x:
-1
-[out]
-file:2: error: invalid syntax
-
-[case testUnexpectedIndent]
-1
- 2
-[out]
-file:2: error: unexpected indent
-
-[case testInconsistentIndent]
-if x:
- 1
- 1
-[out]
-file:3: error: unexpected indent
-
-[case testInconsistentIndent2]
-if x:
- 1
- 1
-[out]
-file:3: error: unindent does not match any outer indentation level
-
-[case testInvalidBinaryOp]
-1>
-a*
-a+1*
-[out]
-file:1: error: invalid syntax
-
-[case testDoubleStar]
-**a
-[out]
-file:1: error: invalid syntax
-
-[case testInvalidSuperClass]
-class A(C[):
- pass
-[out]
-file:1: error: invalid syntax
-
-[case testMissingSuperClass]
-class A(:
- pass
-[out]
-file:1: error: invalid syntax
-
-[case testUnexpectedEof]
-if 1:
-[out]
-file:1: error: unexpected EOF while parsing
-
-[case testInvalidKeywordArguments1]
-f(x=y, z)
-[out]
-file:1: error: positional argument follows keyword argument
-
-[case testInvalidKeywordArguments2]
-f(**x, y)
-[out]
-file:1: error: positional argument follows keyword argument unpacking
-
-[case testInvalidBareAsteriskAndVarArgs2]
-def f(*x: A, *) -> None: pass
-[out]
-file:1: error: invalid syntax
-
-[case testInvalidBareAsteriskAndVarArgs3]
-def f(*, *x: A) -> None: pass
-[out]
-file:1: error: invalid syntax
-
-[case testInvalidBareAsteriskAndVarArgs4]
-def f(*, **x: A) -> None: pass
-[out]
-file:1: error: named arguments must follow bare *
-
-[case testInvalidBareAsterisk1]
-def f(*) -> None: pass
-[out]
-file:1: error: named arguments must follow bare *
-
-[case testInvalidBareAsterisk2]
-def f(x, *) -> None: pass
-[out]
-file:1: error: named arguments must follow bare *
-
-[case testInvalidFuncDefArgs1]
-def f(x = y, x): pass
-[out]
-file:1: error: non-default argument follows default argument
-
-[case testInvalidFuncDefArgs3]
-def f(**x, y):
- pass
-[out]
-file:1: error: invalid syntax
-
-[case testInvalidFuncDefArgs4]
-def f(**x, y=x):
- pass
-[out]
-file:1: error: invalid syntax
-
-[case testInvalidStringLiteralType]
-def f(x:
- 'A['
- ) -> None: pass
-[out]
-file:1: error: syntax error in type comment
-
-[case testInvalidStringLiteralType2]
-def f(x:
- 'A B'
- ) -> None: pass
-[out]
-file:1: error: syntax error in type comment
-
-[case testInvalidTypeComment]
-0
-x = 0 # type: A A
-[out]
-file:2: error: syntax error in type comment
-
-[case testInvalidTypeComment2]
-0
-x = 0 # type: A[
-[out]
-file:2: error: syntax error in type comment
-
-[case testInvalidTypeComment3]
-0
-x = 0 # type:
-[out]
-file:2: error: syntax error in type comment
-
-[case testInvalidTypeComment4]
-0
-x = 0 # type: *
-[out]
-file:2: error: syntax error in type comment
-
-[case testInvalidMultilineLiteralType]
-def f() -> "A\nB": pass
-[out]
-file:1: error: syntax error in type comment
-
-[case testInvalidSignatureInComment1]
-def f(): # type: x
- pass
-[out]
-file:1: error: syntax error in type comment
-
-[case testInvalidSignatureInComment2]
-def f(): # type:
- pass
-[out]
-file:1: error: syntax error in type comment
-
-[case testInvalidSignatureInComment3]
-def f(): # type: (
- pass
-[out]
-file:1: error: syntax error in type comment
-
-[case testInvalidSignatureInComment4]
-def f(): # type: (.
- pass
-[out]
-file:1: error: syntax error in type comment
-
-[case testInvalidSignatureInComment5]
-def f(): # type: (x
- pass
-[out]
-file:1: error: syntax error in type comment
-
-[case testInvalidSignatureInComment6]
-def f(): # type: (x)
- pass
-[out]
-file:1: error: syntax error in type comment
-
-[case testInvalidSignatureInComment7]
-def f(): # type: (x) -
- pass
-[out]
-file:1: error: syntax error in type comment
-
-[case testInvalidSignatureInComment8]
-def f(): # type: (x) ->
- pass
-[out]
-file:1: error: syntax error in type comment
-
-[case testInvalidSignatureInComment9]
-def f(): # type: (x) -> .
- pass
-[out]
-file:1: error: syntax error in type comment
-
-[case testInvalidSignatureInComment10]
-def f(): # type: (x) -> x x
- pass
-[out]
-file:1: error: syntax error in type comment
-
-[case testDuplicateSignatures1]
-def f() -> None: # type: () -> None
- pass
-def f(): # type: () -> None
- pass
-[out]
-file:1: error: Function has duplicate type signatures
-
-[case testDuplicateSignatures2]
-def f(x, y: Z): # type: (x, y) -> z
- pass
-[out]
-file:1: error: Function has duplicate type signatures
-
-[case testTooManyTypes]
-def f(x, y): # type: (X, Y, Z) -> z
- pass
-[out]
-file:1: error: Type signature has too many arguments
-
-[case testTooFewTypes]
-def f(x, y): # type: (X) -> z
- pass
-[out]
-file:1: error: Type signature has too few arguments
-
-[case testCommentFunctionAnnotationVarArgMispatch-skip]
-# see mypy issue #1997
-def f(x): # type: (*X) -> Y
- pass
-def g(*x): # type: (X) -> Y
- pass
-[out]
-file:1: error: Inconsistent use of '*' in function signature
-file:3: error: Inconsistent use of '*' in function signature
-
-[case testCommentFunctionAnnotationVarArgMispatch2-skip]
-# see mypy issue #1997
-def f(*x, **y): # type: (**X, *Y) -> Z
- pass
-def g(*x, **y): # type: (*X, *Y) -> Z
- pass
-[out]
-file:1: error: Inconsistent use of '*' in function signature
-file:3: error: syntax error in type comment
-file:3: error: Inconsistent use of '*' in function signature
-file:3: error: Inconsistent use of '**' in function signature
-
-[case testPrintStatementInPython3-skip]
-print 1
-[out]
-file:1: error: Missing parentheses in call to 'print'
-
-[case testInvalidConditionInConditionalExpression]
-1 if 2, 3 else 4
-[out]
-file:1: error: invalid syntax
-
-[case testInvalidConditionInConditionalExpression2]
-1 if x for y in z else 4
-[out]
-file:1: error: invalid syntax
-
-[case testInvalidConditionInConditionalExpression2]
-1 if x else for y in z
-[out]
-file:1: error: invalid syntax
-
-[case testYieldFromNotRightParameter]
-def f():
- yield from
-[out]
-file:2: error: invalid syntax
-
-[case testYieldFromAfterReturn]
-def f():
- return yield from h()
-[out]
-file:2: error: invalid syntax
-
-[case testImportDotModule]
-import .x
-[out]
-file:1: error: invalid syntax
-
-[case testImportDot]
-import .
-[out]
-file:1: error: invalid syntax
-
-[case testInvalidFunctionName]
-def while(): pass
-[out]
-file:1: error: invalid syntax
-
-[case testInvalidEllipsis1]
-...0
-..._
-...a
-[out]
-file:1: error: invalid syntax
-
-[case testBlockStatementInSingleLineIf]
-if 1: if 2: pass
-[out]
-file:1: error: invalid syntax
-
-[case testBlockStatementInSingleLineIf2]
-if 1: while 2: pass
-[out]
-file:1: error: invalid syntax
-
-[case testBlockStatementInSingleLineIf3]
-if 1: for x in y: pass
-[out]
-file:1: error: invalid syntax
-
-[case testUnexpectedEllipsis]
-a = a...
-[out]
-file:1: error: invalid syntax
-
-[case testParseErrorBeforeUnicodeLiteral]
-x u'y'
-[out]
-file:1: error: invalid syntax
-
-[case testParseErrorInExtendedSlicing]
-x[:,
-[out]
-file:1: error: unexpected EOF while parsing
-
-[case testParseErrorInExtendedSlicing2]
-x[:,::
-[out]
-file:1: error: unexpected EOF while parsing
-
-[case testParseErrorInExtendedSlicing3]
-x[:,:
-[out]
-file:1: error: unexpected EOF while parsing
-
-[case testPython2OctalIntLiteralInPython3]
-0377
-[out]
-file:1: error: invalid token
-
-[case testInvalidEncoding]
-# foo
-# coding: uft-8
-[out]
-file:0: error: unknown encoding: uft-8
-
-[case testInvalidEncoding2]
-# coding=Uft.8
-[out]
-file:0: error: unknown encoding: Uft.8
-
-[case testInvalidEncoding3]
-#!/usr/bin python
-# vim: set fileencoding=uft8 :
-[out]
-file:0: error: unknown encoding: uft8
-
-[case testDoubleEncoding]
-# coding: uft8
-# coding: utf8
-# The first coding cookie should be used and fail.
-[out]
-file:0: error: unknown encoding: uft8
-
-[case testDoubleEncoding2]
-# Again the first cookie should be used and fail.
-# coding: uft8
-# coding: utf8
-[out]
-file:0: error: unknown encoding: uft8
-
-[case testLongLiteralInPython3]
-2L
-0x2L
-[out]
-file:1: error: invalid syntax
-
-[case testPython2LegacyInequalityInPython3]
-1 <> 2
-[out]
-file:1: error: invalid syntax
-
-[case testLambdaInListComprehensionInPython3]
-([ 0 for x in 1, 2 if 3 ])
-[out]
-file:1: error: invalid syntax
-
-[case testTupleArgListInPython3]
-def f(x, (y, z)): pass
-[out]
-file:1: error: invalid syntax
-
-[case testBackquoteInPython3]
-`1 + 2`
-[out]
-file:1: error: invalid syntax
-
-[case testSmartQuotes]
-foo = ‘bar’
-[out]
-file:1: error: invalid character in identifier
-
-[case testExceptCommaInPython3]
-try:
- pass
-except KeyError, IndexError:
- pass
-[out]
-file:3: error: invalid syntax
-
-[case testLocalVarWithTypeOnNextLine]
-x = 0
- # type: int
-[out]
-file:2: error: misplaced type annotation
diff --git a/test-data/unit/parse-python2.test b/test-data/unit/parse-python2.test
deleted file mode 100644
index 7abc157..0000000
--- a/test-data/unit/parse-python2.test
+++ /dev/null
@@ -1,399 +0,0 @@
--- Test cases for parser -- Python 2 syntax.
---
--- See parse.test for a description of this file format.
-
-[case testEmptyFile]
-[out]
-MypyFile:1()
-
-[case testStringLiterals]
-'bar'
-u'foo'
-ur'foo'
-u'''bar'''
-b'foo'
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- StrExpr(bar))
- ExpressionStmt:2(
- UnicodeExpr(foo))
- ExpressionStmt:3(
- UnicodeExpr(foo))
- ExpressionStmt:4(
- UnicodeExpr(bar))
- ExpressionStmt:5(
- StrExpr(foo)))
-
-[case testSimplePrint]
-print 1
-print 2, 3
-print (4, 5)
-[out]
-MypyFile:1(
- PrintStmt:1(
- IntExpr(1)
- Newline)
- PrintStmt:2(
- IntExpr(2)
- IntExpr(3)
- Newline)
- PrintStmt:3(
- TupleExpr:3(
- IntExpr(4)
- IntExpr(5))
- Newline))
-
-[case testPrintWithNoArgs]
-print
-[out]
-MypyFile:1(
- PrintStmt:1(
- Newline))
-
-[case testPrintWithTarget]
-print >>foo
-[out]
-MypyFile:1(
- PrintStmt:1(
- Target(
- NameExpr(foo))
- Newline))
-
-[case testPrintWithTargetAndArgs]
-print >>foo, x
-[out]
-MypyFile:1(
- PrintStmt:1(
- NameExpr(x)
- Target(
- NameExpr(foo))
- Newline))
-
-[case testPrintWithTargetAndArgsAndTrailingComma]
-print >>foo, x, y,
-[out]
-MypyFile:1(
- PrintStmt:1(
- NameExpr(x)
- NameExpr(y)
- Target(
- NameExpr(foo))))
-
-[case testSimpleWithTrailingComma]
-print 1,
-print 2, 3,
-print (4, 5),
-[out]
-MypyFile:1(
- PrintStmt:1(
- IntExpr(1))
- PrintStmt:2(
- IntExpr(2)
- IntExpr(3))
- PrintStmt:3(
- TupleExpr:3(
- IntExpr(4)
- IntExpr(5))))
-
-[case testOctalIntLiteral]
-00
-01
-0377
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- IntExpr(0))
- ExpressionStmt:2(
- IntExpr(1))
- ExpressionStmt:3(
- IntExpr(255)))
-
-[case testLongLiteral-skip]
-# see typed_ast issue #26
-0L
-123L
-012L
-0x123l
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- IntExpr(0))
- ExpressionStmt:2(
- IntExpr(123))
- ExpressionStmt:3(
- IntExpr(10))
- ExpressionStmt:4(
- IntExpr(291)))
-
-[case testTryExceptWithComma]
-try:
- x
-except Exception, e:
- y
-[out]
-MypyFile:1(
- TryStmt:1(
- Block:1(
- ExpressionStmt:2(
- NameExpr(x)))
- NameExpr(Exception)
- NameExpr(e)
- Block:3(
- ExpressionStmt:4(
- NameExpr(y)))))
-
-[case testTryExceptWithNestedComma]
-try:
- x
-except (KeyError, IndexError):
- y
-[out]
-MypyFile:1(
- TryStmt:1(
- Block:1(
- ExpressionStmt:2(
- NameExpr(x)))
- TupleExpr:3(
- NameExpr(KeyError)
- NameExpr(IndexError))
- Block:3(
- ExpressionStmt:4(
- NameExpr(y)))))
-
-[case testExecStatement]
-exec a
-[out]
-MypyFile:1(
- ExecStmt:1(
- NameExpr(a)))
-
-[case testExecStatementWithIn]
-exec a in globals()
-[out]
-MypyFile:1(
- ExecStmt:1(
- NameExpr(a)
- CallExpr:1(
- NameExpr(globals)
- Args())))
-
-[case testExecStatementWithInAnd2Expressions]
-exec a in x, y
-[out]
-MypyFile:1(
- ExecStmt:1(
- NameExpr(a)
- NameExpr(x)
- NameExpr(y)))
-
-[case testEllipsisInExpression_python2]
-x = ... # E: invalid syntax
-[out]
-
-[case testStrLiteralConcatenationWithMixedLiteralTypes]
-u'foo' 'bar'
-'bar' u'foo'
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- UnicodeExpr(foobar))
- ExpressionStmt:2(
- UnicodeExpr(barfoo)))
-
-[case testLegacyInequality]
-1 <> 2
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- ComparisonExpr:1(
- !=
- IntExpr(1)
- IntExpr(2))))
-
-[case testLambdaInListComprehensionInPython2]
-([ 0 for x in 1, 2 if 3 ])
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- ListComprehension:1(
- GeneratorExpr:1(
- IntExpr(0)
- NameExpr(x)
- TupleExpr:1(
- IntExpr(1)
- IntExpr(2))
- IntExpr(3)))))
-
-[case testTupleArgListInPython2]
-def f(x, (y, z)): pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x)
- Var(__tuple_arg_2))
- Block:1(
- AssignmentStmt:1(
- TupleExpr:1(
- NameExpr(y)
- NameExpr(z))
- NameExpr(__tuple_arg_2))
- PassStmt:1())))
-
-[case testTupleArgListWithTwoTupleArgsInPython2]
-def f((x, y), (z, zz)): pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(__tuple_arg_1)
- Var(__tuple_arg_2))
- Block:1(
- AssignmentStmt:1(
- TupleExpr:1(
- NameExpr(x)
- NameExpr(y))
- NameExpr(__tuple_arg_1))
- AssignmentStmt:1(
- TupleExpr:1(
- NameExpr(z)
- NameExpr(zz))
- NameExpr(__tuple_arg_2))
- PassStmt:1())))
-
-[case testTupleArgListWithInitializerInPython2]
-def f((y, z) = (1, 2)): pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(__tuple_arg_1))
- Init(
- AssignmentStmt:1(
- NameExpr(__tuple_arg_1)
- TupleExpr:1(
- IntExpr(1)
- IntExpr(2))))
- Block:1(
- AssignmentStmt:1(
- TupleExpr:1(
- NameExpr(y)
- NameExpr(z))
- NameExpr(__tuple_arg_1))
- PassStmt:1())))
-
-[case testLambdaTupleArgListInPython2]
-lambda (x, y): z
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- FuncExpr:1(
- Args(
- Var(__tuple_arg_1))
- Block:1(
- AssignmentStmt:1(
- TupleExpr:1(
- NameExpr(x)
- NameExpr(y))
- NameExpr(__tuple_arg_1))
- ReturnStmt:1(
- NameExpr(z))))))
-
-[case testLambdaSingletonTupleArgListInPython2]
-lambda (x,): z
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- FuncExpr:1(
- Args(
- Var(__tuple_arg_1))
- Block:1(
- AssignmentStmt:1(
- TupleExpr:1(
- NameExpr(x))
- NameExpr(__tuple_arg_1))
- ReturnStmt:1(
- NameExpr(z))))))
-
-[case testLambdaNoTupleArgListInPython2]
-lambda (x): z
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- FuncExpr:1(
- Args(
- Var(x))
- Block:1(
- ReturnStmt:1(
- NameExpr(z))))))
-
-[case testInvalidExprInTupleArgListInPython2_1]
-def f(x, ()): pass
-[out]
-main:1: error: invalid syntax
-
-[case testInvalidExprInTupleArgListInPython2_2]
-def f(x, (y, x[1])): pass
-[out]
-main:1: error: invalid syntax
-
-[case testListLiteralAsTupleArgInPython2]
-def f(x, [x]): pass
-[out]
-main:1: error: invalid syntax
-
-[case testTupleArgAfterStarArgInPython2]
-def f(*a, (b, c)): pass
-[out]
-main:1: error: invalid syntax
-
-[case testTupleArgAfterStarStarArgInPython2]
-def f(**a, (b, c)): pass
-[out]
-main:1: error: invalid syntax
-
-[case testParenthesizedArgumentInPython2]
-def f(x, (y)): pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x)
- Var(y))
- Block:1(
- PassStmt:1())))
-
-[case testDuplicateNameInTupleArgList_python2]
-def f(a, (a, b)):
- pass
-def g((x, (x, y))):
- pass
-[out]
-main:1: error: duplicate argument 'a' in function definition
-main:3: error: duplicate argument 'x' in function definition
-
-[case testBackquotesInPython2]
-`1 + 2`
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- BackquoteExpr:1(
- OpExpr:1(
- +
- IntExpr(1)
- IntExpr(2)))))
-
-[case testBackquoteSpecialCasesInPython2]
-`1, 2`
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- BackquoteExpr:1(
- TupleExpr:1(
- IntExpr(1)
- IntExpr(2)))))
diff --git a/test-data/unit/parse.test b/test-data/unit/parse.test
deleted file mode 100644
index 4335fff..0000000
--- a/test-data/unit/parse.test
+++ /dev/null
@@ -1,3386 +0,0 @@
--- Test cases for parser. Each test case consists of two sections.
--- The first section contains [case NAME] followed by the input code, while
--- the second section contains [out] followed by the output from the parser.
---
--- Lines starting with "--" in this file will be ignored, except for lines
--- starting with "----" that are not ignored. The first two dashes of these
--- lines are interpreted as escapes and removed.
-
-[case testEmptyFile]
-[out]
-MypyFile:1()
-
-[case testExpressionStatement]
-1
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- IntExpr(1)))
-
-[case testAssignment]
-x = 1
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x)
- IntExpr(1)))
-
-[case testExpressionBasics]
-x = f(1, None)
-123 * (2 + x)
-"hello".lower()
--1.23
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x)
- CallExpr:1(
- NameExpr(f)
- Args(
- IntExpr(1)
- NameExpr(None))))
- ExpressionStmt:2(
- OpExpr:2(
- *
- IntExpr(123)
- OpExpr:2(
- +
- IntExpr(2)
- NameExpr(x))))
- ExpressionStmt:3(
- CallExpr:3(
- MemberExpr:3(
- StrExpr(hello)
- lower)
- Args()))
- ExpressionStmt:4(
- UnaryExpr:4(
- -
- FloatExpr(1.23))))
-
-[case testSingleQuotedStr]
-''
-'foo'
-'foo\
-bar'
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- StrExpr())
- ExpressionStmt:2(
- StrExpr(foo))
- ExpressionStmt:3(
- StrExpr(foobar)))
-
-[case testDoubleQuotedStr]
-""
-"foo"
-"foo\
-bar"
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- StrExpr())
- ExpressionStmt:2(
- StrExpr(foo))
- ExpressionStmt:3(
- StrExpr(foobar)))
-
-[case testTripleQuotedStr]
-''''''
-'''foo'''
-'''foo\
-bar'''
-'''\nfoo
-bar'''
-'''fo''bar'''
-""""""
-"""foo"""
-"""foo\
-bar"""
-"""\nfoo
-bar"""
-"""fo""bar"""
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- StrExpr())
- ExpressionStmt:2(
- StrExpr(foo))
- ExpressionStmt:3(
- StrExpr(foobar))
- ExpressionStmt:5(
- StrExpr(\u000afoo\u000abar))
- ExpressionStmt:6(
- StrExpr(fo''bar))
- ExpressionStmt:7(
- StrExpr())
- ExpressionStmt:8(
- StrExpr(foo))
- ExpressionStmt:9(
- StrExpr(foobar))
- ExpressionStmt:11(
- StrExpr(\u000afoo\u000abar))
- ExpressionStmt:12(
- StrExpr(fo""bar)))
-
-[case testRawStr]
-r'x\n\''
-r"x\n\""
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- StrExpr(x\n\'))
- ExpressionStmt:2(
- StrExpr(x\n\")))
---" fix syntax highlight
-
-[case testBytes]
-b'foo'
-b"foo\
-bar"
-br'x\n\''
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- BytesExpr(foo))
- ExpressionStmt:2(
- BytesExpr(foobar))
- ExpressionStmt:3(
- BytesExpr(x\\n\\')))
-
-[case testEscapesInStrings]
-'\r\n\t\x2f\u123f'
-b'\r\n\t\x2f\u123f'
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- StrExpr(\u000d\u000a\u0009/\u123f))
- ExpressionStmt:2(
- BytesExpr(\r\n\t/\\\u123f)))
--- Note \\u in the b'...' case (\u sequence not translated)
-
-[case testEscapedQuote]
-'\''
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- StrExpr(')))
---'
-
-[case testOctalEscapes]
-'\0\1\177\1234'
-b'\1\476'
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- StrExpr(\u0000\u0001\u007fS4))
- ExpressionStmt:2(
- BytesExpr(\x01>)))
-
-[case testUnicodeLiteralInPython3]
-u'foo'
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- StrExpr(foo)))
-
-[case testArrays]
-a = []
-a = [1, 2]
-a[[1]] = a[2]
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(a)
- ListExpr:1())
- AssignmentStmt:2(
- NameExpr(a)
- ListExpr:2(
- IntExpr(1)
- IntExpr(2)))
- AssignmentStmt:3(
- IndexExpr:3(
- NameExpr(a)
- ListExpr:3(
- IntExpr(1)))
- IndexExpr:3(
- NameExpr(a)
- IntExpr(2))))
-
-[case testTuples]
-()
-(1,)
-(1, foo)
-a, b = 1, (2, 3)
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- TupleExpr:1())
- ExpressionStmt:2(
- TupleExpr:2(
- IntExpr(1)))
- ExpressionStmt:3(
- TupleExpr:3(
- IntExpr(1)
- NameExpr(foo)))
- AssignmentStmt:4(
- TupleExpr:4(
- NameExpr(a)
- NameExpr(b))
- TupleExpr:4(
- IntExpr(1)
- TupleExpr:4(
- IntExpr(2)
- IntExpr(3)))))
-
-[case testSimpleFunction]
-def main():
- 1
-[out]
-MypyFile:1(
- FuncDef:1(
- main
- Block:1(
- ExpressionStmt:2(
- IntExpr(1)))))
-
-[case testPass]
-def f():
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- PassStmt:2())))
-
-[case testIf]
-if 1:
- 2
-[out]
-MypyFile:1(
- IfStmt:1(
- If(
- IntExpr(1))
- Then(
- ExpressionStmt:2(
- IntExpr(2)))))
-
-[case testIfElse]
-if 1:
- 2
-else:
- 3
-[out]
-MypyFile:1(
- IfStmt:1(
- If(
- IntExpr(1))
- Then(
- ExpressionStmt:2(
- IntExpr(2)))
- Else(
- ExpressionStmt:4(
- IntExpr(3)))))
-
-[case testIfElif]
-if 1:
- 2
-elif 3:
- 4
-elif 5:
- 6
-else:
- 7
-[out]
-MypyFile:1(
- IfStmt:1(
- If(
- IntExpr(1))
- Then(
- ExpressionStmt:2(
- IntExpr(2)))
- Else(
- IfStmt:3(
- If(
- IntExpr(3))
- Then(
- ExpressionStmt:4(
- IntExpr(4)))
- Else(
- IfStmt:5(
- If(
- IntExpr(5))
- Then(
- ExpressionStmt:6(
- IntExpr(6)))
- Else(
- ExpressionStmt:8(
- IntExpr(7)))))))))
-
-[case testWhile]
-while 1:
- pass
-[out]
-MypyFile:1(
- WhileStmt:1(
- IntExpr(1)
- Block:1(
- PassStmt:2())))
-
-[case testReturn]
-def f():
- return 1
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- ReturnStmt:2(
- IntExpr(1)))))
-
-
-[case testReturnWithoutValue]
-def f():
- return
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- ReturnStmt:2())))
-
-[case testBreak]
-while 1:
- break
-[out]
-MypyFile:1(
- WhileStmt:1(
- IntExpr(1)
- Block:1(
- BreakStmt:2())))
-
-[case testLargeBlock]
-if 1:
- x = 1
- while 2:
- pass
- y = 2
-[out]
-MypyFile:1(
- IfStmt:1(
- If(
- IntExpr(1))
- Then(
- AssignmentStmt:2(
- NameExpr(x)
- IntExpr(1))
- WhileStmt:3(
- IntExpr(2)
- Block:3(
- PassStmt:4()))
- AssignmentStmt:5(
- NameExpr(y)
- IntExpr(2)))))
-
-[case testSimpleClass]
-class A:
- def f(self):
- pass
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- FuncDef:2(
- f
- Args(
- Var(self))
- Block:2(
- PassStmt:3()))))
-
-[case testGlobalVarWithType]
-x = 0 # type: int
-y = False # type: bool
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x)
- IntExpr(0)
- int?)
- AssignmentStmt:2(
- NameExpr(y)
- NameExpr(False)
- bool?))
-
-[case testLocalVarWithType]
-def f():
- x = 0 # type: int
- y = False # type: bool
- a = None # type: Any
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- AssignmentStmt:2(
- NameExpr(x)
- IntExpr(0)
- int?)
- AssignmentStmt:3(
- NameExpr(y)
- NameExpr(False)
- bool?)
- AssignmentStmt:4(
- NameExpr(a)
- NameExpr(None)
- Any?))))
-
-[case testFunctionDefWithType]
-def f(y: str) -> int:
- return
-class A:
- def f(self, a: int, b: Any) -> x:
- pass
- def g(self) -> Any:
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(y))
- def (y: str?) -> int?
- Block:1(
- ReturnStmt:2()))
- ClassDef:3(
- A
- FuncDef:4(
- f
- Args(
- Var(self)
- Var(a)
- Var(b))
- def (self: Any, a: int?, b: Any?) -> x?
- Block:4(
- PassStmt:5()))
- FuncDef:6(
- g
- Args(
- Var(self))
- def (self: Any) -> Any?
- Block:6(
- PassStmt:7()))))
-
-[case testFuncWithNoneReturn]
-def f() -> None:
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- def () -> None?
- Block:1(
- PassStmt:2())))
-
-[case testVarDefWithGenericType]
-x = None # type: List[str]
-y = None # type: Dict[int, Any]
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x)
- NameExpr(None)
- List?[str?])
- AssignmentStmt:2(
- NameExpr(y)
- NameExpr(None)
- Dict?[int?, Any?]))
-
-[case testSignatureWithGenericTypes]
-def f(y: t[Any, x]) -> a[b[c], d]:
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(y))
- def (y: t?[Any?, x?]) -> a?[b?[c?], d?]
- Block:1(
- PassStmt:2())))
-
-[case testParsingExpressionsWithLessAndGreaterThan]
-# The operators < > can sometimes be confused with generic types.
-x = a < b > c
-f(x < b, y > c)
-a < b > 1
-x < b, y > 2
-(a < b > c)
-[out]
-MypyFile:1(
- AssignmentStmt:2(
- NameExpr(x)
- ComparisonExpr:2(
- <
- >
- NameExpr(a)
- NameExpr(b)
- NameExpr(c)))
- ExpressionStmt:3(
- CallExpr:3(
- NameExpr(f)
- Args(
- ComparisonExpr:3(
- <
- NameExpr(x)
- NameExpr(b))
- ComparisonExpr:3(
- >
- NameExpr(y)
- NameExpr(c)))))
- ExpressionStmt:4(
- ComparisonExpr:4(
- <
- >
- NameExpr(a)
- NameExpr(b)
- IntExpr(1)))
- ExpressionStmt:5(
- TupleExpr:5(
- ComparisonExpr:5(
- <
- NameExpr(x)
- NameExpr(b))
- ComparisonExpr:5(
- >
- NameExpr(y)
- IntExpr(2))))
- ExpressionStmt:6(
- ComparisonExpr:6(
- <
- >
- NameExpr(a)
- NameExpr(b)
- NameExpr(c))))
-
-[case testLineContinuation]
-if (1 +
- 2):
- pass
-[out]
-MypyFile:1(
- IfStmt:1(
- If(
- OpExpr:1(
- +
- IntExpr(1)
- IntExpr(2)))
- Then(
- PassStmt:3())))
-
-[case testMultipleVarDef]
-x, y = z # type: int, a[c]
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- TupleExpr:1(
- NameExpr(x)
- NameExpr(y))
- NameExpr(z)
- Tuple[int?, a?[c?]]))
-
-[case testMultipleVarDef2]
-(xx, z, i) = 1 # type: (a[c], Any, int)
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- TupleExpr:1(
- NameExpr(xx)
- NameExpr(z)
- NameExpr(i))
- IntExpr(1)
- Tuple[a?[c?], Any?, int?]))
-
-[case testMultipleVarDef3]
-(xx, (z, i)) = 1 # type: (a[c], (Any, int))
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- TupleExpr:1(
- NameExpr(xx)
- TupleExpr:1(
- NameExpr(z)
- NameExpr(i)))
- IntExpr(1)
- Tuple[a?[c?], Tuple[Any?, int?]]))
-
-[case testAnnotateAssignmentViaSelf]
-class A:
- def __init__(self):
- self.x = 1 # type: int
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- FuncDef:2(
- __init__
- Args(
- Var(self))
- Block:2(
- AssignmentStmt:3(
- MemberExpr:3(
- NameExpr(self)
- x)
- IntExpr(1)
- int?)))))
-
-[case testCommentAfterTypeComment]
-x = 0 # type: int # bar!
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x)
- IntExpr(0)
- int?))
-
-[case testMultilineAssignmentAndAnnotations]
-(x,
- y) = (1,
- 2) # type: foo, bar
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- TupleExpr:1(
- NameExpr(x)
- NameExpr(y))
- TupleExpr:2(
- IntExpr(1)
- IntExpr(2))
- Tuple[foo?, bar?]))
-
-[case testWhitespaceAndCommentAnnotation]
-x = 1#type:int
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x)
- IntExpr(1)
- int?))
-
-[case testWhitespaceAndCommentAnnotation2]
-x = 1# type: int
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x)
- IntExpr(1)
- int?))
-
-[case testWhitespaceAndCommentAnnotation3]
-x = 1# type : int # not recognized!
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x)
- IntExpr(1)))
-
-[case testInvalidAnnotation]
-x=1 ##type: int
-y=1 #.type: int
-z=1 # Type: int
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x)
- IntExpr(1))
- AssignmentStmt:2(
- NameExpr(y)
- IntExpr(1))
- AssignmentStmt:3(
- NameExpr(z)
- IntExpr(1)))
-
-[case testEmptyClass]
-class C:
- pass
-[out]
-MypyFile:1(
- ClassDef:1(
- C
- PassStmt:2()))
-
-[case testOperatorPrecedence]
-a | b ^ c
-a & b << c
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- OpExpr:1(
- |
- NameExpr(a)
- OpExpr:1(
- ^
- NameExpr(b)
- NameExpr(c))))
- ExpressionStmt:2(
- OpExpr:2(
- &
- NameExpr(a)
- OpExpr:2(
- <<
- NameExpr(b)
- NameExpr(c)))))
-
-[case testOperatorAssociativity]
-1 - 2 + 3
-1 << 2 << 3
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- OpExpr:1(
- +
- OpExpr:1(
- -
- IntExpr(1)
- IntExpr(2))
- IntExpr(3)))
- ExpressionStmt:2(
- OpExpr:2(
- <<
- OpExpr:2(
- <<
- IntExpr(1)
- IntExpr(2))
- IntExpr(3))))
-
-[case testUnaryOperators]
--2 * +3 * ~3 * 2
-~3**2
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- OpExpr:1(
- *
- OpExpr:1(
- *
- OpExpr:1(
- *
- UnaryExpr:1(
- -
- IntExpr(2))
- UnaryExpr:1(
- +
- IntExpr(3)))
- UnaryExpr:1(
- ~
- IntExpr(3)))
- IntExpr(2)))
- ExpressionStmt:2(
- UnaryExpr:2(
- ~
- OpExpr:2(
- **
- IntExpr(3)
- IntExpr(2)))))
-
-[case testSingleLineBodies]
-if 1: pass
-while 1: pass
-def f(): pass
-def g() -> int: return 1
-[out]
-MypyFile:1(
- IfStmt:1(
- If(
- IntExpr(1))
- Then(
- PassStmt:1()))
- WhileStmt:2(
- IntExpr(1)
- Block:2(
- PassStmt:2()))
- FuncDef:3(
- f
- Block:3(
- PassStmt:3()))
- FuncDef:4(
- g
- def () -> int?
- Block:4(
- ReturnStmt:4(
- IntExpr(1)))))
-
-[case testForStatement]
-for x in y:
- pass
-for x, (y, w) in z:
- 1
-for [x, (y, w)] in z:
- 1
-[out]
-MypyFile:1(
- ForStmt:1(
- NameExpr(x)
- NameExpr(y)
- Block:1(
- PassStmt:2()))
- ForStmt:3(
- TupleExpr:3(
- NameExpr(x)
- TupleExpr:3(
- NameExpr(y)
- NameExpr(w)))
- NameExpr(z)
- Block:3(
- ExpressionStmt:4(
- IntExpr(1))))
- ForStmt:5(
- ListExpr:5(
- NameExpr(x)
- TupleExpr:5(
- NameExpr(y)
- NameExpr(w)))
- NameExpr(z)
- Block:5(
- ExpressionStmt:6(
- IntExpr(1)))))
-
-[case testGlobalDecl]
-global x
-def f():
- global x, y
-[out]
-MypyFile:1(
- GlobalDecl:1(
- x)
- FuncDef:2(
- f
- Block:2(
- GlobalDecl:3(
- x
- y))))
-
-[case testNonlocalDecl]
-def f():
- def g():
- nonlocal x, y
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- FuncDef:2(
- g
- Block:2(
- NonlocalDecl:3(
- x
- y))))))
-
-[case testRaiseStatement]
-raise foo
-[out]
-MypyFile:1(
- RaiseStmt:1(
- NameExpr(foo)))
-
-[case testRaiseWithoutArg]
-try:
- pass
-except:
- raise
-[out]
-MypyFile:1(
- TryStmt:1(
- Block:1(
- PassStmt:2())
- Block:3(
- RaiseStmt:4())))
-
-[case testRaiseFrom]
-raise e from x
-[out]
-MypyFile:1(
- RaiseStmt:1(
- NameExpr(e)
- NameExpr(x)))
-
-[case testBaseclasses]
-class A(B):
- pass
-class A(B[T], C[Any, d[x]]):
- pass
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- BaseTypeExpr(
- NameExpr(B))
- PassStmt:2())
- ClassDef:3(
- A
- BaseTypeExpr(
- IndexExpr:3(
- NameExpr(B)
- NameExpr(T))
- IndexExpr:3(
- NameExpr(C)
- TupleExpr:3(
- NameExpr(Any)
- IndexExpr:3(
- NameExpr(d)
- NameExpr(x)))))
- PassStmt:4()))
-
-[case testIsNot]
-x is not y
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- ComparisonExpr:1(
- is not
- NameExpr(x)
- NameExpr(y))))
-
-[case testNotIn]
-x not in y
-not x not in y
-x not in y | z
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- ComparisonExpr:1(
- not in
- NameExpr(x)
- NameExpr(y)))
- ExpressionStmt:2(
- UnaryExpr:2(
- not
- ComparisonExpr:2(
- not in
- NameExpr(x)
- NameExpr(y))))
- ExpressionStmt:3(
- ComparisonExpr:3(
- not in
- NameExpr(x)
- OpExpr:3(
- |
- NameExpr(y)
- NameExpr(z)))))
-
-[case testNotAsBinaryOp]
-x not y # E: invalid syntax
-[out]
-
-[case testNotIs]
-x not is y # E: invalid syntax
-[out]
-
-[case testBinaryNegAsBinaryOp]
-1 ~ 2 # E: invalid syntax
-[out]
-
-[case testDictionaryExpression]
-{}
-{1:x}
-{1:x, 2 or 1:2 and 3}
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- DictExpr:1())
- ExpressionStmt:2(
- DictExpr:2(
- IntExpr(1)
- NameExpr(x)))
- ExpressionStmt:3(
- DictExpr:3(
- IntExpr(1)
- NameExpr(x)
- OpExpr:3(
- or
- IntExpr(2)
- IntExpr(1))
- OpExpr:3(
- and
- IntExpr(2)
- IntExpr(3)))))
-
-[case testImport]
-import x
-import y.z.foo, __foo__.bar
-[out]
-MypyFile:1(
- Import:1(x)
- Import:2(y.z.foo, __foo__.bar))
-
-[case testVariableTypeWithQualifiedName]
-x = None # type: x.y
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x)
- NameExpr(None)
- x.y?))
-
-[case testTypeInSignatureWithQualifiedName]
-def f() -> x.y[a.b.c]: pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- def () -> x.y?[a.b.c?]
- Block:1(
- PassStmt:1())))
-
-[case testImportFrom]
-from m import x
-from m.n import x, y, z
-[out]
-MypyFile:1(
- ImportFrom:1(m, [x])
- ImportFrom:2(m.n, [x, y, z]))
-
-[case testImportFromAs]
-from m import x as y
-from x import y, z as a, c as c
-[out]
-MypyFile:1(
- ImportFrom:1(m, [x : y])
- ImportFrom:2(x, [y, z : a, c : c]))
-
-[case testImportStar]
-from x import *
-[out]
-MypyFile:1(
- ImportAll:1(x))
-
-[case testImportsInDifferentPlaces]
-1
-import x
-def f():
- from x import y
- from z import *
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- IntExpr(1))
- Import:2(x)
- FuncDef:3(
- f
- Block:3(
- ImportFrom:4(x, [y])
- ImportAll:5(z))))
-
-[case testImportWithExtraComma]
-from x import (y, z,)
-[out]
-MypyFile:1(
- ImportFrom:1(x, [y, z]))
-
-[case testDefaultArgs]
-def f(x=1):
- pass
-def g(x, y=1+2, z=(1, 2)):
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- Init(
- AssignmentStmt:1(
- NameExpr(x)
- IntExpr(1)))
- Block:1(
- PassStmt:2()))
- FuncDef:3(
- g
- Args(
- Var(x)
- Var(y)
- Var(z))
- Init(
- AssignmentStmt:3(
- NameExpr(y)
- OpExpr:3(
- +
- IntExpr(1)
- IntExpr(2)))
- AssignmentStmt:3(
- NameExpr(z)
- TupleExpr:3(
- IntExpr(1)
- IntExpr(2))))
- Block:3(
- PassStmt:4())))
-
-[case testTryFinally]
-try:
- 1
-finally:
- 2
-[out]
-MypyFile:1(
- TryStmt:1(
- Block:1(
- ExpressionStmt:2(
- IntExpr(1)))
- Finally(
- ExpressionStmt:4(
- IntExpr(2)))))
-
-[case testTry]
-try:
- 1
-except x:
- 2
-[out]
-MypyFile:1(
- TryStmt:1(
- Block:1(
- ExpressionStmt:2(
- IntExpr(1)))
- NameExpr(x)
- Block:3(
- ExpressionStmt:4(
- IntExpr(2)))))
-
-[case testComplexTry]
-try:
- 1
-except x as y:
- 2
-except x.y:
- 3
-[out]
-MypyFile:1(
- TryStmt:1(
- Block:1(
- ExpressionStmt:2(
- IntExpr(1)))
- NameExpr(x)
- NameExpr(y)
- Block:3(
- ExpressionStmt:4(
- IntExpr(2)))
- MemberExpr:5(
- NameExpr(x)
- y)
- Block:5(
- ExpressionStmt:6(
- IntExpr(3)))))
-
-[case testGeneratorExpression]
-(x for y in z)
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- GeneratorExpr:1(
- NameExpr(x)
- NameExpr(y)
- NameExpr(z))))
-
-[case testGeneratorExpressionNested]
-(x for y, (p, q) in z)
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- GeneratorExpr:1(
- NameExpr(x)
- TupleExpr:1(
- NameExpr(y)
- TupleExpr:1(
- NameExpr(p)
- NameExpr(q)))
- NameExpr(z))))
-
-[case testListComprehension]
-x=[x for y in z]
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x)
- ListComprehension:1(
- GeneratorExpr:1(
- NameExpr(x)
- NameExpr(y)
- NameExpr(z)))))
-
-[case testComplexListComprehension]
-x=[(x, y) for y, z in (1, 2)]
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x)
- ListComprehension:1(
- GeneratorExpr:1(
- TupleExpr:1(
- NameExpr(x)
- NameExpr(y))
- TupleExpr:1(
- NameExpr(y)
- NameExpr(z))
- TupleExpr:1(
- IntExpr(1)
- IntExpr(2))))))
-
-[case testListComprehension2]
-([x + 1 for x in a])
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- ListComprehension:1(
- GeneratorExpr:1(
- OpExpr:1(
- +
- NameExpr(x)
- IntExpr(1))
- NameExpr(x)
- NameExpr(a)))))
-
-[case testSlices]
-x[1:2]
-x[:1]
-x[1:]
-x[:]
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- IndexExpr:1(
- NameExpr(x)
- SliceExpr:-1(
- IntExpr(1)
- IntExpr(2))))
- ExpressionStmt:2(
- IndexExpr:2(
- NameExpr(x)
- SliceExpr:-1(
- <empty>
- IntExpr(1))))
- ExpressionStmt:3(
- IndexExpr:3(
- NameExpr(x)
- SliceExpr:-1(
- IntExpr(1)
- <empty>)))
- ExpressionStmt:4(
- IndexExpr:4(
- NameExpr(x)
- SliceExpr:-1(
- <empty>
- <empty>))))
-
-[case testSliceWithStride]
-x[1:2:3]
-x[1::2]
-x[:1:2]
-x[::2]
-x[1:2:]
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- IndexExpr:1(
- NameExpr(x)
- SliceExpr:-1(
- IntExpr(1)
- IntExpr(2)
- IntExpr(3))))
- ExpressionStmt:2(
- IndexExpr:2(
- NameExpr(x)
- SliceExpr:-1(
- IntExpr(1)
- <empty>
- IntExpr(2))))
- ExpressionStmt:3(
- IndexExpr:3(
- NameExpr(x)
- SliceExpr:-1(
- <empty>
- IntExpr(1)
- IntExpr(2))))
- ExpressionStmt:4(
- IndexExpr:4(
- NameExpr(x)
- SliceExpr:-1(
- <empty>
- <empty>
- IntExpr(2))))
- ExpressionStmt:5(
- IndexExpr:5(
- NameExpr(x)
- SliceExpr:-1(
- IntExpr(1)
- IntExpr(2)))))
-
-[case testYield]
-def f():
- yield x + 1
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- ExpressionStmt:2(
- YieldExpr:2(
- OpExpr:2(
- +
- NameExpr(x)
- IntExpr(1)))))))
-
-[case testYieldFrom]
-def f():
- yield from h()
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- ExpressionStmt:2(
- YieldFromExpr:2(
- CallExpr:2(
- NameExpr(h)
- Args()))))))
-
-[case testYieldFromAssignment]
-def f():
- a = yield from h()
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- AssignmentStmt:2(
- NameExpr(a)
- YieldFromExpr:2(
- CallExpr:2(
- NameExpr(h)
- Args()))))))
-
-[case testDel]
-del x
-del x[0], y[1]
-[out]
-MypyFile:1(
- DelStmt:1(
- NameExpr(x))
- DelStmt:2(
- TupleExpr:2(
- IndexExpr:2(
- NameExpr(x)
- IntExpr(0))
- IndexExpr:2(
- NameExpr(y)
- IntExpr(1)))))
-
-[case testExtraCommas]
-1, 2,
-+[1, 2,]
-f(1,)
-{1:2,}
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- TupleExpr:1(
- IntExpr(1)
- IntExpr(2)))
- ExpressionStmt:2(
- UnaryExpr:2(
- +
- ListExpr:2(
- IntExpr(1)
- IntExpr(2))))
- ExpressionStmt:3(
- CallExpr:3(
- NameExpr(f)
- Args(
- IntExpr(1))))
- ExpressionStmt:4(
- DictExpr:4(
- IntExpr(1)
- IntExpr(2))))
-
-[case testExtraCommaInFunc]
-def f(x,):
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- Block:1(
- PassStmt:2())))
-
-[case testLambda]
-lambda: 1
-lambda x: y + 1
-lambda x, y: 1
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- FuncExpr:1(
- Block:1(
- ReturnStmt:1(
- IntExpr(1)))))
- ExpressionStmt:2(
- FuncExpr:2(
- Args(
- Var(x))
- Block:2(
- ReturnStmt:2(
- OpExpr:2(
- +
- NameExpr(y)
- IntExpr(1))))))
- ExpressionStmt:3(
- FuncExpr:3(
- Args(
- Var(x)
- Var(y))
- Block:3(
- ReturnStmt:3(
- IntExpr(1))))))
-
-[case testComplexLambda]
-lambda x=2: x
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- FuncExpr:1(
- Args(
- Var(x))
- Init(
- AssignmentStmt:1(
- NameExpr(x)
- IntExpr(2)))
- Block:1(
- ReturnStmt:1(
- NameExpr(x))))))
-
-[case testLambdaPrecedence]
-lambda x: 1, 2
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- TupleExpr:1(
- FuncExpr:1(
- Args(
- Var(x))
- Block:1(
- ReturnStmt:1(
- IntExpr(1))))
- IntExpr(2))))
-
-[case testForIndicesInParens]
-for (i, j) in x:
- pass
-[out]
-MypyFile:1(
- ForStmt:1(
- TupleExpr:1(
- NameExpr(i)
- NameExpr(j))
- NameExpr(x)
- Block:1(
- PassStmt:2())))
-
-[case testForAndTrailingCommaAfterIndexVar]
-for i, in x:
- pass
-[out]
-MypyFile:1(
- ForStmt:1(
- TupleExpr:1(
- NameExpr(i))
- NameExpr(x)
- Block:1(
- PassStmt:2())))
-
-[case testListComprehensionAndTrailingCommaAfterIndexVar]
-x = [a for b, in c]
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x)
- ListComprehension:1(
- GeneratorExpr:1(
- NameExpr(a)
- TupleExpr:1(
- NameExpr(b))
- NameExpr(c)))))
-
-[case testForAndTrailingCommaAfterIndexVars]
-for i, j, in x:
- pass
-[out]
-MypyFile:1(
- ForStmt:1(
- TupleExpr:1(
- NameExpr(i)
- NameExpr(j))
- NameExpr(x)
- Block:1(
- PassStmt:2())))
-
-[case testGeneratorWithCondition]
-(x for y in z if 0)
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- GeneratorExpr:1(
- NameExpr(x)
- NameExpr(y)
- NameExpr(z)
- IntExpr(0))))
-
-[case testListComprehensionWithCondition]
-raise [x for y in z if 0]
-[out]
-MypyFile:1(
- RaiseStmt:1(
- ListComprehension:1(
- GeneratorExpr:1(
- NameExpr(x)
- NameExpr(y)
- NameExpr(z)
- IntExpr(0)))))
-
-[case testListComprehensionWithConditions]
-raise [x for y in z if 0 if 1]
-[out]
-MypyFile:1(
- RaiseStmt:1(
- ListComprehension:1(
- GeneratorExpr:1(
- NameExpr(x)
- NameExpr(y)
- NameExpr(z)
- IntExpr(0)
- IntExpr(1)))))
-
-[case testListComprehensionWithCrazyConditions]
-raise [x for y in z if (1 if 2 else 3) if 1]
-[out]
-MypyFile:1(
- RaiseStmt:1(
- ListComprehension:1(
- GeneratorExpr:1(
- NameExpr(x)
- NameExpr(y)
- NameExpr(z)
- ConditionalExpr:1(
- Condition(
- IntExpr(2))
- IntExpr(1)
- IntExpr(3))
- IntExpr(1)))))
-
-[case testDictionaryComprehension]
-a = {x: y for x, y in xys}
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(a)
- DictionaryComprehension:1(
- NameExpr(x)
- NameExpr(y)
- TupleExpr:1(
- NameExpr(x)
- NameExpr(y))
- NameExpr(xys))))
-
-[case testDictionaryComprehensionComplex]
-a = {x: y for x, y in xys for p, q in pqs if c}
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(a)
- DictionaryComprehension:1(
- NameExpr(x)
- NameExpr(y)
- TupleExpr:1(
- NameExpr(x)
- NameExpr(y))
- TupleExpr:1(
- NameExpr(p)
- NameExpr(q))
- NameExpr(xys)
- NameExpr(pqs)
- NameExpr(c))))
-
-[case testSetComprehension]
-a = {i for i in l}
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(a)
- SetComprehension:1(
- GeneratorExpr:1(
- NameExpr(i)
- NameExpr(i)
- NameExpr(l)))))
-
-[case testSetComprehensionComplex]
-a = {x + p for x in xys for p in pqs if c}
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(a)
- SetComprehension:1(
- GeneratorExpr:1(
- OpExpr:1(
- +
- NameExpr(x)
- NameExpr(p))
- NameExpr(x)
- NameExpr(p)
- NameExpr(xys)
- NameExpr(pqs)
- NameExpr(c)))))
-
-[case testWithStatement]
-with open('foo') as f:
- pass
-[out]
-MypyFile:1(
- WithStmt:1(
- Expr(
- CallExpr:1(
- NameExpr(open)
- Args(
- StrExpr(foo))))
- Target(
- NameExpr(f))
- Block:1(
- PassStmt:2())))
-
-[case testWithStatementWithoutTarget]
-with foo:
- pass
-[out]
-MypyFile:1(
- WithStmt:1(
- Expr(
- NameExpr(foo))
- Block:1(
- PassStmt:2())))
-
-[case testHexOctBinLiterals]
-0xa, 0Xaf, 0o7, 0O12, 0b1, 0B101
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- TupleExpr:1(
- IntExpr(10)
- IntExpr(175)
- IntExpr(7)
- IntExpr(10)
- IntExpr(1)
- IntExpr(5))))
-
-[case testImportFromWithParens]
-from x import (y)
-from x import (y,
- z)
-[out]
-MypyFile:1(
- ImportFrom:1(x, [y])
- ImportFrom:2(x, [y, z]))
-
-[case testContinueStmt]
-while 1:
- continue
-[out]
-MypyFile:1(
- WhileStmt:1(
- IntExpr(1)
- Block:1(
- ContinueStmt:2())))
-
-[case testStrLiteralConcatenate]
-'f' 'bar'
-('x'
- 'y'
- 'z')
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- StrExpr(fbar))
- ExpressionStmt:2(
- StrExpr(xyz)))
-
-[case testCatchAllExcept]
-try:
- 1
-except:
- pass
-try:
- 1
-except x:
- pass
-except:
- 2
-[out]
-MypyFile:1(
- TryStmt:1(
- Block:1(
- ExpressionStmt:2(
- IntExpr(1)))
- Block:3(
- PassStmt:4()))
- TryStmt:5(
- Block:5(
- ExpressionStmt:6(
- IntExpr(1)))
- NameExpr(x)
- Block:7(
- PassStmt:8())
- Block:9(
- ExpressionStmt:10(
- IntExpr(2)))))
-
-[case testTryElse]
-try:
- pass
-except x:
- 1
-else:
- 2
-[out]
-MypyFile:1(
- TryStmt:1(
- Block:1(
- PassStmt:2())
- NameExpr(x)
- Block:3(
- ExpressionStmt:4(
- IntExpr(1)))
- Else(
- ExpressionStmt:6(
- IntExpr(2)))))
-
-[case testExceptWithMultipleTypes]
-try:
- pass
-except (x, y):
- pass
-except (a, b, c) as e:
- pass
-[out]
-MypyFile:1(
- TryStmt:1(
- Block:1(
- PassStmt:2())
- TupleExpr:3(
- NameExpr(x)
- NameExpr(y))
- Block:3(
- PassStmt:4())
- TupleExpr:5(
- NameExpr(a)
- NameExpr(b)
- NameExpr(c))
- NameExpr(e)
- Block:5(
- PassStmt:6())))
-
-[case testNestedFunctions]
-def f():
- def g():
- pass
-def h() -> int:
- def g() -> int:
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- FuncDef:2(
- g
- Block:2(
- PassStmt:3()))))
- FuncDef:4(
- h
- def () -> int?
- Block:4(
- FuncDef:5(
- g
- def () -> int?
- Block:5(
- PassStmt:6())))))
-
-[case testStatementsAndDocStringsInClassBody]
-class A:
- "doc string"
- x = y
- def f(self):
- pass
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- ExpressionStmt:2(
- StrExpr(doc string))
- AssignmentStmt:3(
- NameExpr(x)
- NameExpr(y))
- FuncDef:4(
- f
- Args(
- Var(self))
- Block:4(
- PassStmt:5()))))
-
-[case testSingleLineClass]
-class a: pass
-[out]
-MypyFile:1(
- ClassDef:1(
- a
- PassStmt:1()))
-
-[case testDecorator]
-@property
-def f():
- pass
-[out]
-MypyFile:1(
- Decorator:1(
- Var(f)
- NameExpr(property)
- FuncDef:2(
- f
- Block:2(
- PassStmt:3()))))
-
-[case testComplexDecorator]
-@foo(bar, 1)
-@zar
-def f() -> int:
- pass
-[out]
-MypyFile:1(
- Decorator:1(
- Var(f)
- CallExpr:1(
- NameExpr(foo)
- Args(
- NameExpr(bar)
- IntExpr(1)))
- NameExpr(zar)
- FuncDef:3(
- f
- def () -> int?
- Block:3(
- PassStmt:4()))))
-
-[case testKeywordArgInCall]
-f(x=1)
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- CallExpr:1(
- NameExpr(f)
- Args()
- KwArgs(
- x
- IntExpr(1)))))
-
-[case testComplexKeywordArgs]
-f(x, y=1 or 2, z=y)
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- CallExpr:1(
- NameExpr(f)
- Args(
- NameExpr(x))
- KwArgs(
- y
- OpExpr:1(
- or
- IntExpr(1)
- IntExpr(2)))
- KwArgs(
- z
- NameExpr(y)))))
-
-[case testChainedAssignment]
-x = z = 1
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- Lvalues(
- NameExpr(x)
- NameExpr(z))
- IntExpr(1)))
-
-[case testVarArgs]
-def f(x, *a): pass
-f(1, *2)
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- VarArg(
- Var(a))
- Block:1(
- PassStmt:1()))
- ExpressionStmt:2(
- CallExpr:2(
- NameExpr(f)
- Args(
- IntExpr(1)
- IntExpr(2))
- VarArg)))
-
-[case testVarArgWithType]
-def f(x: str, *a: int): pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- def (x: str?, *a: int?) -> Any
- VarArg(
- Var(a))
- Block:1(
- PassStmt:1())))
-
-[case testDictVarArgs]
-def f(x, **a): pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- DictVarArg(
- Var(a))
- Block:1(
- PassStmt:1())))
-
-[case testBothVarArgs]
-def f(x, *a, **b): pass
-def g(*a, **b): pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- VarArg(
- Var(a))
- DictVarArg(
- Var(b))
- Block:1(
- PassStmt:1()))
- FuncDef:2(
- g
- VarArg(
- Var(a))
- DictVarArg(
- Var(b))
- Block:2(
- PassStmt:2())))
-
-[case testDictVarArgsWithType]
-def f(x: X, **a: A) -> None: pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- def (x: X?, **a: A?) -> None?
- DictVarArg(
- Var(a))
- Block:1(
- PassStmt:1())))
-
-[case testCallDictVarArgs]
-f(**x)
-f(x, **y)
-f(*x, **y)
-f(x, *y, **z)
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- CallExpr:1(
- NameExpr(f)
- Args()
- DictVarArg(
- NameExpr(x))))
- ExpressionStmt:2(
- CallExpr:2(
- NameExpr(f)
- Args(
- NameExpr(x))
- DictVarArg(
- NameExpr(y))))
- ExpressionStmt:3(
- CallExpr:3(
- NameExpr(f)
- Args(
- NameExpr(x))
- VarArg
- DictVarArg(
- NameExpr(y))))
- ExpressionStmt:4(
- CallExpr:4(
- NameExpr(f)
- Args(
- NameExpr(x)
- NameExpr(y))
- VarArg
- DictVarArg(
- NameExpr(z)))))
-
-[case testAssert]
-assert x == y
-[out]
-MypyFile:1(
- AssertStmt:1(
- ComparisonExpr:1(
- ==
- NameExpr(x)
- NameExpr(y))))
-
-[case testYieldWithoutExpressions]
-def f():
- yield
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- ExpressionStmt:2(
- YieldExpr:2()))))
-
-[case testConditionalExpression]
-x if y else z
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- ConditionalExpr:1(
- Condition(
- NameExpr(y))
- NameExpr(x)
- NameExpr(z))))
-
-[case testConditionalExpressionInListComprehension]
-a = [x if y else z for a in b]
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(a)
- ListComprehension:1(
- GeneratorExpr:1(
- ConditionalExpr:1(
- Condition(
- NameExpr(y))
- NameExpr(x)
- NameExpr(z))
- NameExpr(a)
- NameExpr(b)))))
-
-[case testConditionalExpressionInTuple]
-1 if 2 else 3, 4
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- TupleExpr:1(
- ConditionalExpr:1(
- Condition(
- IntExpr(2))
- IntExpr(1)
- IntExpr(3))
- IntExpr(4))))
-
-[case testSetLiteral]
-{x or y}
-{1, 2}
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- SetExpr:1(
- OpExpr:1(
- or
- NameExpr(x)
- NameExpr(y))))
- ExpressionStmt:2(
- SetExpr:2(
- IntExpr(1)
- IntExpr(2))))
-
-[case testSetLiteralWithExtraComma]
-{x,}
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- SetExpr:1(
- NameExpr(x))))
-
-[case testImportAs]
-import x as y
-import x, z as y, a.b as c, d as d
-[out]
-MypyFile:1(
- Import:1(x : y)
- Import:2(x, z : y, a.b : c, d : d))
-
-[case testForAndElse]
-for x in y:
- pass
-else:
- x
-[out]
-MypyFile:1(
- ForStmt:1(
- NameExpr(x)
- NameExpr(y)
- Block:1(
- PassStmt:2())
- Else(
- ExpressionStmt:4(
- NameExpr(x)))))
-
-[case testWhileAndElse]
-while x:
- pass
-else:
- y
-[out]
-MypyFile:1(
- WhileStmt:1(
- NameExpr(x)
- Block:1(
- PassStmt:2())
- Else(
- ExpressionStmt:4(
- NameExpr(y)))))
-
-[case testWithAndMultipleOperands]
-with x as y, a as b:
- pass
-with x(), y():
- pass
-[out]
-MypyFile:1(
- WithStmt:1(
- Expr(
- NameExpr(x))
- Target(
- NameExpr(y))
- Expr(
- NameExpr(a))
- Target(
- NameExpr(b))
- Block:1(
- PassStmt:2()))
- WithStmt:3(
- Expr(
- CallExpr:3(
- NameExpr(x)
- Args()))
- Expr(
- CallExpr:3(
- NameExpr(y)
- Args()))
- Block:3(
- PassStmt:4())))
-
-[case testOperatorAssignment]
-x += 1
-x -= 1
-x *= 1
-x /= 1
-x //= 1
-x %= 1
-x **= 1
-x |= 1
-x &= 1
-x ^= 1
-x >>= 1
-x <<= 1
-[out]
-MypyFile:1(
- OperatorAssignmentStmt:1(
- +
- NameExpr(x)
- IntExpr(1))
- OperatorAssignmentStmt:2(
- -
- NameExpr(x)
- IntExpr(1))
- OperatorAssignmentStmt:3(
- *
- NameExpr(x)
- IntExpr(1))
- OperatorAssignmentStmt:4(
- /
- NameExpr(x)
- IntExpr(1))
- OperatorAssignmentStmt:5(
- //
- NameExpr(x)
- IntExpr(1))
- OperatorAssignmentStmt:6(
- %
- NameExpr(x)
- IntExpr(1))
- OperatorAssignmentStmt:7(
- **
- NameExpr(x)
- IntExpr(1))
- OperatorAssignmentStmt:8(
- |
- NameExpr(x)
- IntExpr(1))
- OperatorAssignmentStmt:9(
- &
- NameExpr(x)
- IntExpr(1))
- OperatorAssignmentStmt:10(
- ^
- NameExpr(x)
- IntExpr(1))
- OperatorAssignmentStmt:11(
- >>
- NameExpr(x)
- IntExpr(1))
- OperatorAssignmentStmt:12(
- <<
- NameExpr(x)
- IntExpr(1)))
-
-[case testNestedClasses]
-class A:
- class B:
- pass
- class C:
- pass
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- ClassDef:2(
- B
- PassStmt:3())
- ClassDef:4(
- C
- PassStmt:5())))
-
-[case testTryWithExceptAndFinally]
-try:
- pass
-except x:
- x
-finally:
- y
-[out]
-MypyFile:1(
- TryStmt:1(
- Block:1(
- PassStmt:2())
- NameExpr(x)
- Block:3(
- ExpressionStmt:4(
- NameExpr(x)))
- Finally(
- ExpressionStmt:6(
- NameExpr(y)))))
-
-[case testBareAsteriskInFuncDef]
-def f(x, *, y=1): pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- MaxPos(1)
- Args(
- Var(x)
- Var(y))
- Init(
- AssignmentStmt:1(
- NameExpr(y)
- IntExpr(1)))
- Block:1(
- PassStmt:1())))
-
-[case testBareAsteriskInFuncDefWithSignature]
-def f(x: A, *, y: B = 1) -> None: pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- MaxPos(1)
- Args(
- Var(x)
- Var(y))
- def (x: A?, *, y: B? =) -> None?
- Init(
- AssignmentStmt:1(
- NameExpr(y)
- IntExpr(1)))
- Block:1(
- PassStmt:1())))
-
-[case testBareAsteriskNamedDefault]
-def f(*, y: B = 1) -> None: pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- MaxPos(0)
- Args(
- Var(y))
- def (*, y: B? =) -> None?
- Init(
- AssignmentStmt:1(
- NameExpr(y)
- IntExpr(1)))
- Block:1(
- PassStmt:1())))
-
-[case testBareAsteriskNamedNoDefault]
-def f(*, y: B) -> None: pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- MaxPos(0)
- Args(
- Var(y))
- def (*, y: B?) -> None?
- Block:1(
- PassStmt:1())))
-
-[case testSuperExpr]
-super().x
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- SuperExpr:1(
- x)))
-
-[case testKeywordAndDictArgs]
-f(x = y, **kwargs)
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- CallExpr:1(
- NameExpr(f)
- Args()
- KwArgs(
- x
- NameExpr(y))
- DictVarArg(
- NameExpr(kwargs)))))
-
-[case testSimpleFunctionType]
-f = None # type: Callable[[], None]
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(f)
- NameExpr(None)
- Callable?[<TypeList >, None?]))
-
-[case testFunctionTypeWithArgument]
-f = None # type: Callable[[str], int]
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(f)
- NameExpr(None)
- Callable?[<TypeList str?>, int?]))
-
-[case testFunctionTypeWithTwoArguments]
-f = None # type: Callable[[a[b], x.y], List[int]]
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(f)
- NameExpr(None)
- Callable?[<TypeList a?[b?], x.y?>, List?[int?]]))
-
-[case testFunctionTypeWithExtraComma]
-def f(x: Callable[[str,], int]): pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- def (x: Callable?[<TypeList str?>, int?]) -> Any
- Block:1(
- PassStmt:1())))
-
-[case testSimpleStringLiteralType]
-def f() -> 'A': pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- def () -> A?
- Block:1(
- PassStmt:1())))
-
-[case testGenericStringLiteralType]
-def f() -> 'A[B, C]': pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- def () -> A?[B?, C?]
- Block:1(
- PassStmt:1())))
-
-[case testPartialStringLiteralType]
-def f() -> A['B', C]: pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- def () -> A?[B?, C?]
- Block:1(
- PassStmt:1())))
-
-[case testWhitespaceInStringLiteralType]
-def f() -> ' A [ X ] ': pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- def () -> A?[X?]
- Block:1(
- PassStmt:1())))
-
-[case testEscapeInStringLiteralType]
-def f() -> '\x41': pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- def () -> A?
- Block:1(
- PassStmt:1())))
-
-[case testMetaclass]
-class Foo(metaclass=Bar): pass
-[out]
-MypyFile:1(
- ClassDef:1(
- Foo
- Metaclass(Bar)
- PassStmt:1()))
-
-[case testQualifiedMetaclass]
-class Foo(metaclass=foo.Bar): pass
-[out]
-MypyFile:1(
- ClassDef:1(
- Foo
- Metaclass(foo.Bar)
- PassStmt:1()))
-
-[case testBaseAndMetaclass]
-class Foo(foo.bar[x], metaclass=Bar): pass
-[out]
-MypyFile:1(
- ClassDef:1(
- Foo
- Metaclass(Bar)
- BaseTypeExpr(
- IndexExpr:1(
- MemberExpr:1(
- NameExpr(foo)
- bar)
- NameExpr(x)))
- PassStmt:1()))
-
-[case testClassKeywordArgs]
-class Foo(_root=None): pass
-[out]
-MypyFile:1(
- ClassDef:1(
- Foo
- PassStmt:1()))
-
-[case testClassKeywordArgsBeforeMeta]
-class Foo(_root=None, metaclass=Bar): pass
-[out]
-MypyFile:1(
- ClassDef:1(
- Foo
- Metaclass(Bar)
- PassStmt:1()))
-
-[case testClassKeywordArgsAfterMeta]
-class Foo(metaclass=Bar, _root=None): pass
-[out]
-MypyFile:1(
- ClassDef:1(
- Foo
- Metaclass(Bar)
- PassStmt:1()))
-
-[case testNamesThatAreNoLongerKeywords]
-any = interface
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(any)
- NameExpr(interface)))
-
-[case testFunctionOverload]
-@foo
-def f() -> x: pass
-@foo
-def f() -> y: pass
-[out]
-MypyFile:1(
- OverloadedFuncDef:1(
- Decorator:1(
- Var(f)
- NameExpr(foo)
- FuncDef:2(
- f
- def () -> x?
- Block:2(
- PassStmt:2())))
- Decorator:3(
- Var(f)
- NameExpr(foo)
- FuncDef:4(
- f
- def () -> y?
- Block:4(
- PassStmt:4())))))
-
-[case testFunctionOverloadAndOtherStatements]
-x
-@foo
-def f() -> x: pass
-@foo
-def f() -> y: pass
-x
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- NameExpr(x))
- OverloadedFuncDef:2(
- Decorator:2(
- Var(f)
- NameExpr(foo)
- FuncDef:3(
- f
- def () -> x?
- Block:3(
- PassStmt:3())))
- Decorator:4(
- Var(f)
- NameExpr(foo)
- FuncDef:5(
- f
- def () -> y?
- Block:5(
- PassStmt:5()))))
- ExpressionStmt:6(
- NameExpr(x)))
-
-[case testFunctionOverloadWithThreeVariants]
-@foo
-def f() -> x: pass
-@foo
-def f() -> y: pass
-@foo
-def f(y): pass
-[out]
-MypyFile:1(
- OverloadedFuncDef:1(
- Decorator:1(
- Var(f)
- NameExpr(foo)
- FuncDef:2(
- f
- def () -> x?
- Block:2(
- PassStmt:2())))
- Decorator:3(
- Var(f)
- NameExpr(foo)
- FuncDef:4(
- f
- def () -> y?
- Block:4(
- PassStmt:4())))
- Decorator:5(
- Var(f)
- NameExpr(foo)
- FuncDef:6(
- f
- Args(
- Var(y))
- Block:6(
- PassStmt:6())))))
-
-[case testDecoratorsThatAreNotOverloads]
-@foo
-def f() -> x: pass
-@foo
-def g() -> y: pass
-[out]
-MypyFile:1(
- Decorator:1(
- Var(f)
- NameExpr(foo)
- FuncDef:2(
- f
- def () -> x?
- Block:2(
- PassStmt:2())))
- Decorator:3(
- Var(g)
- NameExpr(foo)
- FuncDef:4(
- g
- def () -> y?
- Block:4(
- PassStmt:4()))))
-
-[case testFunctionOverloadWithinFunction]
-def f():
- @foo
- def g(): pass
- @foo
- def g() -> x: pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- OverloadedFuncDef:2(
- Decorator:2(
- Var(g)
- NameExpr(foo)
- FuncDef:3(
- g
- Block:3(
- PassStmt:3())))
- Decorator:4(
- Var(g)
- NameExpr(foo)
- FuncDef:5(
- g
- def () -> x?
- Block:5(
- PassStmt:5())))))))
-
-[case testCommentFunctionAnnotation]
-def f(): # type: () -> A
- pass
-def g(x): # type: (A) -> B
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- def () -> A?
- Block:1(
- PassStmt:2()))
- FuncDef:3(
- g
- Args(
- Var(x))
- def (x: A?) -> B?
- Block:3(
- PassStmt:4())))
-
-[case testCommentMethodAnnotation]
-class A:
- def f(self): # type: () -> A
- pass
- def g(xself, x): # type: (A) -> B
- pass
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- FuncDef:2(
- f
- Args(
- Var(self))
- def (self: Any) -> A?
- Block:2(
- PassStmt:3()))
- FuncDef:4(
- g
- Args(
- Var(xself)
- Var(x))
- def (xself: Any, x: A?) -> B?
- Block:4(
- PassStmt:5()))))
-
-[case testCommentMethodAnnotationAndNestedFunction]
-class A:
- def f(self): # type: () -> A
- def g(x): # type: (A) -> B
- pass
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- FuncDef:2(
- f
- Args(
- Var(self))
- def (self: Any) -> A?
- Block:2(
- FuncDef:3(
- g
- Args(
- Var(x))
- def (x: A?) -> B?
- Block:3(
- PassStmt:4()))))))
-
-[case testCommentFunctionAnnotationOnSeparateLine]
-def f(x):
- # type: (X) -> Y
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- def (x: X?) -> Y?
- Block:1(
- PassStmt:3())))
-
-[case testCommentFunctionAnnotationOnSeparateLine2]
-def f(x):
-
- # type: (X) -> Y # bar
-
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- def (x: X?) -> Y?
- Block:1(
- PassStmt:5())))
-
-[case testCommentFunctionAnnotationAndVarArg]
-def f(x, *y): # type: (X, *Y) -> Z
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- def (x: X?, *y: Y?) -> Z?
- VarArg(
- Var(y))
- Block:1(
- PassStmt:2())))
-
-[case testCommentFunctionAnnotationAndAllVarArgs]
-def f(x, *y, **z): # type: (X, *Y, **Z) -> A
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- def (x: X?, *y: Y?, **z: Z?) -> A?
- VarArg(
- Var(y))
- DictVarArg(
- Var(z))
- Block:1(
- PassStmt:2())))
-
-[case testClassDecorator]
-@foo
-class X: pass
-@foo(bar)
-@x.y
-class Z: pass
-[out]
-MypyFile:1(
- ClassDef:1(
- X
- Decorators(
- NameExpr(foo))
- PassStmt:2())
- ClassDef:3(
- Z
- Decorators(
- CallExpr:3(
- NameExpr(foo)
- Args(
- NameExpr(bar)))
- MemberExpr:4(
- NameExpr(x)
- y))
- PassStmt:5()))
-
-[case testTrailingSemicolon]
-def x():
- pass;
-
-def y():
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- x
- Block:1(
- PassStmt:2()))
- FuncDef:4(
- y
- Block:4(
- PassStmt:5())))
-
-[case testEmptySuperClass]
-class A():
- pass
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- PassStmt:2()))
-
-[case testStarExpression]
-*a
-*a, b
-a, *b
-a, (*x, y)
-a, (x, *y)
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- StarExpr:1(
- NameExpr(a)))
- ExpressionStmt:2(
- TupleExpr:2(
- StarExpr:2(
- NameExpr(a))
- NameExpr(b)))
- ExpressionStmt:3(
- TupleExpr:3(
- NameExpr(a)
- StarExpr:3(
- NameExpr(b))))
- ExpressionStmt:4(
- TupleExpr:4(
- NameExpr(a)
- TupleExpr:4(
- StarExpr:4(
- NameExpr(x))
- NameExpr(y))))
- ExpressionStmt:5(
- TupleExpr:5(
- NameExpr(a)
- TupleExpr:5(
- NameExpr(x)
- StarExpr:5(
- NameExpr(y))))))
-
-[case testStarExpressionParenthesis]
-*(a)
-*(a,b)
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- StarExpr:1(
- NameExpr(a)))
- ExpressionStmt:2(
- StarExpr:2(
- TupleExpr:2(
- NameExpr(a)
- NameExpr(b)))))
-
-[case testStarExpressionInFor]
-for *a in b:
- pass
-
-for a, *b in c:
- pass
-
-for *a, b in c:
- pass
-[out]
-MypyFile:1(
- ForStmt:1(
- StarExpr:1(
- NameExpr(a))
- NameExpr(b)
- Block:1(
- PassStmt:2()))
- ForStmt:4(
- TupleExpr:4(
- NameExpr(a)
- StarExpr:4(
- NameExpr(b)))
- NameExpr(c)
- Block:4(
- PassStmt:5()))
- ForStmt:7(
- TupleExpr:7(
- StarExpr:7(
- NameExpr(a))
- NameExpr(b))
- NameExpr(c)
- Block:7(
- PassStmt:8())))
-
-[case testStarExprInGeneratorExpr]
-(x for y, *p in z)
-(x for *p, y in z)
-(x for y, *p, q in z)
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- GeneratorExpr:1(
- NameExpr(x)
- TupleExpr:1(
- NameExpr(y)
- StarExpr:1(
- NameExpr(p)))
- NameExpr(z)))
- ExpressionStmt:2(
- GeneratorExpr:2(
- NameExpr(x)
- TupleExpr:2(
- StarExpr:2(
- NameExpr(p))
- NameExpr(y))
- NameExpr(z)))
- ExpressionStmt:3(
- GeneratorExpr:3(
- NameExpr(x)
- TupleExpr:3(
- NameExpr(y)
- StarExpr:3(
- NameExpr(p))
- NameExpr(q))
- NameExpr(z))))
-
-[case testParseNamedtupleBaseclass]
-class A(namedtuple('x', ['y'])): pass
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- BaseTypeExpr(
- CallExpr:1(
- NameExpr(namedtuple)
- Args(
- StrExpr(x)
- ListExpr:1(
- StrExpr(y)))))
- PassStmt:1()))
-
-[case testEllipsis]
-...
-a[1,...,2]
-....__class__
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- Ellipsis)
- ExpressionStmt:2(
- IndexExpr:2(
- NameExpr(a)
- TupleExpr:2(
- IntExpr(1)
- Ellipsis
- IntExpr(2))))
- ExpressionStmt:3(
- MemberExpr:3(
- Ellipsis
- __class__)))
-
-[case testFunctionWithManyKindsOfArgs]
-def f(x, *args, y=None, **kw): pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- MaxPos(1)
- Args(
- Var(x)
- Var(y))
- Init(
- AssignmentStmt:1(
- NameExpr(y)
- NameExpr(None)))
- VarArg(
- Var(args))
- DictVarArg(
- Var(kw))
- Block:1(
- PassStmt:1())))
-
-[case testIfWithSemicolons]
-if 1: a; b
-[out]
-MypyFile:1(
- IfStmt:1(
- If(
- IntExpr(1))
- Then(
- ExpressionStmt:1(
- NameExpr(a))
- ExpressionStmt:1(
- NameExpr(b)))))
-
-[case testIfWithSemicolonsNested]
-while 2:
- if 1: a; b
-[out]
-MypyFile:1(
- WhileStmt:1(
- IntExpr(2)
- Block:1(
- IfStmt:2(
- If(
- IntExpr(1))
- Then(
- ExpressionStmt:2(
- NameExpr(a))
- ExpressionStmt:2(
- NameExpr(b)))))))
-
-[case testIfElseWithSemicolons]
-if 1: global x; y = 1
-else: x = 1; return 3
-4
-[out]
-MypyFile:1(
- IfStmt:1(
- If(
- IntExpr(1))
- Then(
- GlobalDecl:1(
- x)
- AssignmentStmt:1(
- NameExpr(y)
- IntExpr(1)))
- Else(
- AssignmentStmt:2(
- NameExpr(x)
- IntExpr(1))
- ReturnStmt:2(
- IntExpr(3))))
- ExpressionStmt:3(
- IntExpr(4)))
-
-[case testIfElseWithSemicolonsNested]
-while 2:
- if 1: global x; y = 1
- else: x = 1; return 3
-4
-[out]
-MypyFile:1(
- WhileStmt:1(
- IntExpr(2)
- Block:1(
- IfStmt:2(
- If(
- IntExpr(1))
- Then(
- GlobalDecl:2(
- x)
- AssignmentStmt:2(
- NameExpr(y)
- IntExpr(1)))
- Else(
- AssignmentStmt:3(
- NameExpr(x)
- IntExpr(1))
- ReturnStmt:3(
- IntExpr(3))))))
- ExpressionStmt:4(
- IntExpr(4)))
-
-[case testKeywordArgumentAfterStarArgumentInCall]
-f(x=1, *y)
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- CallExpr:1(
- NameExpr(f)
- Args(
- NameExpr(y))
- VarArg
- KwArgs(
- x
- IntExpr(1)))))
-
-[case testConditionalExpressionInSetComprehension]
-{ 1 if x else 2 for x in y }
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- SetComprehension:1(
- GeneratorExpr:1(
- ConditionalExpr:1(
- Condition(
- NameExpr(x))
- IntExpr(1)
- IntExpr(2))
- NameExpr(x)
- NameExpr(y)))))
-
-[case testConditionalExpressionInListComprehension]
-a = [ 1 if x else 2 for x in y ]
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(a)
- ListComprehension:1(
- GeneratorExpr:1(
- ConditionalExpr:1(
- Condition(
- NameExpr(x))
- IntExpr(1)
- IntExpr(2))
- NameExpr(x)
- NameExpr(y)))))
-
-[case testComplexWithLvalue]
-with x as y.z: pass
-[out]
-MypyFile:1(
- WithStmt:1(
- Expr(
- NameExpr(x))
- Target(
- MemberExpr:1(
- NameExpr(y)
- z))
- Block:1(
- PassStmt:1())))
-
-[case testRelativeImportWithEllipsis]
-from ... import x
-[out]
-MypyFile:1(
- ImportFrom:1(..., [x]))
-
-[case testRelativeImportWithEllipsis2]
-from .... import x
-[out]
-MypyFile:1(
- ImportFrom:1(...., [x]))
-
-[case testParseExtendedSlicing]
-a[:, :]
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- IndexExpr:1(
- NameExpr(a)
- TupleExpr:-1(
- SliceExpr:-1(
- <empty>
- <empty>)
- SliceExpr:-1(
- <empty>
- <empty>)))))
-
-[case testParseExtendedSlicing2]
-a[1:2:, :,]
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- IndexExpr:1(
- NameExpr(a)
- TupleExpr:-1(
- SliceExpr:-1(
- IntExpr(1)
- IntExpr(2))
- SliceExpr:-1(
- <empty>
- <empty>)))))
-
-[case testParseExtendedSlicing3]
-a[1:2:3, ..., 1]
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- IndexExpr:1(
- NameExpr(a)
- TupleExpr:-1(
- SliceExpr:-1(
- IntExpr(1)
- IntExpr(2)
- IntExpr(3))
- Ellipsis
- IntExpr(1)))))
-
-[case testParseIfExprInDictExpr]
-test = { 'spam': 'eggs' if True else 'bacon' }
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(test)
- DictExpr:1(
- StrExpr(spam)
- ConditionalExpr:1(
- Condition(
- NameExpr(True))
- StrExpr(eggs)
- StrExpr(bacon)))))
-
-[case testIgnoreLine]
-import x # type: ignore
-[out]
-MypyFile:1(
- Import:1(x)
- IgnoredLines(1))
-
-[case testIgnore2Lines]
-x
-y # type: ignore
-z # type: ignore
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- NameExpr(x))
- ExpressionStmt:2(
- NameExpr(y))
- ExpressionStmt:3(
- NameExpr(z))
- IgnoredLines(2, 3))
-
-[case testCommentedOutIgnoreAnnotation]
-y ## type: ignore
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- NameExpr(y)))
-
-[case testInvalidIgnoreAnnotations]
-y # type: ignored
-y # type: IGNORE
-y # type : ignore
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- NameExpr(y))
- ExpressionStmt:2(
- NameExpr(y))
- ExpressionStmt:3(
- NameExpr(y)))
-
-[case testSpaceInIgnoreAnnotations]
-y # type: ignore # foo
-y #type:ignore
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- NameExpr(y))
- ExpressionStmt:2(
- NameExpr(y))
- IgnoredLines(1, 2))
-
-[case testIgnoreAnnotationAndMultilineStatement]
-x = {
- 1: 2 # type: ignore
-}
-y = { # type: ignore
- 1: 2
-} # type: ignore
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x)
- DictExpr:1(
- IntExpr(1)
- IntExpr(2)))
- AssignmentStmt:4(
- NameExpr(y)
- DictExpr:4(
- IntExpr(1)
- IntExpr(2)))
- IgnoredLines(2, 4, 6))
-
-[case testIgnoreAnnotationAndMultilineStatement2]
-from m import ( # type: ignore
- x, y
-)
-[out]
-MypyFile:1(
- ImportFrom:1(m, [x, y])
- IgnoredLines(1))
-
-[case testYieldExpression]
-def f():
- x = yield f()
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- AssignmentStmt:2(
- NameExpr(x)
- YieldExpr:2(
- CallExpr:2(
- NameExpr(f)
- Args()))))))
-
-[case testForWithSingleItemTuple]
-for x in 1,: pass
-[out]
-MypyFile:1(
- ForStmt:1(
- NameExpr(x)
- TupleExpr:1(
- IntExpr(1))
- Block:1(
- PassStmt:1())))
-
-[case testIsoLatinUnixEncoding]
-# coding: iso-latin-1-unix
-[out]
-MypyFile:1()
-
-[case testLatinUnixEncoding]
-# coding: latin-1-unix
-[out]
-MypyFile:1()
-
-[case testLatinUnixEncoding]
-# coding: iso-latin-1
-[out]
-MypyFile:1()
-
-[case testYieldExpressionInParens]
-def f():
- (yield)
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- ExpressionStmt:2(
- YieldExpr:2()))))
diff --git a/test-data/unit/python2eval.test b/test-data/unit/python2eval.test
deleted file mode 100644
index b750de8..0000000
--- a/test-data/unit/python2eval.test
+++ /dev/null
@@ -1,474 +0,0 @@
--- Test cases for type checking mypy programs using full stubs and running
--- using CPython (Python 2 mode).
---
--- These are mostly regression tests -- no attempt is made to make these
--- complete.
-
-
-[case testAbs2_python2]
-n = None # type: int
-f = None # type: float
-n = abs(1)
-abs(1) + 'x' # Error
-f = abs(1.1)
-abs(1.1) + 'x' # Error
-[out]
-_program.py:4: error: Unsupported operand types for + ("int" and "str")
-_program.py:6: error: Unsupported operand types for + ("float" and "str")
-
-[case testUnicode_python2]
-x = unicode('xyz', 'latin1')
-print x
-x = u'foo'
-print repr(x)
-[out]
-xyz
-u'foo'
-
-[case testXrangeAndRange_python2]
-for i in xrange(2):
- print i
-for i in range(3):
- print i
-[out]
-0
-1
-0
-1
-2
-
-[case testIterator_python2]
-import typing, sys
-x = iter('bar')
-print x.next(), x.next()
-[out]
-b a
-
-[case testEncodeAndDecode_python2]
-print 'a'.encode('latin1')
-print 'b'.decode('latin1')
-print u'c'.encode('latin1')
-print u'd'.decode('latin1')
-[out]
-a
-b
-c
-d
-
-[case testHasKey_python2]
-d = {1: 'x'}
-print d.has_key(1)
-print d.has_key(2)
-[out]
-True
-False
-
-[case testIntegerDivision_python2]
-x = 1 / 2
-x()
-[out]
-_program.py:2: error: "int" not callable
-
-[case testFloatDivision_python2]
-x = 1.0 / 2.0
-x = 1.0 / 2
-x = 1 / 2.0
-x = 1.5
-[out]
-
-[case testAnyStr_python2]
-from typing import AnyStr
-def f(x): # type: (AnyStr) -> AnyStr
- if isinstance(x, str):
- return 'foo'
- else:
- return u'zar'
-print f('')
-print f(u'')
-[out]
-foo
-zar
-
-[case testGenericPatterns_python2]
-from typing import Pattern
-import re
-p = None # type: Pattern[unicode]
-p = re.compile(u'foo*')
-b = None # type: Pattern[str]
-b = re.compile('foo*')
-print(p.match(u'fooo').group(0))
-[out]
-fooo
-
-[case testGenericMatch_python2]
-from typing import Match
-import re
-def f(m): # type: (Match[str]) -> None
- print(m.group(0))
-f(re.match('x*', 'xxy'))
-[out]
-xx
-
-[case testVariableLengthTuple_python2]
-from typing import Tuple, cast
-x = cast(Tuple[int, ...], ())
-print(x)
-[out]
-()
-
-[case testFromFuturePrintFunction_python2]
-from __future__ import print_function
-print('a', 'b')
-[out]
-a b
-
-[case testFromFutureImportUnicodeLiterals_python2]
-from __future__ import unicode_literals
-print '>', ['a', b'b', u'c']
-[out]
-> [u'a', 'b', u'c']
-
-[case testUnicodeLiteralsKwargs_python2]
-from __future__ import unicode_literals
-def f(**kwargs): # type: (...) -> None
- pass
-params = {'a': 'b'}
-f(**params)
-[out]
-
-[case testUnicodeStringKwargs_python2]
-def f(**kwargs): # type: (...) -> None
- pass
-params = {u'a': 'b'}
-f(**params)
-[out]
-
-[case testStrKwargs_python2]
-def f(**kwargs): # type: (...) -> None
- pass
-params = {'a': 'b'}
-f(**params)
-[out]
-
-[case testFromFutureImportUnicodeLiterals2_python2]
-from __future__ import unicode_literals
-def f(x): # type: (str) -> None
- pass
-f(b'')
-f(u'')
-f('')
-[out]
-_program.py:5: error: Argument 1 to "f" has incompatible type "unicode"; expected "str"
-_program.py:6: error: Argument 1 to "f" has incompatible type "unicode"; expected "str"
-
-[case testStrUnicodeCompatibility_python2]
-import typing
-def f(s): # type: (unicode) -> None
- pass
-f(u'')
-f('')
-[out]
-
-[case testStrUnicodeCompatibilityInBuiltins_python2]
-import typing
-'x'.count('x')
-'x'.count(u'x')
-[out]
-
-[case testTupleAsSubtypeOfSequence_python2]
-from typing import TypeVar, Sequence
-T = TypeVar('T')
-def f(a): # type: (Sequence[T]) -> None
- print a
-f(tuple())
-[out]
-()
-
-[case testReadOnlyProperty_python2]
-import typing
-class A:
- @property
- def foo(self): # type: () -> int
- return 1
-print(A().foo + 2)
-[out]
-3
-
-[case testIOTypes_python2]
-from typing import IO, TextIO, BinaryIO, Any
-class X(IO[str]): pass
-class Y(TextIO): pass
-class Z(BinaryIO): pass
-[out]
-
-[case testOpenReturnType_python2]
-import typing
-f = open('/tmp/xyz', 'w')
-f.write(u'foo')
-f.write('bar')
-f.close()
-[out]
-_program.py:3: error: Argument 1 to "write" of "IO" has incompatible type "unicode"; expected "str"
-
-[case testPrintFunctionWithFileArg_python2]
-from __future__ import print_function
-import typing
-if 1 == 2: # Don't want to run the code below, since it would create a file.
- f = open('/tmp/xyz', 'w')
- print('foo', file=f)
- f.close()
-print('ok')
-[out]
-ok
-
-[case testStringIO_python2]
-import typing
-import io
-c = io.StringIO()
-c.write(u'\x89')
-print(repr(c.getvalue()))
-[out]
-u'\x89'
-
-[case testBytesIO_python2]
-import typing
-import io
-c = io.BytesIO()
-c.write('\x89')
-print(repr(c.getvalue()))
-[out]
-'\x89'
-
-[case testTextIOWrapper_python2]
-import typing
-import io
-b = io.BytesIO(u'\xab'.encode('utf8'))
-w = io.TextIOWrapper(b, encoding='utf8')
-print(repr(w.read()))
-[out]
-u'\xab'
-
-[case testIoOpen_python2]
-import typing
-import io
-if 1 == 2: # Only type check, do not execute
- f = io.open('/tmp/xyz', 'w', encoding='utf8')
- f.write(u'\xab')
- f.close()
-print 'ok'
-[out]
-ok
-
-[case testUnionType_python2]
-from typing import Union
-y = None # type: Union[int, str]
-def f(x): # type: (Union[int, str]) -> str
- if isinstance(x, int):
- x = str(x)
- return x
-print f(12)
-print f('ab')
-[out]
-12
-ab
-
-[case testStrAdd_python2]
-import typing
-s = ''
-u = u''
-n = 0
-n = s + '' # E
-s = s + u'' # E
-[out]
-_program.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-_program.py:6: error: Incompatible types in assignment (expression has type "unicode", variable has type "str")
-
-[case testStrJoin_python2]
-import typing
-s = ''
-u = u''
-n = 0
-n = ''.join(['']) # Error
-s = ''.join([u'']) # Error
-[out]
-_program.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-_program.py:6: error: Incompatible types in assignment (expression has type "unicode", variable has type "str")
-
-[case testNamedTuple_python2]
-import typing
-from collections import namedtuple
-X = namedtuple('X', ['a', 'b'])
-x = X(a=1, b='s')
-print x.a, x.b
-[out]
-1 s
-
-[case testNamedTupleError_python2]
-import typing
-from collections import namedtuple
-X = namedtuple('X', ['a', 'b'])
-x = X(a=1, b='s')
-x.c
-[out]
-_program.py:5: error: "X" has no attribute "c"
-
-[case testAssignToComplexReal_python2]
-import typing
-x = 4j
-y = x.real
-y = x # Error
-x.imag = 2.0 # Error
-[out]
-_program.py:4: error: Incompatible types in assignment (expression has type "complex", variable has type "float")
-_program.py:5: error: Property "imag" defined in "complex" is read-only
-
-[case testComplexArithmetic_python2]
-import typing
-print 5 + 8j
-print 3j * 2.0
-print 4j / 2.0
-[out]
-(5+8j)
-6j
-2j
-
-[case testNamedTupleWithTypes_python2]
-from typing import NamedTuple
-N = NamedTuple('N', [('a', int), ('b', str)])
-n = N(1, 'x')
-print n
-a, b = n
-print a, b
-print n[0]
-[out]
-N(a=1, b='x')
-1 x
-1
-
-[case testUnionTypeAlias_python2]
-from typing import Union
-U = Union[int, str]
-u = 1 # type: U
-u = 1.1
-[out]
-_program.py:4: error: Incompatible types in assignment (expression has type "float", variable has type "Union[int, str]")
-
-[case testSuperNew_python2]
-from typing import Dict, Any
-class MyType(type):
- def __new__(cls, name, bases, namespace):
- # type: (str, tuple, Dict[str, Any]) -> type
- return super(MyType, cls).__new__(cls, name + 'x', bases, namespace)
-class A(object):
- __metaclass__ = MyType
-print(type(A()).__name__)
-[out]
-Ax
-
-[case testSequenceIndexAndCount_python2]
-from typing import Sequence
-def f(x): # type: (Sequence[int]) -> None
- print(x.index(1))
- print(x.count(1))
-f([0, 0, 1, 1, 1])
-[out]
-2
-3
-
-[case testOptional_python2]
-from typing import Optional
-def f(): # type: () -> Optional[int]
- pass
-x = f()
-y = 1
-y = x
-
-[case testUnicodeAndOverloading_python2]
-from m import f
-f(1)
-f('')
-f(u'')
-f(b'')
-[file m.pyi]
-from typing import overload
-@overload
-def f(x): # type: (unicode) -> int
- pass
-@overload
-def f(x): # type: (bytearray) -> int
- pass
-[out]
-_program.py:2: error: No overload variant of "f" matches argument types [builtins.int]
-
-[case testByteArrayStrCompatibility_python2]
-def f(x): # type: (str) -> None
- pass
-f(bytearray('foo'))
-
-[case testAbstractProperty_python2]
-from abc import abstractproperty, ABCMeta
-class A:
- __metaclass__ = ABCMeta
- @abstractproperty
- def x(self): # type: () -> int
- pass
-class B(A):
- @property
- def x(self): # type: () -> int
- return 3
-b = B()
-print b.x + 1
-[out]
-4
-
-[case testReModuleBytesPython2]
-# Regression tests for various overloads in the re module -- bytes version
-import re
-if False:
- bre = b'a+'
- bpat = re.compile(bre)
- bpat = re.compile(bpat)
- re.search(bre, b'').groups()
- re.search(bre, u'')
- re.search(bpat, b'').groups()
- re.search(bpat, u'')
- # match(), split(), findall(), finditer() are much the same, so skip those.
- # sub(), subn() have more overloads and we are checking these:
- re.sub(bre, b'', b'') + b''
- re.sub(bpat, b'', b'') + b''
- re.sub(bre, lambda m: b'', b'') + b''
- re.sub(bpat, lambda m: b'', b'') + b''
- re.subn(bre, b'', b'')[0] + b''
- re.subn(bpat, b'', b'')[0] + b''
- re.subn(bre, lambda m: b'', b'')[0] + b''
- re.subn(bpat, lambda m: b'', b'')[0] + b''
-[out]
-
-[case testReModuleStringPython2]
-# Regression tests for various overloads in the re module -- string version
-import re
-ure = u'a+'
-upat = re.compile(ure)
-upat = re.compile(upat)
-re.search(ure, u'a').groups()
-re.search(ure, b'') # This ought to be an error, but isn't because of bytes->unicode equivalence
-re.search(upat, u'a').groups()
-re.search(upat, b'') # This ought to be an error, but isn't because of bytes->unicode equivalence
-# match(), split(), findall(), finditer() are much the same, so skip those.
-# sub(), subn() have more overloads and we are checking these:
-re.sub(ure, u'', u'') + u''
-re.sub(upat, u'', u'') + u''
-re.sub(ure, lambda m: u'', u'') + u''
-re.sub(upat, lambda m: u'', u'') + u''
-re.subn(ure, u'', u'')[0] + u''
-re.subn(upat, u'', u'')[0] + u''
-re.subn(ure, lambda m: u'', u'')[0] + u''
-re.subn(upat, lambda m: u'', u'')[0] + u''
-[out]
-
-[case testYieldRegressionTypingAwaitable_python2]
-# Make sure we don't reference typing.Awaitable in Python 2 mode.
-def g(): # type: () -> int
- yield
-[out]
-_program.py:2: error: The return type of a generator function should be "Generator" or one of its supertypes
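
For context, the error in the last case above is mypy asking for a generator-compatible return annotation. A minimal sketch of a signature that would satisfy it, assuming the Python 2 comment-style annotations used in these tests (Iterator is one of Generator's supertypes):

    from typing import Iterator

    def g():  # type: () -> Iterator[int]
        yield 1
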
diff --git a/test-data/unit/pythoneval-asyncio.test b/test-data/unit/pythoneval-asyncio.test
deleted file mode 100644
index 6d16903..0000000
--- a/test-data/unit/pythoneval-asyncio.test
+++ /dev/null
@@ -1,486 +0,0 @@
--- Test cases for type checking mypy programs using full stubs and running
--- using CPython.
---
--- These are mostly regression tests -- no attempt is made to make these
--- complete.
---
--- This test file checks asyncio and yield from interaction.
-
-[case testImportAsyncio]
-import asyncio
-print('Imported')
-[out]
-Imported
-
-[case testSimpleCoroutineSleep]
-from typing import Any, Generator
-import asyncio
-from asyncio import Future
-
-@asyncio.coroutine
-def greet_every_two_seconds() -> 'Generator[Any, None, None]':
- n = 0
- while n < 5:
- print('Prev', n)
- yield from asyncio.sleep(0.1)
- print('After', n)
- n += 1
-
-loop = asyncio.get_event_loop()
-try:
- loop.run_until_complete(greet_every_two_seconds())
-finally:
- loop.close()
-[out]
-Prev 0
-After 0
-Prev 1
-After 1
-Prev 2
-After 2
-Prev 3
-After 3
-Prev 4
-After 4
-
-[case testCoroutineCallingOtherCoroutine]
-from typing import Generator, Any
-import asyncio
-from asyncio import Future
-
-@asyncio.coroutine
-def compute(x: int, y: int) -> 'Generator[Any, None, int]':
- print("Compute %s + %s ..." % (x, y))
- yield from asyncio.sleep(0.1)
- return x + y # Here the int is wrapped in Future[int]
-
-@asyncio.coroutine
-def print_sum(x: int, y: int) -> 'Generator[Any, None, None]':
- result = yield from compute(x, y) # The type of result will be int (it is extracted from Future[int])
- print("%s + %s = %s" % (x, y, result))
-
-loop = asyncio.get_event_loop()
-loop.run_until_complete(print_sum(1, 2))
-loop.close()
-[out]
-Compute 1 + 2 ...
-1 + 2 = 3
-
-[case testCoroutineChangingFuture]
-from typing import Generator, Any
-import asyncio
-from asyncio import Future
-
-@asyncio.coroutine
-def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]':
- yield from asyncio.sleep(0.1)
- future.set_result('Future is done!')
-
-loop = asyncio.get_event_loop()
-future = asyncio.Future() # type: Future[str]
-asyncio.Task(slow_operation(future))
-loop.run_until_complete(future)
-print(future.result())
-loop.close()
-[out]
-Future is done!
-
-[case testFunctionAssignedAsCallback]
-import typing
-from typing import Generator, Any
-import asyncio
-from asyncio import Future, AbstractEventLoop
-
-@asyncio.coroutine
-def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]':
- yield from asyncio.sleep(1)
- future.set_result('Callback works!')
-
-def got_result(future: 'Future[str]') -> None:
- print(future.result())
- loop.stop()
-
-loop = asyncio.get_event_loop() # type: AbstractEventLoop
-future = asyncio.Future() # type: Future[str]
-asyncio.Task(slow_operation(future)) # Here we create a task with the function. (The Task needs a Future[T] as its first argument.)
-future.add_done_callback(got_result) # and assign the callback to the future
-try:
- loop.run_forever()
-finally:
- loop.close()
-[out]
-Callback works!
-
-[case testMultipleTasks]
-import typing
-from typing import Generator, Any
-import asyncio
-from asyncio import Task, Future
-@asyncio.coroutine
-def factorial(name, number) -> 'Generator[Any, None, None]':
- f = 1
- for i in range(2, number+1):
- print("Task %s: Compute factorial(%s)..." % (name, i))
- yield from asyncio.sleep(0.1)
- f *= i
- print("Task %s: factorial(%s) = %s" % (name, number, f))
-
-loop = asyncio.get_event_loop()
-tasks = [
- asyncio.Task(factorial("A", 2)),
- asyncio.Task(factorial("B", 3)),
- asyncio.Task(factorial("C", 4))]
-loop.run_until_complete(asyncio.wait(tasks))
-loop.close()
-[out]
-Task A: Compute factorial(2)...
-Task B: Compute factorial(2)...
-Task C: Compute factorial(2)...
-Task A: factorial(2) = 2
-Task B: Compute factorial(3)...
-Task C: Compute factorial(3)...
-Task B: factorial(3) = 6
-Task C: Compute factorial(4)...
-Task C: factorial(4) = 24
-
-
-[case testConcatenatedCoroutines]
-import typing
-from typing import Generator, Any
-import asyncio
-from asyncio import Future
-
-@asyncio.coroutine
-def h4() -> 'Generator[Any, None, int]':
- x = yield from future
- return x
-
-@asyncio.coroutine
-def h3() -> 'Generator[Any, None, int]':
- x = yield from h4()
- print("h3: %s" % x)
- return x
-
-@asyncio.coroutine
-def h2() -> 'Generator[Any, None, int]':
- x = yield from h3()
- print("h2: %s" % x)
- return x
-
-@asyncio.coroutine
-def h() -> 'Generator[Any, None, None]':
- x = yield from h2()
- print("h: %s" % x)
-
-loop = asyncio.get_event_loop()
-future = asyncio.Future() # type: Future[int]
-future.set_result(42)
-loop.run_until_complete(h())
-print("Outside %s" % future.result())
-loop.close()
-[out]
-h3: 42
-h2: 42
-h: 42
-Outside 42
-
-[case testConcatenatedCoroutinesReturningFutures]
-import typing
-from typing import Generator, Any
-import asyncio
-from asyncio import Future
-
-@asyncio.coroutine
-def h4() -> 'Generator[Any, None, Future[int]]':
- yield from asyncio.sleep(0.1)
- f = asyncio.Future() #type: Future[int]
- return f
-
-@asyncio.coroutine
-def h3() -> 'Generator[Any, None, Future[Future[int]]]':
- x = yield from h4()
- x.set_result(42)
- f = asyncio.Future() #type: Future[Future[int]]
- f.set_result(x)
- return f
-
-@asyncio.coroutine
-def h() -> 'Generator[Any, None, None]':
- print("Before")
- x = yield from h3()
- y = yield from x
- z = yield from y
- print(z)
- def normalize(future):
- # The str conversion seems inconsistent; not sure exactly why. Normalize
- # the result.
- return str(future).replace('<Future finished ', 'Future<')
- print(normalize(y))
- print(normalize(x))
-
-loop = asyncio.get_event_loop()
-loop.run_until_complete(h())
-loop.close()
-[out]
-Before
-42
-Future<result=42>
-Future<result=Future<result=42>>
-
-
-[case testCoroutineWithOwnClass]
-import typing
-from typing import Generator, Any
-import asyncio
-from asyncio import Future
-
-class A:
- def __init__(self, x: int) -> None:
- self.x = x
-
-@asyncio.coroutine
-def h() -> 'Generator[Any, None, None]':
- x = yield from future
- print("h: %s" % x.x)
-
-loop = asyncio.get_event_loop()
-future = asyncio.Future() # type: Future[A]
-future.set_result(A(42))
-loop.run_until_complete(h())
-print("Outside %s" % future.result().x)
-loop.close()
-[out]
-h: 42
-Outside 42
-
-
--- Errors
-
-[case testErrorAssigningCoroutineThatDontReturn]
-from typing import Generator, Any
-import asyncio
-from asyncio import Future
-
-@asyncio.coroutine
-def greet() -> 'Generator[Any, None, None]':
- yield from asyncio.sleep(0.2)
- print('Hello World')
-
-@asyncio.coroutine
-def test() -> 'Generator[Any, None, None]':
- yield from greet()
- x = yield from greet() # Error
-
-loop = asyncio.get_event_loop()
-try:
- loop.run_until_complete(test())
-finally:
- loop.close()
-[out]
-_program.py:13: error: Function does not return a value
-
-[case testErrorReturnIsNotTheSameType]
-from typing import Generator, Any
-import asyncio
-from asyncio import Future
-
-@asyncio.coroutine
-def compute(x: int, y: int) -> 'Generator[Any, None, int]':
- print("Compute %s + %s ..." % (x, y))
- yield from asyncio.sleep(0.1)
- return str(x + y) # Error
-
-@asyncio.coroutine
-def print_sum(x: int, y: int) -> 'Generator[Any, None, None]':
- result = yield from compute(x, y)
- print("%s + %s = %s" % (x, y, result))
-
-loop = asyncio.get_event_loop()
-loop.run_until_complete(print_sum(1, 2))
-loop.close()
-
-[out]
-_program.py:9: error: Incompatible return value type (got "str", expected "int")
-
-[case testErrorSetFutureDifferentInternalType]
-from typing import Generator, Any
-import asyncio
-from asyncio import Future
-
-@asyncio.coroutine
-def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]':
- yield from asyncio.sleep(1)
- future.set_result(42) # Error
-
-loop = asyncio.get_event_loop()
-future = asyncio.Future() # type: Future[str]
-asyncio.Task(slow_operation(future))
-loop.run_until_complete(future)
-print(future.result())
-loop.close()
-[out]
-_program.py:8: error: Argument 1 to "set_result" of "Future" has incompatible type "int"; expected "str"
-
-
-[case testErrorUsingDifferentFutureType]
-from typing import Any, Generator
-import asyncio
-from asyncio import Future
-
-@asyncio.coroutine
-def slow_operation(future: 'Future[int]') -> 'Generator[Any, None, None]':
- yield from asyncio.sleep(1)
- future.set_result(42)
-
-loop = asyncio.get_event_loop()
-future = asyncio.Future() # type: Future[str]
-asyncio.Task(slow_operation(future)) # Error
-loop.run_until_complete(future)
-print(future.result())
-loop.close()
-[out]
-_program.py:12: error: Argument 1 to "slow_operation" has incompatible type Future[str]; expected Future[int]
-
-[case testErrorUsingDifferentFutureTypeAndSetFutureDifferentInternalType]
-from typing import Generator, Any
-import asyncio
-from asyncio import Future
-
-asyncio.coroutine
-def slow_operation(future: 'Future[int]') -> 'Generator[Any, None, None]':
- yield from asyncio.sleep(1)
- future.set_result('42') # Try to set a str as the result of a Future[int]
-
-loop = asyncio.get_event_loop()
-future = asyncio.Future() # type: Future[str]
-asyncio.Task(slow_operation(future)) # Error
-loop.run_until_complete(future)
-print(future.result())
-loop.close()
-[out]
-_program.py:8: error: Argument 1 to "set_result" of "Future" has incompatible type "str"; expected "int"
-_program.py:12: error: Argument 1 to "slow_operation" has incompatible type Future[str]; expected Future[int]
-
-[case testErrorSettingCallbackWithDifferentFutureType]
-import typing
-from typing import Generator, Any
-import asyncio
-from asyncio import Future, AbstractEventLoop
-
-@asyncio.coroutine
-def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]':
- yield from asyncio.sleep(1)
- future.set_result('Future is done!')
-
-def got_result(future: 'Future[int]') -> None:
- print(future.result())
- loop.stop()
-
-loop = asyncio.get_event_loop() # type: AbstractEventLoop
-future = asyncio.Future() # type: Future[str]
-asyncio.Task(slow_operation(future))
-future.add_done_callback(got_result) # Error
-
-try:
- loop.run_forever()
-finally:
- loop.close()
-[out]
-_program.py:18: error: Argument 1 to "add_done_callback" of "Future" has incompatible type Callable[[Future[int]], None]; expected Callable[[Future[str]], Any]
-
-[case testErrorOneMoreFutureInReturnType]
-import typing
-from typing import Any, Generator
-import asyncio
-from asyncio import Future
-
-@asyncio.coroutine
-def h4() -> 'Generator[Any, None, Future[int]]':
- yield from asyncio.sleep(1)
- f = asyncio.Future() #type: Future[int]
- return f
-
-@asyncio.coroutine
-def h3() -> 'Generator[Any, None, Future[Future[Future[int]]]]':
- x = yield from h4()
- x.set_result(42)
- f = asyncio.Future() #type: Future[Future[int]]
- f.set_result(x)
- return f
-
-@asyncio.coroutine
-def h() -> 'Generator[Any, None, None]':
- print("Before")
- x = yield from h3()
- y = yield from x
- z = yield from y
- print(z)
- print(y)
- print(x)
-
-loop = asyncio.get_event_loop()
-loop.run_until_complete(h())
-loop.close()
-[out]
-_program.py:18: error: Incompatible return value type (got Future[Future[int]], expected Future[Future[Future[int]]])
-
-[case testErrorOneLessFutureInReturnType]
-import typing
-from typing import Any, Generator
-import asyncio
-from asyncio import Future
-
-@asyncio.coroutine
-def h4() -> 'Generator[Any, None, Future[int]]':
- yield from asyncio.sleep(1)
- f = asyncio.Future() #type: Future[int]
- return f
-
-@asyncio.coroutine
-def h3() -> 'Generator[Any, None, Future[int]]':
- x = yield from h4()
- x.set_result(42)
- f = asyncio.Future() #type: Future[Future[int]]
- f.set_result(x)
- return f
-
-@asyncio.coroutine
-def h() -> 'Generator[Any, None, None]':
- print("Before")
- x = yield from h3()
- y = yield from x
- print(y)
- print(x)
-
-loop = asyncio.get_event_loop()
-loop.run_until_complete(h())
-loop.close()
-[out]
-_program.py:18: error: Incompatible return value type (got Future[Future[int]], expected Future[int])
-
-[case testErrorAssignmentDifferentType]
-import typing
-from typing import Generator, Any
-import asyncio
-from asyncio import Future
-
-class A:
- def __init__(self, x: int) -> None:
- self.x = x
-
-class B:
- def __init__(self, x: int) -> None:
- self.x = x
-
-@asyncio.coroutine
-def h() -> 'Generator[Any, None, None]':
- x = yield from future # type: B # Error
- print("h: %s" % x.x)
-
-loop = asyncio.get_event_loop()
-future = asyncio.Future() # type: Future[A]
-future.set_result(A(42))
-loop.run_until_complete(h())
-loop.close()
-[out]
-_program.py:16: error: Incompatible types in assignment (expression has type "A", variable has type "B")
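
The deleted cases above all follow the same pre-async/await convention: a coroutine decorated with @asyncio.coroutine is annotated as returning Generator[Any, None, T], and a `yield from` of it produces a value of type T. A minimal self-contained sketch of that convention (assumes a Python version that still ships asyncio.coroutine, i.e. older than 3.11):

    from typing import Any, Generator
    import asyncio

    @asyncio.coroutine
    def compute(x: int, y: int) -> 'Generator[Any, None, int]':
        yield from asyncio.sleep(0.01)  # suspension point
        return x + y                    # the coroutine's result (an int)

    @asyncio.coroutine
    def main() -> 'Generator[Any, None, None]':
        result = yield from compute(1, 2)  # mypy infers that result is an int
        print(result)

    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())  # prints 3
    loop.close()
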
diff --git a/test-data/unit/pythoneval-enum.test b/test-data/unit/pythoneval-enum.test
deleted file mode 100644
index 3ae2df5..0000000
--- a/test-data/unit/pythoneval-enum.test
+++ /dev/null
@@ -1,134 +0,0 @@
--- Test cases for type checking mypy programs using full stubs and running
--- using CPython.
---
--- These are mostly regression tests -- no attempt is made to make these
--- complete.
---
--- This test file checks Enum
-
-[case testEnumBasics]
-from enum import Enum
-class Medal(Enum):
- gold = 1
- silver = 2
- bronze = 3
-m = Medal.gold
-m = 1
-[out]
-_program.py:7: error: Incompatible types in assignment (expression has type "int", variable has type "Medal")
-
-[case testEnumNameAndValue]
-from enum import Enum
-class Truth(Enum):
- true = True
- false = False
-x = ''
-x = Truth.true.name
-print(Truth.true.name)
-print(Truth.false.value)
-[out]
-true
-False
-
-[case testEnumUnique]
-import enum
-@enum.unique
-class E(enum.Enum):
- x = 1
- y = 1 # NOTE: This duplicate value is not detected by mypy at the moment
-x = 1
-x = E.x
-[out]
-_program.py:7: error: Incompatible types in assignment (expression has type "E", variable has type "int")
-
-[case testIntEnum_assignToIntVariable]
-from enum import IntEnum
-class N(IntEnum):
- x = 1
- y = 1
-n = 1
-n = N.x # Subclass of int, so it's okay
-s = ''
-s = N.y
-[out]
-_program.py:8: error: Incompatible types in assignment (expression has type "N", variable has type "str")
-
-[case testIntEnum_functionTakingIntEnum]
-from enum import IntEnum
-class SomeIntEnum(IntEnum):
- x = 1
-def takes_some_int_enum(n: SomeIntEnum):
- pass
-takes_some_int_enum(SomeIntEnum.x)
-takes_some_int_enum(1) # Error
-takes_some_int_enum(SomeIntEnum(1)) # How to deal with the above
-[out]
-_program.py:7: error: Argument 1 to "takes_some_int_enum" has incompatible type "int"; expected "SomeIntEnum"
-
-[case testIntEnum_functionTakingInt]
-from enum import IntEnum
-class SomeIntEnum(IntEnum):
- x = 1
-def takes_int(i: int):
- pass
-takes_int(SomeIntEnum.x)
-takes_int(2)
-
-[case testIntEnum_functionReturningIntEnum]
-from enum import IntEnum
-class SomeIntEnum(IntEnum):
- x = 1
-def returns_some_int_enum() -> SomeIntEnum:
- return SomeIntEnum.x
-an_int = 1
-an_int = returns_some_int_enum()
-
-an_enum = SomeIntEnum.x
-an_enum = returns_some_int_enum()
-[out]
-
-[case testEnumMethods]
-from enum import Enum
-
-class Color(Enum):
- red = 1
- green = 2
-
- def m(self, x: int): pass
- @staticmethod
- def m2(x: int): pass
-
-Color.red.m('')
-Color.m2('')
-[out]
-_program.py:11: error: Argument 1 to "m" of "Color" has incompatible type "str"; expected "int"
-_program.py:12: error: Argument 1 to "m2" of "Color" has incompatible type "str"; expected "int"
-
-[case testIntEnum_ExtendedIntEnum_functionTakingExtendedIntEnum]
-from enum import IntEnum
-class ExtendedIntEnum(IntEnum):
- pass
-class SomeExtIntEnum(ExtendedIntEnum):
- x = 1
-
-def takes_int(i: int):
- pass
-takes_int(SomeExtIntEnum.x)
-
-def takes_some_ext_int_enum(s: SomeExtIntEnum):
- pass
-takes_some_ext_int_enum(SomeExtIntEnum.x)
-
-
-[case testNamedTupleEnum]
-from typing import NamedTuple
-from enum import Enum
-
-N = NamedTuple('N', [('bar', int)])
-
-class E(N, Enum):
- X = N(1)
-
-def f(x: E) -> None: pass
-
-f(E.X)
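
The distinction the deleted enum cases keep exercising: members of a plain Enum are not interchangeable with their underlying values, while IntEnum members are also ints. A minimal sketch of that distinction (standard library enum only; the error comments paraphrase mypy's messages):

    from enum import Enum, IntEnum

    class Medal(Enum):
        gold = 1

    class Priority(IntEnum):
        low = 1

    def takes_int(n: int) -> None: ...

    takes_int(Priority.low)  # ok: IntEnum members are ints
    takes_int(Medal.gold)    # mypy error: "Medal" is not an "int"
    m = Medal.gold
    m = 1                    # mypy error: "int" cannot be assigned to "Medal"
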
diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test
deleted file mode 100644
index 7945fe5..0000000
--- a/test-data/unit/pythoneval.test
+++ /dev/null
@@ -1,1214 +0,0 @@
--- Test cases for type checking mypy programs using full stubs and running
--- using CPython.
---
--- These are mostly regression tests -- no attempt is made to make these
--- complete.
-
-
-[case testHello]
-import typing
-print('hello, world')
-[out]
-hello, world
-
--- Skipped because different typing package versions have different repr()s.
-[case testAbstractBaseClasses-skip]
-import re
-from typing import Sized, Sequence, Iterator, Iterable, Mapping, AbstractSet
-
-def check(o, t):
- rep = re.sub('0x[0-9a-fA-F]+', '0x...', repr(o))
- rep = rep.replace('sequenceiterator', 'str_iterator')
- trep = str(t).replace('_abcoll.Sized', 'collections.abc.Sized')
- print(rep, trep, isinstance(o, t))
-
-def f():
- check('x', Sized)
- check([1], Sequence)
- check({1:3}, Sequence)
- check(iter('x'), Iterator)
- check('x', Iterable)
- check({}, Mapping)
- check(set([1]), AbstractSet)
-
-f()
-[out]
-'x' <class 'collections.abc.Sized'> True
-[1] typing.Sequence True
-{1: 3} typing.Sequence False
-<str_iterator object at 0x...> typing.Iterator True
-'x' typing.Iterable True
-{} typing.Mapping True
-{1} typing.AbstractSet True
-
-[case testSized]
-from typing import Sized
-class A(Sized):
- def __len__(self): return 5
-print(len(A()))
-[out]
-5
-
-[case testReversed]
-from typing import Reversible
-class A(Reversible):
- def __iter__(self): return iter('oof')
- def __reversed__(self): return iter('foo')
-print(list(reversed(range(5))))
-print(list(reversed([1,2,3])))
-print(list(reversed('abc')))
-print(list(reversed(A())))
-[out]
--- Duplicate [ at line beginning.
-[[4, 3, 2, 1, 0]
-[[3, 2, 1]
-[['c', 'b', 'a']
-[['f', 'o', 'o']
-
-[case testIntAndFloatConversion]
-from typing import SupportsInt, SupportsFloat
-class A(SupportsInt):
- def __int__(self): return 5
-class B(SupportsFloat):
- def __float__(self): return 1.2
-print(int(1))
-print(int(6.2))
-print(int('3'))
-print(int(b'4'))
-print(int(A()))
-print(float(-9))
-print(float(B()))
-[out]
-1
-6
-3
-4
-5
--9.0
-1.2
-
-[case testAbs]
-from typing import SupportsAbs
-class A(SupportsAbs[float]):
- def __abs__(self) -> float: return 5.5
-
-print(abs(-1))
-print(abs(-1.2))
-print(abs(A()))
-[out]
-1
-1.2
-5.5
-
-[case testAbs2]
-
-n = None # type: int
-f = None # type: float
-n = abs(1)
-abs(1) + 'x' # Error
-f = abs(1.1)
-abs(1.1) + 'x' # Error
-[out]
-_program.py:5: error: Unsupported operand types for + ("int" and "str")
-_program.py:7: error: Unsupported operand types for + ("float" and "str")
-
-[case testRound]
-from typing import SupportsRound
-class A(SupportsRound):
- def __round__(self, ndigits=0): return 'x%d' % ndigits
-print(round(1.6))
-print(round(A()))
-print(round(A(), 2))
-[out]
-2
-x0
-x2
-
-[case testCallMethodViaTypeObject]
-import typing
-print(list.__add__([1, 2], [3, 4]))
-[out]
-[[1, 2, 3, 4]
-
-[case testClassDataAttribute]
-import typing
-class A:
- x = 0
-print(A.x)
-A.x += 1
-print(A.x)
-[out]
-0
-1
-
-[case testInheritedClassAttribute]
-import typing
-class A:
- x = 1
- def f(self) -> None: print('f')
-class B(A):
- pass
-B.f(None)
-print(B.x)
-[out]
-f
-1
-
-[case testFunctionDecorator]
-from typing import TypeVar, cast
-ftype = TypeVar('ftype')
-def logged(f: ftype) -> ftype:
- def g(*args, **kwargs):
- print('enter', f.__name__)
- r = f(*args, **kwargs)
- print('exit', f.__name__)
- return r
- return cast(ftype, g)
-
-@logged
-def foo(s: str) -> str:
- print('foo', s)
- return s + '!'
-
-print(foo('y'))
-print(foo('x'))
-[out]
-enter foo
-foo y
-exit foo
-y!
-enter foo
-foo x
-exit foo
-x!
-
-[case testModuleAttributes]
-import math
-import typing
-print(math.__name__)
-print(type(math.__dict__))
-print(type(math.__doc__ or ''))
-print(math.__class__)
-[out]
-math
-<class 'dict'>
-<class 'str'>
-<class 'module'>
-
-[case testSpecialAttributes]
-import typing
-class A: pass
-print(object().__doc__)
-print(A().__class__)
-[out]
-The most base type
-<class '__main__.A'>
-
-[case testFunctionAttributes]
-import typing
-ord.__class__
-print(type(ord.__doc__ + ''))
-print(ord.__name__)
-print(ord.__module__)
-[out]
-<class 'str'>
-ord
-builtins
-
-[case testTypeAttributes]
-import typing
-print(str.__class__)
-print(type(str.__doc__))
-print(str.__name__)
-print(str.__module__)
-print(str.__dict__ is not None)
-[out]
-<class 'type'>
-<class 'str'>
-str
-builtins
-True
-
-[case testBoolCompatibilityWithInt]
-import typing
-x = 0
-x = True
-print(bool('x'))
-print(bool(''))
-[out]
-True
-False
-
-[case testCallBuiltinTypeObjectsWithoutArguments]
-import typing
-print(int())
-print(repr(str()))
-print(repr(bytes()))
-print(float())
-print(bool())
-[out]
-0
-''
-b''
-0.0
-False
-
-[case testIntegerDivision]
-import typing
-x = 1 / 2
-x = 1.5
-[out]
-
-[case testStaticmethod]
-import typing
-class A:
- @staticmethod
- def f(x: str) -> int: return int(x)
-print(A.f('12'))
-print(A().f('34'))
-[out]
-12
-34
-
-[case testClassmethod]
-import typing
-class A:
- @classmethod
- def f(cls, x: str) -> int: return int(x)
-print(A.f('12'))
-print(A().f('34'))
-[out]
-12
-34
-
-[case testIntMethods]
-import typing
-print(int.from_bytes(b'ab', 'big'))
-n = 0
-print(n.from_bytes(b'ac', 'big'))
-print(n.from_bytes([2, 3], 'big'))
-print(n.to_bytes(2, 'big'))
-[out]
-24930
-24931
-515
-b'\x00\x00'
-
-[case testFloatMethods]
-import typing
-print(1.5.as_integer_ratio())
-print(1.5.hex())
-print(2.0.is_integer())
-print(float.fromhex('0x1.8'))
-[out]
-(3, 2)
-0x1.8000000000000p+0
-True
-1.5
-
-[case testArray]
-import typing
-import array
-array.array('b', [1, 2])
-[out]
-
-[case testDictFromkeys]
-import typing
-d = dict.fromkeys('foo')
-d['x'] = 2
-d2 = dict.fromkeys([1, 2], b'')
-d2[2] = b'foo'
-[out]
-
-[case testReadOnlyProperty]
-class A:
- x = 2
- @property
- def f(self) -> int:
- return self.x + 1
-print(A().f)
-[out]
-3
-
-[case testIsinstanceWithTuple]
-from typing import cast, Any
-x = cast(Any, (1, 'x'))
-if isinstance(x, tuple):
- print(x[0], x[1])
-[out]
-1 x
-
-[case testTypevarValues]
-from typing import TypeVar
-T = TypeVar('T', str, bytes)
-def f(x: T) -> T:
- if isinstance(x, str):
- return 'foo'
- else:
- return b'bar'
-print(f(''))
-print(f(b''))
-[out]
-foo
-b'bar'
-
-[case testAnyStr]
-from typing import AnyStr
-def f(x: AnyStr) -> AnyStr:
- if isinstance(x, str):
- return 'foo'
- else:
- return b'zar'
-print(f(''))
-print(f(b''))
-[out]
-foo
-b'zar'
-
-[case testNameNotImportedFromTyping]
-import typing
-cast(int, 2)
-[out]
-_program.py:2: error: Name 'cast' is not defined
-
-[case testBinaryIOType]
-from typing import BinaryIO
-def f(f: BinaryIO) -> None:
- f.write(b'foo')
- f.write(bytearray(b'foo'))
-[out]
-
-[case testIOTypes]
-from typing import IO
-import sys
-def txt(f: IO[str]) -> None:
- f.write('foo')
- f.write(b'foo')
-def bin(f: IO[bytes]) -> None:
- f.write(b'foo')
- f.write(bytearray(b'foo'))
-txt(sys.stdout)
-bin(sys.stdout)
-[out]
-_program.py:5: error: Argument 1 to "write" of "IO" has incompatible type "bytes"; expected "str"
-_program.py:10: error: Argument 1 to "bin" has incompatible type "TextIO"; expected IO[bytes]
-
-[case testBuiltinOpen]
-f = open('x')
-f.write('x')
-f.write(b'x')
-f.foobar()
-[out]
-_program.py:4: error: IO[Any] has no attribute "foobar"
-
-[case testGenericPatterns]
-from typing import Pattern
-import re
-p = None # type: Pattern[str]
-p = re.compile('foo*')
-b = None # type: Pattern[bytes]
-b = re.compile(b'foo*')
-print(p.match('fooo').group(0))
-[out]
-fooo
-
-[case testGenericMatch]
-from typing import Match
-import re
-def f(m: Match[bytes]) -> None:
- print(m.group(0))
-f(re.match(b'x*', b'xxy'))
-[out]
-b'xx'
-
-[case testMultipleTypevarsWithValues]
-from typing import TypeVar
-
-T = TypeVar('T', int, str)
-S = TypeVar('S', int, str)
-
-def f(t: T, s: S) -> None:
- t + s
-[out]
-_program.py:7: error: Unsupported operand types for + ("int" and "str")
-_program.py:7: error: Unsupported operand types for + ("str" and "int")
-
-[case testSystemExitCode]
-import typing
-print(SystemExit(5).code)
-[out]
-5
-
-[case testIntFloatDucktyping]
-
-x = None # type: float
-x = 2.2
-x = 2
-def f(x: float) -> None: pass
-f(1.1)
-f(1)
-[out]
-
-[case testsFloatOperations]
-import typing
-print(1.5 + 1.5)
-print(1.5 + 1)
-[out]
-3.0
-2.5
-
-[case testMathFunctionWithIntArgument]
-import typing
-import math
-math.sin(2)
-math.sin(2.2)
-
-[case testAbsReturnType]
-
-f = None # type: float
-n = None # type: int
-n = abs(2)
-f = abs(2.2)
-abs(2.2) + 'x'
-[out]
-_program.py:6: error: Unsupported operand types for + ("float" and "str")
-
-[case testROperatorMethods]
-
-b = None # type: bytes
-s = None # type: str
-s = b'foo' * 5 # Error
-b = 5 * b'foo'
-b = b'foo' * 5
-s = 5 * 'foo'
-s = 'foo' * 5
-[out]
-_program.py:4: error: Incompatible types in assignment (expression has type "bytes", variable has type "str")
-
-[case testROperatorMethods2]
-import typing
-print(2 / 0.5)
-print(' ', 2 * [3, 4])
-[out]
-4.0
- [3, 4, 3, 4]
-
-[case testNotImplemented]
-import typing
-class A:
- def __add__(self, x: int) -> int:
- if isinstance(x, int):
- return x + 1
- return NotImplemented
-class B:
- def __radd__(self, x: A) -> str:
- return 'x'
-print(A() + 1)
-print(A() + B())
-[out]
-2
-x
-
-[case testMappingMethods]
-# Regression test
-from typing import Mapping
-x = {'x': 'y'} # type: Mapping[str, str]
-print('x' in x)
-print('y' in x)
-[out]
-True
-False
-
-[case testOverlappingOperatorMethods]
-
-class X: pass
-class A:
- def __add__(self, x) -> int:
- if isinstance(x, X):
- return 1
- return NotImplemented
-class B:
- def __radd__(self, x: A) -> str: return 'x'
-class C(X, B): pass
-b = None # type: B
-b = C()
-print(A() + b)
-[out]
-_program.py:9: error: Signatures of "__radd__" of "B" and "__add__" of "A" are unsafely overlapping
-
-[case testBytesAndBytearrayComparisons]
-import typing
-print(b'ab' < bytearray(b'b'))
-print(bytearray(b'ab') < b'a')
-[out]
-True
-False
-
-[case testBytesAndBytearrayComparisons2]
-import typing
-'' < b''
-b'' < ''
-'' < bytearray()
-bytearray() < ''
-[out]
-_program.py:2: error: Unsupported operand types for > ("bytes" and "str")
-_program.py:3: error: Unsupported operand types for > ("str" and "bytes")
-_program.py:4: error: Unsupported operand types for > ("bytearray" and "str")
-_program.py:5: error: Unsupported operand types for > ("str" and "bytearray")
-
-[case testInplaceOperatorMethod]
-import typing
-a = [1]
-print('', a.__iadd__([2]))
-print('', a)
-[out]
- [1, 2]
- [1, 2]
-
-[case testListInplaceAdd]
-import typing
-a = [1]
-a += iter([2, 3])
-print(tuple(a))
-[out]
-(1, 2, 3)
-
-[case testListConcatenateWithIterable]
-import typing
-[1] + iter([2, 3])
-[out]
-_program.py:2: error: Unsupported operand types for + ("list" and Iterator[int])
-
-[case testInferHeterogeneousListOfIterables]
-from typing import Sequence
-s = ['x', 'y'] # type: Sequence[str]
-a = [['x', 'x'], 'fo', s, iter('foo'), {'aa'}]
-for i, x in enumerate(a):
- print(i, next(iter(x)))
-[out]
-0 x
-1 f
-2 x
-3 f
-4 aa
-
-[case testTextIOProperties]
-import typing
-import sys
-print(type(sys.stdin.encoding))
-print(type(sys.stdin.errors))
-sys.stdin.line_buffering
-sys.stdin.buffer
-sys.stdin.newlines
-[out]
-<class 'str'>
-<class 'str'>
-
-[case testIOProperties]
-import typing
-import sys
-print(sys.stdin.name)
-print(sys.stdin.buffer.mode)
-[out]
-<stdin>
-rb
-
-[case testSetUnion]
-import typing
-s = {'x', 'y'}
-print('>', sorted(s.union('foo')))
-[out]
-> ['f', 'o', 'x', 'y']
-
-[case testFromFuturePrintFunction]
-from __future__ import print_function
-print('a', 'b')
-[out]
-a b
-
-[case testLenOfTuple]
-import typing
-print(len((1, 'x')))
-[out]
-2
-
-[case testListMethods]
-import typing
-import sys
-l = [0, 1, 2, 3, 4]
-if sys.version >= '3.3':
- l.clear()
-else:
- l = []
-l.append(0)
-print('>', l)
-if sys.version >= '3.3':
- m = l.copy()
-else:
- m = l[:]
-m.extend([1, 2, 3, 4])
-print('>', m)
-print(l.index(0))
-print(l.index(0, 0))
-print(l.index(0, 0, 1))
-try:
- print(l.index(1))
- print('expected ValueError')
-except ValueError:
- pass
-l.insert(0, 1)
-print('>', l)
-print(l.pop(0))
-print(l.pop())
-m.remove(0)
-try:
- m.remove(0)
- print('expected ValueError')
-except ValueError:
- pass
-m.reverse()
-m.sort()
-m.sort(key=lambda x: -x)
-m.sort(reverse=False)
-m.sort(key=lambda x: -x, reverse=True)
-print('>', m)
-[out]
-> [0]
-> [0, 1, 2, 3, 4]
-0
-0
-0
-> [1, 0]
-1
-0
-> [1, 2, 3, 4]
-
-[case testListOperators]
-import typing
-l = [0, 1]
-print('+', l + [2])
-print('*', l * 2)
-print('*', 2 * l)
-print('in', 1 in l)
-print('==', l == [1, 2])
-print('!=', l != [1, 2])
-print('>', l > [1, 2, 3])
-print('>=', l >= [1, 2, 3])
-print('<', l < [1, 2, 3])
-print('<=', l <= [1, 2, 3])
-print('>[0]', l[0])
-l += [2]
-print('+=', l)
-l *= 2
-print('*=', l)
-print('iter', list(iter(l)))
-print('len', len(l))
-print('repr', repr(l))
-l[:3] = []
-print('setslice', l)
-print('reversed', list(reversed(l)))
-[out]
-+ [0, 1, 2]
-* [0, 1, 0, 1]
-* [0, 1, 0, 1]
-in True
-== False
-!= True
-> False
->= False
-< True
-<= True
->[0] 0
-+= [0, 1, 2]
-*= [0, 1, 2, 0, 1, 2]
-iter [0, 1, 2, 0, 1, 2]
-len 6
-repr [0, 1, 2, 0, 1, 2]
-setslice [0, 1, 2]
-reversed [2, 1, 0]
-
-[case testTupleAsSubtypeOfSequence]
-from typing import TypeVar, Sequence
-T = TypeVar('T')
-def f(a: Sequence[T]) -> None: print(a)
-f(tuple())
-[out]
-()
-
-[case testMapWithLambdaSpecialCase-skip]
-# TODO: Fix this; this was broken at some point but not sure why.
-from typing import List, Iterator
-a = [[1], [3]]
-b = map(lambda y: y[0], a)
-print('>', list(b))
-[out]
-> [1, 3]
-
-[case testInternalBuiltinDefinition]
-import typing
-def f(x: _T) -> None: pass
-[out]
-_program.py:2: error: Name '_T' is not defined
-
-[case testVarArgsFunctionSubtyping]
-import typing
-def f(*args: str) -> str: return args[0]
-map(f, ['x'])
-map(f, [1])
-[out]
-_program.py:4: error: Argument 1 to "map" has incompatible type Callable[[StarArg(str)], str]; expected Callable[[int], str]
-
-[case testMapStr]
-import typing
-x = range(3)
-a = list(map(str, x))
-a + 1
-[out]
-_program.py:4: error: Unsupported operand types for + (List[str] and "int")
-
-[case testNamedTuple]
-import typing
-from collections import namedtuple
-X = namedtuple('X', ['a', 'b'])
-x = X(a=1, b='s')
-print(x.a, x.b)
-[out]
-1 s
-
-[case testNamedTupleShortSyntax]
-import typing
-from collections import namedtuple
-X = namedtuple('X', ' a b ')
-x = X(a=1, b='s')
-print(x.a, x.b)
-[out]
-1 s
-
-[case testNamedTupleError]
-import typing
-from collections import namedtuple
-X = namedtuple('X', ['a', 'b'])
-x = X(a=1, b='s')
-x.c
-[out]
-_program.py:5: error: "X" has no attribute "c"
-
-[case testNamedTupleTupleOperations]
-from typing import Iterable
-from collections import namedtuple
-X = namedtuple('X', ['a', 'b'])
-def f(x: Iterable[int]) -> None: pass
-x = X(a=1, b='s')
-f(x)
-print(len(x))
-print(x.index(1))
-print(x.count(1))
-print(x + x)
-[out]
-2
-0
-1
-(1, 's', 1, 's')
-
-[case testNamedTupleWithTypes]
-from typing import NamedTuple
-N = NamedTuple('N', [('a', int), ('b', str)])
-n = N(1, 'x')
-print(n)
-a, b = n
-print(a, b)
-print(n[0])
-[out]
-N(a=1, b='x')
-1 x
-1
-
-[case testRelativeImport]
-import typing
-from m import x
-print(x)
-[file m/__init__.py]
-from .n import x
-[file m/n.py]
-x = 1
-[out]
-1
-
-[case testRelativeImport2]
-import typing
-from m.n import x
-print(x)
-[file m/__init__.py]
-[file m/n.py]
-from .nn import x
-[file m/nn.py]
-x = 2
-[out]
-2
-
-[case testPyiTakesPrecedenceOverPy]
-import m
-m.f(1)
-[file m.py]
-def f(x):
- print(x)
-[file m.pyi]
-import typing
-def f(x: str) -> None: pass
-[out]
-_program.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str"
-
-[case testAssignToComplexReal]
-import typing
-x = 4j
-y = x.real
-y = x # Error
-x.real = 2.0 # Error
-[out]
-_program.py:4: error: Incompatible types in assignment (expression has type "complex", variable has type "float")
-_program.py:5: error: Property "real" defined in "complex" is read-only
-
-[case testComplexArithmetic]
-import typing
-print(5 + 8j)
-print(3j * 2.0)
-print(4J / 2.0)
-[out]
-(5+8j)
-6j
-2j
-
-[case testComplexArithmetic2]
-import typing
-x = 5 + 8j
-x = ''
-y = 3j * 2.0
-y = ''
-[out]
-_program.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "complex")
-_program.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "complex")
-
-[case testUnionTypeAlias]
-from typing import Union
-U = Union[int, str]
-u = 1 # type: U
-u = 1.1
-[out]
-_program.py:4: error: Incompatible types in assignment (expression has type "float", variable has type "Union[int, str]")
-
-[case testTupleTypeAlias]
-from typing import Tuple
-A = Tuple[int, str]
-u = 1, 'x' # type: A
-u = 1
-[out]
-_program.py:4: error: Incompatible types in assignment (expression has type "int", variable has type "Tuple[int, str]")
-
-[case testCallableTypeAlias]
-from typing import Callable
-A = Callable[[int], None]
-def f(x: A) -> None:
- x(1)
- x('')
-[out]
-_program.py:5: error: Argument 1 has incompatible type "str"; expected "int"
-
-[case testSuperNew]
-from typing import Dict, Any
-class MyType(type):
- def __new__(cls, name: str, bases: tuple, namespace: Dict[str, Any]) -> type:
- return super().__new__(cls, name + 'x', bases, namespace)
-class A(metaclass=MyType): pass
-print(type(A()).__name__)
-[out]
-Ax
-
-[case testSequenceIndexAndCount]
-from typing import Sequence
-def f(x: Sequence[int]) -> None:
- print(x.index(1))
- print(x.count(1))
-f([0, 0, 1, 1, 1])
-[out]
-2
-3
-
-[case testEscapeInTripleQuotedStrLiteral]
-print('''\'''')
-print(r"""\"""$""")
-[out]
-'
-\"""$
-
-[case testSubclassBothGenericAndNonGenericABC]
-from typing import Generic, TypeVar
-from abc import ABCMeta
-T = TypeVar('T')
-class A(metaclass=ABCMeta): pass
-class B(Generic[T]): pass
-class C(A, B): pass
-class D(B, A): pass
-class E(A, B[T], Generic[T]): pass
-class F(B[T], A, Generic[T]): pass
-def f(e: E[int], f: F[int]) -> None: pass
-[out]
-
-[case testOptional]
-from typing import Optional
-def f() -> Optional[int]: pass
-x = f()
-y = 1
-y = x
-
-[case testAppendToStarArg]
-import typing
-def f(*x: int) -> None:
- x.append(1)
-f(1)
-[out]
-_program.py:3: error: Tuple[int, ...] has no attribute "append"
-
-[case testExit]
-print('a')
-exit(2)
-print('b')
-[out]
-a
-
-[case testTypeVariableTypeComparability]
-from typing import TypeVar
-T = TypeVar('T')
-def eq(x: T, y: T, z: T) -> T:
- if x == y:
- return y
- else:
- return z
-print(eq(1, 2, 3))
-print(eq('x', 'x', 'z'))
-[out]
-3
-x
-
-[case testIntDecimalCompatibility]
-import typing
-from decimal import Decimal
-print(Decimal(1) + 2)
-print(Decimal(1) - 2)
-print(1 + Decimal('2.34'))
-print(1 - Decimal('2.34'))
-print(2 * Decimal('2.34'))
-[out]
-3
--1
-3.34
--1.34
-4.68
-
-[case testInstantiateBuiltinTypes]
-from typing import Dict, Set, List
-d = dict() # type: Dict[int, str]
-s = set() # type: Set[int]
-l = list() # type: List[int]
-str()
-bytes()
-bytearray()
-int()
-float()
-complex()
-slice(1)
-bool()
-
-[case testVariableLengthTuple]
-from typing import Tuple
-def p(t: Tuple[int, ...]) -> None:
- for n in t:
- print(n)
-p((1, 3, 2))
-[out]
-1
-3
-2
-
-[case testVariableLengthTupleError]
-from typing import Tuple
-def p(t: Tuple[str, ...]) -> None:
- n = 5
- print(t[n])
- for s in t:
- s()
-''.startswith(('x', 'y'))
-''.startswith(('x', b'y'))
-[out]
-_program.py:6: error: "str" not callable
-_program.py:8: error: Argument 1 to "startswith" of "str" has incompatible type "Tuple[str, bytes]"; expected "Union[str, Tuple[str, ...]]"
-
-[case testMultiplyTupleByInteger]
-n = 4
-t = ('',) * n
-t + 1
-[out]
-_program.py:3: error: Unsupported operand types for + (Tuple[str, ...] and "int")
-
-[case testMultiplyTupleByIntegerReverse]
-n = 4
-t = n * ('',)
-t + 1
-[out]
-_program.py:3: error: Unsupported operand types for + (Tuple[str, ...] and "int")
-
-[case testDictWithKeywordArgs]
-from typing import Dict, Any, List
-d1 = dict(a=1, b=2) # type: Dict[str, int]
-d2 = dict(a=1, b='') # type: Dict[str, int] # E
-d3 = dict(a=1, b=1)
-d3.xyz # E
-d4 = dict(a=1, b='') # type: Dict[str, Any]
-result = dict(x=[], y=[]) # type: Dict[str, List[str]]
-[out]
-_program.py:3: error: List item 1 has incompatible type "Tuple[str, str]"
-_program.py:5: error: Dict[str, int] has no attribute "xyz"
-
-[case testDefaultDict]
-import typing as t
-from collections import defaultdict
-
-T = t.TypeVar('T')
-
-d1 = defaultdict(list) # type: t.DefaultDict[int, str]
-d2 = defaultdict() # type: t.DefaultDict[int, str]
-d2[0] = '0'
-d2['0'] = 0
-
-def tst(dct: t.DefaultDict[int, T]) -> T:
- return dct[0]
-
-collections = ['coins', 'stamps', 'comics'] # type: t.List[str]
-d3 = defaultdict(str) # type: t.DefaultDict[int, str]
-collections[2]
-
-tst(defaultdict(list, {0: []}))
-tst(defaultdict(list, {'0': []}))
-
-class MyDDict(t.DefaultDict[int,T], t.Generic[T]):
- pass
-MyDDict(dict)['0']
-MyDDict(dict)[0]
-[out]
-_program.py:6: error: Argument 1 to "defaultdict" has incompatible type List[_T]; expected Callable[[], str]
-_program.py:9: error: Invalid index type "str" for "dict"; expected type "int"
-_program.py:9: error: Incompatible types in assignment (expression has type "int", target has type "str")
-_program.py:19: error: List item 0 has incompatible type "Tuple[str, List[None]]"
-_program.py:23: error: Invalid index type "str" for "dict"; expected type "int"
-
-[case testDictUpdateInference]
-from typing import Dict, Optional
-d = {} # type: Dict[str, Optional[int]]
-d.update({str(i): None for i in range(4)})
-
-[case testSuperAndSetattr]
-class A:
- def __init__(self) -> None:
- super().__setattr__('a', 1)
- super().__setattr__(1, 'a')
-[out]
-_program.py:4: error: Argument 1 to "__setattr__" of "object" has incompatible type "int"; expected "str"
-
-[case testMetaclassAndSuper]
-class A(type):
- def __new__(cls, name, bases, namespace) -> 'type':
- return super().__new__(cls, '', (object,), {'x': 7})
-
-class B(metaclass=A):
- pass
-
-print(getattr(B(), 'x'))
-[out]
-7
-
-[case testSortedNoError]
-from typing import Iterable, Callable, TypeVar, List, Dict
-T = TypeVar('T')
-def sorted(x: Iterable[T], *, key: Callable[[T], object] = None) -> None: ...
-a = None # type: List[Dict[str, str]]
-sorted(a, key=lambda y: y[''])
-
-[case testAbstractProperty]
-from abc import abstractproperty, ABCMeta
-class A(metaclass=ABCMeta):
- @abstractproperty
- def x(self) -> int: pass
-class B(A):
- @property
- def x(self) -> int:
- return 3
-b = B()
-print(b.x + 1)
-[out]
-4
-
-[case testInferenceWithLambda]
-from typing import TypeVar, Iterable, Iterator
-import itertools
-
-_T = TypeVar('_T')
-
-def f(iterable): # type: (Iterable[_T]) -> Iterator[List[_T]]
- grouped = itertools.groupby(enumerate(iterable), lambda pair: pair[0] // 2)
- return ([elem for _, elem in group] for _, group in grouped)
-
-[case testReModuleBytes]
-# Regression tests for various overloads in the re module -- bytes version
-import re
-bre = b'a+'
-bpat = re.compile(bre)
-bpat = re.compile(bpat)
-re.search(bre, b'').groups()
-re.search(bre, u'') # Error
-re.search(bpat, b'').groups()
-re.search(bpat, u'') # Error
-# match(), split(), findall(), finditer() are much the same, so skip those.
-# sub(), subn() have more overloads and we are checking these:
-re.sub(bre, b'', b'') + b''
-re.sub(bpat, b'', b'') + b''
-re.sub(bre, lambda m: b'', b'') + b''
-re.sub(bpat, lambda m: b'', b'') + b''
-re.subn(bre, b'', b'')[0] + b''
-re.subn(bpat, b'', b'')[0] + b''
-re.subn(bre, lambda m: b'', b'')[0] + b''
-re.subn(bpat, lambda m: b'', b'')[0] + b''
-[out]
-_program.py:7: error: Type argument 1 of "search" has incompatible value "object"
-_program.py:9: error: Cannot infer type argument 1 of "search"
-
-[case testReModuleString]
-# Regression tests for various overloads in the re module -- string version
-import re
-sre = 'a+'
-spat = re.compile(sre)
-spat = re.compile(spat)
-re.search(sre, '').groups()
-re.search(sre, b'') # Error
-re.search(spat, '').groups()
-re.search(spat, b'') # Error
-# match(), split(), findall(), finditer() are much the same, so skip those.
-# sub(), subn() have more overloads and we are checking these:
-re.sub(sre, '', '') + ''
-re.sub(spat, '', '') + ''
-re.sub(sre, lambda m: '', '') + ''
-re.sub(spat, lambda m: '', '') + ''
-re.subn(sre, '', '')[0] + ''
-re.subn(spat, '', '')[0] + ''
-re.subn(sre, lambda m: '', '')[0] + ''
-re.subn(spat, lambda m: '', '')[0] + ''
-[out]
-_program.py:7: error: Type argument 1 of "search" has incompatible value "object"
-_program.py:9: error: Cannot infer type argument 1 of "search"
-
-[case testListSetitemTuple]
-from typing import List, Tuple
-a = [] # type: List[Tuple[str, int]]
-a[0] = 'x', 1
-a[1] = 2, 'y'
-a[:] = [('z', 3)]
-[out]
-_program.py:4: error: Incompatible types in assignment (expression has type "Tuple[int, str]", target has type "Tuple[str, int]")
diff --git a/test-data/unit/semanal-abstractclasses.test b/test-data/unit/semanal-abstractclasses.test
deleted file mode 100644
index b5147bd..0000000
--- a/test-data/unit/semanal-abstractclasses.test
+++ /dev/null
@@ -1,119 +0,0 @@
-[case testAbstractMethods]
-from abc import abstractmethod, ABCMeta
-import typing
-
-class A(metaclass=ABCMeta):
- @abstractmethod
- def g(self) -> 'A': pass
- @abstractmethod
- def f(self) -> 'A': return self
-[out]
-MypyFile:1(
- ImportFrom:1(abc, [abstractmethod, ABCMeta])
- Import:2(typing)
- ClassDef:4(
- A
- Metaclass(ABCMeta)
- Decorator:5(
- Var(g)
- FuncDef:6(
- g
- Args(
- Var(self))
- def (self: __main__.A) -> __main__.A
- Abstract
- Block:6(
- PassStmt:6())))
- Decorator:7(
- Var(f)
- FuncDef:8(
- f
- Args(
- Var(self))
- def (self: __main__.A) -> __main__.A
- Abstract
- Block:8(
- ReturnStmt:8(
- NameExpr(self [l])))))))
-
-[case testClassInheritingTwoAbstractClasses]
-from abc import abstractmethod, ABCMeta
-import typing
-
-class A(metaclass=ABCMeta): pass
-class B(metaclass=ABCMeta): pass
-class C(A, B): pass
-[out]
-MypyFile:1(
- ImportFrom:1(abc, [abstractmethod, ABCMeta])
- Import:2(typing)
- ClassDef:4(
- A
- Metaclass(ABCMeta)
- PassStmt:4())
- ClassDef:5(
- B
- Metaclass(ABCMeta)
- PassStmt:5())
- ClassDef:6(
- C
- BaseType(
- __main__.A
- __main__.B)
- PassStmt:6()))
-
-[case testAbstractGenericClass]
-from abc import abstractmethod
-from typing import Generic, TypeVar
-T = TypeVar('T')
-class A(Generic[T]):
- @abstractmethod
- def f(self) -> 'A[T]': pass
-[out]
-MypyFile:1(
- ImportFrom:1(abc, [abstractmethod])
- ImportFrom:2(typing, [Generic, TypeVar])
- AssignmentStmt:3(
- NameExpr(T* [__main__.T])
- TypeVarExpr:3())
- ClassDef:4(
- A
- TypeVars(
- T)
- Decorator:5(
- Var(f)
- FuncDef:6(
- f
- Args(
- Var(self))
- def (self: __main__.A[T`1]) -> __main__.A[T`1]
- Abstract
- Block:6(
- PassStmt:6())))))
-
-[case testFullyQualifiedAbstractMethodDecl]
-import abc
-from abc import ABCMeta
-import typing
-
-class A(metaclass=ABCMeta):
- @abc.abstractmethod
- def g(self) -> 'A': pass
-[out]
-MypyFile:1(
- Import:1(abc)
- ImportFrom:2(abc, [ABCMeta])
- Import:3(typing)
- ClassDef:5(
- A
- Metaclass(ABCMeta)
- Decorator:6(
- Var(g)
- FuncDef:7(
- g
- Args(
- Var(self))
- def (self: __main__.A) -> __main__.A
- Abstract
- Block:7(
- PassStmt:7())))))
diff --git a/test-data/unit/semanal-basic.test b/test-data/unit/semanal-basic.test
deleted file mode 100644
index 3c11da8..0000000
--- a/test-data/unit/semanal-basic.test
+++ /dev/null
@@ -1,459 +0,0 @@
-[case testEmptyFile]
-[out]
-MypyFile:1()
-
-[case testGlobalVariable]
-x = 1
-x
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- IntExpr(1))
- ExpressionStmt:2(
- NameExpr(x [__main__.x])))
-
-[case testMultipleGlobals]
-x = y = 2
-z = 3
-(x, y, z)
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- Lvalues(
- NameExpr(x* [__main__.x])
- NameExpr(y* [__main__.y]))
- IntExpr(2))
- AssignmentStmt:2(
- NameExpr(z* [__main__.z])
- IntExpr(3))
- ExpressionStmt:3(
- TupleExpr:3(
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y])
- NameExpr(z [__main__.z]))))
-
-[case testEmptyFunction]
-def f(): pass
-f()
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- PassStmt:1()))
- ExpressionStmt:2(
- CallExpr:2(
- NameExpr(f [__main__.f])
- Args())))
-
-[case testAccessingGlobalNameBeforeDefinition]
-x
-f()
-x = 1
-def f(): pass
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- NameExpr(x [__main__.x]))
- ExpressionStmt:2(
- CallExpr:2(
- NameExpr(f [__main__.f])
- Args()))
- AssignmentStmt:3(
- NameExpr(x* [__main__.x])
- IntExpr(1))
- FuncDef:4(
- f
- Block:4(
- PassStmt:4())))
-
-[case testFunctionArgs]
-def f(x, y):
- (x, y)
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x)
- Var(y))
- Block:1(
- ExpressionStmt:2(
- TupleExpr:2(
- NameExpr(x [l])
- NameExpr(y [l]))))))
-
-[case testLocalVar]
-def f():
- x = 1
- x
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- AssignmentStmt:2(
- NameExpr(x* [l])
- IntExpr(1))
- ExpressionStmt:3(
- NameExpr(x [l])))))
-
-[case testAccessGlobalInFn]
-def f():
- x
- g()
-x = 1
-def g(): pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- ExpressionStmt:2(
- NameExpr(x [__main__.x]))
- ExpressionStmt:3(
- CallExpr:3(
- NameExpr(g [__main__.g])
- Args()))))
- AssignmentStmt:4(
- NameExpr(x* [__main__.x])
- IntExpr(1))
- FuncDef:5(
- g
- Block:5(
- PassStmt:5())))
-
-[case testAssignmentAfterInit]
-x = 1
-x = 2
-def f(y):
- y = 1
- z = 1
- z = 2
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- IntExpr(1))
- AssignmentStmt:2(
- NameExpr(x [__main__.x])
- IntExpr(2))
- FuncDef:3(
- f
- Args(
- Var(y))
- Block:3(
- AssignmentStmt:4(
- NameExpr(y [l])
- IntExpr(1))
- AssignmentStmt:5(
- NameExpr(z* [l])
- IntExpr(1))
- AssignmentStmt:6(
- NameExpr(z [l])
- IntExpr(2)))))
-
-[case testLocalAndGlobalAliasing]
-x = 1
-def f():
- x = 2
- x
-x
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- IntExpr(1))
- FuncDef:2(
- f
- Block:2(
- AssignmentStmt:3(
- NameExpr(x* [l])
- IntExpr(2))
- ExpressionStmt:4(
- NameExpr(x [l]))))
- ExpressionStmt:5(
- NameExpr(x [__main__.x])))
-
-[case testArgumentInitializers]
-def f(x = f, y = object):
- x, y
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x)
- Var(y))
- Init(
- AssignmentStmt:1(
- NameExpr(x [l])
- NameExpr(f [__main__.f]))
- AssignmentStmt:1(
- NameExpr(y [l])
- NameExpr(object [builtins.object])))
- Block:1(
- ExpressionStmt:2(
- TupleExpr:2(
- NameExpr(x [l])
- NameExpr(y [l]))))))
-
-[case testVarArgs]
-def f(x, *y):
- x, y
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- VarArg(
- Var(y))
- Block:1(
- ExpressionStmt:2(
- TupleExpr:2(
- NameExpr(x [l])
- NameExpr(y [l]))))))
-
-[case testGlobalDecl]
-x = None
-def f():
- global x
- x = None
- x
-class A: pass
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- NameExpr(None [builtins.None]))
- FuncDef:2(
- f
- Block:2(
- GlobalDecl:3(
- x)
- AssignmentStmt:4(
- NameExpr(x [__main__.x])
- NameExpr(None [builtins.None]))
- ExpressionStmt:5(
- NameExpr(x [__main__.x]))))
- ClassDef:6(
- A
- PassStmt:6()))
-
-[case testMultipleNamesInGlobalDecl]
-x, y = None, None
-def f():
- global x, y
- x = y
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- TupleExpr:1(
- NameExpr(x* [__main__.x])
- NameExpr(y* [__main__.y]))
- TupleExpr:1(
- NameExpr(None [builtins.None])
- NameExpr(None [builtins.None])))
- FuncDef:2(
- f
- Block:2(
- GlobalDecl:3(
- x
- y)
- AssignmentStmt:4(
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y])))))
-
-[case testGlobalDeclScope]
-x = None
-def f():
- global x
-def g():
- x = None
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- NameExpr(None [builtins.None]))
- FuncDef:2(
- f
- Block:2(
- GlobalDecl:3(
- x)))
- FuncDef:4(
- g
- Block:4(
- AssignmentStmt:5(
- NameExpr(x* [l])
- NameExpr(None [builtins.None])))))
-
-[case testGlobalDeclScope]
-x = None
-def f():
- global x
-def g():
- x = None
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- NameExpr(None [builtins.None]))
- FuncDef:2(
- f
- Block:2(
- GlobalDecl:3(
- x)))
- FuncDef:4(
- g
- Block:4(
- AssignmentStmt:5(
- NameExpr(x* [l])
- NameExpr(None [builtins.None])))))
-
-[case testGlobaWithinMethod]
-x = None
-class A:
- def f(self):
- global x
- x = self
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- NameExpr(None [builtins.None]))
- ClassDef:2(
- A
- FuncDef:3(
- f
- Args(
- Var(self))
- Block:3(
- GlobalDecl:4(
- x)
- AssignmentStmt:5(
- NameExpr(x [__main__.x])
- NameExpr(self [l]))))))
-
-[case testGlobalDefinedInBlock]
-if object:
- x = object()
- x = x
-x
-[out]
-MypyFile:1(
- IfStmt:1(
- If(
- NameExpr(object [builtins.object]))
- Then(
- AssignmentStmt:2(
- NameExpr(x* [__main__.x])
- CallExpr:2(
- NameExpr(object [builtins.object])
- Args()))
- AssignmentStmt:3(
- NameExpr(x [__main__.x])
- NameExpr(x [__main__.x]))))
- ExpressionStmt:4(
- NameExpr(x [__main__.x])))
-
-[case testNonlocalDecl]
-def g():
- x = None
- def f():
- nonlocal x
- x = None
- x
-[out]
-MypyFile:1(
- FuncDef:1(
- g
- Block:1(
- AssignmentStmt:2(
- NameExpr(x* [l])
- NameExpr(None [builtins.None]))
- FuncDef:3(
- f
- Block:3(
- NonlocalDecl:4(
- x)
- AssignmentStmt:5(
- NameExpr(x [l])
- NameExpr(None [builtins.None]))
- ExpressionStmt:6(
- NameExpr(x [l])))))))
-
-[case testMultipleNamesInNonlocalDecl]
-def g():
- x, y = None, None
- def f(z):
- nonlocal x, y
- x = y
-[out]
-MypyFile:1(
- FuncDef:1(
- g
- Block:1(
- AssignmentStmt:2(
- TupleExpr:2(
- NameExpr(x* [l])
- NameExpr(y* [l]))
- TupleExpr:2(
- NameExpr(None [builtins.None])
- NameExpr(None [builtins.None])))
- FuncDef:3(
- f
- Args(
- Var(z))
- Block:3(
- NonlocalDecl:4(
- x
- y)
- AssignmentStmt:5(
- NameExpr(x [l])
- NameExpr(y [l])))))))
-
-[case testNestedFunctions]
-def f(x):
- def g(y):
- z = y + x
- return g
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- Block:1(
- FuncDef:2(
- g
- Args(
- Var(y))
- Block:2(
- AssignmentStmt:3(
- NameExpr(z* [l])
- OpExpr:3(
- +
- NameExpr(y [l])
- NameExpr(x [l])))))
- ReturnStmt:4(
- NameExpr(g [l])))))
-
-[case testNestedFunctionWithOverlappingName]
-def f(x):
- def g():
- x = 1
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- Block:1(
- FuncDef:2(
- g
- Block:2(
- AssignmentStmt:3(
- NameExpr(x* [l])
- IntExpr(1)))))))
diff --git a/test-data/unit/semanal-classes.test b/test-data/unit/semanal-classes.test
deleted file mode 100644
index a99f851..0000000
--- a/test-data/unit/semanal-classes.test
+++ /dev/null
@@ -1,623 +0,0 @@
--- Test cases related to classes for the semantic analyzer.
-
-[case testSimpleClass]
-class A: pass
-x = A
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- PassStmt:1())
- AssignmentStmt:2(
- NameExpr(x* [__main__.x])
- NameExpr(A [__main__.A])))
-
-[case testMethods]
-class A:
- def __init__(self, x):
- y = x
- def f(self):
- y = self
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- FuncDef:2(
- __init__
- Args(
- Var(self)
- Var(x))
- Block:2(
- AssignmentStmt:3(
- NameExpr(y* [l])
- NameExpr(x [l]))))
- FuncDef:4(
- f
- Args(
- Var(self))
- Block:4(
- AssignmentStmt:5(
- NameExpr(y* [l])
- NameExpr(self [l]))))))
-
-[case testMemberDefinitionInInit]
-class A:
- def __init__(self):
- self.x = 1
- self.y = 2
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- FuncDef:2(
- __init__
- Args(
- Var(self))
- Block:2(
- AssignmentStmt:3(
- MemberExpr:3(
- NameExpr(self [l])
- x*)
- IntExpr(1))
- AssignmentStmt:4(
- MemberExpr:4(
- NameExpr(self [l])
- y*)
- IntExpr(2))))))
-
-[case testMemberAssignmentViaSelfOutsideInit]
-class A:
- def f(self):
- self.x = 1
-def __init__(self):
- self.y = 1
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- FuncDef:2(
- f
- Args(
- Var(self))
- Block:2(
- AssignmentStmt:3(
- MemberExpr:3(
- NameExpr(self [l])
- x*)
- IntExpr(1)))))
- FuncDef:4(
- __init__
- Args(
- Var(self))
- Block:4(
- AssignmentStmt:5(
- MemberExpr:5(
- NameExpr(self [l])
- y)
- IntExpr(1)))))
-
-[case testMemberAssignmentNotViaSelf]
-class A:
- def __init__(x, self):
- self.y = 1 # not really self
-class B:
- def __init__(x):
- self = x
- self.z = 1
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- FuncDef:2(
- __init__
- Args(
- Var(x)
- Var(self))
- Block:2(
- AssignmentStmt:3(
- MemberExpr:3(
- NameExpr(self [l])
- y)
- IntExpr(1)))))
- ClassDef:4(
- B
- FuncDef:5(
- __init__
- Args(
- Var(x))
- Block:5(
- AssignmentStmt:6(
- NameExpr(self* [l])
- NameExpr(x [l]))
- AssignmentStmt:7(
- MemberExpr:7(
- NameExpr(self [l])
- z)
- IntExpr(1))))))
-
-[case testNonStandardNameForSelfAndInit]
-class A:
- def __init__(x):
- x.y = 1
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- FuncDef:2(
- __init__
- Args(
- Var(x))
- Block:2(
- AssignmentStmt:3(
- MemberExpr:3(
- NameExpr(x [l])
- y*)
- IntExpr(1))))))
-
-[case testAssignmentAfterAttributeInit]
-class A:
- def __init__(self):
- self.x = 1
- self.x = 2
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- FuncDef:2(
- __init__
- Args(
- Var(self))
- Block:2(
- AssignmentStmt:3(
- MemberExpr:3(
- NameExpr(self [l])
- x*)
- IntExpr(1))
- AssignmentStmt:4(
- MemberExpr:4(
- NameExpr(self [l])
- x)
- IntExpr(2))))))
-
-[case testOverloadedMethod]
-from typing import overload
-class A:
- @overload
- def f(self) -> None: self
- @overload
- def f(self, x: 'A') -> None: self
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [overload])
- ClassDef:2(
- A
- OverloadedFuncDef:3(
- Overload(def (self: __main__.A), \
- def (self: __main__.A, x: __main__.A))
- Decorator:3(
- Var(f)
- NameExpr(overload [typing.overload])
- FuncDef:4(
- f
- Args(
- Var(self))
- def (self: __main__.A)
- Block:4(
- ExpressionStmt:4(
- NameExpr(self [l])))))
- Decorator:5(
- Var(f)
- NameExpr(overload [typing.overload])
- FuncDef:6(
- f
- Args(
- Var(self)
- Var(x))
- def (self: __main__.A, x: __main__.A)
- Block:6(
- ExpressionStmt:6(
- NameExpr(self [l]))))))))
-
-[case testAttributeWithoutType]
-class A:
- a = object
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- AssignmentStmt:2(
- NameExpr(a* [m])
- NameExpr(object [builtins.object]))))
-
-[case testDataAttributeRefInClassBody]
-class A:
- x = 1
- y = x
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- AssignmentStmt:2(
- NameExpr(x* [m])
- IntExpr(1))
- AssignmentStmt:3(
- NameExpr(y* [m])
- NameExpr(x [m]))))
-
-[case testMethodRefInClassBody]
-class A:
- def f(self): pass
- g = f
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- FuncDef:2(
- f
- Args(
- Var(self))
- Block:2(
- PassStmt:2()))
- AssignmentStmt:3(
- NameExpr(g* [m])
- NameExpr(f [m]))))
-
-[case testIfStatementInClassBody]
-class A:
- if A:
- x = 1
- else:
- x = 2
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- IfStmt:2(
- If(
- NameExpr(A [__main__.A]))
- Then(
- AssignmentStmt:3(
- NameExpr(x* [m])
- IntExpr(1)))
- Else(
- AssignmentStmt:5(
- NameExpr(x [m])
- IntExpr(2))))))
-
-[case testForStatementInClassBody]
-class A:
- for x in [1, 2]:
- y = x
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- ForStmt:2(
- NameExpr(x* [m])
- ListExpr:2(
- IntExpr(1)
- IntExpr(2))
- Block:2(
- AssignmentStmt:3(
- NameExpr(y* [m])
- NameExpr(x [m]))))))
-
-[case testReferenceToClassWithinFunction]
-def f():
- class A: pass
- A
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- ClassDef:2(
- A
- PassStmt:2())
- ExpressionStmt:3(
- NameExpr(A [l])))))
-
-[case testReferenceToClassWithinClass]
-class A:
- class B: pass
- B
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- ClassDef:2(
- B
- PassStmt:2())
- ExpressionStmt:3(
- NameExpr(B [__main__.A.B]))))
-
-[case testClassWithBaseClassWithinClass]
-class A:
- class B: pass
- class C(B): pass
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- ClassDef:2(
- B
- PassStmt:2())
- ClassDef:3(
- C
- BaseType(
- __main__.A.B)
- PassStmt:3())))
-
-[case testDeclarationReferenceToNestedClass]
-def f() -> None:
- class A: pass
- x = None # type: A
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- def ()
- Block:1(
- ClassDef:2(
- A
- PassStmt:2())
- AssignmentStmt:3(
- NameExpr(x [l])
- NameExpr(None [builtins.None])
- A))))
-
-[case testAccessToLocalInOuterScopeWithinNestedClass]
-def f(x):
- class A:
- y = x
- def g(self):
- z = x
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- Block:1(
- ClassDef:2(
- A
- AssignmentStmt:3(
- NameExpr(y* [m])
- NameExpr(x [l]))
- FuncDef:4(
- g
- Args(
- Var(self))
- Block:4(
- AssignmentStmt:5(
- NameExpr(z* [l])
- NameExpr(x [l]))))))))
-
-[case testQualifiedMetaclass]
-import abc
-class A(metaclass=abc.ABCMeta): pass
-[out]
-MypyFile:1(
- Import:1(abc)
- ClassDef:2(
- A
- Metaclass(abc.ABCMeta)
- PassStmt:2()))
-
-[case testStaticMethod]
-class A:
- @staticmethod
- def f(z: int) -> str: pass
-[builtins fixtures/staticmethod.pyi]
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- Decorator:2(
- Var(f)
- FuncDef:3(
- f
- Args(
- Var(z))
- def (z: builtins.int) -> builtins.str
- Static
- Block:3(
- PassStmt:3())))))
-
-[case testStaticMethodWithNoArgs]
-class A:
- @staticmethod
- def f() -> str: pass
-[builtins fixtures/staticmethod.pyi]
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- Decorator:2(
- Var(f)
- FuncDef:3(
- f
- def () -> builtins.str
- Static
- Block:3(
- PassStmt:3())))))
-
-[case testClassMethod]
-class A:
- @classmethod
- def f(cls, z: int) -> str: pass
-[builtins fixtures/classmethod.pyi]
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- Decorator:2(
- Var(f)
- FuncDef:3(
- f
- Args(
- Var(cls)
- Var(z))
- def (cls: def () -> __main__.A, z: builtins.int) -> builtins.str
- Class
- Block:3(
- PassStmt:3())))))
-
-[case testClassMethodWithNoArgs]
-class A:
- @classmethod
- def f(cls) -> str: pass
-[builtins fixtures/classmethod.pyi]
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- Decorator:2(
- Var(f)
- FuncDef:3(
- f
- Args(
- Var(cls))
- def (cls: def () -> __main__.A) -> builtins.str
- Class
- Block:3(
- PassStmt:3())))))
-
-[case testProperty]
-import typing
-class A:
- @property
- def f(self) -> str: pass
-[builtins fixtures/property.pyi]
-[out]
-MypyFile:1(
- Import:1(typing)
- ClassDef:2(
- A
- Decorator:3(
- Var(f)
- FuncDef:4(
- f
- Args(
- Var(self))
- def (self: __main__.A) -> builtins.str
- Property
- Block:4(
- PassStmt:4())))))
-
-[case testClassDecorator]
-import typing
-@object
-class A: pass
-[out]
-MypyFile:1(
- Import:1(typing)
- ClassDef:2(
- A
- Decorators(
- NameExpr(object [builtins.object]))
- PassStmt:3()))
-
-[case testClassAttributeAsMethodDefaultArgumentValue]
-import typing
-class A:
- X = 1
- def f(self, x : int = X) -> None: pass
-[out]
-MypyFile:1(
- Import:1(typing)
- ClassDef:2(
- A
- AssignmentStmt:3(
- NameExpr(X* [m])
- IntExpr(1))
- FuncDef:4(
- f
- Args(
- Var(self)
- Var(x))
- def (self: __main__.A, x: builtins.int =)
- Init(
- AssignmentStmt:4(
- NameExpr(x [l])
- NameExpr(X [m])))
- Block:4(
- PassStmt:4()))))
-
-[case testInvalidBaseClass]
-from typing import Any, Callable
-class A(None): pass
-class B(Any): pass
-class C(Callable[[], int]): pass
-[out]
-main: error: Invalid base class
-main:4: error: Invalid base class
-
-[case testTupleAsBaseClass]
-import m
-[file m.pyi]
-from typing import Tuple
-class A(Tuple[int, str]): pass
-[builtins fixtures/tuple.pyi]
-[out]
-MypyFile:1(
- Import:1(m))
-MypyFile:1(
- tmp/m.pyi
- ImportFrom:1(typing, [Tuple])
- ClassDef:2(
- A
- TupleType(
- Tuple[builtins.int, builtins.str])
- BaseType(
- builtins.tuple[Any])
- PassStmt:2()))
-
-[case testBaseClassFromIgnoredModule]
-import m # type: ignore
-class B(m.A):
- pass
-[out]
-MypyFile:1(
- Import:1(m)
- ClassDef:2(
- B
- FallbackToAny
- BaseType(
- builtins.object)
- PassStmt:3())
- IgnoredLines(1))
-
-[case testBaseClassFromIgnoredModuleUsingImportFrom]
-from m import A # type: ignore
-class B(A, int):
- pass
-[out]
-MypyFile:1(
- ImportFrom:1(m, [A])
- ClassDef:2(
- B
- FallbackToAny
- BaseType(
- builtins.int)
- PassStmt:3())
- IgnoredLines(1))
-
-[case testBaseClassWithExplicitAnyType]
-from typing import Any
-A = 1 # type: Any
-class B(A):
- pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Any])
- AssignmentStmt:2(
- NameExpr(A [__main__.A])
- IntExpr(1)
- Any)
- ClassDef:3(
- B
- FallbackToAny
- BaseType(
- builtins.object)
- PassStmt:4()))
diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test
deleted file mode 100644
index aa45dde..0000000
--- a/test-data/unit/semanal-errors.test
+++ /dev/null
@@ -1,1336 +0,0 @@
-[case testUndefinedVariableInGlobalStatement]
-import typing
-x
-y
-[out]
-main:2: error: Name 'x' is not defined
-main:3: error: Name 'y' is not defined
-
-[case testUndefinedVariableWithinFunctionContext]
-import typing
-def f() -> None:
- x
-y
-[out]
-main:3: error: Name 'x' is not defined
-main:4: error: Name 'y' is not defined
-
-[case testMethodScope]
-import typing
-class A:
- def f(self): pass
-f
-[out]
-main:4: error: Name 'f' is not defined
-
-[case testMethodScope2]
-import typing
-class A:
- def f(self): pass
-class B:
- def g(self) -> None:
- f # error
- g # error
-[out]
-main:6: error: Name 'f' is not defined
-main:7: error: Name 'g' is not defined
-
-[case testInvalidType]
-import typing
-x = None # type: X
-[out]
-main:2: error: Name 'X' is not defined
-
-[case testInvalidGenericArg]
-from typing import TypeVar, Generic
-t = TypeVar('t')
-class A(Generic[t]): pass
-x = 0 # type: A[y]
-[out]
-main:4: error: Name 'y' is not defined
-
-[case testInvalidNumberOfGenericArgsInTypeDecl]
-from typing import TypeVar, Generic
-t = TypeVar('t')
-class A: pass
-class B(Generic[t]): pass
-x = 0 # type: B[A, A]
-y = 0 # type: A[A]
-[out]
-main:5: error: "B" expects 1 type argument, but 2 given
-main:6: error: "A" expects no type arguments, but 1 given
-
-[case testInvalidNumberOfGenericArgsInUndefinedArg]
-
-class A: pass
-x = None # type: A[int] # E: "A" expects no type arguments, but 1 given
-[out]
-
-[case testInvalidNumberOfGenericArgsInNestedBlock]
-
-class A: pass
-class B:
- def f(self) -> None:
- while 1:
- x = None # type: A[int] \
- # E: "A" expects no type arguments, but 1 given
-[out]
-
-[case testInvalidNumberOfGenericArgsInSignature]
-import typing
-class A: pass
-def f() -> A[int]: pass # E: "A" expects no type arguments, but 1 given
-[out]
-
-[case testInvalidNumberOfGenericArgsInOverloadedSignature]
-from typing import overload
-class A: pass
-@overload
-def f(): pass
-@overload # E: "A" expects no type arguments, but 1 given
-def f(x: A[int]) -> None: pass
-[out]
-
-[case testInvalidNumberOfGenericArgsInBaseType]
-import typing
-class A: pass
-class B(A[int]): pass # E: "A" expects no type arguments, but 1 given
-[out]
-
-[case testInvalidNumberOfGenericArgsInCast]
-from typing import cast
-class A: pass
-x = cast(A[int], 1) # E: "A" expects no type arguments, but 1 given
-[out]
-
-[case testInvalidNumberOfGenericArgsInTypeApplication]
-import typing
-class A: pass
-class B: pass
-x = A[B[int]]() # E: "B" expects no type arguments, but 1 given
-[out]
-
-[case testInvalidNumberOfGenericArgsInNestedGenericType]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]): pass
-class B: pass
-def f() -> A[B[int]]: pass # E: "B" expects no type arguments, but 1 given
-[out]
-
-[case testInvalidNumberOfGenericArgsInTupleType]
-from typing import Tuple
-class A: pass
-x = None # type: Tuple[A[int]] # E: "A" expects no type arguments, but 1 given
-[out]
-
-[case testInvalidNumberOfGenericArgsInFunctionType]
-from typing import Callable
-class A: pass
-x = None # type: Callable[[A[int]], int] # E: "A" expects no type arguments, but 1 given
-y = None # type: Callable[[], A[int]] # E: "A" expects no type arguments, but 1 given
-[out]
-
-[case testVarOrFuncAsType]
-import typing
-def f(): pass
-x = 1
-y = 0 # type: f
-z = 0 # type: x
-[out]
-main:4: error: Invalid type "__main__.f"
-main:5: error: Invalid type "__main__.x"
-
-[case testGlobalVarRedefinition]
-import typing
-class A: pass
-x = 0 # type: A
-x = 0 # type: A
-[out]
-main:4: error: Name 'x' already defined
-
-[case testLocalVarRedefinition]
-import typing
-class A: pass
-def f() -> None:
- x = 0 # type: A
- x = 0 # type: A
-[out]
-main:5: error: Name 'x' already defined
-
-[case testClassVarRedefinition]
-import typing
-class A:
- x = 0 # type: object
- x = 0 # type: object
-[out]
-main:4: error: Name 'x' already defined
-
-[case testMultipleClassDefinitions]
-import typing
-class A: pass
-class A: pass
-[out]
-main:3: error: Name 'A' already defined
-
-[case testMultipleMixedDefinitions]
-import typing
-x = 1
-def x(): pass
-class x: pass
-[out]
-main:3: error: Name 'x' already defined
-main:4: error: Name 'x' already defined
-
-[case testNameNotImported]
-import typing
-from m import y
-x
-[file m.py]
-x = y = 1
-[out]
-main:3: error: Name 'x' is not defined
-
-[case testMissingNameInImportFrom]
-import typing
-from m import y
-[file m.py]
-x = 1
-[out]
-main:2: error: Module 'm' has no attribute 'y'
-
-[case testMissingModule]
-import typing
-import m
-[out]
-main:2: error: Cannot find module named 'm'
-main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-
-[case testMissingModule2]
-import typing
-from m import x
-[out]
-main:2: error: Cannot find module named 'm'
-main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-
-[case testMissingModule3]
-import typing
-from m import *
-[out]
-main:2: error: Cannot find module named 'm'
-main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-
-[case testMissingModuleRelativeImport]
-import typing
-import m
-[file m/__init__.py]
-from .x import y
-[out]
-tmp/m/__init__.py:1: error: Cannot find module named 'm.x'
-tmp/m/__init__.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-
-[case testMissingModuleRelativeImport2]
-import typing
-import m.a
-[file m/__init__.py]
-[file m/a.py]
-from .x import y
-[out]
-tmp/m/a.py:1: error: Cannot find module named 'm.x'
-tmp/m/a.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-
-[case testModuleNotImported]
-import typing
-import _m
-_n.x
-[file _m.py]
-import _n
-[file _n.py]
-x = 1
-[out]
-main:3: error: Name '_n' is not defined
-
-[case testImportAsteriskPlusUnderscore]
-import typing
-from _m import *
-_x
-__x__
-[file _m.py]
-_x = __x__ = 1
-[out]
-main:3: error: Name '_x' is not defined
-main:4: error: Name '__x__' is not defined
-
-[case testRelativeImportAtTopLevelModule]
-from . import m
-[out]
-main:1: error: No parent module -- cannot perform relative import
-
-[case testRelativeImportAtTopLevelModule2]
-from .. import m
-[out]
-main:1: error: No parent module -- cannot perform relative import
-
-[case testUndefinedTypeWithQualifiedName]
-import typing
-import m
-def f() -> m.c: pass
-def g() -> n.c: pass
-[file m.py]
-[out]
-main:3: error: Name 'm.c' is not defined
-main:4: error: Name 'n' is not defined
-
-[case testMissingPackage]
-import typing
-import m.n
-[out]
-main:2: error: Cannot find module named 'm'
-main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Cannot find module named 'm.n'
-
-[case testMissingPackage]
-import typing
-from m.n import x
-from a.b import *
-[out]
-main:2: error: Cannot find module named 'm.n'
-main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:3: error: Cannot find module named 'a.b'
-
-[case testErrorInImportedModule]
-import m
-[file m.py]
-import typing
-x = y
-[out]
-tmp/m.py:2: error: Name 'y' is not defined
-
-[case testErrorInImportedModule2]
-import m.n
-[file m/__init__.py]
-[file m/n.py]
-import k
-[file k.py]
-import typing
-x = y
-[out]
-tmp/k.py:2: error: Name 'y' is not defined
-
-[case testPackageWithoutInitFile]
-import typing
-import m.n
-m.n.x
-[file m/n.py]
-x = 1
-[out]
-main:2: error: Cannot find module named 'm'
-main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Cannot find module named 'm.n'
-
-[case testBreakOutsideLoop]
-break
-def f():
- break
-[out]
-main:1: error: 'break' outside loop
-main:3: error: 'break' outside loop
-
-[case testContinueOutsideLoop]
-continue
-def f():
- continue
-[out]
-main:1: error: 'continue' outside loop
-main:3: error: 'continue' outside loop
-
-[case testReturnOutsideFunction]
-def f(): pass
-return
-return 1
-[out]
-main:2: error: 'return' outside function
-main:3: error: 'return' outside function
-
-[case testYieldOutsideFunction]
-yield 1
-yield
-[out]
-main:1: error: 'yield' outside function
-main:2: error: 'yield' outside function
-
-[case testInvalidLvalues1]
-1 = 1
-[out]
-main:1: error: can't assign to literal
-
-[case testInvalidLvalues2]
-(1) = 1
-[out]
-main:1: error: can't assign to literal
-
-[case testInvalidLvalues3]
-(1, 1) = 1
-[out]
-main:1: error: can't assign to literal
-
-[case testInvalidLvalues4]
-[1, 1] = 1
-[out]
-main:1: error: can't assign to literal
-
-[case testInvalidLvalues5]
-() = 1
-[out]
-main:1: error: can't assign to ()
-
-[case testInvalidLvalues6]
-x = y = z = 1 # ok
-x, (y, 1) = 1
-[out]
-main:2: error: can't assign to literal
-
-[case testInvalidLvalues7]
-x, [y, 1] = 1
-[out]
-main:1: error: can't assign to literal
-
-[case testInvalidLvalues8]
-x, [y, [z, 1]] = 1
-[out]
-main:1: error: can't assign to literal
-
-[case testInvalidLvalues9]
-x, (y) = 1 # ok
-x, (y, (z, z)) = 1 # ok
-x, (y, (z, 1)) = 1
-[out]
-main:3: error: can't assign to literal
-
-[case testInvalidLvalues10]
-x + x = 1
-[out]
-main:1: error: can't assign to operator
-
-[case testInvalidLvalues11]
--x = 1
-[out]
-main:1: error: can't assign to operator
-
-[case testInvalidLvalues12]
-1.1 = 1
-[out]
-main:1: error: can't assign to literal
-
-[case testInvalidLvalues13]
-'x' = 1
-[out]
-main:1: error: can't assign to literal
-
-[case testInvalidLvalues14]
-x() = 1
-[out]
-main:1: error: can't assign to function call
-
-[case testTwoStarExpressions]
-a, *b, *c = 1
-*a, (*b, c) = 1
-a, (*b, *c) = 1
-[*a, *b] = 1
-[out]
-main:1: error: Two starred expressions in assignment
-main:3: error: Two starred expressions in assignment
-main:4: error: Two starred expressions in assignment
-
-[case testTwoStarExpressionsInForStmt]
-z = 1
-for a, *b, *c in z:
- pass
-for *a, (*b, c) in z:
- pass
-for a, (*b, *c) in z:
- pass
-for [*a, *b] in z:
- pass
-[out]
-main:2: error: Two starred expressions in assignment
-main:6: error: Two starred expressions in assignment
-main:8: error: Two starred expressions in assignment
-
-[case testTwoStarExpressionsInGeneratorExpr]
-(a for a, *b, *c in [])
-(a for *a, (*b, c) in [])
-(a for a, (*b, *c) in [])
-[out]
-main:1: error: Name 'a' is not defined
-main:1: error: Two starred expressions in assignment
-main:3: error: Two starred expressions in assignment
-
-[case testStarExpressionRhs]
-b = 1
-c = 1
-d = 1
-a = *b
-[out]
-main:4: error: Can use starred expression only as assignment target
-
-[case testStarExpressionInExp]
-a = 1
-*a + 1
-[out]
-main:2: error: Can use starred expression only as assignment target
-
-[case testInvalidDel1]
-x = 1
-del x(1) # E: can't delete function call
-[out]
-
-[case testInvalidDel2]
-x = 1
-del x + 1 # E: can't delete operator
-[out]
-
-[case testInvalidDel3]
-del z # E: Name 'z' is not defined
-[out]
-
-[case testFunctionTvarScope]
-from typing import TypeVar
-t = TypeVar('t')
-def f(x: t) -> t: pass
-x = 0 # type: t
-[out]
-main:4: error: Invalid type "__main__.t"
-
-[case testClassTvarScope]
-from typing import Generic, TypeVar
-t = TypeVar('t')
-class c(Generic[t]): pass
-x = 0 # type: t
-[out]
-main:4: error: Invalid type "__main__.t"
-
-[case testExpressionRefersToTypeVariable]
-from typing import TypeVar, Generic
-t = TypeVar('t')
-class c(Generic[t]):
- def f(self) -> None: x = t
-def f(y: t): x = t
-[out]
-main:4: error: 't' is a type variable and only valid in type context
-main:5: error: 't' is a type variable and only valid in type context
-
-[case testMissingSelf]
-import typing
-class A:
- def f(): pass
-[out]
-main:3: error: Method must have at least one argument
-
-[case testInvalidBaseClass]
-import typing
-class A(B): pass
-[out]
-main:2: error: Name 'B' is not defined
-
-[case testSuperOutsideClass]
-class A: pass
-super().x
-def f() -> None: super().y
-[out]
-main:2: error: "super" used outside class
-main:3: error: "super" used outside class
-
-[case testMissingSelfInMethod]
-import typing
-class A:
- def f() -> None: pass
- def g(): pass
-[out]
-main:3: error: Method must have at least one argument
-main:4: error: Method must have at least one argument
-
-[case testMultipleMethodDefinition]
-import typing
-class A:
- def f(self) -> None: pass
- def g(self) -> None: pass
- def f(self, x: object) -> None: pass
-[out]
-main:5: error: Name 'f' already defined
-
-[case testInvalidGlobalDecl]
-import typing
-def f() -> None:
- global x
- x = None
-[out]
-main:4: error: Name 'x' is not defined
-
-[case testInvalidNonlocalDecl]
-import typing
-def f():
- def g() -> None:
- nonlocal x
- x = None
-[out]
-main:4: error: No binding for nonlocal 'x' found
-main:5: error: Name 'x' is not defined
-
-[case testNonlocalDeclNotMatchingGlobal]
-import typing
-x = None
-def f() -> None:
- nonlocal x
- x = None
-[out]
-main:4: error: No binding for nonlocal 'x' found
-main:5: error: Name 'x' is not defined
-
-[case testNonlocalDeclConflictingWithParameter]
-import typing
-def g():
- x = None
- def f(x) -> None:
- nonlocal x
- x = None
-[out]
-main:5: error: Name 'x' is already defined in local scope before nonlocal declaration
-
-[case testNonlocalDeclOutsideFunction]
-x = 2
-nonlocal x
-[out]
-main:2: error: nonlocal declaration not allowed at module level
-
-[case testGlobalAndNonlocalDecl]
-import typing
-x = 1
-def f():
- x = 1
- def g() -> None:
- global x
- nonlocal x
- x = None
-[out]
-main:7: error: Name 'x' is nonlocal and global
-
-[case testNonlocalAndGlobalDecl]
-import typing
-x = 1
-def f():
- x = 1
- def g() -> None:
- nonlocal x
- global x
- x = None
-[out]
-main:7: error: Name 'x' is nonlocal and global
-
-[case testNestedFunctionAndScoping]
-import typing
-def f(x) -> None:
- def g(y):
- z = x
- z
- y
- x
-[out]
-main:5: error: Name 'z' is not defined
-main:6: error: Name 'y' is not defined
-
-[case testMultipleNestedFunctionDef]
-import typing
-def f(x) -> None:
- def g(): pass
- x = 1
- def g(): pass
-[out]
-main:5: error: Name 'g' already defined
-
-[case testRedefinedOverloadedFunction]
-from typing import overload, Any
-def f() -> None:
- @overload
- def p(o: object) -> None: pass # no error
- @overload
- def p(o: Any) -> None: pass # no error
- x = 1
- def p(): pass # fail
-[out]
-main:8: error: Name 'p' already defined
-
-[case testNestedFunctionInMethod]
-import typing
-class A:
- def f(self) -> None:
- def g() -> None:
- x
- y
-[out]
-main:5: error: Name 'x' is not defined
-main:6: error: Name 'y' is not defined
-
-[case testImportScope]
-import typing
-def f() -> None:
- import x
-x.y # E: Name 'x' is not defined
-[file x.py]
-y = 1
-[out]
-
-[case testImportScope2]
-import typing
-def f() -> None:
- from x import y
- y
-y # E: Name 'y' is not defined
-[file x.py]
-y = 1
-[out]
-
-[case testImportScope3]
-import typing
-def f() -> None:
- from x import *
- y
-y # E: Name 'y' is not defined
-[file x.py]
-y = 1
-[out]
-
-[case testImportScope4]
-import typing
-class A:
- from x import *
- y
-y # E: Name 'y' is not defined
-[file x.py]
-y = 1
-[out]
-
-[case testScopeOfNestedClass]
-import typing
-def f():
- class A: pass
- A
-A # E: Name 'A' is not defined
-[out]
-
-[case testScopeOfNestedClass2]
-import typing
-class A:
- class B: pass
-B # E: Name 'B' is not defined
-[out]
-
-[case testScopeOfNestedClass3]
-import typing
-class A:
- def f(self):
- class B: pass
- B # E: Name 'B' is not defined
-B # E: Name 'B' is not defined
-[out]
-
-[case testInvalidNestedClassReferenceInDecl]
-import typing
-class A: pass
-foo = 0 # type: A.x # E: Name 'A.x' is not defined
-[out]
-
-[case testTvarScopingWithNestedClass]
-from typing import TypeVar, Generic
-t = TypeVar('t')
-s = TypeVar('s')
-class A(Generic[t]):
- class B(Generic[s]):
- x = 0 # type: A[s]
- y = 0 # type: A[t] # E: Invalid type "__main__.t"
- z = 0 # type: A[s] # E: Invalid type "__main__.s"
- a = 0 # type: A[t]
-[out]
-
-[case testTestExtendPrimitives]
-class C(bool): pass # E: 'bool' is not a valid base class
-class A(int): pass # ok
-class B(float): pass # ok
-class D(str): pass # ok
-[builtins fixtures/primitives.pyi]
-[out]
-
-[case testCyclicInheritance]
-class A(A): pass # E: Cycle in inheritance hierarchy
-[out]
-
-[case testAssignToTypeDef]
-import typing
-class A: pass
-A = None # E: Invalid assignment target
-[out]
-
-[case testInvalidCastTargetSyntax]
-from typing import cast, TypeVar, Generic
-t = TypeVar('t')
-class C(Generic[t]): pass
-cast(str + str, None) # E: Cast target is not a type
-cast(C[str][str], None) # E: Cast target is not a type
-cast(C[str + str], None) # E: Cast target is not a type
-cast([int, str], None) # E: Invalid type
-[out]
-
-[case testInvalidCastTargetType]
-from typing import cast
-x = 0
-cast(x, None) # E: Invalid type "__main__.x"
-cast(t, None) # E: Name 't' is not defined
-cast(__builtins__.x, None) # E: Name '__builtins__.x' is not defined
-[out]
-
-[case testInvalidCastTargetType2]
-from typing import cast
-x = 0
-cast(str[str], None) # E: "str" expects no type arguments, but 1 given
-[out]
-
-[case testInvalidNumberOfArgsToCast]
-from typing import cast
-cast(str) # E: 'cast' expects 2 arguments
-cast(str, None, None) # E: 'cast' expects 2 arguments
-[out]
-
-[case testInvalidKindsOfArgsToCast]
-from typing import cast
-cast(str, *None) # E: 'cast' must be called with 2 positional arguments
-cast(str, target=None) # E: 'cast' must be called with 2 positional arguments
-[out]
-
-[case testInvalidAnyCall]
-from typing import Any
-Any(str, None) # E: Any(...) is no longer supported. Use cast(Any, ...) instead
-Any(arg=str) # E: Any(...) is no longer supported. Use cast(Any, ...) instead
-[out]
-
-[case testTypeListAsType]
-def f(x:[int, str]) -> None: # E: Invalid type
- pass
-[out]
-
-[case testInvalidFunctionType]
-from typing import Callable
-x = None # type: Callable[int, str]
-y = None # type: Callable[int]
-z = None # type: Callable[int, int, int]
-[out]
-main:2: error: The first argument to Callable must be a list of types or "..."
-main:3: error: Invalid function type
-main:4: error: Invalid function type
-
-[case testAbstractGlobalFunction]
-import typing
-from abc import abstractmethod
-@abstractmethod
-def foo(): pass
-[out]
-main:3: error: 'abstractmethod' used with a non-method
-
-[case testAbstractNestedFunction]
-import typing
-from abc import abstractmethod
-def g() -> None:
- @abstractmethod
- def foo(): pass
-[out]
-main:4: error: 'abstractmethod' used with a non-method
-
-[case testInvalidTypeDeclaration]
-import typing
-def f(): pass
-f() = 1 # type: int
-[out]
-main:3: error: can't assign to function call
-
-[case testIndexedAssignmentWithTypeDeclaration]
-import typing
-None[1] = 1 # type: int
-[out]
-main:2: error: Unexpected type declaration
-
-[case testNonSelfMemberAssignmentWithTypeDeclaration]
-import typing
-None.x = 1 # type: int
-[out]
-main:2: error: Type cannot be declared in assignment to non-self attribute
-
-[case testNonSelfMemberAssignmentWithTypeDeclarationInMethod]
-import typing
-class A:
- def f(self, x) -> None:
- x.y = 1 # type: int
-[out]
-main:4: error: Type cannot be declared in assignment to non-self attribute
-
-[case testInvalidTypeInTypeApplication]
-from typing import TypeVar, Generic
-t = TypeVar('t')
-class A(Generic[t]): pass
-A[TypeVar] # E: Invalid type "typing.TypeVar"
-[out]
-
-[case testInvalidTypeInTypeApplication2]
-from typing import TypeVar, Generic
-t = TypeVar('t')
-class A(Generic[t]): pass
-A[1] # E: Type expected within [...]
-[out]
-
-[case testVariableDeclWithInvalidNumberOfTypes]
-x, y = 1, 2 # type: int, str, int # E: Incompatible number of tuple items
-[out]
-
-[case testVariableDeclWithInvalidNumberOfTypesNested]
-x, (y, z) = 1, (2, 3) # type: int, (str, int, int) # E: Incompatible number of tuple items
-[out]
-
-[case testVariableDeclWithInvalidNumberOfTypesNested2]
-x, (y, z) = 1, (2, 3) # type: int, (str, ) # E: Incompatible number of tuple items
-[out]
-
-[case testVariableDeclWithInvalidNumberOfTypesNested3]
-x, (y, z) = 1, (2, 3) # type: int, str # E: Tuple type expected for multiple variables
-[out]
-
-[case testVariableDeclWithInvalidNumberOfTypesNested4]
-x, (y, z) = 1, (2, 3) # type: int, str, int # E: Incompatible number of tuple items
-[out]
-
-[case testVariableDeclWithInvalidNumberOfTypesNested5]
-x, (y, ) = 1, (2, ) # type: int, str # E: Tuple type expected for multiple variables
-[out]
-
-[case testVariableDeclWithInvalidType]
-x, y = 1, 2 # type: int # E: Tuple type expected for multiple variables
-[out]
-
-[case testInvalidLvalueWithExplicitType]
-a = 1
-a() = None # type: int # E: can't assign to function call
-[out]
-
-[case testInvalidLvalueWithExplicitType2]
-a = 1
-a[1] = None # type: int # E: Unexpected type declaration
-a.x = None # type: int \
- # E: Type cannot be declared in assignment to non-self attribute
-[out]
-
-[case testInvalidLvalueWithExplicitType3]
-a = 1
-a.y, a.x = None, None # type: int, int \
- # E: Type cannot be declared in assignment to non-self attribute
-a[1], a[2] = None, None # type: int, int \
- # E: Unexpected type declaration
-[out]
-
-[case testMissingGenericImport]
-from typing import TypeVar
-T = TypeVar('T')
-class A(Generic[T]): pass
-[out]
-main:3: error: Name 'Generic' is not defined
-
-[case testInvalidTypeWithinGeneric]
-from typing import Generic
-class A(Generic[int]): pass # E: Free type variable expected in Generic[...]
-[out]
-
-[case testInvalidTypeWithinNestedGenericClass]
-from typing import Generic, TypeVar
-T = TypeVar('T')
-class A(Generic[T]):
- class B(Generic[T]): pass \
- # E: Free type variable expected in Generic[...]
-[out]
-
-[case testIncludingGenericTwiceInBaseClassList]
-from typing import Generic, TypeVar
-T = TypeVar('T')
-S = TypeVar('S')
-class A(Generic[T], Generic[S]): pass \
- # E: Duplicate Generic in bases
-[out]
-
-[case testInvalidMetaclass]
-class A(metaclass=x): pass # E: Name 'x' is not defined
-[out]
-
-[case testInvalidQualifiedMetaclass]
-import abc
-class A(metaclass=abc.Foo): pass # E: Name 'abc.Foo' is not defined
-[out]
-
-[case testNonClassMetaclass]
-def f(): pass
-class A(metaclass=f): pass # E: Invalid metaclass 'f'
-[out]
-
-[case testInvalidTypevarArguments]
-from typing import TypeVar
-a = TypeVar() # E: Too few arguments for TypeVar()
-b = TypeVar(x='b') # E: TypeVar() expects a string literal as first argument
-c = TypeVar(1) # E: TypeVar() expects a string literal as first argument
-d = TypeVar('D') # E: String argument 1 'D' to TypeVar(...) does not match variable name 'd'
-e = TypeVar('e', int, str, x=1) # E: Unexpected argument to TypeVar(): x
-f = TypeVar('f', (int, str)) # E: Type expected
-g = TypeVar('g', x=(int, str)) # E: Unexpected argument to TypeVar(): x
-h = TypeVar('h', bound=1) # E: TypeVar 'bound' must be a type
-[out]
-
-[case testMoreInvalidTypevarArguments]
-from typing import TypeVar
-T = TypeVar('T', int, str, bound=bool) # E: TypeVar cannot have both values and an upper bound
-S = TypeVar('S', covariant=True, contravariant=True) \
- # E: TypeVar cannot be both covariant and contravariant
-[builtins fixtures/bool.pyi]
-
-[case testInvalidTypevarValues]
-from typing import TypeVar
-b = TypeVar('b', *[int]) # E: Unexpected argument to TypeVar()
-c = TypeVar('c', int, 2) # E: Type expected
-[out]
-
-[case testObsoleteTypevarValuesSyntax]
-from typing import TypeVar
-a = TypeVar('a', values=(int, str))
-[out]
-main:2: error: TypeVar 'values' argument not supported
-main:2: error: Use TypeVar('T', t, ...) instead of TypeVar('T', values=(t, ...))
-
-[case testLocalTypevarScope]
-from typing import TypeVar
-def f() -> None:
- T = TypeVar('T')
-def g(x: T) -> None: pass # E: Name 'T' is not defined
-[out]
-
-[case testClassTypevarScope]
-from typing import TypeVar
-class A:
- T = TypeVar('T')
-def g(x: T) -> None: pass # E: Name 'T' is not defined
-[out]
-
-[case testRedefineVariableAsTypevar]
-from typing import TypeVar
-x = 0
-x = TypeVar('x') # E: Cannot redefine 'x' as a type variable
-[out]
-
-[case testTypevarWithType]
-from typing import TypeVar
-x = TypeVar('x') # type: int # E: Cannot declare the type of a type variable
-[out]
-
-[case testRedefineTypevar]
-from typing import TypeVar
-t = TypeVar('t')
-t = 1 # E: Invalid assignment target
-[out]
-
-[case testRedefineTypevar2]
-from typing import TypeVar
-t = TypeVar('t')
-def t(): pass # E: Name 't' already defined
-[out]
-
-[case testRedefineTypevar3]
-from typing import TypeVar
-t = TypeVar('t')
-class t: pass # E: Name 't' already defined
-[out]
-
-[case testRedefineTypevar4]
-from typing import TypeVar
-t = TypeVar('t')
-from typing import Generic as t # E: Name 't' already defined
-[out]
-
-[case testInvalidStrLiteralType]
-def f(x: 'foo'): pass # E: Name 'foo' is not defined
-[out]
-
-[case testInvalidStrLiteralType2]
-def f(x: 'int['): pass # E: syntax error in type comment
-[out]
-
-[case testInconsistentOverload]
-from typing import overload
-def dec(x): pass
-@overload
-def f(): pass
-@dec # E: 'overload' decorator expected
-def f(): pass
-[out]
-
-[case testInconsistentOverload2]
-from typing import overload
-def dec(x): pass
-@dec # E: 'overload' decorator expected
-def f(): pass
-@overload
-def f(): pass
-[out]
-
-[case testMissingOverloadDecorator]
-from typing import overload
-def dec(x): pass
-@dec # E: 'overload' decorator expected
-def f(): pass
-@dec # E: 'overload' decorator expected
-def f(): pass
-[out]
-
-[case testIncompatibleSignatureInComment]
-import typing
-def f(): # type: (int) -> int
- pass
-def g(x): # type: () -> int
- pass
-[out]
-main:2: error: Type signature has too many arguments
-main:4: error: Type signature has too few arguments
-
-[case testStaticmethodAndNonMethod]
-import typing
-@staticmethod
-def f(): pass
-class A:
- def g(self) -> None:
- @staticmethod
- def h(): pass
-[builtins fixtures/staticmethod.pyi]
-[out]
-main:2: error: 'staticmethod' used with a non-method
-main:6: error: 'staticmethod' used with a non-method
-
-[case testClassmethodAndNonMethod]
-import typing
-@classmethod
-def f(): pass
-class A:
- def g(self) -> None:
- @classmethod
- def h(): pass
-[builtins fixtures/classmethod.pyi]
-[out]
-main:2: error: 'classmethod' used with a non-method
-main:6: error: 'classmethod' used with a non-method
-
-[case testNonMethodProperty]
-import typing
-@property # E: 'property' used with a non-method
-def f() -> int: pass
-[builtins fixtures/property.pyi]
-[out]
-
-[case testInvalidArgCountForProperty]
-import typing
-class A:
- @property
- def f(self, x) -> int: pass # E: Too many arguments
- @property
- def g() -> int: pass # E: Method must have at least one argument
-[builtins fixtures/property.pyi]
-[out]
-
-[case testOverloadedProperty]
-from typing import overload
-class A:
- @overload # E: Decorated property not supported
- @property
- def f(self) -> int: pass
- @property # E: Decorated property not supported
- @overload
- def f(self) -> int: pass
-[builtins fixtures/property.pyi]
-[out]
-
-[case testOverloadedProperty2]
-from typing import overload
-class A:
- @overload
- def f(self) -> int: pass
- @property # E: Decorated property not supported
- @overload
- def f(self) -> int: pass
-[builtins fixtures/property.pyi]
-[out]
-
-[case testDecoratedProperty]
-import typing
-def dec(f): pass
-class A:
- @dec # E: Decorated property not supported
- @property
- def f(self) -> int: pass
- @property # E: Decorated property not supported
- @dec
- def g(self) -> int: pass
-[builtins fixtures/property.pyi]
-[out]
-
-[case testImportTwoModulesWithSameNameInFunction]
-import typing
-def f() -> None:
- import x
- import y as x # E: Name 'x' already defined
- x.y
-[file x.py]
-y = 1
-[file y.py]
-[out]
-
-[case testImportTwoModulesWithSameNameInGlobalContext]
-import typing
-import x
-import y as x # E: Name 'x' already defined
-x.y
-[file x.py]
-y = 1
-[file y.py]
-[out]
-
-[case testListTypeAliasWithoutImport]
-import typing
-def f() -> List[int]: pass
-[builtins fixtures/list.pyi]
-[out]
-main:2: error: Name 'List' is not defined
-
-[case testImportObsoleteTypingFunction]
-from typing import Function # E: Module 'typing' has no attribute 'Function' (it's now called 'typing.Callable')
-from _m import Function # E: Module '_m' has no attribute 'Function'
-[file _m.py]
-[out]
-
-[case testTypeRefresToObsoleteTypingFunction]
-import typing
-import _m
-def f(x: typing.Function[[], None]) -> None: pass
-def g(x: _m.Function[[], None]) -> None: pass
-[file _m.py]
-[out]
-main:3: error: Name 'typing.Function' is not defined (it's now called 'typing.Callable')
---'
-main:4: error: Name '_m.Function' is not defined
-
-[case testUnqualifiedNameRefersToObsoleteTypingFunction]
-x = None # type: Function[[], None]
-[out]
-main:1: error: Name 'Function' is not defined
-main:1: note: (Did you mean 'typing.Callable'?)
-
-[case testInvalidWithTarget]
-def f(): pass
-with f() as 1: pass # E: can't assign to literal
-[out]
-
-[case testUseObsoleteNameForTypeVar]
-from typing import typevar
-t = typevar('t')
-[out]
-main:1: error: Module 'typing' has no attribute 'typevar' (it's now called 'typing.TypeVar')
---' (this fixes syntax highlighting)
-
-[case testUseObsoleteNameForTypeVar2]
-t = typevar('t')
-[out]
-main:1: error: Name 'typevar' is not defined
-main:1: note: (Did you mean 'typing.TypeVar'?)
-
-[case testUseObsoleteNameForTypeVar3]
-import typing
-t = typing.typevar('t')
-[out]
-main:2: error: Module 'typing' has no attribute 'typevar' (it's now called 'typing.TypeVar')
---' (work around syntax highlighting :-/)
-
-[case testInvalidTypeAnnotation]
-import typing
-def f() -> None:
- 1[2] = 1 # type: int
-[out]
-main:3: error: Unexpected type declaration
-
-[case testInvalidTypeAnnotation2]
-import typing
-def f() -> None:
- f() = 1 # type: int
-[out]
-main:3: error: can't assign to function call
-
-[case testInvalidReferenceToAttributeOfOuterClass]
-class A:
- class X: pass
- class B:
- y = X # E: Name 'X' is not defined
-[out]
-
-[case testStubPackage]
-from m import x
-from m import y # E: Module 'm' has no attribute 'y'
-[file m/__init__.pyi]
-x = 1
-[out]
-
-[case testStubPackageSubModule]
-from m import x
-from m import y # E: Module 'm' has no attribute 'y'
-from m.m2 import y
-from m.m2 import z # E: Module 'm.m2' has no attribute 'z'
-[file m/__init__.pyi]
-x = 1
-[file m/m2.pyi]
-y = 1
-[out]
-
-[case testMissingStubForThirdPartyModule]
-import nosexcover
-[out]
-main:1: error: No library stub file for module 'nosexcover'
-main:1: note: (Stub files are from https://github.com/python/typeshed)
-
-[case testMissingStubForStdLibModule]
-import tabnanny
-[out]
-main:1: error: No library stub file for standard library module 'tabnanny'
-main:1: note: (Stub files are from https://github.com/python/typeshed)
-
-[case testMissingStubForTwoModules]
-import tabnanny
-import xdrlib
-[out]
-main:1: error: No library stub file for standard library module 'tabnanny'
-main:1: note: (Stub files are from https://github.com/python/typeshed)
-main:2: error: No library stub file for standard library module 'xdrlib'
-
-[case testListComprehensionSpecialScoping]
-class A:
- x = 1
- y = 1
- z = 1
- [x for i in z if y]
-[out]
-main:5: error: Name 'x' is not defined
-main:5: error: Name 'y' is not defined
-
-[case testTypeRedeclarationNoSpuriousWarnings]
-from typing import Tuple
-a = 1 # type: int
-a = 's' # type: str
-a = ('spam', 'spam', 'eggs', 'spam') # type: Tuple[str]
-
-[out]
-main:3: error: Name 'a' already defined
-main:4: error: Name 'a' already defined
diff --git a/test-data/unit/semanal-expressions.test b/test-data/unit/semanal-expressions.test
deleted file mode 100644
index cec2a3c..0000000
--- a/test-data/unit/semanal-expressions.test
+++ /dev/null
@@ -1,395 +0,0 @@
-[case testLiterals]
-(1, 'x', 1.1, 1.1j)
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- TupleExpr:1(
- IntExpr(1)
- StrExpr(x)
- FloatExpr(1.1)
- ComplexExpr(1.1j))))
-
-[case testMemberExpr]
-x = 1
-x.y
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- IntExpr(1))
- ExpressionStmt:2(
- MemberExpr:2(
- NameExpr(x [__main__.x])
- y)))
-
-[case testIndexExpr]
-x = y = 1
-x[y]
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- Lvalues(
- NameExpr(x* [__main__.x])
- NameExpr(y* [__main__.y]))
- IntExpr(1))
- ExpressionStmt:2(
- IndexExpr:2(
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y]))))
-
-[case testBinaryOperations]
-x = y = 1
-x + y
-x | y
-x is not y
-x == y
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- Lvalues(
- NameExpr(x* [__main__.x])
- NameExpr(y* [__main__.y]))
- IntExpr(1))
- ExpressionStmt:2(
- OpExpr:2(
- +
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y])))
- ExpressionStmt:3(
- OpExpr:3(
- |
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y])))
- ExpressionStmt:4(
- ComparisonExpr:4(
- is not
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y])))
- ExpressionStmt:5(
- ComparisonExpr:5(
- ==
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y]))))
-
-[case testUnaryOperations]
-x = 1
--x
-~x
-+x
-not x
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- IntExpr(1))
- ExpressionStmt:2(
- UnaryExpr:2(
- -
- NameExpr(x [__main__.x])))
- ExpressionStmt:3(
- UnaryExpr:3(
- ~
- NameExpr(x [__main__.x])))
- ExpressionStmt:4(
- UnaryExpr:4(
- +
- NameExpr(x [__main__.x])))
- ExpressionStmt:5(
- UnaryExpr:5(
- not
- NameExpr(x [__main__.x]))))
-
-[case testSlices]
-x = y = z = 1
-x[y:z:x]
-x[:]
-x[:y]
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- Lvalues(
- NameExpr(x* [__main__.x])
- NameExpr(y* [__main__.y])
- NameExpr(z* [__main__.z]))
- IntExpr(1))
- ExpressionStmt:2(
- IndexExpr:2(
- NameExpr(x [__main__.x])
- SliceExpr:-1(
- NameExpr(y [__main__.y])
- NameExpr(z [__main__.z])
- NameExpr(x [__main__.x]))))
- ExpressionStmt:3(
- IndexExpr:3(
- NameExpr(x [__main__.x])
- SliceExpr:-1(
- <empty>
- <empty>)))
- ExpressionStmt:4(
- IndexExpr:4(
- NameExpr(x [__main__.x])
- SliceExpr:-1(
- <empty>
- NameExpr(y [__main__.y])))))
-
-[case testTupleLiteral]
-x = y = 1
-x, y
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- Lvalues(
- NameExpr(x* [__main__.x])
- NameExpr(y* [__main__.y]))
- IntExpr(1))
- ExpressionStmt:2(
- TupleExpr:2(
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y]))))
-
-[case testListLiteral]
-x = y = 1
-([], [x, y])
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- Lvalues(
- NameExpr(x* [__main__.x])
- NameExpr(y* [__main__.y]))
- IntExpr(1))
- ExpressionStmt:2(
- TupleExpr:2(
- ListExpr:2()
- ListExpr:2(
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y])))))
-
-[case testDictLiterals]
-x = y = 1
-{ x : y, y : x }
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- Lvalues(
- NameExpr(x* [__main__.x])
- NameExpr(y* [__main__.y]))
- IntExpr(1))
- ExpressionStmt:2(
- DictExpr:2(
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y])
- NameExpr(y [__main__.y])
- NameExpr(x [__main__.x]))))
-
-[case testListComprehension]
-a = 0
-([x + 1 for x in a])
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(a* [__main__.a])
- IntExpr(0))
- ExpressionStmt:2(
- ListComprehension:2(
- GeneratorExpr:2(
- OpExpr:2(
- +
- NameExpr(x [l])
- IntExpr(1))
- NameExpr(x* [l])
- NameExpr(a [__main__.a])))))
-
-[case testListComprehensionInFunction]
-def f(a) -> None:
- [x for x in a]
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(a))
- def (a: Any)
- Block:1(
- ExpressionStmt:2(
- ListComprehension:2(
- GeneratorExpr:2(
- NameExpr(x [l])
- NameExpr(x* [l])
- NameExpr(a [l])))))))
-
-[case testListComprehensionWithCondition]
-a = 0
-a = [x for x in a if x]
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(a* [__main__.a])
- IntExpr(0))
- AssignmentStmt:2(
- NameExpr(a [__main__.a])
- ListComprehension:2(
- GeneratorExpr:2(
- NameExpr(x [l])
- NameExpr(x* [l])
- NameExpr(a [__main__.a])
- NameExpr(x [l])))))
-
-[case testSetComprehension]
-a = 0
-({x + 1 for x in a})
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(a* [__main__.a])
- IntExpr(0))
- ExpressionStmt:2(
- SetComprehension:2(
- GeneratorExpr:2(
- OpExpr:2(
- +
- NameExpr(x [l])
- IntExpr(1))
- NameExpr(x* [l])
- NameExpr(a [__main__.a])))))
-
-[case testSetComprehensionWithCondition]
-a = 0
-a = {x for x in a if x}
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(a* [__main__.a])
- IntExpr(0))
- AssignmentStmt:2(
- NameExpr(a [__main__.a])
- SetComprehension:2(
- GeneratorExpr:2(
- NameExpr(x [l])
- NameExpr(x* [l])
- NameExpr(a [__main__.a])
- NameExpr(x [l])))))
-
-[case testDictionaryComprehension]
-a = 0
-({x: x + 1 for x in a})
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(a* [__main__.a])
- IntExpr(0))
- ExpressionStmt:2(
- DictionaryComprehension:2(
- NameExpr(x [l])
- OpExpr:2(
- +
- NameExpr(x [l])
- IntExpr(1))
- NameExpr(x* [l])
- NameExpr(a [__main__.a]))))
-
-[case testDictionaryComprehensionWithCondition]
-a = 0
-a = {x: x + 1 for x in a if x}
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(a* [__main__.a])
- IntExpr(0))
- AssignmentStmt:2(
- NameExpr(a [__main__.a])
- DictionaryComprehension:2(
- NameExpr(x [l])
- OpExpr:2(
- +
- NameExpr(x [l])
- IntExpr(1))
- NameExpr(x* [l])
- NameExpr(a [__main__.a])
- NameExpr(x [l]))))
-
-[case testGeneratorExpression]
-a = 0
-(x for x in a)
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(a* [__main__.a])
- IntExpr(0))
- ExpressionStmt:2(
- GeneratorExpr:2(
- NameExpr(x [l])
- NameExpr(x* [l])
- NameExpr(a [__main__.a]))))
-
-[case testGeneratorExpressionNestedIndex]
-a = 0
-(x for x, (y, z) in a)
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(a* [__main__.a])
- IntExpr(0))
- ExpressionStmt:2(
- GeneratorExpr:2(
- NameExpr(x [l])
- TupleExpr:2(
- NameExpr(x* [l])
- TupleExpr:2(
- NameExpr(y* [l])
- NameExpr(z* [l])))
- NameExpr(a [__main__.a]))))
-
-[case testLambda]
-x = 0
-lambda: x
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- IntExpr(0))
- ExpressionStmt:2(
- FuncExpr:2(
- Block:2(
- ReturnStmt:2(
- NameExpr(x [__main__.x]))))))
-
-[case testLambdaWithArguments]
-lambda x, y: x + y
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- FuncExpr:1(
- Args(
- Var(x)
- Var(y))
- Block:1(
- ReturnStmt:1(
- OpExpr:1(
- +
- NameExpr(x [l])
- NameExpr(y [l])))))))
-
-[case testConditionalExpression]
-int if None else str
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- ConditionalExpr:1(
- Condition(
- NameExpr(None [builtins.None]))
- NameExpr(int [builtins.int])
- NameExpr(str [builtins.str]))))
-
-[case testDictWithKeywordArgs]
-dict(a=1, b=str())
-[builtins fixtures/dict.pyi]
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- DictExpr:1(
- StrExpr(a)
- IntExpr(1)
- StrExpr(b)
- CallExpr:1(
- NameExpr(str [builtins.str])
- Args()))))
diff --git a/test-data/unit/semanal-modules.test b/test-data/unit/semanal-modules.test
deleted file mode 100644
index 7a00e66..0000000
--- a/test-data/unit/semanal-modules.test
+++ /dev/null
@@ -1,877 +0,0 @@
--- NOTE: If a module has a name starting or ending with _, it is skipped in
--- output.
-
-[case testImport]
-import x
-x.y
-[file x.py]
-y = 1
-[out]
-MypyFile:1(
- Import:1(x)
- ExpressionStmt:2(
- MemberExpr:2(
- NameExpr(x)
- y [x.y])))
-MypyFile:1(
- tmp/x.py
- AssignmentStmt:1(
- NameExpr(y* [x.y])
- IntExpr(1)))
-
-[case testImportedNameInType]
-import m
-x = None # type: m.c
-[file m.py]
-class c: pass
-[out]
-MypyFile:1(
- Import:1(m)
- AssignmentStmt:2(
- NameExpr(x [__main__.x])
- NameExpr(None [builtins.None])
- m.c))
-MypyFile:1(
- tmp/m.py
- ClassDef:1(
- c
- PassStmt:1()))
-
-[case testImportFrom]
-from m import y
-x = y
-[file m.py]
-y = 1
-[out]
-MypyFile:1(
- ImportFrom:1(m, [y])
- AssignmentStmt:2(
- NameExpr(x* [__main__.x])
- NameExpr(y [m.y])))
-MypyFile:1(
- tmp/m.py
- AssignmentStmt:1(
- NameExpr(y* [m.y])
- IntExpr(1)))
-
-[case testImportFromType]
-from m import c
-x = None # type: c
-[file m.py]
-class c: pass
-[out]
-MypyFile:1(
- ImportFrom:1(m, [c])
- AssignmentStmt:2(
- NameExpr(x [__main__.x])
- NameExpr(None [builtins.None])
- m.c))
-MypyFile:1(
- tmp/m.py
- ClassDef:1(
- c
- PassStmt:1()))
-
-[case testImportMultiple]
-import _m, _n
-_m.x, _n.y
-[file _m.py]
-x = 1
-[file _n.py]
-y = 2
-[out]
-MypyFile:1(
- Import:1(_m, _n)
- ExpressionStmt:2(
- TupleExpr:2(
- MemberExpr:2(
- NameExpr(_m)
- x [_m.x])
- MemberExpr:2(
- NameExpr(_n)
- y [_n.y]))))
-
-[case testImportAs]
-import _m as n
-n.x
-[file _m.py]
-x = 1
-[out]
-MypyFile:1(
- Import:1(_m : n)
- ExpressionStmt:2(
- MemberExpr:2(
- NameExpr(n [_m])
- x [_m.x])))
-
-[case testImportFromMultiple]
-from _m import x, y
-x, y
-[file _m.py]
-x = y = 1
-[out]
-MypyFile:1(
- ImportFrom:1(_m, [x, y])
- ExpressionStmt:2(
- TupleExpr:2(
- NameExpr(x [_m.x])
- NameExpr(y [_m.y]))))
-
-[case testImportFromAs]
-from _m import y as z
-z
-[file _m.py]
-y = 1
-[out]
-MypyFile:1(
- ImportFrom:1(_m, [y : z])
- ExpressionStmt:2(
- NameExpr(z [_m.y])))
-
-[case testAccessImportedName]
-from m import x
-y = x
-[file m.py]
-from _n import x
-[file _n.py]
-x = 1
-[out]
-MypyFile:1(
- ImportFrom:1(m, [x])
- AssignmentStmt:2(
- NameExpr(y* [__main__.y])
- NameExpr(x [_n.x])))
-MypyFile:1(
- tmp/m.py
- ImportFrom:1(_n, [x]))
-
-[case testAccessImportedName2]
-import _m
-y = _m.x
-[file _m.py]
-from _n import x
-[file _n.py]
-x = 1
-[out]
-MypyFile:1(
- Import:1(_m)
- AssignmentStmt:2(
- NameExpr(y* [__main__.y])
- MemberExpr:2(
- NameExpr(_m)
- x [_n.x])))
-
-[case testAccessingImportedNameInType]
-from _m import c
-x = None # type: c
-[file _m.py]
-from _n import c
-[file _n.py]
-class c: pass
-[out]
-MypyFile:1(
- ImportFrom:1(_m, [c])
- AssignmentStmt:2(
- NameExpr(x [__main__.x])
- NameExpr(None [builtins.None])
- _n.c))
-
-[case testAccessingImportedNameInType2]
-import _m
-x = None # type: _m.c
-[file _m.py]
-from _n import c
-[file _n.py]
-class c: pass
-[out]
-MypyFile:1(
- Import:1(_m)
- AssignmentStmt:2(
- NameExpr(x [__main__.x])
- NameExpr(None [builtins.None])
- _n.c))
-
-[case testAccessingImportedModule]
-from _m import _n
-_n.x
-[file _m.py]
-import _n
-[file _n.py]
-x = 1
-[out]
-MypyFile:1(
- ImportFrom:1(_m, [_n])
- ExpressionStmt:2(
- MemberExpr:2(
- NameExpr(_n)
- x [_n.x])))
-
-[case testAccessingImportedModule]
-import _m
-_m._n.x
-[file _m.py]
-import _n
-[file _n.py]
-x = 1
-[out]
-MypyFile:1(
- Import:1(_m)
- ExpressionStmt:2(
- MemberExpr:2(
- MemberExpr:2(
- NameExpr(_m)
- _n)
- x [_n.x])))
-
-[case testAccessTypeViaDoubleIndirection]
-from _m import c
-a = None # type: c
-[file _m.py]
-from _n import c
-[file _n.py]
-class c: pass
-[out]
-MypyFile:1(
- ImportFrom:1(_m, [c])
- AssignmentStmt:2(
- NameExpr(a [__main__.a])
- NameExpr(None [builtins.None])
- _n.c))
-
-[case testAccessTypeViaDoubleIndirection2]
-import _m
-a = None # type: _m.c
-[file _m.py]
-from _n import c
-[file _n.py]
-class c: pass
-[out]
-MypyFile:1(
- Import:1(_m)
- AssignmentStmt:2(
- NameExpr(a [__main__.a])
- NameExpr(None [builtins.None])
- _n.c))
-
-[case testImportAsterisk]
-from _m import *
-x, y
-[file _m.py]
-x = y = 1
-[out]
-MypyFile:1(
- ImportAll:1(_m)
- ExpressionStmt:2(
- TupleExpr:2(
- NameExpr(x [_m.x])
- NameExpr(y [_m.y]))))
-
-[case testImportAsteriskAndImportedNames]
-from _m import *
-n_.x, y
-[file _m.py]
-import n_
-from n_ import y
-[file n_.py]
-x = y = 1
-[out]
-MypyFile:1(
- ImportAll:1(_m)
- ExpressionStmt:2(
- TupleExpr:2(
- MemberExpr:2(
- NameExpr(n_)
- x [n_.x])
- NameExpr(y [n_.y]))))
-
-[case testImportAsteriskAndImportedNamesInTypes]
-from _m import *
-x = None # type: n_.c
-y = None # type: d
-[file _m.py]
-import n_
-from n_ import d
-[file n_.py]
-class c: pass
-class d: pass
-[out]
-MypyFile:1(
- ImportAll:1(_m)
- AssignmentStmt:2(
- NameExpr(x [__main__.x])
- NameExpr(None [builtins.None])
- n_.c)
- AssignmentStmt:3(
- NameExpr(y [__main__.y])
- NameExpr(None [builtins.None])
- n_.d))
-
-[case testModuleInSubdir]
-import _m
-_m.x
-[file _m/__init__.py]
-x = 1
-[out]
-MypyFile:1(
- Import:1(_m)
- ExpressionStmt:2(
- MemberExpr:2(
- NameExpr(_m)
- x [_m.x])))
-
-[case testNestedModules]
-import m.n
-m.n.x, m.y
-[file m/__init__.py]
-y = 1
-[file m/n.py]
-x = 1
-[out]
-MypyFile:1(
- Import:1(m.n)
- ExpressionStmt:2(
- TupleExpr:2(
- MemberExpr:2(
- MemberExpr:2(
- NameExpr(m)
- n [m.n])
- x [m.n.x])
- MemberExpr:2(
- NameExpr(m)
- y [m.y]))))
-MypyFile:1(
- tmp/m/n.py
- AssignmentStmt:1(
- NameExpr(x* [m.n.x])
- IntExpr(1)))
-
-[case testImportFromSubmodule]
-from m._n import x
-x
-[file m/__init__.py]
-[file m/_n.py]
-x = 1
-[out]
-MypyFile:1(
- ImportFrom:1(m._n, [x])
- ExpressionStmt:2(
- NameExpr(x [m._n.x])))
-
-[case testImportAllFromSubmodule]
-from m._n import *
-x, y
-[file m/__init__.py]
-[file m/_n.py]
-x = y = 1
-[out]
-MypyFile:1(
- ImportAll:1(m._n)
- ExpressionStmt:2(
- TupleExpr:2(
- NameExpr(x [m._n.x])
- NameExpr(y [m._n.y]))))
-
-[case testSubmodulesAndTypes]
-import m._n
-x = None # type: m._n.c
-[file m/__init__.py]
-[file m/_n.py]
-class c: pass
-[out]
-MypyFile:1(
- Import:1(m._n)
- AssignmentStmt:2(
- NameExpr(x [__main__.x])
- NameExpr(None [builtins.None])
- m._n.c))
-
-[case testSubmodulesAndTypes]
-from m._n import c
-x = None # type: c
-[file m/__init__.py]
-[file m/_n.py]
-class c: pass
-[out]
-MypyFile:1(
- ImportFrom:1(m._n, [c])
- AssignmentStmt:2(
- NameExpr(x [__main__.x])
- NameExpr(None [builtins.None])
- m._n.c))
-
-[case testFromPackageImportModule]
-from m import _n
-_n.x
-[file m/__init__.py]
-[file m/_n.py]
-x = 1
-[out]
-MypyFile:1(
- ImportFrom:1(m, [_n])
- ExpressionStmt:2(
- MemberExpr:2(
- NameExpr(_n [m._n])
- x [m._n.x])))
-
-[case testDeeplyNestedModule]
-import m.n.k
-m.n.k.x
-m.n.b
-m.a
-[file m/__init__.py]
-a = 1
-[file m/n/__init__.py]
-b = 1
-[file m/n/k.py]
-x = 1
-[out]
-MypyFile:1(
- Import:1(m.n.k)
- ExpressionStmt:2(
- MemberExpr:2(
- MemberExpr:2(
- MemberExpr:2(
- NameExpr(m)
- n [m.n])
- k [m.n.k])
- x [m.n.k.x]))
- ExpressionStmt:3(
- MemberExpr:3(
- MemberExpr:3(
- NameExpr(m)
- n [m.n])
- b [m.n.b]))
- ExpressionStmt:4(
- MemberExpr:4(
- NameExpr(m)
- a [m.a])))
-MypyFile:1(
- tmp/m/n/k.py
- AssignmentStmt:1(
- NameExpr(x* [m.n.k.x])
- IntExpr(1)))
-
-[case testImportInSubmodule]
-import m._n
-y = m._n.x
-[file m/__init__.py]
-[file m/_n.py]
-from m._k import x
-[file m/_k.py]
-x = 1
-[out]
-MypyFile:1(
- Import:1(m._n)
- AssignmentStmt:2(
- NameExpr(y* [__main__.y])
- MemberExpr:2(
- MemberExpr:2(
- NameExpr(m)
- _n [m._n])
- x [m._k.x])))
-
-[case testBuiltinsUsingModule]
-o = None # type: __builtins__.object
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(o [__main__.o])
- NameExpr(None [builtins.None])
- builtins.object))
-
-[case testImplicitAccessToBuiltins]
-object
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- NameExpr(object [builtins.object])))
-
-[case testAssignmentToModuleAttribute]
-import _m
-_m.x = (
- _m.x)
-[file _m.py]
-x = None
-[out]
-MypyFile:1(
- Import:1(_m)
- AssignmentStmt:2(
- MemberExpr:2(
- NameExpr(_m)
- x [_m.x])
- MemberExpr:3(
- NameExpr(_m)
- x [_m.x])))
-
-[case testAssignmentThatRefersToModule]
-import _m
-_m.x[None] = None
-[file _m.py]
-x = None
-[out]
-MypyFile:1(
- Import:1(_m)
- AssignmentStmt:2(
- IndexExpr:2(
- MemberExpr:2(
- NameExpr(_m)
- x [_m.x])
- NameExpr(None [builtins.None]))
- NameExpr(None [builtins.None])))
-
-[case testImportInBlock]
-if 1:
- import _x
- _x.y
-[file _x.py]
-y = 1
-[out]
-MypyFile:1(
- IfStmt:1(
- If(
- IntExpr(1))
- Then(
- Import:2(_x)
- ExpressionStmt:3(
- MemberExpr:3(
- NameExpr(_x)
- y [_x.y])))))
-
-[case testImportInFunction]
-def f() -> None:
- import _x
- _x.y
-[file _x.py]
-y = 1
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- def ()
- Block:1(
- Import:2(_x)
- ExpressionStmt:3(
- MemberExpr:3(
- NameExpr(_x)
- y [_x.y])))))
-
-[case testImportInClassBody]
-class A:
- from _x import y
- z = y
-[file _x.py]
-y = 1
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- ImportFrom:2(_x, [y])
- AssignmentStmt:3(
- NameExpr(z* [m])
- NameExpr(y [_x.y]))))
-
-[case testImportInClassBody2]
-class A:
- import _x
- z = _x.y
-[file _x.py]
-y = 1
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- Import:2(_x)
- AssignmentStmt:3(
- NameExpr(z* [m])
- MemberExpr:3(
- NameExpr(_x)
- y [_x.y]))))
-
-[case testImportModuleTwice]
-def f() -> None:
- import x
- import x
- x.y
-[file x.py]
-y = 1
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- def ()
- Block:1(
- Import:2(x)
- Import:3(x)
- ExpressionStmt:4(
- MemberExpr:4(
- NameExpr(x)
- y [x.y])))))
-MypyFile:1(
- tmp/x.py
- AssignmentStmt:1(
- NameExpr(y* [x.y])
- IntExpr(1)))
-
-[case testRelativeImport0]
-import m.x
-m.x.z.y
-[file m/__init__.py]
-[file m/x.py]
-from . import z
-[file m/z.py]
-y = 1
-[out]
-MypyFile:1(
- Import:1(m.x)
- ExpressionStmt:2(
- MemberExpr:2(
- MemberExpr:2(
- MemberExpr:2(
- NameExpr(m)
- x [m.x])
- z [m.z])
- y [m.z.y])))
-MypyFile:1(
- tmp/m/x.py
- ImportFrom:1(., [z]))
-MypyFile:1(
- tmp/m/z.py
- AssignmentStmt:1(
- NameExpr(y* [m.z.y])
- IntExpr(1)))
-
-[case testRelativeImport1]
-import m.t.b as b
-b.x.y
-b.z.y
-[file m/__init__.py]
-[file m/x.py]
-y = 1
-[file m/z.py]
-y = 3
-[file m/t/__init__.py]
-[file m/t/b.py]
-from .. import x, z
-[out]
-MypyFile:1(
- Import:1(m.t.b : b)
- ExpressionStmt:2(
- MemberExpr:2(
- MemberExpr:2(
- NameExpr(b [m.t.b])
- x [m.x])
- y [m.x.y]))
- ExpressionStmt:3(
- MemberExpr:3(
- MemberExpr:3(
- NameExpr(b [m.t.b])
- z [m.z])
- y [m.z.y])))
-MypyFile:1(
- tmp/m/t/b.py
- ImportFrom:1(.., [x, z]))
-MypyFile:1(
- tmp/m/x.py
- AssignmentStmt:1(
- NameExpr(y* [m.x.y])
- IntExpr(1)))
-MypyFile:1(
- tmp/m/z.py
- AssignmentStmt:1(
- NameExpr(y* [m.z.y])
- IntExpr(3)))
-
-[case testRelativeImport2]
-import m.t.b as b
-b.xy
-b.zy
-[file m/__init__.py]
-[file m/x.py]
-y = 1
-[file m/z.py]
-y = 3
-[file m/t/__init__.py]
-[file m/t/b.py]
-from ..x import y as xy
-from ..z import y as zy
-[out]
-MypyFile:1(
- Import:1(m.t.b : b)
- ExpressionStmt:2(
- MemberExpr:2(
- NameExpr(b [m.t.b])
- xy [m.x.y]))
- ExpressionStmt:3(
- MemberExpr:3(
- NameExpr(b [m.t.b])
- zy [m.z.y])))
-MypyFile:1(
- tmp/m/t/b.py
- ImportFrom:1(..x, [y : xy])
- ImportFrom:2(..z, [y : zy]))
-MypyFile:1(
- tmp/m/x.py
- AssignmentStmt:1(
- NameExpr(y* [m.x.y])
- IntExpr(1)))
-MypyFile:1(
- tmp/m/z.py
- AssignmentStmt:1(
- NameExpr(y* [m.z.y])
- IntExpr(3)))
-
-[case testRelativeImport3]
-import m.t
-m.zy
-m.xy
-m.t.y
-[file m/__init__.py]
-from .x import *
-from .z import *
-[file m/x.py]
-from .z import zy as xy
-[file m/z.py]
-zy = 3
-[file m/t/__init__.py]
-from .b import *
-[file m/t/b.py]
-from .. import xy as y
-[out]
-MypyFile:1(
- Import:1(m.t)
- ExpressionStmt:2(
- MemberExpr:2(
- NameExpr(m)
- zy [m.z.zy]))
- ExpressionStmt:3(
- MemberExpr:3(
- NameExpr(m)
- xy [m.z.zy]))
- ExpressionStmt:4(
- MemberExpr:4(
- MemberExpr:4(
- NameExpr(m)
- t [m.t])
- y [m.z.zy])))
-MypyFile:1(
- tmp/m/t/b.py
- ImportFrom:1(.., [xy : y]))
-MypyFile:1(
- tmp/m/x.py
- ImportFrom:1(.z, [zy : xy]))
-MypyFile:1(
- tmp/m/z.py
- AssignmentStmt:1(
- NameExpr(zy* [m.z.zy])
- IntExpr(3)))
-
-[case testRelativeImportFromSameModule]
-import m.x
-[file m/__init__.py]
-[file m/x.py]
-from .x import nonexistent
-[out]
-tmp/m/x.py:1: error: Module 'm.x' has no attribute 'nonexistent'
-
-[case testImportFromSameModule]
-import m.x
-[file m/__init__.py]
-[file m/x.py]
-from m.x import nonexistent
-[out]
-tmp/m/x.py:1: error: Module 'm.x' has no attribute 'nonexistent'
-
-[case testFromImportAsInStub]
-from m import *
-x
-y # E: Name 'y' is not defined
-[file m.pyi]
-from m2 import x as x
-from m2 import y
-[file m2.py]
-x = 1
-y = 2
-[out]
-
-[case testFromImportAsInNonStub]
-from m_ import *
-x
-y
-[file m_.py]
-from m2_ import x as x
-from m2_ import y
-[file m2_.py]
-x = 1
-y = 2
-[out]
-MypyFile:1(
- ImportAll:1(m_)
- ExpressionStmt:2(
- NameExpr(x [m2_.x]))
- ExpressionStmt:3(
- NameExpr(y [m2_.y])))
-
-[case testImportAsInStub]
-from m import *
-m2
-m3 # E: Name 'm3' is not defined
-[file m.pyi]
-import m2 as m2
-import m3
-[file m2.py]
-[file m3.py]
-[out]
-
-[case testImportAsInNonStub]
-from m_ import *
-m2_
-m3_
-[file m_.py]
-import m2_ as m2_
-import m3_
-[file m2_.py]
-[file m3_.py]
-[out]
-MypyFile:1(
- ImportAll:1(m_)
- ExpressionStmt:2(
- NameExpr(m2_))
- ExpressionStmt:3(
- NameExpr(m3_)))
-
-[case testErrorsInMultipleModules]
-import m
-x
-[file m.py]
-y
-[out]
-tmp/m.py:1: error: Name 'y' is not defined
-main:2: error: Name 'x' is not defined
-
-[case testImportTwice]
-import typing
-from x import a, a # ok (we could give a warning, but this is valid)
-def f() -> None:
- from x import a
- from x import a # ok
-import x
-import x # ok, since we may import multiple submodules of a package
-[file x.py]
-a = 1
-[out]
-MypyFile:1(
- Import:1(typing)
- ImportFrom:2(x, [a, a])
- FuncDef:3(
- f
- def ()
- Block:3(
- ImportFrom:4(x, [a])
- ImportFrom:5(x, [a])))
- Import:6(x)
- Import:7(x))
-MypyFile:1(
- tmp/x.py
- AssignmentStmt:1(
- NameExpr(a* [x.a])
- IntExpr(1)))
diff --git a/test-data/unit/semanal-namedtuple.test b/test-data/unit/semanal-namedtuple.test
deleted file mode 100644
index a820a07..0000000
--- a/test-data/unit/semanal-namedtuple.test
+++ /dev/null
@@ -1,177 +0,0 @@
--- Semantic analysis of named tuples
-
-[case testSimpleNamedtuple]
-from collections import namedtuple
-N = namedtuple('N', ['a'])
-def f() -> N: pass
-[out]
-MypyFile:1(
- ImportFrom:1(collections, [namedtuple])
- AssignmentStmt:2(
- NameExpr(N* [__main__.N])
- NamedTupleExpr:2(N, Tuple[Any]))
- FuncDef:3(
- f
- def () -> Tuple[Any, fallback=__main__.N]
- Block:3(
- PassStmt:3())))
-
-[case testTwoItemNamedtuple]
-from collections import namedtuple
-N = namedtuple('N', ['a', 'xyz'])
-def f() -> N: pass
-[out]
-MypyFile:1(
- ImportFrom:1(collections, [namedtuple])
- AssignmentStmt:2(
- NameExpr(N* [__main__.N])
- NamedTupleExpr:2(N, Tuple[Any, Any]))
- FuncDef:3(
- f
- def () -> Tuple[Any, Any, fallback=__main__.N]
- Block:3(
- PassStmt:3())))
-
-[case testTwoItemNamedtupleWithTupleFieldNames]
-from collections import namedtuple
-N = namedtuple('N', ('a', 'xyz'))
-def f() -> N: pass
-[out]
-MypyFile:1(
- ImportFrom:1(collections, [namedtuple])
- AssignmentStmt:2(
- NameExpr(N* [__main__.N])
- NamedTupleExpr:2(N, Tuple[Any, Any]))
- FuncDef:3(
- f
- def () -> Tuple[Any, Any, fallback=__main__.N]
- Block:3(
- PassStmt:3())))
-
-[case testTwoItemNamedtupleWithShorthandSyntax]
-from collections import namedtuple
-N = namedtuple('N', ' a xyz ')
-def f() -> N: pass
-[out]
-MypyFile:1(
- ImportFrom:1(collections, [namedtuple])
- AssignmentStmt:2(
- NameExpr(N* [__main__.N])
- NamedTupleExpr:2(N, Tuple[Any, Any]))
- FuncDef:3(
- f
- def () -> Tuple[Any, Any, fallback=__main__.N]
- Block:3(
- PassStmt:3())))
-
-[case testNamedTupleWithItemTypes]
-from typing import NamedTuple
-N = NamedTuple('N', [('a', int),
- ('b', str)])
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [NamedTuple])
- AssignmentStmt:2(
- NameExpr(N* [__main__.N])
- NamedTupleExpr:2(N, Tuple[builtins.int, builtins.str])))
-
-[case testNamedTupleWithTupleFieldNamesWithItemTypes]
-from typing import NamedTuple
-N = NamedTuple('N', (('a', int),
- ('b', str)))
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [NamedTuple])
- AssignmentStmt:2(
- NameExpr(N* [__main__.N])
- NamedTupleExpr:2(N, Tuple[builtins.int, builtins.str])))
-
-[case testNamedTupleBaseClass]
-from collections import namedtuple
-N = namedtuple('N', ['x'])
-class A(N): pass
-[out]
-MypyFile:1(
- ImportFrom:1(collections, [namedtuple])
- AssignmentStmt:2(
- NameExpr(N* [__main__.N])
- NamedTupleExpr:2(N, Tuple[Any]))
- ClassDef:3(
- A
- TupleType(
- Tuple[Any, fallback=__main__.N])
- BaseType(
- __main__.N)
- PassStmt:3()))
-
-[case testNamedTupleBaseClass2]
-from collections import namedtuple
-class A(namedtuple('N', ['x'])): pass
-[out]
-MypyFile:1(
- ImportFrom:1(collections, [namedtuple])
- ClassDef:2(
- A
- TupleType(
- Tuple[Any, fallback=__main__.N@2])
- BaseType(
- __main__.N@2)
- PassStmt:2()))
-
-[case testNamedTupleBaseClassWithItemTypes]
-from typing import NamedTuple
-class A(NamedTuple('N', [('x', int)])): pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [NamedTuple])
- ClassDef:2(
- A
- TupleType(
- Tuple[builtins.int, fallback=__main__.N@2])
- BaseType(
- __main__.N@2)
- PassStmt:2()))
-
--- Errors
-
-[case testNamedTupleWithTooFewArguments]
-from collections import namedtuple
-N = namedtuple('N') # E: Too few arguments for namedtuple()
-
-[case testNamedTupleWithTooManyArguments]
-from collections import namedtuple
-N = namedtuple('N', ['x'], 'y') # E: Too many arguments for namedtuple()
-
-[case testNamedTupleWithInvalidName]
-from collections import namedtuple
-N = namedtuple(1, ['x']) # E: namedtuple() expects a string literal as the first argument
-
-[case testNamedTupleWithInvalidItems]
-from collections import namedtuple
-N = namedtuple('N', 1) # E: List or tuple literal expected as the second argument to namedtuple()
-
-[case testNamedTupleWithInvalidItems2]
-from collections import namedtuple
-N = namedtuple('N', ['x', 1]) # E: String literal expected as namedtuple() item
-
-[case testNamedTupleWithUnderscoreItemName]
-from collections import namedtuple
-N = namedtuple('N', ['_fallback']) # E: namedtuple() field names cannot start with an underscore: _fallback
-
--- NOTE: The following code works at runtime but is not yet supported by mypy.
--- Keyword arguments may potentially be supported in the future.
-[case testNamedTupleWithNonpositionalArgs]
-from collections import namedtuple
-N = namedtuple(typename='N', field_names=['x']) # E: Unexpected arguments to namedtuple()
-
-[case testInvalidNamedTupleBaseClass]
-from typing import NamedTuple
-class A(NamedTuple('N', [1])): pass # E: Tuple expected as NamedTuple() field
-class B(A): pass
-
-[case testInvalidNamedTupleBaseClass2]
-class A(NamedTuple('N', [1])): pass
-class B(A): pass
-[out]
-main:1: error: Name 'NamedTuple' is not defined
-main:1: error: Invalid base class
diff --git a/test-data/unit/semanal-python2.test b/test-data/unit/semanal-python2.test
deleted file mode 100644
index 97264a5..0000000
--- a/test-data/unit/semanal-python2.test
+++ /dev/null
@@ -1,76 +0,0 @@
--- Python 2 semantic analysis test cases.
-
-[case testPrintStatement_python2]
-print int, None
-[out]
-MypyFile:1(
- PrintStmt:1(
- NameExpr(int [builtins.int])
- NameExpr(None [builtins.None])
- Newline))
-
-[case testPrintStatementWithTarget]
-print >>int, None
-[out]
-MypyFile:1(
- PrintStmt:1(
- NameExpr(None [builtins.None])
- Target(
- NameExpr(int [builtins.int]))
- Newline))
-
-[case testExecStatement]
-exec None
-exec None in int
-exec None in int, str
-[out]
-MypyFile:1(
- ExecStmt:1(
- NameExpr(None [builtins.None]))
- ExecStmt:2(
- NameExpr(None [builtins.None])
- NameExpr(int [builtins.int]))
- ExecStmt:3(
- NameExpr(None [builtins.None])
- NameExpr(int [builtins.int])
- NameExpr(str [builtins.str])))
-
-[case testVariableLengthTuple_python2]
-from typing import Tuple, cast
-cast(Tuple[int, ...], ())
-[builtins_py2 fixtures/tuple.pyi]
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Tuple, cast])
- ExpressionStmt:2(
- CastExpr:2(
- TupleExpr:2()
- builtins.tuple[builtins.int])))
-
-[case testTupleArgList_python2]
-def f(x, (y, z)):
- x = y
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x)
- Var(__tuple_arg_2))
- Block:1(
- AssignmentStmt:1(
- TupleExpr:1(
- NameExpr(y* [l])
- NameExpr(z* [l]))
- NameExpr(__tuple_arg_2 [l]))
- AssignmentStmt:2(
- NameExpr(x [l])
- NameExpr(y [l])))))
-
-[case testBackquoteExpr_python2]
-`object`
-[out]
-MypyFile:1(
- ExpressionStmt:1(
- BackquoteExpr:1(
- NameExpr(object [builtins.object]))))
diff --git a/test-data/unit/semanal-statements.test b/test-data/unit/semanal-statements.test
deleted file mode 100644
index e104ab7..0000000
--- a/test-data/unit/semanal-statements.test
+++ /dev/null
@@ -1,929 +0,0 @@
-[case testReturn]
-def f(x): return x
-def g(): return
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- Block:1(
- ReturnStmt:1(
- NameExpr(x [l]))))
- FuncDef:2(
- g
- Block:2(
- ReturnStmt:2())))
-
-[case testRaise]
-raise object()
-[out]
-MypyFile:1(
- RaiseStmt:1(
- CallExpr:1(
- NameExpr(object [builtins.object])
- Args())))
-
-[case testYield]
-def f(): yield f
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Generator
- Block:1(
- ExpressionStmt:1(
- YieldExpr:1(
- NameExpr(f [__main__.f]))))))
-
-[case testAssert]
-assert object
-[out]
-MypyFile:1(
- AssertStmt:1(
- NameExpr(object [builtins.object])))
-
-[case testOperatorAssignment]
-x = y = 1
-x += y
-y |= x
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- Lvalues(
- NameExpr(x* [__main__.x])
- NameExpr(y* [__main__.y]))
- IntExpr(1))
- OperatorAssignmentStmt:2(
- +
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y]))
- OperatorAssignmentStmt:3(
- |
- NameExpr(y [__main__.y])
- NameExpr(x [__main__.x])))
-
-[case testWhile]
-x = y = 1
-while x:
- y
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- Lvalues(
- NameExpr(x* [__main__.x])
- NameExpr(y* [__main__.y]))
- IntExpr(1))
- WhileStmt:2(
- NameExpr(x [__main__.x])
- Block:2(
- ExpressionStmt:3(
- NameExpr(y [__main__.y])))))
-
-[case testFor]
-for x in object:
- x
-[out]
-MypyFile:1(
- ForStmt:1(
- NameExpr(x* [__main__.x])
- NameExpr(object [builtins.object])
- Block:1(
- ExpressionStmt:2(
- NameExpr(x [__main__.x])))))
-
-[case testForInFunction]
-def f():
- for x in f:
- x
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- ForStmt:2(
- NameExpr(x* [l])
- NameExpr(f [__main__.f])
- Block:2(
- ExpressionStmt:3(
- NameExpr(x [l])))))))
-
-[case testMultipleForIndexVars]
-for x, y in []:
- x, y
-[out]
-MypyFile:1(
- ForStmt:1(
- TupleExpr:1(
- NameExpr(x* [__main__.x])
- NameExpr(y* [__main__.y]))
- ListExpr:1()
- Block:1(
- ExpressionStmt:2(
- TupleExpr:2(
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y]))))))
-
-[case testForIndexVarScope]
-for x in []:
- pass
-x
-[out]
-MypyFile:1(
- ForStmt:1(
- NameExpr(x* [__main__.x])
- ListExpr:1()
- Block:1(
- PassStmt:2()))
- ExpressionStmt:3(
- NameExpr(x [__main__.x])))
-
-[case testForIndexVarScope2]
-def f():
- for x in []:
- pass
- x
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- ForStmt:2(
- NameExpr(x* [l])
- ListExpr:2()
- Block:2(
- PassStmt:3()))
- ExpressionStmt:4(
- NameExpr(x [l])))))
-
-[case testReusingForLoopIndexVariable]
-for x in None:
- pass
-for x in None:
- pass
-[out]
-MypyFile:1(
- ForStmt:1(
- NameExpr(x* [__main__.x])
- NameExpr(None [builtins.None])
- Block:1(
- PassStmt:2()))
- ForStmt:3(
- NameExpr(x [__main__.x])
- NameExpr(None [builtins.None])
- Block:3(
- PassStmt:4())))
-
-[case testReusingForLoopIndexVariable2]
-def f():
- for x in None:
- pass
- for x in None:
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- ForStmt:2(
- NameExpr(x* [l])
- NameExpr(None [builtins.None])
- Block:2(
- PassStmt:3()))
- ForStmt:4(
- NameExpr(x [l])
- NameExpr(None [builtins.None])
- Block:4(
- PassStmt:5())))))
-
-[case testLoopWithElse]
-for x in []:
- pass
-else:
- x
-while 1:
- pass
-else:
- x
-[out]
-MypyFile:1(
- ForStmt:1(
- NameExpr(x* [__main__.x])
- ListExpr:1()
- Block:1(
- PassStmt:2())
- Else(
- ExpressionStmt:4(
- NameExpr(x [__main__.x]))))
- WhileStmt:5(
- IntExpr(1)
- Block:5(
- PassStmt:6())
- Else(
- ExpressionStmt:8(
- NameExpr(x [__main__.x])))))
-
-[case testBreak]
-while 1:
- break
-for x in []:
- break
-[out]
-MypyFile:1(
- WhileStmt:1(
- IntExpr(1)
- Block:1(
- BreakStmt:2()))
- ForStmt:3(
- NameExpr(x* [__main__.x])
- ListExpr:3()
- Block:3(
- BreakStmt:4())))
-
-[case testContinue]
-while 1:
- continue
-for x in []:
- continue
-[out]
-MypyFile:1(
- WhileStmt:1(
- IntExpr(1)
- Block:1(
- ContinueStmt:2()))
- ForStmt:3(
- NameExpr(x* [__main__.x])
- ListExpr:3()
- Block:3(
- ContinueStmt:4())))
-
-[case testIf]
-x = 1
-if x:
- x
-elif x:
- x
-elif x:
- x
-else:
- x
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- IntExpr(1))
- IfStmt:2(
- If(
- NameExpr(x [__main__.x]))
- Then(
- ExpressionStmt:3(
- NameExpr(x [__main__.x])))
- Else(
- IfStmt:4(
- If(
- NameExpr(x [__main__.x]))
- Then(
- ExpressionStmt:5(
- NameExpr(x [__main__.x])))
- Else(
- IfStmt:6(
- If(
- NameExpr(x [__main__.x]))
- Then(
- ExpressionStmt:7(
- NameExpr(x [__main__.x])))
- Else(
- ExpressionStmt:9(
- NameExpr(x [__main__.x])))))))))
-
-[case testSimpleIf]
-if object:
- object
-[out]
-MypyFile:1(
- IfStmt:1(
- If(
- NameExpr(object [builtins.object]))
- Then(
- ExpressionStmt:2(
- NameExpr(object [builtins.object])))))
-
-[case testLvalues]
-x = y = 1
-x = 1
-x.m = 1
-x[y] = 1
-x, y = 1
-[x, y] = 1
-(x, y) = 1
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- Lvalues(
- NameExpr(x* [__main__.x])
- NameExpr(y* [__main__.y]))
- IntExpr(1))
- AssignmentStmt:2(
- NameExpr(x [__main__.x])
- IntExpr(1))
- AssignmentStmt:3(
- MemberExpr:3(
- NameExpr(x [__main__.x])
- m)
- IntExpr(1))
- AssignmentStmt:4(
- IndexExpr:4(
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y]))
- IntExpr(1))
- AssignmentStmt:5(
- TupleExpr:5(
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y]))
- IntExpr(1))
- AssignmentStmt:6(
- ListExpr:6(
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y]))
- IntExpr(1))
- AssignmentStmt:7(
- TupleExpr:7(
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y]))
- IntExpr(1)))
-
-[case testStarLvalues]
-*x, y = 1
-*x, (y, *z) = 1
-*(x, q), r = 1
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- TupleExpr:1(
- StarExpr:1(
- NameExpr(x* [__main__.x]))
- NameExpr(y* [__main__.y]))
- IntExpr(1))
- AssignmentStmt:2(
- TupleExpr:2(
- StarExpr:2(
- NameExpr(x [__main__.x]))
- TupleExpr:2(
- NameExpr(y [__main__.y])
- StarExpr:2(
- NameExpr(z* [__main__.z]))))
- IntExpr(1))
- AssignmentStmt:3(
- TupleExpr:3(
- StarExpr:3(
- TupleExpr:3(
- NameExpr(x [__main__.x])
- NameExpr(q* [__main__.q])))
- NameExpr(r* [__main__.r]))
- IntExpr(1)))
-
-[case testMultipleDefinition]
-x, y = 1
-x, y = 2
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- TupleExpr:1(
- NameExpr(x* [__main__.x])
- NameExpr(y* [__main__.y]))
- IntExpr(1))
- AssignmentStmt:2(
- TupleExpr:2(
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y]))
- IntExpr(2)))
-
-[case testComplexDefinitions]
-(x) = 1
-([y]) = 2
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- IntExpr(1))
- AssignmentStmt:2(
- ListExpr:2(
- NameExpr(y* [__main__.y]))
- IntExpr(2)))
-
-[case testLocalComplexDefinition]
-def f():
- (x) = 1
- x
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- AssignmentStmt:2(
- NameExpr(x* [l])
- IntExpr(1))
- ExpressionStmt:3(
- NameExpr(x [l])))))
-
-[case testMultipleDefOnlySomeNew]
-x = 1
-y, x = 1
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- IntExpr(1))
- AssignmentStmt:2(
- TupleExpr:2(
- NameExpr(y* [__main__.y])
- NameExpr(x [__main__.x]))
- IntExpr(1)))
-
-[case testMultipleDefOnlySomeNewNestedTuples]
-x = 1
-y, (x, z) = 1
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- IntExpr(1))
- AssignmentStmt:2(
- TupleExpr:2(
- NameExpr(y* [__main__.y])
- TupleExpr:2(
- NameExpr(x [__main__.x])
- NameExpr(z* [__main__.z])))
- IntExpr(1)))
-
-[case testMultipleDefOnlySomeNewNestedLists]
-x = 1
-y, [x, z] = 1
-[p, [x, r]] = 1
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- IntExpr(1))
- AssignmentStmt:2(
- TupleExpr:2(
- NameExpr(y* [__main__.y])
- ListExpr:2(
- NameExpr(x [__main__.x])
- NameExpr(z* [__main__.z])))
- IntExpr(1))
- AssignmentStmt:3(
- ListExpr:3(
- NameExpr(p* [__main__.p])
- ListExpr:3(
- NameExpr(x [__main__.x])
- NameExpr(r* [__main__.r])))
- IntExpr(1)))
-
-[case testIndexedDel]
-x = y = 1
-del x[y]
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- Lvalues(
- NameExpr(x* [__main__.x])
- NameExpr(y* [__main__.y]))
- IntExpr(1))
- DelStmt:2(
- IndexExpr:2(
- NameExpr(x [__main__.x])
- NameExpr(y [__main__.y]))))
-
-[case testDelGlobalName]
-x = 1
-del x
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(x* [__main__.x])
- IntExpr(1))
- DelStmt:2(
- NameExpr(x [__main__.x])))
-
-[case testDelLocalName]
-def f(x):
- del x
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x))
- Block:1(
- DelStmt:2(
- NameExpr(x [l])))))
-
-[case testDelMultipleThings]
-def f(x, y):
- del x, y[0]
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(x)
- Var(y))
- Block:1(
- DelStmt:2(
- TupleExpr:2(
- NameExpr(x [l])
- IndexExpr:2(
- NameExpr(y [l])
- IntExpr(0)))))))
-
-[case testDelMultipleThingsInvalid]
-def f(x, y) -> None:
- del x, y + 1
-[out]
-main:2: error: can't delete operator
-
-[case testTry]
-class c: pass
-try:
- c
-except object:
- c
-except c as e:
- e
-except:
- c
-finally:
- c
-[out]
-MypyFile:1(
- ClassDef:1(
- c
- PassStmt:1())
- TryStmt:2(
- Block:2(
- ExpressionStmt:3(
- NameExpr(c [__main__.c])))
- NameExpr(object [builtins.object])
- Block:4(
- ExpressionStmt:5(
- NameExpr(c [__main__.c])))
- NameExpr(c [__main__.c])
- NameExpr(e* [__main__.e])
- Block:6(
- ExpressionStmt:7(
- NameExpr(e [__main__.e])))
- Block:8(
- ExpressionStmt:9(
- NameExpr(c [__main__.c])))
- Finally(
- ExpressionStmt:11(
- NameExpr(c [__main__.c])))))
-
-[case testTryElse]
-try:
- pass
-except:
- pass
-else:
- object
-[out]
-MypyFile:1(
- TryStmt:1(
- Block:1(
- PassStmt:2())
- Block:3(
- PassStmt:4())
- Else(
- ExpressionStmt:6(
- NameExpr(object [builtins.object])))))
-
-[case testTryWithOnlyFinally]
-try:
- pass
-finally:
- pass
-[out]
-MypyFile:1(
- TryStmt:1(
- Block:1(
- PassStmt:2())
- Finally(
- PassStmt:4())))
-
-[case testExceptWithMultipleTypes]
-class c: pass
-try:
- pass
-except (c, object) as e:
- e
-[out]
-MypyFile:1(
- ClassDef:1(
- c
- PassStmt:1())
- TryStmt:2(
- Block:2(
- PassStmt:3())
- TupleExpr:4(
- NameExpr(c [__main__.c])
- NameExpr(object [builtins.object]))
- NameExpr(e* [__main__.e])
- Block:4(
- ExpressionStmt:5(
- NameExpr(e [__main__.e])))))
-
-[case testRaiseWithoutExpr]
-raise
-[out]
-MypyFile:1(
- RaiseStmt:1())
-
-[case testWith]
-with object:
- object
-[out]
-MypyFile:1(
- WithStmt:1(
- Expr(
- NameExpr(object [builtins.object]))
- Block:1(
- ExpressionStmt:2(
- NameExpr(object [builtins.object])))))
-
-[case testWithAndVariable]
-with object as x:
- x
-[out]
-MypyFile:1(
- WithStmt:1(
- Expr(
- NameExpr(object [builtins.object]))
- Target(
- NameExpr(x* [__main__.x]))
- Block:1(
- ExpressionStmt:2(
- NameExpr(x [__main__.x])))))
-
-[case testWithInFunction]
-def f():
- with f as x:
- x
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Block:1(
- WithStmt:2(
- Expr(
- NameExpr(f [__main__.f]))
- Target(
- NameExpr(x* [l]))
- Block:2(
- ExpressionStmt:3(
- NameExpr(x [l])))))))
-
-[case testComplexWith]
-with object, object:
- pass
-with object as a, object as b:
- pass
-[out]
-MypyFile:1(
- WithStmt:1(
- Expr(
- NameExpr(object [builtins.object]))
- Expr(
- NameExpr(object [builtins.object]))
- Block:1(
- PassStmt:2()))
- WithStmt:3(
- Expr(
- NameExpr(object [builtins.object]))
- Target(
- NameExpr(a* [__main__.a]))
- Expr(
- NameExpr(object [builtins.object]))
- Target(
- NameExpr(b* [__main__.b]))
- Block:3(
- PassStmt:4())))
-
-[case testVariableInBlock]
-while object:
- x = None
- x = x
-[out]
-MypyFile:1(
- WhileStmt:1(
- NameExpr(object [builtins.object])
- Block:1(
- AssignmentStmt:2(
- NameExpr(x* [__main__.x])
- NameExpr(None [builtins.None]))
- AssignmentStmt:3(
- NameExpr(x [__main__.x])
- NameExpr(x [__main__.x])))))
-
-[case testVariableInExceptHandler]
-try:
- pass
-except object as o:
- x = None
- o = x
-[out]
-MypyFile:1(
- TryStmt:1(
- Block:1(
- PassStmt:2())
- NameExpr(object [builtins.object])
- NameExpr(o* [__main__.o])
- Block:3(
- AssignmentStmt:4(
- NameExpr(x* [__main__.x])
- NameExpr(None [builtins.None]))
- AssignmentStmt:5(
- NameExpr(o [__main__.o])
- NameExpr(x [__main__.x])))))
-
-[case testCallInExceptHandler]
-try:
- pass
-except object as o:
- o = object()
-[out]
-MypyFile:1(
- TryStmt:1(
- Block:1(
- PassStmt:2())
- NameExpr(object [builtins.object])
- NameExpr(o* [__main__.o])
- Block:3(
- AssignmentStmt:4(
- NameExpr(o [__main__.o])
- CallExpr:4(
- NameExpr(object [builtins.object])
- Args())))))
-
-[case testTryExceptWithMultipleHandlers]
-try:
- pass
-except BaseException as e:
- pass
-except Err as f:
- f = BaseException() # Fail
- f = Err()
-class Err(BaseException): pass
-[builtins fixtures/exception.pyi]
-[out]
-MypyFile:1(
- TryStmt:1(
- Block:1(
- PassStmt:2())
- NameExpr(BaseException [builtins.BaseException])
- NameExpr(e* [__main__.e])
- Block:3(
- PassStmt:4())
- NameExpr(Err [__main__.Err])
- NameExpr(f* [__main__.f])
- Block:5(
- AssignmentStmt:6(
- NameExpr(f [__main__.f])
- CallExpr:6(
- NameExpr(BaseException [builtins.BaseException])
- Args()))
- AssignmentStmt:7(
- NameExpr(f [__main__.f])
- CallExpr:7(
- NameExpr(Err [__main__.Err])
- Args()))))
- ClassDef:8(
- Err
- BaseType(
- builtins.BaseException)
- PassStmt:8()))
-
-[case testMultipleAssignmentWithPartialNewDef]
-o = None
-x, o = o, o
-[out]
-MypyFile:1(
- AssignmentStmt:1(
- NameExpr(o* [__main__.o])
- NameExpr(None [builtins.None]))
- AssignmentStmt:2(
- TupleExpr:2(
- NameExpr(x* [__main__.x])
- NameExpr(o [__main__.o]))
- TupleExpr:2(
- NameExpr(o [__main__.o])
- NameExpr(o [__main__.o]))))
-
-[case testFunctionDecorator]
-def decorate(f): pass
-@decorate
-def g():
- g()
-[out]
-MypyFile:1(
- FuncDef:1(
- decorate
- Args(
- Var(f))
- Block:1(
- PassStmt:1()))
- Decorator:2(
- Var(g)
- NameExpr(decorate [__main__.decorate])
- FuncDef:3(
- g
- Block:3(
- ExpressionStmt:4(
- CallExpr:4(
- NameExpr(g [__main__.g])
- Args()))))))
-
-[case testTryWithinFunction]
-def f() -> None:
- try:
- pass
- except object as o:
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- def ()
- Block:1(
- TryStmt:2(
- Block:2(
- PassStmt:3())
- NameExpr(object [builtins.object])
- NameExpr(o* [l])
- Block:4(
- PassStmt:5())))))
-
-[case testReuseExceptionVariable]
-def f() -> None:
- try:
- pass
- except object as o:
- pass
- except object as o:
- pass
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- def ()
- Block:1(
- TryStmt:2(
- Block:2(
- PassStmt:3())
- NameExpr(object [builtins.object])
- NameExpr(o* [l])
- Block:4(
- PassStmt:5())
- NameExpr(object [builtins.object])
- NameExpr(o [l])
- Block:6(
- PassStmt:7())))))
-
-[case testWithMultiple]
-def f(a):
- pass
-def main():
- with f(0) as a, f(a) as b:
- x = a, b
-[out]
-MypyFile:1(
- FuncDef:1(
- f
- Args(
- Var(a))
- Block:1(
- PassStmt:2()))
- FuncDef:3(
- main
- Block:3(
- WithStmt:4(
- Expr(
- CallExpr:4(
- NameExpr(f [__main__.f])
- Args(
- IntExpr(0))))
- Target(
- NameExpr(a* [l]))
- Expr(
- CallExpr:4(
- NameExpr(f [__main__.f])
- Args(
- NameExpr(a [l]))))
- Target(
- NameExpr(b* [l]))
- Block:4(
- AssignmentStmt:5(
- NameExpr(x* [l])
- TupleExpr:5(
- NameExpr(a [l])
- NameExpr(b [l]))))))))
diff --git a/test-data/unit/semanal-symtable.test b/test-data/unit/semanal-symtable.test
deleted file mode 100644
index 4821635..0000000
--- a/test-data/unit/semanal-symtable.test
+++ /dev/null
@@ -1,52 +0,0 @@
-[case testEmptyFile]
-[out]
--- Note that builtins are ignored to simplify output.
-__main__:
- SymbolTable()
-
-[case testVarDef]
-x = 1
-[out]
-__main__:
- SymbolTable(
- x : Gdef/Var (__main__))
-
-[case testFuncDef]
-def f(): pass
-[out]
-__main__:
- SymbolTable(
- f : Gdef/FuncDef (__main__))
-
-[case testEmptyClassDef]
-class c: pass
-[out]
-__main__:
- SymbolTable(
- c : Gdef/TypeInfo (__main__))
-
-[case testImport]
-import m
-[file m.py]
-x = 1
-[out]
-__main__:
- SymbolTable(
- m : ModuleRef/MypyFile (__main__))
-m:
- SymbolTable(
- x : Gdef/Var (m))
-
-[case testImportFromModule]
-from m import x
-[file m.py]
-class x: pass
-y = 1
-[out]
-__main__:
- SymbolTable(
- x : Gdef/TypeInfo (__main__))
-m:
- SymbolTable(
- x : Gdef/TypeInfo (m)
- y : Gdef/Var (m))
diff --git a/test-data/unit/semanal-typealiases.test b/test-data/unit/semanal-typealiases.test
deleted file mode 100644
index 5178a71..0000000
--- a/test-data/unit/semanal-typealiases.test
+++ /dev/null
@@ -1,440 +0,0 @@
-[case testListTypeAlias]
-from typing import List
-def f() -> List[int]: pass
-[builtins fixtures/list.pyi]
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [List])
- FuncDef:2(
- f
- def () -> builtins.list[builtins.int]
- Block:2(
- PassStmt:2())))
-
-[case testDictTypeAlias]
-from typing import Dict
-def f() -> Dict[int, str]: pass
-[builtins fixtures/dict.pyi]
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Dict])
- FuncDef:2(
- f
- def () -> builtins.dict[builtins.int, builtins.str]
- Block:2(
- PassStmt:2())))
-
-[case testQualifiedTypeAlias]
-import typing
-def f() -> typing.List[int]: pass
-[builtins fixtures/list.pyi]
-[out]
-MypyFile:1(
- Import:1(typing)
- FuncDef:2(
- f
- def () -> builtins.list[builtins.int]
- Block:2(
- PassStmt:2())))
-
-[case testTypeApplicationWithTypeAlias]
-from typing import List
-List[List[int]]
-[builtins fixtures/list.pyi]
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [List])
- ExpressionStmt:2(
- TypeApplication:2(
- NameExpr(List [builtins.list])
- Types(
- builtins.list[builtins.int]))))
-
-[case testTypeApplicationWithQualifiedTypeAlias]
-import typing
-typing.List[typing.List[int]]
-[builtins fixtures/list.pyi]
-[out]
-MypyFile:1(
- Import:1(typing)
- ExpressionStmt:2(
- TypeApplication:2(
- MemberExpr:2(
- NameExpr(typing)
- List [builtins.list])
- Types(
- builtins.list[builtins.int]))))
-
-[case testSimpleTypeAlias]
-import typing
-class A: pass
-A2 = A
-def f(x: A2) -> A: pass
-[out]
-MypyFile:1(
- Import:1(typing)
- ClassDef:2(
- A
- PassStmt:2())
- AssignmentStmt:3(
- NameExpr(A2* [__main__.A2])
- NameExpr(A [__main__.A]))
- FuncDef:4(
- f
- Args(
- Var(x))
- def (x: __main__.A) -> __main__.A
- Block:4(
- PassStmt:4())))
-
-[case testQualifiedSimpleTypeAlias]
-import typing
-import _m
-A2 = _m.A
-x = 1 # type: A2
-[file _m.py]
-import typing
-class A: pass
-[out]
-MypyFile:1(
- Import:1(typing)
- Import:2(_m)
- AssignmentStmt:3(
- NameExpr(A2* [__main__.A2])
- MemberExpr:3(
- NameExpr(_m)
- A [_m.A]))
- AssignmentStmt:4(
- NameExpr(x [__main__.x])
- IntExpr(1)
- _m.A))
-
-[case testUnionTypeAlias]
-from typing import Union
-U = Union[int, str]
-def f(x: U) -> None: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Union])
- AssignmentStmt:2(
- NameExpr(U* [__main__.U])
- TypeAliasExpr(Union[builtins.int, builtins.str]))
- FuncDef:3(
- f
- Args(
- Var(x))
- def (x: Union[builtins.int, builtins.str])
- Block:3(
- PassStmt:3())))
-
-[case testUnionTypeAlias2]
-from typing import Union
-class A: pass
-U = Union[int, A]
-def f(x: U) -> None: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Union])
- ClassDef:2(
- A
- PassStmt:2())
- AssignmentStmt:3(
- NameExpr(U* [__main__.U])
- TypeAliasExpr(Union[builtins.int, __main__.A]))
- FuncDef:4(
- f
- Args(
- Var(x))
- def (x: Union[builtins.int, __main__.A])
- Block:4(
- PassStmt:4())))
-
-[case testUnionTypeAliasWithQualifiedUnion]
-import typing
-U = typing.Union[int, str]
-def f(x: U) -> None: pass
-[out]
-MypyFile:1(
- Import:1(typing)
- AssignmentStmt:2(
- NameExpr(U* [__main__.U])
- TypeAliasExpr(Union[builtins.int, builtins.str]))
- FuncDef:3(
- f
- Args(
- Var(x))
- def (x: Union[builtins.int, builtins.str])
- Block:3(
- PassStmt:3())))
-
-[case testTupleTypeAlias]
-from typing import Tuple
-T = Tuple[int, str]
-def f(x: T) -> None: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Tuple])
- AssignmentStmt:2(
- NameExpr(T* [__main__.T])
- TypeAliasExpr(Tuple[builtins.int, builtins.str]))
- FuncDef:3(
- f
- Args(
- Var(x))
- def (x: Tuple[builtins.int, builtins.str])
- Block:3(
- PassStmt:3())))
-
-[case testCallableTypeAlias]
-from typing import Callable
-C = Callable[[int], None]
-def f(x: C) -> None: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Callable])
- AssignmentStmt:2(
- NameExpr(C* [__main__.C])
- TypeAliasExpr(def (builtins.int)))
- FuncDef:3(
- f
- Args(
- Var(x))
- def (x: def (builtins.int))
- Block:3(
- PassStmt:3())))
-
-[case testGenericTypeAlias]
-from typing import Generic, TypeVar
-T = TypeVar('T')
-class G(Generic[T]): pass
-A = G[int]
-def f(x: A) -> None: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Generic, TypeVar])
- AssignmentStmt:2(
- NameExpr(T* [__main__.T])
- TypeVarExpr:2())
- ClassDef:3(
- G
- TypeVars(
- T)
- PassStmt:3())
- AssignmentStmt:4(
- NameExpr(A* [__main__.A])
- TypeAliasExpr(__main__.G[builtins.int]))
- FuncDef:5(
- f
- Args(
- Var(x))
- def (x: __main__.G[builtins.int])
- Block:5(
- PassStmt:5())))
-
-[case testGenericTypeAlias2]
-from typing import List
-A = List[int]
-def f(x: A) -> None: pass
-[builtins fixtures/list.pyi]
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [List])
- AssignmentStmt:2(
- NameExpr(A* [__main__.A])
- TypeAliasExpr(builtins.list[builtins.int]))
- FuncDef:3(
- f
- Args(
- Var(x))
- def (x: builtins.list[builtins.int])
- Block:3(
- PassStmt:3())))
-
-[case testImportUnionTypeAlias]
-import typing
-from _m import U
-def f(x: U) -> None: pass
-[file _m.py]
-from typing import Union
-class A: pass
-U = Union[int, A]
-[out]
-MypyFile:1(
- Import:1(typing)
- ImportFrom:2(_m, [U])
- FuncDef:3(
- f
- Args(
- Var(x))
- def (x: Union[builtins.int, _m.A])
- Block:3(
- PassStmt:3())))
-
-[case testImportUnionTypeAlias2]
-import typing
-import _m
-def f(x: _m.U) -> None: pass
-[file _m.py]
-from typing import Union
-class A: pass
-U = Union[int, A]
-[out]
-MypyFile:1(
- Import:1(typing)
- Import:2(_m)
- FuncDef:3(
- f
- Args(
- Var(x))
- def (x: Union[builtins.int, _m.A])
- Block:3(
- PassStmt:3())))
-
-[case testImportSimpleTypeAlias]
-import typing
-from _m import A
-def f(x: A) -> None: pass
-[file _m.py]
-import typing
-A = int
-[out]
-MypyFile:1(
- Import:1(typing)
- ImportFrom:2(_m, [A])
- FuncDef:3(
- f
- Args(
- Var(x))
- def (x: builtins.int)
- Block:3(
- PassStmt:3())))
-
-[case testImportSimpleTypeAlias2]
-import typing
-import _m
-def f(x: _m.A) -> None: pass
-[file _m.py]
-import typing
-A = int
-[out]
-MypyFile:1(
- Import:1(typing)
- Import:2(_m)
- FuncDef:3(
- f
- Args(
- Var(x))
- def (x: builtins.int)
- Block:3(
- PassStmt:3())))
-
-[case testAnyTypeAlias]
-from typing import Any
-A = Any
-a = 1 # type: A
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Any])
- AssignmentStmt:2(
- NameExpr(A* [__main__.A])
- NameExpr(Any [typing.Any]))
- AssignmentStmt:3(
- NameExpr(a [__main__.a])
- IntExpr(1)
- Any))
-
-[case testAnyTypeAlias2]
-import typing
-A = typing.Any
-a = 1 # type: A
-[out]
-MypyFile:1(
- Import:1(typing)
- AssignmentStmt:2(
- NameExpr(A* [__main__.A])
- MemberExpr:2(
- NameExpr(typing)
- Any [typing.Any]))
- AssignmentStmt:3(
- NameExpr(a [__main__.a])
- IntExpr(1)
- Any))
-
-[case testTypeAliasAlias]
-from typing import Union
-U = Union[int, str]
-U2 = U
-x = 1 # type: U2
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Union])
- AssignmentStmt:2(
- NameExpr(U* [__main__.U])
- TypeAliasExpr(Union[builtins.int, builtins.str]))
- AssignmentStmt:3(
- NameExpr(U2* [__main__.U2])
- NameExpr(U [__main__.U]))
- AssignmentStmt:4(
- NameExpr(x [__main__.x])
- IntExpr(1)
- Union[builtins.int, builtins.str]))
-
-[case testTypeAliasOfImportedAlias]
-from typing import Union
-from _m import U
-U2 = U
-x = 1 # type: U2
-[file _m.py]
-from typing import Union
-U = Union[int, str]
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Union])
- ImportFrom:2(_m, [U])
- AssignmentStmt:3(
- NameExpr(U2* [__main__.U2])
- NameExpr(U [_m.U]))
- AssignmentStmt:4(
- NameExpr(x [__main__.x])
- IntExpr(1)
- Union[builtins.int, builtins.str]))
-
-[case testListTypeDoesNotGenerateAlias]
-import typing
-A = [int, str]
-a = 1 # type: A # E: Invalid type "__main__.A"
-
-[case testCantUseStringLiteralAsTypeAlias]
-from typing import Union
-A = 'Union[int, str]'
-a = 1 # type: A # E: Invalid type "__main__.A"
-
-[case testStringLiteralTypeAsAliasComponent]
-from typing import Union
-A = Union['int', str]
-a = 1 # type: A
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Union])
- AssignmentStmt:2(
- NameExpr(A* [__main__.A])
- TypeAliasExpr(Union[builtins.int, builtins.str]))
- AssignmentStmt:3(
- NameExpr(a [__main__.a])
- IntExpr(1)
- Union[builtins.int, builtins.str]))
-
-[case testComplexTypeAlias]
-from typing import Union, Tuple, Any
-A = Union['int', Tuple[int, Any]]
-a = 1 # type: A
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Union, Tuple, Any])
- AssignmentStmt:2(
- NameExpr(A* [__main__.A])
- TypeAliasExpr(Union[builtins.int, Tuple[builtins.int, Any]]))
- AssignmentStmt:3(
- NameExpr(a [__main__.a])
- IntExpr(1)
- Union[builtins.int, Tuple[builtins.int, Any]]))
diff --git a/test-data/unit/semanal-typeddict.test b/test-data/unit/semanal-typeddict.test
deleted file mode 100644
index a0229d8..0000000
--- a/test-data/unit/semanal-typeddict.test
+++ /dev/null
@@ -1,81 +0,0 @@
--- Create Type
-
--- TODO: Implement support for this syntax.
---[case testCanCreateTypedDictTypeWithKeywordArguments]
---from mypy_extensions import TypedDict
---Point = TypedDict('Point', x=int, y=int)
---[builtins fixtures/dict.pyi]
---[out]
---MypyFile:1(
--- ImportFrom:1(mypy_extensions, [TypedDict])
--- AssignmentStmt:2(
--- NameExpr(Point* [__main__.Point])
--- TypedDictExpr:2(Point)))
-
--- TODO: Implement support for this syntax.
---[case testCanCreateTypedDictTypeWithDictCall]
---from mypy_extensions import TypedDict
---Point = TypedDict('Point', dict(x=int, y=int))
---[builtins fixtures/dict.pyi]
---[out]
---MypyFile:1(
--- ImportFrom:1(mypy_extensions, [TypedDict])
--- AssignmentStmt:2(
--- NameExpr(Point* [__main__.Point])
--- TypedDictExpr:2(Point)))
-
-[case testCanCreateTypedDictTypeWithDictLiteral]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int})
-[builtins fixtures/dict.pyi]
-[out]
-MypyFile:1(
- ImportFrom:1(mypy_extensions, [TypedDict])
- AssignmentStmt:2(
- NameExpr(Point* [__main__.Point])
- TypedDictExpr:2(Point)))
-
-
--- Create Type (Errors)
-
-[case testCannotCreateTypedDictTypeWithTooFewArguments]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point') # E: Too few arguments for TypedDict()
-[builtins fixtures/dict.pyi]
-
-[case testCannotCreateTypedDictTypeWithTooManyArguments]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int}, dict) # E: Too many arguments for TypedDict()
-[builtins fixtures/dict.pyi]
-
-[case testCannotCreateTypedDictTypeWithInvalidName]
-from mypy_extensions import TypedDict
-Point = TypedDict(dict, {'x': int, 'y': int}) # E: TypedDict() expects a string literal as the first argument
-[builtins fixtures/dict.pyi]
-
-[case testCannotCreateTypedDictTypeWithInvalidItems]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x'}) # E: TypedDict() expects a dictionary literal as the second argument
-[builtins fixtures/dict.pyi]
-
-[case testCannotCreateTypedDictTypeWithUnderscoreItemName]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': int, 'y': int, '_fallback': object}) # E: TypedDict() item names cannot start with an underscore: _fallback
-[builtins fixtures/dict.pyi]
-
--- NOTE: The following code works at runtime but is not yet supported by mypy.
--- Keyword arguments may potentially be supported in the future.
-[case testCannotCreateTypedDictTypeWithNonpositionalArgs]
-from mypy_extensions import TypedDict
-Point = TypedDict(typename='Point', fields={'x': int, 'y': int}) # E: Unexpected arguments to TypedDict()
-[builtins fixtures/dict.pyi]
-
-[case testCannotCreateTypedDictTypeWithInvalidItemName]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {int: int, int: int}) # E: Invalid TypedDict() field name
-[builtins fixtures/dict.pyi]
-
-[case testCannotCreateTypedDictTypeWithInvalidItemType]
-from mypy_extensions import TypedDict
-Point = TypedDict('Point', {'x': 1, 'y': 1}) # E: Invalid field type
-[builtins fixtures/dict.pyi]
diff --git a/test-data/unit/semanal-typeinfo.test b/test-data/unit/semanal-typeinfo.test
deleted file mode 100644
index 6bb62e1..0000000
--- a/test-data/unit/semanal-typeinfo.test
+++ /dev/null
@@ -1,80 +0,0 @@
-[case testEmptyFile]
-[out]
-TypeInfoMap()
-
-[case testEmptyClass]
-class c: pass
-[out]
-TypeInfoMap(
- __main__.c : TypeInfo(
- Name(__main__.c)
- Bases(builtins.object)
- Names()))
-
-[case testClassWithMethod]
-class c:
- def f(self): pass
-[out]
-TypeInfoMap(
- __main__.c : TypeInfo(
- Name(__main__.c)
- Bases(builtins.object)
- Names(
- f)))
-
-[case testClassWithAttributes]
-class c:
- def __init__(self, x):
- self.y = x
- self.z = 1
-[out]
-TypeInfoMap(
- __main__.c : TypeInfo(
- Name(__main__.c)
- Bases(builtins.object)
- Names(
- __init__
- y
- z)))
-
-[case testBaseClass]
-class base: pass
-class c(base): pass
-[out]
-TypeInfoMap(
- __main__.base : TypeInfo(
- Name(__main__.base)
- Bases(builtins.object)
- Names())
- __main__.c : TypeInfo(
- Name(__main__.c)
- Bases(__main__.base)
- Names()))
-
-[case testClassAndAbstractClass]
-from abc import abstractmethod, ABCMeta
-import typing
-
-class i(metaclass=ABCMeta): pass
-class c(i): pass
-[out]
-TypeInfoMap(
- __main__.c : TypeInfo(
- Name(__main__.c)
- Bases(__main__.i)
- Names())
- __main__.i : TypeInfo(
- Name(__main__.i)
- Bases(builtins.object)
- Names()))
-
-[case testAttributeWithoutType]
-class A:
- a = A
-[out]
-TypeInfoMap(
- __main__.A : TypeInfo(
- Name(__main__.A)
- Bases(builtins.object)
- Names(
- a)))
diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test
deleted file mode 100644
index beb3cbf..0000000
--- a/test-data/unit/semanal-types.test
+++ /dev/null
@@ -1,1465 +0,0 @@
-[case testVarWithType]
-import typing
-class A: pass
-x = A() # type: A
-y = x
-[out]
-MypyFile:1(
- Import:1(typing)
- ClassDef:2(
- A
- PassStmt:2())
- AssignmentStmt:3(
- NameExpr(x [__main__.x])
- CallExpr:3(
- NameExpr(A [__main__.A])
- Args())
- __main__.A)
- AssignmentStmt:4(
- NameExpr(y* [__main__.y])
- NameExpr(x [__main__.x])))
-
-[case testLocalVarWithType]
-class A: pass
-def f():
- x = None # type: A
- y = x
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- PassStmt:1())
- FuncDef:2(
- f
- Block:2(
- AssignmentStmt:3(
- NameExpr(x [l])
- NameExpr(None [builtins.None])
- __main__.A)
- AssignmentStmt:4(
- NameExpr(y* [l])
- NameExpr(x [l])))))
-
-[case testAnyType]
-from typing import Any
-x = None # type: Any
-y = x
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Any])
- AssignmentStmt:2(
- NameExpr(x [__main__.x])
- NameExpr(None [builtins.None])
- Any)
- AssignmentStmt:3(
- NameExpr(y* [__main__.y])
- NameExpr(x [__main__.x])))
-
-[case testMemberVarWithType]
-import typing
-class A:
- def __init__(self):
- self.x = None # type: int
-[out]
-MypyFile:1(
- Import:1(typing)
- ClassDef:2(
- A
- FuncDef:3(
- __init__
- Args(
- Var(self))
- Block:3(
- AssignmentStmt:4(
- MemberExpr:4(
- NameExpr(self [l])
- x)
- NameExpr(None [builtins.None])
- builtins.int)))))
-
-[case testClassVarWithType]
-import typing
-class A:
- x = None # type: int
- x = 1
-[out]
-MypyFile:1(
- Import:1(typing)
- ClassDef:2(
- A
- AssignmentStmt:3(
- NameExpr(x [m])
- NameExpr(None [builtins.None])
- builtins.int)
- AssignmentStmt:4(
- NameExpr(x [m])
- IntExpr(1))))
-
-[case testFunctionSig]
-from typing import Any
-class A: pass
-def f(x: A) -> A: pass
-def g(x: Any, y: A) -> None:
- z = x, y
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Any])
- ClassDef:2(
- A
- PassStmt:2())
- FuncDef:3(
- f
- Args(
- Var(x))
- def (x: __main__.A) -> __main__.A
- Block:3(
- PassStmt:3()))
- FuncDef:4(
- g
- Args(
- Var(x)
- Var(y))
- def (x: Any, y: __main__.A)
- Block:4(
- AssignmentStmt:5(
- NameExpr(z* [l])
- TupleExpr:5(
- NameExpr(x [l])
- NameExpr(y [l]))))))
-
-[case testBaseclass]
-class A: pass
-class B(A): pass
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- PassStmt:1())
- ClassDef:2(
- B
- BaseType(
- __main__.A)
- PassStmt:2()))
-
-[case testMultipleVarDef]
-
-class A: pass
-class B: pass
-a, b = None, None # type: (A, B)
-x = a, b
-[out]
-MypyFile:1(
- ClassDef:2(
- A
- PassStmt:2())
- ClassDef:3(
- B
- PassStmt:3())
- AssignmentStmt:4(
- TupleExpr:4(
- NameExpr(a [__main__.a])
- NameExpr(b [__main__.b]))
- TupleExpr:4(
- NameExpr(None [builtins.None])
- NameExpr(None [builtins.None]))
- Tuple[__main__.A, __main__.B])
- AssignmentStmt:5(
- NameExpr(x* [__main__.x])
- TupleExpr:5(
- NameExpr(a [__main__.a])
- NameExpr(b [__main__.b]))))
-
-[case testGenericType]
-from typing import TypeVar, Generic, Any
-
-t = TypeVar('t')
-
-class A(Generic[t]): pass
-class B: pass
-x = None # type: A[B]
-y = None # type: A[Any]
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Generic, Any])
- AssignmentStmt:3(
- NameExpr(t* [__main__.t])
- TypeVarExpr:3())
- ClassDef:5(
- A
- TypeVars(
- t)
- PassStmt:5())
- ClassDef:6(
- B
- PassStmt:6())
- AssignmentStmt:7(
- NameExpr(x [__main__.x])
- NameExpr(None [builtins.None])
- __main__.A[__main__.B])
- AssignmentStmt:8(
- NameExpr(y [__main__.y])
- NameExpr(None [builtins.None])
- __main__.A[Any]))
-
-[case testGenericType2]
-from typing import TypeVar, Generic, Any
-t = TypeVar('t')
-s = TypeVar('s')
-class A(Generic[t, s]): pass
-class B: pass
-x = None # type: A[B, Any]
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Generic, Any])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- AssignmentStmt:3(
- NameExpr(s* [__main__.s])
- TypeVarExpr:3())
- ClassDef:4(
- A
- TypeVars(
- t
- s)
- PassStmt:4())
- ClassDef:5(
- B
- PassStmt:5())
- AssignmentStmt:6(
- NameExpr(x [__main__.x])
- NameExpr(None [builtins.None])
- __main__.A[__main__.B, Any]))
-
-[case testAssignmentAfterDef]
-
-
-class A: pass
-a = None # type: A
-a = 1
-def f():
- b = None # type: A
- b = 1
-[out]
-MypyFile:1(
- ClassDef:3(
- A
- PassStmt:3())
- AssignmentStmt:4(
- NameExpr(a [__main__.a])
- NameExpr(None [builtins.None])
- __main__.A)
- AssignmentStmt:5(
- NameExpr(a [__main__.a])
- IntExpr(1))
- FuncDef:6(
- f
- Block:6(
- AssignmentStmt:7(
- NameExpr(b [l])
- NameExpr(None [builtins.None])
- __main__.A)
- AssignmentStmt:8(
- NameExpr(b [l])
- IntExpr(1)))))
-
-[case testCast]
-from typing import TypeVar, Generic, Any, cast
-t = TypeVar('t')
-class c: pass
-class d(Generic[t]): pass
-cast(Any, 1)
-cast(c, 1)
-cast(d[c], c)
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Generic, Any, cast])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- ClassDef:3(
- c
- PassStmt:3())
- ClassDef:4(
- d
- TypeVars(
- t)
- PassStmt:4())
- ExpressionStmt:5(
- CastExpr:5(
- IntExpr(1)
- Any))
- ExpressionStmt:6(
- CastExpr:6(
- IntExpr(1)
- __main__.c))
- ExpressionStmt:7(
- CastExpr:7(
- NameExpr(c [__main__.c])
- __main__.d[__main__.c])))
-
-[case testCastToQualifiedTypeAndCast]
-import typing
-import _m
-typing.cast(_m.C, object)
-[file _m.py]
-class C: pass
-[out]
-MypyFile:1(
- Import:1(typing)
- Import:2(_m)
- ExpressionStmt:3(
- CastExpr:3(
- NameExpr(object [builtins.object])
- _m.C)))
-
-[case testLongQualifiedCast]
-import typing
-import _m._n
-typing.cast(_m._n.C, object)
-[file _m/__init__.py]
-[file _m/_n.py]
-class C: pass
-[out]
-MypyFile:1(
- Import:1(typing)
- Import:2(_m._n)
- ExpressionStmt:3(
- CastExpr:3(
- NameExpr(object [builtins.object])
- _m._n.C)))
-
-[case testCastTargetWithTwoTypeArgs]
-from typing import TypeVar, Generic, cast
-t = TypeVar('t')
-s = TypeVar('s')
-class C(Generic[t, s]): pass
-cast(C[str, int], C)
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Generic, cast])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- AssignmentStmt:3(
- NameExpr(s* [__main__.s])
- TypeVarExpr:3())
- ClassDef:4(
- C
- TypeVars(
- t
- s)
- PassStmt:4())
- ExpressionStmt:5(
- CastExpr:5(
- NameExpr(C [__main__.C])
- __main__.C[builtins.str, builtins.int])))
-
-[case testCastToTupleType]
-from typing import Tuple, cast
-cast(Tuple[int, str], None)
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Tuple, cast])
- ExpressionStmt:2(
- CastExpr:2(
- NameExpr(None [builtins.None])
- Tuple[builtins.int, builtins.str])))
-
-[case testCastToFunctionType]
-from typing import Callable, cast
-cast(Callable[[int], str], None)
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Callable, cast])
- ExpressionStmt:2(
- CastExpr:2(
- NameExpr(None [builtins.None])
- def (builtins.int) -> builtins.str)))
-
-[case testCastToStringLiteralType]
-from typing import cast
-cast('int', 1)
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [cast])
- ExpressionStmt:2(
- CastExpr:2(
- IntExpr(1)
- builtins.int)))
-
-[case testFunctionTypeVariable]
-from typing import TypeVar
-t = TypeVar('t')
-def f(x: t) -> None:
- y = None # type: t
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- FuncDef:3(
- f
- Args(
- Var(x))
- def [t] (x: t`-1)
- Block:3(
- AssignmentStmt:4(
- NameExpr(y [l])
- NameExpr(None [builtins.None])
- t`-1))))
-
-[case testTwoFunctionTypeVariables]
-from typing import TypeVar
-t = TypeVar('t')
-u = TypeVar('u')
-def f(x: t, y: u, z: t) -> None: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- AssignmentStmt:3(
- NameExpr(u* [__main__.u])
- TypeVarExpr:3())
- FuncDef:4(
- f
- Args(
- Var(x)
- Var(y)
- Var(z))
- def [t, u] (x: t`-1, y: u`-2, z: t`-1)
- Block:4(
- PassStmt:4())))
-
-[case testNestedGenericFunctionTypeVariable]
-from typing import TypeVar, Generic
-t = TypeVar('t')
-class A(Generic[t]): pass
-def f(x: A[t], y) -> None: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Generic])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- ClassDef:3(
- A
- TypeVars(
- t)
- PassStmt:3())
- FuncDef:4(
- f
- Args(
- Var(x)
- Var(y))
- def [t] (x: __main__.A[t`-1], y: Any)
- Block:4(
- PassStmt:4())))
-
-[case testNestedGenericFunctionTypeVariable2]
-from typing import TypeVar, Tuple, Generic
-t = TypeVar('t')
-class A(Generic[t]): pass
-def f(x: Tuple[int, t]) -> None: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Tuple, Generic])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- ClassDef:3(
- A
- TypeVars(
- t)
- PassStmt:3())
- FuncDef:4(
- f
- Args(
- Var(x))
- def [t] (x: Tuple[builtins.int, t`-1])
- Block:4(
- PassStmt:4())))
-
-[case testNestedGenericFunctionTypeVariable3]
-from typing import TypeVar, Callable, Generic
-t = TypeVar('t')
-class A(Generic[t]): pass
-def f(x: Callable[[int, t], int]) -> None: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Callable, Generic])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- ClassDef:3(
- A
- TypeVars(
- t)
- PassStmt:3())
- FuncDef:4(
- f
- Args(
- Var(x))
- def [t] (x: def (builtins.int, t`-1) -> builtins.int)
- Block:4(
- PassStmt:4())))
-
-[case testNestedGenericFunctionTypeVariable4]
-from typing import TypeVar, Callable, Generic
-t = TypeVar('t')
-class A(Generic[t]): pass
-def f(x: Callable[[], t]) -> None: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Callable, Generic])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- ClassDef:3(
- A
- TypeVars(
- t)
- PassStmt:3())
- FuncDef:4(
- f
- Args(
- Var(x))
- def [t] (x: def () -> t`-1)
- Block:4(
- PassStmt:4())))
-
-[case testGenericFunctionTypeVariableInReturnType]
-from typing import TypeVar
-t = TypeVar('t')
-def f() -> t: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- FuncDef:3(
- f
- def [t] () -> t`-1
- Block:3(
- PassStmt:3())))
-
-[case testSelfType]
-class A:
- def f(self, o: object) -> None: pass
-[out]
-MypyFile:1(
- ClassDef:1(
- A
- FuncDef:2(
- f
- Args(
- Var(self)
- Var(o))
- def (self: __main__.A, o: builtins.object)
- Block:2(
- PassStmt:2()))))
-
-[case testNestedGenericFunction]
-from typing import TypeVar
-t = TypeVar('t')
-def f() -> None:
- def g() -> t: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- FuncDef:3(
- f
- def ()
- Block:3(
- FuncDef:4(
- g
- def [t] () -> t`-1
- Block:4(
- PassStmt:4())))))
-
-[case testClassTvar]
-from typing import TypeVar, Generic
-
-t = TypeVar('t')
-
-class c(Generic[t]):
- def f(self) -> t: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Generic])
- AssignmentStmt:3(
- NameExpr(t* [__main__.t])
- TypeVarExpr:3())
- ClassDef:5(
- c
- TypeVars(
- t)
- FuncDef:6(
- f
- Args(
- Var(self))
- def (self: __main__.c[t`1]) -> t`1
- Block:6(
- PassStmt:6()))))
-
-[case testClassTvar2]
-from typing import TypeVar, Generic
-
-t = TypeVar('t')
-s = TypeVar('s')
-
-class c(Generic[t, s]):
- def f(self, x: s) -> t: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Generic])
- AssignmentStmt:3(
- NameExpr(t* [__main__.t])
- TypeVarExpr:3())
- AssignmentStmt:4(
- NameExpr(s* [__main__.s])
- TypeVarExpr:4())
- ClassDef:6(
- c
- TypeVars(
- t
- s)
- FuncDef:7(
- f
- Args(
- Var(self)
- Var(x))
- def (self: __main__.c[t`1, s`2], x: s`2) -> t`1
- Block:7(
- PassStmt:7()))))
-
-[case testGenericBaseClass]
-from typing import TypeVar, Generic
-t = TypeVar('t')
-class d(Generic[t]): pass
-class c(d[t], Generic[t]): pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Generic])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- ClassDef:3(
- d
- TypeVars(
- t)
- PassStmt:3())
- ClassDef:4(
- c
- TypeVars(
- t)
- BaseType(
- __main__.d[t`1])
- PassStmt:4()))
-
-[case testTupleType]
-from typing import Tuple
-t = None # type: tuple
-t1 = None # type: Tuple[object]
-t2 = None # type: Tuple[int, object]
-[builtins fixtures/tuple.pyi]
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Tuple])
- AssignmentStmt:2(
- NameExpr(t [__main__.t])
- NameExpr(None [builtins.None])
- builtins.tuple[Any])
- AssignmentStmt:3(
- NameExpr(t1 [__main__.t1])
- NameExpr(None [builtins.None])
- Tuple[builtins.object])
- AssignmentStmt:4(
- NameExpr(t2 [__main__.t2])
- NameExpr(None [builtins.None])
- Tuple[builtins.int, builtins.object]))
-
-[case testVariableLengthTuple]
-from typing import Tuple
-t = None # type: Tuple[int, ...]
-[builtins fixtures/tuple.pyi]
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Tuple])
- AssignmentStmt:2(
- NameExpr(t [__main__.t])
- NameExpr(None [builtins.None])
- builtins.tuple[builtins.int]))
-
-[case testInvalidTupleType]
-from typing import Tuple
-t = None # type: Tuple[int, str, ...] # E: Unexpected '...'
-[out]
-
-[case testFunctionTypes]
-from typing import Callable
-f = None # type: Callable[[object, int], str]
-g = None # type: Callable[[], None]
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Callable])
- AssignmentStmt:2(
- NameExpr(f [__main__.f])
- NameExpr(None [builtins.None])
- def (builtins.object, builtins.int) -> builtins.str)
- AssignmentStmt:3(
- NameExpr(g [__main__.g])
- NameExpr(None [builtins.None])
- def ()))
-
-[case testOverloadedFunction]
-from typing import overload
-@overload
-def f(o: object) -> int: o
-@overload
-def f(a: str) -> object: a
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [overload])
- OverloadedFuncDef:2(
- Overload(def (o: builtins.object) -> builtins.int, \
- def (a: builtins.str) -> builtins.object)
- Decorator:2(
- Var(f)
- NameExpr(overload [typing.overload])
- FuncDef:3(
- f
- Args(
- Var(o))
- def (o: builtins.object) -> builtins.int
- Block:3(
- ExpressionStmt:3(
- NameExpr(o [l])))))
- Decorator:4(
- Var(f)
- NameExpr(overload [typing.overload])
- FuncDef:5(
- f
- Args(
- Var(a))
- def (a: builtins.str) -> builtins.object
- Block:5(
- ExpressionStmt:5(
- NameExpr(a [l])))))))
-
-[case testReferenceToOverloadedFunction]
-from typing import overload
-@overload
-def f() -> None: pass
-@overload
-def f(x: int) -> None: pass
-x = f
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [overload])
- OverloadedFuncDef:2(
- Overload(def (), def (x: builtins.int))
- Decorator:2(
- Var(f)
- NameExpr(overload [typing.overload])
- FuncDef:3(
- f
- def ()
- Block:3(
- PassStmt:3())))
- Decorator:4(
- Var(f)
- NameExpr(overload [typing.overload])
- FuncDef:5(
- f
- Args(
- Var(x))
- def (x: builtins.int)
- Block:5(
- PassStmt:5()))))
- AssignmentStmt:6(
- NameExpr(x* [__main__.x])
- NameExpr(f [__main__.f])))
-
-[case testNestedOverloadedFunction]
-from typing import overload
-def f():
- @overload
- def g(): pass
- @overload
- def g(x): pass
- y = g
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [overload])
- FuncDef:2(
- f
- Block:2(
- OverloadedFuncDef:3(
- Overload(def () -> Any, def (x: Any) -> Any)
- Decorator:3(
- Var(g)
- NameExpr(overload [typing.overload])
- FuncDef:4(
- g
- Block:4(
- PassStmt:4())))
- Decorator:5(
- Var(g)
- NameExpr(overload [typing.overload])
- FuncDef:6(
- g
- Args(
- Var(x))
- Block:6(
- PassStmt:6()))))
- AssignmentStmt:7(
- NameExpr(y* [l])
- NameExpr(g [l])))))
-
-[case testImplicitGenericTypeArgs]
-from typing import TypeVar, Generic
-t = TypeVar('t')
-s = TypeVar('s')
-class A(Generic[t, s]): pass
-x = None # type: A
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Generic])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- AssignmentStmt:3(
- NameExpr(s* [__main__.s])
- TypeVarExpr:3())
- ClassDef:4(
- A
- TypeVars(
- t
- s)
- PassStmt:4())
- AssignmentStmt:5(
- NameExpr(x [__main__.x])
- NameExpr(None [builtins.None])
- __main__.A[Any, Any]))
-
-[case testImplicitTypeArgsAndGenericBaseClass]
-from typing import TypeVar, Generic
-t = TypeVar('t')
-s = TypeVar('s')
-class B(Generic[s]): pass
-class A(B, Generic[t]): pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Generic])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- AssignmentStmt:3(
- NameExpr(s* [__main__.s])
- TypeVarExpr:3())
- ClassDef:4(
- B
- TypeVars(
- s)
- PassStmt:4())
- ClassDef:5(
- A
- TypeVars(
- t)
- BaseType(
- __main__.B[Any])
- PassStmt:5()))
-
-[case testTypeApplication]
-from typing import TypeVar, Generic
-t = TypeVar('t')
-class A(Generic[t]): pass
-x = A[int]()
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Generic])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- ClassDef:3(
- A
- TypeVars(
- t)
- PassStmt:3())
- AssignmentStmt:4(
- NameExpr(x* [__main__.x])
- CallExpr:4(
- TypeApplication:4(
- NameExpr(A [__main__.A])
- Types(
- builtins.int))
- Args())))
-
-[case testTypeApplicationWithTwoTypeArgs]
-from typing import TypeVar, Generic, Any
-t = TypeVar('t')
-s = TypeVar('s')
-class A(Generic[t, s]): pass
-x = A[int, Any]()
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Generic, Any])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- AssignmentStmt:3(
- NameExpr(s* [__main__.s])
- TypeVarExpr:3())
- ClassDef:4(
- A
- TypeVars(
- t
- s)
- PassStmt:4())
- AssignmentStmt:5(
- NameExpr(x* [__main__.x])
- CallExpr:5(
- TypeApplication:5(
- NameExpr(A [__main__.A])
- Types(
- builtins.int
- Any))
- Args())))
-
-[case testFunctionTypeApplication]
-from typing import TypeVar
-t = TypeVar('t')
-def f(x: t) -> None: pass
-f[int](1)
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- FuncDef:3(
- f
- Args(
- Var(x))
- def [t] (x: t`-1)
- Block:3(
- PassStmt:3()))
- ExpressionStmt:4(
- CallExpr:4(
- TypeApplication:4(
- NameExpr(f [__main__.f])
- Types(
- builtins.int))
- Args(
- IntExpr(1)))))
-
-[case testTypeApplicationWithStringLiteralType]
-from typing import TypeVar, Generic
-t = TypeVar('t')
-class A(Generic[t]): pass
-A['int']()
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Generic])
- AssignmentStmt:2(
- NameExpr(t* [__main__.t])
- TypeVarExpr:2())
- ClassDef:3(
- A
- TypeVars(
- t)
- PassStmt:3())
- ExpressionStmt:4(
- CallExpr:4(
- TypeApplication:4(
- NameExpr(A [__main__.A])
- Types(
- builtins.int))
- Args())))
-
-[case testVarArgsAndKeywordArgs]
-def g(*x: int, y: str = ''): pass
-[out]
-MypyFile:1(
- FuncDef:1(
- g
- MaxPos(0)
- Args(
- Var(y))
- def (*x: builtins.int, *, y: builtins.str =) -> Any
- Init(
- AssignmentStmt:1(
- NameExpr(y [l])
- StrExpr()))
- VarArg(
- Var(x))
- Block:1(
- PassStmt:1())))
-
-[case testQualifiedGeneric]
-from typing import TypeVar
-import typing
-T = TypeVar('T')
-class A(typing.Generic[T]): pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar])
- Import:2(typing)
- AssignmentStmt:3(
- NameExpr(T* [__main__.T])
- TypeVarExpr:3())
- ClassDef:4(
- A
- TypeVars(
- T)
- PassStmt:4()))
-
-[case testQualifiedTypevar]
-import typing
-T = typing.TypeVar('T')
-def f(x: T) -> T: pass
-[out]
-MypyFile:1(
- Import:1(typing)
- AssignmentStmt:2(
- NameExpr(T* [__main__.T])
- TypeVarExpr:2())
- FuncDef:3(
- f
- Args(
- Var(x))
- def [T] (x: T`-1) -> T`-1
- Block:3(
- PassStmt:3())))
-
-[case testAliasedTypevar]
-from typing import TypeVar as tv
-T = tv('T')
-def f(x: T) -> T: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar : tv])
- AssignmentStmt:2(
- NameExpr(T* [__main__.T])
- TypeVarExpr:2())
- FuncDef:3(
- f
- Args(
- Var(x))
- def [T] (x: T`-1) -> T`-1
- Block:3(
- PassStmt:3())))
-
-[case testLocalTypevar]
-from typing import TypeVar
-def f():
- T = TypeVar('T')
- def g(x: T) -> T: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar])
- FuncDef:2(
- f
- Block:2(
- AssignmentStmt:3(
- NameExpr(T* [l])
- TypeVarExpr:3())
- FuncDef:4(
- g
- Args(
- Var(x))
- def [T] (x: T`-1) -> T`-1
- Block:4(
- PassStmt:4())))))
-
-[case testClassLevelTypevar]
-from typing import TypeVar
-class A:
- T = TypeVar('T')
- def g(self, x: T) -> T: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar])
- ClassDef:2(
- A
- AssignmentStmt:3(
- NameExpr(T* [m])
- TypeVarExpr:3())
- FuncDef:4(
- g
- Args(
- Var(self)
- Var(x))
- def [T] (self: __main__.A, x: T`-1) -> T`-1
- Block:4(
- PassStmt:4()))))
-
-[case testImportTypevar]
-from typing import Generic
-from _m import T
-class A(Generic[T]):
- y = None # type: T
-[file _m.py]
-from typing import TypeVar
-T = TypeVar('T')
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Generic])
- ImportFrom:2(_m, [T])
- ClassDef:3(
- A
- TypeVars(
- T)
- AssignmentStmt:4(
- NameExpr(y [m])
- NameExpr(None [builtins.None])
- T`1)))
-
-[case testQualifiedReferenceToTypevarInClass]
-from typing import Generic
-import _m
-class A(Generic[_m.T]):
- a = None # type: _m.T
- def f(self, x: _m.T):
- b = None # type: _m.T
-[file _m.py]
-from typing import TypeVar
-T = TypeVar('T')
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Generic])
- Import:2(_m)
- ClassDef:3(
- A
- TypeVars(
- _m.T)
- AssignmentStmt:4(
- NameExpr(a [m])
- NameExpr(None [builtins.None])
- _m.T`1)
- FuncDef:5(
- f
- Args(
- Var(self)
- Var(x))
- def (self: __main__.A[_m.T`1], x: _m.T`1) -> Any
- Block:5(
- AssignmentStmt:6(
- NameExpr(b [l])
- NameExpr(None [builtins.None])
- _m.T`1)))))
-
-[case testQualifiedReferenceToTypevarInFunctionSignature]
-import _m
-def f(x: _m.T) -> None:
- a = None # type: _m.T
-[file _m.py]
-from typing import TypeVar
-T = TypeVar('T')
-[out]
-MypyFile:1(
- Import:1(_m)
- FuncDef:2(
- f
- Args(
- Var(x))
- def [_m.T] (x: _m.T`-1)
- Block:2(
- AssignmentStmt:3(
- NameExpr(a [l])
- NameExpr(None [builtins.None])
- _m.T`-1))))
-
-[case testFunctionCommentAnnotation]
-from typing import Any
-def f(x): # type: (int) -> Any
- x = 1
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Any])
- FuncDef:2(
- f
- Args(
- Var(x))
- def (x: builtins.int) -> Any
- Block:2(
- AssignmentStmt:3(
- NameExpr(x [l])
- IntExpr(1)))))
-
-[case testMethodCommentAnnotation]
-import typing
-class A:
- def f(self, x): # type: (int) -> str
- x = 1
-[out]
-MypyFile:1(
- Import:1(typing)
- ClassDef:2(
- A
- FuncDef:3(
- f
- Args(
- Var(self)
- Var(x))
- def (self: __main__.A, x: builtins.int) -> builtins.str
- Block:3(
- AssignmentStmt:4(
- NameExpr(x [l])
- IntExpr(1))))))
-
-[case testTypevarWithValues]
-from typing import TypeVar, Any
-T = TypeVar('T', int, str)
-S = TypeVar('S', Any, int, str)
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Any])
- AssignmentStmt:2(
- NameExpr(T* [__main__.T])
- TypeVarExpr:2(
- Values(
- builtins.int
- builtins.str)))
- AssignmentStmt:3(
- NameExpr(S* [__main__.S])
- TypeVarExpr:3(
- Values(
- Any
- builtins.int
- builtins.str))))
-
-[case testTypevarWithValuesAndVariance]
-from typing import TypeVar
-T = TypeVar('T', int, str, covariant=True)
-[builtins fixtures/bool.pyi]
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar])
- AssignmentStmt:2(
- NameExpr(T* [__main__.T])
- TypeVarExpr:2(
- Variance(COVARIANT)
- Values(
- builtins.int
- builtins.str))))
-
-[case testTypevarWithBound]
-from typing import TypeVar
-T = TypeVar('T', bound=int)
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar])
- AssignmentStmt:2(
- NameExpr(T* [__main__.T])
- TypeVarExpr:2(
- UpperBound(builtins.int))))
-
-[case testGenericFunctionWithValueSet]
-from typing import TypeVar
-T = TypeVar('T', int, str)
-def f(x: T) -> T: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar])
- AssignmentStmt:2(
- NameExpr(T* [__main__.T])
- TypeVarExpr:2(
- Values(
- builtins.int
- builtins.str)))
- FuncDef:3(
- f
- Args(
- Var(x))
- def [T in (builtins.int, builtins.str)] (x: T`-1) -> T`-1
- Block:3(
- PassStmt:3())))
-
-[case testGenericClassWithValueSet]
-from typing import TypeVar, Generic
-T = TypeVar('T', int, str)
-class C(Generic[T]): pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Generic])
- AssignmentStmt:2(
- NameExpr(T* [__main__.T])
- TypeVarExpr:2(
- Values(
- builtins.int
- builtins.str)))
- ClassDef:3(
- C
- TypeVars(
- T in (builtins.int, builtins.str))
- PassStmt:3()))
-
-[case testGenericFunctionWithBound]
-from typing import TypeVar
-T = TypeVar('T', bound=int)
-def f(x: T) -> T: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar])
- AssignmentStmt:2(
- NameExpr(T* [__main__.T])
- TypeVarExpr:2(
- UpperBound(builtins.int)))
- FuncDef:3(
- f
- Args(
- Var(x))
- def [T <: builtins.int] (x: T`-1) -> T`-1
- Block:3(
- PassStmt:3())))
-
-[case testGenericClassWithBound]
-from typing import TypeVar, Generic
-T = TypeVar('T', bound=int)
-class C(Generic[T]): pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar, Generic])
- AssignmentStmt:2(
- NameExpr(T* [__main__.T])
- TypeVarExpr:2(
- UpperBound(builtins.int)))
- ClassDef:3(
- C
- TypeVars(
- T <: builtins.int)
- PassStmt:3()))
-
-[case testSimpleDucktypeDecorator]
-from typing import _promote
-@_promote(str)
-class S: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [_promote])
- ClassDef:2(
- S
- Promote(builtins.str)
- Decorators(
- PromoteExpr:2(builtins.str))
- PassStmt:3()))
-
-[case testUnionType]
-from typing import Union
-def f(x: Union[int, str]) -> None: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Union])
- FuncDef:2(
- f
- Args(
- Var(x))
- def (x: Union[builtins.int, builtins.str])
- Block:2(
- PassStmt:2())))
-
-[case testUnionTypeWithNoneItem]
-from typing import Union
-def f(x: Union[int, None]) -> None: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Union])
- FuncDef:2(
- f
- Args(
- Var(x))
- def (x: builtins.int)
- Block:2(
- PassStmt:2())))
-
-[case testUnionTypeWithNoneItemAndTwoItems]
-from typing import Union
-def f(x: Union[int, None, str]) -> None: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Union])
- FuncDef:2(
- f
- Args(
- Var(x))
- def (x: Union[builtins.int, builtins.str])
- Block:2(
- PassStmt:2())))
-
-[case testUnionTypeWithSingleItem]
-from typing import Union
-def f(x: Union[int]) -> None: pass
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Union])
- FuncDef:2(
- f
- Args(
- Var(x))
- def (x: builtins.int)
- Block:2(
- PassStmt:2())))
-
-[case testOptionalTypes]
-from typing import Optional
-x = 1 # type: Optional[int]
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Optional])
- AssignmentStmt:2(
- NameExpr(x [__main__.x])
- IntExpr(1)
- builtins.int))
-
-[case testInvalidOptionalType]
-from typing import Optional
-x = 1 # type: Optional[int, str] # E: Optional[...] must have exactly one type argument
-y = 1 # type: Optional # E: Optional[...] must have exactly one type argument
-[out]
-
-[case testCoAndContravariantTypeVar]
-from typing import TypeVar
-T = TypeVar('T', covariant=True)
-S = TypeVar('S', contravariant=True)
-[builtins fixtures/bool.pyi]
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [TypeVar])
- AssignmentStmt:2(
- NameExpr(T* [__main__.T])
- TypeVarExpr:2(
- Variance(COVARIANT)))
- AssignmentStmt:3(
- NameExpr(S* [__main__.S])
- TypeVarExpr:3(
- Variance(CONTRAVARIANT))))
-
-[case testTupleExpressionAsType]
-def f(x: (int, int)) -> None: pass
-[out]
-main:1: error: Invalid tuple literal type
-
-[case testQualifiedTypeNameBasedOnAny]
-from typing import Any
-x = 0 # type: Any
-z = 0 # type: x.y
-[out]
-MypyFile:1(
- ImportFrom:1(typing, [Any])
- AssignmentStmt:2(
- NameExpr(x [__main__.x])
- IntExpr(0)
- Any)
- AssignmentStmt:3(
- NameExpr(z [__main__.z])
- IntExpr(0)
- Any))
diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test
deleted file mode 100644
index baa5be5..0000000
--- a/test-data/unit/stubgen.test
+++ /dev/null
@@ -1,565 +0,0 @@
-[case testEmptyFile]
-[out]
-
-[case testSingleFunction]
-def f():
- x = 1
-[out]
-def f(): ...
-
-[case testTwoFunctions]
-def f(a, b):
- x = 1
-def g(arg):
- pass
-[out]
-def f(a, b): ...
-def g(arg): ...
-
-[case testDefaultArgInt]
-def f(a, b=2): ...
-def g(b=-1, c=0): ...
-[out]
-def f(a, b: int = ...): ...
-def g(b: int = ..., c: int = ...): ...
-
-[case testDefaultArgNone]
-def f(x=None): ...
-[out]
-from typing import Any, Optional
-
-def f(x: Optional[Any] = ...): ...
-
-[case testDefaultArgBool]
-def f(x=True, y=False): ...
-[out]
-def f(x: bool = ..., y: bool = ...): ...
-
-[case testDefaultArgStr]
-def f(x='foo'): ...
-[out]
-def f(x: str = ...): ...
-
-[case testDefaultArgBytes]
-def f(x=b'foo'): ...
-[out]
-def f(x: bytes = ...): ...
-
-[case testDefaultArgFloat]
-def f(x=1.2): ...
-[out]
-def f(x: float = ...): ...
-
-[case testDefaultArgOther]
-def f(x=ord): ...
-[out]
-from typing import Any
-
-def f(x: Any = ...): ...
-
-[case testVarArgs]
-def f(x, *y): ...
-[out]
-def f(x, *y): ...
-
-[case testKwVarArgs]
-def f(x, **y): ...
-[out]
-def f(x, **y): ...
-
-[case testClass]
-class A:
- def f(self, x):
- x = 1
-def g(): ...
-[out]
-class A:
- def f(self, x): ...
-
-def g(): ...
-
-[case testVariable]
-x = 1
-[out]
-x = ... # type: int
-
-[case testMultipleVariable]
-x = y = 1
-[out]
-x = ... # type: int
-y = ... # type: int
-
-[case testClassVariable]
-class C:
- x = 1
-[out]
-class C:
- x = ... # type: int
-
-[case testSelfAssignment]
-class C:
- def __init__(self):
- self.x = 1
- x.y = 2
-[out]
-class C:
- x = ... # type: int
- def __init__(self) -> None: ...
-
-[case testSelfAndClassBodyAssignment]
-x = 1
-class C:
- x = 1
- def __init__(self):
- self.x = 1
- self.x = 1
-[out]
-x = ... # type: int
-
-class C:
- x = ... # type: int
- def __init__(self) -> None: ...
-
-[case testEmptyClass]
-class A: ...
-[out]
-class A: ...
-
-[case testPrivateFunction]
-def _f(): ...
-def g(): ...
-[out]
-def g(): ...
-
-[case testPrivateMethod]
-class A:
- def _f(self): ...
-[out]
-class A: ...
-
-[case testPrivateVar]
-_x = 1
-class A:
- _y = 1
-[out]
-class A: ...
-
-[case testSpecialInternalVar]
-__all__ = []
-__author__ = ''
-__version__ = ''
-[out]
-
-[case testBaseClass]
-class A: ...
-class B(A): ...
-[out]
-class A: ...
-class B(A): ...
-
-[case testDecoratedFunction]
-@decorator
-def foo(x): ...
-[out]
-def foo(x): ...
-
-[case testMultipleAssignment]
-x, y = 1, 2
-[out]
-from typing import Any
-
-x = ... # type: Any
-y = ... # type: Any
-
-[case testMultipleAssignment2]
-[x, y] = 1, 2
-[out]
-from typing import Any
-
-x = ... # type: Any
-y = ... # type: Any
-
-[case testKeywordOnlyArg]
-def f(x, *, y=1): ...
-def g(x, *, y=1, z=2): ...
-[out]
-def f(x, *, y: int = ...): ...
-def g(x, *, y: int = ..., z: int = ...): ...
-
-[case testProperty]
-class A:
- @property
- def f(self):
- return 1
- @f.setter
- def f(self, x): ...
-[out]
-class A:
- @property
- def f(self): ...
- @f.setter
- def f(self, x): ...
-
-[case testStaticMethod]
-class A:
- @staticmethod
- def f(x): ...
-[out]
-class A:
- @staticmethod
- def f(x): ...
-
-[case testClassMethod]
-class A:
- @classmethod
- def f(cls): ...
-[out]
-class A:
- @classmethod
- def f(cls): ...
-
-[case testIfMainCheck]
-def a(): ...
-if __name__ == '__main__':
- x = 1
- def f(): ...
-def b(): ...
-[out]
-def a(): ...
-def b(): ...
-
-[case testImportStar]
-from x import *
-from a.b import *
-def f(): ...
-[out]
-from x import *
-from a.b import *
-
-def f(): ...
-
-[case testNoSpacesBetweenEmptyClasses]
-class X:
- def g(self): ...
-class A: ...
-class B: ...
-class C:
- def f(self): ...
-[out]
-class X:
- def g(self): ...
-
-class A: ...
-class B: ...
-
-class C:
- def f(self): ...
-
-[case testExceptionBaseClasses]
-class A(Exception): ...
-class B(ValueError): ...
-[out]
-class A(Exception): ...
-class B(ValueError): ...
-
-[case testOmitSomeSpecialMethods]
-class A:
- def __str__(self): ...
- def __repr__(self): ...
- def __eq__(self): ...
- def __getstate__(self): ...
- def __setstate__(self, state): ...
-[out]
-class A:
- def __eq__(self): ...
-
-[case testOmitDefsNotInAll_import]
-__all__ = [] + ['f']
-def f(): ...
-def g(): ...
-[out]
-def f(): ...
-
-[case testVarDefsNotInAll_import]
-__all__ = [] + ['f', 'g']
-def f(): ...
-x = 1
-y = 1
-def g(): ...
-[out]
-def f(): ...
-def g(): ...
-
-[case testIncludeClassNotInAll_import]
-__all__ = [] + ['f']
-def f(): ...
-class A: ...
-[out]
-def f(): ...
-
-class A: ...
-
-[case testAllAndClass_import]
-__all__ = ['A']
-class A:
- x = 1
- def f(self): ...
-[out]
-class A:
- x = ... # type: int
- def f(self): ...
-
-[case testMultiplePrivateDefs]
-class A: ...
-_x = 1
-_y = 1
-_z = 1
-class C: ...
-[out]
-class A: ...
-class C: ...
-
-[case testIncludeFromImportIfInAll_import]
-from re import match, search, sub
-__all__ = ['match', 'sub', 'x']
-x = 1
-[out]
-from re import match as match, sub as sub
-
-x = ... # type: int
-
-[case testExportModule_import]
-import re
-__all__ = ['re', 'x']
-x = 1
-y = 2
-[out]
-import re as re
-
-x = ... # type: int
-
-[case testExportModuleAs_import]
-import re as rex
-__all__ = ['rex', 'x']
-x = 1
-y = 2
-[out]
-import re as rex
-
-x = ... # type: int
-
-[case testExportModuleInPackage_import]
-import urllib.parse as p
-__all__ = ['p']
-[out]
-import urllib.parse as p
-
-[case testExportModuleInPackageUnsupported_import]
-import urllib.parse
-__all__ = ['urllib']
-[out]
-# Names in __all__ with no definition:
-# urllib
-
-[case testRelativeImportAll]
-from .x import *
-[out]
-from .x import *
-
-[case testCommentForUndefinedName_import]
-__all__ = ['f', 'x', 'C', 'g']
-def f(): ...
-x = 1
-class C:
- def g(self): ...
-[out]
-def f(): ...
-
-x = ... # type: int
-
-class C:
- def g(self): ...
-
-# Names in __all__ with no definition:
-# g
-
-[case testIgnoreSlots]
-class A:
- __slots__ = ()
-[out]
-class A: ...
-
-[case testSkipPrivateProperty]
-class A:
- @property
- def _foo(self): ...
-[out]
-class A: ...
-
-[case testSkipPrivateStaticAndClassMethod]
-class A:
- @staticmethod
- def _foo(): ...
- @classmethod
- def _bar(cls): ...
-[out]
-class A: ...
-
-[case testNamedtuple]
-import collections, x
-X = collections.namedtuple('X', ['a', 'b'])
-[out]
-from collections import namedtuple
-
-X = namedtuple('X', ['a', 'b'])
-
-[case testNamedtupleAltSyntax]
-from collections import namedtuple, x
-X = namedtuple('X', 'a b')
-[out]
-from collections import namedtuple
-
-X = namedtuple('X', 'a b')
-
-[case testNamedtupleWithUnderscore]
-from collections import namedtuple as _namedtuple
-def f(): ...
-X = _namedtuple('X', 'a b')
-def g(): ...
-[out]
-from collections import namedtuple as _namedtuple
-from collections import namedtuple
-
-def f(): ...
-
-X = namedtuple('X', 'a b')
-
-def g(): ...
-
-[case testNamedtupleBaseClass]
-import collections, x
-_X = collections.namedtuple('_X', ['a', 'b'])
-class Y(_X): ...
-[out]
-from collections import namedtuple
-
-_X = namedtuple('_X', ['a', 'b'])
-
-class Y(_X): ...
-
-[case testArbitraryBaseClass]
-import x
-class D(x.C): ...
-[out]
-import x
-
-class D(x.C): ...
-
-[case testArbitraryBaseClass]
-import x.y
-class D(x.y.C): ...
-[out]
-import x.y
-
-class D(x.y.C): ...
-
-[case testUnqualifiedArbitraryBaseClassWithNoDef]
-class A(int): ...
-[out]
-class A(int): ...
-
-[case testUnqualifiedArbitraryBaseClass]
-from x import X
-class A(X): ...
-[out]
-from x import X
-
-class A(X): ...
-
-[case testUnqualifiedArbitraryBaseClassWithImportAs]
-from x import X as _X
-class A(_X): ...
-[out]
-from x import X as _X
-
-class A(_X): ...
-
-[case testObjectBaseClass]
-class A(object): ...
-[out]
-class A: ...
-
-[case testEmptyLines]
-def x(): ...
-def f():
- class A:
- def f(self):
- self.x = 1
-def g(): ...
-[out]
-def x(): ...
-def f(): ...
-def g(): ...
-
-[case testNestedClass]
-class A:
- class B:
- x = 1
- def f(self): ...
- def g(self): ...
-[out]
-class A:
- class B:
- x = ... # type: int
- def f(self): ...
- def g(self): ...
-
-[case testExportViaRelativeImport]
-from .api import get
-[out]
-from .api import get as get
-
-[case testExportViaRelativePackageImport]
-from .packages.urllib3.contrib import parse
-[out]
-from .packages.urllib3.contrib import parse as parse
-
-[case testNoExportViaRelativeImport]
-from . import get
-[out]
-
-[case testRelativeImportAndBase]
-from .x import X
-class A(X):
- pass
-[out]
-from .x import X as X
-
-class A(X): ...
-
-[case testDuplicateDef]
-def syslog(a): pass
-def syslog(a): pass
-[out]
-def syslog(a): ...
-
-[case testAsyncAwait_fast_parser]
-async def f(a):
- x = await y
-[out]
-def f(a): ...
-
-[case testInferOptionalOnlyFunc]
-class A:
- x = None
- def __init__(self, a=None) -> None:
- self.x = []
-[out]
-from typing import Any, Optional
-
-class A:
- x = ... # type: Any
- def __init__(self, a: Optional[Any] = ...) -> None: ...
-
--- More features/fixes:
--- do not export deleted names
diff --git a/test-data/unit/typexport-basic.test b/test-data/unit/typexport-basic.test
deleted file mode 100644
index a51d844..0000000
--- a/test-data/unit/typexport-basic.test
+++ /dev/null
@@ -1,1159 +0,0 @@
--- Test cases for exporting node types from the type checker.
---
--- Each test case consists of at least two sections.
--- The first section contains [case NAME-skip] followed by the input code,
--- while the second section contains [out] followed by the output from the type
--- checker.
---
--- The first line of input code should be a regexp in comment that describes
--- the information to dump (prefix with ##). The regexp is matched against
--- the following items:
---
--- * each name of an expression node
--- * each type string of a node (e.g. OpExpr)
---
--- Lines starting with "--" in this file will be ignored.
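For reference, a minimal case in the format described above (hypothetical: the case name and output are illustrative, not taken from the deleted file, and mirror the existing testLiterals case) would read as follows. The ## line restricts the dump to IntExpr nodes, and the [out] section lists one "Node(line) : type" entry per matched node.

[case testIntLiteralSketch]
## IntExpr
import typing
5
[builtins fixtures/primitives.pyi]
[out]
IntExpr(3) : builtins.int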
-
-
--- Expressions
--- -----------
-
-
-[case testConstructorCall]
-import typing
-A()
-B()
-class A: pass
-class B: pass
-[out]
-CallExpr(2) : A
-NameExpr(2) : def () -> A
-CallExpr(3) : B
-NameExpr(3) : def () -> B
-
-[case testLiterals]
-import typing
-5
-2.3
-'foo'
-[builtins fixtures/primitives.pyi]
-[out]
-IntExpr(2) : builtins.int
-FloatExpr(3) : builtins.float
-StrExpr(4) : builtins.str
-
-[case testNameExpression]
-
-a = None # type: A
-a # node
-def f(aa: 'A') -> None:
- b = None # type: B
- aa # node
- b # node
-class A:
- def g(self) -> None:
- self # node
-class B: pass
-[out]
-NameExpr(3) : A
-NameExpr(6) : A
-NameExpr(7) : B
-NameExpr(10) : A
-
-[case testEllipsis]
-import typing
-...
-[out]
-EllipsisExpr(2) : builtins.ellipsis
-
-[case testMemberAccess]
-## MemberExpr|CallExpr
-
-a = None # type: A
-a.m
-a.f
-a.f()
-class A:
- m = None # type: A
- def f(self) -> 'B': pass
-class B: pass
-[out]
-MemberExpr(4) : A
-MemberExpr(5) : def () -> B
-CallExpr(6) : B
-MemberExpr(6) : def () -> B
-
-[case testCastExpression]
-## CastExpr|[a-z]
-from typing import Any, cast
-d = None # type: Any
-b = None # type: B
-class A: pass
-class B(A): pass
-cast(A, d)
-cast(A, b)
-cast(B, b)
-[out]
-CastExpr(7) : A
-NameExpr(7) : Any
-CastExpr(8) : A
-NameExpr(8) : B
-CastExpr(9) : B
-NameExpr(9) : B
-
-[case testArithmeticOps]
-## OpExpr
-import typing
-a = 1 + 2
-1.2 * 3
-2.2 - 3
-1 / 2
-[file builtins.py]
-class object:
- def __init__(self) -> None: pass
-class function: pass
-class int:
- def __add__(self, x: int) -> int: pass
- def __truediv__(self, x: int) -> float: pass
-class float:
- def __mul__(self, x: int) -> float: pass
- def __sub__(self, x: int) -> float: pass
-class type: pass
-class str: pass
-[out]
-OpExpr(3) : builtins.int
-OpExpr(4) : builtins.float
-OpExpr(5) : builtins.float
-OpExpr(6) : builtins.float
-
-[case testComparisonOps]
-## ComparisonExpr
-import typing
-1 == object()
-1 == 2
-2 < 3
-1 < 2 < 3
-8 > 3
-4 < 6 > 2
-[file builtins.py]
-class object:
- def __init__(self) -> None: pass
-class int:
- def __eq__(self, x: object) -> bool: pass
- def __lt__(self, x: int) -> bool: pass
- def __gt__(self, x: int) -> int: pass
-class bool: pass
-class type: pass
-class function: pass
-class str: pass
-[out]
-ComparisonExpr(3) : builtins.bool
-ComparisonExpr(4) : builtins.bool
-ComparisonExpr(5) : builtins.bool
-ComparisonExpr(6) : builtins.bool
-ComparisonExpr(7) : builtins.int
-ComparisonExpr(8) : builtins.object
-
-[case testBooleanOps]
-## OpExpr|UnaryExpr
-import typing
-a = 1
-a and a
-a or a
-not a
-[builtins fixtures/bool.pyi]
-[out]
-OpExpr(4) : builtins.int
-OpExpr(5) : builtins.int
-UnaryExpr(6) : builtins.bool
-
-[case testBooleanOpsOnBools]
-## OpExpr|UnaryExpr
-import typing
-a = bool()
-a and a
-a or a
-not a
-[builtins fixtures/bool.pyi]
-[out]
-OpExpr(4) : builtins.bool
-OpExpr(5) : builtins.bool
-UnaryExpr(6) : builtins.bool
-
-[case testFunctionCall]
-## CallExpr
-from typing import Tuple
-f(
- A(),
- B())
-class A: pass
-class B: pass
-def f(a: A, b: B) -> Tuple[A, B]: pass
-[builtins fixtures/tuple-simple.pyi]
-[out]
-CallExpr(3) : Tuple[A, B]
-CallExpr(4) : A
-CallExpr(5) : B
-
-
--- Statements
--- ----------
-
-
-[case testSimpleAssignment]
-from typing import Any
-a = None # type: A
-b = a # type: Any
-b = a
-a = b
-
-class A: pass
-[out]
-NameExpr(3) : A
-NameExpr(4) : A
-NameExpr(4) : Any
-NameExpr(5) : A
-NameExpr(5) : Any
-
-[case testMemberAssignment]
-from typing import Any
-class A:
- a = None # type: A
- b = None # type: Any
- def f(self) -> None:
- self.b = self.a
- self.a.a = self.b
-[out]
-MemberExpr(6) : A
-MemberExpr(6) : Any
-NameExpr(6) : A
-NameExpr(6) : A
-MemberExpr(7) : A
-MemberExpr(7) : A
-MemberExpr(7) : A
-NameExpr(7) : A
-NameExpr(7) : A
-
-[case testIf]
-
-a = None # type: bool
-if a:
- 1
-elif not a:
- 1
-[builtins fixtures/bool.pyi]
-[out]
-NameExpr(3) : builtins.bool
-IntExpr(4) : builtins.int
-NameExpr(5) : builtins.bool
-UnaryExpr(5) : builtins.bool
-IntExpr(6) : builtins.int
-
-[case testWhile]
-
-a = None # type: bool
-while a:
- a
-[builtins fixtures/bool.pyi]
-[out]
-NameExpr(3) : builtins.bool
-NameExpr(4) : builtins.bool
-
-
--- Simple type inference
--- ---------------------
-
-
-[case testInferSingleType]
-import typing
-x = ()
-[builtins fixtures/primitives.pyi]
-[out]
-NameExpr(2) : Tuple[]
-TupleExpr(2) : Tuple[]
-
-[case testInferTwoTypes]
-## NameExpr
-import typing
-(s,
-i) = 'x', 1
-[builtins fixtures/primitives.pyi]
-[out]
-NameExpr(3) : builtins.str
-NameExpr(4) : builtins.int
-
-[case testInferSingleLocalVarType]
-import typing
-def f() -> None:
- x = ()
-[builtins fixtures/primitives.pyi]
-[out]
-NameExpr(3) : Tuple[]
-TupleExpr(3) : Tuple[]
-
-
--- Basic generics
--- --------------
-
-
-[case testImplicitBoundTypeVarsForMethod]
-## MemberExpr
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]):
- def f(self) -> T: pass
-class B: pass
-def g() -> None:
- a = None # type: A[B]
- f = a.f
-[out]
-MemberExpr(9) : def () -> B
-
-[case testImplicitBoundTypeVarsForSelfMethodReference]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]):
- def f(self) -> T:
- self.f()
-[out]
-CallExpr(5) : T`1
-MemberExpr(5) : def () -> T`1
-NameExpr(5) : A[T`1]
-
-[case testGenericFunctionCallWithTypeApp-skip]
-## CallExpr|TypeApplication|NameExpr
-from typing import Any, TypeVar, Tuple
-T = TypeVar('T')
-class A: pass
-f[A](A())
-f[Any](A())
-def f(a: T) -> Tuple[T, T]: pass
-[builtins fixtures/tuple.pyi]
-[out]
-CallExpr(5) : A
-CallExpr(5) : Tuple[A, A]
-NameExpr(5) : def () -> A
-NameExpr(5) : def (a: A) -> Tuple[A, A]
-TypeApplication(5) : def (a: A) -> Tuple[A, A]
-CallExpr(6) : A
-CallExpr(6) : Tuple[Any, Any]
-NameExpr(6) : def () -> A
-NameExpr(6) : def (a: Any) -> Tuple[Any, Any]
-TypeApplication(6) : def (a: Any) -> Tuple[Any, Any]
-
--- NOTE: Type applications are not supported for generic methods, so the
--- following test cases are commented out.
-
---[case testGenericMethodCallWithTypeApp]
---## CallExpr|MemberExpr|TypeApplication
---from typing import Any, TypeVar, Tuple
---T = TypeVar('T')
---class A:
--- def f(self, a: T) -> Tuple[T, T]: pass
---a.f[A](a)
---a.f[Any](a)
---a = None # type: A
---[builtins fixtures/tuple.py]
---[out]
---CallExpr(2) : Tuple[A, A]
---MemberExpr(2) : def (A a) -> Tuple[A, A]
---TypeApplication(2) : def (A a) -> Tuple[A, A]
---CallExpr(3) : Tuple[Any, Any]
---MemberExpr(3) : def (any a) -> Tuple[Any, Any]
---TypeApplication(3) : def (any a) -> Tuple[Any, Any]
-
---[case testGenericMethodCallInGenericTypeWithTypeApp]
---## CallExpr|MemberExpr|TypeApplication
---from typing import Any, TypeVar, Generic, Tuple
---T = TypeVar('T')
---S = TypeVar('S')
---class B: pass
---class C: pass
---a.f[B](b)
---a.f[Any](b)
---class A(Generic[T]):
--- def f(self, a: S) -> Tuple[T, S]: pass
---a = None # type: A[C]
---b = None # type: B
---[builtins fixtures/tuple.py]
---[out]
---CallExpr(6) : Tuple[C, B]
---MemberExpr(6) : def (B a) -> Tuple[C, B]
---TypeApplication(6) : def (B a) -> Tuple[C, B]
---CallExpr(7) : Tuple[C, Any]
---MemberExpr(7) : def (any a) -> Tuple[C, Any]
---TypeApplication(7) : def (any a) -> Tuple[C, Any]
-
-[case testGenericTypeVariableInference]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class A(Generic[T]):
- def __init__(self, a: T) -> None: pass
-class B: pass
-A(A(B()))
-[out]
-CallExpr(6) : A[A[B]]
-CallExpr(6) : A[B]
-CallExpr(6) : B
-NameExpr(6) : def (a: A[B]) -> A[A[B]]
-NameExpr(6) : def (a: B) -> A[B]
-NameExpr(6) : def () -> B
-
-
--- Generic inheritance
--- -------------------
-
-
-[case testInheritedMethodReferenceWithGenericInheritance]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class C: pass
-class A(Generic[T]):
- def f(self, a: T) -> None: pass
-class B(A[C]):
- def g(self, c: C) -> None:
- self.f(c)
-[out]
-CallExpr(8) : void
-MemberExpr(8) : def (a: C)
-NameExpr(8) : C
-NameExpr(8) : B
-
-[case testInheritedMethodReferenceWithGenericSubclass]
-from typing import TypeVar, Generic
-S = TypeVar('S')
-T = TypeVar('T')
-class C: pass
-class A(Generic[S, T]):
- def f(self, a: C) -> None: pass
-class B(A[C, T], Generic[T]):
- def g(self, c: C) -> None:
- self.f(c)
-[out]
-CallExpr(9) : void
-MemberExpr(9) : def (a: C)
-NameExpr(9) : C
-NameExpr(9) : B[T`1]
-
-[case testExternalReferenceWithGenericInheritance]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-class C: pass
-class A(Generic[T]):
- def f(self, a: T) -> None: pass
-class B(A[C]): pass
-b = None # type: B
-c = None # type: C
-b.f(c)
-[out]
-CallExpr(9) : void
-MemberExpr(9) : def (a: C)
-NameExpr(9) : B
-NameExpr(9) : C
-
-
--- Implicit Any types
--- ------------------
-
-
-[case testDynamicallyTypedFunction]
-
-def f(x):
- y = x + o
- z = o
- z
-o = None # type: object
-[out]
-NameExpr(3) : builtins.object
-NameExpr(3) : Any
-NameExpr(3) : Any
-OpExpr(3) : Any
-NameExpr(4) : builtins.object
-NameExpr(4) : Any
-NameExpr(5) : Any
-
-[case testDynamicallyTypedMethod]
-
-class A:
- def f(self, x):
- y = (
- o) # Place y and o on separate lines
- x
- y
-o = None # type: object
-[out]
-NameExpr(4) : Any
-NameExpr(5) : builtins.object
-NameExpr(6) : Any
-NameExpr(7) : Any
-
-[case testDynamicallyTypedConstructor]
-
-class A:
- def __init__(self, x):
- y = o
- x
- y
-o = None # type: object
-[out]
-NameExpr(4) : builtins.object
-NameExpr(4) : Any
-NameExpr(5) : Any
-NameExpr(6) : Any
-
-[case testCallInDynamicallyTypedFunction]
-
-def f():
- g(o)
-def g(a: object) -> object: pass
-o = None # type: object
-[out]
-CallExpr(3) : Any
-NameExpr(3) : def (a: builtins.object) -> builtins.object
-NameExpr(3) : builtins.object
-
-[case testExpressionInDynamicallyTypedFn]
-import typing
-def f():
- x = None
- x.f()
-[out]
-CallExpr(4) : Any
-MemberExpr(4) : Any
-NameExpr(4) : Any
-
-[case testGenericCall]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-def f() -> None:
- a1 = A(b) # type: A[B]
- a2 = A(b) # type: A[object]
-class A(Generic[T]):
- def __init__(self, a: T) -> None: pass
-class B: pass
-b = None # type: B
-[out]
-CallExpr(4) : A[B]
-NameExpr(4) : def (a: B) -> A[B]
-NameExpr(4) : B
-CallExpr(5) : A[builtins.object]
-NameExpr(5) : def (a: builtins.object) -> A[builtins.object]
-NameExpr(5) : B
-
-[case testGenericCallInDynamicallyTypedFunction]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-def f():
- A()
-class A(Generic[T]): pass
-[out]
-CallExpr(4) : Any
-NameExpr(4) : def [T] () -> A[T`1]
-
-[case testGenericCallInDynamicallyTypedFunction2]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-def f():
- A(f)
-class A(Generic[T]):
- def __init__(self, x: T) -> None: pass
-[out]
-CallExpr(4) : Any
-NameExpr(4) : def [T] (x: T`1) -> A[T`1]
-NameExpr(4) : def () -> Any
-
-[case testGenericCallInDynamicallyTypedFunction3]
-from typing import TypeVar
-t = TypeVar('t')
-def f():
- g(None)
-def g(x: t) -> t: pass
-[out]
-CallExpr(4) : Any
-NameExpr(4) : def [t] (x: t`-1) -> t`-1
-
-
--- Generic types and type inference
--- --------------------------------
-
-
-[case testInferenceInArgumentContext]
-## CallExpr
-from typing import TypeVar, Generic
-T = TypeVar('T')
-f(g())
-f(h(b))
-f(h(c))
-
-b = None # type: B
-c = None # type: C
-
-def f(a: 'A[B]') -> None: pass
-
-def g() -> 'A[T]': pass
-def h(a: T) -> 'A[T]': pass
-
-class A(Generic[T]): pass
-class B: pass
-class C(B): pass
-[out]
-CallExpr(4) : void
-CallExpr(4) : A[B]
-CallExpr(5) : void
-CallExpr(5) : A[B]
-CallExpr(6) : void
-CallExpr(6) : A[B]
-
-[case testInferGenericTypeForLocalVariable]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-def f() -> None:
- a = A(b)
- a
- a2, a3 = A(b), A(c)
- a2
- a3
-b = None # type: B
-c = None # type: C
-class A(Generic[T]):
- def __init__(self, x: T) -> None: pass
-class B: pass
-class C: pass
-[out]
-CallExpr(4) : A[B]
-NameExpr(4) : def (x: B) -> A[B]
-NameExpr(4) : A[B]
-NameExpr(4) : B
-NameExpr(5) : A[B]
-CallExpr(6) : A[B]
-CallExpr(6) : A[C]
-NameExpr(6) : def (x: B) -> A[B]
-NameExpr(6) : def (x: C) -> A[C]
-NameExpr(6) : A[B]
-NameExpr(6) : A[C]
-NameExpr(6) : B
-NameExpr(6) : C
-NameExpr(7) : A[B]
-NameExpr(8) : A[C]
-
-[case testNestedGenericCalls]
-from typing import TypeVar, Generic
-T = TypeVar('T')
-S = TypeVar('S')
-def h() -> None:
- g(f(c))
-
-c = None # type: C
-
-class A(Generic[T]): pass
-class B(Generic[T]): pass
-class C: pass
-def f(a: T) -> A[T]: pass
-def g(a: S) -> B[S]: pass
-[out]
-CallExpr(5) : A[C]
-CallExpr(5) : B[A[C]]
-NameExpr(5) : C
-NameExpr(5) : def (a: C) -> A[C]
-NameExpr(5) : def (a: A[C]) -> B[A[C]]
-
-[case testInferListLiterals]
-from typing import List
-a = [] # type: List[A]
-class A: pass
-[builtins fixtures/list.pyi]
-[out]
-ListExpr(2) : builtins.list[A]
-
-[case testInferGenericTypeInTypeAnyContext]
-from typing import Any
-a = [] # type: Any
-[builtins fixtures/list.pyi]
-[out]
-ListExpr(2) : builtins.list[Any]
-
-[case testHigherOrderFunction]
-from typing import TypeVar, Callable, List
-t = TypeVar('t')
-s = TypeVar('s')
-map(
- f,
- [A()])
-def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass
-class A: pass
-class B: pass
-def f(a: A) -> B: pass
-[builtins fixtures/list.pyi]
-[out]
-CallExpr(4) : builtins.list[B]
-NameExpr(4) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B]
-NameExpr(5) : def (a: A) -> B
-CallExpr(6) : A
-ListExpr(6) : builtins.list[A]
-NameExpr(6) : def () -> A
-
-
--- Lambdas
--- -------
-
-
-[case testLambdaWithTypeInferredFromContext]
-from typing import Callable
-f = lambda x: x.a # type: Callable[[B], A]
-class A: pass
-class B:
- a = None # type: A
-[out]
-FuncExpr(2) : def (B) -> A
-MemberExpr(2) : A
-NameExpr(2) : B
-
-[case testLambdaWithInferredType]
-## FuncExpr|NameExpr
-import typing
-f = lambda: 1
-[out]
-FuncExpr(3) : def () -> builtins.int
-NameExpr(3) : def () -> builtins.int
-
-[case testLambdaWithInferredType2]
-## FuncExpr|NameExpr
-import typing
-f = lambda: [1]
-[builtins fixtures/list.pyi]
-[out]
-FuncExpr(3) : def () -> builtins.list[builtins.int]
-NameExpr(3) : def () -> builtins.list[builtins.int]
-
-[case testLambdaWithInferredType2]
-from typing import List, Callable
-f = lambda x: [] # type: Callable[[B], List[A]]
-class A: pass
-class B:
- a = None # type: A
-[builtins fixtures/list.pyi]
-[out]
-FuncExpr(2) : def (B) -> builtins.list[A]
-ListExpr(2) : builtins.list[A]
-
-[case testLambdaAndHigherOrderFunction]
-from typing import TypeVar, Callable, List
-t = TypeVar('t')
-s = TypeVar('s')
-l = None # type: List[A]
-map(
- lambda x: f(x), l)
-def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass
-class A: pass
-class B: pass
-def f(a: A) -> B: pass
-[builtins fixtures/list.pyi]
-[out]
-CallExpr(5) : builtins.list[B]
-NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B]
-CallExpr(6) : B
-FuncExpr(6) : def (A) -> B
-NameExpr(6) : def (a: A) -> B
-NameExpr(6) : builtins.list[A]
-NameExpr(6) : A
-
-[case testLambdaAndHigherOrderFunction2]
-## FuncExpr|NameExpr|ListExpr
-from typing import TypeVar, List, Callable
-t = TypeVar('t')
-s = TypeVar('s')
-l = None # type: List[A]
-map(
- lambda x: [f(x)], l)
-def map(f: Callable[[t], List[s]], a: List[t]) -> List[s]: pass
-class A: pass
-class B: pass
-def f(a: A) -> B: pass
-[builtins fixtures/list.pyi]
-[out]
-NameExpr(6) : def (f: def (A) -> builtins.list[B], a: builtins.list[A]) -> builtins.list[B]
-FuncExpr(7) : def (A) -> builtins.list[B]
-ListExpr(7) : builtins.list[B]
-NameExpr(7) : def (a: A) -> B
-NameExpr(7) : builtins.list[A]
-NameExpr(7) : A
-
-[case testLambdaInListAndHigherOrderFunction]
-from typing import TypeVar, Callable, List
-t = TypeVar('t')
-s = TypeVar('s')
-l = None # type: List[A]
-map(
- [lambda x: x],
- l)
-def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass
-class A: pass
-[builtins fixtures/list.pyi]
-[out]
--- TODO We probably should not silently infer 'Any' types in statically typed
--- context. Perhaps just fail instead?
-CallExpr(5) : builtins.list[Any]
-NameExpr(5) : def (f: builtins.list[def (A) -> Any], a: builtins.list[A]) -> builtins.list[Any]
-FuncExpr(6) : def (A) -> A
-ListExpr(6) : builtins.list[def (A) -> Any]
-NameExpr(6) : A
-NameExpr(7) : builtins.list[A]
-
-[case testLambdaAndHigherOrderFunction3]
-from typing import TypeVar, Callable, List
-t = TypeVar('t')
-s = TypeVar('s')
-l = None # type: List[A]
-map(
- lambda x: x.b,
- l)
-def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass
-class A:
- b = None # type: B
-class B: pass
-[builtins fixtures/list.pyi]
-[out]
-CallExpr(5) : builtins.list[B]
-NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B]
-FuncExpr(6) : def (A) -> B
-MemberExpr(6) : B
-NameExpr(6) : A
-NameExpr(7) : builtins.list[A]
-
-[case testLambdaAndHigherOrderFunctionAndKeywordArgs]
-from typing import TypeVar, Callable, List
-t = TypeVar('t')
-s = TypeVar('s')
-l = None # type: List[A]
-map(
- a=l,
- f=lambda x: x.b)
-def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass
-class A:
- b = None # type: B
-class B: pass
-[builtins fixtures/list.pyi]
-[out]
-CallExpr(5) : builtins.list[B]
-NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B]
-NameExpr(6) : builtins.list[A]
-FuncExpr(7) : def (A) -> B
-MemberExpr(7) : B
-NameExpr(7) : A
-
-
--- Boolean operations
--- ------------------
-
-
-[case testBooleanOr]
-from typing import List
-a = None # type: List[A]
-a or []
-a = a or []
-a = [] or a
-class A: pass
-[builtins fixtures/list.pyi]
-[out]
-ListExpr(3) : builtins.list[A]
-NameExpr(3) : builtins.list[A]
-OpExpr(3) : builtins.list[A]
-ListExpr(4) : builtins.list[A]
-NameExpr(4) : builtins.list[A]
-NameExpr(4) : builtins.list[A]
-OpExpr(4) : builtins.list[A]
-ListExpr(5) : builtins.list[A]
-NameExpr(5) : builtins.list[A]
-NameExpr(5) : builtins.list[A]
-OpExpr(5) : builtins.list[A]
-
-
--- Class attributes
--- ----------------
-
-
-[case testUnboundMethod]
-## MemberExpr
-import typing
-class A:
- def f(self) -> None: pass
-A.f
-[out]
-MemberExpr(5) : def (self: A)
-
-[case testUnboundMethodWithImplicitSig]
-## MemberExpr
-import typing
-class A:
- def f(self): pass
-A.f
-[out]
-MemberExpr(5) : def (self: Any) -> Any
-
-[case testOverloadedUnboundMethod]
-## MemberExpr
-from typing import overload
-class A:
- @overload
- def f(self) -> None: pass
- @overload
- def f(self, x: object) -> None: pass
-A.f
-[out]
-MemberExpr(8) : Overload(def (self: A), def (self: A, x: builtins.object))
-
-[case testOverloadedUnboundMethodWithImplicitSig]
-## MemberExpr
-from typing import overload
-class A:
- @overload
- def f(self): pass
- @overload
- def f(self, x): pass
-A.f
-[out]
-MemberExpr(8) : Overload(def (self: Any) -> Any, def (self: Any, x: Any) -> Any)
-
-[case testUnboundMethodWithInheritance]
-## MemberExpr
-import typing
-class A:
- def __init__(self) -> None: pass
- def f(self) -> None: pass
-class B(A):
- pass
-B.f
-[out]
-MemberExpr(8) : def (self: A)
-
-[case testUnboundGenericMethod]
-## MemberExpr
-from typing import TypeVar
-t = TypeVar('t')
-class B: pass
-class A:
- def f(self, x: t) -> None: pass
-A.f(A(), B())
-[out]
-MemberExpr(7) : def (self: A, x: B)
-
-[case testUnboundMethodOfGenericClass]
-## MemberExpr
-from typing import TypeVar, Generic
-t = TypeVar('t')
-class B: pass
-class A(Generic[t]):
- def f(self, x: t) -> None: pass
-A.f
-a_b = A() # type: A[B]
-A.f(a_b, B())
-[out]
-MemberExpr(7) : def [t] (self: A[t`1], x: t`1)
-MemberExpr(9) : def (self: A[B], x: B)
-
-[case testUnboundOverloadedMethodOfGenericClass]
-## CallExpr
-from typing import TypeVar, Generic, overload
-t = TypeVar('t')
-class B: pass
-class A(Generic[t]):
- @overload
- def f(self, x: t) -> t: pass
- @overload
- def f(self) -> object: pass
-ab, b = None, None # type: (A[B], B)
-A.f(ab, b)
-[out]
-CallExpr(11) : B
-
-[case testUnboundMethodOfGenericClassWithImplicitSig]
-## MemberExpr
-from typing import TypeVar, Generic
-t = TypeVar('t')
-class B: pass
-class A(Generic[t]):
- def f(self, x): pass
-A.f(None, None)
-[out]
-MemberExpr(7) : def (self: Any, x: Any) -> Any
-
-[case testGenericMethodOfGenericClass]
-## MemberExpr
-from typing import TypeVar, Generic
-t = TypeVar('t')
-s = TypeVar('s')
-class B: pass
-class A(Generic[t]):
- def f(self, y: s) -> None: pass
-ab = None # type: A[B]
-o = None # type: object
-A.f(ab, o)
-[out]
-MemberExpr(10) : def (self: A[B], y: builtins.object)
-
-
--- Type variables with value restriction
--- -------------------------------------
-
-
-[case testTypeVariableWithValueRestriction]
-## NameExpr
-from typing import TypeVar
-T = TypeVar('T', int, str)
-def f(x: T) -> None: pass
-f(1)
-f('x')
-[out]
-NameExpr(5) : def (x: builtins.int)
-NameExpr(6) : def (x: builtins.str)
-
-[case testTypeVariableWithValueRestrictionAndSubtype]
-## NameExpr|CallExpr
-from typing import TypeVar
-T = TypeVar('T', int, str)
-def f(x: T) -> T: pass
-class S(str): pass
-s = None # type: S
-f(s)
-[out]
-CallExpr(7) : builtins.str
-NameExpr(7) : def (x: builtins.str) -> builtins.str
-NameExpr(7) : S
-
-
--- Binary operations
--- -----------------
-
-
-[case testBinaryOperatorWithAnyLeftOperand]
-## OpExpr
-from typing import Any, cast
-class B:
- def __add__(self, x: int) -> str: pass
-class A:
- def __radd__(self, x: B) -> int: pass
-cast(Any, 1) + A()
-B() + A()
-[out]
-OpExpr(7) : Any
-OpExpr(8) : builtins.int
-
-[case testBinaryOperatorWithAnyRightOperand]
-## OpExpr
-from typing import Any, cast
-class A:
- def __add__(self, x: str) -> int: pass
-A() + cast(Any, 1)
-[out]
-OpExpr(5) : Any
-
-
--- Callable overloading
--- --------------------
-
-
-[case testOverloadedFunctionType]
-## CallExpr
-from typing import overload
-@overload
-def f(x: int) -> str: pass
-@overload
-def f(x: str) -> int: pass
-f(1)
-f('')
-[out]
-CallExpr(7) : builtins.str
-CallExpr(8) : builtins.int
-
-[case testOverlappingOverloadedFunctionType]
-## CallExpr
-from typing import overload
-class A: pass
-class B(A): pass
-@overload
-def f(x: B) -> B: pass
-@overload
-def f(x: A) -> A: pass
-a = None # type: A
-b = None # type: B
-f(a)
-f(b)
-[out]
-CallExpr(11) : A
-CallExpr(12) : B
-
-
-
-[case testOverloadedErasedType]
-from typing import Callable
-from typing import List
-from typing import overload
-from typing import TypeVar
-
-T = TypeVar("T")
-V = TypeVar("V")
-
-def fun(s: int) -> int: pass
-
-def m(fun: Callable[[T], V], iter: List[T]) -> None: pass
-
-nums = [1] # type: List[int]
-m(fun,
- nums)
-[builtins fixtures/list.pyi]
-[out]
-IntExpr(13) : builtins.int
-ListExpr(13) : builtins.list[builtins.int]
-CallExpr(14) : void
-NameExpr(14) : def (s: builtins.int) -> builtins.int
-NameExpr(14) : def (fun: def (builtins.int) -> builtins.int, iter: builtins.list[builtins.int])
-NameExpr(15) : builtins.list[builtins.int]
-
-
--- Special cases
--- -------------
-
-
-[case testImplicitDataAttributeInit]
-## NameExpr
-import typing
-class A:
- def __init__(self) -> None:
- self.x = (
- A())
-[out]
-NameExpr(5) : A
-NameExpr(6) : def () -> A
-
-[case testListMultiplicationInContext]
-## ListExpr|OpExpr|IntExpr
-from typing import List
-a = [None] * 3 # type: List[str]
-[builtins fixtures/list.pyi]
-[out]
-IntExpr(3) : builtins.int
-ListExpr(3) : builtins.list[builtins.str]
-OpExpr(3) : builtins.list[builtins.str]
-
-
--- TODO
---
--- test expressions
--- list literal
--- tuple literal
--- unary minus
--- indexing
--- super expression
--- more complex lambda (multiple arguments etc.)
--- list comprehension
--- generator expression
--- overloads
--- other things
--- type inference
--- default argument value
--- for loop variable
--- exception variable
--- varargs
--- generics
--- explicit types
--- type of 'None' (currently stripped, but sometimes we may want to dump it)
diff --git a/test-requirements.txt b/test-requirements.txt
deleted file mode 100644
index 5ce8f80..0000000
--- a/test-requirements.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-flake8
-flake8-bugbear; python_version >= '3.5'
-flake8-pyi; python_version >= '3.5'
-lxml; sys_platform != 'win32'
-typed-ast>=0.6.3; sys_platform != 'win32' or python_version >= '3.5'
-pytest>=2.8
-pytest-xdist>=1.13
-pytest-cov>=2.4.0
-typing>=3.5.2; python_version < '3.5'
diff --git a/tmp-test-dirs/.gitignore b/tmp-test-dirs/.gitignore
deleted file mode 100644
index e6579d8..0000000
--- a/tmp-test-dirs/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-# This directory is used to store temporary directories for the testsuite.
-# If anything manages to exist here, it means python crashed instead of
-# calling tempfile.TemporaryDirectory's cleanup while unwinding.
-# Therefore, don't actually provide any ignore patterns.
diff --git a/typeshed/stdlib/2/ConfigParser.pyi b/typeshed/stdlib/2/ConfigParser.pyi
index 67b0340..5191e99 100644
--- a/typeshed/stdlib/2/ConfigParser.pyi
+++ b/typeshed/stdlib/2/ConfigParser.pyi
@@ -1,6 +1,6 @@
-from typing import Any, IO, Sequence, Tuple, Union
+from typing import Any, IO, Sequence, Tuple, Union, List, Dict
-__all__ = ... # type: list[str]
+__all__ = ... # type: List[str]
DEFAULTSECT = ... # type: str
MAX_INTERPOLATION_DEPTH = ... # type: int
@@ -42,7 +42,7 @@ class InterpolationDepthError(InterpolationError):
class ParsingError(Error):
filename = ... # type: str
- errors = ... # type: list[Tuple[Any, Any]]
+ errors = ... # type: List[Tuple[Any, Any]]
def __init__(self, filename: str) -> None: ...
def append(self, lineno: Any, line: Any) -> None: ...
@@ -60,20 +60,20 @@ class RawConfigParser:
SECTCRE = ... # type: Any
OPTCRE = ... # type: Any
OPTCRE_NV = ... # type: Any
- def __init__(self, defaults: dict[Any, Any] = ..., dict_type: Any = ..., allow_no_value: bool = ...) -> None: ...
- def defaults(self) -> dict[Any, Any]: ...
- def sections(self) -> list[str]: ...
+ def __init__(self, defaults: Dict[Any, Any] = ..., dict_type: Any = ..., allow_no_value: bool = ...) -> None: ...
+ def defaults(self) -> Dict[Any, Any]: ...
+ def sections(self) -> List[str]: ...
def add_section(self, section: str) -> None: ...
def has_section(self, section: str) -> bool: ...
- def options(self, section: str) -> list[str]: ...
- def read(self, filenames: Union[str, Sequence[str]]) -> list[str]: ...
+ def options(self, section: str) -> List[str]: ...
+ def read(self, filenames: Union[str, Sequence[str]]) -> List[str]: ...
def readfp(self, fp: IO[str], filename: str = ...) -> None: ...
def get(self, section: str, option: str) -> str: ...
- def items(self, section: str) -> list[Tuple[Any, Any]]: ...
+ def items(self, section: str) -> List[Tuple[Any, Any]]: ...
def _get(self, section: str, conv: type, option: str) -> Any: ...
def getint(self, section: str, option: str) -> int: ...
def getfloat(self, section: str, option: str) -> float: ...
- _boolean_states = ... # type: dict[str, bool]
+ _boolean_states = ... # type: Dict[str, bool]
def getboolean(self, section: str, option: str) -> bool: ...
def optionxform(self, optionstr: str) -> str: ...
def has_option(self, section: str, option: str) -> bool: ...
@@ -86,7 +86,7 @@ class RawConfigParser:
class ConfigParser(RawConfigParser):
_KEYCRE = ... # type: Any
def get(self, section: str, option: str, raw: bool = ..., vars: dict = ...) -> Any: ...
- def items(self, section: str, raw: bool = ..., vars: dict = ...) -> list[Tuple[str, Any]]: ...
+ def items(self, section: str, raw: bool = ..., vars: dict = ...) -> List[Tuple[str, Any]]: ...
def _interpolate(self, section: str, option: str, rawval: Any, vars: Any) -> str: ...
def _interpolation_replace(self, match: Any) -> str: ...
diff --git a/typeshed/stdlib/2/SimpleHTTPServer.pyi b/typeshed/stdlib/2/SimpleHTTPServer.pyi
new file mode 100644
index 0000000..5bfed2a
--- /dev/null
+++ b/typeshed/stdlib/2/SimpleHTTPServer.pyi
@@ -0,0 +1,16 @@
+# Stubs for SimpleHTTPServer (Python 2)
+
+from typing import Any, AnyStr, IO, Mapping, Optional, Union
+import BaseHTTPServer
+from StringIO import StringIO
+
+class SimpleHTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+ server_version = ... # type: str
+ def do_GET(self) -> None: ...
+ def do_HEAD(self) -> None: ...
+ def send_head(self) -> Optional[IO[str]]: ...
+ def list_directory(self, path: Union[str, unicode]) -> Optional[StringIO]: ...
+ def translate_path(self, path: AnyStr) -> AnyStr: ...
+ def copyfile(self, source: IO[AnyStr], outputfile: IO[AnyStr]): ...
+ def guess_type(self, path: Union[str, unicode]) -> str: ...
+ extensions_map = ... # type: Mapping[str, str]
diff --git a/typeshed/stdlib/2/StringIO.pyi b/typeshed/stdlib/2/StringIO.pyi
index bf77516..0526edf 100644
--- a/typeshed/stdlib/2/StringIO.pyi
+++ b/typeshed/stdlib/2/StringIO.pyi
@@ -1,6 +1,6 @@
# Stubs for StringIO (Python 2)
-from typing import Any, IO, AnyStr, Iterator, Iterable, Generic, List
+from typing import Any, IO, AnyStr, Iterator, Iterable, Generic, List, Optional
class StringIO(IO[AnyStr], Generic[AnyStr]):
closed = ... # type: bool
@@ -12,13 +12,13 @@ class StringIO(IO[AnyStr], Generic[AnyStr]):
def next(self) -> AnyStr: ...
def close(self) -> None: ...
def isatty(self) -> bool: ...
- def seek(self, pos: int, mode: int = ...) -> None: ...
+ def seek(self, pos: int, mode: int = ...) -> int: ...
def tell(self) -> int: ...
def read(self, n: int = ...) -> AnyStr: ...
def readline(self, length: int = ...) -> AnyStr: ...
def readlines(self, sizehint: int = ...) -> List[AnyStr]: ...
- def truncate(self, size: int = ...) -> int: ...
- def write(self, s: AnyStr) -> None: ...
+ def truncate(self, size: Optional[int] = ...) -> int: ...
+ def write(self, s: AnyStr) -> int: ...
def writelines(self, iterable: Iterable[AnyStr]) -> None: ...
def flush(self) -> None: ...
def getvalue(self) -> AnyStr: ...
diff --git a/typeshed/stdlib/2/__builtin__.pyi b/typeshed/stdlib/2/__builtin__.pyi
index 59b01e5..541122b 100644
--- a/typeshed/stdlib/2/__builtin__.pyi
+++ b/typeshed/stdlib/2/__builtin__.pyi
@@ -8,7 +8,7 @@ from typing import (
Sequence, Mapping, Tuple, List, Any, Dict, Callable, Generic, Set,
AbstractSet, FrozenSet, Sized, Reversible, SupportsInt, SupportsFloat, SupportsAbs,
SupportsRound, IO, BinaryIO, Union, AnyStr, MutableSequence, MutableMapping,
- MutableSet, ItemsView, KeysView, ValuesView, Optional, Container,
+ MutableSet, ItemsView, KeysView, ValuesView, Optional, Container, Type
)
from abc import abstractmethod, ABCMeta
from mypy_extensions import NoReturn
@@ -22,15 +22,14 @@ _T1 = TypeVar('_T1')
_T2 = TypeVar('_T2')
_T3 = TypeVar('_T3')
_T4 = TypeVar('_T4')
+_T5 = TypeVar('_T5')
_TT = TypeVar('_TT', bound='type')
-class staticmethod: pass # Special, only valid as a decorator.
-class classmethod: pass # Special, only valid as a decorator.
-
class object:
__doc__ = ... # type: Optional[str]
__class__ = ... # type: type
__slots__ = ... # type: Optional[Union[str, unicode, Iterable[Union[str, unicode]]]]
+ __module__ = ... # type: str
def __init__(self) -> None: ...
def __new__(cls) -> Any: ...
@@ -44,6 +43,22 @@ class object:
def __getattribute__(self, name: str) -> Any: ...
def __delattr__(self, name: str) -> None: ...
def __sizeof__(self) -> int: ...
+ def __reduce__(self) -> tuple: ...
+ def __reduce_ex__(self, protocol: int) -> tuple: ...
+
+class staticmethod(object): # Special, only valid as a decorator.
+ __func__ = ... # type: function
+
+ def __init__(self, f: function) -> None: ...
+ def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ...
+ def __get__(self, obj: _T, type: Optional[Type[_T]]=...) -> function: ...
+
+class classmethod(object): # Special, only valid as a decorator.
+ __func__ = ... # type: function
+
+ def __init__(self, f: function) -> None: ...
+ def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ...
+ def __get__(self, obj: _T, type: Optional[Type[_T]]=...) -> function: ...
class type(object):
__bases__ = ... # type: Tuple[type, ...]
@@ -297,14 +312,14 @@ class str(basestring, Sequence[str]):
def __init__(self, object: object='') -> None: ...
def capitalize(self) -> str: ...
def center(self, width: int, fillchar: str = ...) -> str: ...
- def count(self, x: unicode) -> int: ...
+ def count(self, x: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ...
def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ...
def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]]) -> bool: ...
def expandtabs(self, tabsize: int = 8) -> str: ...
- def find(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+ def find(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def format(self, *args: Any, **kwargs: Any) -> str: ...
- def index(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+ def index(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def isalnum(self) -> bool: ...
def isalpha(self) -> bool: ...
def isdigit(self) -> bool: ...
@@ -326,8 +341,8 @@ class str(basestring, Sequence[str]):
@overload
def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
def replace(self, old: AnyStr, new: AnyStr, count: int = ...) -> AnyStr: ...
- def rfind(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
- def rindex(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+ def rfind(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
+ def rindex(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def rjust(self, width: int, fillchar: str = ...) -> str: ...
@overload
def rpartition(self, sep: bytearray) -> Tuple[str, bytearray, str]: ...
@@ -470,9 +485,10 @@ class slice(object):
step = ... # type: Optional[int]
stop = ... # type: Optional[int]
@overload
- def __init__(self, stop: int = None) -> None: ...
+ def __init__(self, stop: Optional[int]) -> None: ...
@overload
- def __init__(self, start: int = None, stop: int = None, step: int = None) -> None: ...
+ def __init__(self, start: Optional[int], stop: Optional[int], step: int = None) -> None: ...
+ def indices(self, len: int) -> Tuple[int, int, int]: ...
class tuple(Sequence[_T_co], Generic[_T_co]):
def __init__(self, iterable: Iterable[_T_co] = ...) -> None: ...
@@ -550,10 +566,11 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
@overload
def __init__(self, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
+ def __new__(cls: Type[_T1], *args: Any, **kwargs: Any) -> _T1: ...
+
def has_key(self, k: _KT) -> bool: ...
def clear(self) -> None: ...
def copy(self) -> Dict[_KT, _VT]: ...
- def pop(self, k: _KT, default: _VT = ...) -> _VT: ...
def popitem(self) -> Tuple[_KT, _VT]: ...
def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
@overload
@@ -583,60 +600,60 @@ class set(MutableSet[_T], Generic[_T]):
def __init__(self, iterable: Iterable[_T] = ...) -> None: ...
def add(self, element: _T) -> None: ...
def clear(self) -> None: ...
- def copy(self) -> set[_T]: ...
- def difference(self, *s: Iterable[Any]) -> set[_T]: ...
+ def copy(self) -> Set[_T]: ...
+ def difference(self, *s: Iterable[Any]) -> Set[_T]: ...
def difference_update(self, *s: Iterable[Any]) -> None: ...
def discard(self, element: _T) -> None: ...
- def intersection(self, *s: Iterable[Any]) -> set[_T]: ...
+ def intersection(self, *s: Iterable[Any]) -> Set[_T]: ...
def intersection_update(self, *s: Iterable[Any]) -> None: ...
def isdisjoint(self, s: Iterable[Any]) -> bool: ...
def issubset(self, s: Iterable[Any]) -> bool: ...
def issuperset(self, s: Iterable[Any]) -> bool: ...
def pop(self) -> _T: ...
def remove(self, element: _T) -> None: ...
- def symmetric_difference(self, s: Iterable[_T]) -> set[_T]: ...
+ def symmetric_difference(self, s: Iterable[_T]) -> Set[_T]: ...
def symmetric_difference_update(self, s: Iterable[_T]) -> None: ...
- def union(self, *s: Iterable[_T]) -> set[_T]: ...
+ def union(self, *s: Iterable[_T]) -> Set[_T]: ...
def update(self, *s: Iterable[_T]) -> None: ...
def __len__(self) -> int: ...
def __contains__(self, o: object) -> bool: ...
def __iter__(self) -> Iterator[_T]: ...
def __str__(self) -> str: ...
- def __and__(self, s: AbstractSet[Any]) -> set[_T]: ...
- def __iand__(self, s: AbstractSet[Any]) -> set[_T]: ...
- def __or__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
- def __ior__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
- def __sub__(self, s: AbstractSet[Any]) -> set[_T]: ...
- def __isub__(self, s: AbstractSet[Any]) -> set[_T]: ...
- def __xor__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
- def __ixor__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
+ def __and__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+ def __iand__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+ def __or__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
+ def __ior__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
+ def __sub__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+ def __isub__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+ def __xor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
+ def __ixor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
def __le__(self, s: AbstractSet[Any]) -> bool: ...
def __lt__(self, s: AbstractSet[Any]) -> bool: ...
def __ge__(self, s: AbstractSet[Any]) -> bool: ...
def __gt__(self, s: AbstractSet[Any]) -> bool: ...
# TODO more set operations
-class frozenset(FrozenSet[_T], Generic[_T]):
+class frozenset(AbstractSet[_T], Generic[_T]):
@overload
def __init__(self) -> None: ...
@overload
def __init__(self, iterable: Iterable[_T]) -> None: ...
- def copy(self) -> frozenset[_T]: ...
- def difference(self, *s: Iterable[Any]) -> frozenset[_T]: ...
- def intersection(self, *s: Iterable[Any]) -> frozenset[_T]: ...
+ def copy(self) -> FrozenSet[_T]: ...
+ def difference(self, *s: Iterable[Any]) -> FrozenSet[_T]: ...
+ def intersection(self, *s: Iterable[Any]) -> FrozenSet[_T]: ...
def isdisjoint(self, s: Iterable[_T]) -> bool: ...
def issubset(self, s: Iterable[Any]) -> bool: ...
def issuperset(self, s: Iterable[Any]) -> bool: ...
- def symmetric_difference(self, s: Iterable[_T]) -> frozenset[_T]: ...
- def union(self, *s: Iterable[_T]) -> frozenset[_T]: ...
+ def symmetric_difference(self, s: Iterable[_T]) -> FrozenSet[_T]: ...
+ def union(self, *s: Iterable[_T]) -> FrozenSet[_T]: ...
def __len__(self) -> int: ...
def __contains__(self, o: object) -> bool: ...
def __iter__(self) -> Iterator[_T]: ...
def __str__(self) -> str: ...
- def __and__(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
- def __or__(self, s: AbstractSet[_S]) -> frozenset[Union[_T, _S]]: ...
- def __sub__(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
- def __xor__(self, s: AbstractSet[_S]) -> frozenset[Union[_T, _S]]: ...
+ def __and__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ...
+ def __or__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ...
+ def __sub__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ...
+ def __xor__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ...
def __le__(self, s: AbstractSet[Any]) -> bool: ...
def __lt__(self, s: AbstractSet[Any]) -> bool: ...
def __ge__(self, s: AbstractSet[Any]) -> bool: ...
@@ -658,12 +675,7 @@ class xrange(Sized, Iterable[int], Reversible[int]):
def __getitem__(self, i: int) -> int: ...
def __reversed__(self) -> Iterator[int]: ...
-class module:
- __name__ = ... # type: str
- __file__ = ... # type: str
- __dict__ = ... # type: Dict[unicode, Any]
-
-class property:
+class property(object):
def __init__(self, fget: Callable[[Any], Any] = None,
fset: Callable[[Any, Any], None] = None,
fdel: Callable[[Any], None] = None, doc: str = None) -> None: ...
@@ -673,6 +685,9 @@ class property:
def __get__(self, obj: Any, type: type=None) -> Any: ...
def __set__(self, obj: Any, value: Any) -> None: ...
def __delete__(self, obj: Any) -> None: ...
+ def fget(self) -> Any: ...
+ def fset(self, value: Any) -> None: ...
+ def fdel(self) -> None: ...
long = int
bytes = str
@@ -693,7 +708,7 @@ def dir(o: object = ...) -> List[str]: ...
def divmod(a: int, b: int) -> Tuple[int, int]: ...
@overload
def divmod(a: float, b: float) -> Tuple[float, float]: ...
-def exit(code: int = ...) -> NoReturn: ...
+def exit(code: Any = ...) -> NoReturn: ...
@overload
def filter(function: Callable[[_T], Any],
iterable: Iterable[_T]) -> List[_T]: ...
@@ -712,8 +727,8 @@ def intern(string: str) -> str: ...
def iter(iterable: Iterable[_T]) -> Iterator[_T]: ...
@overload
def iter(function: Callable[[], _T], sentinel: _T) -> Iterator[_T]: ...
-def isinstance(o: object, t: Union[type, Tuple[type, ...]]) -> bool: ...
-def issubclass(cls: type, classinfo: Union[type, Tuple[type, ...]]) -> bool: ...
+def isinstance(o: object, t: Union[type, Tuple[Union[type, Tuple], ...]]) -> bool: ...
+def issubclass(cls: type, classinfo: Union[type, Tuple[Union[type, Tuple], ...]]) -> bool: ...
def len(o: Sized) -> int: ...
@overload
def map(func: Callable[[_T1], _S], iter1: Iterable[_T1]) -> List[_S]: ...
@@ -794,7 +809,15 @@ def zip(iter1: Iterable[_T1], iter2: Iterable[_T2],
@overload
def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3],
iter4: Iterable[_T4]) -> List[Tuple[_T1, _T2,
- _T3, _T4]]: ... # TODO more than four iterables
+ _T3, _T4]]: ...
+@overload
+def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3],
+ iter4: Iterable[_T4], iter5: Iterable[_T5]) -> List[Tuple[_T1, _T2,
+ _T3, _T4, _T5]]: ...
+@overload
+def zip(iter1: Iterable[Any], iter2: Iterable[Any], iter3: Iterable[Any],
+ iter4: Iterable[Any], iter5: Iterable[Any], iter6: Iterable[Any],
+ *iterables: Iterable[Any]) -> List[Tuple[Any, ...]]: ...
def __import__(name: unicode,
globals: Dict[str, Any] = ...,
locals: Dict[str, Any] = ...,
@@ -809,12 +832,12 @@ class ellipsis: ...
Ellipsis = ... # type: ellipsis
# TODO: buffer support is incomplete; e.g. some_string.startswith(some_buffer) doesn't type check.
-AnyBuffer = TypeVar('AnyBuffer', str, unicode, bytearray, buffer)
+_AnyBuffer = TypeVar('_AnyBuffer', str, unicode, bytearray, buffer)
class buffer(Sized):
- def __init__(self, object: AnyBuffer, offset: int = ..., size: int = ...) -> None: ...
- def __add__(self, other: AnyBuffer) -> str: ...
- def __cmp__(self, other: AnyBuffer) -> bool: ...
+ def __init__(self, object: _AnyBuffer, offset: int = ..., size: int = ...) -> None: ...
+ def __add__(self, other: _AnyBuffer) -> str: ...
+ def __cmp__(self, other: _AnyBuffer) -> bool: ...
def __getitem__(self, key: Union[int, slice]) -> str: ...
def __getslice__(self, i: int, j: int) -> str: ...
def __len__(self) -> int: ...
@@ -850,7 +873,7 @@ class memoryview(Sized, Container[bytes]):
def tobytes(self) -> bytes: ...
def tolist(self) -> List[int]: ...
-class BaseException:
+class BaseException(object):
args = ... # type: Tuple[Any, ...]
message = ... # type: str
def __init__(self, *args: Any) -> None: ...
@@ -892,6 +915,7 @@ class SyntaxError(StandardError):
lineno = ... # type: int
offset = ... # type: int
text = ... # type: str
+ filename = ... # type: str
class IndentationError(SyntaxError): ...
class TabError(IndentationError): ...
class SystemError(StandardError): ...
@@ -943,13 +967,13 @@ class file(BinaryIO):
def readable(self) -> bool: ...
def writable(self) -> bool: ...
def seekable(self) -> bool: ...
- def seek(self, offset: int, whence: int = ...) -> None: ...
+ def seek(self, offset: int, whence: int = ...) -> int: ...
def tell(self) -> int: ...
def readline(self, limit: int = ...) -> str: ...
def readlines(self, hint: int = ...) -> List[str]: ...
- def write(self, data: str) -> None: ...
+ def write(self, data: str) -> int: ...
def writelines(self, data: Iterable[str]) -> None: ...
- def truncate(self, pos: int = ...) -> int: ...
+ def truncate(self, pos: Optional[int] = ...) -> int: ...
# Very old builtins
def apply(func: Callable[..., _T], args: Sequence[Any] = None, kwds: Mapping[str, Any] = None) -> _T: ...
diff --git a/typeshed/stdlib/2/__future__.pyi b/typeshed/stdlib/2/__future__.pyi
deleted file mode 100644
index 2414069..0000000
--- a/typeshed/stdlib/2/__future__.pyi
+++ /dev/null
@@ -1,13 +0,0 @@
-from sys import _version_info
-
-class _Feature:
- def getOptionalRelease(self) -> _version_info: ...
- def getMandatoryRelease(self) -> _version_info: ...
-
-absolute_import = ... # type: _Feature
-division = ... # type: _Feature
-generators = ... # type: _Feature
-nested_scopes = ... # type: _Feature
-print_function = ... # type: _Feature
-unicode_literals = ... # type: _Feature
-with_statement = ... # type: _Feature
diff --git a/typeshed/stdlib/2/_codecs.pyi b/typeshed/stdlib/2/_codecs.pyi
deleted file mode 100644
index 2a00a4d..0000000
--- a/typeshed/stdlib/2/_codecs.pyi
+++ /dev/null
@@ -1,55 +0,0 @@
-"""Stub file for the '_codecs' module."""
-
-from typing import Any, AnyStr, Callable, Tuple, Optional
-
-import codecs
-
-# For convenience:
-_Handler = Callable[[Exception], Tuple[unicode, int]]
-
-# Not exposed. In Python 2, this is defined in unicode.c:
-class _EncodingMap(object):
- def size(self) -> int: ...
-
-def register(search_function: Callable[[str], Any]) -> None: ...
-def register_error(errors: str, handler: _Handler) -> None: ...
-def lookup(a: str) -> codecs.CodecInfo: ...
-def lookup_error(a: str) -> _Handler: ...
-def decode(obj: Any, encoding: str = ..., errors: str = ...) -> Any: ...
-def encode(obj: Any, encoding: str = ..., errors: str = ...) -> Any: ...
-def charmap_build(a: unicode) -> _EncodingMap: ...
-
-def ascii_decode(data: AnyStr, errors: str = ...) -> Tuple[unicode, int]: ...
-def ascii_encode(data: AnyStr, errors: str = ...) -> Tuple[str, int]: ...
-def charbuffer_encode(data: AnyStr, errors: str = ...) -> Tuple[str, int]: ...
-def charmap_decode(data: AnyStr, errors: str = ..., mapping: Optional[_EncodingMap] = ...) -> Tuple[unicode, int]: ...
-def charmap_encode(data: AnyStr, errors: str, mapping: Optional[_EncodingMap] = ...) -> Tuple[str, int]: ...
-def escape_decode(data: AnyStr, errors: str = ...) -> Tuple[unicode, int]: ...
-def escape_encode(data: AnyStr, errors: str = ...) -> Tuple[str, int]: ...
-def latin_1_decode(data: AnyStr, errors: str = ...) -> Tuple[unicode, int]: ...
-def latin_1_encode(data: AnyStr, errors: str = ...) -> Tuple[str, int]: ...
-def raw_unicode_escape_decode(data: AnyStr, errors: str = ...) -> Tuple[unicode, int]: ...
-def raw_unicode_escape_encode(data: AnyStr, errors: str = ...) -> Tuple[str, int]: ...
-def readbuffer_encode(data: AnyStr, errors: str = ...) -> Tuple[str, int]: ...
-def unicode_escape_decode(data: AnyStr, errors: str = ...) -> Tuple[unicode, int]: ...
-def unicode_escape_encode(data: AnyStr, errors: str = ...) -> Tuple[str, int]: ...
-def unicode_internal_decode(data: AnyStr, errors: str = ...) -> Tuple[unicode, int]: ...
-def unicode_internal_encode(data: AnyStr, errors: str = ...) -> Tuple[str, int]: ...
-def utf_16_be_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[unicode, int]: ...
-def utf_16_be_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_16_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[unicode, int]: ...
-def utf_16_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_16_ex_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[unicode, int]: ...
-def utf_16_le_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[unicode, int]: ...
-def utf_16_le_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_32_be_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[unicode, int]: ...
-def utf_32_be_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_32_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[unicode, int]: ...
-def utf_32_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_32_ex_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[unicode, int]: ...
-def utf_32_le_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[unicode, int]: ...
-def utf_32_le_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_7_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[unicode, int]: ...
-def utf_7_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_8_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[unicode, int]: ...
-def utf_8_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
diff --git a/typeshed/stdlib/2/_io.pyi b/typeshed/stdlib/2/_io.pyi
index e85da33..959d1d1 100644
--- a/typeshed/stdlib/2/_io.pyi
+++ b/typeshed/stdlib/2/_io.pyi
@@ -1,4 +1,4 @@
-from typing import Any, Optional, Iterable, Tuple, List, Union
+from typing import Any, BinaryIO, IO, Iterable, Iterator, List, Optional, Type, Tuple, Union
DEFAULT_BUFFER_SIZE = ... # type: int
@@ -9,29 +9,33 @@ class BlockingIOError(IOError):
class UnsupportedOperation(ValueError, IOError): ...
-class _IOBase(object):
- closed = ... # type: bool
- def __enter__(self) -> "_IOBase": ...
- def __exit__(self, type, value, traceback) -> bool: ...
- def __iter__(self) -> "_IOBase": ...
+class _IOBase(BinaryIO):
def _checkClosed(self) -> None: ...
def _checkReadable(self) -> None: ...
def _checkSeekable(self) -> None: ...
def _checkWritable(self) -> None: ...
+ # All these methods are concrete here (you can instantiate this)
def close(self) -> None: ...
def fileno(self) -> int: ...
def flush(self) -> None: ...
def isatty(self) -> bool: ...
- def next(self) -> str: ...
+ def read(self, n: int = ...) -> bytes: ...
def readable(self) -> bool: ...
- def readline(self, limit: int = ...) -> str: ...
- def readlines(self, hint: int = ...) -> List[str]: ...
- def seek(self, offset: int, whence: int = ...) -> None: ...
+ def readline(self, limit: int = ...) -> bytes: ...
+ def readlines(self, hint: int = ...) -> list[bytes]: ...
+ def seek(self, offset: int, whence: int = ...) -> int: ...
def seekable(self) -> bool: ...
def tell(self) -> int: ...
- def truncate(self, size: int = ...) -> int: ...
+ def truncate(self, size: Optional[int] = ...) -> int: ...
def writable(self) -> bool: ...
- def writelines(self, lines: Iterable[str]) -> None: ...
+ def write(self, s: bytes) -> int: ...
+ def writelines(self, lines: Iterable[bytes]) -> None: ...
+ def next(self) -> bytes: ...
+ def __iter__(self) -> Iterator[bytes]: ...
+ def __enter__(self) -> '_IOBase': ...
+ def __exit__(self, t: Optional[Type[BaseException]], value: Optional[BaseException],
+ # TODO: traceback should be TracebackType but that's defined in types
+ traceback: Optional[Any]) -> bool: ...
class _BufferedIOBase(_IOBase):
def read1(self, n: int) -> str: ...
@@ -87,8 +91,6 @@ class _TextIOBase(_IOBase):
newlines = ... # type: Union[str, unicode]
encoding = ... # type: Optional[str]
def read(self, n: int = ...) -> str: ...
- def write(self) -> None:
- raise UnsupportedOperation
def detach(self) -> None:
raise UnsupportedOperation
diff --git a/typeshed/stdlib/2/_sre.pyi b/typeshed/stdlib/2/_sre.pyi
index 08d1ecc..1b6b5bc 100644
--- a/typeshed/stdlib/2/_sre.pyi
+++ b/typeshed/stdlib/2/_sre.pyi
@@ -18,7 +18,7 @@ class SRE_Match(object):
@overload
def group(self, group: int = ...) -> Optional[str]: ...
def groupdict(self) -> Dict[int, Optional[str]]: ...
- def groups(self) -> Tuple[Optional[str]]: ...
+ def groups(self) -> Tuple[Optional[str], ...]: ...
def span(self) -> Tuple[int, int]:
raise IndexError()
@@ -31,7 +31,7 @@ class SRE_Pattern(object):
pattern = ... # type: str
flags = ... # type: int
groups = ... # type: int
- groupindex = ... # type: Mapping[int, int]
+ groupindex = ... # type: Mapping[str, int]
indexgroup = ... # type: Sequence[int]
def findall(self, source: str, pos: int = ..., endpos: int = ...) -> List[Union[tuple, str]]: ...
def finditer(self, source: str, pos: int = ..., endpos: int = ...) -> Iterable[Union[tuple, str]]: ...
@@ -44,7 +44,7 @@ class SRE_Pattern(object):
def compile(pattern: str, flags: int, code: List[int],
groups: int = ...,
- groupindex: Mapping[int, int] = ...,
+ groupindex: Mapping[str, int] = ...,
indexgroup: Sequence[int] = ...) -> SRE_Pattern:
raise OverflowError()
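
As an illustration only (hypothetical pattern, not taken from the patch), the
Python 2 objects these SRE_Match/SRE_Pattern stubs describe behave like this,
which is what the variadic groups() tuple and the str-keyed groupindex capture:

    import re

    m = re.match(r"(?P<year>\d{4})-(\d{2})-(\d{2})", "2017-06-24")
    if m is not None:
        year, month, day = m.groups()    # any number of groups: Tuple[Optional[str], ...]
        print m.re.groupindex["year"]    # prints 1; keys are group names (str), not ints
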
diff --git a/typeshed/stdlib/2/_struct.pyi b/typeshed/stdlib/2/_struct.pyi
index 8917187..add6f84 100644
--- a/typeshed/stdlib/2/_struct.pyi
+++ b/typeshed/stdlib/2/_struct.pyi
@@ -11,12 +11,12 @@ class Struct(object):
def __init__(self, fmt: str) -> None: ...
def pack_into(self, buffer: bytearray, offset: int, obj: Any) -> None: ...
def pack(self, *args) -> str: ...
- def unpack(self, s: str) -> Tuple[Any]: ...
- def unpack_from(self, buffer: bytearray, offset: int = ...) -> Tuple[Any]: ...
+ def unpack(self, s: str) -> Tuple[Any, ...]: ...
+ def unpack_from(self, buffer: bytearray, offset: int = ...) -> Tuple[Any, ...]: ...
def _clearcache() -> None: ...
def calcsize(fmt: str) -> int: ...
def pack(fmt: AnyStr, obj: Any) -> str: ...
def pack_into(fmt: AnyStr, buffer: bytearray, offset: int, obj: Any) -> None: ...
-def unpack(fmt: AnyStr, data: str) -> Tuple[Any]: ...
-def unpack_from(fmt: AnyStr, buffer: bytearray, offset: int = ...) -> Tuple[Any]: ...
+def unpack(fmt: AnyStr, data: str) -> Tuple[Any, ...]: ...
+def unpack_from(fmt: AnyStr, buffer: bytearray, offset: int = ...) -> Tuple[Any, ...]: ...
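
As an illustration only (not taken from the patch), a Python 2 sketch of why
unpack() needs the variadic Tuple[Any, ...] return type:

    import struct

    packed = struct.pack(">hhl", 1, 2, 3)      # two shorts and a long, big-endian
    a, b, c = struct.unpack(">hhl", packed)    # three fields unpack, not one
    assert (a, b, c) == (1, 2, 3)
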
diff --git a/typeshed/stdlib/2/_warnings.pyi b/typeshed/stdlib/2/_warnings.pyi
index 4f54529..63fd9d4 100644
--- a/typeshed/stdlib/2/_warnings.pyi
+++ b/typeshed/stdlib/2/_warnings.pyi
@@ -1,11 +1,11 @@
-from typing import Any, List
+from typing import Any, List, Optional, Type
default_action = ... # type: str
filters = ... # type: List[tuple]
once_registry = ... # type: dict
-def warn(message: Warning, category: type = ..., stacklevel: int = ...) -> None: ...
-def warn_explicit(message: Warning, category: type,
+def warn(message: Warning, category: Optional[Type[Warning]] = ..., stacklevel: int = ...) -> None: ...
+def warn_explicit(message: Warning, category: Optional[Type[Warning]],
filename: str, lineno: int,
module: Any = ..., registry: dict = ...,
module_globals: dict = ...) -> None: ...
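
As an illustration only (not taken from the patch): the category argument is
the Warning class itself, which the new Optional[Type[Warning]] annotation
spells out:

    import warnings

    warnings.warn("this API is deprecated", DeprecationWarning, stacklevel=2)
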
diff --git a/typeshed/stdlib/2/abc.pyi b/typeshed/stdlib/2/abc.pyi
index 3ae3b52..e21065e 100644
--- a/typeshed/stdlib/2/abc.pyi
+++ b/typeshed/stdlib/2/abc.pyi
@@ -1,11 +1,7 @@
-from typing import Any, Dict, Set, Union, Tuple
+from typing import Any, Dict, Set, Tuple, Type
import _weakrefset
-# mypy has special processing for ABCMeta and abstractmethod.
-
-WeakSet = ... # type: _weakrefset.WeakSet
-_InstanceType = ... # type: type
-types = ... # type: module
+# NOTE: mypy has special processing for ABCMeta and abstractmethod.
def abstractmethod(funcobj: Any) -> Any: ...
@@ -21,11 +17,7 @@ class ABCMeta(type):
def __instancecheck__(cls: "ABCMeta", instance: Any) -> Any: ...
def __subclasscheck__(cls: "ABCMeta", subclass: Any) -> Any: ...
def _dump_registry(cls: "ABCMeta", *args: Any, **kwargs: Any) -> None: ...
- # TODO: subclass: Union["ABCMeta", type, Tuple[type, ...]]
- def register(cls: "ABCMeta", subclass: Any) -> None: ...
-
-class _C:
- pass
+ def register(cls: "ABCMeta", subclass: Type[Any]) -> None: ...
# TODO: The real abc.abstractproperty inherits from "property".
class abstractproperty(object):
diff --git a/typeshed/stdlib/2/array.pyi b/typeshed/stdlib/2/array.pyi
index 3ff4081..508eadd 100644
--- a/typeshed/stdlib/2/array.pyi
+++ b/typeshed/stdlib/2/array.pyi
@@ -3,13 +3,13 @@
from typing import (Any, Generic, IO, Iterable, Sequence, TypeVar,
Union, overload, Iterator, Tuple, BinaryIO, List)
-T = TypeVar('T')
+_T = TypeVar('_T')
-class array(Generic[T]):
- def __init__(self, typecode: str, init: Iterable[T] = ...) -> None: ...
- def __add__(self, y: "array[T]") -> "array[T]": ...
+class array(Generic[_T]):
+ def __init__(self, typecode: str, init: Iterable[_T] = ...) -> None: ...
+ def __add__(self, y: "array[_T]") -> "array[_T]": ...
def __contains__(self, y: Any) -> bool: ...
- def __copy__(self) -> "array[T]": ...
+ def __copy__(self) -> "array[_T]": ...
def __deepcopy__(self) -> "array": ...
def __delitem__(self, y: Union[slice, int]) -> None: ...
def __delslice__(self, i: int, j: int) -> None: ...
@@ -17,39 +17,39 @@ class array(Generic[T]):
def __getitem__(self, i: int) -> Any: ...
@overload
def __getitem__(self, s: slice) -> "array": ...
- def __iadd__(self, y: "array[T]") -> "array[T]": ...
- def __imul__(self, y: int) -> "array[T]": ...
- def __iter__(self) -> Iterator[T]: ...
+ def __iadd__(self, y: "array[_T]") -> "array[_T]": ...
+ def __imul__(self, y: int) -> "array[_T]": ...
+ def __iter__(self) -> Iterator[_T]: ...
def __len__(self) -> int: ...
- def __mul__(self, n: int) -> "array[T]": ...
- def __rmul__(self, n: int) -> "array[T]": ...
+ def __mul__(self, n: int) -> "array[_T]": ...
+ def __rmul__(self, n: int) -> "array[_T]": ...
@overload
- def __setitem__(self, i: int, y: T) -> None: ...
+ def __setitem__(self, i: int, y: _T) -> None: ...
@overload
- def __setitem__(self, i: slice, y: "array[T]") -> None: ...
+ def __setitem__(self, i: slice, y: "array[_T]") -> None: ...
- def append(self, x: T) -> None: ...
+ def append(self, x: _T) -> None: ...
def buffer_info(self) -> Tuple[int, int]: ...
def byteswap(self) -> None:
raise RuntimeError()
def count(self) -> int: ...
- def extend(self, x: Sequence[T]) -> None: ...
- def fromlist(self, list: List[T]) -> None:
+ def extend(self, x: Sequence[_T]) -> None: ...
+ def fromlist(self, list: List[_T]) -> None:
raise EOFError()
raise IOError()
def fromfile(self, f: BinaryIO, n: int) -> None: ...
def fromstring(self, s: str) -> None: ...
def fromunicode(self, u: unicode) -> None: ...
- def index(self, x: T) -> int: ...
- def insert(self, i: int, x: T) -> None: ...
- def pop(self, i: int = ...) -> T: ...
+ def index(self, x: _T) -> int: ...
+ def insert(self, i: int, x: _T) -> None: ...
+ def pop(self, i: int = ...) -> _T: ...
def read(self, f: IO[str], n: int) -> None:
raise DeprecationWarning()
- def remove(self, x: T) -> None: ...
+ def remove(self, x: _T) -> None: ...
def reverse(self) -> None: ...
def tofile(self, f: BinaryIO) -> None:
raise IOError()
- def tolist(self) -> List[T]: ...
+ def tolist(self) -> List[_T]: ...
def tostring(self) -> str: ...
def tounicode(self) -> unicode: ...
def write(self, f: IO[str]) -> None:
diff --git a/typeshed/stdlib/2/base64.pyi b/typeshed/stdlib/2/base64.pyi
deleted file mode 100644
index d593fa8..0000000
--- a/typeshed/stdlib/2/base64.pyi
+++ /dev/null
@@ -1,25 +0,0 @@
-# Stubs for base64
-
-# Based on http://docs.python.org/3.2/library/base64.html
-
-from typing import IO
-
-def b64encode(s: str, altchars: str = ...) -> str: ...
-def b64decode(s: str, altchars: str = ...,
- validate: bool = ...) -> str: ...
-def standard_b64encode(s: str) -> str: ...
-def standard_b64decode(s: str) -> str: ...
-def urlsafe_b64encode(s: str) -> str: ...
-def urlsafe_b64decode(s: str) -> str: ...
-def b32encode(s: str) -> str: ...
-def b32decode(s: str, casefold: bool = ...,
- map01: str = ...) -> str: ...
-def b16encode(s: str) -> str: ...
-def b16decode(s: str, casefold: bool = ...) -> str: ...
-
-def decode(input: IO[str], output: IO[str]) -> None: ...
-def decodebytes(s: str) -> str: ...
-def decodestring(s: str) -> str: ...
-def encode(input: IO[str], output: IO[str]) -> None: ...
-def encodebytes(s: str) -> str: ...
-def encodestring(s: str) -> str: ...
diff --git a/typeshed/stdlib/2/binascii.pyi b/typeshed/stdlib/2/binascii.pyi
deleted file mode 100644
index f8b85b6..0000000
--- a/typeshed/stdlib/2/binascii.pyi
+++ /dev/null
@@ -1,21 +0,0 @@
-"""Stubs for the binascii module."""
-
-def a2b_base64(string: str) -> str: ...
-def a2b_hex(hexstr: str) -> str: ...
-def a2b_hqx(string: str) -> str: ...
-def a2b_qp(string: str, header: bool = ...) -> str: ...
-def a2b_uu(string: str) -> str: ...
-def b2a_base64(data: str) -> str: ...
-def b2a_hex(data: str) -> str: ...
-def b2a_hqx(data: str) -> str: ...
-def b2a_qp(data: str, quotetabs: bool = ..., istext: bool = ..., header: bool = ...) -> str: ...
-def b2a_uu(data: str) -> str: ...
-def crc32(data: str, crc: int = None) -> int: ...
-def crc_hqx(data: str, oldcrc: int) -> int: ...
-def hexlify(data: str) -> str: ...
-def rlecode_hqx(data: str) -> str: ...
-def rledecode_hqx(data: str) -> str: ...
-def unhexlify(hexstr: str) -> str: ...
-
-class Error(Exception): ...
-class Incomplete(Exception): ...
diff --git a/typeshed/stdlib/2/builtins.pyi b/typeshed/stdlib/2/builtins.pyi
index 59b01e5..541122b 100644
--- a/typeshed/stdlib/2/builtins.pyi
+++ b/typeshed/stdlib/2/builtins.pyi
@@ -8,7 +8,7 @@ from typing import (
Sequence, Mapping, Tuple, List, Any, Dict, Callable, Generic, Set,
AbstractSet, FrozenSet, Sized, Reversible, SupportsInt, SupportsFloat, SupportsAbs,
SupportsRound, IO, BinaryIO, Union, AnyStr, MutableSequence, MutableMapping,
- MutableSet, ItemsView, KeysView, ValuesView, Optional, Container,
+ MutableSet, ItemsView, KeysView, ValuesView, Optional, Container, Type
)
from abc import abstractmethod, ABCMeta
from mypy_extensions import NoReturn
@@ -22,15 +22,14 @@ _T1 = TypeVar('_T1')
_T2 = TypeVar('_T2')
_T3 = TypeVar('_T3')
_T4 = TypeVar('_T4')
+_T5 = TypeVar('_T5')
_TT = TypeVar('_TT', bound='type')
-class staticmethod: pass # Special, only valid as a decorator.
-class classmethod: pass # Special, only valid as a decorator.
-
class object:
__doc__ = ... # type: Optional[str]
__class__ = ... # type: type
__slots__ = ... # type: Optional[Union[str, unicode, Iterable[Union[str, unicode]]]]
+ __module__ = ... # type: str
def __init__(self) -> None: ...
def __new__(cls) -> Any: ...
@@ -44,6 +43,22 @@ class object:
def __getattribute__(self, name: str) -> Any: ...
def __delattr__(self, name: str) -> None: ...
def __sizeof__(self) -> int: ...
+ def __reduce__(self) -> tuple: ...
+ def __reduce_ex__(self, protocol: int) -> tuple: ...
+
+class staticmethod(object): # Special, only valid as a decorator.
+ __func__ = ... # type: function
+
+ def __init__(self, f: function) -> None: ...
+ def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ...
+ def __get__(self, obj: _T, type: Optional[Type[_T]]=...) -> function: ...
+
+class classmethod(object): # Special, only valid as a decorator.
+ __func__ = ... # type: function
+
+ def __init__(self, f: function) -> None: ...
+ def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ...
+ def __get__(self, obj: _T, type: Optional[Type[_T]]=...) -> function: ...
class type(object):
__bases__ = ... # type: Tuple[type, ...]
@@ -297,14 +312,14 @@ class str(basestring, Sequence[str]):
def __init__(self, object: object='') -> None: ...
def capitalize(self) -> str: ...
def center(self, width: int, fillchar: str = ...) -> str: ...
- def count(self, x: unicode) -> int: ...
+ def count(self, x: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def decode(self, encoding: unicode = ..., errors: unicode = ...) -> unicode: ...
def encode(self, encoding: unicode = ..., errors: unicode = ...) -> str: ...
def endswith(self, suffix: Union[unicode, Tuple[unicode, ...]]) -> bool: ...
def expandtabs(self, tabsize: int = 8) -> str: ...
- def find(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+ def find(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def format(self, *args: Any, **kwargs: Any) -> str: ...
- def index(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+ def index(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def isalnum(self) -> bool: ...
def isalpha(self) -> bool: ...
def isdigit(self) -> bool: ...
@@ -326,8 +341,8 @@ class str(basestring, Sequence[str]):
@overload
def partition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
def replace(self, old: AnyStr, new: AnyStr, count: int = ...) -> AnyStr: ...
- def rfind(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
- def rindex(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
+ def rfind(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
+ def rindex(self, sub: unicode, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def rjust(self, width: int, fillchar: str = ...) -> str: ...
@overload
def rpartition(self, sep: bytearray) -> Tuple[str, bytearray, str]: ...
@@ -470,9 +485,10 @@ class slice(object):
step = ... # type: Optional[int]
stop = ... # type: Optional[int]
@overload
- def __init__(self, stop: int = None) -> None: ...
+ def __init__(self, stop: Optional[int]) -> None: ...
@overload
- def __init__(self, start: int = None, stop: int = None, step: int = None) -> None: ...
+ def __init__(self, start: Optional[int], stop: Optional[int], step: int = None) -> None: ...
+ def indices(self, len: int) -> Tuple[int, int, int]: ...
class tuple(Sequence[_T_co], Generic[_T_co]):
def __init__(self, iterable: Iterable[_T_co] = ...) -> None: ...
@@ -550,10 +566,11 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
@overload
def __init__(self, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
+ def __new__(cls: Type[_T1], *args: Any, **kwargs: Any) -> _T1: ...
+
def has_key(self, k: _KT) -> bool: ...
def clear(self) -> None: ...
def copy(self) -> Dict[_KT, _VT]: ...
- def pop(self, k: _KT, default: _VT = ...) -> _VT: ...
def popitem(self) -> Tuple[_KT, _VT]: ...
def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
@overload
@@ -583,60 +600,60 @@ class set(MutableSet[_T], Generic[_T]):
def __init__(self, iterable: Iterable[_T] = ...) -> None: ...
def add(self, element: _T) -> None: ...
def clear(self) -> None: ...
- def copy(self) -> set[_T]: ...
- def difference(self, *s: Iterable[Any]) -> set[_T]: ...
+ def copy(self) -> Set[_T]: ...
+ def difference(self, *s: Iterable[Any]) -> Set[_T]: ...
def difference_update(self, *s: Iterable[Any]) -> None: ...
def discard(self, element: _T) -> None: ...
- def intersection(self, *s: Iterable[Any]) -> set[_T]: ...
+ def intersection(self, *s: Iterable[Any]) -> Set[_T]: ...
def intersection_update(self, *s: Iterable[Any]) -> None: ...
def isdisjoint(self, s: Iterable[Any]) -> bool: ...
def issubset(self, s: Iterable[Any]) -> bool: ...
def issuperset(self, s: Iterable[Any]) -> bool: ...
def pop(self) -> _T: ...
def remove(self, element: _T) -> None: ...
- def symmetric_difference(self, s: Iterable[_T]) -> set[_T]: ...
+ def symmetric_difference(self, s: Iterable[_T]) -> Set[_T]: ...
def symmetric_difference_update(self, s: Iterable[_T]) -> None: ...
- def union(self, *s: Iterable[_T]) -> set[_T]: ...
+ def union(self, *s: Iterable[_T]) -> Set[_T]: ...
def update(self, *s: Iterable[_T]) -> None: ...
def __len__(self) -> int: ...
def __contains__(self, o: object) -> bool: ...
def __iter__(self) -> Iterator[_T]: ...
def __str__(self) -> str: ...
- def __and__(self, s: AbstractSet[Any]) -> set[_T]: ...
- def __iand__(self, s: AbstractSet[Any]) -> set[_T]: ...
- def __or__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
- def __ior__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
- def __sub__(self, s: AbstractSet[Any]) -> set[_T]: ...
- def __isub__(self, s: AbstractSet[Any]) -> set[_T]: ...
- def __xor__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
- def __ixor__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
+ def __and__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+ def __iand__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+ def __or__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
+ def __ior__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
+ def __sub__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+ def __isub__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+ def __xor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
+ def __ixor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
def __le__(self, s: AbstractSet[Any]) -> bool: ...
def __lt__(self, s: AbstractSet[Any]) -> bool: ...
def __ge__(self, s: AbstractSet[Any]) -> bool: ...
def __gt__(self, s: AbstractSet[Any]) -> bool: ...
# TODO more set operations
-class frozenset(FrozenSet[_T], Generic[_T]):
+class frozenset(AbstractSet[_T], Generic[_T]):
@overload
def __init__(self) -> None: ...
@overload
def __init__(self, iterable: Iterable[_T]) -> None: ...
- def copy(self) -> frozenset[_T]: ...
- def difference(self, *s: Iterable[Any]) -> frozenset[_T]: ...
- def intersection(self, *s: Iterable[Any]) -> frozenset[_T]: ...
+ def copy(self) -> FrozenSet[_T]: ...
+ def difference(self, *s: Iterable[Any]) -> FrozenSet[_T]: ...
+ def intersection(self, *s: Iterable[Any]) -> FrozenSet[_T]: ...
def isdisjoint(self, s: Iterable[_T]) -> bool: ...
def issubset(self, s: Iterable[Any]) -> bool: ...
def issuperset(self, s: Iterable[Any]) -> bool: ...
- def symmetric_difference(self, s: Iterable[_T]) -> frozenset[_T]: ...
- def union(self, *s: Iterable[_T]) -> frozenset[_T]: ...
+ def symmetric_difference(self, s: Iterable[_T]) -> FrozenSet[_T]: ...
+ def union(self, *s: Iterable[_T]) -> FrozenSet[_T]: ...
def __len__(self) -> int: ...
def __contains__(self, o: object) -> bool: ...
def __iter__(self) -> Iterator[_T]: ...
def __str__(self) -> str: ...
- def __and__(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
- def __or__(self, s: AbstractSet[_S]) -> frozenset[Union[_T, _S]]: ...
- def __sub__(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
- def __xor__(self, s: AbstractSet[_S]) -> frozenset[Union[_T, _S]]: ...
+ def __and__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ...
+ def __or__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ...
+ def __sub__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ...
+ def __xor__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ...
def __le__(self, s: AbstractSet[Any]) -> bool: ...
def __lt__(self, s: AbstractSet[Any]) -> bool: ...
def __ge__(self, s: AbstractSet[Any]) -> bool: ...
@@ -658,12 +675,7 @@ class xrange(Sized, Iterable[int], Reversible[int]):
def __getitem__(self, i: int) -> int: ...
def __reversed__(self) -> Iterator[int]: ...
-class module:
- __name__ = ... # type: str
- __file__ = ... # type: str
- __dict__ = ... # type: Dict[unicode, Any]
-
-class property:
+class property(object):
def __init__(self, fget: Callable[[Any], Any] = None,
fset: Callable[[Any, Any], None] = None,
fdel: Callable[[Any], None] = None, doc: str = None) -> None: ...
@@ -673,6 +685,9 @@ class property:
def __get__(self, obj: Any, type: type=None) -> Any: ...
def __set__(self, obj: Any, value: Any) -> None: ...
def __delete__(self, obj: Any) -> None: ...
+ def fget(self) -> Any: ...
+ def fset(self, value: Any) -> None: ...
+ def fdel(self) -> None: ...
long = int
bytes = str
@@ -693,7 +708,7 @@ def dir(o: object = ...) -> List[str]: ...
def divmod(a: int, b: int) -> Tuple[int, int]: ...
@overload
def divmod(a: float, b: float) -> Tuple[float, float]: ...
-def exit(code: int = ...) -> NoReturn: ...
+def exit(code: Any = ...) -> NoReturn: ...
@overload
def filter(function: Callable[[_T], Any],
iterable: Iterable[_T]) -> List[_T]: ...
@@ -712,8 +727,8 @@ def intern(string: str) -> str: ...
def iter(iterable: Iterable[_T]) -> Iterator[_T]: ...
@overload
def iter(function: Callable[[], _T], sentinel: _T) -> Iterator[_T]: ...
-def isinstance(o: object, t: Union[type, Tuple[type, ...]]) -> bool: ...
-def issubclass(cls: type, classinfo: Union[type, Tuple[type, ...]]) -> bool: ...
+def isinstance(o: object, t: Union[type, Tuple[Union[type, Tuple], ...]]) -> bool: ...
+def issubclass(cls: type, classinfo: Union[type, Tuple[Union[type, Tuple], ...]]) -> bool: ...
def len(o: Sized) -> int: ...
@overload
def map(func: Callable[[_T1], _S], iter1: Iterable[_T1]) -> List[_S]: ...
@@ -794,7 +809,15 @@ def zip(iter1: Iterable[_T1], iter2: Iterable[_T2],
@overload
def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3],
iter4: Iterable[_T4]) -> List[Tuple[_T1, _T2,
- _T3, _T4]]: ... # TODO more than four iterables
+ _T3, _T4]]: ...
+@overload
+def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3],
+ iter4: Iterable[_T4], iter5: Iterable[_T5]) -> List[Tuple[_T1, _T2,
+ _T3, _T4, _T5]]: ...
+@overload
+def zip(iter1: Iterable[Any], iter2: Iterable[Any], iter3: Iterable[Any],
+ iter4: Iterable[Any], iter5: Iterable[Any], iter6: Iterable[Any],
+ *iterables: Iterable[Any]) -> List[Tuple[Any, ...]]: ...
def __import__(name: unicode,
globals: Dict[str, Any] = ...,
locals: Dict[str, Any] = ...,
@@ -809,12 +832,12 @@ class ellipsis: ...
Ellipsis = ... # type: ellipsis
# TODO: buffer support is incomplete; e.g. some_string.startswith(some_buffer) doesn't type check.
-AnyBuffer = TypeVar('AnyBuffer', str, unicode, bytearray, buffer)
+_AnyBuffer = TypeVar('_AnyBuffer', str, unicode, bytearray, buffer)
class buffer(Sized):
- def __init__(self, object: AnyBuffer, offset: int = ..., size: int = ...) -> None: ...
- def __add__(self, other: AnyBuffer) -> str: ...
- def __cmp__(self, other: AnyBuffer) -> bool: ...
+ def __init__(self, object: _AnyBuffer, offset: int = ..., size: int = ...) -> None: ...
+ def __add__(self, other: _AnyBuffer) -> str: ...
+ def __cmp__(self, other: _AnyBuffer) -> bool: ...
def __getitem__(self, key: Union[int, slice]) -> str: ...
def __getslice__(self, i: int, j: int) -> str: ...
def __len__(self) -> int: ...
@@ -850,7 +873,7 @@ class memoryview(Sized, Container[bytes]):
def tobytes(self) -> bytes: ...
def tolist(self) -> List[int]: ...
-class BaseException:
+class BaseException(object):
args = ... # type: Tuple[Any, ...]
message = ... # type: str
def __init__(self, *args: Any) -> None: ...
@@ -892,6 +915,7 @@ class SyntaxError(StandardError):
lineno = ... # type: int
offset = ... # type: int
text = ... # type: str
+ filename = ... # type: str
class IndentationError(SyntaxError): ...
class TabError(IndentationError): ...
class SystemError(StandardError): ...
@@ -943,13 +967,13 @@ class file(BinaryIO):
def readable(self) -> bool: ...
def writable(self) -> bool: ...
def seekable(self) -> bool: ...
- def seek(self, offset: int, whence: int = ...) -> None: ...
+ def seek(self, offset: int, whence: int = ...) -> int: ...
def tell(self) -> int: ...
def readline(self, limit: int = ...) -> str: ...
def readlines(self, hint: int = ...) -> List[str]: ...
- def write(self, data: str) -> None: ...
+ def write(self, data: str) -> int: ...
def writelines(self, data: Iterable[str]) -> None: ...
- def truncate(self, pos: int = ...) -> int: ...
+ def truncate(self, pos: Optional[int] = ...) -> int: ...
# Very old builtins
def apply(func: Callable[..., _T], args: Sequence[Any] = None, kwds: Mapping[str, Any] = None) -> _T: ...
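
As an illustration only (hypothetical values, not taken from the patch), a
Python 2 call that the new five-iterable zip() overload now matches with a
precise tuple element type:

    nums = [1, 2, 3]
    names = ["a", "b", "c"]
    ratios = [0.5, 1.5, 2.5]
    flags = [True, False, True]
    tags = ["x", "y", "z"]

    for num, name, ratio, flag, tag in zip(nums, names, ratios, flags, tags):
        print num, name, ratio, flag, tag
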
diff --git a/typeshed/stdlib/2/cStringIO.pyi b/typeshed/stdlib/2/cStringIO.pyi
index 5b4ad25..462e6ae 100644
--- a/typeshed/stdlib/2/cStringIO.pyi
+++ b/typeshed/stdlib/2/cStringIO.pyi
@@ -16,9 +16,9 @@ class InputType(IO[str], Iterator[str]):
def read(self, size: int = ...) -> str: ...
def readline(self, size: int = ...) -> str: ...
def readlines(self, hint: int = ...) -> List[str]: ...
- def seek(self, offset: int, whence: int = ...) -> None: ...
+ def seek(self, offset: int, whence: int = ...) -> int: ...
def tell(self) -> int: ...
- def truncate(self, size: int = ...) -> Optional[int]: ...
+ def truncate(self, size: Optional[int] = ...) -> int: ...
def __iter__(self) -> 'InputType': ...
def next(self) -> str: ...
def reset(self) -> None: ...
@@ -35,13 +35,13 @@ class OutputType(IO[str], Iterator[str]):
def read(self, size: int = ...) -> str: ...
def readline(self, size: int = ...) -> str: ...
def readlines(self, hint: int = ...) -> List[str]: ...
- def seek(self, offset: int, whence: int = ...) -> None: ...
+ def seek(self, offset: int, whence: int = ...) -> int: ...
def tell(self) -> int: ...
- def truncate(self, size: int = ...) -> Optional[int]: ...
+ def truncate(self, size: Optional[int] = ...) -> int: ...
def __iter__(self) -> 'OutputType': ...
def next(self) -> str: ...
def reset(self) -> None: ...
- def write(self, b: Union[str, unicode]) -> None: ...
+ def write(self, b: Union[str, unicode]) -> int: ...
def writelines(self, lines: Iterable[Union[str, unicode]]) -> None: ...
@overload
diff --git a/typeshed/stdlib/2/collections.pyi b/typeshed/stdlib/2/collections.pyi
index c2f0af2..5360fe6 100644
--- a/typeshed/stdlib/2/collections.pyi
+++ b/typeshed/stdlib/2/collections.pyi
@@ -2,16 +2,28 @@
# Based on http://docs.python.org/2.7/library/collections.html
-# TODO more abstract base classes (interfaces in mypy)
-
-# NOTE: These are incomplete!
+# These are not exported.
+from typing import Any, Dict, Generic, TypeVar, Tuple, overload, Type, Optional, List, Union, Reversible
+# These are exported.
from typing import (
- Any, Container, Dict, Generic, TypeVar, Iterable, Tuple, Callable, Mapping, overload,
- Iterator, Type, Sized, Optional, List, Set, Sequence, Union, Reversible,
- MutableMapping, MutableSet, MutableSequence,
+ Callable as Callable,
+ Container as Container,
+ Hashable as Hashable,
+ ItemsView as ItemsView,
+ Iterable as Iterable,
+ Iterator as Iterator,
+ KeysView as KeysView,
+ Mapping as Mapping,
+ MappingView as MappingView,
+ MutableMapping as MutableMapping,
+ MutableSequence as MutableSequence,
+ MutableSet as MutableSet,
+ Sequence as Sequence,
+ AbstractSet as Set,
+ Sized as Sized,
+ ValuesView as ValuesView,
)
-import typing
_T = TypeVar('_T')
_KT = TypeVar('_KT')
@@ -70,9 +82,10 @@ class Counter(Dict[_T, int], Generic[_T]):
@overload
def update(self, m: Union[Iterable[_T], Iterable[Tuple[_T, int]]], **kwargs: _VT) -> None: ...
-class OrderedDict(Dict[_KT, _VT], Generic[_KT, _VT]):
+class OrderedDict(Dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]):
def popitem(self, last: bool = ...) -> Tuple[_KT, _VT]: ...
def move_to_end(self, key: _KT, last: bool = ...) -> None: ...
+ def __reversed__(self) -> Iterator[_KT]: ...
class defaultdict(Dict[_KT, _VT], Generic[_KT, _VT]):
default_factory = ... # type: Callable[[], _VT]
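
As an illustration only (not taken from the patch), Python 2.7's OrderedDict
already supports reversed(), which the added Reversible base and __reversed__
entry now express:

    from collections import OrderedDict

    od = OrderedDict([("a", 1), ("b", 2), ("c", 3)])
    print list(reversed(od))    # ['c', 'b', 'a']
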
diff --git a/typeshed/stdlib/2/compileall.pyi b/typeshed/stdlib/2/compileall.pyi
index 103d622..28f079a 100644
--- a/typeshed/stdlib/2/compileall.pyi
+++ b/typeshed/stdlib/2/compileall.pyi
@@ -1,7 +1,10 @@
# Stubs for compileall (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-def compile_dir(dir, maxlevels=..., ddir=..., force=..., rx=..., quiet=...): ...
-def compile_file(fullname, ddir=..., force=..., rx=..., quiet=...): ...
-def compile_path(skip_curdir=..., maxlevels=..., force=..., quiet=...): ...
+from typing import Optional, Pattern, Union
+
+_Path = Union[str, bytes]
+
+# fx can be any object with a 'search' method; once we have Protocols we can change the type
+def compile_dir(dir: _Path, maxlevels: int = ..., ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern] = ..., quiet: int = ...) -> None: ...
+def compile_file(fullname: _Path, ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern] = ..., quiet: int = ...) -> None: ...
+def compile_path(skip_curdir: bool = ..., maxlevels: int = ..., force: bool = ..., quiet: int = ...) -> None: ...
diff --git a/typeshed/stdlib/2/copy.pyi b/typeshed/stdlib/2/copy.pyi
deleted file mode 100644
index 0661cb7..0000000
--- a/typeshed/stdlib/2/copy.pyi
+++ /dev/null
@@ -1,10 +0,0 @@
-# Stubs for copy
-
-# NOTE: These are incomplete!
-
-from typing import TypeVar, Dict, Any
-
-_T = TypeVar('_T')
-
-def deepcopy(x: _T, memo: Dict[Any, Any] = ...) -> _T: ...
-def copy(x: _T) -> _T: ...
diff --git a/typeshed/stdlib/2/csv.pyi b/typeshed/stdlib/2/csv.pyi
index c91d330..51a57e4 100644
--- a/typeshed/stdlib/2/csv.pyi
+++ b/typeshed/stdlib/2/csv.pyi
@@ -2,7 +2,7 @@
#
# NOTE: Based on a dynamically typed stub automatically generated by stubgen.
-from typing import Any, Dict, Iterable, List, Sequence, Union
+from typing import Any, Dict, Iterable, List, Sequence, Type, Union
# Public interface of _csv.reader's return type
class _Reader(Iterable[List[str]]):
@@ -11,7 +11,7 @@ class _Reader(Iterable[List[str]]):
def next(self) -> List[str]: ...
-_Row = Sequence[Union[str, int]]
+_Row = Sequence[Any] # May contain anything: csv calls str() on the elements that are not None
# Public interface of _csv.writer's return type
class _Writer:
@@ -27,7 +27,7 @@ QUOTE_NONNUMERIC = ... # type: int
class Error(Exception): ...
-_Dialect = Union[str, Dialect]
+_Dialect = Union[str, Dialect, Type[Dialect]]
def writer(csvfile: Any, dialect: _Dialect = ..., **fmtparams) -> _Writer: ...
def reader(csvfile: Iterable[str], dialect: _Dialect = ..., **fmtparams) -> _Reader: ...
@@ -66,7 +66,7 @@ class DictReader(Iterable):
def __init__(self, f: Iterable[str], fieldnames: Sequence[Any] = ..., restkey=...,
restval=..., dialect: _Dialect = ..., *args, **kwds) -> None: ...
def __iter__(self): ...
- def __next__(self): ...
+ def next(self): ...
_DictRow = Dict[Any, Union[str, int]]
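
As an illustration only (hypothetical data, not taken from the patch), a
Python 2 row containing non-string values, which the relaxed _Row alias is
meant to allow:

    import csv
    import io

    out = io.BytesIO()
    csv.writer(out).writerow(["name", 3, None])   # csv str()s the non-None cells
    print repr(out.getvalue())                    # 'name,3,\r\n'
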
diff --git a/typeshed/stdlib/2/datetime.pyi b/typeshed/stdlib/2/datetime.pyi
index 6174b9a..eb91541 100644
--- a/typeshed/stdlib/2/datetime.pyi
+++ b/typeshed/stdlib/2/datetime.pyi
@@ -3,7 +3,7 @@
# NOTE: These are incomplete!
from time import struct_time
-from typing import Optional, SupportsAbs, Tuple, Union, overload
+from typing import AnyStr, Optional, SupportsAbs, Tuple, Union, overload
MINYEAR = 0
MAXYEAR = 0
@@ -21,7 +21,7 @@ class date(object):
max = ... # type: date
resolution = ... # type: timedelta
- def __init__(self, year: int, month: int = ..., day: int = ...) -> None: ...
+ def __init__(self, year: int, month: int, day: int) -> None: ...
@classmethod
def fromtimestamp(cls, t: float) -> date: ...
@@ -39,7 +39,7 @@ class date(object):
def ctime(self) -> str: ...
def strftime(self, fmt: Union[str, unicode]) -> str: ...
- def __format__(self, fmt: Union[str, unicode]) -> str: ...
+ def __format__(self, fmt: AnyStr) -> AnyStr: ...
def isoformat(self) -> str: ...
def timetuple(self) -> struct_time: ...
def toordinal(self) -> int: ...
@@ -84,7 +84,7 @@ class time:
def __hash__(self) -> int: ...
def isoformat(self) -> str: ...
def strftime(self, fmt: Union[str, unicode]) -> str: ...
- def __format__(self, fmt: str) -> str: ...
+ def __format__(self, fmt: AnyStr) -> AnyStr: ...
def utcoffset(self) -> Optional[timedelta]: ...
def tzname(self) -> Optional[str]: ...
def dst(self) -> Optional[int]: ...
@@ -114,7 +114,7 @@ class timedelta(SupportsAbs[timedelta]):
def __add__(self, other: timedelta) -> timedelta: ...
def __radd__(self, other: timedelta) -> timedelta: ...
def __sub__(self, other: timedelta) -> timedelta: ...
- def __rsub(self, other: timedelta) -> timedelta: ...
+ def __rsub__(self, other: timedelta) -> timedelta: ...
def __neg__(self) -> timedelta: ...
def __pos__(self) -> timedelta: ...
def __abs__(self) -> timedelta: ...
@@ -125,11 +125,9 @@ class timedelta(SupportsAbs[timedelta]):
@overload
def __floordiv__(self, other: int) -> timedelta: ...
@overload
- def __truediv__(self, other: timedelta) -> float: ...
+ def __div__(self, other: timedelta) -> float: ...
@overload
- def __truediv__(self, other: float) -> timedelta: ...
- def __mod__(self, other: timedelta) -> timedelta: ...
- def __divmod__(self, other: timedelta) -> Tuple[int, timedelta]: ...
+ def __div__(self, other: float) -> timedelta: ...
def __le__(self, other: timedelta) -> bool: ...
def __lt__(self, other: timedelta) -> bool: ...
def __ge__(self, other: timedelta) -> bool: ...
@@ -137,12 +135,12 @@ class timedelta(SupportsAbs[timedelta]):
def __hash__(self) -> int: ...
class datetime(object):
- # TODO: is actually subclass of date, but __le__, __lt__, __ge__, __gt__ don't work with date.
+ # TODO: is actually subclass of date, but __le__, __lt__, __ge__, __gt__, __sub__ don't work with date.
min = ... # type: datetime
max = ... # type: datetime
resolution = ... # type: timedelta
- def __init__(self, year: int, month: int = ..., day: int = ..., hour: int = ...,
+ def __init__(self, year: int, month: int, day: int, hour: int = ...,
minute: int = ..., second: int = ..., microseconds: int = ...,
tzinfo: tzinfo = ...) -> None: ...
@@ -161,7 +159,7 @@ class datetime(object):
@property
def microsecond(self) -> int: ...
@property
- def tzinfo(self) -> _tzinfo: ...
+ def tzinfo(self) -> Optional[_tzinfo]: ...
@classmethod
def fromtimestamp(cls, t: float, tz: _tzinfo = ...) -> datetime: ...
@@ -178,10 +176,9 @@ class datetime(object):
@classmethod
def combine(cls, date: date, time: time) -> datetime: ...
def strftime(self, fmt: Union[str, unicode]) -> str: ...
- def __format__(self, fmt: str) -> str: ...
+ def __format__(self, fmt: AnyStr) -> AnyStr: ...
def toordinal(self) -> int: ...
def timetuple(self) -> struct_time: ...
- def timestamp(self) -> float: ...
def utctimetuple(self) -> struct_time: ...
def date(self) -> _date: ...
def time(self) -> _time: ...
@@ -189,7 +186,7 @@ class datetime(object):
def replace(self, year: int = ..., month: int = ..., day: int = ..., hour: int = ...,
minute: int = ..., second: int = ..., microsecond: int = ..., tzinfo:
Union[_tzinfo, bool] = ...) -> datetime: ...
- def astimezone(self, tz: _tzinfo = ...) -> datetime: ...
+ def astimezone(self, tz: _tzinfo) -> datetime: ...
def ctime(self) -> str: ...
def isoformat(self, sep: str = ...) -> str: ...
@classmethod
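
As an illustration only (not taken from the patch): the constructor change
matches the Python 2 runtime, where year, month and day are all required, and
__format__ delegates to strftime for a non-empty format spec:

    from datetime import date

    d = date(2017, 6, 24)
    print format(d, "%Y-%m-%d")    # 2017-06-24
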
diff --git a/typeshed/stdlib/2/encodings/utf_8.pyi b/typeshed/stdlib/2/encodings/utf_8.pyi
index 3be496a..0111184 100644
--- a/typeshed/stdlib/2/encodings/utf_8.pyi
+++ b/typeshed/stdlib/2/encodings/utf_8.pyi
@@ -9,6 +9,6 @@ class StreamWriter(codecs.StreamWriter):
class StreamReader(codecs.StreamReader):
pass
-def getregentry() -> codecs.CodecInfo: pass
-def encode(input: str, errors: str = ...) -> bytes: pass
-def decode(input: bytes, errors: str = ...) -> str: pass
+def getregentry() -> codecs.CodecInfo: ...
+def encode(input: str, errors: str = ...) -> bytes: ...
+def decode(input: bytes, errors: str = ...) -> str: ...
diff --git a/typeshed/stdlib/2/fnmatch.pyi b/typeshed/stdlib/2/fnmatch.pyi
index 23b5978..e933b7b 100644
--- a/typeshed/stdlib/2/fnmatch.pyi
+++ b/typeshed/stdlib/2/fnmatch.pyi
@@ -1,6 +1,8 @@
-from typing import Iterable
+from typing import AnyStr, Iterable, List, Union
-def fnmatch(filename: str, pattern: str) -> bool: ...
-def fnmatchcase(filename: str, pattern: str) -> bool: ...
-def filter(names: Iterable[str], pattern: str) -> Iterable[str]: ...
-def translate(pattern: str) -> str: ...
+_EitherStr = Union[str, unicode]
+
+def fnmatch(filename: _EitherStr, pattern: _EitherStr) -> bool: ...
+def fnmatchcase(filename: _EitherStr, pattern: _EitherStr) -> bool: ...
+def filter(names: Iterable[AnyStr], pattern: _EitherStr) -> List[AnyStr]: ...
+def translate(pattern: AnyStr) -> AnyStr: ...
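
As an illustration only (hypothetical names, not taken from the patch), a
Python 2 call mixing str names with a unicode pattern, which the widened
signatures are meant to accept:

    import fnmatch

    names = ["setup.py", "README.md", "conftest.py"]
    print fnmatch.filter(names, u"*.py")    # ['setup.py', 'conftest.py']
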
diff --git a/typeshed/stdlib/2/hashlib.pyi b/typeshed/stdlib/2/hashlib.pyi
index 95f2b82..fa1dbce 100644
--- a/typeshed/stdlib/2/hashlib.pyi
+++ b/typeshed/stdlib/2/hashlib.pyi
@@ -1,24 +1,28 @@
# Stubs for hashlib (Python 2)
-from typing import Tuple
+from typing import Tuple, Union
-class _hash(object):
- # This is not actually in the module namespace.
- digest_size = 0
+_DataType = Union[str, unicode, bytearray, buffer, memoryview]
+
+class _hash(object): # This is not actually in the module namespace.
+ name = ... # type: str
block_size = 0
- def update(self, arg: str) -> None: ...
+ digest_size = 0
+ digestsize = 0
+ def __init__(self, arg: _DataType = ...) -> None: ...
+ def update(self, arg: _DataType) -> None: ...
def digest(self) -> str: ...
def hexdigest(self) -> str: ...
def copy(self) -> _hash: ...
def new(name: str, data: str = ...) -> _hash: ...
-def md5(s: str = ...) -> _hash: ...
-def sha1(s: str = ...) -> _hash: ...
-def sha224(s: str = ...) -> _hash: ...
-def sha256(s: str = ...) -> _hash: ...
-def sha384(s: str = ...) -> _hash: ...
-def sha512(s: str = ...) -> _hash: ...
+def md5(s: _DataType = ...) -> _hash: ...
+def sha1(s: _DataType = ...) -> _hash: ...
+def sha224(s: _DataType = ...) -> _hash: ...
+def sha256(s: _DataType = ...) -> _hash: ...
+def sha384(s: _DataType = ...) -> _hash: ...
+def sha512(s: _DataType = ...) -> _hash: ...
algorithms = ... # type: Tuple[str, ...]
algorithms_guaranteed = ... # type: Tuple[str, ...]
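
As an illustration only (not taken from the patch), a Python 2 call that the
widened _DataType union is meant to cover:

    import hashlib

    print hashlib.sha256(bytearray("payload")).hexdigest()
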
diff --git a/typeshed/stdlib/2/inspect.pyi b/typeshed/stdlib/2/inspect.pyi
index e86da66..a356934 100644
--- a/typeshed/stdlib/2/inspect.pyi
+++ b/typeshed/stdlib/2/inspect.pyi
@@ -1,6 +1,5 @@
-# TODO incomplete
from types import TracebackType, FrameType, ModuleType
-from typing import Any, Callable, List, Optional, Tuple, Union, NamedTuple
+from typing import Any, Dict, Callable, List, Optional, Tuple, Union, NamedTuple, Type
# Types and members
ModuleInfo = NamedTuple('ModuleInfo', [('name', str),
@@ -47,22 +46,31 @@ def getsource(object: object) -> str: ...
def cleandoc(doc: str) -> str: ...
# Classes and functions
-# TODO make the return type more specific
-def getclasstree(classes: List[type], unique: bool = ...) -> Any: ...
+def getclasstree(classes: List[type], unique: bool = ...) -> List[
+ Union[Tuple[type, Tuple[type, ...]], list]]: ...
ArgSpec = NamedTuple('ArgSpec', [('args', List[str]),
- ('varargs', str),
- ('keywords', str),
+ ('varargs', Optional[str]),
+ ('keywords', Optional[str]),
('defaults', tuple),
])
+ArgInfo = NamedTuple('ArgInfo', [('args', List[str]),
+ ('varargs', Optional[str]),
+ ('keywords', Optional[str]),
+ ('locals', Dict[str, Any]),
+ ])
+
def getargspec(func: object) -> ArgSpec: ...
-# TODO make the return type more specific
-def getargvalues(frame: FrameType) -> Any: ...
-# TODO formatargspec
-# TODO formatargvalues
+def getargvalues(frame: FrameType) -> ArgInfo: ...
+def formatargspec(args, varargs=..., varkw=..., defaults=...,
+ formatarg=..., formatvarargs=..., formatvarkw=..., formatvalue=...,
+ join=...) -> str: ...
+def formatargvalues(args, varargs=..., varkw=..., defaults=...,
+ formatarg=..., formatvarargs=..., formatvarkw=..., formatvalue=...,
+ join=...) -> str: ...
def getmro(cls: type) -> Tuple[type, ...]: ...
-# TODO getcallargs
+def getcallargs(func, *args, **kwds) -> Dict[str, Any]: ...
# The interpreter stack
@@ -77,12 +85,12 @@ Traceback = NamedTuple(
]
)
-_FrameRecord = Tuple[FrameType, str, int, str, List[str], int]
+_FrameInfo = Tuple[FrameType, str, int, str, List[str], int]
-def getouterframes(frame: FrameType, context: int = ...) -> List[FrameType]: ...
+def getouterframes(frame: FrameType, context: int = ...) -> List[_FrameInfo]: ...
def getframeinfo(frame: Union[FrameType, TracebackType], context: int = ...) -> Traceback: ...
-def getinnerframes(traceback: TracebackType, context: int = ...) -> List[FrameType]: ...
+def getinnerframes(traceback: TracebackType, context: int = ...) -> List[_FrameInfo]: ...
-def currentframe() -> FrameType: ...
-def stack(context: int = ...) -> List[_FrameRecord]: ...
-def trace(context: int = ...) -> List[_FrameRecord]: ...
+def currentframe(depth: int = ...) -> FrameType: ...
+def stack(context: int = ...) -> List[_FrameInfo]: ...
+def trace(context: int = ...) -> List[_FrameInfo]: ...
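
As an illustration only (not taken from the patch), the shape of the frame
records that stack() and getouterframes() return, as the new _FrameInfo alias
spells out:

    import inspect

    frame, filename, lineno, func, context, index = inspect.stack()[0]
    print filename, lineno, func
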
diff --git a/typeshed/stdlib/2/io.pyi b/typeshed/stdlib/2/io.pyi
index 5fa20c7..03cd38e 100644
--- a/typeshed/stdlib/2/io.pyi
+++ b/typeshed/stdlib/2/io.pyi
@@ -4,7 +4,8 @@
# Only a subset of functionality is included.
-from typing import List, BinaryIO, TextIO, IO, overload, Iterator, Iterable, Any, Union
+from typing import List, BinaryIO, TextIO, IO, overload, Iterator, Iterable, Any, Union, Optional
+import _io
DEFAULT_BUFFER_SIZE = 0
@@ -13,9 +14,7 @@ def open(file: Union[str, unicode, int],
errors: unicode = ..., newline: unicode = ...,
closefd: bool = ...) -> IO[Any]: ...
-class IOBase:
- # TODO
- ...
+class IOBase(_io._IOBase): ...
class BytesIO(BinaryIO):
def __init__(self, initial_bytes: str = ...) -> None: ...
@@ -30,12 +29,12 @@ class BytesIO(BinaryIO):
def readable(self) -> bool: ...
def readline(self, limit: int = ...) -> str: ...
def readlines(self, hint: int = ...) -> List[str]: ...
- def seek(self, offset: int, whence: int = ...) -> None: ...
+ def seek(self, offset: int, whence: int = ...) -> int: ...
def seekable(self) -> bool: ...
def tell(self) -> int: ...
- def truncate(self, size: int = ...) -> int: ...
+ def truncate(self, size: Optional[int] = ...) -> int: ...
def writable(self) -> bool: ...
- def write(self, s: str) -> None: ...
+ def write(self, s: str) -> int: ...
def writelines(self, lines: Iterable[str]) -> None: ...
def getvalue(self) -> str: ...
def read1(self) -> str: ...
@@ -59,12 +58,12 @@ class StringIO(TextIO):
def readable(self) -> bool: ...
def readline(self, limit: int = ...) -> unicode: ...
def readlines(self, hint: int = ...) -> List[unicode]: ...
- def seek(self, offset: int, whence: int = ...) -> None: ...
+ def seek(self, offset: int, whence: int = ...) -> int: ...
def seekable(self) -> bool: ...
def tell(self) -> int: ...
- def truncate(self, size: int = ...) -> int: ...
+ def truncate(self, size: Optional[int] = ...) -> int: ...
def writable(self) -> bool: ...
- def write(self, s: unicode) -> None: ...
+ def write(self, s: unicode) -> int: ...
def writelines(self, lines: Iterable[unicode]) -> None: ...
def getvalue(self) -> unicode: ...
@@ -89,12 +88,12 @@ class TextIOWrapper(TextIO):
def readable(self) -> bool: ...
def readline(self, limit: int = ...) -> unicode: ...
def readlines(self, hint: int = ...) -> List[unicode]: ...
- def seek(self, offset: int, whence: int = ...) -> None: ...
+ def seek(self, offset: int, whence: int = ...) -> int: ...
def seekable(self) -> bool: ...
def tell(self) -> int: ...
- def truncate(self, size: int = ...) -> int: ...
+ def truncate(self, size: Optional[int] = ...) -> int: ...
def writable(self) -> bool: ...
- def write(self, s: unicode) -> None: ...
+ def write(self, s: unicode) -> int: ...
def writelines(self, lines: Iterable[unicode]) -> None: ...
def __iter__(self) -> Iterator[unicode]: ...
@@ -102,4 +101,4 @@ class TextIOWrapper(TextIO):
def __enter__(self) -> StringIO: ...
def __exit__(self, type, value, traceback) -> bool: ...
-class BufferedIOBase(IOBase): ...
+class BufferedIOBase(_io._BufferedIOBase, IOBase): ...
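
As an illustration only (not taken from the patch), Python 2's io.BytesIO
already returns the byte count from write() and the new position from seek(),
which the updated return types now reflect:

    import io

    buf = io.BytesIO()
    n = buf.write("hello")      # 5
    pos = buf.seek(0)           # 0
    print n, pos, buf.read()    # 5 0 hello
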
diff --git a/typeshed/stdlib/2/itertools.pyi b/typeshed/stdlib/2/itertools.pyi
index 286d200..644d9f1 100644
--- a/typeshed/stdlib/2/itertools.pyi
+++ b/typeshed/stdlib/2/itertools.pyi
@@ -47,13 +47,15 @@ _T1 = TypeVar('_T1')
_T2 = TypeVar('_T2')
_T3 = TypeVar('_T3')
_T4 = TypeVar('_T4')
+_T5 = TypeVar('_T5')
+_T6 = TypeVar('_T6')
@overload
-def imap(func: Callable[[_T1], _S], iter1: Iterable[_T1]) -> Iterable[_S]: ...
+def imap(func: Callable[[_T1], _S], iter1: Iterable[_T1]) -> Iterator[_S]: ...
@overload
def imap(func: Callable[[_T1, _T2], _S],
iter1: Iterable[_T1],
- iter2: Iterable[_T2]) -> Iterable[_S]: ... # TODO more than two iterables
+ iter2: Iterable[_T2]) -> Iterator[_S]: ... # TODO more than two iterables
def starmap(func: Any, iterable: Iterable[Any]) -> Iterator[Any]: ...
def takewhile(predicate: Callable[[_T], Any],
@@ -61,17 +63,33 @@ def takewhile(predicate: Callable[[_T], Any],
def tee(iterable: Iterable[Any], n: int = ...) -> Iterator[Any]: ...
@overload
-def izip(iter1: Iterable[_T1]) -> Iterable[Tuple[_T1]]: ...
+def izip(iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ...
@overload
def izip(iter1: Iterable[_T1],
- iter2: Iterable[_T2]) -> Iterable[Tuple[_T1, _T2]]: ...
+ iter2: Iterable[_T2]) -> Iterator[Tuple[_T1, _T2]]: ...
@overload
def izip(iter1: Iterable[_T1], iter2: Iterable[_T2],
- iter3: Iterable[_T3]) -> Iterable[Tuple[_T1, _T2, _T3]]: ...
+ iter3: Iterable[_T3]) -> Iterator[Tuple[_T1, _T2, _T3]]: ...
@overload
def izip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3],
- iter4: Iterable[_T4]) -> Iterable[Tuple[_T1, _T2,
- _T3, _T4]]: ... # TODO more than four iterables
+ iter4: Iterable[_T4]) -> Iterator[Tuple[_T1, _T2,
+ _T3, _T4]]: ...
+@overload
+def izip(iter1: Iterable[_T1], iter2: Iterable[_T2],
+ iter3: Iterable[_T3], iter4: Iterable[_T4],
+ iter5: Iterable[_T5]) -> Iterator[Tuple[_T1, _T2,
+ _T3, _T4, _T5]]: ...
+@overload
+def izip(iter1: Iterable[_T1], iter2: Iterable[_T2],
+ iter3: Iterable[_T3], iter4: Iterable[_T4],
+ iter5: Iterable[_T5], iter6: Iterable[_T6]) -> Iterator[Tuple[_T1, _T2, _T3,
+ _T4, _T5, _T6]]: ...
+@overload
+def izip(iter1: Iterable[Any], iter2: Iterable[Any],
+ iter3: Iterable[Any], iter4: Iterable[Any],
+ iter5: Iterable[Any], iter6: Iterable[Any],
+ iter7: Iterable[Any], *iterables: Iterable[Any]) -> Iterator[Tuple[Any, ...]]: ...
+
def izip_longest(*p: Iterable[Any],
fillvalue: Any = ...) -> Iterator[Any]: ...
@@ -82,6 +100,6 @@ def product(*p: Iterable[_T], repeat: int = ...) -> Iterator[Sequence[_T]]: ...
def permutations(iterable: Iterable[_T],
r: int = ...) -> Iterator[Sequence[_T]]: ...
def combinations(iterable: Iterable[_T],
- r: int) -> Iterable[Sequence[_T]]: ...
+ r: int) -> Iterator[Sequence[_T]]: ...
def combinations_with_replacement(iterable: Iterable[_T],
- r: int) -> Iterable[Sequence[_T]]: ...
+ r: int) -> Iterator[Sequence[_T]]: ...
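[Reviewer note] A hypothetical usage sketch of the widened izip overloads (Python 2; names illustrative): up to six differently typed iterables keep precise element types, while seven or more fall back to Tuple[Any, ...].

from itertools import izip

pairs = izip([1, 2], ['a', 'b'])   # typed as Iterator[Tuple[int, str]] under the new stub
for n, s in pairs:
    print n, s                     # n: int, s: str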
diff --git a/typeshed/stdlib/2/macpath.pyi b/typeshed/stdlib/2/macpath.pyi
new file mode 100644
index 0000000..5e5439f
--- /dev/null
+++ b/typeshed/stdlib/2/macpath.pyi
@@ -0,0 +1,50 @@
+# Stubs for posixpath (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, AnyStr, List
+from genericpath import * # noqa: F403
+
+curdir = ... # type: Any
+pardir = ... # type: Any
+extsep = ... # type: Any
+sep = ... # type: Any
+pathsep = ... # type: Any
+defpath = ... # type: Any
+altsep = ... # type: Any
+devnull = ... # type: Any
+
+def normcase(s): ...
+def isabs(s): ...
+def join(a, *p): ...
+def split(p): ...
+def splitext(p): ...
+def splitdrive(p): ...
+def basename(p): ...
+def dirname(p): ...
+def islink(path): ...
+def lexists(path): ...
+def samefile(f1, f2): ...
+def sameopenfile(fp1, fp2): ...
+def samestat(s1, s2): ...
+def ismount(path): ...
+def walk(top, func, arg): ...
+def expanduser(path): ...
+def expandvars(path): ...
+def normpath(path): ...
+def abspath(path): ...
+def realpath(filename): ...
+
+supports_unicode_filenames = ... # type: Any
+
+def relpath(path, start=...): ...
+
+# posixpath imports these from genericpath.py:
+def commonprefix(list: List[AnyStr]) -> AnyStr: ...
+def exists(path: unicode) -> bool: ...
+def getatime(path: unicode) -> float: ...
+def getmtime(path: unicode) -> float: ...
+def getctime(path: unicode) -> float: ...
+def getsize(path: unicode) -> int: ...
+def isfile(path: unicode) -> bool: ...
+def isdir(path: unicode) -> bool: ...
diff --git a/typeshed/stdlib/2/md5.pyi b/typeshed/stdlib/2/md5.pyi
index 3488466..fe6ad71 100644
--- a/typeshed/stdlib/2/md5.pyi
+++ b/typeshed/stdlib/2/md5.pyi
@@ -1,11 +1,6 @@
# Stubs for Python 2.7 md5 stdlib module
-class md5(object):
- def update(self, arg: str) -> None: ...
- def digest(self) -> str: ...
- def hexdigest(self) -> str: ...
- def copy(self) -> md5: ...
+from hashlib import md5 as md5, md5 as new
-def new(string: str = ...) -> md5: ...
blocksize = 0
digest_size = 0
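[Reviewer note] A small, hypothetical sketch of the effect of re-exporting from hashlib: md5.new and md5.md5 are now the same constructor, so both type-check consistently.

import md5

h = md5.new('spam')    # same object type as hashlib.md5('spam')
print h.hexdigest()    # str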
diff --git a/typeshed/stdlib/2/ntpath.pyi b/typeshed/stdlib/2/ntpath.pyi
new file mode 100644
index 0000000..5e5439f
--- /dev/null
+++ b/typeshed/stdlib/2/ntpath.pyi
@@ -0,0 +1,50 @@
+# Stubs for posixpath (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, AnyStr, List
+from genericpath import * # noqa: F403
+
+curdir = ... # type: Any
+pardir = ... # type: Any
+extsep = ... # type: Any
+sep = ... # type: Any
+pathsep = ... # type: Any
+defpath = ... # type: Any
+altsep = ... # type: Any
+devnull = ... # type: Any
+
+def normcase(s): ...
+def isabs(s): ...
+def join(a, *p): ...
+def split(p): ...
+def splitext(p): ...
+def splitdrive(p): ...
+def basename(p): ...
+def dirname(p): ...
+def islink(path): ...
+def lexists(path): ...
+def samefile(f1, f2): ...
+def sameopenfile(fp1, fp2): ...
+def samestat(s1, s2): ...
+def ismount(path): ...
+def walk(top, func, arg): ...
+def expanduser(path): ...
+def expandvars(path): ...
+def normpath(path): ...
+def abspath(path): ...
+def realpath(filename): ...
+
+supports_unicode_filenames = ... # type: Any
+
+def relpath(path, start=...): ...
+
+# posixpath imports these from genericpath.py:
+def commonprefix(list: List[AnyStr]) -> AnyStr: ...
+def exists(path: unicode) -> bool: ...
+def getatime(path: unicode) -> float: ...
+def getmtime(path: unicode) -> float: ...
+def getctime(path: unicode) -> float: ...
+def getsize(path: unicode) -> int: ...
+def isfile(path: unicode) -> bool: ...
+def isdir(path: unicode) -> bool: ...
diff --git a/typeshed/stdlib/2/nturl2path.pyi b/typeshed/stdlib/2/nturl2path.pyi
new file mode 100644
index 0000000..b87b008
--- /dev/null
+++ b/typeshed/stdlib/2/nturl2path.pyi
@@ -0,0 +1,4 @@
+from typing import AnyStr
+
+def url2pathname(url: AnyStr) -> AnyStr: ...
+def pathname2url(p: AnyStr) -> AnyStr: ...
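[Reviewer note] A brief, hypothetical sketch of the new nturl2path stub: AnyStr ties the return type to the argument type.

import nturl2path

p = nturl2path.url2pathname('///C|/tmp/x.txt')    # inferred as str
u = nturl2path.url2pathname(u'///C|/tmp/x.txt')   # inferred as unicode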
diff --git a/typeshed/stdlib/2/optparse.pyi b/typeshed/stdlib/2/optparse.pyi
deleted file mode 100644
index 0606a62..0000000
--- a/typeshed/stdlib/2/optparse.pyi
+++ /dev/null
@@ -1,249 +0,0 @@
-# Generated by pytype, with only minor tweaks. Might be incomplete.
-
-from typing import Any, Optional, List, Callable, Tuple, Dict, Iterable, Union
-
-# See https://groups.google.com/forum/#!topic/python-ideas/gA1gdj3RZ5g
-Text = Union[str, unicode]
-
-NO_DEFAULT = ... # type: Tuple[str, ...]
-SUPPRESS_HELP = ... # type: str
-SUPPRESS_USAGE = ... # type: str
-
-def check_builtin(option: Option, opt, value: Text) -> Any: ...
-def check_choice(option: Option, opt, value) -> Any: ...
-def isbasestring(x) -> bool: ...
-
-class OptParseError(Exception):
- msg = ... # type: Any
- def __init__(self, msg) -> None: ...
-
-class BadOptionError(OptParseError):
- opt_str = ... # type: Any
- def __init__(self, opt_str) -> None: ...
-
-class AmbiguousOptionError(BadOptionError):
- possibilities = ... # type: Any
- def __init__(self, opt_str, possibilities) -> None: ...
-
-class OptionError(OptParseError):
- msg = ... # type: Any
- option_id = ... # type: str
- def __init__(self, msg, option: Option) -> None: ...
-
-class OptionConflictError(OptionError): ...
-
-class HelpFormatter:
- NO_DEFAULT_VALUE = ... # type: str
- _long_opt_fmt = ... # type: Union[str, unicode]
- _short_opt_fmt = ... # type: Union[str, unicode]
- current_indent = ... # type: int
- default_tag = ... # type: str
- help_position = ... # type: Any
- help_width = ... # type: Any
- indent_increment = ... # type: Any
- level = ... # type: int
- max_help_position = ... # type: int
- option_strings = ... # type: Dict[Option, str]
- parser = ... # type: Any
- short_first = ... # type: Any
- width = ... # type: Any
- def __init__(self, indent_increment, max_help_position, width, short_first) -> None: ...
- def _format_text(self, text: Text) -> Text: ...
- def dedent(self) -> None: ...
- def expand_default(self, option: Option) -> Text: ...
- def format_description(self, description) -> Any: ...
- def format_epilog(self, epilog) -> Any: ...
- def format_heading(self, heading) -> Any: ...
- def format_option(self, option: Any) -> str: ...
- def format_option_strings(self, option: Any) -> Any: ...
- def format_usage(self, usage) -> Any: ...
- def indent(self) -> None: ...
- def set_long_opt_delimiter(self, delim) -> None: ...
- def set_parser(self, parser) -> None: ...
- def set_short_opt_delimiter(self, delim) -> None: ...
- def store_option_strings(self, parser) -> None: ...
-
-class IndentedHelpFormatter(HelpFormatter):
- _long_opt_fmt = ... # type: str
- _short_opt_fmt = ... # type: str
- current_indent = ... # type: int
- default_tag = ... # type: str
- help_position = ... # type: int
- help_width = ... # type: Optional[int]
- indent_increment = ... # type: Any
- level = ... # type: int
- max_help_position = ... # type: int
- option_strings = ... # type: Dict[Any, Any]
- parser = ... # type: Optional[OptionParser]
- short_first = ... # type: Any
- width = ... # type: Any
- def __init__(self, *args, **kwargs) -> None: ...
- def format_heading(self, heading) -> str: ...
- def format_usage(self, usage) -> str: ...
-
-class Option:
- ACTIONS = ... # type: Tuple[str, ...]
- ALWAYS_TYPED_ACTIONS = ... # type: Tuple[str, ...]
- ATTRS = ... # type: List[str]
- CHECK_METHODS = ... # type: Union[None, List[Callable]]
- CONST_ACTIONS = ... # type: Tuple[str, ...]
- STORE_ACTIONS = ... # type: Tuple[str, ...]
- TYPED_ACTIONS = ... # type: Tuple[str, ...]
- TYPES = ... # type: Tuple[str, ...]
- TYPE_CHECKER = ... # type: Dict[str, Callable]
- _long_opts = ... # type: List[Text]
- _short_opts = ... # type: List[Text]
- action = ... # type: str
- dest = ... # type: Any
- nargs = ... # type: int
- type = ... # type: Any
- def __init__(self, *args, **kwargs) -> None: ...
- def __repr__(self) -> str: ...
- def __str__(self) -> str: ...
- def _check_action(self) -> None: ...
- def _check_callback(self) -> None: ...
- def _check_choice(self) -> None: ...
- def _check_const(self) -> None: ...
- def _check_dest(self) -> None: ...
- def _check_nargs(self) -> None: ...
- def _check_opt_strings(self, opts) -> Any: ...
- def _check_type(self) -> None: ...
- def _set_attrs(self, attrs: Dict[str, Any]) -> None: ...
- def _set_opt_strings(self, opts) -> None: ...
- def check_value(self, opt, value) -> Any: ...
- def convert_value(self, opt, value) -> Any: ...
- def get_opt_string(self) -> Text: ...
- def process(self, opt, value, values: Any, parser: OptionParser) -> int: ...
- def take_action(self, action, dest, opt, value, values, parser: OptionParser) -> int: ...
- def takes_value(self) -> bool: ...
-
-make_option = Option
-
-class OptionContainer:
- _long_opt = ... # type: Dict[Text, Any]
- _short_opt = ... # type: Dict[Text, Any]
- conflict_handler = ... # type: Any
- defaults = ... # type: Dict[Text, Any]
- description = ... # type: Any
- option_class = ... # type: Any
- def __init__(self, option_class, conflict_handler, description) -> None: ...
- def _check_conflict(self, option: Any) -> None: ...
- def _create_option_mappings(self) -> None: ...
- def _share_option_mappings(self, parser) -> None: ...
- def add_option(self, *args, **kwargs) -> Any: ...
- def add_options(self, option_list) -> None: ...
- def destroy(self) -> None: ...
- def format_description(self, formatter: Optional[HelpFormatter]) -> Any: ...
- def format_help(self, formatter: HelpFormatter) -> str: ...
- def format_option_help(self, formatter: Optional[HelpFormatter]) -> str: ...
- def get_description(self) -> Any: ...
- def get_option(self, opt_str) -> Optional[Option]: ...
- def has_option(self, opt_str) -> bool: ...
- def remove_option(self, opt_str) -> None: ...
- def set_conflict_handler(self, handler) -> None: ...
- def set_description(self, description) -> None: ...
-
-class OptionGroup(OptionContainer):
- _long_opt = ... # type: Dict[Any, Any]
- _short_opt = ... # type: Dict[Any, Any]
- conflict_handler = ... # type: Any
- defaults = ... # type: Dict[Text, Any]
- description = ... # type: Any
- option_class = ... # type: Any
- option_list = ... # type: List
- parser = ... # type: Any
- title = ... # type: Any
- def __init__(self, parser, title, *args, **kwargs) -> None: ...
- def _create_option_list(self) -> None: ...
- def format_help(self, formatter: HelpFormatter) -> Any: ...
- def set_title(self, title) -> None: ...
-
-class OptionParser(OptionContainer):
- _long_opt = ... # type: Dict[Text, Any]
- _short_opt = ... # type: Dict[Any, Any]
- allow_interspersed_args = ... # type: bool
- conflict_handler = ... # type: Any
- defaults = ... # type: Dict[Any, Any]
- description = ... # type: Text
- epilog = ... # type: Any
- formatter = ... # type: HelpFormatter
- largs = ... # type: Union[None, List[Text]]
- option_class = ... # type: Callable
- option_groups = ... # type: List[OptionParser]
- option_list = ... # type: List[Any]
- process_default_values = ... # type: Any
- prog = ... # type: Any
- rargs = ... # type: Optional[List[Any]]
- standard_option_list = ... # type: List
- usage = ... # type: Optional[Text]
- values = ... # type: Any
- version = ... # type: Text
- def __init__(self, *args, **kwargs) -> None: ...
- def _add_help_option(self) -> None: ...
- def _add_version_option(self) -> None: ...
- def _create_option_list(self) -> None: ...
- def _get_all_options(self) -> List[Any]: ...
- def _get_args(self, args: Iterable) -> List[Any]: ...
- def _get_encoding(self, file) -> Any: ...
- def _init_parsing_state(self) -> None: ...
- def _match_long_opt(self, opt) -> Any: ...
- def _populate_option_list(self, option_list, *args, **kwargs) -> None: ...
- def _process_args(self, largs: List[Text], rargs: List, values: Values) -> None: ...
- def _process_long_opt(self, rargs: List, values) -> None: ...
- def _process_short_opts(self, rargs: List, values) -> None: ...
- def add_option_group(self, *args, **kwargs) -> OptionParser: ...
- def check_values(self, values, args) -> Tuple[Any, ...]: ...
- def disable_interspersed_args(self) -> None: ...
- def enable_interspersed_args(self) -> None: ...
- def error(self, msg) -> None: ...
- def exit(self, *args, **kwargs) -> None: ...
- def expand_prog_name(self, s: Optional[Text]) -> Any: ...
- def format_epilog(self, formatter: Union[HelpFormatter, OptionParser, None]) -> Any: ...
- def format_help(self, *args, **kwargs) -> str: ...
- def format_option_help(self, *args, **kwargs) -> str: ...
- def get_default_values(self) -> Values: ...
- def get_option_group(self, opt_str) -> Any: ...
- def get_prog_name(self) -> Any: ...
- def get_usage(self) -> Text: ...
- def get_version(self) -> Any: ...
- def parse_args(self, *args, **kwargs) -> Tuple[Any, ...]: ...
- def print_help(self, *args, **kwargs) -> None: ...
- def print_usage(self, *args, **kwargs) -> None: ...
- def print_version(self, *args, **kwargs) -> None: ...
- def set_default(self, dest, value) -> None: ...
- def set_defaults(self, *args, **kwargs) -> None: ...
- def set_process_default_values(self, process) -> None: ...
- def set_usage(self, usage: Text) -> None: ...
-
-class OptionValueError(OptParseError):
- msg = ... # type: Any
-
-class TitledHelpFormatter(HelpFormatter):
- _long_opt_fmt = ... # type: str
- _short_opt_fmt = ... # type: str
- current_indent = ... # type: int
- default_tag = ... # type: str
- help_position = ... # type: int
- help_width = ... # type: None
- indent_increment = ... # type: Any
- level = ... # type: int
- max_help_position = ... # type: int
- option_strings = ... # type: Dict
- parser = ... # type: None
- short_first = ... # type: Any
- width = ... # type: Any
- def __init__(self, *args, **kwargs) -> None: ...
- def format_heading(self, heading) -> str: ...
- def format_usage(self, usage) -> str: ...
-
-class Values:
- def __cmp__(self, other) -> int: ...
- def __init__(self, *args, **kwargs) -> None: ...
- def __repr__(self) -> str: ...
- def __str__(self) -> str: ...
- def _update(self, dict: Dict[str, Any], mode) -> None: ...
- def _update_careful(self, dict: Dict[str, Any]) -> None: ...
- def _update_loose(self, dict) -> None: ...
- def ensure_value(self, attr, value) -> Any: ...
- def read_file(self, filename, *args, **kwargs) -> None: ...
- def read_module(self, modname, *args, **kwargs) -> None: ...
diff --git a/typeshed/stdlib/2/os/__init__.pyi b/typeshed/stdlib/2/os/__init__.pyi
index 3bd50d8..881bcf7 100644
--- a/typeshed/stdlib/2/os/__init__.pyi
+++ b/typeshed/stdlib/2/os/__init__.pyi
@@ -1,246 +1,21 @@
# created from https://docs.python.org/2/library/os.html
+from builtins import OSError as error
+import sys
from typing import (
- List, Tuple, Union, Sequence, Mapping, IO, Any, Optional, AnyStr, Iterator,
- Dict, MutableMapping, NamedTuple, overload
+ Mapping, MutableMapping, Dict, List, Any, Tuple, Iterator, overload, Union, AnyStr,
+ Optional, Generic, Set, Callable, Text, Sequence, IO, NamedTuple, TypeVar
)
-from . import path
+from . import path as path
from mypy_extensions import NoReturn
-error = OSError
-name = ... # type: str
-
-class _Environ(MutableMapping[str, str]):
- def copy(self) -> Dict[str, str]: ...
-
-environ = ... # type: _Environ
-
-def chdir(path: unicode) -> None: ...
-def fchdir(fd: int) -> None: ...
-def getcwd() -> str: ...
-def ctermid() -> str: ...
-def getegid() -> int: ...
-def geteuid() -> int: ...
-def getgid() -> int: ...
-def getgroups() -> List[int]: ...
-def initgroups(username: str, gid: int) -> None: ...
-def getlogin() -> str: ...
-def getpgid(pid: int) -> int: ...
-def getpgrp() -> int: ...
-def getpid() -> int: ...
-def getppid() -> int: ...
-def getresuid() -> Tuple[int, int, int]: ...
-def getresgid() -> Tuple[int, int, int]: ...
-def getuid() -> int: ...
-def getenv(varname: unicode, value: unicode = ...) -> str: ...
-def putenv(varname: unicode, value: unicode) -> None: ...
-def setegid(egid: int) -> None: ...
-def seteuid(euid: int) -> None: ...
-def setgid(gid: int) -> None: ...
-def setgroups(groups: Sequence[int]) -> None: ...
-
-# TODO(MichalPokorny)
-def setpgrp(*args) -> None: ...
-
-def setpgid(pid: int, pgrp: int) -> None: ...
-def setregid(rgid: int, egid: int) -> None: ...
-def setresgid(rgid: int, egid: int, sgid: int) -> None: ...
-def setresuid(ruid: int, euid: int, suid: int) -> None: ...
-def setreuid(ruid: int, euid: int) -> None: ...
-def getsid(pid: int) -> int: ...
-def setsid() -> None: ...
-def setuid(pid: int) -> None: ...
-
-def strerror(code: int) -> str: ...
-
-def umask(mask: int) -> int: ...
-def uname() -> Tuple[str, str, str, str, str]: ...
-def unsetenv(varname: str) -> None: ...
-
-# TODO(MichalPokorny)
-def fdopen(fd: int, *args, **kwargs) -> IO[Any]: ...
-def popen(command: str, *args, **kwargs) -> Optional[IO[Any]]: ...
-def tmpfile() -> IO[Any]: ...
-
-def tmpnam() -> str: ...
-def tempnam(dir: str = ..., prefix: str = ...) -> str: ...
+_T = TypeVar('_T')
-def popen2(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any]]: ...
-def popen3(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any], IO[Any]]: ...
-def popen4(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any]]: ...
-
-def close(fd: int) -> None: ...
-def closerange(fd_low: int, fd_high: int) -> None: ...
-def dup(fd: int) -> int: ...
-def dup2(fd: int, fd2: int) -> None: ...
-def fchmod(fd: int, mode: int) -> None: ...
-def fchown(fd: int, uid: int, gid: int) -> None: ...
-def fdatasync(fd: int) -> None: ...
-def fpathconf(fd: int, name: str) -> None: ...
-
-# TODO(prvak)
-def fstat(fd: int) -> Any: ...
-def fsync(fd: int) -> None: ...
-def ftruncate(fd: int, length: int) -> None: ...
-def isatty(fd: int) -> bool: ...
-
-def lseek(fd: int, pos: int, how: int) -> None: ...
SEEK_SET = 0
SEEK_CUR = 0
SEEK_END = 0
-# TODO(prvak): maybe file should be unicode? (same with all other paths...)
-def open(file: unicode, flags: int, mode: int = ...) -> int: ...
-def openpty() -> Tuple[int, int]: ...
-def pipe() -> Tuple[int, int]: ...
-def read(fd: int, n: int) -> str: ...
-def tcgetpgrp(fd: int) -> int: ...
-def tcsetpgrp(fd: int, pg: int) -> None: ...
-def ttyname(fd: int) -> str: ...
-def write(fd: int, str: str) -> int: ...
-
-# TODO: O_*
-
-def access(path: unicode, mode: int) -> bool: ...
-F_OK = 0
-R_OK = 0
-W_OK = 0
-X_OK = 0
-
-def getcwdu() -> unicode: ...
-def chflags(path: unicode, flags: int) -> None: ...
-def chroot(path: unicode) -> None: ...
-def chmod(path: unicode, mode: int) -> None: ...
-def chown(path: unicode, uid: int, gid: int) -> None: ...
-def lchflags(path: unicode, flags: int) -> None: ...
-def lchmod(path: unicode, uid: int, gid: int) -> None: ...
-def lchown(path: unicode, uid: int, gid: int) -> None: ...
-def link(source: unicode, link_name: unicode) -> None: ...
-def listdir(path: AnyStr) -> List[AnyStr]: ...
-
-# TODO(MichalPokorny)
-def lstat(path: unicode) -> Any: ...
-
-def mkfifo(path: unicode, mode: int = ...) -> None: ...
-def mknod(filename: unicode, mode: int = ..., device: int = ...) -> None: ...
-def major(device: int) -> int: ...
-def minor(device: int) -> int: ...
-def makedev(major: int, minor: int) -> int: ...
-def mkdir(path: unicode, mode: int = ...) -> None: ...
-def makedirs(path: unicode, mode: int = ...) -> None: ...
-def pathconf(path: unicode, name: str) -> str: ...
-
-pathconf_names = ... # type: Mapping[str, int]
-
-def readlink(path: AnyStr) -> AnyStr: ...
-def remove(path: unicode) -> None: ...
-def removedirs(path: unicode) -> None: ...
-def rename(src: unicode, dst: unicode) -> None: ...
-def renames(old: unicode, new: unicode) -> None: ...
-def rmdir(path: unicode) -> None: ...
-
-# TODO(MichalPokorny)
-def stat(path: unicode) -> Any: ...
-
-_StatVFS = NamedTuple('_StatVFS', [('f_bsize', int), ('f_frsize', int), ('f_blocks', int),
- ('f_bfree', int), ('f_bavail', int), ('f_files', int),
- ('f_ffree', int), ('f_favail', int), ('f_flag', int),
- ('f_namemax', int)])
-
-def fstatvfs(fd: int) -> _StatVFS: ...
-def statvfs(path: unicode) -> _StatVFS: ...
-
-def walk(top: AnyStr, topdown: bool = ..., onerror: Any = ...,
- followlinks: bool = ...) -> Iterator[Tuple[AnyStr, List[AnyStr],
- List[AnyStr]]]: ...
-
-def symlink(source: unicode, link_name: unicode) -> None: ...
-def unlink(path: unicode) -> None: ...
-def utime(path: unicode, times: Optional[Tuple[int, int]]) -> None: ...
-
-def abort() -> None: ...
-
-EX_OK = 0 # Unix only
-EX_USAGE = 0 # Unix only
-EX_DATAERR = 0 # Unix only
-EX_NOINPUT = 0 # Unix only
-EX_NOUSER = 0 # Unix only
-EX_NOHOST = 0 # Unix only
-EX_UNAVAILABLE = 0 # Unix only
-EX_SOFTWARE = 0 # Unix only
-EX_OSERR = 0 # Unix only
-EX_OSFILE = 0 # Unix only
-EX_CANTCREAT = 0 # Unix only
-EX_IOERR = 0 # Unix only
-EX_TEMPFAIL = 0 # Unix only
-EX_PROTOCOL = 0 # Unix only
-EX_NOPERM = 0 # Unix only
-EX_CONFIG = 0 # Unix only
-
-def execl(file: AnyStr, *args) -> None: ...
-def execle(file: AnyStr, *args) -> None: ...
-def execlp(file: AnyStr, *args) -> None: ...
-def execlpe(file: AnyStr, *args) -> None: ...
-def execvp(file: AnyStr, args: Union[Tuple[AnyStr], List[AnyStr]]) -> None: ...
-def execvpe(file: AnyStr, args: Union[Tuple[AnyStr], List[AnyStr]], env: Mapping[AnyStr, AnyStr]) -> None: ...
-def execv(path: AnyStr, args: Union[Tuple[AnyStr], List[AnyStr]]) -> None: ...
-def execve(path: AnyStr, args: Union[Tuple[AnyStr], List[AnyStr]], env: Mapping[AnyStr, AnyStr]) -> None: ...
-
-def _exit(n: int) -> NoReturn: ...
-
-def fork() -> int: ...
-def forkpty() -> Tuple[int, int]: ...
-
-def kill(pid: int, sig: int) -> None: ...
-def killpg(pgid: int, sig: int) -> None: ...
-def nice(increment: int) -> int: ...
-
-# TODO: plock, popen*, P_*
-
-def spawnl(mode: int, path: AnyStr, arg0: AnyStr, *args: AnyStr) -> int: ...
-def spawnle(mode: int, path: AnyStr, arg0: AnyStr,
- *args: Any) -> int: ... # Imprecise sig
-def spawnlp(mode: int, file: AnyStr, arg0: AnyStr,
- *args: AnyStr) -> int: ... # Unix only TODO
-def spawnlpe(mode: int, file: AnyStr, arg0: AnyStr, *args: Any) -> int:
- ... # Imprecise signature; Unix only TODO
-def spawnv(mode: int, path: AnyStr, args: List[AnyStr]) -> int: ...
-def spawnve(mode: int, path: AnyStr, args: List[AnyStr],
- env: Mapping[str, str]) -> int: ...
-def spawnvp(mode: int, file: AnyStr, args: List[AnyStr]) -> int: ... # Unix only
-def spawnvpe(mode: int, file: AnyStr, args: List[AnyStr],
- env: Mapping[str, str]) -> int:
- ... # Unix only
-def startfile(path: unicode, operation: str = ...) -> None: ... # Windows only
-def system(command: unicode) -> int: ...
-def times() -> Tuple[float, float, float, float, float]: ...
-def wait() -> Tuple[int, int]: ... # Unix only
-def wait3(options: int) -> Tuple[int, int, Any]: ... # Unix only
-def wait4(pid: int, options: int) -> Tuple[int, int, Any]: ... # Unix only
-def waitpid(pid: int, options: int) -> Tuple[int, int]: ...
-
-def confstr(name: Union[str, int]) -> Optional[str]: ...
-confstr_names = ... # type: Mapping[str, int]
-
-def getloadavg() -> Tuple[float, float, float]: ...
-
-def sysconf(name: Union[str, int]) -> int: ...
-sysconf_names = ... # type: Mapping[str, int]
-
-curdir = ... # type: str
-pardir = ... # type: str
-sep = ... # type: str
-altsep = ... # type: str
-extsep = ... # type: str
-pathsep = ... # type: str
-defpath = ... # type: str
-linesep = ... # type: str
-devnull = ... # type: str
-
-def urandom(n: int) -> str: ...
-
# More constants, copied from stdlib/3/os/__init__.pyi
-
O_RDONLY = 0
O_WRONLY = 0
O_RDWR = 0
@@ -270,6 +45,47 @@ O_NOFOLLOW = 0 # Gnu extension if in C library
O_NOATIME = 0 # Gnu extension if in C library
O_LARGEFILE = 0 # Gnu extension if in C library
+curdir = ... # type: str
+pardir = ... # type: str
+sep = ... # type: str
+altsep = ... # type: str
+extsep = ... # type: str
+pathsep = ... # type: str
+defpath = ... # type: str
+linesep = ... # type: str
+devnull = ... # type: str
+name = ... # type: str
+
+F_OK = 0
+R_OK = 0
+W_OK = 0
+X_OK = 0
+
+class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]):
+ def copy(self) -> Dict[AnyStr, AnyStr]: ...
+
+environ = ... # type: _Environ[str]
+
+confstr_names = ... # type: Dict[str, int] # Unix only
+pathconf_names = ... # type: Dict[str, int] # Unix only
+sysconf_names = ... # type: Dict[str, int] # Unix only
+
+EX_OK = 0 # Unix only
+EX_USAGE = 0 # Unix only
+EX_DATAERR = 0 # Unix only
+EX_NOINPUT = 0 # Unix only
+EX_NOUSER = 0 # Unix only
+EX_NOHOST = 0 # Unix only
+EX_UNAVAILABLE = 0 # Unix only
+EX_SOFTWARE = 0 # Unix only
+EX_OSERR = 0 # Unix only
+EX_OSFILE = 0 # Unix only
+EX_CANTCREAT = 0 # Unix only
+EX_IOERR = 0 # Unix only
+EX_TEMPFAIL = 0 # Unix only
+EX_PROTOCOL = 0 # Unix only
+EX_NOPERM = 0 # Unix only
+EX_CONFIG = 0 # Unix only
P_NOWAIT = 0
P_NOWAITO = 0
P_WAIT = 0
@@ -281,23 +97,184 @@ WNOHANG = 0 # Unix only
WCONTINUED = 0 # some Unix systems
WUNTRACED = 0 # Unix only
-P_ALL = 0
-WEXITED = 0
-WNOWAIT = 0
-
-TMP_MAX = 0
+TMP_MAX = 0 # Undocumented, but used by tempfile
+_PathType = Union[bytes, Text]
+_StatVFS = NamedTuple('_StatVFS', [('f_bsize', int), ('f_frsize', int), ('f_blocks', int),
+ ('f_bfree', int), ('f_bavail', int), ('f_files', int),
+ ('f_ffree', int), ('f_favail', int), ('f_flag', int),
+ ('f_namemax', int)])
+def ctermid() -> str: ... # Unix only
+def getegid() -> int: ... # Unix only
+def geteuid() -> int: ... # Unix only
+def getgid() -> int: ... # Unix only
+def getgroups() -> List[int]: ... # Unix only, behaves differently on Mac
+def initgroups(username: str, gid: int) -> None: ... # Unix only
+def getlogin() -> str: ...
+def getpgid(pid: int) -> int: ... # Unix only
+def getpgrp() -> int: ... # Unix only
+def getpid() -> int: ...
+def getppid() -> int: ...
+def getresuid() -> Tuple[int, int, int]: ... # Unix only
+def getresgid() -> Tuple[int, int, int]: ... # Unix only
+def getuid() -> int: ... # Unix only
+def setegid(egid: int) -> None: ... # Unix only
+def seteuid(euid: int) -> None: ... # Unix only
+def setgid(gid: int) -> None: ... # Unix only
+def setgroups(groups: Sequence[int]) -> None: ... # Unix only
+def setpgrp() -> None: ... # Unix only
+def setpgid(pid: int, pgrp: int) -> None: ... # Unix only
+def setregid(rgid: int, egid: int) -> None: ... # Unix only
+def setresgid(rgid: int, egid: int, sgid: int) -> None: ... # Unix only
+def setresuid(ruid: int, euid: int, suid: int) -> None: ... # Unix only
+def setreuid(ruid: int, euid: int) -> None: ... # Unix only
+def getsid(pid: int) -> int: ... # Unix only
+def setsid() -> None: ... # Unix only
+def setuid(uid: int) -> None: ... # Unix only
+def strerror(code: int) -> str: ...
+def umask(mask: int) -> int: ...
+def uname() -> Tuple[str, str, str, str, str]: ... # Unix only
-# Below are Unix-only
-def WCOREDUMP(status: int) -> bool: ...
-def WEXITSTATUS(status: int) -> int: ...
-def WIFCONTINUED(status: int) -> bool: ...
-def WIFEXITED(status: int) -> bool: ...
-def WIFSIGNALED(status: int) -> bool: ...
-def WIFSTOPPED(status: int) -> bool: ...
-def WSTOPSIG(status: int) -> int: ...
-def WTERMSIG(status: int) -> int: ...
+@overload
+def getenv(key: Text) -> Optional[str]: ...
+@overload
+def getenv(key: Text, default: _T) -> Union[str, _T]: ...
+def putenv(key: Union[bytes, Text], value: Union[bytes, Text]) -> None: ...
+def unsetenv(key: Union[bytes, Text]) -> None: ...
+def fdopen(fd: int, *args, **kwargs) -> IO[Any]: ...
+def close(fd: int) -> None: ...
+def closerange(fd_low: int, fd_high: int) -> None: ...
+def dup(fd: int) -> int: ...
+def dup2(fd: int, fd2: int) -> None: ...
+def fchmod(fd: int, mode: int) -> None: ... # Unix only
+def fchown(fd: int, uid: int, gid: int) -> None: ... # Unix only
+def fdatasync(fd: int) -> None: ... # Unix only, not Mac
+def fpathconf(fd: int, name: Union[str, int]) -> int: ... # Unix only
+def fstat(fd: int) -> Any: ...
+def fstatvfs(fd: int) -> _StatVFS: ... # Unix only
+def fsync(fd: int) -> None: ...
+def ftruncate(fd: int, length: int) -> None: ... # Unix only
+def isatty(fd: int) -> bool: ... # Unix only
+def lseek(fd: int, pos: int, how: int) -> int: ...
+def open(file: _PathType, flags: int, mode: int = ...) -> int: ...
+def openpty() -> Tuple[int, int]: ... # some flavors of Unix
+def pipe() -> Tuple[int, int]: ...
+def read(fd: int, n: int) -> bytes: ...
+def tcgetpgrp(fd: int) -> int: ... # Unix only
+def tcsetpgrp(fd: int, pg: int) -> None: ... # Unix only
+def ttyname(fd: int) -> str: ... # Unix only
+def write(fd: int, string: bytes) -> int: ...
+def access(path: _PathType, mode: int) -> bool: ...
+def chdir(path: _PathType) -> None: ...
+def fchdir(fd: int) -> None: ...
+def getcwd() -> str: ...
+def getcwdu() -> unicode: ...
+def chflags(path: _PathType, flags: int) -> None: ... # Unix only
+def chroot(path: _PathType) -> None: ... # Unix only
+def chmod(path: _PathType, mode: int) -> None: ...
+def chown(path: _PathType, uid: int, gid: int) -> None: ... # Unix only
+def lchflags(path: _PathType, flags: int) -> None: ... # Unix only
+def lchmod(path: _PathType, mode: int) -> None: ... # Unix only
+def lchown(path: _PathType, uid: int, gid: int) -> None: ... # Unix only
+def link(src: _PathType, link_name: _PathType) -> None: ...
+def listdir(path: AnyStr) -> List[AnyStr]: ...
+def lstat(path: _PathType) -> Any: ...
+def mkfifo(path: _PathType, mode: int = ...) -> None: ... # Unix only
+def mknod(filename: _PathType, mode: int = ..., device: int = ...) -> None: ...
+def major(device: int) -> int: ...
+def minor(device: int) -> int: ...
+def makedev(major: int, minor: int) -> int: ...
+def mkdir(path: _PathType, mode: int = ...) -> None: ...
+def makedirs(path: _PathType, mode: int = ...) -> None: ...
+def pathconf(path: _PathType, name: Union[str, int]) -> int: ... # Unix only
+def readlink(path: AnyStr) -> AnyStr: ...
+def remove(path: _PathType) -> None: ...
+def removedirs(path: _PathType) -> None: ...
+def rename(src: _PathType, dst: _PathType) -> None: ...
+def renames(old: _PathType, new: _PathType) -> None: ...
+def rmdir(path: _PathType) -> None: ...
+def stat(path: _PathType) -> Any: ...
@overload
def stat_float_times(newvalue: bool = ...) -> None: ...
@overload
def stat_float_times() -> bool: ...
+def statvfs(path: _PathType) -> _StatVFS: ... # Unix only
+def symlink(source: _PathType, link_name: _PathType) -> None: ...
+def unlink(path: _PathType) -> None: ...
+def utime(path: _PathType, times: Optional[Tuple[float, float]]) -> None: ...
+
+# TODO onerror: function from OSError to void
+def walk(top: AnyStr, topdown: bool = ..., onerror: Any = ...,
+ followlinks: bool = ...) -> Iterator[Tuple[AnyStr, List[AnyStr],
+ List[AnyStr]]]: ...
+
+def abort() -> NoReturn: ...
+# These are defined as execl(file, *args) but the first *arg is mandatory.
+def execl(file: _PathType, __arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> NoReturn: ...
+def execlp(file: _PathType, __arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> NoReturn: ...
+
+# These are: execle(file, *args, env) but env is pulled from the last element of the args.
+def execle(file: _PathType, __arg0: Union[bytes, Text], *args: Any) -> NoReturn: ...
+def execlpe(file: _PathType, __arg0: Union[bytes, Text], *args: Any) -> NoReturn: ...
+
+# The docs say `args: tuple or list of strings`
+# The implementation enforces tuple or list so we can't use Sequence.
+_ExecVArgs = Union[Tuple[Union[bytes, Text], ...], List[bytes], List[Text], List[Union[bytes, Text]]]
+def execv(path: _PathType, args: _ExecVArgs) -> None: ...
+def execve(path: _PathType, args: _ExecVArgs, env: Mapping[str, str]) -> None: ...
+def execvp(file: _PathType, args: _ExecVArgs) -> None: ...
+def execvpe(file: _PathType, args: _ExecVArgs, env: Mapping[str, str]) -> None: ...
+
+def _exit(n: int) -> NoReturn: ...
+def fork() -> int: ... # Unix only
+def forkpty() -> Tuple[int, int]: ... # some flavors of Unix
+def kill(pid: int, sig: int) -> None: ...
+def killpg(pgid: int, sig: int) -> None: ... # Unix only
+def nice(increment: int) -> int: ... # Unix only
+# TODO: plock, popen*, P_*
+def popen(command: str, *args, **kwargs) -> Optional[IO[Any]]: ...
+def popen2(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any]]: ...
+def popen3(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any], IO[Any]]: ...
+def popen4(cmd: str, *args, **kwargs) -> Tuple[IO[Any], IO[Any]]: ...
+
+def spawnl(mode: int, path: _PathType, arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> int: ...
+def spawnle(mode: int, path: _PathType, arg0: Union[bytes, Text],
+ *args: Any) -> int: ... # Imprecise sig
+def spawnlp(mode: int, file: _PathType, arg0: Union[bytes, Text],
+ *args: Union[bytes, Text]) -> int: ... # Unix only TODO
+def spawnlpe(mode: int, file: _PathType, arg0: Union[bytes, Text], *args: Any) -> int:
+ ... # Imprecise signature; Unix only TODO
+def spawnv(mode: int, path: _PathType, args: List[Union[bytes, Text]]) -> int: ...
+def spawnve(mode: int, path: _PathType, args: List[Union[bytes, Text]],
+ env: Mapping[str, str]) -> int: ...
+def spawnvp(mode: int, file: _PathType, args: List[Union[bytes, Text]]) -> int: ... # Unix only
+def spawnvpe(mode: int, file: _PathType, args: List[Union[bytes, Text]],
+ env: Mapping[str, str]) -> int:
+ ... # Unix only
+def startfile(path: _PathType, operation: Optional[str] = ...) -> None: ... # Windows only
+def system(command: _PathType) -> int: ...
+def times() -> Tuple[float, float, float, float, float]: ...
+def wait() -> Tuple[int, int]: ... # Unix only
+def waitpid(pid: int, options: int) -> Tuple[int, int]: ...
+def wait3(options: int) -> Tuple[int, int, Any]: ... # Unix only
+def wait4(pid: int, options: int) -> Tuple[int, int, Any]: ... # Unix only
+def WCOREDUMP(status: int) -> bool: ... # Unix only
+def WIFCONTINUED(status: int) -> bool: ... # Unix only
+def WIFSTOPPED(status: int) -> bool: ... # Unix only
+def WIFSIGNALED(status: int) -> bool: ... # Unix only
+def WIFEXITED(status: int) -> bool: ... # Unix only
+def WEXITSTATUS(status: int) -> int: ... # Unix only
+def WSTOPSIG(status: int) -> int: ... # Unix only
+def WTERMSIG(status: int) -> int: ... # Unix only
+def confstr(name: Union[str, int]) -> Optional[str]: ... # Unix only
+def getloadavg() -> Tuple[float, float, float]: ... # Unix only
+def sysconf(name: Union[str, int]) -> int: ... # Unix only
+def urandom(n: int) -> bytes: ...
+
+def tmpfile() -> IO[Any]: ...
+def tmpnam() -> str: ...
+def tempnam(dir: str = ..., prefix: str = ...) -> str: ...
+
+P_ALL = 0
+WEXITED = 0
+WNOWAIT = 0
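[Reviewer note] A hypothetical sketch (variable names illustrative) of behaviour the rewritten os stub now captures: getenv() is Optional[str] without a default and never None with one, and low-level I/O such as read() and urandom() is typed as bytes.

import os

home = os.getenv('HOME')                 # Optional[str]
shell = os.getenv('SHELL', '/bin/sh')    # str -- the default rules out None
fd = os.open('/etc/hostname', os.O_RDONLY)
data = os.read(fd, 64)                   # bytes
os.close(fd)
token = os.urandom(16)                   # bytes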
diff --git a/typeshed/stdlib/2/os/path.pyi b/typeshed/stdlib/2/os/path.pyi
index bfd88a4..7488956 100644
--- a/typeshed/stdlib/2/os/path.pyi
+++ b/typeshed/stdlib/2/os/path.pyi
@@ -3,8 +3,14 @@
# based on http://docs.python.org/3.2/library/os.path.html
# adapted for 2.7 by Michal Pokorny
+import sys
+from typing import (
+ overload, List, Any, AnyStr, Sequence, Tuple, BinaryIO, TextIO,
+ TypeVar, Union, Text, Callable
+)
-from typing import overload, List, Any, Tuple, BinaryIO, TextIO, TypeVar, Callable, AnyStr
+_T = TypeVar('_T')
+_PathType = Union[bytes, Text]
# ----- os.path variables -----
supports_unicode_filenames = False
@@ -22,25 +28,32 @@ devnull = ... # type: str
def abspath(path: AnyStr) -> AnyStr: ...
def basename(path: AnyStr) -> AnyStr: ...
-def commonprefix(list: List[AnyStr]) -> AnyStr: ...
+if sys.version_info >= (3, 5):
+ def commonpath(paths: Sequence[AnyStr]) -> AnyStr: ...
+
+# NOTE: Empty lists results in '' (str) regardless of contained type.
+# Also, in Python 2 mixed sequences of Text and bytes results in either Text or bytes
+# So, fall back to Any
+def commonprefix(list: Sequence[AnyStr]) -> Any: ...
+
def dirname(path: AnyStr) -> AnyStr: ...
-def exists(path: unicode) -> bool: ...
-def lexists(path: unicode) -> bool: ...
+def exists(path: _PathType) -> bool: ...
+def lexists(path: _PathType) -> bool: ...
def expanduser(path: AnyStr) -> AnyStr: ...
def expandvars(path: AnyStr) -> AnyStr: ...
# These return float if os.stat_float_times() == True,
# but int is a subclass of float.
-def getatime(path: unicode) -> float: ...
-def getmtime(path: unicode) -> float: ...
-def getctime(path: unicode) -> float: ...
+def getatime(path: _PathType) -> float: ...
+def getmtime(path: _PathType) -> float: ...
+def getctime(path: _PathType) -> float: ...
-def getsize(path: unicode) -> int: ...
-def isabs(path: unicode) -> bool: ...
-def isfile(path: unicode) -> bool: ...
-def isdir(path: unicode) -> bool: ...
-def islink(path: unicode) -> bool: ...
-def ismount(path: unicode) -> bool: ...
+def getsize(path: _PathType) -> int: ...
+def isabs(path: _PathType) -> bool: ...
+def isfile(path: _PathType) -> bool: ...
+def isdir(path: _PathType) -> bool: ...
+def islink(path: _PathType) -> bool: ...
+def ismount(path: _PathType) -> bool: ...
def join(path: AnyStr, *paths: AnyStr) -> AnyStr: ...
@@ -49,7 +62,7 @@ def normpath(path: AnyStr) -> AnyStr: ...
def realpath(path: AnyStr) -> AnyStr: ...
def relpath(path: AnyStr, start: AnyStr = ...) -> AnyStr: ...
-def samefile(path1: unicode, path2: unicode) -> bool: ...
+def samefile(path1: _PathType, path2: _PathType) -> bool: ...
def sameopenfile(fp1: int, fp2: int) -> bool: ...
# TODO
# def samestat(stat1: stat_result,
@@ -61,5 +74,5 @@ def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ...
def splitunc(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... # Windows only, deprecated
-_T = TypeVar('_T')
-def walk(path: AnyStr, visit: Callable[[_T, AnyStr, List[AnyStr]], Any], arg: _T) -> None: ...
+if sys.version_info < (3,):
+ def walk(path: AnyStr, visit: Callable[[_T, AnyStr, List[AnyStr]], Any], arg: _T) -> None: ...
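[Reviewer note] A hypothetical sketch of the os.path changes: the predicates now take _PathType (bytes or Text), while AnyStr functions such as join() keep argument and return types consistent.

import os.path

os.path.exists('/tmp')                  # byte-string path accepted
os.path.exists(u'/tmp')                 # unicode path accepted as well
log = os.path.join('/tmp', 'log.txt')   # str in, str out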
diff --git a/typeshed/stdlib/2/os2emxpath.pyi b/typeshed/stdlib/2/os2emxpath.pyi
new file mode 100644
index 0000000..5e5439f
--- /dev/null
+++ b/typeshed/stdlib/2/os2emxpath.pyi
@@ -0,0 +1,50 @@
+# Stubs for posixpath (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, AnyStr, List
+from genericpath import * # noqa: F403
+
+curdir = ... # type: Any
+pardir = ... # type: Any
+extsep = ... # type: Any
+sep = ... # type: Any
+pathsep = ... # type: Any
+defpath = ... # type: Any
+altsep = ... # type: Any
+devnull = ... # type: Any
+
+def normcase(s): ...
+def isabs(s): ...
+def join(a, *p): ...
+def split(p): ...
+def splitext(p): ...
+def splitdrive(p): ...
+def basename(p): ...
+def dirname(p): ...
+def islink(path): ...
+def lexists(path): ...
+def samefile(f1, f2): ...
+def sameopenfile(fp1, fp2): ...
+def samestat(s1, s2): ...
+def ismount(path): ...
+def walk(top, func, arg): ...
+def expanduser(path): ...
+def expandvars(path): ...
+def normpath(path): ...
+def abspath(path): ...
+def realpath(filename): ...
+
+supports_unicode_filenames = ... # type: Any
+
+def relpath(path, start=...): ...
+
+# posixpath imports these from genericpath.py:
+def commonprefix(list: List[AnyStr]) -> AnyStr: ...
+def exists(path: unicode) -> bool: ...
+def getatime(path: unicode) -> float: ...
+def getmtime(path: unicode) -> float: ...
+def getctime(path: unicode) -> float: ...
+def getsize(path: unicode) -> int: ...
+def isfile(path: unicode) -> bool: ...
+def isdir(path: unicode) -> bool: ...
diff --git a/typeshed/stdlib/2/pickle.pyi b/typeshed/stdlib/2/pickle.pyi
index 1b5b1d6..ed7af58 100644
--- a/typeshed/stdlib/2/pickle.pyi
+++ b/typeshed/stdlib/2/pickle.pyi
@@ -1,14 +1,14 @@
# Stubs for pickle (Python 2)
-from typing import Any, BinaryIO
+from typing import Any, IO
HIGHEST_PROTOCOL = ... # type: int
-def dump(obj: Any, file: BinaryIO, protocol: int = None) -> None: ...
+def dump(obj: Any, file: IO[bytes], protocol: int = None) -> None: ...
def dumps(obj: Any, protocol: int = ...) -> bytes: ...
-def load(file: BinaryIO) -> Any: ...
+def load(file: IO[bytes]) -> Any: ...
def loads(string: bytes) -> Any: ...
@@ -25,7 +25,7 @@ class UnpicklingError(PickleError):
class Pickler:
- def __init__(self, file: BinaryIO, protocol: int = None) -> None: ...
+ def __init__(self, file: IO[bytes], protocol: int = None) -> None: ...
def dump(self, obj: Any) -> None: ...
@@ -33,6 +33,6 @@ class Pickler:
class Unpickler:
- def __init__(self, file: BinaryIO) -> None: ...
+ def __init__(self, file: IO[bytes]) -> None: ...
def load(self) -> Any: ...
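[Reviewer note] A hypothetical sketch against the relaxed pickle signatures: an in-memory io.BytesIO works as the IO[bytes] file argument.

import io
import pickle

sink = io.BytesIO()
pickle.dump({'answer': 42}, sink, protocol=2)
sink.seek(0)
obj = pickle.load(sink)    # Any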
diff --git a/typeshed/stdlib/2/posix.pyi b/typeshed/stdlib/2/posix.pyi
index f8feb03..ba01df0 100644
--- a/typeshed/stdlib/2/posix.pyi
+++ b/typeshed/stdlib/2/posix.pyi
@@ -1,4 +1,4 @@
-from typing import List, Mapping, Tuple, Union, Sequence, IO, Optional, TypeVar
+from typing import Dict, List, Mapping, Tuple, Union, Sequence, IO, Optional, TypeVar
error = OSError
diff --git a/typeshed/stdlib/2/pprint.pyi b/typeshed/stdlib/2/pprint.pyi
deleted file mode 100644
index 5275d03..0000000
--- a/typeshed/stdlib/2/pprint.pyi
+++ /dev/null
@@ -1,24 +0,0 @@
-# Stubs for pprint (Python 2)
-#
-# NOTE: Based on a dynamically typed automatically generated by stubgen.
-
-from typing import IO, Any
-
-def pprint(object: Any, stream: IO[str] = ..., indent: int = ..., width: int = ...,
- depth: int = ...) -> None: ...
-def pformat(object: Any, indent: int =..., width: int =..., depth: int =...) -> str: ...
-def saferepr(object): ...
-def isreadable(object): ...
-def isrecursive(object): ...
-
-class PrettyPrinter:
- def __init__(self,
- indent: int = ...,
- width: int = ...,
- depth: int = ...,
- stream: IO[Any] = ...) -> None: ...
- def pprint(self, object: Any) -> str: ...
- def pformat(self, object): ...
- def isrecursive(self, object): ...
- def isreadable(self, object): ...
- def format(self, object, context, maxlevels, level): ...
diff --git a/typeshed/stdlib/2/pydoc.pyi b/typeshed/stdlib/2/pydoc.pyi
new file mode 100644
index 0000000..453d2d7
--- /dev/null
+++ b/typeshed/stdlib/2/pydoc.pyi
@@ -0,0 +1,181 @@
+# Stubs for pydoc (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, AnyStr, Callable, Container, Dict, IO, List, Mapping, MutableMapping, Optional, Tuple, Type, Union
+from mypy_extensions import NoReturn
+from repr import Repr
+
+from types import FunctionType, MethodType, ModuleType, TracebackType
+# the return type of sys.exc_info(), used by ErrorDuringImport.__init__
+_Exc_Info = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]]
+
+__author__ = ... # type: str
+__date__ = ... # type: str
+__version__ = ... # type: str
+__credits__ = ... # type: str
+
+def pathdirs() -> List[str]: ...
+def getdoc(object: object) -> Union[str, unicode]: ...
+def splitdoc(doc: AnyStr) -> Tuple[AnyStr, AnyStr]: ...
+def classname(object: object, modname: str) -> str: ...
+def isdata(object: object) -> bool: ...
+def replace(text: AnyStr, *pairs: AnyStr) -> AnyStr: ...
+def cram(text: str, maxlen: int) -> str: ...
+def stripid(text: str) -> str: ...
+def allmethods(cl: type) -> MutableMapping[str, MethodType]: ...
+def visiblename(name: str, all: Optional[Container[str]] = ..., obj: Optional[object] = ...) -> bool: ...
+def classify_class_attrs(object: object) -> List[Tuple[str, str, type, str]]: ...
+
+def ispackage(path: str) -> bool: ...
+def source_synopsis(file: IO[AnyStr]) -> Optional[AnyStr]: ...
+def synopsis(filename: str, cache: MutableMapping[str, Tuple[int, str]] = ...) -> Optional[str]: ...
+
+class ErrorDuringImport(Exception):
+ filename = ... # type: str
+ exc = ... # type: Optional[Type[BaseException]]
+ value = ... # type: Optional[BaseException]
+ tb = ... # type: Optional[TracebackType]
+ def __init__(self, filename: str, exc_info: _Exc_Info) -> None: ...
+
+def importfile(path: str) -> ModuleType: ...
+def safeimport(path: str, forceload: bool = ..., cache: MutableMapping[str, ModuleType] = ...) -> ModuleType: ...
+
+class Doc:
+ def document(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ...
+ def fail(self, object: object, name: Optional[str] = ..., *args: Any) -> NoReturn: ...
+ def docmodule(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ...
+ def docclass(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ...
+ def docroutine(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ...
+ def docother(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ...
+ def docproperty(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ...
+ def docdata(self, object: object, name: Optional[str] = ..., *args: Any) -> str: ...
+ def getdocloc(self, object: object) -> Optional[str]: ...
+
+class HTMLRepr(Repr):
+ maxlist = ... # type: int
+ maxtuple = ... # type: int
+ maxdict = ... # type: int
+ maxstring = ... # type: int
+ maxother = ... # type: int
+ def __init__(self) -> None: ...
+ def escape(self, text: str) -> str: ...
+ def repr(self, object: object) -> str: ...
+ def repr1(self, x: object, level: complex) -> str: ...
+ def repr_string(self, x: Union[str, unicode], level: complex) -> str: ...
+ def repr_str(self, x: Union[str, unicode], level: complex) -> str: ...
+ def repr_instance(self, x: object, level: complex) -> str: ...
+ def repr_unicode(self, x: AnyStr, level: complex) -> str: ...
+
+class HTMLDoc(Doc):
+ def repr(self, object: object) -> str: ...
+ def escape(self, test: str) -> str: ...
+ def page(self, title: str, contents: str) -> str: ...
+ def heading(self, title: str, fgcol: str, bgcol: str, extras: str = ...) -> str: ...
+ def section(self, title: str, fgcol: str, bgcol: str, contents: str, width: int = ..., prelude: str = ..., marginalia: Optional[str] = ..., gap: str = ...) -> str: ...
+ def bigsection(self, title: str, *args) -> str: ...
+ def preformat(self, text: str) -> str: ...
+ def multicolumn(self, list: List[Any], format: Callable[[Any], str], cols: int = ...) -> str: ...
+ def grey(self, text: str) -> str: ...
+ def namelink(self, name: str, *dicts: MutableMapping[str, str]) -> str: ...
+ def classlink(self, object: object, modname: str) -> str: ...
+ def modulelink(self, object: object) -> str: ...
+ def modpkglink(self, data: Tuple[str, str, bool, bool]) -> str: ...
+ def markup(self, text: str, escape: Optional[Callable[[str], str]] = ..., funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., methods: Mapping[str, str] = ...) -> str: ...
+ def formattree(self, tree: List[Union[Tuple[type, Tuple[type, ...]], list]], modname: str, parent: Optional[type] = ...) -> str: ...
+ def docmodule(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., *ignored) -> str: ...
+ def docclass(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., *ignored) -> str: ...
+ def formatvalue(self, object: object) -> str: ...
+ def docroutine(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., methods: Mapping[str, str] = ..., cl: Optional[type] = ..., *ignored) -> str: ...
+ def docproperty(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., cl: Optional[Any] = ..., *ignored) -> str: ...
+ def docother(self, object: object, name: Optional[str] = ..., mod: Optional[Any] = ..., *ignored) -> str: ...
+ def docdata(self, object: object, name: Optional[str] = ..., mod: Optional[Any] = ..., cl: Optional[Any] = ..., *ignored) -> str: ...
+ def index(self, dir: str, shadowed: Optional[MutableMapping[str, bool]] = ...) -> str: ...
+
+class TextRepr(Repr):
+ maxlist = ... # type: int
+ maxtuple = ... # type: int
+ maxdict = ... # type: int
+ maxstring = ... # type: int
+ maxother = ... # type: int
+ def __init__(self) -> None: ...
+ def repr1(self, x: object, level: complex) -> str: ...
+ def repr_string(self, x: str, level: complex) -> str: ...
+ def repr_str(self, x: str, level: complex) -> str: ...
+ def repr_instance(self, x: object, level: complex) -> str: ...
+
+class TextDoc(Doc):
+ def repr(self, object: object) -> str: ...
+ def bold(self, text: str) -> str: ...
+ def indent(self, text: str, prefix: str = ...) -> str: ...
+ def section(self, title: str, contents: str) -> str: ...
+ def formattree(self, tree: List[Union[Tuple[type, Tuple[type, ...]], list]], modname: str, parent: Optional[type] = ..., prefix: str = ...) -> str: ...
+ def docmodule(self, object: object, name: Optional[str] = ..., mod: Optional[Any] = ..., *ignored) -> str: ...
+ def docclass(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., *ignored) -> str: ...
+ def formatvalue(self, object: object) -> str: ...
+ def docroutine(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., cl: Optional[Any] = ..., *ignored) -> str: ...
+ def docproperty(self, object: object, name: Optional[str] = ..., mod: Optional[Any] = ..., cl: Optional[Any] = ..., *ignored) -> str: ...
+ def docdata(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., cl: Optional[Any] = ..., *ignored) -> str: ...
+ def docother(self, object: object, name: Optional[str] = ..., mod: Optional[str] = ..., parent: Optional[str] = ..., maxlen: Optional[int] = ..., doc: Optional[Any] = ..., *ignored) -> str: ...
+
+def pager(text: str) -> None: ...
+def getpager() -> Callable[[str], None]: ...
+def plain(text: str) -> str: ...
+def pipepager(text: str, cmd: str) -> None: ...
+def tempfilepager(text: str, cmd: str) -> None: ...
+def ttypager(text: str) -> None: ...
+def plainpager(text: str) -> None: ...
+def describe(thing: Any) -> str: ...
+def locate(path: str, forceload: bool = ...) -> object: ...
+
+text = ... # type: TextDoc
+html = ... # type: HTMLDoc
+
+class _OldStyleClass: ...
+
+def resolve(thing: Union[str, object], forceload: bool = ...) -> Optional[Tuple[object, str]]: ...
+def render_doc(thing: Union[str, object], title: str = ..., forceload: bool = ...) -> str: ...
+def doc(thing: Union[str, object], title: str = ..., forceload: bool = ...) -> None: ...
+def writedoc(thing: Union[str, object], forceload: bool = ...) -> None: ...
+def writedocs(dir: str, pkgpath: str = ..., done: Optional[Any] = ...) -> None: ...
+
+class Helper:
+ keywords = ... # type: Dict[str, Union[str, Tuple[str, str]]]
+ symbols = ... # type: Dict[str, str]
+ topics = ... # type: Dict[str, Union[str, Tuple[str, ...]]]
+ def __init__(self, input: Optional[IO[str]] = ..., output: Optional[IO[str]] = ...) -> None: ...
+ input = ... # type: IO[str]
+ output = ... # type: IO[str]
+ def __call__(self, request: Union[str, Helper, object] = ...) -> None: ...
+ def interact(self) -> None: ...
+ def getline(self, prompt: str) -> str: ...
+ def help(self, request: Any) -> None: ...
+ def intro(self) -> None: ...
+ def list(self, items: List[str], columns: int = ..., width: int = ...) -> None: ...
+ def listkeywords(self) -> None: ...
+ def listsymbols(self) -> None: ...
+ def listtopics(self) -> None: ...
+ def showtopic(self, topic: str, more_xrefs: str = ...) -> None: ...
+ def showsymbol(self, symbol: str) -> None: ...
+ def listmodules(self, key: str = ...) -> None: ...
+
+help = ... # type: Helper
+
+# See Python issue #11182: "remove the unused and undocumented pydoc.Scanner class"
+# class Scanner:
+# roots = ... # type: Any
+# state = ... # type: Any
+# children = ... # type: Any
+# descendp = ... # type: Any
+# def __init__(self, roots, children, descendp) -> None: ...
+# def next(self): ...
+
+class ModuleScanner:
+ quit = ... # type: bool
+ def run(self, callback: Callable[[Optional[str], str, str], None], key: Optional[Any] = ..., completer: Optional[Callable[[], None]] = ..., onerror: Optional[Callable] = ...) -> None: ...
+
+def apropos(key: str) -> None: ...
+def serve(port: int, callback: Optional[Callable[[Any], None]] = ..., completer: Optional[Callable[[], None]] = ...) -> None: ...
+def gui() -> None: ...
+def ispath(x: Any) -> bool: ...
+def cli() -> None: ...
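[Reviewer note] A brief, hypothetical sketch exercising a few of the newly stubbed pydoc entry points; the exact output is indicative only.

import pydoc

text = pydoc.render_doc(len)    # str: rendered help text for an object
print pydoc.plain(text)         # plain() strips the over-strike formatting
print pydoc.describe([])        # str: a short description of the object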
diff --git a/typeshed/stdlib/2/quopri.pyi b/typeshed/stdlib/2/quopri.pyi
deleted file mode 100644
index 93ac393..0000000
--- a/typeshed/stdlib/2/quopri.pyi
+++ /dev/null
@@ -1,8 +0,0 @@
-# Stubs for quopri (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-def encode(input, output, quotetabs, header=0): ...
-def encodestring(s, quotetabs=0, header=0): ...
-def decode(input, output, header=0): ...
-def decodestring(s, header=0): ...
diff --git a/typeshed/stdlib/2/repr.pyi b/typeshed/stdlib/2/repr.pyi
new file mode 100644
index 0000000..5e54f69
--- /dev/null
+++ b/typeshed/stdlib/2/repr.pyi
@@ -0,0 +1,31 @@
+class Repr:
+ maxarray = ... # type: int
+ maxdeque = ... # type: int
+ maxdict = ... # type: int
+ maxfrozenset = ... # type: int
+ maxlevel = ... # type: int
+ maxlist = ... # type: int
+ maxlong = ... # type: int
+ maxother = ... # type: int
+ maxset = ... # type: int
+ maxstring = ... # type: int
+ maxtuple = ... # type: int
+ def __init__(self) -> None: ...
+ def _repr_iterable(self, x, level: complex, left, right, maxiter, trail=...) -> str: ...
+ def repr(self, x) -> str: ...
+ def repr1(self, x, level: complex) -> str: ...
+ def repr_array(self, x, level: complex) -> str: ...
+ def repr_deque(self, x, level: complex) -> str: ...
+ def repr_dict(self, x, level: complex) -> str: ...
+ def repr_frozenset(self, x, level: complex) -> str: ...
+ def repr_instance(self, x, level: complex) -> str: ...
+ def repr_list(self, x, level: complex) -> str: ...
+ def repr_long(self, x, level: complex) -> str: ...
+ def repr_set(self, x, level: complex) -> str: ...
+ def repr_str(self, x, level: complex) -> str: ...
+ def repr_tuple(self, x, level: complex) -> str: ...
+
+def _possibly_sorted(x) -> list: ...
+
+aRepr = ... # type: Repr
+def repr(x) -> str: ...
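[Reviewer note] A hypothetical sketch of the new repr stub in use: the max* limits are plain ints, so tightening them shortens long containers.

from repr import Repr

short = Repr()
short.maxlist = 3
print short.repr(range(100))    # truncated list representation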
diff --git a/typeshed/stdlib/2/sets.pyi b/typeshed/stdlib/2/sets.pyi
new file mode 100644
index 0000000..a68994f
--- /dev/null
+++ b/typeshed/stdlib/2/sets.pyi
@@ -0,0 +1,61 @@
+# Stubs for sets (Python 2)
+from typing import Any, Callable, Hashable, Iterable, Iterator, MutableMapping, Optional, TypeVar, Union
+
+_T = TypeVar('_T')
+_Setlike = Union[BaseSet[_T], Iterable[_T]]
+_SelfT = TypeVar('_SelfT', bound=BaseSet)
+
+class BaseSet(Iterable[_T]):
+ def __init__(self) -> None: ...
+ def __len__(self) -> int: ...
+ def __repr__(self) -> str: ...
+ def __str__(self) -> str: ...
+ def __iter__(self) -> Iterator[_T]: ...
+ def __cmp__(self, other: Any) -> int: ...
+ def __eq__(self, other: Any) -> bool: ...
+ def __ne__(self, other: Any) -> bool: ...
+ def copy(self: _SelfT) -> _SelfT: ...
+ def __copy__(self: _SelfT) -> _SelfT: ...
+ def __deepcopy__(self: _SelfT, memo: MutableMapping[int, BaseSet[_T]]) -> _SelfT: ...
+ def __or__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ...
+ def union(self: _SelfT, other: _Setlike) -> _SelfT: ...
+ def __and__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ...
+ def intersection(self: _SelfT, other: _Setlike) -> _SelfT: ...
+ def __xor__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ...
+ def symmetric_difference(self: _SelfT, other: _Setlike) -> _SelfT: ...
+ def __sub__(self: _SelfT, other: BaseSet[_T]) -> _SelfT: ...
+ def difference(self: _SelfT, other: _Setlike) -> _SelfT: ...
+ def __contains__(self, element: Any) -> bool: ...
+ def issubset(self, other: BaseSet[_T]) -> bool: ...
+ def issuperset(self, other: BaseSet[_T]) -> bool: ...
+ def __le__(self, other: BaseSet[_T]) -> bool: ...
+ def __ge__(self, other: BaseSet[_T]) -> bool: ...
+ def __lt__(self, other: BaseSet[_T]) -> bool: ...
+ def __gt__(self, other: BaseSet[_T]) -> bool: ...
+
+class ImmutableSet(BaseSet[_T], Hashable):
+ def __init__(self, iterable: Optional[_Setlike] = ...) -> None: ...
+ def __hash__(self) -> int: ...
+
+class Set(BaseSet[_T]):
+ def __init__(self, iterable: Optional[_Setlike] = ...) -> None: ...
+ def __ior__(self, other: BaseSet[_T]) -> Set: ...
+ def union_update(self, other: _Setlike) -> None: ...
+ def __iand__(self, other: BaseSet[_T]) -> Set: ...
+ def intersection_update(self, other: _Setlike) -> None: ...
+ def __ixor__(self, other: BaseSet[_T]) -> Set: ...
+ def symmetric_difference_update(self, other: _Setlike) -> None: ...
+ def __isub__(self, other: BaseSet[_T]) -> Set: ...
+ def difference_update(self, other: _Setlike) -> None: ...
+ def update(self, iterable: _Setlike) -> None: ...
+ def clear(self) -> None: ...
+ def add(self, element: _T) -> None: ...
+ def remove(self, element: _T) -> None: ...
+ def discard(self, element: _T) -> None: ...
+ def pop(self) -> _T: ...
+ def __as_immutable__(self) -> ImmutableSet[_T]: ...
+ def __as_temporarily_immutable__(self) -> _TemporarilyImmutableSet[_T]: ...
+
+class _TemporarilyImmutableSet(BaseSet[_T]):
+ def __init__(self, set: BaseSet[_T]) -> None: ...
+ def __hash__(self) -> int: ...
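
A short sketch against the new `sets` stub (the long-deprecated Python 2 module); illustrative only:

    from sets import Set, ImmutableSet

    s = Set([1, 2, 3])
    s.add(4)                          # Set is the mutable variant
    print(s.union([5]))               # union() accepts any iterable (_Setlike)
    frozen = ImmutableSet(s)          # hashable, so usable as a dict key
    print({frozen: 'cached'})
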
diff --git a/typeshed/stdlib/2/socket.pyi b/typeshed/stdlib/2/socket.pyi
deleted file mode 100644
index cbc3573..0000000
--- a/typeshed/stdlib/2/socket.pyi
+++ /dev/null
@@ -1,362 +0,0 @@
-# Stubs for socket
-# Ron Murawski <ron at horizonchess.com>
-
-# based on: http://docs.python.org/3.2/library/socket.html
-# see: http://hg.python.org/cpython/file/3d0686d90f55/Lib/socket.py
-# see: http://nullege.com/codes/search/socket
-# adapted for Python 2.7 by Michal Pokorny
-
-from typing import Any, Tuple, List, Optional, Union, overload
-
-# ----- variables and constants -----
-
-AF_UNIX = 0
-AF_INET = 0
-AF_INET6 = 0
-SOCK_STREAM = 0
-SOCK_DGRAM = 0
-SOCK_RAW = 0
-SOCK_RDM = 0
-SOCK_SEQPACKET = 0
-SOCK_CLOEXEC = 0
-SOCK_NONBLOCK = 0
-SOMAXCONN = 0
-has_ipv6 = False
-_GLOBAL_DEFAULT_TIMEOUT = ... # type: Any
-SocketType = ... # type: Any
-SocketIO = ... # type: Any
-
-
-# the following constants are included with Python 3.2.3 (Ubuntu)
-# some of the constants may be Linux-only
-# all Windows/Mac-specific constants are absent
-AF_APPLETALK = 0
-AF_ASH = 0
-AF_ATMPVC = 0
-AF_ATMSVC = 0
-AF_AX25 = 0
-AF_BLUETOOTH = 0
-AF_BRIDGE = 0
-AF_DECnet = 0
-AF_ECONET = 0
-AF_IPX = 0
-AF_IRDA = 0
-AF_KEY = 0
-AF_LLC = 0
-AF_NETBEUI = 0
-AF_NETLINK = 0
-AF_NETROM = 0
-AF_PACKET = 0
-AF_PPPOX = 0
-AF_ROSE = 0
-AF_ROUTE = 0
-AF_SECURITY = 0
-AF_SNA = 0
-AF_TIPC = 0
-AF_UNSPEC = 0
-AF_WANPIPE = 0
-AF_X25 = 0
-AI_ADDRCONFIG = 0
-AI_ALL = 0
-AI_CANONNAME = 0
-AI_NUMERICHOST = 0
-AI_NUMERICSERV = 0
-AI_PASSIVE = 0
-AI_V4MAPPED = 0
-BDADDR_ANY = 0
-BDADDR_LOCAL = 0
-BTPROTO_HCI = 0
-BTPROTO_L2CAP = 0
-BTPROTO_RFCOMM = 0
-BTPROTO_SCO = 0
-CAPI = 0
-EAGAIN = 0
-EAI_ADDRFAMILY = 0
-EAI_AGAIN = 0
-EAI_BADFLAGS = 0
-EAI_FAIL = 0
-EAI_FAMILY = 0
-EAI_MEMORY = 0
-EAI_NODATA = 0
-EAI_NONAME = 0
-EAI_OVERFLOW = 0
-EAI_SERVICE = 0
-EAI_SOCKTYPE = 0
-EAI_SYSTEM = 0
-EBADF = 0
-EINTR = 0
-EWOULDBLOCK = 0
-HCI_DATA_DIR = 0
-HCI_FILTER = 0
-HCI_TIME_STAMP = 0
-INADDR_ALLHOSTS_GROUP = 0
-INADDR_ANY = 0
-INADDR_BROADCAST = 0
-INADDR_LOOPBACK = 0
-INADDR_MAX_LOCAL_GROUP = 0
-INADDR_NONE = 0
-INADDR_UNSPEC_GROUP = 0
-IPPORT_RESERVED = 0
-IPPORT_USERRESERVED = 0
-IPPROTO_AH = 0
-IPPROTO_DSTOPTS = 0
-IPPROTO_EGP = 0
-IPPROTO_ESP = 0
-IPPROTO_FRAGMENT = 0
-IPPROTO_GRE = 0
-IPPROTO_HOPOPTS = 0
-IPPROTO_ICMP = 0
-IPPROTO_ICMPV6 = 0
-IPPROTO_IDP = 0
-IPPROTO_IGMP = 0
-IPPROTO_IP = 0
-IPPROTO_IPIP = 0
-IPPROTO_IPV6 = 0
-IPPROTO_NONE = 0
-IPPROTO_PIM = 0
-IPPROTO_PUP = 0
-IPPROTO_RAW = 0
-IPPROTO_ROUTING = 0
-IPPROTO_RSVP = 0
-IPPROTO_TCP = 0
-IPPROTO_TP = 0
-IPPROTO_UDP = 0
-IPV6_CHECKSUM = 0
-IPV6_DSTOPTS = 0
-IPV6_HOPLIMIT = 0
-IPV6_HOPOPTS = 0
-IPV6_JOIN_GROUP = 0
-IPV6_LEAVE_GROUP = 0
-IPV6_MULTICAST_HOPS = 0
-IPV6_MULTICAST_IF = 0
-IPV6_MULTICAST_LOOP = 0
-IPV6_NEXTHOP = 0
-IPV6_PKTINFO = 0
-IPV6_RECVDSTOPTS = 0
-IPV6_RECVHOPLIMIT = 0
-IPV6_RECVHOPOPTS = 0
-IPV6_RECVPKTINFO = 0
-IPV6_RECVRTHDR = 0
-IPV6_RECVTCLASS = 0
-IPV6_RTHDR = 0
-IPV6_RTHDRDSTOPTS = 0
-IPV6_RTHDR_TYPE_0 = 0
-IPV6_TCLASS = 0
-IPV6_UNICAST_HOPS = 0
-IPV6_V6ONLY = 0
-IP_ADD_MEMBERSHIP = 0
-IP_DEFAULT_MULTICAST_LOOP = 0
-IP_DEFAULT_MULTICAST_TTL = 0
-IP_DROP_MEMBERSHIP = 0
-IP_HDRINCL = 0
-IP_MAX_MEMBERSHIPS = 0
-IP_MULTICAST_IF = 0
-IP_MULTICAST_LOOP = 0
-IP_MULTICAST_TTL = 0
-IP_OPTIONS = 0
-IP_RECVOPTS = 0
-IP_RECVRETOPTS = 0
-IP_RETOPTS = 0
-IP_TOS = 0
-IP_TTL = 0
-MSG_CTRUNC = 0
-MSG_DONTROUTE = 0
-MSG_DONTWAIT = 0
-MSG_EOR = 0
-MSG_OOB = 0
-MSG_PEEK = 0
-MSG_TRUNC = 0
-MSG_WAITALL = 0
-NETLINK_DNRTMSG = 0
-NETLINK_FIREWALL = 0
-NETLINK_IP6_FW = 0
-NETLINK_NFLOG = 0
-NETLINK_ROUTE = 0
-NETLINK_USERSOCK = 0
-NETLINK_XFRM = 0
-NI_DGRAM = 0
-NI_MAXHOST = 0
-NI_MAXSERV = 0
-NI_NAMEREQD = 0
-NI_NOFQDN = 0
-NI_NUMERICHOST = 0
-NI_NUMERICSERV = 0
-PACKET_BROADCAST = 0
-PACKET_FASTROUTE = 0
-PACKET_HOST = 0
-PACKET_LOOPBACK = 0
-PACKET_MULTICAST = 0
-PACKET_OTHERHOST = 0
-PACKET_OUTGOING = 0
-PF_PACKET = 0
-SHUT_RD = 0
-SHUT_RDWR = 0
-SHUT_WR = 0
-SOL_HCI = 0
-SOL_IP = 0
-SOL_SOCKET = 0
-SOL_TCP = 0
-SOL_TIPC = 0
-SOL_UDP = 0
-SO_ACCEPTCONN = 0
-SO_BROADCAST = 0
-SO_DEBUG = 0
-SO_DONTROUTE = 0
-SO_ERROR = 0
-SO_KEEPALIVE = 0
-SO_LINGER = 0
-SO_OOBINLINE = 0
-SO_RCVBUF = 0
-SO_RCVLOWAT = 0
-SO_RCVTIMEO = 0
-SO_REUSEADDR = 0
-SO_SNDBUF = 0
-SO_SNDLOWAT = 0
-SO_SNDTIMEO = 0
-SO_TYPE = 0
-TCP_CORK = 0
-TCP_DEFER_ACCEPT = 0
-TCP_INFO = 0
-TCP_KEEPCNT = 0
-TCP_KEEPIDLE = 0
-TCP_KEEPINTVL = 0
-TCP_LINGER2 = 0
-TCP_MAXSEG = 0
-TCP_NODELAY = 0
-TCP_QUICKACK = 0
-TCP_SYNCNT = 0
-TCP_WINDOW_CLAMP = 0
-TIPC_ADDR_ID = 0
-TIPC_ADDR_NAME = 0
-TIPC_ADDR_NAMESEQ = 0
-TIPC_CFG_SRV = 0
-TIPC_CLUSTER_SCOPE = 0
-TIPC_CONN_TIMEOUT = 0
-TIPC_CRITICAL_IMPORTANCE = 0
-TIPC_DEST_DROPPABLE = 0
-TIPC_HIGH_IMPORTANCE = 0
-TIPC_IMPORTANCE = 0
-TIPC_LOW_IMPORTANCE = 0
-TIPC_MEDIUM_IMPORTANCE = 0
-TIPC_NODE_SCOPE = 0
-TIPC_PUBLISHED = 0
-TIPC_SRC_DROPPABLE = 0
-TIPC_SUBSCR_TIMEOUT = 0
-TIPC_SUB_CANCEL = 0
-TIPC_SUB_PORTS = 0
-TIPC_SUB_SERVICE = 0
-TIPC_TOP_SRV = 0
-TIPC_WAIT_FOREVER = 0
-TIPC_WITHDRAWN = 0
-TIPC_ZONE_SCOPE = 0
-
-
-# ----- exceptions -----
-class error(IOError):
- ...
-
-class herror(error):
- def __init__(self, herror: int, string: str) -> None: ...
-
-class gaierror(error):
- def __init__(self, error: int, string: str) -> None: ...
-
-class timeout(error):
- ...
-
-
-# Addresses can be either tuples of varying lengths (AF_INET, AF_INET6,
-# AF_NETLINK, AF_TIPC) or strings (AF_UNIX).
-
-# TODO AF_PACKET and AF_BLUETOOTH address objects
-
-
-# ----- classes -----
-class socket:
- family = 0
- type = 0
- proto = 0
-
- def __init__(self, family: int = ..., type: int = ...,
- proto: int = ...) -> None: ...
-
- # --- methods ---
- # second tuple item is an address
- def accept(self) -> Tuple['socket', Any]: ...
- def bind(self, address: Union[tuple, str]) -> None: ...
- def close(self) -> None: ...
- def connect(self, address: Union[tuple, str]) -> None: ...
- def connect_ex(self, address: Union[tuple, str]) -> int: ...
- def detach(self) -> int: ...
- def fileno(self) -> int: ...
-
- # return value is an address
- def getpeername(self) -> Any: ...
- def getsockname(self) -> Any: ...
-
- @overload
- def getsockopt(self, level: int, optname: int) -> int: ...
- @overload
- def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ...
-
- def gettimeout(self) -> float: ...
- def ioctl(self, control: object,
- option: Tuple[int, int, int]) -> None: ...
- def listen(self, backlog: int) -> None: ...
- # TODO the return value may be BinaryIO or TextIO, depending on mode
- def makefile(self, mode: str = ..., buffering: int = ...,
- encoding: str = ..., errors: str = ...,
- newline: str = ...) -> Any:
- ...
- def recv(self, bufsize: int, flags: int = ...) -> str: ...
-
- # return type is an address
- def recvfrom(self, bufsize: int, flags: int = ...) -> Any: ...
- def recvfrom_into(self, buffer: str, nbytes: int,
- flags: int = ...) -> Any: ...
- def recv_into(self, buffer: str, nbytes: int,
- flags: int = ...) -> Any: ...
- def send(self, data: str, flags: int = ...) -> int: ...
- def sendall(self, data: str, flags: int = ...) -> None:
- ... # return type: None on success
- def sendto(self, data: str, address: Union[tuple, str], flags: int = ...) -> int: ...
- def setblocking(self, flag: bool) -> None: ...
- def settimeout(self, value: Union[float, None]) -> None: ...
- def setsockopt(self, level: int, optname: int, value: Union[int, bytes]) -> None: ...
- def shutdown(self, how: int) -> None: ...
-
-
-# ----- functions -----
-def create_connection(address: Tuple[str, int],
- timeout: float = ...,
- source_address: Tuple[str, int] = ...) -> socket: ...
-
-# the 5th tuple item is an address
-def getaddrinfo(
- host: Optional[str], port: Union[str, int, None], family: int = ...,
- socktype: int = ..., proto: int = ..., flags: int = ...) -> List[Tuple[int, int, int, str, Union[Tuple[str, int], Tuple[str, int, int, int]]]]:
- ...
-
-def getfqdn(name: str = ...) -> str: ...
-def gethostbyname(hostname: str) -> str: ...
-def gethostbyname_ex(hostname: str) -> Tuple[str, List[str], List[str]]: ...
-def gethostname() -> str: ...
-def gethostbyaddr(ip_address: str) -> Tuple[str, List[str], List[str]]: ...
-def getnameinfo(sockaddr: tuple, flags: int) -> Tuple[str, int]: ...
-def getprotobyname(protocolname: str) -> int: ...
-def getservbyname(servicename: str, protocolname: str = ...) -> int: ...
-def getservbyport(port: int, protocolname: str = ...) -> str: ...
-def socketpair(family: int = ...,
- type: int = ...,
- proto: int = ...) -> Tuple[socket, socket]: ...
-def fromfd(fd: int, family: int, type: int, proto: int = ...) -> socket: ...
-def ntohl(x: int) -> int: ... # param & ret val are 32-bit ints
-def ntohs(x: int) -> int: ... # param & ret val are 16-bit ints
-def htonl(x: int) -> int: ... # param & ret val are 32-bit ints
-def htons(x: int) -> int: ... # param & ret val are 16-bit ints
-def inet_aton(ip_string: str) -> str: ... # ret val 4 bytes in length
-def inet_ntoa(packed_ip: str) -> str: ...
-def inet_pton(address_family: int, ip_string: str) -> str: ...
-def inet_ntop(address_family: int, packed_ip: str) -> str: ...
-def getdefaulttimeout() -> Union[float, None]: ...
-def setdefaulttimeout(timeout: float) -> None: ...
diff --git a/typeshed/stdlib/2/ssl.pyi b/typeshed/stdlib/2/ssl.pyi
index 3c07549..d3a3c1b 100644
--- a/typeshed/stdlib/2/ssl.pyi
+++ b/typeshed/stdlib/2/ssl.pyi
@@ -148,6 +148,10 @@ class SSLSocket(socket.socket):
def selected_npn_protocol(self) -> Optional[str]: ...
def unwrap(self) -> socket.socket: ...
def version(self) -> Optional[str]: ...
+ def read(self, len: int = ...,
+ buffer: Optional[bytearray] = ...) -> str: ...
+ def write(self, buf: str) -> int: ...
+ def pending(self) -> int: ...
class SSLContext:
@@ -178,7 +182,7 @@ class SSLContext:
def wrap_socket(self, sock: socket.socket, server_side: bool = ...,
do_handshake_on_connect: bool = ...,
suppress_ragged_eofs: bool = ...,
- server_hostname: Optional[str] = ...) -> 'SSLContext': ...
+ server_hostname: Optional[str] = ...) -> SSLSocket: ...
def session_stats(self) -> Dict[str, int]: ...
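
Roughly what the corrected `wrap_socket` return type and the added `read`/`write`/`pending` methods let a checker see; a sketch only, assuming Python 2.7.9+ (for `ssl.SSLContext` and SNI) and a reachable example.com:

    import socket
    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    raw = socket.create_connection(('example.com', 443))
    conn = ctx.wrap_socket(raw, server_hostname='example.com')  # now an SSLSocket, not an SSLContext
    conn.write('GET / HTTP/1.0\r\nHost: example.com\r\n\r\n')   # write() -> int
    print(conn.read(256))                                       # read() -> str
    print(conn.pending())
    conn.close()
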
diff --git a/typeshed/stdlib/2/string.pyi b/typeshed/stdlib/2/string.pyi
index 4bbfb48..751fd28 100644
--- a/typeshed/stdlib/2/string.pyi
+++ b/typeshed/stdlib/2/string.pyi
@@ -41,9 +41,9 @@ def strip(s: AnyStr, chars: AnyStr = ...) -> AnyStr: ...
def swapcase(s: AnyStr) -> AnyStr: ...
def translate(s: str, table: str, deletechars: str = ...) -> str: ...
def upper(s: AnyStr) -> AnyStr: ...
-def ljust(s: AnyStr, width: int, fillhar: AnyStr = ...) -> AnyStr: ...
-def rjust(s: AnyStr, width: int, fillhar: AnyStr = ...) -> AnyStr: ...
-def center(s: AnyStr, width: int, fillhar: AnyStr = ...) -> AnyStr: ...
+def ljust(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ...
+def rjust(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ...
+def center(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ...
def zfill(s: AnyStr, width: int) -> AnyStr: ...
def replace(s: AnyStr, old: AnyStr, new: AnyStr, maxreplace: int = ...) -> AnyStr: ...
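
The `fillhar` -> `fillchar` rename above simply matches the real signatures; for example (illustrative only):

    import string

    print(string.ljust('py', 6, '.'))    # 'py....'
    print(string.center('py', 6, '-'))   # '--py--'
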
diff --git a/typeshed/stdlib/2/struct.pyi b/typeshed/stdlib/2/struct.pyi
deleted file mode 100644
index 213b618..0000000
--- a/typeshed/stdlib/2/struct.pyi
+++ /dev/null
@@ -1,28 +0,0 @@
-# Stubs for struct for Python 2.7
-# Based on https://docs.python.org/2/library/struct.html
-
-from typing import Any, Tuple
-
-class error(Exception): ...
-
-def pack(fmt: str, *v: Any) -> str: ...
-# TODO buffer type
-def pack_into(fmt: str, buffer: Any, offset: int, *v: Any) -> None: ...
-
-# TODO buffer type
-def unpack(fmt: str, buffer: Any) -> Tuple[Any, ...]: ...
-def unpack_from(fmt: str, buffer: Any, offset: int = ...) -> Tuple[Any, ...]: ...
-
-def calcsize(fmt: str) -> int: ...
-
-class Struct:
- format = ... # type: str
- size = ... # type: int
-
- def __init__(self, format: str) -> None: ...
-
- def pack(self, *v: Any) -> str: ...
- # TODO buffer type
- def pack_into(self, buffer: Any, offset: int, *v: Any) -> None: ...
- def unpack(self, buffer: Any) -> Tuple[Any, ...]: ...
- def unpack_from(self, buffer: Any, offset: int = ...) -> Tuple[Any, ...]: ...
diff --git a/typeshed/stdlib/2/subprocess.pyi b/typeshed/stdlib/2/subprocess.pyi
index 288e82b..06934c6 100644
--- a/typeshed/stdlib/2/subprocess.pyi
+++ b/typeshed/stdlib/2/subprocess.pyi
@@ -2,65 +2,73 @@
# Based on http://docs.python.org/2/library/subprocess.html and Python 3 stub
-from typing import Sequence, Any, AnyStr, Mapping, Callable, Tuple, IO, Union, Optional
+from typing import Sequence, Any, Mapping, Callable, Tuple, IO, Union, Optional, List, Text
_FILE = Union[int, IO[Any]]
+_TXT = Union[bytes, Text]
+_CMD = Union[_TXT, Sequence[_TXT]]
+_ENV = Union[Mapping[bytes, _TXT], Mapping[Text, _TXT]]
# Same args as Popen.__init__
-def call(args: Union[str, Sequence[str]],
+def call(args: _CMD,
bufsize: int = ...,
- executable: str = ...,
+ executable: _TXT = ...,
stdin: _FILE = ...,
stdout: _FILE = ...,
stderr: _FILE = ...,
preexec_fn: Callable[[], Any] = ...,
close_fds: bool = ...,
shell: bool = ...,
- cwd: str = ...,
- env: Mapping[str, str] = ...,
+ cwd: _TXT = ...,
+ env: _ENV = ...,
universal_newlines: bool = ...,
startupinfo: Any = ...,
creationflags: int = ...) -> int: ...
-def check_call(args: Union[str, Sequence[str]],
+def check_call(args: _CMD,
bufsize: int = ...,
- executable: str = ...,
+ executable: _TXT = ...,
stdin: _FILE = ...,
stdout: _FILE = ...,
stderr: _FILE = ...,
preexec_fn: Callable[[], Any] = ...,
close_fds: bool = ...,
shell: bool = ...,
- cwd: str = ...,
- env: Mapping[str, str] = ...,
+ cwd: _TXT = ...,
+ env: _ENV = ...,
universal_newlines: bool = ...,
startupinfo: Any = ...,
creationflags: int = ...) -> int: ...
# Same args as Popen.__init__ except for stdout
-def check_output(args: Union[str, Sequence[str]],
+def check_output(args: _CMD,
bufsize: int = ...,
- executable: str = ...,
+ executable: _TXT = ...,
stdin: _FILE = ...,
stderr: _FILE = ...,
preexec_fn: Callable[[], Any] = ...,
close_fds: bool = ...,
shell: bool = ...,
- cwd: str = ...,
- env: Mapping[str, str] = ...,
+ cwd: _TXT = ...,
+ env: _ENV = ...,
universal_newlines: bool = ...,
startupinfo: Any = ...,
- creationflags: int = ...) -> str: ...
+ creationflags: int = ...) -> bytes: ...
PIPE = ... # type: int
STDOUT = ... # type: int
class CalledProcessError(Exception):
returncode = 0
- cmd = ... # type: str
- output = ... # type: str # May be None
+ # morally: _CMD
+ cmd = ... # type: Any
+ # morally: Optional[bytes]
+ output = ... # type: Any
- def __init__(self, returncode: int, cmd: str, output: Optional[str] = ...) -> None: ...
+ def __init__(self,
+ returncode: int,
+ cmd: _CMD,
+ output: Optional[bytes] = ...) -> None: ...
class Popen:
stdin = ... # type: Optional[IO[Any]]
@@ -70,33 +78,31 @@ class Popen:
returncode = 0
def __init__(self,
- args: Union[str, Sequence[str]],
+ args: _CMD,
bufsize: int = ...,
- executable: Optional[str] = ...,
+ executable: Optional[_TXT] = ...,
stdin: Optional[_FILE] = ...,
stdout: Optional[_FILE] = ...,
stderr: Optional[_FILE] = ...,
preexec_fn: Optional[Callable[[], Any]] = ...,
close_fds: bool = ...,
shell: bool = ...,
- cwd: Optional[str] = ...,
- env: Optional[Mapping[str, str]] = ...,
+ cwd: Optional[_TXT] = ...,
+ env: Optional[_ENV] = ...,
universal_newlines: bool = ...,
startupinfo: Optional[Any] = ...,
creationflags: int = ...) -> None: ...
def poll(self) -> int: ...
def wait(self) -> int: ...
- def communicate(self, input: Optional[AnyStr] = ...) -> Tuple[Optional[bytes], Optional[bytes]]: ...
+ # morally: -> Tuple[Optional[bytes], Optional[bytes]]
+ def communicate(self, input: Optional[_TXT] = ...) -> Tuple[Any, Any]: ...
def send_signal(self, signal: int) -> None: ...
def terminate(self) -> None: ...
def kill(self) -> None: ...
def __enter__(self) -> 'Popen': ...
def __exit__(self, type, value, traceback) -> bool: ...
-def getstatusoutput(cmd: str) -> Tuple[int, str]: ...
-def getoutput(cmd: str) -> str: ...
-
# Windows-only: STARTUPINFO etc.
STD_INPUT_HANDLE = ... # type: Any
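
A sketch of what the retyped Python 2 `subprocess` signatures accept and return; illustrative only, with POSIX commands assumed:

    import subprocess

    out = subprocess.check_output(['echo', 'hello'])   # _CMD may be a sequence; result is typed bytes now
    print(out.strip())
    subprocess.call('true', shell=True)                # ...or a single str/unicode command line
    p = subprocess.Popen(['cat'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    stdout, _ = p.communicate(input='hi')              # morally Tuple[Optional[bytes], Optional[bytes]]
    print(stdout)
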
diff --git a/typeshed/stdlib/2/symbol.pyi b/typeshed/stdlib/2/symbol.pyi
new file mode 100644
index 0000000..dd33444
--- /dev/null
+++ b/typeshed/stdlib/2/symbol.pyi
@@ -0,0 +1,91 @@
+# Stubs for symbol (Python 2)
+
+from typing import Dict
+
+single_input = ... # type: int
+file_input = ... # type: int
+eval_input = ... # type: int
+decorator = ... # type: int
+decorators = ... # type: int
+decorated = ... # type: int
+funcdef = ... # type: int
+parameters = ... # type: int
+varargslist = ... # type: int
+fpdef = ... # type: int
+fplist = ... # type: int
+stmt = ... # type: int
+simple_stmt = ... # type: int
+small_stmt = ... # type: int
+expr_stmt = ... # type: int
+augassign = ... # type: int
+print_stmt = ... # type: int
+del_stmt = ... # type: int
+pass_stmt = ... # type: int
+flow_stmt = ... # type: int
+break_stmt = ... # type: int
+continue_stmt = ... # type: int
+return_stmt = ... # type: int
+yield_stmt = ... # type: int
+raise_stmt = ... # type: int
+import_stmt = ... # type: int
+import_name = ... # type: int
+import_from = ... # type: int
+import_as_name = ... # type: int
+dotted_as_name = ... # type: int
+import_as_names = ... # type: int
+dotted_as_names = ... # type: int
+dotted_name = ... # type: int
+global_stmt = ... # type: int
+exec_stmt = ... # type: int
+assert_stmt = ... # type: int
+compound_stmt = ... # type: int
+if_stmt = ... # type: int
+while_stmt = ... # type: int
+for_stmt = ... # type: int
+try_stmt = ... # type: int
+with_stmt = ... # type: int
+with_item = ... # type: int
+except_clause = ... # type: int
+suite = ... # type: int
+testlist_safe = ... # type: int
+old_test = ... # type: int
+old_lambdef = ... # type: int
+test = ... # type: int
+or_test = ... # type: int
+and_test = ... # type: int
+not_test = ... # type: int
+comparison = ... # type: int
+comp_op = ... # type: int
+expr = ... # type: int
+xor_expr = ... # type: int
+and_expr = ... # type: int
+shift_expr = ... # type: int
+arith_expr = ... # type: int
+term = ... # type: int
+factor = ... # type: int
+power = ... # type: int
+atom = ... # type: int
+listmaker = ... # type: int
+testlist_comp = ... # type: int
+lambdef = ... # type: int
+trailer = ... # type: int
+subscriptlist = ... # type: int
+subscript = ... # type: int
+sliceop = ... # type: int
+exprlist = ... # type: int
+testlist = ... # type: int
+dictorsetmaker = ... # type: int
+classdef = ... # type: int
+arglist = ... # type: int
+argument = ... # type: int
+list_iter = ... # type: int
+list_for = ... # type: int
+list_if = ... # type: int
+comp_iter = ... # type: int
+comp_for = ... # type: int
+comp_if = ... # type: int
+testlist1 = ... # type: int
+encoding_decl = ... # type: int
+yield_expr = ... # type: int
+
+symbol = ... # type: Dict[int, str]
diff --git a/typeshed/stdlib/2/tempfile.pyi b/typeshed/stdlib/2/tempfile.pyi
index d718bd7..15eda96 100644
--- a/typeshed/stdlib/2/tempfile.pyi
+++ b/typeshed/stdlib/2/tempfile.pyi
@@ -86,16 +86,16 @@ class TemporaryDirectory:
@overload
def mkstemp() -> Tuple[int, str]: ...
@overload
-def mkstemp(suffix: AnyStr = ..., prefix: AnyStr = ..., dir: AnyStr = ...,
+def mkstemp(suffix: AnyStr = ..., prefix: AnyStr = ..., dir: Optional[AnyStr] = ...,
text: bool = ...) -> Tuple[int, AnyStr]: ...
@overload
def mkdtemp() -> str: ...
@overload
-def mkdtemp(suffix: AnyStr = ..., prefix: AnyStr = ..., dir: AnyStr = ...) -> AnyStr: ...
+def mkdtemp(suffix: AnyStr = ..., prefix: AnyStr = ..., dir: Optional[AnyStr] = ...) -> AnyStr: ...
@overload
def mktemp() -> str: ...
@overload
-def mktemp(suffix: AnyStr = ..., prefix: AnyStr = ..., dir: AnyStr = ...) -> AnyStr: ...
+def mktemp(suffix: AnyStr = ..., prefix: AnyStr = ..., dir: Optional[AnyStr] = ...) -> AnyStr: ...
def gettempdir() -> str: ...
def gettempprefix() -> str: ...
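
With `dir` now `Optional[AnyStr]`, explicitly passing `dir=None` type-checks; a small sketch:

    import os
    import tempfile

    fd, path = tempfile.mkstemp(suffix='.txt', prefix='demo-', dir=None)
    os.close(fd)
    os.remove(path)
    d = tempfile.mkdtemp(prefix='demo-', dir=None)
    os.rmdir(d)
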
diff --git a/typeshed/stdlib/2/time.pyi b/typeshed/stdlib/2/time.pyi
index 6bea209..6d2499f 100644
--- a/typeshed/stdlib/2/time.pyi
+++ b/typeshed/stdlib/2/time.pyi
@@ -20,7 +20,7 @@ class struct_time(NamedTuple('_struct_time',
_TIME_TUPLE = Tuple[int, int, int, int, int, int, int, int, int]
-def asctime(t: struct_time = ...) -> str:
+def asctime(t: Union[struct_time, _TIME_TUPLE] = ...) -> str:
raise ValueError()
def clock() -> float: ...
@@ -38,7 +38,7 @@ def mktime(t: struct_time) -> float:
def sleep(secs: float) -> None: ...
-def strftime(format: str, t: struct_time = ...) -> str:
+def strftime(format: str, t: Union[struct_time, _TIME_TUPLE] = ...) -> str:
raise MemoryError()
raise ValueError()
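
The widened parameter lets a bare 9-tuple pass where `struct_time` was required; for instance:

    import time

    now = time.localtime()
    print(time.asctime(now))                       # struct_time, as before
    print(time.strftime('%Y-%m-%d', tuple(now)))   # a plain 9-tuple is accepted too
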
diff --git a/typeshed/stdlib/2/token.pyi b/typeshed/stdlib/2/token.pyi
deleted file mode 100644
index 1c14dc4..0000000
--- a/typeshed/stdlib/2/token.pyi
+++ /dev/null
@@ -1,62 +0,0 @@
-from typing import Dict
-
-ENDMARKER = 0
-NAME = 0
-NUMBER = 0
-STRING = 0
-NEWLINE = 0
-INDENT = 0
-DEDENT = 0
-LPAR = 0
-RPAR = 0
-LSQB = 0
-RSQB = 0
-COLON = 0
-COMMA = 0
-SEMI = 0
-PLUS = 0
-MINUS = 0
-STAR = 0
-SLASH = 0
-VBAR = 0
-AMPER = 0
-LESS = 0
-GREATER = 0
-EQUAL = 0
-DOT = 0
-PERCENT = 0
-BACKQUOTE = 0
-LBRACE = 0
-RBRACE = 0
-EQEQUAL = 0
-NOTEQUAL = 0
-LESSEQUAL = 0
-GREATEREQUAL = 0
-TILDE = 0
-CIRCUMFLEX = 0
-LEFTSHIFT = 0
-RIGHTSHIFT = 0
-DOUBLESTAR = 0
-PLUSEQUAL = 0
-MINEQUAL = 0
-STAREQUAL = 0
-SLASHEQUAL = 0
-PERCENTEQUAL = 0
-AMPEREQUAL = 0
-VBAREQUAL = 0
-CIRCUMFLEXEQUAL = 0
-LEFTSHIFTEQUAL = 0
-RIGHTSHIFTEQUAL = 0
-DOUBLESTAREQUAL = 0
-DOUBLESLASH = 0
-DOUBLESLASHEQUAL = 0
-AT = 0
-OP = 0
-ERRORTOKEN = 0
-N_TOKENS = 0
-NT_OFFSET = 0
-tok_name = ... # type: Dict[int, str]
-
-def ISTERMINAL(x) -> bool: ...
-def ISNONTERMINAL(x) -> bool: ...
-def ISEOF(x) -> bool: ...
diff --git a/typeshed/stdlib/2/tokenize.pyi b/typeshed/stdlib/2/tokenize.pyi
index 159adf9..834e219 100644
--- a/typeshed/stdlib/2/tokenize.pyi
+++ b/typeshed/stdlib/2/tokenize.pyi
@@ -102,15 +102,11 @@ chain = ... # type: type
double3prog = ... # type: type
endprogs = ... # type: Dict[str, Any]
pseudoprog = ... # type: type
-re = ... # type: module
single3prog = ... # type: type
single_quoted = ... # type: Dict[str, str]
-string = ... # type: module
-sys = ... # type: module
t = ... # type: str
tabsize = ... # type: int
tok_name = ... # type: Dict[int, str]
-token = ... # type: module
tokenprog = ... # type: type
triple_quoted = ... # type: Dict[str, str]
x = ... # type: str
diff --git a/typeshed/stdlib/2/types.pyi b/typeshed/stdlib/2/types.pyi
index 15c8dd9..2896d1a 100644
--- a/typeshed/stdlib/2/types.pyi
+++ b/typeshed/stdlib/2/types.pyi
@@ -3,7 +3,7 @@
from typing import (
Any, Callable, Dict, Iterable, Iterator, List, Optional,
- Tuple, TypeVar, Union, overload,
+ Tuple, Type, TypeVar, Union, overload,
)
_T = TypeVar('_T')
@@ -13,17 +13,18 @@ TypeType = type
ObjectType = object
IntType = int
-LongType = long
+LongType = int # Really long, but can't reference that due to a mypy import cycle
FloatType = float
BooleanType = bool
ComplexType = complex
StringType = str
UnicodeType = unicode
-StringTypes = (StringType, UnicodeType)
+StringTypes = ... # type: Tuple[Type[StringType], Type[UnicodeType]]
BufferType = buffer
TupleType = tuple
ListType = list
-DictType = DictionaryType = dict
+DictType = dict
+DictionaryType = dict
class _Cell:
cell_contents = ... # type: Any
@@ -103,7 +104,8 @@ class ModuleType:
__name__ = ... # type: str
__package__ = ... # type: Optional[str]
__path__ = ... # type: Optional[Iterable[str]]
- def __init__(self, name: str, doc: str) -> None: ...
+ __dict__ = ... # type: Dict[str, Any]
+ def __init__(self, name: str, doc: Optional[str] = ...) -> None: ...
FileType = file
XRangeType = xrange
@@ -127,7 +129,7 @@ class FrameType:
f_restricted = ... # type: bool
f_trace = ... # type: Callable[[], None]
- def clear(self) -> None: pass
+ def clear(self) -> None: ...
SliceType = slice
class EllipsisType: ...
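
A sketch of the Python 2 behaviour the updated `types` stub now reflects (`doc` optional, `__dict__` present, `StringTypes` a tuple of types); illustrative only:

    import types

    mod = types.ModuleType('demo')                 # the doc argument may be omitted
    mod.__dict__['answer'] = 42
    print(mod.answer)
    print(isinstance(u'text', types.StringTypes))  # (str, unicode)
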
diff --git a/typeshed/stdlib/2/typing.pyi b/typeshed/stdlib/2/typing.pyi
index df17a70..dc807a0 100644
--- a/typeshed/stdlib/2/typing.pyi
+++ b/typeshed/stdlib/2/typing.pyi
@@ -1,8 +1,9 @@
# Stubs for typing (Python 2.7)
from abc import abstractmethod, ABCMeta
+from types import CodeType, FrameType
-# Definitions of special type checking related constructs. Their definition
+# Definitions of special type checking related constructs. Their definitions
# are not used, so their value does not matter.
overload = object()
@@ -13,11 +14,12 @@ Tuple = object()
Callable = object()
Type = object()
_promote = object()
+no_type_check = object()
ClassVar = object()
class GenericMeta(type): ...
-# Type aliases
+# Type aliases and type constructors
class TypeAlias:
# Class for defining generic aliases for library types.
@@ -30,6 +32,9 @@ List = TypeAlias(object)
Dict = TypeAlias(object)
DefaultDict = TypeAlias(object)
Set = TypeAlias(object)
+FrozenSet = TypeAlias(object)
+Counter = TypeAlias(object)
+Deque = TypeAlias(object)
# Predefined type variables.
AnyStr = TypeVar('AnyStr', str, unicode)
@@ -55,6 +60,10 @@ class SupportsFloat(metaclass=ABCMeta):
@abstractmethod
def __float__(self) -> float: ...
+class SupportsComplex(metaclass=ABCMeta):
+ @abstractmethod
+ def __complex__(self) -> complex: ...
+
class SupportsAbs(Generic[_T]):
@abstractmethod
def __abs__(self) -> _T: ...
@@ -71,6 +80,13 @@ class Sized(metaclass=ABCMeta):
@abstractmethod
def __len__(self) -> int: ...
+class Hashable(metaclass=ABCMeta):
+ # TODO: This is special, in that a subclass of a hashable class may not be hashable
+ # (for example, list vs. object). It's not obvious how to represent this. This class
+ # is currently mostly useless for static checking.
+ @abstractmethod
+ def __hash__(self) -> int: ...
+
class Iterable(Generic[_T_co]):
@abstractmethod
def __iter__(self) -> Iterator[_T_co]: ...
@@ -87,11 +103,17 @@ class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]):
def send(self, value: _T_contra) -> _T_co: ...
@abstractmethod
- def throw(self, typ: BaseException, val: Any = None, tb: Any = None) -> None: ...
+ def throw(self, typ: Type[BaseException], val: Optional[BaseException] = None,
+ # TODO: tb should be TracebackType but that's defined in types
+ tb: Any = None) -> None: ...
@abstractmethod
def close(self) -> None: ...
+ gi_code = ... # type: CodeType
+ gi_frame = ... # type: FrameType
+ gi_running = ... # type: bool
+
class Container(Generic[_T_co]):
@abstractmethod
def __contains__(self, x: object) -> bool: ...
@@ -119,8 +141,12 @@ class MutableSequence(Sequence[_T], Generic[_T]):
@overload
@abstractmethod
def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ...
+ @overload
@abstractmethod
- def __delitem__(self, i: Union[int, slice]) -> None: ...
+ def __delitem__(self, i: int) -> None: ...
+ @overload
+ @abstractmethod
+ def __delitem__(self, i: slice) -> None: ...
# Mixin methods
def append(self, object: _T) -> None: ...
def extend(self, iterable: Iterable[_T]) -> None: ...
@@ -144,8 +170,6 @@ class AbstractSet(Sized, Iterable[_T_co], Container[_T_co], Generic[_T_co]):
# TODO: argument can be any container?
def isdisjoint(self, s: AbstractSet[Any]) -> bool: ...
-class FrozenSet(AbstractSet[_T_co], Generic[_T_co]): ...
-
class MutableSet(AbstractSet[_T], Generic[_T]):
@abstractmethod
def add(self, x: _T) -> None: ...
@@ -201,7 +225,10 @@ class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]):
def __delitem__(self, v: _KT) -> None: ...
def clear(self) -> None: ...
- def pop(self, k: _KT, default: _VT = ...) -> _VT: ...
+ @overload
+ def pop(self, k: _KT) -> _VT: ...
+ @overload
+ def pop(self, k: _KT, default: Union[_VT, _T] = ...) -> Union[_VT, _T]: ...
def popitem(self) -> Tuple[_KT, _VT]: ...
def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
@overload
@@ -240,18 +267,18 @@ class IO(Iterator[AnyStr], Generic[AnyStr]):
@abstractmethod
def readlines(self, hint: int = ...) -> list[AnyStr]: ...
@abstractmethod
- def seek(self, offset: int, whence: int = ...) -> None: ...
+ def seek(self, offset: int, whence: int = ...) -> int: ...
@abstractmethod
def seekable(self) -> bool: ...
@abstractmethod
def tell(self) -> int: ...
@abstractmethod
- def truncate(self, size: int = ...) -> Optional[int]: ...
+ def truncate(self, size: Optional[int] = ...) -> int: ...
@abstractmethod
def writable(self) -> bool: ...
# TODO buffer objects
@abstractmethod
- def write(self, s: AnyStr) -> None: ...
+ def write(self, s: AnyStr) -> int: ...
@abstractmethod
def writelines(self, lines: Iterable[AnyStr]) -> None: ...
@@ -288,6 +315,8 @@ class TextIO(IO[unicode]):
@abstractmethod
def __enter__(self) -> TextIO: ...
+class ByteString(Sequence[int]): ...
+
class Match(Generic[AnyStr]):
pos = 0
endpos = 0
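
One effect of the new `MutableMapping.pop` overloads: a default whose type differs from the value type is now accepted. A sketch using Python 2 comment annotations, assuming the `typing` backport is installed:

    from typing import MutableMapping, Union

    def take(cache, key, fallback):
        # type: (MutableMapping[str, int], str, str) -> Union[int, str]
        return cache.pop(key, fallback)   # second overload: default may be another type

    print(take({'a': 1}, 'a', 'missing'))
    print(take({}, 'b', 'missing'))
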
diff --git a/typeshed/stdlib/2/unicodedata.pyi b/typeshed/stdlib/2/unicodedata.pyi
deleted file mode 100644
index 1faf16a..0000000
--- a/typeshed/stdlib/2/unicodedata.pyi
+++ /dev/null
@@ -1,40 +0,0 @@
-"""Stubs for the 'unicodedata' module."""
-
-from typing import Any, TypeVar, Union
-
-ucd_3_2_0 = ... # type: UCD
-unidata_version = ... # type: str
-# PyCapsule
-ucnhash_CAPI = ... # type: Any
-
-_default = TypeVar("_default")
-
-def bidirectional(unichr: unicode) -> str: ...
-def category(unichr: unicode) -> str: ...
-def combining(unichr: unicode) -> int: ...
-def decimal(chr: unicode, default: _default = ...) -> Union[int, _default]: ...
-def decomposition(unichr: unicode) -> str: ...
-def digit(chr: unicode, default: _default = ...) -> Union[int, _default]: ...
-def east_asian_width(unichr: unicode) -> str: ...
-def lookup(name: str) -> unicode: ...
-def mirrored(unichr: unicode) -> int: ...
-def name(chr: unicode, default: _default = ...) -> Union[str, _default]: ...
-def normalize(form: str, unistr: unicode) -> unicode: ...
-def numeric(chr, default: _default = ...) -> Union[float, _default]: ...
-
-class UCD(object):
- unidata_version = ... # type: str
- # The methods below are constructed from the same array in C
- # (unicodedata_functions) and hence identical to the methods above.
- def bidirectional(self, unichr: unicode) -> str: ...
- def category(self, unichr: unicode) -> str: ...
- def combining(self, unichr: unicode) -> int: ...
- def decimal(self, chr: unicode, default: _default = ...) -> Union[int, _default]: ...
- def decomposition(self, unichr: unicode) -> str: ...
- def digit(self, chr: unicode, default: _default = ...) -> Union[int, _default]: ...
- def east_asian_width(self, unichr: unicode) -> str: ...
- def lookup(self, name: str) -> unicode: ...
- def mirrored(self, unichr: unicode) -> int: ...
- def name(self, chr: unicode, default: _default = ...) -> Union[str, _default]: ...
- def normalize(self, form: str, unistr: unicode) -> unicode: ...
- def numeric(self, chr: unicode, default: _default = ...) -> Union[float, _default]: ...
diff --git a/typeshed/stdlib/2/unittest.pyi b/typeshed/stdlib/2/unittest.pyi
index 163ccce..52decfe 100644
--- a/typeshed/stdlib/2/unittest.pyi
+++ b/typeshed/stdlib/2/unittest.pyi
@@ -6,9 +6,10 @@
from typing import (
Any, Callable, Dict, Iterable, Tuple, List, TextIO, Sequence,
- overload, Set, TypeVar, Union, Pattern
+ overload, Set, FrozenSet, TypeVar, Union, Pattern, Type
)
from abc import abstractmethod, ABCMeta
+import types
_T = TypeVar('_T')
_FT = TypeVar('_FT')
@@ -41,7 +42,7 @@ class TestResult:
class _AssertRaisesBaseContext:
expected = ... # type: Any
- failureException = ... # type: type
+ failureException = ... # type: Type[BaseException]
obj_name = ... # type: str
expected_regex = ... # type: Pattern[str]
@@ -51,8 +52,8 @@ class _AssertRaisesContext(_AssertRaisesBaseContext):
def __exit__(self, exc_type, exc_value, tb) -> bool: ...
class TestCase(Testable):
+ failureException = ... # type: Type[BaseException]
def __init__(self, methodName: str = ...) -> None: ...
- # TODO failureException
def setUp(self) -> None: ...
def tearDown(self) -> None: ...
def run(self, result: TestResult = ...) -> None: ...
@@ -101,8 +102,8 @@ class TestCase(Testable):
msg: object = ...) -> None: ...
def assertTupleEqual(self, first: Tuple[Any, ...], second: Tuple[Any, ...],
msg: object = ...) -> None: ...
- def assertSetEqual(self, first: Set[Any], second: Set[Any],
- msg: object = ...) -> None: ...
+ def assertSetEqual(self, first: Union[Set[Any], FrozenSet[Any]],
+ second: Union[Set[Any], FrozenSet[Any]], msg: object = ...) -> None: ...
def assertDictEqual(self, first: Dict[Any, Any], second: Dict[Any, Any],
msg: object = ...) -> None: ...
def assertLess(self, first: Any, second: Any,
@@ -173,4 +174,4 @@ def main(module: str = ..., defaultTest: str = ...,
testLoader: Any = ...) -> None: ... # TODO types
# private but occasionally used
-util = ... # type: module
+util = ... # type: types.ModuleType
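
`assertSetEqual` now also admits frozensets on either side; a minimal test sketch:

    import unittest

    class SetCompat(unittest.TestCase):
        def test_frozenset_ok(self):
            self.assertSetEqual(frozenset([1, 2]), set([1, 2]))

    if __name__ == '__main__':
        unittest.main()
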
diff --git a/typeshed/stdlib/2/urllib2.pyi b/typeshed/stdlib/2/urllib2.pyi
index 728d298..6c1d30c 100644
--- a/typeshed/stdlib/2/urllib2.pyi
+++ b/typeshed/stdlib/2/urllib2.pyi
@@ -1,6 +1,10 @@
-from typing import AnyStr, Dict, List, Union
+import ssl
+from typing import Any, AnyStr, Dict, List, Union, Optional, Mapping, Callable, Sequence, Tuple
from urllib import addinfourl
+from httplib import HTTPResponse
+
+_string = Union[str, unicode]
class URLError(IOError):
reason = ... # type: Union[str, BaseException]
@@ -15,12 +19,12 @@ class Request(object):
data = ... # type: str
headers = ... # type: Dict[str, str]
unverifiable = ... # type: bool
- type = ...
+ type = ... # type: Optional[str]
origin_req_host = ...
unredirected_hdrs = ...
- def __init__(self, url: str, data: str = None, headers: Dict[str, str] = ...,
- origin_req_host: str = None, unverifiable: bool = ...) -> None: ...
+ def __init__(self, url: str, data: Optional[str] = ..., headers: Dict[str, str] = ...,
+ origin_req_host: Optional[str] = ..., unverifiable: bool = ...) -> None: ...
def __getattr__(self, attr): ...
def get_method(self) -> str: ...
def add_data(self, data) -> None: ...
@@ -40,19 +44,24 @@ class Request(object):
def get_header(self, header_name: str, default: str = None) -> str: ...
def header_items(self): ...
-class OpenerDirector(object): ...
+class OpenerDirector(object):
+ def add_handler(self, handler: BaseHandler) -> None: ...
+ def open(self, url: Union[Request, _string], data: Optional[_string] = ..., timeout: int = ...): ...
+ def error(self, proto: _string, *args: Any): ...
-def urlopen(url, data=None, timeout=..., cafile=None, capath=None, cadefault=False,
- context=None): ...
-def install_opener(opener): ...
-def build_opener(*handlers): ...
+def urlopen(url: Union[Request, _string], data: Optional[_string] = ..., timeout: int = ...,
+ cafile: Optional[_string] = ..., capath: Optional[_string] = ..., cadefault: bool = ...,
+ context: Optional[ssl.SSLContext] = ...): ...
+def install_opener(opener: OpenerDirector) -> None: ...
+def build_opener(*handlers: BaseHandler) -> OpenerDirector: ...
class BaseHandler:
- handler_order = ... # int
+ handler_order = ... # type: int
+ parent = ... # type: OpenerDirector
- def add_parent(self, parent) -> None: ...
+ def add_parent(self, parent: OpenerDirector) -> None: ...
def close(self) -> None: ...
- def __lt__(self, other) -> bool: ...
+ def __lt__(self, other: Any) -> bool: ...
class HTTPErrorProcessor(BaseHandler):
def http_response(self, request, response): ...
@@ -61,8 +70,8 @@ class HTTPDefaultErrorHandler(BaseHandler):
def http_error_default(self, req, fp, code, msg, hdrs): ...
class HTTPRedirectHandler(BaseHandler):
- max_repeats = ... # int
- max_redirections = ... # int
+ max_repeats = ... # type: int
+ max_redirections = ... # type: int
def redirect_request(self, req, fp, code, msg, headers, newurl): ...
def http_error_301(self, req, fp, code, msg, headers): ...
def http_error_302(self, req, fp, code, msg, headers): ...
@@ -77,16 +86,15 @@ class ProxyHandler(BaseHandler):
class HTTPPasswordMgr:
def __init__(self) -> None: ...
- def add_password(self, realm, uri, user, passwd): ...
- def find_user_password(self, realm, authuri): ...
- def reduce_uri(self, uri, default_port: bool): ...
- def is_suburi(self, base, test): ...
+ def add_password(self, realm: _string, uri: Union[_string, Sequence[_string]], user: _string, passwd: _string) -> None: ...
+ def find_user_password(self, realm: _string, authuri: _string) -> Tuple[Any, Any]: ...
+ def reduce_uri(self, uri: _string, default_port: bool = ...) -> Tuple[Any, Any]: ...
+ def is_suburi(self, base: _string, test: _string) -> bool: ...
class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): ...
class AbstractBasicAuthHandler:
- def __init__(self, password_mgr=None): ...
- def reset_retry_count(self): ...
+ def __init__(self, password_mgr: Optional[HTTPPasswordMgr] = ...) -> None: ...
def http_error_auth_reqed(self, authreq, host, req, headers): ...
def retry_http_basic_auth(self, host, req, realm): ...
@@ -99,27 +107,28 @@ class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
def http_error_407(self, req, fp, code, msg, headers): ...
class AbstractDigestAuthHandler:
- def __init__(self, passwd=None): ...
- def reset_retry_count(self): ...
- def http_error_auth_reqed(self, auth_header, host, req, headers): ...
- def retry_http_digest_auth(self, req, auth): ...
- def get_cnonce(self, nonce): ...
- def get_authorization(self, req, chal): ...
- def get_algorithm_impls(self, algorithm): ...
- def get_entity_digest(self, data, chal): ...
+ def __init__(self, passwd: Optional[HTTPPasswordMgr] = ...) -> None: ...
+ def reset_retry_count(self) -> None: ...
+ def http_error_auth_reqed(self, auth_header: str, host: str, req: Request,
+ headers: Mapping[str, str]) -> None: ...
+ def retry_http_digest_auth(self, req: Request, auth: str) -> Optional[HTTPResponse]: ...
+ def get_cnonce(self, nonce: str) -> str: ...
+ def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str: ...
+ def get_algorithm_impls(self, algorithm: str) -> Tuple[Callable[[str], str], Callable[[str, str], str]]: ...
+ def get_entity_digest(self, data: Optional[bytes], chal: Mapping[str, str]) -> Optional[str]: ...
class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
- auth_header = ... # str
- handler_order = ... # int
+ auth_header = ... # type: str
+ handler_order = ... # type: int
def http_error_401(self, req, fp, code, msg, headers): ...
class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
- auth_header = ... # str
- handler_order = ... # int
+ auth_header = ... # type: str
+ handler_order = ... # type: int
def http_error_407(self, req, fp, code, msg, headers): ...
class AbstractHTTPHandler(BaseHandler):
- def __init__(self, debuglevel: int) -> None: ...
+ def __init__(self, debuglevel: int=0) -> None: ...
def do_request_(self, request): ...
def do_open(self, http_class, req): ...
@@ -150,7 +159,7 @@ class FTPHandler(BaseHandler):
def connect_ftp(self, user, passwd, host, port, dirs, timeout): ...
class CacheFTPHandler(FTPHandler):
- def __init__(self): ...
+ def __init__(self) -> None: ...
def setTimeout(self, t): ...
def setMaxConns(self, m): ...
def check_cache(self): ...
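
A sketch of the opener plumbing whose signatures were filled in above; the URL is only a placeholder and the real request is left commented out:

    import urllib2

    opener = urllib2.build_opener(urllib2.HTTPRedirectHandler())   # -> OpenerDirector
    urllib2.install_opener(opener)
    # resp = opener.open('http://example.com/', timeout=10)        # would perform a real request
    req = urllib2.Request('http://example.com/', data=None, origin_req_host=None)
    print(req.get_method())   # 'GET' when no data is supplied
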
diff --git a/typeshed/stdlib/2/uuid.pyi b/typeshed/stdlib/2/uuid.pyi
deleted file mode 100644
index ce8a2a4..0000000
--- a/typeshed/stdlib/2/uuid.pyi
+++ /dev/null
@@ -1,36 +0,0 @@
-from typing import NamedTuple, Any, Tuple
-
-_int_type = int
-
-class _UUIDFields(NamedTuple('_UUIDFields',
- [('time_low', int), ('time_mid', int), ('time_hi_version', int), ('clock_seq_hi_variant', int), ('clock_seq_low', int), ('node', int)])):
- time = ... # type: int
- clock_seq = ... # type: int
-
-class UUID:
- def __init__(self, hex: str = ..., bytes: str = ..., bytes_le: str = ...,
- fields: Tuple[int, int, int, int, int, int] = ..., int: int = ..., version: Any = ...) -> None: ...
- bytes = ... # type: str
- bytes_le = ... # type: str
- fields = ... # type: _UUIDFields
- hex = ... # type: str
- int = ... # type: _int_type
- urn = ... # type: str
- variant = ... # type: _int_type
- version = ... # type: _int_type
-
-RESERVED_NCS = ... # type: int
-RFC_4122 = ... # type: int
-RESERVED_MICROSOFT = ... # type: int
-RESERVED_FUTURE = ... # type: int
-
-def getnode() -> int: ...
-def uuid1(node: int = ..., clock_seq: int = ...) -> UUID: ...
-def uuid3(namespace: UUID, name: str) -> UUID: ...
-def uuid4() -> UUID: ...
-def uuid5(namespace: UUID, name: str) -> UUID: ...
-
-NAMESPACE_DNS = ... # type: UUID
-NAMESPACE_URL = ... # type: UUID
-NAMESPACE_OID = ... # type: UUID
-NAMESPACE_X500 = ... # type: UUID
diff --git a/typeshed/stdlib/2/xmlrpclib.pyi b/typeshed/stdlib/2/xmlrpclib.pyi
new file mode 100644
index 0000000..9f6fb98
--- /dev/null
+++ b/typeshed/stdlib/2/xmlrpclib.pyi
@@ -0,0 +1,199 @@
+# Stubs for xmlrpclib (Python 2)
+
+from typing import Any, AnyStr, Callable, IO, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Type, TypeVar, Union
+from types import InstanceType
+from datetime import datetime
+from time import struct_time
+from httplib import HTTPConnection, HTTPResponse, HTTPSConnection
+from ssl import SSLContext
+from StringIO import StringIO
+from gzip import GzipFile
+
+_Unmarshaller = Any
+_timeTuple = Tuple[int, int, int, int, int, int, int, int, int]
+# Represents types that can be compared against a DateTime object
+_dateTimeComp = Union[AnyStr, DateTime, datetime, _timeTuple]
+# A "host description" used by Transport factories
+_hostDesc = Union[str, Tuple[str, Mapping[Any, Any]]]
+
+def escape(s: AnyStr, replace: Callable[[AnyStr, AnyStr, AnyStr], AnyStr] = ...) -> AnyStr: ...
+
+MAXINT = ... # type: int
+MININT = ... # type: int
+PARSE_ERROR = ... # type: int
+SERVER_ERROR = ... # type: int
+APPLICATION_ERROR = ... # type: int
+SYSTEM_ERROR = ... # type: int
+TRANSPORT_ERROR = ... # type: int
+NOT_WELLFORMED_ERROR = ... # type: int
+UNSUPPORTED_ENCODING = ... # type: int
+INVALID_ENCODING_CHAR = ... # type: int
+INVALID_XMLRPC = ... # type: int
+METHOD_NOT_FOUND = ... # type: int
+INVALID_METHOD_PARAMS = ... # type: int
+INTERNAL_ERROR = ... # type: int
+
+class Error(Exception): ...
+
+class ProtocolError(Error):
+ url = ... # type: str
+ errcode = ... # type: int
+ errmsg = ... # type: str
+ headers = ... # type: Any
+ def __init__(self, url: str, errcode: int, errmsg: str, headers: Any) -> None: ...
+
+class ResponseError(Error): ...
+
+class Fault(Error):
+ faultCode = ... # type: Any
+ faultString = ... # type: str
+ def __init__(self, faultCode: Any, faultString: str, **extra: Any) -> None: ...
+
+boolean = ... # type: Type[bool]
+Boolean = ... # type: Type[bool]
+
+class DateTime:
+ value = ... # type: str
+ def __init__(self, value: Union[str, unicode, datetime, float, int, _timeTuple, struct_time] = ...) -> None: ...
+ def make_comparable(self, other: _dateTimeComp) -> Tuple[_dateTimeComp, _dateTimeComp]: ...
+ def __lt__(self, other: _dateTimeComp) -> bool: ...
+ def __le__(self, other: _dateTimeComp) -> bool: ...
+ def __gt__(self, other: _dateTimeComp) -> bool: ...
+ def __ge__(self, other: _dateTimeComp) -> bool: ...
+ def __eq__(self, other: _dateTimeComp) -> bool: ...
+ def __ne__(self, other: _dateTimeComp) -> bool: ...
+ def timetuple(self) -> struct_time: ...
+ def __cmp__(self, other: _dateTimeComp) -> int: ...
+ def decode(self, data: Any) -> None: ...
+ def encode(self, out: IO) -> None: ...
+
+class Binary:
+ data = ... # type: str
+ def __init__(self, data: Optional[str] = ...) -> None: ...
+ def __cmp__(self, other: Any) -> int: ...
+ def decode(self, data: str) -> None: ...
+ def encode(self, out: IO) -> None: ...
+
+WRAPPERS = ... # type: tuple
+
+# Still part of the public API, but see http://bugs.python.org/issue1773632
+FastParser = ... # type: None
+FastUnmarshaller = ... # type: None
+FastMarshaller = ... # type: None
+
+# xmlrpclib.py will leave ExpatParser undefined if it can't import expat from
+# xml.parsers. Because this is Python 2.7, the import will succeed.
+class ExpatParser:
+ def __init__(self, target: _Unmarshaller) -> None: ...
+ def feed(self, data: str): ...
+ def close(self): ...
+
+# TODO: Add xmllib.XMLParser as base class
+class SlowParser:
+ handle_xml = ... # type: Callable[[str, bool], None]
+ unknown_starttag = ... # type: Callable[[str, Any], None]
+ handle_data = ... # type: Callable[[str], None]
+ handle_cdata = ... # type: Callable[[str], None]
+ unknown_endtag = ... # type: Callable[[str, Callable[[Iterable[str], str], str]], None]
+ def __init__(self, target: _Unmarshaller) -> None: ...
+
+class Marshaller:
+ memo = ... # type: MutableMapping[int, Any]
+ data = ... # type: Optional[str]
+ encoding = ... # type: Optional[str]
+ allow_none = ... # type: bool
+ def __init__(self, encoding: Optional[str] = ..., allow_none: bool = ...) -> None: ...
+ dispatch = ... # type: Mapping[type, Callable[[Marshaller, str, Callable[[str], None]], None]]
+ def dumps(self, values: Union[Iterable[Union[None, int, bool, long, float, str, unicode, List, Tuple, Mapping, datetime, InstanceType]], Fault]) -> str: ...
+ def dump_nil(self, value: None, write: Callable[[str], None]) -> None: ...
+ def dump_int(self, value: int, write: Callable[[str], None]) -> None: ...
+ def dump_bool(self, value: bool, write: Callable[[str], None]) -> None: ...
+ def dump_long(self, value: long, write: Callable[[str], None]) -> None: ...
+ def dump_double(self, value: float, write: Callable[[str], None]) -> None: ...
+ def dump_string(self, value: str, write: Callable[[str], None], escape: Callable[[AnyStr, Callable[[AnyStr, AnyStr, AnyStr], AnyStr]], AnyStr] = ...) -> None: ...
+ def dump_unicode(self, value: unicode, write: Callable[[str], None], escape: Callable[[AnyStr, Callable[[AnyStr, AnyStr, AnyStr], AnyStr]], AnyStr] = ...) -> None: ...
+ def dump_array(self, value: Union[List, Tuple], write: Callable[[str], None]) -> None: ...
+ def dump_struct(self, value: Mapping, write: Callable[[str], None], escape: Callable[[AnyStr, Callable[[AnyStr, AnyStr, AnyStr], AnyStr]], AnyStr] = ...) -> None: ...
+ def dump_datetime(self, value: datetime, write: Callable[[str], None]) -> None: ...
+ def dump_instance(self, value: InstanceType, write: Callable[[str], None]) -> None: ...
+
+class Unmarshaller:
+ def append(self, object: Any) -> None: ...
+ def __init__(self, use_datetime: bool = ...) -> None: ...
+ def close(self) -> tuple: ...
+ def getmethodname(self) -> Optional[str]: ...
+ def xml(self, encoding: str, standalone: bool) -> None: ...
+ def start(self, tag: str, attrs: Any) -> None: ...
+ def data(self, text: str) -> None: ...
+ def end(self, tag: str, join: Callable[[Iterable[str], str], str] = ...) -> None: ...
+ def end_dispatch(self, tag: str, data: str) -> None: ...
+ dispatch = ... # type: Mapping[str, Callable[[Unmarshaller, str], None]]
+ def end_nil(self, data: str): ...
+ def end_boolean(self, data: str) -> None: ...
+ def end_int(self, data: str) -> None: ...
+ def end_double(self, data: str) -> None: ...
+ def end_string(self, data: str) -> None: ...
+ def end_array(self, data: str) -> None: ...
+ def end_struct(self, data: str) -> None: ...
+ def end_base64(self, data: str) -> None: ...
+ def end_dateTime(self, data: str) -> None: ...
+ def end_value(self, data: str) -> None: ...
+ def end_params(self, data: str) -> None: ...
+ def end_fault(self, data: str) -> None: ...
+ def end_methodName(self, data: str) -> None: ...
+
+class _MultiCallMethod:
+ def __init__(self, call_list: List[Tuple[str, tuple]], name: str) -> None: ...
+class MultiCallIterator:
+ def __init__(self, results: List) -> None: ...
+
+class MultiCall:
+ def __init__(self, server: ServerProxy) -> None: ...
+ def __getattr__(self, name: str) -> _MultiCallMethod: ...
+ def __call__(self) -> MultiCallIterator: ...
+
+def getparser(use_datetime: bool = ...) -> Tuple[Union[ExpatParser, SlowParser], Unmarshaller]: ...
+def dumps(params: Union[tuple, Fault], methodname: Optional[str] = ..., methodresponse: Optional[bool] = ..., encoding: Optional[str] = ..., allow_none: bool = ...) -> str: ...
+def loads(data: str, use_datetime: bool = ...) -> Tuple[tuple, Optional[str]]: ...
+
+def gzip_encode(data: str) -> str: ...
+def gzip_decode(data: str, max_decode: int = ...) -> str: ...
+
+class GzipDecodedResponse(GzipFile):
+ stringio = ... # type: StringIO
+ def __init__(self, response: HTTPResponse) -> None: ...
+ def close(self): ...
+
+class _Method:
+ def __init__(self, send: Callable[[str, tuple], Any], name: str) -> None: ...
+ def __getattr__(self, name: str) -> _Method: ...
+ def __call__(self, *args: Any) -> Any: ...
+
+class Transport:
+ user_agent = ... # type: str
+ accept_gzip_encoding = ... # type: bool
+ encode_threshold = ... # type: Optional[int]
+ def __init__(self, use_datetime: bool = ...) -> None: ...
+ def request(self, host: _hostDesc, handler: str, request_body: str, verbose: bool = ...) -> tuple: ...
+ verbose = ... # type: bool
+ def single_request(self, host: _hostDesc, handler: str, request_body: str, verbose: bool = ...) -> tuple: ...
+ def getparser(self) -> Tuple[Union[ExpatParser, SlowParser], Unmarshaller]: ...
+ def get_host_info(self, host: _hostDesc) -> Tuple[str, Optional[List[Tuple[str, str]]], Optional[Mapping[Any, Any]]]: ...
+ def make_connection(self, host: _hostDesc) -> HTTPConnection: ...
+ def close(self) -> None: ...
+ def send_request(self, connection: HTTPConnection, handler: str, request_body: str) -> None: ...
+ def send_host(self, connection: HTTPConnection, host: str) -> None: ...
+ def send_user_agent(self, connection: HTTPConnection) -> None: ...
+ def send_content(self, connection: HTTPConnection, request_body: str) -> None: ...
+ def parse_response(self, response: HTTPResponse) -> tuple: ...
+
+class SafeTransport(Transport):
+ def __init__(self, use_datetime: bool = ..., context: Optional[SSLContext] = ...) -> None: ...
+ def make_connection(self, host: _hostDesc) -> HTTPSConnection: ...
+
+class ServerProxy:
+ def __init__(self, uri: str, transport: Optional[Transport] = ..., encoding: Optional[str] = ..., verbose: bool = ..., allow_none: bool = ..., use_datetime: bool = ..., context: Optional[SSLContext] = ...) -> None: ...
+ def __getattr__(self, name: str) -> _Method: ...
+ def __call__(self, attr: str) -> Optional[Transport]: ...
+
+Server = ServerProxy
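
A usage sketch for the new xmlrpclib stub; the endpoint URL is a placeholder and no request is actually issued:

    import xmlrpclib

    proxy = xmlrpclib.ServerProxy('http://localhost:8000/', allow_none=True)
    multi = xmlrpclib.MultiCall(proxy)
    multi.system.listMethods()                 # queued only; nothing is sent yet
    # for result in multi(): print(result)     # calling multi() would issue the batched request
    payload = xmlrpclib.dumps((1, 'two'), methodname='demo.echo')
    print(xmlrpclib.loads(payload))            # ((1, 'two'), 'demo.echo')
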
diff --git a/typeshed/stdlib/2/xxsubtype.pyi b/typeshed/stdlib/2/xxsubtype.pyi
deleted file mode 100644
index 56a183f..0000000
--- a/typeshed/stdlib/2/xxsubtype.pyi
+++ /dev/null
@@ -1,17 +0,0 @@
-"""Stub file for the 'xxsubtype' module."""
-
-from typing import Any
-
-def bench(obj: Any, name: str, n: int = ...) -> float: ...
-
-class spamdict(dict):
- state = ... # type: int
- def getstate(self) -> int: ...
- def setstate(self, a: int) -> None: ...
-
-class spamlist(list):
- state = ... # type: int
- def getstate(self) -> int: ...
- def setstate(self, a: int) -> None: ...
- def classmeth(self, *args, **kwargs) -> tuple: ...
- def staticmeth(self, *args, **kwargs) -> tuple: ...
diff --git a/typeshed/stdlib/2/zlib.pyi b/typeshed/stdlib/2/zlib.pyi
deleted file mode 100644
index a232cc5..0000000
--- a/typeshed/stdlib/2/zlib.pyi
+++ /dev/null
@@ -1,42 +0,0 @@
-# Stubs for zlib (Python 2.7)
-
-DEFLATED = ... # type: int
-DEF_MEM_LEVEL = ... # type: int
-MAX_WBITS = ... # type: int
-ZLIB_VERSION = ... # type: str
-Z_BEST_COMPRESSION = ... # type: int
-Z_BEST_SPEED = ... # type: int
-Z_DEFAULT_COMPRESSION = ... # type: int
-Z_DEFAULT_STRATEGY = ... # type: int
-Z_FILTERED = ... # type: int
-Z_FINISH = ... # type: int
-Z_FULL_FLUSH = ... # type: int
-Z_HUFFMAN_ONLY = ... # type: int
-Z_NO_FLUSH = ... # type: int
-Z_SYNC_FLUSH = ... # type: int
-
-
-class error(Exception): ...
-
-
-class Compress:
- def compress(self, data: str) -> str: ...
- def flush(self) -> str: ...
- def copy(self) -> "Compress": ...
-
-
-class Decompress:
- unused_data = ... # type: str
- unconsumed_tail = ... # type: str
- def decompress(self, data: str, max_length: int = ...) -> str: ...
- def flush(self) -> str: ...
- def copy(self) -> "Decompress": ...
-
-
-def adler32(data: str, value: int = ...) -> int: ...
-def compress(data: str, level: int = ...) -> str: ...
-def compressobj(level: int = ..., method: int = ..., wbits: int = ...,
- memlevel: int = ..., strategy: int = ...) -> Compress: ...
-def crc32(data: str, value: int = ...) -> int: ...
-def decompress(data: str, wbits: int = ..., bufsize: int = ...) -> str: ...
-def decompressobj(wbits: int = ...) -> Decompress: ...
diff --git a/typeshed/stdlib/3/__future__.pyi b/typeshed/stdlib/2and3/__future__.pyi
similarity index 100%
rename from typeshed/stdlib/3/__future__.pyi
rename to typeshed/stdlib/2and3/__future__.pyi
diff --git a/typeshed/stdlib/2and3/_bisect.pyi b/typeshed/stdlib/2and3/_bisect.pyi
index 4b6ad96..c5a5148 100644
--- a/typeshed/stdlib/2and3/_bisect.pyi
+++ b/typeshed/stdlib/2and3/_bisect.pyi
@@ -2,10 +2,10 @@
from typing import Any, Sequence, TypeVar
-T = TypeVar('T')
-def bisect(a: Sequence[T], x: T, lo: int = ..., hi: int = ...) -> int: ...
-def bisect_left(a: Sequence[T], x: T, lo: int = ..., hi: int = ...) -> int: ...
-def bisect_right(a: Sequence[T], x: T, lo: int = ..., hi: int = ...) -> int: ...
-def insort(a: Sequence[T], x: T, lo: int = ..., hi: int = ...) -> None: ...
-def insort_left(a: Sequence[T], x: T, lo: int = ..., hi: int = ...) -> None: ...
-def insort_right(a: Sequence[T], x: T, lo: int = ..., hi: int = ...) -> None: ...
+_T = TypeVar('_T')
+def bisect(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ...
+def bisect_left(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ...
+def bisect_right(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> int: ...
+def insort(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> None: ...
+def insort_left(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> None: ...
+def insort_right(a: Sequence[_T], x: _T, lo: int = ..., hi: int = ...) -> None: ...
diff --git a/typeshed/stdlib/2and3/_codecs.pyi b/typeshed/stdlib/2and3/_codecs.pyi
new file mode 100644
index 0000000..32163eb
--- /dev/null
+++ b/typeshed/stdlib/2and3/_codecs.pyi
@@ -0,0 +1,74 @@
+"""Stub file for the '_codecs' module."""
+
+import sys
+from typing import Any, Callable, Tuple, Optional, Dict, Text, Union
+
+import codecs
+
+# For convenience:
+_Handler = Callable[[Exception], Tuple[Text, int]]
+_String = Union[bytes, str]
+_Errors = Union[str, Text, None]
+if sys.version_info < (3, 0):
+ _Decodable = Union[bytes, Text]
+ _Encodable = Union[bytes, Text]
+else:
+ _Decodable = bytes
+ _Encodable = str
+
+# This type is not exposed; it is defined in unicodeobject.c
+class _EncodingMap(object):
+ def size(self) -> int: ...
+_MapT = Union[Dict[int, int], _EncodingMap]
+
+def register(search_function: Callable[[str], Any]) -> None: ...
+def register_error(errors: Union[str, Text], handler: _Handler) -> None: ...
+def lookup(encoding: Union[str, Text]) -> codecs.CodecInfo: ...
+def lookup_error(name: Union[str, Text]) -> _Handler: ...
+def decode(obj: Any, encoding: Union[str, Text] = ..., errors: _Errors = ...) -> Any: ...
+def encode(obj: Any, encoding: Union[str, Text] = ..., errors: _Errors = ...) -> Any: ...
+def charmap_build(map: Text) -> _MapT: ...
+
+def ascii_decode(data: _Decodable, errors: _Errors = ...) -> Tuple[Text, int]: ...
+def ascii_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+def charbuffer_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+def charmap_decode(data: _Decodable, errors: _Errors = ..., mapping: Optional[_MapT] = ...) -> Tuple[Text, int]: ...
+def charmap_encode(data: _Encodable, errors: _Errors, mapping: Optional[_MapT] = ...) -> Tuple[bytes, int]: ...
+def escape_decode(data: _String, errors: _Errors = ...) -> Tuple[str, int]: ...
+def escape_encode(data: bytes, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+def latin_1_decode(data: _Decodable, errors: _Errors = ...) -> Tuple[Text, int]: ...
+def latin_1_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+def raw_unicode_escape_decode(data: _String, errors: _Errors = ...) -> Tuple[Text, int]: ...
+def raw_unicode_escape_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+def readbuffer_encode(data: _String, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+def unicode_escape_decode(data: _String, errors: _Errors = ...) -> Tuple[Text, int]: ...
+def unicode_escape_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+def unicode_internal_decode(data: _String, errors: _Errors = ...) -> Tuple[Text, int]: ...
+def unicode_internal_encode(data: _String, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+def utf_16_be_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ...
+def utf_16_be_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+def utf_16_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ...
+def utf_16_encode(data: _Encodable, errors: _Errors = ..., byteorder: int = ...) -> Tuple[bytes, int]: ...
+def utf_16_ex_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int, int]: ...
+def utf_16_le_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ...
+def utf_16_le_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+def utf_32_be_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ...
+def utf_32_be_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+def utf_32_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ...
+def utf_32_encode(data: _Encodable, errors: _Errors = ..., byteorder: int = ...) -> Tuple[bytes, int]: ...
+def utf_32_ex_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int, int]: ...
+def utf_32_le_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ...
+def utf_32_le_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+def utf_7_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ...
+def utf_7_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+def utf_8_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ...
+def utf_8_encode(data: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+
+if sys.platform == 'win32':
+ def mbcs_decode(data: _Decodable, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ...
+ def mbcs_encode(str: _Encodable, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+ if sys.version_info >= (3, 0):
+ def oem_decode(data: bytes, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ...
+ def code_page_decode(codepage: int, data: bytes, errors: _Errors = ..., final: int = ...) -> Tuple[Text, int]: ...
+ def oem_encode(str: Text, errors: _Errors = ...) -> Tuple[bytes, int]: ...
+ def code_page_encode(code_page: int, str: Text, errors: _Errors = ...) -> Tuple[bytes, int]: ...
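
A brief sketch of the error-handler protocol that the _Handler alias above models, using the public codecs wrappers; the handler name 'question' is illustrative:

    import codecs

    # A handler matching _Handler: it receives the exception and returns
    # (replacement_text, position_to_resume_decoding_at).
    def replace_with_question(exc: UnicodeDecodeError) -> tuple:
        return ('?', exc.end)

    codecs.register_error('question', replace_with_question)
    print(b'caf\xff'.decode('ascii', errors='question'))  # -> 'caf?'
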
diff --git a/typeshed/stdlib/2/_random.pyi b/typeshed/stdlib/2and3/_random.pyi
similarity index 77%
rename from typeshed/stdlib/2/_random.pyi
rename to typeshed/stdlib/2and3/_random.pyi
index 060dcd2..a37149d 100644
--- a/typeshed/stdlib/2/_random.pyi
+++ b/typeshed/stdlib/2and3/_random.pyi
@@ -1,3 +1,6 @@
+# Stubs for _random
+
+import sys
from typing import Tuple
# Actually Tuple[(int,) * 625]
@@ -10,4 +13,5 @@ class Random(object):
def setstate(self, state: _State) -> None: ...
def random(self) -> float: ...
def getrandbits(self, k: int) -> int: ...
- def jumpahead(self, i: int) -> None: ...
+ if sys.version_info < (3,):
+ def jumpahead(self, i: int) -> None: ...
diff --git a/typeshed/stdlib/2and3/asynchat.pyi b/typeshed/stdlib/2and3/asynchat.pyi
index b5cf3c0..44e1f83 100644
--- a/typeshed/stdlib/2and3/asynchat.pyi
+++ b/typeshed/stdlib/2and3/asynchat.pyi
@@ -9,13 +9,13 @@ class simple_producer:
def __init__(self, data: str, buffer_size: int = ...) -> None: ...
def more(self) -> str: ...
-class async_chat (asyncore.dispatcher):
+class async_chat(asyncore.dispatcher):
ac_in_buffer_size = ... # type: int
ac_out_buffer_size = ... # type: int
def __init__(self, sock: socket.socket = None, map: asyncore._maptype = None) -> None: ...
@abstractmethod
- def collect_incoming_data(self, data: str) -> None: ...
+ def collect_incoming_data(self, data: bytes) -> None: ...
@abstractmethod
def found_terminator(self) -> None: ...
def set_terminator(self, term: Union[str, int, None]) -> None: ...
@@ -31,7 +31,7 @@ class async_chat (asyncore.dispatcher):
def initiate_send(self) -> None: ...
def discard_buffers(self) -> None: ...
-if sys.version_info < (3, 0, 0):
+if sys.version_info < (3, 0):
class fifo:
def __init__(self, list: Sequence[Union[str, simple_producer]] = ...) -> None: ...
def __len__(self) -> int: ...
diff --git a/typeshed/stdlib/2and3/asyncore.pyi b/typeshed/stdlib/2and3/asyncore.pyi
index c7bf793..4816da0 100644
--- a/typeshed/stdlib/2and3/asyncore.pyi
+++ b/typeshed/stdlib/2and3/asyncore.pyi
@@ -15,7 +15,7 @@ from errno import (EALREADY, EINPROGRESS, EWOULDBLOCK, ECONNRESET, EINVAL,
_maptype = Dict[str, Any]
-class ExitNow(Exception): pass
+class ExitNow(Exception): ...
def read(obj: Any) -> None: ...
def write(obj: Any) -> None: ...
diff --git a/typeshed/stdlib/3/base64.pyi b/typeshed/stdlib/2and3/base64.pyi
similarity index 90%
rename from typeshed/stdlib/3/base64.pyi
rename to typeshed/stdlib/2and3/base64.pyi
index 4e76ebb..70db6ad 100644
--- a/typeshed/stdlib/3/base64.pyi
+++ b/typeshed/stdlib/2and3/base64.pyi
@@ -1,10 +1,12 @@
# Stubs for base64
-from typing import IO, Union
+from typing import IO, Union, Text
import sys
-
-if sys.version_info <= (3, 2):
+if sys.version_info < (3,):
+ _encodable = Union[bytes, Text]
+ _decodable = Union[bytes, Text]
+elif sys.version_info < (3, 3):
_encodable = bytes
_decodable = bytes
elif sys.version_info[:2] == (3, 3):
diff --git a/typeshed/stdlib/2and3/binascii.pyi b/typeshed/stdlib/2and3/binascii.pyi
new file mode 100644
index 0000000..393c4af
--- /dev/null
+++ b/typeshed/stdlib/2and3/binascii.pyi
@@ -0,0 +1,45 @@
+# Stubs for binascii
+
+# Based on http://docs.python.org/3.2/library/binascii.html
+
+import sys
+from typing import Union, Text
+
+
+if sys.version_info < (3,):
+ # Python 2 accepts unicode ascii pretty much everywhere.
+ _Bytes = Union[bytes, Text]
+ _Ascii = Union[bytes, Text]
+elif sys.version_info < (3, 3):
+ # Python 3.2 and below only accept bytes.
+ _Bytes = bytes
+ _Ascii = bytes
+else:
+ # But since Python 3.3 ASCII-only unicode strings are accepted by the
+ # a2b_* functions.
+ _Bytes = bytes
+ _Ascii = Union[bytes, Text]
+
+def a2b_uu(string: _Ascii) -> bytes: ...
+def b2a_uu(data: _Bytes) -> bytes: ...
+def a2b_base64(string: _Ascii) -> bytes: ...
+if sys.version_info >= (3, 6):
+ def b2a_base64(data: _Bytes, *, newline: bool = ...) -> bytes: ...
+else:
+ def b2a_base64(data: _Bytes) -> bytes: ...
+def a2b_qp(string: _Ascii, header: bool = ...) -> bytes: ...
+def b2a_qp(data: _Bytes, quotetabs: bool = ..., istext: bool = ...,
+ header: bool = ...) -> bytes: ...
+def a2b_hqx(string: _Ascii) -> bytes: ...
+def rledecode_hqx(data: _Bytes) -> bytes: ...
+def rlecode_hqx(data: _Bytes) -> bytes: ...
+def b2a_hqx(data: _Bytes) -> bytes: ...
+def crc_hqx(data: _Bytes, crc: int) -> int: ...
+def crc32(data: _Bytes, crc: int = ...) -> int: ...
+def b2a_hex(data: _Bytes) -> bytes: ...
+def hexlify(data: _Bytes) -> bytes: ...
+def a2b_hex(hexstr: _Ascii) -> bytes: ...
+def unhexlify(hexstr: _Ascii) -> bytes: ...
+
+class Error(Exception): ...
+class Incomplete(Exception): ...
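
A quick sketch of the round-tripping these signatures describe; per the _Ascii alias above, the a2b_*/unhexlify side also accepts ASCII-only str on Python 3.3+ (the sample bytes are illustrative):

    import binascii

    data = b'\x00\xffhello'
    hexed = binascii.hexlify(data)          # b'00ff68656c6c6f'
    assert binascii.unhexlify(hexed) == data
    # Since Python 3.3 an ASCII-only str is accepted as well:
    assert binascii.unhexlify(hexed.decode('ascii')) == data
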
diff --git a/typeshed/stdlib/2and3/binhex.pyi b/typeshed/stdlib/2and3/binhex.pyi
new file mode 100644
index 0000000..40ead76
--- /dev/null
+++ b/typeshed/stdlib/2and3/binhex.pyi
@@ -0,0 +1,48 @@
+from typing import (
+ Any,
+ IO,
+ Tuple,
+ Union,
+)
+
+
+class Error(Exception): ...
+
+REASONABLY_LARGE = ... # type: int
+LINELEN = ... # type: int
+RUNCHAR = ... # type: bytes
+
+class FInfo:
+ def __init__(self) -> None: ...
+ Type = ... # type: str
+ Creator = ... # type: str
+ Flags = ... # type: int
+
+_FileInfoTuple = Tuple[str, FInfo, int, int]
+_FileHandleUnion = Union[str, IO[bytes]]
+
+def getfileinfo(name: str) -> _FileInfoTuple: ...
+
+class openrsrc:
+ def __init__(self, *args: Any) -> None: ...
+ def read(self, *args: Any) -> bytes: ...
+ def write(self, *args: Any) -> None: ...
+ def close(self) -> None: ...
+
+class BinHex:
+ def __init__(self, name_finfo_dlen_rlen: _FileInfoTuple, ofp: _FileHandleUnion) -> None: ...
+ def write(self, data: bytes) -> None: ...
+ def close_data(self) -> None: ...
+ def write_rsrc(self, data: bytes) -> None: ...
+ def close(self) -> None: ...
+
+def binhex(inp: str, out: str) -> None: ...
+
+class HexBin:
+ def __init__(self, ifp: _FileHandleUnion) -> None: ...
+ def read(self, *n: int) -> bytes: ...
+ def close_data(self) -> None: ...
+ def read_rsrc(self, *n: int) -> bytes: ...
+ def close(self) -> None: ...
+
+def hexbin(inp: str, out: str) -> None: ...
diff --git a/typeshed/stdlib/2and3/cgi.pyi b/typeshed/stdlib/2and3/cgi.pyi
new file mode 100644
index 0000000..e8a9fd9
--- /dev/null
+++ b/typeshed/stdlib/2and3/cgi.pyi
@@ -0,0 +1,119 @@
+import sys
+from typing import Any, AnyStr, Dict, IO, Iterable, List, Mapping, Optional, Tuple, TypeVar, Union
+
+_T = TypeVar('_T', bound=FieldStorage)
+
+def parse(fp: IO[Any] = ..., environ: Mapping[str, str] = ...,
+ keep_blank_values: bool = ..., strict_parsing: bool = ...) -> Dict[str, List[str]]: ...
+def parse_qs(qs: str, keep_blank_values: bool = ..., strict_parsing: bool = ...) -> Dict[str, List[str]]: ...
+def parse_qsl(qs: str, keep_blank_values: bool = ..., strict_parsing: bool = ...) -> Dict[str, List[str]]: ...
+def parse_multipart(fp: IO[Any], pdict: Mapping[str, bytes]) -> Dict[str, List[bytes]]: ...
+def parse_header(s: str) -> Tuple[str, Dict[str, str]]: ...
+def test(environ: Mapping[str, str] = ...) -> None: ...
+def print_environ(environ: Mapping[str, str] = ...) -> None: ...
+def print_form(form: Dict[str, Any]) -> None: ...
+def print_directory() -> None: ...
+def print_environ_usage() -> None: ...
+if sys.version_info >= (3, 0):
+ def escape(s: str, quote: bool = ...) -> str: ...
+else:
+ def escape(s: AnyStr, quote: bool = ...) -> AnyStr: ...
+
+
+class MiniFieldStorage:
+ # The first five "Any" attributes here are always None, but mypy doesn't support that
+ filename = ... # type: Any
+ list = ... # type: Any
+ type = ... # type: Any
+ file = ... # type: Optional[IO[bytes]] # Always None
+ type_options = ... # type: Dict[Any, Any]
+ disposition = ... # type: Any
+ disposition_options = ... # type: Dict[Any, Any]
+ headers = ... # type: Dict[Any, Any]
+ name = ... # type: Any
+ value = ... # type: Any
+
+ def __init__(self, name: Any, value: Any) -> None: ...
+ def __repr__(self) -> str: ...
+
+
+class FieldStorage(object):
+ FieldStorageClass = ... # type: Optional[type]
+ keep_blank_values = ... # type: int
+ strict_parsing = ... # type: int
+ qs_on_post = ... # type: Optional[str]
+ headers = ... # type: Mapping[str, str]
+ fp = ... # type: IO[bytes]
+ encoding = ... # type: str
+ errors = ... # type: str
+ outerboundary = ... # type: bytes
+ bytes_read = ... # type: int
+ limit = ... # type: Optional[int]
+ disposition = ... # type: str
+ disposition_options = ... # type: Dict[str, str]
+ filename = ... # type: Optional[str]
+ file = ... # type: Optional[IO[bytes]]
+ type = ... # type: str
+ type_options = ... # type: Dict[str, str]
+ innerboundary = ... # type: bytes
+ length = ... # type: int
+ done = ... # type: int
+ list = ... # type: Optional[List[Any]]
+ value = ... # type: Union[None, bytes, List[Any]]
+
+ if sys.version_info >= (3, 0):
+ def __init__(self, fp: IO[Any] = ..., headers: Mapping[str, str] = ..., outerboundary: bytes = ...,
+ environ: Mapping[str, str] = ..., keep_blank_values: int = ..., strict_parsing: int = ...,
+ limit: int = ..., encoding: str = ..., errors: str = ...) -> None: ...
+ else:
+ def __init__(self, fp: IO[Any] = ..., headers: Mapping[str, str] = ..., outerboundary: bytes = ...,
+ environ: Mapping[str, str] = ..., keep_blank_values: int = ..., strict_parsing: int = ...) -> None: ...
+
+ if sys.version_info >= (3, 0):
+ def __enter__(self: _T) -> _T: ...
+ def __exit__(self, *args: Any) -> None: ...
+ def __repr__(self) -> str: ...
+ def __iter__(self) -> Iterable[str]: ...
+ def __getitem__(self, key: str) -> Any: ...
+ def getvalue(self, key: str, default: Any = ...) -> Any: ...
+ def getfirst(self, key: str, default: Any = ...) -> Any: ...
+ def getlist(self, key: str) -> List[Any]: ...
+ def keys(self) -> List[str]: ...
+ if sys.version_info < (3, 0):
+ def has_key(self, key: str) -> bool: ...
+ def __contains__(self, key: str) -> bool: ...
+ def __len__(self) -> int: ...
+ if sys.version_info >= (3, 0):
+ def __bool__(self) -> bool: ...
+ else:
+ def __nonzero__(self) -> bool: ...
+ if sys.version_info >= (3, 0):
+ # In Python 3 it returns bytes or str IO depending on an internal flag
+ def make_file(self) -> IO[Any]: ...
+ else:
+ # In Python 2 it always returns bytes and ignores the "binary" flag
+ def make_file(self, binary: Any = ...) -> IO[bytes]: ...
+
+
+if sys.version_info < (3, 0):
+ from UserDict import UserDict
+
+ class FormContentDict(UserDict):
+ query_string = ... # type: str
+ def __init__(self, environ: Mapping[str, str] = ..., keep_blank_values: int = ..., strict_parsing: int = ...) -> None: ...
+
+ class SvFormContentDict(FormContentDict):
+ def getlist(self, key: Any) -> Any: ...
+
+ class InterpFormContentDict(SvFormContentDict): ...
+
+ class FormContent(FormContentDict):
+ # TODO this should have
+ # def values(self, key: Any) -> Any: ...
+ # but this is incompatible with the supertype, and adding '# type: ignore' triggers
+ # a parse error in pytype (https://github.com/google/pytype/issues/53)
+ def indexed_value(self, key: Any, location: int) -> Any: ...
+ def value(self, key: Any) -> Any: ...
+ def length(self, key: Any) -> int: ...
+ def stripped(self, key: Any) -> Any: ...
+ def pars(self) -> Dict[Any, Any]: ...
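
For reference, a small sketch of the header-parsing helper typed above (the header value is illustrative):

    import cgi

    # parse_header() splits a MIME-style header into its main value and a dict of parameters.
    value, params = cgi.parse_header('text/html; charset=utf-8')
    assert value == 'text/html'
    assert params == {'charset': 'utf-8'}
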
diff --git a/typeshed/stdlib/2and3/cmd.pyi b/typeshed/stdlib/2and3/cmd.pyi
new file mode 100644
index 0000000..dc305c9
--- /dev/null
+++ b/typeshed/stdlib/2and3/cmd.pyi
@@ -0,0 +1,41 @@
+# Stubs for cmd (Python 2/3)
+
+from typing import Any, Optional, Text, IO, List, Callable, Tuple
+
+class Cmd:
+ prompt = ... # type: str
+ identchars = ... # type: str
+ ruler = ... # type: str
+ lastcmd = ... # type: str
+ intro = ... # type: Optional[Any]
+ doc_leader = ... # type: str
+ doc_header = ... # type: str
+ misc_header = ... # type: str
+ undoc_header = ... # type: str
+ nohelp = ... # type: str
+ use_rawinput = ... # type: bool
+ stdin = ... # type: IO[str]
+ stdout = ... # type: IO[str]
+ cmdqueue = ... # type: List[str]
+ completekey = ... # type: str
+ def __init__(self, completekey: str = ..., stdin: Optional[IO[str]] = ..., stdout: Optional[IO[str]] = ...) -> None: ...
+ old_completer = ... # type: Optional[Callable[[str, int], Optional[str]]]
+ def cmdloop(self, intro: Optional[Any] = ...) -> None: ...
+ def precmd(self, line: str) -> str: ...
+ def postcmd(self, stop: bool, line: str) -> bool: ...
+ def preloop(self) -> None: ...
+ def postloop(self) -> None: ...
+ def parseline(self, line: str) -> Tuple[Optional[str], Optional[str], str]: ...
+ def onecmd(self, line: str) -> bool: ...
+ def emptyline(self) -> bool: ...
+ def default(self, line: str) -> bool: ...
+ def completedefault(self, *ignored: Any) -> List[str]: ...
+ def completenames(self, text: str, *ignored: Any) -> List[str]: ...
+ completion_matches = ... # type: Optional[List[str]]
+ def complete(self, text: str, state: int) -> Optional[List[str]]: ...
+ def get_names(self) -> List[str]: ...
+ # Only the first element of args matters.
+ def complete_help(self, *args: Any) -> List[str]: ...
+ def do_help(self, arg: Optional[str]) -> None: ...
+ def print_topics(self, header: str, cmds: Optional[List[str]], cmdlen: Any, maxcol: int) -> None: ...
+ def columnize(self, list: Optional[List[str]], displaywidth: int = ...) -> None: ...
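
A minimal Cmd subclass showing how the do_* hooks typed above drive cmdloop(); the command names are illustrative:

    from cmd import Cmd

    class Greeter(Cmd):
        prompt = '(greet) '

        def do_hello(self, arg: str) -> bool:
            print('hello ' + (arg or 'world'))
            return False                 # a false return keeps cmdloop() running

        def do_quit(self, arg: str) -> bool:
            return True                  # a true return stops cmdloop()

    if __name__ == '__main__':
        Greeter().cmdloop()
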
diff --git a/typeshed/stdlib/2/codecs.pyi b/typeshed/stdlib/2and3/codecs.pyi
similarity index 72%
rename from typeshed/stdlib/2/codecs.pyi
rename to typeshed/stdlib/2and3/codecs.pyi
index 4579378..b228d8a 100644
--- a/typeshed/stdlib/2/codecs.pyi
+++ b/typeshed/stdlib/2and3/codecs.pyi
@@ -1,34 +1,42 @@
# Better codecs stubs hand-written by o11c.
-# https://docs.python.org/2/library/codecs.html
+# https://docs.python.org/2/library/codecs.html and https://docs.python.org/3/library/codecs.html
+import sys
from typing import (
BinaryIO,
Callable,
+ IO,
Iterable,
Iterator,
List,
Optional,
+ Text,
+ TextIO,
Tuple,
+ Type,
+ TypeVar,
Union,
)
from abc import abstractmethod
+import types
# TODO: this only satisfies the most common interface, where
-# str is the raw form and unicode is the cooked form.
+# bytes (py2 str) is the raw form and str (py2 unicode) is the cooked form.
# In the long run, both should become template parameters maybe?
-# There *are* str->str and unicode->unicode encodings in the standard library.
-# And unlike python 3, they are in fairly widespread use.
+# There *are* bytes->bytes and str->str encodings in the standard library.
+# They are much more common in Python 2 than in Python 3.
+# Python 3.5 supposedly might change something there.
-_decoded = unicode
-_encoded = str
+_decoded = Text
+_encoded = bytes
# TODO: It is not possible to specify these signatures correctly, because
# they have an optional positional or keyword argument for errors=.
_encode_type = Callable[[_decoded], _encoded] # signature of Codec().encode
_decode_type = Callable[[_encoded], _decoded] # signature of Codec().decode
-_stream_reader_type = Callable[[BinaryIO], 'StreamReader'] # signature of StreamReader __init__
-_stream_writer_type = Callable[[BinaryIO], 'StreamWriter'] # signature of StreamWriter __init__
+_stream_reader_type = Callable[[IO[_encoded]], 'StreamReader'] # signature of StreamReader __init__
+_stream_writer_type = Callable[[IO[_encoded]], 'StreamWriter'] # signature of StreamWriter __init__
_incremental_encoder_type = Callable[[], 'IncrementalEncoder'] # signature of IncrementalEncoder __init__
_incremental_decoder_type = Callable[[], 'IncrementalDecoder'] # signature of IncrementalDecoder __init__
@@ -41,7 +49,6 @@ def decode(obj: _encoded, encoding: str = ..., errors: str = ...) -> _decoded:
def lookup(encoding: str) -> 'CodecInfo':
...
class CodecInfo(Tuple[_encode_type, _decode_type, _stream_reader_type, _stream_writer_type]):
- def __init__(self, encode: _encode_type, decode: _decode_type, streamreader: _stream_reader_type = ..., streamwriter: _stream_writer_type = ..., incrementalencoder: _incremental_encoder_type = ..., incrementaldecoder: _incremental_decoder_type = ..., name: str = ...) -> None: ...
encode = ... # type: _encode_type
decode = ... # type: _decode_type
streamreader = ... # type: _stream_reader_type
@@ -49,6 +56,7 @@ class CodecInfo(Tuple[_encode_type, _decode_type, _stream_reader_type, _stream_w
incrementalencoder = ... # type: _incremental_encoder_type
incrementaldecoder = ... # type: _incremental_decoder_type
name = ... # type: str
+ def __init__(self, encode: _encode_type, decode: _decode_type, streamreader: _stream_reader_type = ..., streamwriter: _stream_writer_type = ..., incrementalencoder: _incremental_encoder_type = ..., incrementaldecoder: _incremental_decoder_type = ..., name: str = ...) -> None: ...
def getencoder(encoding: str) -> _encode_type:
...
@@ -69,7 +77,7 @@ def register(search_function: Callable[[str], CodecInfo]) -> None:
def open(filename: str, mode: str = ..., encoding: str = ..., errors: str = ..., buffering: int = ...) -> StreamReaderWriter:
...
-def EncodedFile(file: BinaryIO, data_encoding: str, file_encoding: str = ..., errors: Optional[str] = ...) -> 'StreamRecoder':
+def EncodedFile(file: IO[_encoded], data_encoding: str, file_encoding: str = ..., errors: str = ...) -> 'StreamRecoder':
...
def iterencode(iterator: Iterable[_decoded], encoding: str, errors: str = ...) -> Iterator[_encoded]:
@@ -155,7 +163,7 @@ class BufferedIncrementalEncoder(IncrementalEncoder):
def encode(self, input: _decoded, final: bool = ...) -> _encoded:
...
class BufferedIncrementalDecoder(IncrementalDecoder):
- buffer = ... # type: str
+ buffer = ... # type: bytes
def __init__(self, errors: str = ...) -> None:
...
@abstractmethod
@@ -168,18 +176,18 @@ class BufferedIncrementalDecoder(IncrementalDecoder):
# attributes and methods are passed-through from the stream.
class StreamWriter(Codec):
errors = ... # type: str
- def __init__(self, stream: BinaryIO, errors: str = ...) -> None:
+ def __init__(self, stream: IO[_encoded], errors: str = ...) -> None:
...
def write(self, obj: _decoded) -> None:
...
- def writelines(self, list: List[str]) -> None:
+ def writelines(self, list: Iterable[_decoded]) -> None:
...
def reset(self) -> None:
...
class StreamReader(Codec):
errors = ... # type: str
- def __init__(self, stream: BinaryIO, errors: str = ...) -> None:
+ def __init__(self, stream: IO[_encoded], errors: str = ...) -> None:
...
def read(self, size: int = ..., chars: int = ..., firstline: bool = ...) -> _decoded:
...
@@ -190,14 +198,26 @@ class StreamReader(Codec):
def reset(self) -> None:
...
-class StreamReaderWriter:
- def __init__(self, stream: BinaryIO, Reader: _stream_reader_type, Writer: _stream_writer_type, errors: str = ...) -> None:
- ...
- def __enter__(self) -> BinaryIO:
- ...
- def __exit__(self, typ, exc, tb) -> bool:
- ...
+_T = TypeVar('_T', bound='StreamReaderWriter')
+
+# Doesn't actually inherit from TextIO, but wraps a BinaryIO to provide text reading and writing
+# and delegates attributes to the underlying binary stream with __getattr__.
+class StreamReaderWriter(TextIO):
+ def __init__(self, stream: IO[_encoded], Reader: _stream_reader_type, Writer: _stream_writer_type, errors: str = ...) -> None: ...
+ def read(self, size: int = ...) -> _decoded: ...
+ def readline(self, size: Optional[int] = ...) -> _decoded: ...
+ def readlines(self, sizehint: Optional[int] = ...) -> List[_decoded]: ...
+ def __next__(self) -> _decoded: ...
+ def __iter__(self: _T) -> _T: ...
+ # This actually returns None, but that's incompatible with the supertype
+ def write(self, data: _decoded) -> int: ...
+ def writelines(self, list: Iterable[_decoded]) -> None: ...
+ def reset(self) -> None: ...
+ # Like write(), this actually returns None at runtime
+ def seek(self, offset: int, whence: int = ...) -> int: ...
+ def __enter__(self: _T) -> _T: ...
+ def __exit__(self, typ: Optional[Type[BaseException]], exc: Optional[BaseException], tb: Optional[types.TracebackType]) -> bool: ...
class StreamRecoder(BinaryIO):
- def __init__(self, stream: BinaryIO, encode: _encode_type, decode: _decode_type, Reader: _stream_reader_type, Writer: _stream_writer_type, errors: str = ...) -> None:
+ def __init__(self, stream: IO[_encoded], encode: _encode_type, decode: _decode_type, Reader: _stream_reader_type, Writer: _stream_writer_type, errors: str = ...) -> None:
...
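
A short sketch of the CodecInfo plumbing these stubs describe; the encoding name and file name are just examples:

    import codecs

    info = codecs.lookup('utf-8')                  # CodecInfo, as typed above
    text, consumed = info.decode(b'caf\xc3\xa9')   # ('café', 5)
    data, written = info.encode(text)              # (b'caf\xc3\xa9', 4)

    # StreamReaderWriter, returned by codecs.open(), wraps the binary stream:
    # with codecs.open('notes.txt', 'w', encoding='utf-8') as f:
    #     f.write(text)
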
diff --git a/typeshed/stdlib/2and3/contextlib.pyi b/typeshed/stdlib/2and3/contextlib.pyi
index 0f14afa..6ff18d8 100644
--- a/typeshed/stdlib/2and3/contextlib.pyi
+++ b/typeshed/stdlib/2and3/contextlib.pyi
@@ -20,7 +20,12 @@ class ContextManager(Generic[_T]):
exc_val: Optional[Exception],
exc_tb: Optional[TracebackType]) -> bool: ...
-def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., ContextManager[_T]]: ...
+if sys.version_info >= (3, 2):
+ class GeneratorContextManager(Generic[_T], ContextManager[_T]):
+ def __call__(self, func: Callable[..., _T]) -> Callable[..., _T]: ...
+ def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., GeneratorContextManager[_T]]: ...
+else:
+ def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., ContextManager[_T]]: ...
if sys.version_info < (3,):
def nested(*mgr: ContextManager[Any]) -> ContextManager[Iterable[Any]]: ...
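
A small sketch of the decorator usage that the GeneratorContextManager return type above models (the label is a placeholder):

    import time
    from contextlib import contextmanager
    from typing import Iterator

    @contextmanager
    def timed(label: str) -> Iterator[None]:
        start = time.time()
        try:
            yield                        # the with-block body runs here
        finally:
            print('{0}: {1:.3f}s'.format(label, time.time() - start))

    with timed('work'):
        sum(range(1000))
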
diff --git a/typeshed/stdlib/2and3/copy.pyi b/typeshed/stdlib/2and3/copy.pyi
new file mode 100644
index 0000000..523802a
--- /dev/null
+++ b/typeshed/stdlib/2and3/copy.pyi
@@ -0,0 +1,14 @@
+# Stubs for copy
+
+from typing import TypeVar, Optional, Dict, Any
+
+_T = TypeVar('_T')
+
+# None in CPython but non-None in Jython
+PyStringMap: Any
+
+# Note: memo and _nil are internal kwargs.
+def deepcopy(x: _T, memo: Optional[Dict[int, _T]] = ..., _nil: Any = ...) -> _T: ...
+def copy(x: _T) -> _T: ...
+class Error(Exception): ...
+error = Error
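
A tiny sketch of the copy/deepcopy distinction these signatures preserve:

    import copy

    original = {'nums': [1, 2, 3]}
    shallow = copy.copy(original)        # copy() returns the same _T; the inner list is shared
    deep = copy.deepcopy(original)       # deepcopy() recursively copies the inner list
    deep['nums'].append(4)
    assert original['nums'] == [1, 2, 3]
    assert shallow['nums'] is original['nums']
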
diff --git a/typeshed/stdlib/2and3/dis.pyi b/typeshed/stdlib/2and3/dis.pyi
new file mode 100644
index 0000000..9aeb7dd
--- /dev/null
+++ b/typeshed/stdlib/2and3/dis.pyi
@@ -0,0 +1,75 @@
+from typing import List, Union, Iterator, Tuple, Optional, Any, IO, NamedTuple, Dict
+
+import sys
+import types
+
+from opcode import (hasconst as hasconst, hasname as hasname, hasjrel as hasjrel,
+ hasjabs as hasjabs, haslocal as haslocal, hascompare as hascompare,
+ hasfree as hasfree, cmp_op as cmp_op, opname as opname, opmap as opmap,
+ HAVE_ARGUMENT as HAVE_ARGUMENT, EXTENDED_ARG as EXTENDED_ARG)
+
+if sys.version_info >= (3, 4):
+ from opcode import stack_effect as stack_effect
+
+if sys.version_info >= (3, 6):
+ from opcode import hasnargs as hasnargs
+
+_have_code = Union[types.MethodType, types.FunctionType, types.CodeType, type]
+_have_code_or_string = Union[_have_code, str, bytes]
+
+
+if sys.version_info >= (3, 4):
+ Instruction = NamedTuple(
+ "Instruction",
+ [
+ ('opname', str),
+ ('opcode', int),
+ ('arg', Optional[int]),
+ ('argval', Any),
+ ('argrepr', str),
+ ('offset', int),
+ ('starts_line', Optional[int]),
+ ('is_jump_target', bool)
+ ]
+ )
+
+ class Bytecode:
+ codeobj = ... # type: types.CodeType
+ first_line = ... # type: int
+ def __init__(self, x: _have_code_or_string, *, first_line: Optional[int] = ...,
+ current_offset: Optional[int] = ...) -> None: ...
+ def __iter__(self) -> Iterator[Instruction]: ...
+ def __repr__(self) -> str: ...
+ def info(self) -> str: ...
+ def dis(self) -> str: ...
+
+ @classmethod
+ def from_traceback(cls, tb: types.TracebackType) -> Bytecode: ...
+
+
+COMPILER_FLAG_NAMES = ... # type: Dict[int, str]
+
+
+def findlabels(code: _have_code) -> List[int]: ...
+def findlinestarts(code: _have_code) -> Iterator[Tuple[int, int]]: ...
+
+if sys.version_info >= (3, 0):
+ def pretty_flags(flags: int) -> str: ...
+ def code_info(x: _have_code_or_string) -> str: ...
+
+if sys.version_info >= (3, 4):
+ def dis(x: _have_code_or_string = ..., *, file: Optional[IO[str]] = ...) -> None: ...
+ def distb(tb: Optional[types.TracebackType] = ..., *, file: Optional[IO[str]] = ...) -> None: ...
+ def disassemble(co: _have_code, lasti: int = ..., *, file: Optional[IO[str]] = ...) -> None: ...
+ def disco(co: _have_code, lasti: int = ..., *, file: Optional[IO[str]] = ...) -> None: ...
+ def show_code(co: _have_code, *, file: Optional[IO[str]] = ...) -> None: ...
+
+ def get_instructions(x: _have_code, *, first_line: Optional[int] = ...) -> Iterator[Instruction]: ...
+else:
+ def dis(x: _have_code_or_string = ...) -> None: ...
+ def distb(tb: types.TracebackType = ...) -> None: ...
+ def disassemble(co: _have_code, lasti: int = ...) -> None: ...
+ def disco(co: _have_code, lasti: int = ...) -> None: ...
+
+ if sys.version_info >= (3, 0):
+ def show_code(co: _have_code) -> None: ...
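
For reference, a sketch of the Python 3.4+ Bytecode/Instruction API typed above (the sample function is arbitrary):

    import dis

    def add(a, b):
        return a + b

    # Bytecode wraps a code object; iterating it yields Instruction namedtuples.
    for instr in dis.Bytecode(add):
        print(instr.opname, instr.argrepr)
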
diff --git a/typeshed/stdlib/2and3/distutils/core.pyi b/typeshed/stdlib/2and3/distutils/core.pyi
index 38d6a1a..d513c2b 100644
--- a/typeshed/stdlib/2and3/distutils/core.pyi
+++ b/typeshed/stdlib/2and3/distutils/core.pyi
@@ -29,7 +29,20 @@ def setup(name: str = ...,
platforms: Union[List[str], str] = ...,
cmdclass: Mapping[str, Command] = ...,
data_files: List[Tuple[str, List[str]]] = ...,
- package_dir: Mapping[str, str] = ...) -> None: ...
+ package_dir: Mapping[str, str] = ...,
+ obsoletes: List[str] = ...,
+ provides: List[str] = ...,
+ requires: List[str] = ...,
+ command_packages: List[str] = ...,
+ command_options: Mapping[str, Mapping[str, Tuple[Any, Any]]] = ...,
+ package_data: Mapping[str, List[str]] = ...,
+ libraries: List[str] = ...,
+ headers: List[str] = ...,
+ ext_package: str = ...,
+ include_dirs: List[str] = ...,
+ password: str = ...,
+ fullname: str = ...) -> None: ...
+
def run_setup(script_name: str,
script_args: Optional[List[str]] = ...,
stop_after: str = ...) -> Distribution: ...
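
An illustrative setup.py-style call touching a few of the newly covered keyword arguments; all names and metadata are placeholders:

    from distutils.core import setup

    setup(
        name='example-pkg',
        version='0.1',
        packages=['example_pkg'],
        requires=['requests'],                          # newly typed kwarg
        package_data={'example_pkg': ['data/*.json']},  # newly typed kwarg
        obsoletes=['old-example-pkg'],                  # newly typed kwarg
    )
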
diff --git a/typeshed/stdlib/2and3/filecmp.pyi b/typeshed/stdlib/2and3/filecmp.pyi
new file mode 100644
index 0000000..cc0a621
--- /dev/null
+++ b/typeshed/stdlib/2and3/filecmp.pyi
@@ -0,0 +1,48 @@
+# Stubs for filecmp (Python 2/3)
+import sys
+from typing import AnyStr, Callable, Dict, Generic, Iterable, List, Optional, Sequence, Tuple, Union, Text
+
+DEFAULT_IGNORES = ... # type: List[str]
+
+def cmp(f1: Union[bytes, Text], f2: Union[bytes, Text], shallow: Union[int, bool] = ...) -> bool: ...
+def cmpfiles(a: AnyStr, b: AnyStr, common: Iterable[AnyStr],
+ shallow: Union[int, bool] = ...) -> Tuple[List[AnyStr], List[AnyStr], List[AnyStr]]: ...
+
+class dircmp(Generic[AnyStr]):
+ def __init__(self, a: AnyStr, b: AnyStr,
+ ignore: Optional[Sequence[AnyStr]] = ...,
+ hide: Optional[Sequence[AnyStr]] = ...) -> None: ...
+
+ left = ... # type: AnyStr
+ right = ... # type: AnyStr
+ hide = ... # type: Sequence[AnyStr]
+ ignore = ... # type: Sequence[AnyStr]
+
+ # These properties are created at runtime by __getattr__
+ subdirs = ... # type: Dict[AnyStr, dircmp[AnyStr]]
+ same_files = ... # type: List[AnyStr]
+ diff_files = ... # type: List[AnyStr]
+ funny_files = ... # type: List[AnyStr]
+ common_dirs = ... # type: List[AnyStr]
+ common_files = ... # type: List[AnyStr]
+ common_funny = ... # type: List[AnyStr]
+ common = ... # type: List[AnyStr]
+ left_only = ... # type: List[AnyStr]
+ right_only = ... # type: List[AnyStr]
+ left_list = ... # type: List[AnyStr]
+ right_list = ... # type: List[AnyStr]
+
+ def report(self) -> None: ...
+ def report_partial_closure(self) -> None: ...
+ def report_full_closure(self) -> None: ...
+
+ methodmap = ... # type: Dict[str, Callable[[], None]]
+ def phase0(self) -> None: ...
+ def phase1(self) -> None: ...
+ def phase2(self) -> None: ...
+ def phase3(self) -> None: ...
+ def phase4(self) -> None: ...
+ def phase4_closure(self) -> None: ...
+
+if sys.version_info >= (3,):
+ def clear_cache() -> None: ...
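
A brief sketch of dircmp; the attributes noted above as created by __getattr__ are computed lazily on first access (the directory names are placeholders):

    import filecmp

    comparison = filecmp.dircmp('dir_a', 'dir_b')
    print(comparison.same_files)   # names identical in both directories
    print(comparison.left_only)    # names present only in dir_a
    comparison.report()            # prints a human-readable summary
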
diff --git a/typeshed/stdlib/2and3/fractions.pyi b/typeshed/stdlib/2and3/fractions.pyi
index 66408fb..90771a6 100644
--- a/typeshed/stdlib/2and3/fractions.pyi
+++ b/typeshed/stdlib/2and3/fractions.pyi
@@ -30,11 +30,11 @@ class Fraction(Rational):
*,
_normalize: bool = True) -> None: ...
@overload
- def __init__(self, value: float, *, _normalize=True) -> None: ...
+ def __init__(self, value: float, *, _normalize: bool = True) -> None: ...
@overload
- def __init__(self, value: Decimal, *, _normalize=True) -> None: ...
+ def __init__(self, value: Decimal, *, _normalize: bool = True) -> None: ...
@overload
- def __init__(self, value: str, *, _normalize=True) -> None: ...
+ def __init__(self, value: str, *, _normalize: bool = True) -> None: ...
@classmethod
def from_float(cls, f: float) -> 'Fraction': ...
diff --git a/typeshed/stdlib/2and3/ftplib.pyi b/typeshed/stdlib/2and3/ftplib.pyi
new file mode 100644
index 0000000..308544d
--- /dev/null
+++ b/typeshed/stdlib/2and3/ftplib.pyi
@@ -0,0 +1,134 @@
+# Stubs for ftplib (Python 2.7/3)
+import sys
+from typing import Optional, BinaryIO, Tuple, TextIO, Iterable, Callable, List, Union, Iterator, Dict, Text, TypeVar, Generic
+from types import TracebackType
+from socket import socket
+from ssl import SSLContext
+
+_T = TypeVar('_T')
+_IntOrStr = Union[int, Text]
+
+MSG_OOB = ... # type: int
+FTP_PORT = ... # type: int
+MAXLINE = ... # type: int
+CRLF = ... # type: str
+if sys.version_info >= (3,):
+ B_CRLF = ... # type: bytes
+
+class Error(Exception): ...
+class error_reply(Error): ...
+class error_temp(Error): ...
+class error_perm(Error): ...
+class error_proto(Error): ...
+
+all_errors = Tuple[Exception, ...]
+
+class FTP:
+ debugging = ... # type: int
+
+ # Note: This is technically the type that's passed in as the host argument. But to make it easier in Python 2 we
+ # accept Text but return str.
+ host = ... # type: str
+
+ port = ... # type: int
+ maxline = ... # type: int
+ sock = ... # type: Optional[socket]
+ welcome = ... # type: Optional[str]
+ passiveserver = ... # type: int
+ timeout = ... # type: int
+ af = ... # type: int
+ lastresp = ... # type: str
+
+ if sys.version_info >= (3,):
+ file = ... # type: Optional[TextIO]
+ encoding = ... # type: str
+ def __enter__(self: _T) -> _T: ...
+ def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception],
+ exc_tb: Optional[TracebackType]) -> bool: ...
+ else:
+ file = ... # type: Optional[BinaryIO]
+
+ if sys.version_info >= (3, 3):
+ source_address = ... # type: Optional[Tuple[str, int]]
+ def __init__(self, host: Text = ..., user: Text = ..., passwd: Text = ..., acct: Text = ...,
+ timeout: float = ..., source_address: Optional[Tuple[str, int]] = ...) -> None: ...
+ def connect(self, host: Text = ..., port: int = ..., timeout: float = ...,
+ source_address: Optional[Tuple[str, int]] = ...) -> str: ...
+ else:
+ def __init__(self, host: Text = ..., user: Text = ..., passwd: Text = ..., acct: Text = ...,
+ timeout: float = ...) -> None: ...
+ def connect(self, host: Text = ..., port: int = ..., timeout: float = ...) -> str: ...
+
+ def getwelcome(self) -> str: ...
+ def set_debuglevel(self, level: int) -> None: ...
+ def debug(self, level: int) -> None: ...
+ def set_pasv(self, val: Union[bool, int]) -> None: ...
+ def sanitize(self, s: Text) -> str: ...
+ def putline(self, line: Text) -> None: ...
+ def putcmd(self, line: Text) -> None: ...
+ def getline(self) -> str: ...
+ def getmultiline(self) -> str: ...
+ def getresp(self) -> str: ...
+ def voidresp(self) -> str: ...
+ def abort(self) -> str: ...
+ def sendcmd(self, cmd: Text) -> str: ...
+ def voidcmd(self, cmd: Text) -> str: ...
+ def sendport(self, host: Text, port: int) -> str: ...
+ def sendeprt(self, host: Text, port: int) -> str: ...
+ def makeport(self) -> socket: ...
+ def makepasv(self) -> Tuple[str, int]: ...
+ def login(self, user: Text = ..., passwd: Text = ..., acct: Text = ...) -> str: ...
+
+ # In practice, `rest` can actually be anything whose str() is an integer sequence, so to make it simple we allow integers.
+ def ntransfercmd(self, cmd: Text, rest: Optional[_IntOrStr] = ...) -> Tuple[socket, int]: ...
+ def transfercmd(self, cmd: Text, rest: Optional[_IntOrStr] = ...) -> socket: ...
+ def retrbinary(self, cmd: Text, callback: Callable[[bytes], None], blocksize: int = ..., rest: Optional[_IntOrStr] = ...) -> str: ...
+ def storbinary(self, cmd: Text, fp: BinaryIO, blocksize: int = ..., callback: Optional[Callable[[bytes], None]] = ..., rest: Optional[_IntOrStr] = ...) -> str: ...
+
+ def retrlines(self, cmd: Text, callback: Optional[Callable[[str], None]] = ...) -> str: ...
+ def storlines(self, cmd: Text, fp: BinaryIO, callback: Optional[Callable[[bytes], None]] = ...) -> str: ...
+
+ def acct(self, password: Text) -> str: ...
+ def nlst(self, *args: Text) -> List[str]: ...
+
+ # Technically only the last arg can be a Callable but ...
+ def dir(self, *args: Union[str, Callable[[str], None]]) -> None: ...
+
+ if sys.version_info >= (3, 3):
+ def mlsd(self, path: Text = ..., facts: Iterable[str] = ...) -> Iterator[Tuple[str, Dict[str, str]]]: ...
+ def rename(self, fromname: Text, toname: Text) -> str: ...
+ def delete(self, filename: Text) -> str: ...
+ def cwd(self, dirname: Text) -> str: ...
+ def size(self, filename: Text) -> str: ...
+ def mkd(self, dirname: Text) -> str: ...
+ def rmd(self, dirname: Text) -> str: ...
+ def pwd(self) -> str: ...
+ def quit(self) -> str: ...
+ def close(self) -> None: ...
+
+class FTP_TLS(FTP):
+ def __init__(self, host: Text = ..., user: Text = ..., passwd: Text = ..., acct: Text = ...,
+ keyfile: Optional[str] = ..., certfile: Optional[str] = ...,
+ context: Optional[SSLContext] = ..., timeout: float = ...,
+ source_address: Optional[Tuple[str, int]] = ...) -> None: ...
+
+ ssl_version = ... # type: int
+ keyfile = ... # type: Optional[str]
+ certfile = ... # type: Optional[str]
+ context = ... # type: SSLContext
+
+ def login(self, user: Text = ..., passwd: Text = ..., acct: Text = ..., secure: bool = ...) -> str: ...
+ def auth(self) -> str: ...
+ def prot_p(self) -> str: ...
+ def prot_c(self) -> str: ...
+
+ if sys.version_info >= (3, 3):
+ def ccc(self) -> str: ...
+
+if sys.version_info < (3,):
+ class Netrc:
+ def __init__(self, filename: Optional[Text] = ...) -> None: ...
+ def get_hosts(self) -> List[str]: ...
+ def get_account(self, host: Text) -> Tuple[Optional[str], Optional[str], Optional[str]]: ...
+ def get_macros(self) -> List[str]: ...
+ def get_macro(self, macro: Text) -> Tuple[str, ...]: ...
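
A minimal sketch of the FTP API typed above; the host and anonymous login are placeholders, and the context-manager form relies on the Python 3 __enter__/__exit__ stubs:

    from ftplib import FTP

    with FTP('ftp.example.com') as ftp:
        ftp.login()                      # anonymous login by default
        print(ftp.nlst())                # List[str] of names in the current directory
        ftp.retrlines('LIST', print)     # the callback receives each text line as a str
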
diff --git a/typeshed/stdlib/2and3/hmac.pyi b/typeshed/stdlib/2and3/hmac.pyi
index 59aaf36..9be9cf1 100644
--- a/typeshed/stdlib/2and3/hmac.pyi
+++ b/typeshed/stdlib/2and3/hmac.pyi
@@ -9,6 +9,8 @@ _B = Union[bytes, bytearray]
# TODO more precise type for object of hashlib
_Hash = Any
+digest_size: None
+
if sys.version_info >= (3, 4):
def new(key: _B, msg: Optional[_B] = ...,
digestmod: Optional[Union[str, Callable[[], _Hash], ModuleType]] = ...) -> HMAC: ...
diff --git a/typeshed/stdlib/2and3/lib2to3/__init__.pyi b/typeshed/stdlib/2and3/lib2to3/__init__.pyi
new file mode 100644
index 0000000..145e31b
--- /dev/null
+++ b/typeshed/stdlib/2and3/lib2to3/__init__.pyi
@@ -0,0 +1 @@
+# Stubs for lib2to3 (Python 3.6)
diff --git a/typeshed/stdlib/2and3/lib2to3/pgen2/__init__.pyi b/typeshed/stdlib/2and3/lib2to3/pgen2/__init__.pyi
new file mode 100644
index 0000000..1adc82a
--- /dev/null
+++ b/typeshed/stdlib/2and3/lib2to3/pgen2/__init__.pyi
@@ -0,0 +1,10 @@
+# Stubs for lib2to3.pgen2 (Python 3.6)
+
+import os
+import sys
+from typing import Text, Union
+
+if sys.version_info >= (3, 6):
+ _Path = Union[Text, os.PathLike]
+else:
+ _Path = Text
diff --git a/typeshed/stdlib/2and3/lib2to3/pgen2/driver.pyi b/typeshed/stdlib/2and3/lib2to3/pgen2/driver.pyi
new file mode 100644
index 0000000..56785f0
--- /dev/null
+++ b/typeshed/stdlib/2and3/lib2to3/pgen2/driver.pyi
@@ -0,0 +1,24 @@
+# Stubs for lib2to3.pgen2.driver (Python 3.6)
+
+import os
+import sys
+from typing import Any, Callable, IO, Iterable, List, Optional, Text, Tuple, Union
+
+from logging import Logger
+from lib2to3.pytree import _Convert, _NL
+from lib2to3.pgen2 import _Path
+from lib2to3.pgen2.grammar import Grammar
+
+
+class Driver:
+ grammar: Grammar
+ logger: Logger
+ convert: _Convert
+ def __init__(self, grammar: Grammar, convert: Optional[_Convert] = ..., logger: Optional[Logger] = ...) -> None: ...
+ def parse_tokens(self, tokens: Iterable[Any], debug: bool = ...) -> _NL: ...
+ def parse_stream_raw(self, stream: IO[Text], debug: bool = ...) -> _NL: ...
+ def parse_stream(self, stream: IO[Text], debug: bool = ...) -> _NL: ...
+ def parse_file(self, filename: _Path, encoding: Optional[Text] = ..., debug: bool = ...) -> _NL: ...
+ def parse_string(self, text: Text, debug: bool = ...) -> _NL: ...
+
+def load_grammar(gt: Text = ..., gp: Optional[Text] = ..., save: bool = ..., force: bool = ..., logger: Optional[Logger] = ...) -> Grammar: ...
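
A short sketch of how a Driver is typically assembled from these pieces; the source string is illustrative:

    from lib2to3 import pygram, pytree
    from lib2to3.pgen2 import driver

    d = driver.Driver(pygram.python_grammar_no_print_statement,
                      convert=pytree.convert)
    tree = d.parse_string('x = 1\n')     # returns the Node/Leaf union (_NL) typed above
    print(tree)                          # str(tree) reproduces the source text
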
diff --git a/typeshed/stdlib/2and3/lib2to3/pgen2/grammar.pyi b/typeshed/stdlib/2and3/lib2to3/pgen2/grammar.pyi
new file mode 100644
index 0000000..122d771
--- /dev/null
+++ b/typeshed/stdlib/2and3/lib2to3/pgen2/grammar.pyi
@@ -0,0 +1,29 @@
+# Stubs for lib2to3.pgen2.grammar (Python 3.6)
+
+from lib2to3.pgen2 import _Path
+
+from typing import Any, Dict, List, Optional, Text, Tuple, TypeVar
+
+_P = TypeVar('_P')
+_Label = Tuple[int, Optional[Text]]
+_DFA = List[List[Tuple[int, int]]]
+_DFAS = Tuple[_DFA, Dict[int, int]]
+
+class Grammar:
+ symbol2number: Dict[Text, int]
+ number2symbol: Dict[int, Text]
+ states: List[_DFA]
+ dfas: Dict[int, _DFAS]
+ labels: List[_Label]
+ keywords: Dict[Text, int]
+ tokens: Dict[int, int]
+ symbol2label: Dict[Text, int]
+ start: int
+ def __init__(self) -> None: ...
+ def dump(self, filename: _Path) -> None: ...
+ def load(self, filename: _Path) -> None: ...
+ def copy(self: _P) -> _P: ...
+ def report(self) -> None: ...
+
+opmap_raw: Text
+opmap: Dict[Text, Text]
diff --git a/typeshed/stdlib/2and3/lib2to3/pgen2/literals.pyi b/typeshed/stdlib/2and3/lib2to3/pgen2/literals.pyi
new file mode 100644
index 0000000..8719500
--- /dev/null
+++ b/typeshed/stdlib/2and3/lib2to3/pgen2/literals.pyi
@@ -0,0 +1,9 @@
+# Stubs for lib2to3.pgen2.literals (Python 3.6)
+
+from typing import Dict, Match, Text
+
+simple_escapes: Dict[Text, Text]
+
+def escape(m: Match) -> Text: ...
+def evalString(s: Text) -> Text: ...
+def test() -> None: ...
diff --git a/typeshed/stdlib/2and3/lib2to3/pgen2/parse.pyi b/typeshed/stdlib/2and3/lib2to3/pgen2/parse.pyi
new file mode 100644
index 0000000..101d476
--- /dev/null
+++ b/typeshed/stdlib/2and3/lib2to3/pgen2/parse.pyi
@@ -0,0 +1,29 @@
+# Stubs for lib2to3.pgen2.parse (Python 3.6)
+
+from typing import Any, Dict, List, Optional, Sequence, Set, Text, Tuple
+
+from lib2to3.pgen2.grammar import Grammar, _DFAS
+from lib2to3.pytree import _NL, _Convert, _RawNode
+
+_Context = Sequence[Any]
+
+class ParseError(Exception):
+ msg: Text
+ type: int
+ value: Optional[Text]
+ context: _Context
+ def __init__(self, msg: Text, type: int, value: Optional[Text], context: _Context) -> None: ...
+
+class Parser:
+ grammar: Grammar
+ convert: _Convert
+ stack: List[Tuple[_DFAS, int, _RawNode]]
+ rootnode: Optional[_NL]
+ used_names: Set[Text]
+ def __init__(self, grammar: Grammar, convert: Optional[_Convert] = ...) -> None: ...
+ def setup(self, start: Optional[int] = ...) -> None: ...
+ def addtoken(self, type: int, value: Optional[Text], context: _Context) -> bool: ...
+ def classify(self, type: int, value: Optional[Text], context: _Context) -> int: ...
+ def shift(self, type: int, value: Optional[Text], newstate: int, context: _Context) -> None: ...
+ def push(self, type: int, newdfa: _DFAS, newstate: int, context: _Context) -> None: ...
+ def pop(self) -> None: ...
diff --git a/typeshed/stdlib/2and3/lib2to3/pgen2/pgen.pyi b/typeshed/stdlib/2and3/lib2to3/pgen2/pgen.pyi
new file mode 100644
index 0000000..4180a23
--- /dev/null
+++ b/typeshed/stdlib/2and3/lib2to3/pgen2/pgen.pyi
@@ -0,0 +1,49 @@
+# Stubs for lib2to3.pgen2.pgen (Python 3.6)
+
+from typing import Any, Dict, IO, Iterable, Iterator, List, Optional, Text, Tuple
+from mypy_extensions import NoReturn
+
+from lib2to3.pgen2 import _Path, grammar
+from lib2to3.pgen2.tokenize import _TokenInfo
+
+class PgenGrammar(grammar.Grammar): ...
+
+class ParserGenerator:
+ filename: _Path
+ stream: IO[Text]
+ generator: Iterator[_TokenInfo]
+ first: Dict[Text, Dict[Text, int]]
+ def __init__(self, filename: _Path, stream: Optional[IO[Text]] = ...) -> None: ...
+ def make_grammar(self) -> PgenGrammar: ...
+ def make_first(self, c: PgenGrammar, name: Text) -> Dict[int, int]: ...
+ def make_label(self, c: PgenGrammar, label: Text) -> int: ...
+ def addfirstsets(self) -> None: ...
+ def calcfirst(self, name: Text) -> None: ...
+ def parse(self) -> Tuple[Dict[Text, List[DFAState]], Text]: ...
+ def make_dfa(self, start: NFAState, finish: NFAState) -> List[DFAState]: ...
+ def dump_nfa(self, name: Text, start: NFAState, finish: NFAState) -> List[DFAState]: ...
+ def dump_dfa(self, name: Text, dfa: Iterable[DFAState]) -> None: ...
+ def simplify_dfa(self, dfa: List[DFAState]) -> None: ...
+ def parse_rhs(self) -> Tuple[NFAState, NFAState]: ...
+ def parse_alt(self) -> Tuple[NFAState, NFAState]: ...
+ def parse_item(self) -> Tuple[NFAState, NFAState]: ...
+ def parse_atom(self) -> Tuple[NFAState, NFAState]: ...
+ def expect(self, type: int, value: Optional[Any] = ...) -> Text: ...
+ def gettoken(self) -> None: ...
+ def raise_error(self, msg: str, *args: Any) -> NoReturn: ...
+
+class NFAState:
+ arcs: List[Tuple[Optional[Text], NFAState]]
+ def __init__(self) -> None: ...
+ def addarc(self, next: NFAState, label: Optional[Text] = ...) -> None: ...
+
+class DFAState:
+ nfaset: Dict[NFAState, Any]
+ isfinal: bool
+ arcs: Dict[Text, DFAState]
+ def __init__(self, nfaset: Dict[NFAState, Any], final: NFAState) -> None: ...
+ def addarc(self, next: DFAState, label: Text) -> None: ...
+ def unifystate(self, old: DFAState, new: DFAState) -> None: ...
+ def __eq__(self, other: Any) -> bool: ...
+
+def generate_grammar(filename: _Path = ...) -> PgenGrammar: ...
diff --git a/typeshed/stdlib/2and3/lib2to3/pgen2/token.pyi b/typeshed/stdlib/2and3/lib2to3/pgen2/token.pyi
new file mode 100644
index 0000000..c256af8
--- /dev/null
+++ b/typeshed/stdlib/2and3/lib2to3/pgen2/token.pyi
@@ -0,0 +1,73 @@
+# Stubs for lib2to3.pgen2.token (Python 3.6)
+
+import sys
+from typing import Dict, Text
+
+ENDMARKER: int
+NAME: int
+NUMBER: int
+STRING: int
+NEWLINE: int
+INDENT: int
+DEDENT: int
+LPAR: int
+RPAR: int
+LSQB: int
+RSQB: int
+COLON: int
+COMMA: int
+SEMI: int
+PLUS: int
+MINUS: int
+STAR: int
+SLASH: int
+VBAR: int
+AMPER: int
+LESS: int
+GREATER: int
+EQUAL: int
+DOT: int
+PERCENT: int
+BACKQUOTE: int
+LBRACE: int
+RBRACE: int
+EQEQUAL: int
+NOTEQUAL: int
+LESSEQUAL: int
+GREATEREQUAL: int
+TILDE: int
+CIRCUMFLEX: int
+LEFTSHIFT: int
+RIGHTSHIFT: int
+DOUBLESTAR: int
+PLUSEQUAL: int
+MINEQUAL: int
+STAREQUAL: int
+SLASHEQUAL: int
+PERCENTEQUAL: int
+AMPEREQUAL: int
+VBAREQUAL: int
+CIRCUMFLEXEQUAL: int
+LEFTSHIFTEQUAL: int
+RIGHTSHIFTEQUAL: int
+DOUBLESTAREQUAL: int
+DOUBLESLASH: int
+DOUBLESLASHEQUAL: int
+OP: int
+COMMENT: int
+NL: int
+if sys.version_info >= (3,):
+ RARROW: int
+if sys.version_info >= (3, 5):
+ AT: int
+ ATEQUAL: int
+ AWAIT: int
+ ASYNC: int
+ERRORTOKEN: int
+N_TOKENS: int
+NT_OFFSET: int
+tok_name: Dict[int, Text]
+
+def ISTERMINAL(x: int) -> bool: ...
+def ISNONTERMINAL(x: int) -> bool: ...
+def ISEOF(x: int) -> bool: ...
diff --git a/typeshed/stdlib/2and3/lib2to3/pgen2/tokenize.pyi b/typeshed/stdlib/2and3/lib2to3/pgen2/tokenize.pyi
new file mode 100644
index 0000000..c10305f
--- /dev/null
+++ b/typeshed/stdlib/2and3/lib2to3/pgen2/tokenize.pyi
@@ -0,0 +1,30 @@
+# Stubs for lib2to3.pgen2.tokenize (Python 3.6)
+# NOTE: Only elements from __all__ are present.
+
+from typing import Callable, Iterable, Iterator, List, Text, Tuple
+from lib2to3.pgen2.token import * # noqa
+
+
+_Coord = Tuple[int, int]
+_TokenEater = Callable[[int, Text, _Coord, _Coord, Text], None]
+_TokenInfo = Tuple[int, Text, _Coord, _Coord, Text]
+
+
+class TokenError(Exception): ...
+class StopTokenizing(Exception): ...
+
+def tokenize(readline: Callable[[], Text], tokeneater: _TokenEater = ...) -> None: ...
+
+class Untokenizer:
+ tokens: List[Text]
+ prev_row: int
+ prev_col: int
+ def __init__(self) -> None: ...
+ def add_whitespace(self, start: _Coord) -> None: ...
+ def untokenize(self, iterable: Iterable[_TokenInfo]) -> Text: ...
+ def compat(self, token: Tuple[int, Text], iterable: Iterable[_TokenInfo]) -> None: ...
+
+def untokenize(iterable: Iterable[_TokenInfo]) -> Text: ...
+def generate_tokens(
+ readline: Callable[[], Text]
+) -> Iterator[_TokenInfo]: ...
diff --git a/typeshed/stdlib/2and3/lib2to3/pygram.pyi b/typeshed/stdlib/2and3/lib2to3/pygram.pyi
new file mode 100644
index 0000000..aeb7b93
--- /dev/null
+++ b/typeshed/stdlib/2and3/lib2to3/pygram.pyi
@@ -0,0 +1,116 @@
+# Stubs for lib2to3.pygram (Python 3.6)
+
+from typing import Any
+from lib2to3.pgen2.grammar import Grammar
+
+class Symbols:
+ def __init__(self, grammar: Grammar) -> None: ...
+
+class python_symbols(Symbols):
+ and_expr: int
+ and_test: int
+ annassign: int
+ arglist: int
+ argument: int
+ arith_expr: int
+ assert_stmt: int
+ async_funcdef: int
+ async_stmt: int
+ atom: int
+ augassign: int
+ break_stmt: int
+ classdef: int
+ comp_for: int
+ comp_if: int
+ comp_iter: int
+ comp_op: int
+ comparison: int
+ compound_stmt: int
+ continue_stmt: int
+ decorated: int
+ decorator: int
+ decorators: int
+ del_stmt: int
+ dictsetmaker: int
+ dotted_as_name: int
+ dotted_as_names: int
+ dotted_name: int
+ encoding_decl: int
+ eval_input: int
+ except_clause: int
+ exec_stmt: int
+ expr: int
+ expr_stmt: int
+ exprlist: int
+ factor: int
+ file_input: int
+ flow_stmt: int
+ for_stmt: int
+ funcdef: int
+ global_stmt: int
+ if_stmt: int
+ import_as_name: int
+ import_as_names: int
+ import_from: int
+ import_name: int
+ import_stmt: int
+ lambdef: int
+ listmaker: int
+ not_test: int
+ old_lambdef: int
+ old_test: int
+ or_test: int
+ parameters: int
+ pass_stmt: int
+ power: int
+ print_stmt: int
+ raise_stmt: int
+ return_stmt: int
+ shift_expr: int
+ simple_stmt: int
+ single_input: int
+ sliceop: int
+ small_stmt: int
+ star_expr: int
+ stmt: int
+ subscript: int
+ subscriptlist: int
+ suite: int
+ term: int
+ test: int
+ testlist: int
+ testlist1: int
+ testlist_gexp: int
+ testlist_safe: int
+ testlist_star_expr: int
+ tfpdef: int
+ tfplist: int
+ tname: int
+ trailer: int
+ try_stmt: int
+ typedargslist: int
+ varargslist: int
+ vfpdef: int
+ vfplist: int
+ vname: int
+ while_stmt: int
+ with_item: int
+ with_stmt: int
+ with_var: int
+ xor_expr: int
+ yield_arg: int
+ yield_expr: int
+ yield_stmt: int
+
+class pattern_symbols(Symbols):
+ Alternative: int
+ Alternatives: int
+ Details: int
+ Matcher: int
+ NegatedUnit: int
+ Repeater: int
+ Unit: int
+
+python_grammar: Grammar
+python_grammar_no_print_statement: Grammar
+pattern_grammar: Grammar
diff --git a/typeshed/stdlib/2and3/lib2to3/pytree.pyi b/typeshed/stdlib/2and3/lib2to3/pytree.pyi
new file mode 100644
index 0000000..06a7c12
--- /dev/null
+++ b/typeshed/stdlib/2and3/lib2to3/pytree.pyi
@@ -0,0 +1,86 @@
+# Stubs for lib2to3.pytree (Python 3.6)
+
+import sys
+from typing import Any, Callable, Dict, Iterator, List, Optional, Text, Tuple, TypeVar, Union
+
+from lib2to3.pgen2.grammar import Grammar
+
+_P = TypeVar('_P')
+_NL = Union[Node, Leaf]
+_Context = Tuple[Text, int, int]
+_Results = Dict[Text, _NL]
+_RawNode = Tuple[int, Text, _Context, Optional[List[_NL]]]
+_Convert = Callable[[Grammar, _RawNode], Any]
+
+HUGE: int
+
+def type_repr(type_num: int) -> Text: ...
+
+class Base:
+ type: int
+ parent: Optional[Node]
+ prefix: Text
+ children: List[_NL]
+ was_changed: bool
+ was_checked: bool
+ def __eq__(self, other: Any) -> bool: ...
+ def _eq(self: _P, other: _P) -> bool: ...
+ def clone(self: _P) -> _P: ...
+ def post_order(self) -> Iterator[_NL]: ...
+ def pre_order(self) -> Iterator[_NL]: ...
+ def replace(self, new: Union[_NL, List[_NL]]) -> None: ...
+ def get_lineno(self) -> int: ...
+ def changed(self) -> None: ...
+ def remove(self) -> Optional[int]: ...
+ @property
+ def next_sibling(self) -> Optional[_NL]: ...
+ @property
+ def prev_sibling(self) -> Optional[_NL]: ...
+ def leaves(self) -> Iterator[Leaf]: ...
+ def depth(self) -> int: ...
+ def get_suffix(self) -> Text: ...
+ if sys.version_info < (3,):
+ def get_prefix(self) -> Text: ...
+ def set_prefix(self, prefix: Text) -> None: ...
+
+class Node(Base):
+ fixers_applied: List[Any]
+ def __init__(self, type: int, children: List[_NL], context: Optional[Any] = ..., prefix: Optional[Text] = ..., fixers_applied: Optional[List[Any]] = ...) -> None: ...
+ def set_child(self, i: int, child: _NL) -> None: ...
+ def insert_child(self, i: int, child: _NL) -> None: ...
+ def append_child(self, child: _NL) -> None: ...
+
+class Leaf(Base):
+ lineno: int
+ column: int
+ value: Text
+ fixers_applied: List[Any]
+ def __init__(self, type: int, value: Text, context: Optional[_Context] = ..., prefix: Optional[Text] = ..., fixers_applied: List[Any] = ...) -> None: ...
+
+def convert(gr: Grammar, raw_node: _RawNode) -> _NL: ...
+
+class BasePattern:
+ type: int
+ content: Optional[Text]
+ name: Optional[Text]
+ def optimize(self) -> BasePattern: ... # sic, subclasses are free to optimize themselves into different patterns
+ def match(self, node: _NL, results: Optional[_Results] = ...) -> bool: ...
+ def match_seq(self, nodes: List[_NL], results: Optional[_Results] = ...) -> bool: ...
+ def generate_matches(self, nodes: List[_NL]) -> Iterator[Tuple[int, _Results]]: ...
+
+class LeafPattern(BasePattern):
+ def __init__(self, type: Optional[int] = ..., content: Optional[Text] = ..., name: Optional[Text] = ...) -> None: ...
+
+class NodePattern(BasePattern):
+ wildcards: bool
+ def __init__(self, type: Optional[int] = ..., content: Optional[Text] = ..., name: Optional[Text] = ...) -> None: ...
+
+class WildcardPattern(BasePattern):
+ min: int
+ max: int
+ def __init__(self, content: Optional[Text] = ..., min: int = ..., max: int = ..., name: Optional[Text] = ...) -> None: ...
+
+class NegatedPattern(BasePattern):
+ def __init__(self, content: Optional[Text] = ...) -> None: ...
+
+def generate_matches(patterns: List[BasePattern], nodes: List[_NL]) -> Iterator[Tuple[int, _Results]]: ...
diff --git a/typeshed/stdlib/2and3/locale.pyi b/typeshed/stdlib/2and3/locale.pyi
index 9ad00e7..bb64666 100644
--- a/typeshed/stdlib/2and3/locale.pyi
+++ b/typeshed/stdlib/2and3/locale.pyi
@@ -84,7 +84,7 @@ def setlocale(category: int,
locale: Union[_str, Iterable[_str], None] = ...) -> _str: ...
def localeconv() -> Mapping[_str, Union[int, _str, List[int]]]: ...
def nl_langinfo(option: int) -> _str: ...
-def getdefaultlocale(envvars: Tuple[_str] = ...) -> Tuple[Optional[_str], Optional[_str]]: ...
+def getdefaultlocale(envvars: Tuple[_str, ...] = ...) -> Tuple[Optional[_str], Optional[_str]]: ...
def getlocale(category: int = ...) -> Sequence[_str]: ...
def getpreferredencoding(do_setlocale: bool = ...) -> _str: ...
def normalize(localename: _str) -> _str: ...
diff --git a/typeshed/stdlib/2and3/logging/__init__.pyi b/typeshed/stdlib/2and3/logging/__init__.pyi
index f7751db..653ec23 100644
--- a/typeshed/stdlib/2and3/logging/__init__.pyi
+++ b/typeshed/stdlib/2and3/logging/__init__.pyi
@@ -1,7 +1,7 @@
# Stubs for logging (Python 3.4)
from typing import (
- Any, Callable, Dict, Iterable, Mapping, MutableMapping, Optional, IO,
+ Any, Callable, Dict, Iterable, List, Mapping, MutableMapping, Optional, IO,
Tuple, Text, Union, overload,
)
from string import Template
@@ -13,9 +13,9 @@ import threading
_SysExcInfoType = Union[Tuple[type, BaseException, TracebackType],
Tuple[None, None, None]]
if sys.version_info >= (3, 5):
- _ExcInfoType = Union[bool, _SysExcInfoType, Exception]
+ _ExcInfoType = Union[None, bool, _SysExcInfoType, Exception]
else:
- _ExcInfoType = Union[bool, _SysExcInfoType]
+ _ExcInfoType = Union[None, bool, _SysExcInfoType]
_ArgsType = Union[Tuple[Any, ...], Dict[str, Any]]
_FilterType = Union['Filter', Callable[['LogRecord'], int]]
@@ -31,56 +31,56 @@ class Logger:
def isEnabledFor(self, lvl: int) -> bool: ...
def getEffectiveLevel(self) -> int: ...
def getChild(self, suffix: str) -> 'Logger': ...
- if sys.version_info > (3,):
+ if sys.version_info >= (3,):
def debug(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def info(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def warning(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def warn(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def error(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def critical(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def log(self, lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def exception(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
else:
def debug(self,
msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def info(self,
msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def warning(self,
msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def warn(self,
msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def error(self,
msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def critical(self,
msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def log(self,
lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def exception(self,
msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def addFilter(self, filt: _FilterType) -> None: ...
def removeFilter(self, filt: _FilterType) -> None: ...
def filter(self, record: 'LogRecord') -> bool: ...
@@ -97,14 +97,14 @@ class Logger:
exc_info: Optional[_SysExcInfoType],
func: Optional[str] = ...,
extra: Optional[Mapping[str, Any]] = ...,
- sinfo: Optional[str] = ...) -> None: ...
+ sinfo: Optional[str] = ...) -> LogRecord: ...
else:
def makeRecord(self,
name: str, lvl: int, fn: str, lno: int, msg: Text,
args: Mapping[str, Any],
exc_info: Optional[_SysExcInfoType],
func: Optional[str] = ...,
- extra: Optional[Mapping[str, Any]] = ...) -> None: ...
+ extra: Optional[Mapping[str, Any]] = ...) -> LogRecord: ...
if sys.version_info >= (3,):
def hasHandlers(self) -> bool: ...
@@ -123,7 +123,7 @@ class Filterer(object):
def __init__(self) -> None: ...
def addFilter(self, filter: Filter) -> None: ...
def removeFilter(self, filter: Filter) -> None: ...
- def filter(self, record) -> bool: ...
+ def filter(self, record: 'LogRecord') -> bool: ...
class Handler(Filterer):
@@ -143,7 +143,7 @@ class Handler(Filterer):
def close(self) -> None: ...
def handle(self, record: 'LogRecord') -> None: ...
def handleError(self, record: 'LogRecord') -> None: ...
- def format(self, record: 'LogRecord') -> None: ...
+ def format(self, record: 'LogRecord') -> str: ...
def emit(self, record: 'LogRecord') -> None: ...
@@ -218,50 +218,50 @@ class LogRecord:
class LoggerAdapter:
def __init__(self, logger: Logger, extra: Mapping[str, Any]) -> None: ...
def process(self, msg: Text, kwargs: MutableMapping[str, Any]) -> Tuple[str, MutableMapping[str, Any]]: ...
- if sys.version_info > (3,):
+ if sys.version_info >= (3,):
def debug(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def info(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def warning(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def error(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def exception(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def critical(self, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def log(self, lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
else:
def debug(self,
msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def info(self,
msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def warning(self,
msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def error(self,
msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def exception(self,
msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def critical(self,
msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def log(self,
lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def isEnabledFor(self, lvl: int) -> bool: ...
if sys.version_info >= (3,):
def getEffectiveLevel(self) -> int: ...
@@ -280,48 +280,48 @@ def getLoggerClass() -> type: ...
if sys.version_info >= (3,):
def getLogRecordFactory() -> Callable[..., LogRecord]: ...
-if sys.version_info > (3,):
+if sys.version_info >= (3,):
def debug(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def info(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def warning(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def warn(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def error(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def critical(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def exception(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
def log(lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- stack_info: bool = ..., extra: Dict[str, Any] = ...,
+ stack_info: bool = ..., extra: Optional[Dict[str, Any]] = ...,
**kwargs: Any) -> None: ...
else:
def debug(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def info(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def warning(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def warn(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def error(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def critical(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def exception(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def log(lvl: int, msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
- extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
+ extra: Optional[Dict[str, Any]] = ..., **kwargs: Any) -> None: ...
def disable(lvl: int) -> None: ...
def addLevelName(lvl: int, levelName: str) -> None: ...
@@ -383,7 +383,7 @@ if sys.version_info >= (3,):
asctime_search = ... # type: str
_fmt = ... # type: str
- def __init__(self, fmt) -> None: ...
+ def __init__(self, fmt: str) -> None: ...
def usesTime(self) -> bool: ...
def format(self, record: Any) -> str: ...
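
The Optional[Dict[str, Any]] and Union[None, ...] adjustments above reflect that extra and exc_info default to None at runtime, so callers may pass None explicitly; the old stubs flagged such calls. A small usage sketch of what now type-checks (assuming mypy is run against these stubs):

import logging

logger = logging.getLogger(__name__)

# Explicit None for extra and exc_info is legal at runtime; the widened
# annotations above make it acceptable to the type checker as well.
logger.warning("disk %s nearly full", "sda1", extra=None)
logger.error("lookup failed", exc_info=None)
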
diff --git a/typeshed/stdlib/2and3/logging/handlers.pyi b/typeshed/stdlib/2and3/logging/handlers.pyi
index 50ffa0e..6041397 100644
--- a/typeshed/stdlib/2and3/logging/handlers.pyi
+++ b/typeshed/stdlib/2and3/logging/handlers.pyi
@@ -180,7 +180,7 @@ class HTTPHandler(Handler):
def mapLogRecord(self, record: LogRecord) -> Dict[str, Any]: ...
-if sys.version_info > (3,):
+if sys.version_info >= (3,):
class QueueHandler(Handler):
def __init__(self, queue: Queue) -> None: ...
def prepare(self, record: LogRecord) -> Any: ...
diff --git a/typeshed/stdlib/2and3/mmap.pyi b/typeshed/stdlib/2and3/mmap.pyi
index d0be9df..87b1f04 100644
--- a/typeshed/stdlib/2and3/mmap.pyi
+++ b/typeshed/stdlib/2and3/mmap.pyi
@@ -3,7 +3,7 @@
import sys
from types import TracebackType
from typing import (Optional, Sequence, Union, Generic, TypeVar, overload,
- Iterable, Container, Sized, Reversible, Type)
+ Iterable, Iterator, Sized, Type)
_T = TypeVar('_T', str, bytes)
@@ -58,8 +58,7 @@ class _mmap(Generic[_T]):
def __len__(self) -> int: ...
if sys.version_info >= (3,):
- class mmap(_mmap, _ContextManager[mmap], Iterable[bytes], Container[bytes],
- Sized, Reversible[bytes]):
+ class mmap(_mmap, _ContextManager[mmap], Iterable[bytes], Sized):
closed = ... # type: bool
def rfind(self, sub: bytes, start: int = ..., stop: int = ...) -> int: ...
@overload
@@ -71,6 +70,9 @@ if sys.version_info >= (3,):
def __setitem__(self, index: int, object: int) -> None: ...
@overload
def __setitem__(self, index: slice, object: bytes) -> None: ...
+ # Doesn't actually exist, but the object is actually iterable because it has __getitem__ and
+ # __len__, so we claim that there is also an __iter__ to help type checkers.
+ def __iter__(self) -> Iterator[bytes]: ...
else:
class mmap(_mmap, Sequence[bytes]):
def rfind(self, string: bytes, start: int = ..., stop: int = ...) -> int: ...
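
The __iter__ note in the mmap stub above refers to the legacy iteration protocol: an object whose __getitem__ accepts 0, 1, 2, ... is iterable at runtime even without __iter__, but static checkers only treat a class as Iterable when __iter__ is declared. A minimal sketch with a hypothetical class:

class Buf:
    def __init__(self, data: bytes) -> None:
        self._data = data
    def __len__(self) -> int:
        return len(self._data)
    def __getitem__(self, index: int) -> int:
        return self._data[index]

# Iteration works at runtime purely through __getitem__ and IndexError:
assert list(Buf(b"ab")) == [97, 98]
# A type checker, however, only accepts Buf where an Iterable is expected if
# an __iter__ method is declared -- hence the extra method in the stub.
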
diff --git a/typeshed/stdlib/3/opcode.pyi b/typeshed/stdlib/2and3/opcode.pyi
similarity index 57%
rename from typeshed/stdlib/3/opcode.pyi
rename to typeshed/stdlib/2and3/opcode.pyi
index e577a9f..4cc1ab0 100644
--- a/typeshed/stdlib/3/opcode.pyi
+++ b/typeshed/stdlib/2and3/opcode.pyi
@@ -1,4 +1,6 @@
-from typing import List, Dict, Sequence
+from typing import List, Dict, Optional, Sequence
+
+import sys
cmp_op = ... # type: Sequence[str]
hasconst = ... # type: List[int]
@@ -10,9 +12,12 @@ hascompare = ... # type: List[int]
hasfree = ... # type: List[int]
opname = ... # type: List[str]
-opmap = ... # Dict[str, int]
+opmap = ... # type: Dict[str, int]
HAVE_ARGUMENT = ... # type: int
EXTENDED_ARG = ... # type: int
-hasnargs = ... # type: List[int]
-def stack_effect(opcode: int, oparg: int = ...) -> int: ...
+if sys.version_info >= (3, 4):
+ def stack_effect(opcode: int, oparg: Optional[int] = ...) -> int: ...
+
+if sys.version_info >= (3, 6):
+ hasnargs = ... # type: List[int]
diff --git a/typeshed/stdlib/2and3/optparse.pyi b/typeshed/stdlib/2and3/optparse.pyi
new file mode 100644
index 0000000..fe7f437
--- /dev/null
+++ b/typeshed/stdlib/2and3/optparse.pyi
@@ -0,0 +1,226 @@
+# Generated by pytype, with only minor tweaks. Might be incomplete.
+import sys
+from typing import Any, Callable, Dict, IO, Iterable, List, Mapping, Optional, Sequence, Tuple, Union
+
+# See https://groups.google.com/forum/#!topic/python-ideas/gA1gdj3RZ5g
+if sys.version_info >= (3,):
+ _Text = str
+else:
+ _Text = Union[str, unicode]
+
+NO_DEFAULT = ... # type: Tuple[_Text, ...]
+SUPPRESS_HELP = ... # type: _Text
+SUPPRESS_USAGE = ... # type: _Text
+
+def check_builtin(option: Option, opt: Any, value: _Text) -> Any: ...
+def check_choice(option: Option, opt: Any, value: _Text) -> Any: ...
+if sys.version_info >= (3,):
+ def isbasestring(x: Any) -> bool: ...
+
+class OptParseError(Exception):
+ msg = ... # type: _Text
+ def __init__(self, msg: _Text) -> None: ...
+
+class BadOptionError(OptParseError):
+ opt_str = ... # type: _Text
+ def __init__(self, opt_str: _Text) -> None: ...
+
+class AmbiguousOptionError(BadOptionError):
+ possibilities = ... # type: Iterable[_Text]
+ def __init__(self, opt_str: _Text, possibilities: Sequence[_Text]) -> None: ...
+
+class OptionError(OptParseError):
+ msg = ... # type: _Text
+ option_id = ... # type: _Text
+ def __init__(self, msg: _Text, option: Option) -> None: ...
+
+class OptionConflictError(OptionError): ...
+
+class OptionValueError(OptParseError): ...
+
+
+class HelpFormatter:
+ NO_DEFAULT_VALUE = ... # type: _Text
+ _long_opt_fmt = ... # type: _Text
+ _short_opt_fmt = ... # type: _Text
+ current_indent = ... # type: int
+ default_tag = ... # type: _Text
+ help_position = ... # type: Any
+ help_width = ... # type: Any
+ indent_increment = ... # type: int
+ level = ... # type: int
+ max_help_position = ... # type: int
+ option_strings = ... # type: Dict[Option, _Text]
+ parser = ... # type: OptionParser
+ short_first = ... # type: Any
+ width = ... # type: int
+ def __init__(self, indent_increment: int, max_help_position: int, width: Optional[int], short_first: int) -> None: ...
+ def _format__Text(self, _Text: _Text) -> _Text: ...
+ def dedent(self) -> None: ...
+ def expand_default(self, option: Option) -> _Text: ...
+ def format_description(self, description: _Text) -> _Text: ...
+ def format_epilog(self, epilog) -> _Text: ...
+ def format_heading(self, heading: Any) -> _Text: ...
+ def format_option(self, option: OptionParser) -> _Text: ...
+ def format_option_strings(self, option: OptionParser) -> Any: ...
+ def format_usage(self, usage: Any) -> _Text: ...
+ def indent(self) -> None: ...
+ def set_long_opt_delimiter(self, delim: _Text) -> None: ...
+ def set_parser(self, parser: OptionParser) -> None: ...
+ def set_short_opt_delimiter(self, delim: _Text) -> None: ...
+ def store_option_strings(self, parser: OptionParser) -> None: ...
+
+class IndentedHelpFormatter(HelpFormatter):
+ def __init__(self,
+ indent_increment: int = ...,
+ max_help_position: int = ...,
+ width: Optional[int] = ...,
+ short_first: int = ...) -> None: ...
+ def format_heading(self, heading: _Text) -> _Text: ...
+ def format_usage(self, usage: _Text) -> _Text: ...
+
+class TitledHelpFormatter(HelpFormatter):
+ def __init__(self,
+ indent_increment: int = ...,
+ max_help_position: int = ...,
+ width: Optional[int] = ...,
+ short_first: int = ...) -> None: ...
+ def format_heading(self, heading: _Text) -> _Text: ...
+ def format_usage(self, usage: _Text) -> _Text: ...
+
+class Option:
+ ACTIONS = ... # type: Tuple[_Text, ...]
+ ALWAYS_TYPED_ACTIONS = ... # type: Tuple[_Text, ...]
+ ATTRS = ... # type: List[_Text]
+ CHECK_METHODS = ... # type: Optional[List[Callable]]
+ CONST_ACTIONS = ... # type: Tuple[_Text, ...]
+ STORE_ACTIONS = ... # type: Tuple[_Text, ...]
+ TYPED_ACTIONS = ... # type: Tuple[_Text, ...]
+ TYPES = ... # type: Tuple[_Text, ...]
+ TYPE_CHECKER = ... # type: Dict[_Text, Callable]
+ _long_opts = ... # type: List[_Text]
+ _short_opts = ... # type: List[_Text]
+ action = ... # type: _Text
+ dest = ... # type: Any
+ nargs = ... # type: int
+ type = ... # type: Any
+ def __init__(self, *opts, **attrs) -> None: ...
+ def _check_action(self) -> None: ...
+ def _check_callback(self) -> None: ...
+ def _check_choice(self) -> None: ...
+ def _check_const(self) -> None: ...
+ def _check_dest(self) -> None: ...
+ def _check_nargs(self) -> None: ...
+ def _check_opt_strings(self, opts: Optional[_Text]) -> Any: ...
+ def _check_type(self) -> None: ...
+ def _set_attrs(self, attrs: Dict[_Text, Any]) -> None: ...
+ def _set_opt_strings(self, opts: _Text) -> None: ...
+ def check_value(self, opt: Any, value: Any) -> Any: ...
+ def convert_value(self, opt: Any, value: Any) -> Any: ...
+ def get_opt_string(self) -> _Text: ...
+ def process(self, opt: Any, value: Any, values: Any, parser: OptionParser) -> int: ...
+ def take_action(self, action: _Text, dest: _Text, opt: Any, value: Any, values: Any, parser: OptionParser) -> int: ...
+ def takes_value(self) -> bool: ...
+
+make_option = Option
+
+class OptionContainer:
+ _long_opt = ... # type: Dict[_Text, Option]
+ _short_opt = ... # type: Dict[_Text, Option]
+ conflict_handler = ... # type: _Text
+ defaults = ... # type: Dict[_Text, Any]
+ description = ... # type: Any
+ option_class = ... # type: Any
+ def __init__(self, option_class: Option, conflict_handler: Any, description: Any) -> None: ...
+ def _check_conflict(self, option: Any) -> None: ...
+ def _create_option_mappings(self) -> None: ...
+ def _share_option_mappings(self, parser: OptionParser) -> None: ...
+ def add_option(self, *args, **kwargs) -> Any: ...
+ def add_options(self, option_list: Iterable[Option]) -> None: ...
+ def destroy(self) -> None: ...
+ def format_description(self, formatter: Optional[HelpFormatter]) -> Any: ...
+ def format_help(self, formatter: Optional[HelpFormatter]) -> _Text: ...
+ def format_option_help(self, formatter: Optional[HelpFormatter]) -> _Text: ...
+ def get_description(self) -> Any: ...
+ def get_option(self, opt_str: _Text) -> Optional[Option]: ...
+ def has_option(self, opt_str: _Text) -> bool: ...
+ def remove_option(self, opt_str: _Text) -> None: ...
+ def set_conflict_handler(self, handler: Any) -> None: ...
+ def set_description(self, description: Any) -> None: ...
+
+class OptionGroup(OptionContainer):
+ option_list = ... # type: List[Option]
+ parser = ... # type: OptionParser
+ title = ... # type: _Text
+ def __init__(self, parser: OptionParser, title: _Text, description: Optional[_Text]) -> None: ...
+ def _create_option_list(self) -> None: ...
+ def set_title(self, title: _Text) -> None: ...
+
+class OptionParser(OptionContainer):
+ allow_interspersed_args = ... # type: bool
+ epilog = ... # type: Any
+ formatter = ... # type: HelpFormatter
+ largs = ... # type: Optional[List[_Text]]
+ option_groups = ... # type: List[OptionParser]
+ option_list = ... # type: List[Any]
+ process_default_values = ... # type: Any
+ prog = ... # type: Any
+ rargs = ... # type: Optional[List[Any]]
+ standard_option_list = ... # type: List
+ usage = ... # type: Optional[_Text]
+ values = ... # type: Any
+ version = ... # type: _Text
+ def __init__(self, usage: Optional[_Text] = ...,
+ option_list: Iterable[Option] = ...,
+ option_class: Option = ...,
+ version: Optional[_Text] = ...,
+ conflict_handler: _Text = ...,
+ description: Optional[_Text] = ...,
+ formatter: Optional[HelpFormatter] = ...,
+ add_help_option: bool = ...,
+ prog: Optional[_Text] = ...,
+ epilog: Optional[_Text] = ...) -> None: ...
+ def _add_help_option(self) -> None: ...
+ def _add_version_option(self) -> None: ...
+ def _create_option_list(self) -> None: ...
+ def _get_all_options(self) -> List[Any]: ...
+ def _get_args(self, args: Iterable) -> List[Any]: ...
+ def _init_parsing_state(self) -> None: ...
+ def _match_long_opt(self, opt: _Text) -> _Text: ...
+ def _populate_option_list(self, option_list: Iterable[Option], add_help: bool = ...) -> None: ...
+ def _process_args(self, largs: List, rargs: List, values: Values) -> None: ...
+ def _process_long_opt(self, rargs: List, values: Any) -> None: ...
+ def _process_short_opts(self, rargs: List, values: Any) -> None: ...
+ def add_option_group(self, *args, **kwargs) -> OptionParser: ...
+ def check_values(self, values: Any, args) -> Tuple[Any, ...]: ...
+ def disable_interspersed_args(self) -> None: ...
+ def enable_interspersed_args(self) -> None: ...
+ def error(self, msg: _Text) -> None: ...
+ def exit(self, status: int = ..., msg: Optional[str] = ...) -> None: ...
+ def expand_prog_name(self, s: Optional[_Text]) -> Any: ...
+ def format_epilog(self, formatter: HelpFormatter) -> Any: ...
+ def format_help(self, formatter: Optional[HelpFormatter] = ...) -> _Text: ...
+ def format_option_help(self, formatter: Optional[HelpFormatter] = ...) -> _Text: ...
+ def get_default_values(self) -> Values: ...
+ def get_option_group(self, opt_str: _Text) -> Any: ...
+ def get_prog_name(self) -> _Text: ...
+ def get_usage(self) -> _Text: ...
+ def get_version(self) -> _Text: ...
+ def parse_args(self, args: Optional[Sequence[_Text]] = ..., values: Optional[Values] = ...) -> Tuple[Any, ...]: ...
+ def print_usage(self, file: Optional[IO[str]] = ...) -> None: ...
+ def print_help(self, file: Optional[IO[str]] = ...) -> None: ...
+ def print_version(self, file: Optional[IO[str]] = ...) -> None: ...
+ def set_default(self, dest: Any, value: Any) -> None: ...
+ def set_defaults(self, **kwargs) -> None: ...
+ def set_process_default_values(self, process: Any) -> None: ...
+ def set_usage(self, usage: _Text) -> None: ...
+
+
+class Values:
+ def __init__(self, defaults: Optional[Mapping[str, Any]] = ...) -> None: ...
+ def _update(self, dict: Dict[_Text, Any], mode: Any) -> None: ...
+ def _update_careful(self, dict: Dict[_Text, Any]) -> None: ...
+ def _update_loose(self, dict: Dict[_Text, Any]) -> None: ...
+ def ensure_value(self, attr: Any, value: Any) -> Any: ...
+ def read_file(self, filename: _Text, mode: _Text) -> None: ...
+ def read_module(self, modname: _Text, mode: _Text) -> None: ...
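
The _Text alias at the top of this new optparse stub (see the python-ideas link above) is the usual pattern for stubs shared between Python 2 and 3: type checkers evaluate the sys.version_info branch statically, so _Text means str on Python 3 and Union[str, unicode] on Python 2. A sketch of the same pattern in ordinary code (the greet function is hypothetical):

import sys
from typing import Union

if sys.version_info >= (3,):
    _Text = str
else:
    _Text = Union[str, unicode]  # executed only on Python 2, where unicode exists

def greet(name):
    # type: (_Text) -> _Text
    return name
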
diff --git a/typeshed/stdlib/2and3/pickletools.pyi b/typeshed/stdlib/2and3/pickletools.pyi
new file mode 100644
index 0000000..c036646
--- /dev/null
+++ b/typeshed/stdlib/2and3/pickletools.pyi
@@ -0,0 +1,145 @@
+# Stubs for pickletools (Python 2 and 3)
+import sys
+from typing import Any, Callable, IO, Iterator, List, MutableMapping, Optional, Text, Tuple, Type, Union
+
+_Reader = Callable[[IO[bytes]], Any]
+
+if sys.version_info >= (3, 0):
+ bytes_types: Tuple[Type[Any], ...]
+
+UP_TO_NEWLINE: int
+TAKEN_FROM_ARGUMENT1: int
+TAKEN_FROM_ARGUMENT4: int
+if sys.version_info >= (3, 3):
+ TAKEN_FROM_ARGUMENT4U: int
+if sys.version_info >= (3, 4):
+ TAKEN_FROM_ARGUMENT8U: int
+
+class ArgumentDescriptor(object):
+ name: str
+ n: int
+ reader: _Reader
+ doc: str
+ def __init__(self, name: str, n: int, reader: _Reader, doc: str) -> None: ...
+
+def read_uint1(f: IO[bytes]) -> int: ...
+uint1: ArgumentDescriptor
+
+def read_uint2(f: IO[bytes]) -> int: ...
+uint2: ArgumentDescriptor
+
+def read_int4(f: IO[bytes]) -> int: ...
+int4: ArgumentDescriptor
+
+if sys.version_info >= (3, 3):
+ def read_uint4(f: IO[bytes]) -> int: ...
+ uint4: ArgumentDescriptor
+
+if sys.version_info >= (3, 5):
+ def read_uint8(f: IO[bytes]) -> int: ...
+ uint8: ArgumentDescriptor
+
+def read_stringnl(f: IO[bytes], decode: bool = ..., stripquotes: bool = ...) -> Union[bytes, Text]: ...
+stringnl: ArgumentDescriptor
+
+def read_stringnl_noescape(f: IO[bytes]) -> str: ...
+stringnl_noescape: ArgumentDescriptor
+
+def read_stringnl_noescape_pair(f: IO[bytes]) -> Text: ...
+stringnl_noescape_pair: ArgumentDescriptor
+
+def read_string1(f: IO[bytes]) -> str: ...
+string1: ArgumentDescriptor
+
+def read_string4(f: IO[bytes]) -> str: ...
+string4: ArgumentDescriptor
+
+if sys.version_info >= (3, 3):
+ def read_bytes1(f: IO[bytes]) -> bytes: ...
+ bytes1: ArgumentDescriptor
+
+ def read_bytes4(f: IO[bytes]) -> bytes: ...
+ bytes4: ArgumentDescriptor
+
+if sys.version_info >= (3, 4):
+ def read_bytes8(f: IO[bytes]) -> bytes: ...
+ bytes8: ArgumentDescriptor
+
+def read_unicodestringnl(f: IO[bytes]) -> Text: ...
+unicodestringnl: ArgumentDescriptor
+
+if sys.version_info >= (3, 4):
+ def read_unicodestring1(f: IO[bytes]) -> Text: ...
+ unicodestring1: ArgumentDescriptor
+
+def read_unicodestring4(f: IO[bytes]) -> Text: ...
+unicodestring4: ArgumentDescriptor
+
+if sys.version_info >= (3, 4):
+ def read_unicodestring8(f: IO[bytes]) -> Text: ...
+ unicodestring8: ArgumentDescriptor
+
+def read_decimalnl_short(f: IO[bytes]) -> int: ...
+def read_decimalnl_long(f: IO[bytes]) -> int: ...
+decimalnl_short: ArgumentDescriptor
+decimalnl_long: ArgumentDescriptor
+
+def read_floatnl(f: IO[bytes]) -> float: ...
+floatnl: ArgumentDescriptor
+
+def read_float8(f: IO[bytes]) -> float: ...
+float8: ArgumentDescriptor
+
+def read_long1(f: IO[bytes]) -> int: ...
+long1: ArgumentDescriptor
+
+def read_long4(f: IO[bytes]) -> int: ...
+long4: ArgumentDescriptor
+
+class StackObject(object):
+ name: str
+ obtype: Union[Type[Any], Tuple[Type[Any], ...]]
+ doc: str
+ def __init__(self, name: str, obtype: Union[Type[Any], Tuple[Type[Any], ...]], doc: str) -> None: ...
+
+pyint: StackObject
+pylong: StackObject
+pyinteger_or_bool: StackObject
+pybool: StackObject
+pyfloat: StackObject
+if sys.version_info >= (3, 4):
+ pybytes_or_str: StackObject
+pystring: StackObject
+if sys.version_info >= (3, 0):
+ pybytes: StackObject
+pyunicode: StackObject
+pynone: StackObject
+pytuple: StackObject
+pylist: StackObject
+pydict: StackObject
+if sys.version_info >= (3, 4):
+ pyset: StackObject
+ pyfrozenset: StackObject
+anyobject: StackObject
+markobject: StackObject
+stackslice: StackObject
+
+class OpcodeInfo(object):
+ name: str
+ code: str
+ arg: Optional[ArgumentDescriptor]
+ stack_before: List[StackObject]
+ stack_after: List[StackObject]
+ proto: int
+ doc: str
+ def __init__(self, name: str, code: str, arg: Optional[ArgumentDescriptor],
+ stack_before: List[StackObject], stack_after: List[StackObject], proto: int, doc: str) -> None: ...
+
+opcodes: List[OpcodeInfo]
+
+def genops(pickle: Union[bytes, IO[bytes]]) -> Iterator[Tuple[OpcodeInfo, Optional[Any], Optional[int]]]: ...
+def optimize(p: Union[bytes, IO[bytes]]) -> bytes: ...
+if sys.version_info >= (3, 2):
+ def dis(pickle: Union[bytes, IO[bytes]], out: Optional[IO[str]] = ..., memo: Optional[MutableMapping[int, Any]] = ..., indentlevel: int = ..., annotate: int = ...) -> None: ...
+else:
+ def dis(pickle: Union[bytes, IO[bytes]], out: Optional[IO[str]] = ..., memo: Optional[MutableMapping[int, Any]] = ..., indentlevel: int = ...) -> None: ...
diff --git a/typeshed/stdlib/2and3/plistlib.pyi b/typeshed/stdlib/2and3/plistlib.pyi
index 2790199..40a3f97 100644
--- a/typeshed/stdlib/2and3/plistlib.pyi
+++ b/typeshed/stdlib/2and3/plistlib.pyi
@@ -9,7 +9,7 @@ from enum import Enum
import sys
mm = MutableMapping[str, Any]
-_D = TypeVar('_D', mm)
+_D = TypeVar('_D', bound=mm)
if sys.version_info >= (3,):
_Path = str
else:
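
The plistlib change above fixes an invalid TypeVar: TypeVar('_D', mm) declares a value-constrained TypeVar with a single constraint, which typing rejects at runtime (constraints require at least two types). Restricting a variable to one type and its subtypes is spelled with bound= instead. A minimal sketch (merge is a hypothetical helper):

from typing import Any, MutableMapping, TypeVar

_M = MutableMapping[str, Any]
_D = TypeVar('_D', bound=_M)   # _D may be _M or any subtype of it

def merge(target: _D, extra: _M) -> _D:
    target.update(extra)       # the bound guarantees update() exists
    return target
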
diff --git a/typeshed/stdlib/2and3/poplib.pyi b/typeshed/stdlib/2and3/poplib.pyi
new file mode 100644
index 0000000..e7e0c36
--- /dev/null
+++ b/typeshed/stdlib/2and3/poplib.pyi
@@ -0,0 +1,75 @@
+# Stubs for poplib (Python 2 and 3)
+
+from mypy_extensions import NoReturn
+import socket
+import ssl
+import sys
+from typing import Any, BinaryIO, Dict, List, Optional, overload, Pattern, Text, Tuple
+
+_LongResp = Tuple[bytes, List[bytes], int]
+
+class error_proto(Exception): pass
+
+POP3_PORT: int
+POP3_SSL_PORT: int
+CR: bytes
+LF: bytes
+CRLF: bytes
+
+
+class POP3:
+ if sys.version_info >= (3, 0):
+ encoding: Text
+
+ host: Text
+ port: int
+ sock: socket.socket
+ file: BinaryIO
+ welcome: bytes
+
+ def __init__(self, host: Text, port: int = ..., timeout: float = ...) -> None: ...
+ def getwelcome(self) -> bytes: ...
+ def set_debuglevel(self, level: int) -> None: ...
+ def user(self, user: Text) -> bytes: ...
+ def pass_(self, pswd: Text) -> bytes: ...
+ def stat(self) -> Tuple[int, int]: ...
+ def list(self, which: Optional[Any] = ...) -> _LongResp: ...
+ def retr(self, which: Any) -> _LongResp: ...
+ def dele(self, which: Any) -> bytes: ...
+ def noop(self) -> bytes: ...
+ def rset(self) -> bytes: ...
+ def quit(self) -> bytes: ...
+ def close(self) -> None: ...
+ def rpop(self, user: Text) -> bytes: ...
+
+ timestamp: Pattern[Text]
+
+ if sys.version_info < (3, 0):
+ def apop(self, user: Text, secret: Text) -> bytes: ...
+ else:
+ def apop(self, user: Text, password: Text) -> bytes: ...
+ def top(self, which: Any, howmuch: int) -> _LongResp: ...
+
+ @overload
+ def uidl(self) -> _LongResp: ...
+ @overload
+ def uidl(self, which: Any) -> bytes: ...
+
+ if sys.version_info >= (3, 5):
+ def utf8(self) -> bytes: ...
+ if sys.version_info >= (3, 4):
+ def capa(self) -> Dict[Text, List[Text]]: ...
+ def stls(self, context: Optional[ssl.SSLContext] = ...) -> bytes: ...
+
+
+class POP3_SSL(POP3):
+ if sys.version_info >= (3, 0):
+ def __init__(self, host: Text, port: int = ..., keyfile: Optional[Text] = ..., certfile: Optional[Text] = ...,
+ timeout: float = ..., context: Optional[ssl.SSLContext] = ...) -> None: ...
+ else:
+ def __init__(self, host: Text, port: int = ..., keyfile: Optional[Text] = ..., certfile: Optional[Text] = ...,
+ timeout: float = ...) -> None: ...
+
+ if sys.version_info >= (3, 4):
+ # "context" is actually the last argument, but that breaks LSP and it doesn't really matter because all the arguments are ignored
+ def stls(self, context: Any = ..., keyfile: Any = ..., certfile: Any = ...) -> bytes: ...
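
The stls() comment above is about the Liskov substitution principle: an override must accept every call that is valid on the base class, so the stub cannot reorder the parameters POP3.stls declares, even though the runtime POP3_SSL.stls takes context last. A minimal sketch of an LSP-compatible override (Base and Derived are hypothetical):

from typing import Optional

class Base:
    def send(self, data: bytes, timeout: Optional[float] = None) -> int:
        return len(data)

class Derived(Base):
    # Keeps the base parameters in the same positions and only appends new
    # optional ones, so code holding a Base reference still type-checks.
    def send(self, data: bytes, timeout: Optional[float] = None,
             retries: int = 0) -> int:
        return len(data)
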
diff --git a/typeshed/stdlib/3/pprint.pyi b/typeshed/stdlib/2and3/pprint.pyi
similarity index 88%
rename from typeshed/stdlib/3/pprint.pyi
rename to typeshed/stdlib/2and3/pprint.pyi
index b846ff9..35da3a3 100644
--- a/typeshed/stdlib/3/pprint.pyi
+++ b/typeshed/stdlib/2and3/pprint.pyi
@@ -1,6 +1,7 @@
# Stubs for pprint
-# Based on http://docs.python.org/3.2/library/pprint.html
+# Based on http://docs.python.org/2/library/pprint.html
+# Based on http://docs.python.org/3/library/pprint.html
from typing import Any, Dict, Tuple, IO
diff --git a/typeshed/stdlib/2and3/pstats.pyi b/typeshed/stdlib/2and3/pstats.pyi
index 9a872c2..2321391 100644
--- a/typeshed/stdlib/2and3/pstats.pyi
+++ b/typeshed/stdlib/2and3/pstats.pyi
@@ -1,19 +1,25 @@
from profile import Profile
from cProfile import Profile as cProfile
-from typing import Any, AnyStr, Dict, IO, Iterable, List, Text, Tuple, TypeVar, Union, overload
+import os
+import sys
+from typing import Any, Dict, IO, Iterable, List, Text, Tuple, TypeVar, Union, overload
_Selector = Union[str, float, int]
_T = TypeVar('_T', bound='Stats')
+if sys.version_info >= (3, 6):
+ _Path = Union[bytes, Text, os.PathLike[Any]]
+else:
+ _Path = Union[bytes, Text]
class Stats:
def __init__(self: _T, __arg: Union[None, str, Text, Profile, cProfile] = ...,
*args: Union[None, str, Text, Profile, cProfile, _T],
- stream: IO[Any]) -> None: ...
+ stream: IO[Any] = ...) -> None: ...
def init(self, arg: Union[None, str, Text, Profile, cProfile]) -> None: ...
def load_stats(self, arg: Union[None, str, Text, Profile, cProfile]) -> None: ...
def get_top_level_stats(self) -> None: ...
def add(self: _T, *arg_list: Union[None, str, Text, Profile, cProfile, _T]) -> _T: ...
- def dump_stats(self, filename: AnyStr) -> None: ...
+ def dump_stats(self, filename: _Path) -> None: ...
def get_sort_arg_defs(self) -> Dict[str, Tuple[Tuple[Tuple[int, int], ...], str]]: ...
@overload
def sort_stats(self: _T, field: int) -> _T: ...
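
The dump_stats() change above swaps AnyStr, a TypeVar meant to tie several str/bytes positions to the same type, for a plain union, and on Python 3.6+ the _Path alias also admits os.PathLike objects such as pathlib.Path. A short usage sketch (writes profile.out in the current directory):

import cProfile
import pstats
from pathlib import Path

profiler = cProfile.Profile()
profiler.enable()
sum(range(1000))
profiler.disable()

stats = pstats.Stats(profiler)
stats.dump_stats(Path("profile.out"))   # a PathLike now type-checks on 3.6+
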
diff --git a/typeshed/stdlib/2and3/pty.pyi b/typeshed/stdlib/2and3/pty.pyi
new file mode 100644
index 0000000..3931bb0
--- /dev/null
+++ b/typeshed/stdlib/2and3/pty.pyi
@@ -0,0 +1,20 @@
+# Stubs for pty (Python 2 and 3)
+import sys
+from typing import Callable, Iterable, Tuple, Union
+
+_Reader = Callable[[int], bytes]
+
+STDIN_FILENO: int
+STDOUT_FILENO: int
+STDERR_FILENO: int
+
+CHILD: int
+
+def openpty() -> Tuple[int, int]: ...
+def master_open() -> Tuple[int, str]: ...
+def slave_open(tty_name: str) -> int: ...
+def fork() -> Tuple[int, int]: ...
+if sys.version_info >= (3, 4):
+ def spawn(argv: Union[str, Iterable[str]], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: ...
+else:
+ def spawn(argv: Union[str, Iterable[str]], master_read: _Reader = ..., stdin_read: _Reader = ...) -> None: ...
diff --git a/typeshed/stdlib/2and3/py_compile.pyi b/typeshed/stdlib/2and3/py_compile.pyi
new file mode 100644
index 0000000..cdd51ec
--- /dev/null
+++ b/typeshed/stdlib/2and3/py_compile.pyi
@@ -0,0 +1,20 @@
+# Stubs for py_compile (Python 2 and 3)
+import sys
+
+from typing import Optional, List, Text, AnyStr, Union
+
+_EitherStr = Union[bytes, Text]
+
+class PyCompileError(Exception):
+ exc_type_name = ... # type: str
+ exc_value = ... # type: BaseException
+ file = ... # type: str
+ msg = ... # type: str
+ def __init__(self, exc_type: str, exc_value: BaseException, file: str, msg: str = ...) -> None: ...
+
+if sys.version_info >= (3, 2):
+ def compile(file: AnyStr, cfile: Optional[AnyStr] = ..., dfile: Optional[AnyStr] = ..., doraise: bool = ..., optimize: int = ...) -> Optional[AnyStr]: ...
+else:
+ def compile(file: _EitherStr, cfile: Optional[_EitherStr] = ..., dfile: Optional[_EitherStr] = ..., doraise: bool = ...) -> None: ...
+
+def main(args: Optional[List[Text]] = ...): ...
diff --git a/typeshed/stdlib/3/pyclbr.pyi b/typeshed/stdlib/2and3/pyclbr.pyi
similarity index 94%
rename from typeshed/stdlib/3/pyclbr.pyi
rename to typeshed/stdlib/2and3/pyclbr.pyi
index 03c83f9..8dad523 100644
--- a/typeshed/stdlib/3/pyclbr.pyi
+++ b/typeshed/stdlib/2and3/pyclbr.pyi
@@ -4,7 +4,7 @@ from typing import List, Union, Sequence, Optional, Dict
class Class:
module = ... # type: str
name = ... # type: str
- super = Optional[List[Union["Class", str]]]
+ super = ... # type: Optional[List[Union["Class", str]]]
methods = ... # type: Dict[str, int]
file = ... # type: int
lineno = ... # type: int
diff --git a/typeshed/stdlib/2and3/quopri.pyi b/typeshed/stdlib/2and3/quopri.pyi
new file mode 100644
index 0000000..2823f8c
--- /dev/null
+++ b/typeshed/stdlib/2and3/quopri.pyi
@@ -0,0 +1,8 @@
+# Stubs for quopri (Python 2 and 3)
+
+from typing import BinaryIO
+
+def encode(input: BinaryIO, output: BinaryIO, quotetabs: int, header: int = ...) -> None: ...
+def encodestring(s: bytes, quotetabs: int = ..., header: int = ...) -> bytes: ...
+def decode(input: BinaryIO, output: BinaryIO, header: int = ...) -> None: ...
+def decodestring(s: bytes, header: int = ...) -> bytes: ...
diff --git a/typeshed/stdlib/2and3/sched.pyi b/typeshed/stdlib/2and3/sched.pyi
new file mode 100644
index 0000000..7222d13
--- /dev/null
+++ b/typeshed/stdlib/2and3/sched.pyi
@@ -0,0 +1,29 @@
+# Stub for sched (Python 2 and 3)
+
+import sys
+from typing import Any, Callable, Dict, List, NamedTuple, Text, Tuple
+
+Event = NamedTuple('Event', [
+ ('time', float),
+ ('priority', Any),
+ ('action', Callable[..., Any]),
+ ('argument', Tuple[Any, ...]),
+ ('kwargs', Dict[Text, Any]),
+])
+
+class scheduler:
+ if sys.version_info >= (3, 3):
+ def __init__(self, timefunc: Callable[[], float] = ..., delayfunc: Callable[[float], None] = ...) -> None: ...
+ def enterabs(self, time: float, priority: Any, action: Callable[..., Any], argument: Tuple[Any, ...] = ..., kwargs: Dict[str, Any] = ...) -> Event: ...
+ def enter(self, delay: float, priority: Any, action: Callable[..., Any], argument: Tuple[Any, ...] = ..., kwargs: Dict[str, Any] = ...) -> Event: ...
+ def run(self, blocking: bool = ...) -> float: ...
+ else:
+ def __init__(self, timefunc: Callable[[], float], delayfunc: Callable[[float], None]) -> None: ...
+ def enterabs(self, time: float, priority: Any, action: Callable[..., Any], argument: Tuple[Any, ...]) -> Event: ...
+ def enter(self, delay: float, priority: Any, action: Callable[..., Any], argument: Tuple[Any, ...]) -> Event: ...
+ def run(self) -> float: ...
+
+ def cancel(self, event: Event) -> None: ...
+ def empty(self) -> bool: ...
+ @property
+ def queue(self) -> List[Event]: ...
diff --git a/typeshed/stdlib/2and3/smtpd.pyi b/typeshed/stdlib/2and3/smtpd.pyi
new file mode 100644
index 0000000..f4339c0
--- /dev/null
+++ b/typeshed/stdlib/2and3/smtpd.pyi
@@ -0,0 +1,87 @@
+# Stubs for smtpd (Python 2 and 3)
+import sys
+import socket
+import asyncore
+import asynchat
+
+from typing import Any, DefaultDict, List, Optional, Text, Tuple, Type
+
+_Address = Tuple[str, int] # (host, port)
+
+
+class SMTPChannel(asynchat.async_chat):
+ COMMAND: int
+ DATA: int
+
+ if sys.version_info >= (3, 3):
+ command_size_limits: DefaultDict[str, int]
+
+ if sys.version_info >= (3,):
+ smtp_server: SMTPServer
+ conn: socket.socket
+ addr: Any
+ received_lines: List[Text]
+ smtp_state: int
+ seen_greeting: str
+ mailfrom: str
+ rcpttos: List[str]
+ received_data: str
+ fqdn: str
+ peer: str
+
+ command_size_limit: int
+ data_size_limit: int
+
+ if sys.version_info >= (3, 5):
+ enable_SMTPUTF8: bool
+
+ if sys.version_info >= (3, 3):
+ @property
+ def max_command_size_limit(self) -> int: ...
+
+ if sys.version_info >= (3, 5):
+ def __init__(self, server: SMTPServer, conn: socket.socket, addr: Any, data_size_limit: int = ...,
+ map: Optional[asyncore._maptype] = ..., enable_SMTPUTF8: bool = ..., decode_data: bool = ...) -> None: ...
+ elif sys.version_info >= (3, 4):
+ def __init__(self, server: SMTPServer, conn: socket.socket, addr: Any, data_size_limit: int = ...,
+ map: Optional[asyncore._maptype] = ...) -> None: ...
+ else:
+ def __init__(self, server: SMTPServer, conn: socket.socket, addr: Any, data_size_limit: int = ...) -> None: ...
+ def push(self, msg: Text) -> None: ...
+ def collect_incoming_data(self, data: bytes) -> None: ...
+ def found_terminator(self) -> None: ...
+ def smtp_HELO(self, arg: str) -> None: ...
+ def smtp_NOOP(self, arg: str) -> None: ...
+ def smtp_QUIT(self, arg: str) -> None: ...
+ def smtp_MAIL(self, arg: str) -> None: ...
+ def smtp_RCPT(self, arg: str) -> None: ...
+ def smtp_RSET(self, arg: str) -> None: ...
+ def smtp_DATA(self, arg: str) -> None: ...
+ if sys.version_info >= (3, 3):
+ def smtp_EHLO(self, arg: str) -> None: ...
+ def smtp_HELP(self, arg: str) -> None: ...
+ def smtp_VRFY(self, arg: str) -> None: ...
+ def smtp_EXPN(self, arg: str) -> None: ...
+
+class SMTPServer(asyncore.dispatcher):
+ channel_class: Type[SMTPChannel]
+
+ data_size_limit: int
+ enable_SMTPUTF8: bool
+
+ if sys.version_info >= (3, 5):
+ def __init__(self, localaddr: _Address, remoteaddr: _Address,
+ data_size_limit: int = ..., map: Optional[asyncore._maptype] = ...,
+ enable_SMTPUTF8: bool = ..., decode_data: bool = ...) -> None: ...
+ elif sys.version_info >= (3, 4):
+ def __init__(self, localaddr: _Address, remoteaddr: _Address,
+ data_size_limit: int = ..., map: Optional[asyncore._maptype] = ...) -> None: ...
+ else:
+ def __init__(self, localaddr: _Address, remoteaddr: _Address,
+ data_size_limit: int = ...) -> None: ...
+ def handle_accepted(self, conn: socket.socket, addr: Any) -> None: ...
+ def process_message(self, peer: _Address, mailfrom: str, rcpttos: List[Text], data: str, **kwargs: Any) -> Optional[str]: ...
+
+class DebuggingServer(SMTPServer): ...
+class PureProxy(SMTPServer): ...
+class MailmanProxy(PureProxy): ...
diff --git a/typeshed/stdlib/2and3/sndhdr.pyi b/typeshed/stdlib/2and3/sndhdr.pyi
new file mode 100644
index 0000000..aecd70b
--- /dev/null
+++ b/typeshed/stdlib/2and3/sndhdr.pyi
@@ -0,0 +1,25 @@
+# Stubs for sndhdr (Python 2 and 3)
+
+import os
+import sys
+from typing import Any, NamedTuple, Optional, Tuple, Union
+
+if sys.version_info >= (3, 5):
+ SndHeaders = NamedTuple('SndHeaders', [
+ ('filetype', str),
+ ('framerate', int),
+ ('nchannels', int),
+ ('nframes', int),
+ ('sampwidth', Union[int, str]),
+ ])
+ _SndHeaders = SndHeaders
+else:
+ _SndHeaders = Tuple[str, int, int, int, Union[int, str]]
+
+if sys.version_info >= (3, 6):
+ _Path = Union[str, bytes, os.PathLike[Any]]
+else:
+ _Path = Union[str, bytes]
+
+def what(filename: _Path) -> Optional[_SndHeaders]: ...
+def whathdr(filename: _Path) -> Optional[_SndHeaders]: ...
diff --git a/typeshed/stdlib/3/socket.pyi b/typeshed/stdlib/2and3/socket.pyi
similarity index 76%
rename from typeshed/stdlib/3/socket.pyi
rename to typeshed/stdlib/2and3/socket.pyi
index 4915d8a..93052b8 100644
--- a/typeshed/stdlib/3/socket.pyi
+++ b/typeshed/stdlib/2and3/socket.pyi
@@ -4,7 +4,8 @@
# based on: http://docs.python.org/3.2/library/socket.html
# see: http://hg.python.org/cpython/file/3d0686d90f55/Lib/socket.py
# see: http://nullege.com/codes/search/socket
-
+# adapted for Python 2.7 by Michal Pokorny
+import sys
from typing import Any, Tuple, List, Optional, Union, overload
# ----- variables and constants -----
@@ -21,7 +22,7 @@ SOCK_CLOEXEC = 0
SOCK_NONBLOCK = 0
SOMAXCONN = 0
has_ipv6 = False
-_GLOBAL_DEFAULT_TIMEOUT = 0.0
+_GLOBAL_DEFAULT_TIMEOUT = ... # type: Any
SocketType = ... # type: Any
SocketIO = ... # type: Any
@@ -250,6 +251,73 @@ TIPC_WITHDRAWN = 0
TIPC_ZONE_SCOPE = 0
+# enum versions of above flags py 3.4+
+if sys.version_info >= (3, 4):
+ from enum import IntEnum
+
+ class AddressFamily(IntEnum):
+ AF_UNIX = ...
+ AF_INET = ...
+ AF_INET6 = ...
+ AF_APPLETALK = ...
+ AF_ASH = ...
+ AF_ATMPVC = ...
+ AF_ATMSVC = ...
+ AF_AX25 = ...
+ AF_BLUETOOTH = ...
+ AF_BRIDGE = ...
+ AF_DECnet = ...
+ AF_ECONET = ...
+ AF_IPX = ...
+ AF_IRDA = ...
+ AF_KEY = ...
+ AF_LLC = ...
+ AF_NETBEUI = ...
+ AF_NETLINK = ...
+ AF_NETROM = ...
+ AF_PACKET = ...
+ AF_PPPOX = ...
+ AF_ROSE = ...
+ AF_ROUTE = ...
+ AF_SECURITY = ...
+ AF_SNA = ...
+ AF_TIPC = ...
+ AF_UNSPEC = ...
+ AF_WANPIPE = ...
+ AF_X25 = ...
+
+ class SocketKind(IntEnum):
+ SOCK_STREAM = ...
+ SOCK_DGRAM = ...
+ SOCK_RAW = ...
+ SOCK_RDM = ...
+ SOCK_SEQPACKET = ...
+ SOCK_CLOEXEC = ...
+ SOCK_NONBLOCK = ...
+
+if sys.version_info >= (3, 6):
+ from enum import IntFlag
+
+ class AddressInfo(IntFlag):
+ AI_ADDRCONFIG = ...
+ AI_ALL = ...
+ AI_CANONNAME = ...
+ AI_NUMERICHOST = ...
+ AI_NUMERICSERV = ...
+ AI_PASSIVE = ...
+ AI_V4MAPPED = ...
+
+ class MsgFlag(IntFlag):
+ MSG_CTRUNC = ...
+ MSG_DONTROUTE = ...
+ MSG_DONTWAIT = ...
+ MSG_EOR = ...
+ MSG_OOB = ...
+ MSG_PEEK = ...
+ MSG_TRUNC = ...
+ MSG_WAITALL = ...
+
+
# ----- exceptions -----
class error(IOError):
...
@@ -276,8 +344,12 @@ class socket:
type = 0
proto = 0
- def __init__(self, family: int = ..., type: int = ...,
- proto: int = ..., fileno: Optional[int] = ...) -> None: ...
+ if sys.version_info < (3,):
+ def __init__(self, family: int = ..., type: int = ...,
+ proto: int = ...) -> None: ...
+ else:
+ def __init__(self, family: int = ..., type: int = ...,
+ proto: int = ..., fileno: Optional[int] = ...) -> None: ...
# --- methods ---
# second tuple item is an address
@@ -318,7 +390,10 @@ class socket:
def send(self, data: bytes, flags: int = ...) -> int: ...
def sendall(self, data: bytes, flags: int =...) -> None:
... # return type: None on success
- def sendto(self, data: bytes, address: Union[tuple, str], flags: int = ...) -> int: ...
+ @overload
+ def sendto(self, data: bytes, address: Union[tuple, str]) -> int: ...
+ @overload
+ def sendto(self, data: bytes, flags: int, address: Union[tuple, str]) -> int: ...
def setblocking(self, flag: bool) -> None: ...
def settimeout(self, value: Union[float, None]) -> None: ...
def setsockopt(self, level: int, optname: int, value: Union[int, bytes]) -> None: ...
@@ -331,9 +406,12 @@ def create_connection(address: Tuple[str, int],
source_address: Tuple[str, int] = ...) -> socket: ...
# the 5th tuple item is an address
+# TODO the "Tuple[Any, ...]" should be "Union[Tuple[str, int], Tuple[str, int, int, int]]" but that triggers
+# https://github.com/python/mypy/issues/2509
def getaddrinfo(
- host: str, port: int, family: int = ..., type: int = ..., proto: int = ...,
- flags: int = ...) -> List[Tuple[int, int, int, str, tuple]]:
+ host: Optional[str], port: Union[str, int, None], family: int = ...,
+ socktype: int = ..., proto: int = ...,
+ flags: int = ...) -> List[Tuple[int, int, int, str, Tuple[Any, ...]]]:
...
def getfqdn(name: str = ...) -> str: ...
@@ -357,5 +435,5 @@ def inet_aton(ip_string: str) -> bytes: ... # ret val 4 bytes in length
def inet_ntoa(packed_ip: bytes) -> str: ...
def inet_pton(address_family: int, ip_string: str) -> bytes: ...
def inet_ntop(address_family: int, packed_ip: bytes) -> str: ...
-def getdefaulttimeout() -> Union[float, None]: ...
-def setdefaulttimeout(timeout: float) -> None: ...
+def getdefaulttimeout() -> Optional[float]: ...
+def setdefaulttimeout(timeout: Optional[float]) -> None: ...
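
The getaddrinfo() signature above now matches the runtime behaviour: host may be None, port may be a service name, a port number, or None, and every result is a 5-tuple ending in an address tuple (left as Tuple[Any, ...] because of the mypy issue referenced in the TODO). A short usage sketch:

import socket

for family, socktype, proto, canonname, sockaddr in socket.getaddrinfo(
        None, "http", proto=socket.IPPROTO_TCP):
    print(family, socktype, sockaddr)
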
diff --git a/typeshed/stdlib/2/spwd.pyi b/typeshed/stdlib/2and3/spwd.pyi
similarity index 79%
rename from typeshed/stdlib/2/spwd.pyi
rename to typeshed/stdlib/2and3/spwd.pyi
index ee09838..ca77abd 100644
--- a/typeshed/stdlib/2/spwd.pyi
+++ b/typeshed/stdlib/2and3/spwd.pyi
@@ -11,5 +11,5 @@ class struct_spwd(object):
sp_expire = ... # type: int
sp_flag = ... # type: int
-def getspall() -> List[struct_spwd]: pass
-def getspnam() -> struct_spwd: pass
+def getspall() -> List[struct_spwd]: ...
+def getspnam(name: str) -> struct_spwd: ...
diff --git a/typeshed/stdlib/2and3/stringprep.pyi b/typeshed/stdlib/2and3/stringprep.pyi
new file mode 100644
index 0000000..e3b7e9d
--- /dev/null
+++ b/typeshed/stdlib/2and3/stringprep.pyi
@@ -0,0 +1,23 @@
+# Stubs for stringprep (Python 2 and 3)
+
+from typing import Text
+
+def in_table_a1(code: Text) -> bool: ...
+def in_table_b1(code: Text) -> bool: ...
+def map_table_b3(code: Text) -> Text: ...
+def map_table_b2(a: Text) -> Text: ...
+def in_table_c11(code: Text) -> bool: ...
+def in_table_c12(code: Text) -> bool: ...
+def in_table_c11_c12(code: Text) -> bool: ...
+def in_table_c21(code: Text) -> bool: ...
+def in_table_c22(code: Text) -> bool: ...
+def in_table_c21_c22(code: Text) -> bool: ...
+def in_table_c3(code: Text) -> bool: ...
+def in_table_c4(code: Text) -> bool: ...
+def in_table_c5(code: Text) -> bool: ...
+def in_table_c6(code: Text) -> bool: ...
+def in_table_c7(code: Text) -> bool: ...
+def in_table_c8(code: Text) -> bool: ...
+def in_table_c9(code: Text) -> bool: ...
+def in_table_d1(code: Text) -> bool: ...
+def in_table_d2(code: Text) -> bool: ...
diff --git a/typeshed/stdlib/2and3/struct.pyi b/typeshed/stdlib/2and3/struct.pyi
new file mode 100644
index 0000000..be4474e
--- /dev/null
+++ b/typeshed/stdlib/2and3/struct.pyi
@@ -0,0 +1,40 @@
+# Stubs for struct
+
+# Based on http://docs.python.org/3.2/library/struct.html
+# Based on http://docs.python.org/2/library/struct.html
+
+import sys
+from typing import Any, Tuple, Text, Union, Iterator
+from array import array
+
+class error(Exception): ...
+
+_FmtType = Union[bytes, Text]
+if sys.version_info >= (3,):
+ _BufferType = Union[bytes, bytearray, memoryview]
+ _WriteBufferType = Union[array, bytearray, memoryview]
+else:
+ _BufferType = Union[bytes, bytearray, buffer, memoryview]
+ _WriteBufferType = Union[array[Any], bytearray, buffer, memoryview]
+
+def pack(fmt: _FmtType, *v: Any) -> bytes: ...
+def pack_into(fmt: _FmtType, buffer: _WriteBufferType, offset: int, *v: Any) -> None: ...
+def unpack(fmt: _FmtType, buffer: _BufferType) -> Tuple[Any, ...]: ...
+def unpack_from(fmt: _FmtType, buffer: _BufferType, offset: int = ...) -> Tuple[Any, ...]: ...
+if sys.version_info >= (3, 4):
+ def iter_unpack(fmt: _FmtType, buffer: _BufferType) -> Iterator[Tuple[Any, ...]]: ...
+
+def calcsize(fmt: _FmtType) -> int: ...
+
+class Struct:
+ format = ... # type: bytes
+ size = ... # type: int
+
+ def __init__(self, format: _FmtType) -> None: ...
+
+ def pack(self, *v: Any) -> bytes: ...
+ def pack_into(self, buffer: _WriteBufferType, offset: int, *v: Any) -> None: ...
+ def unpack(self, buffer: _BufferType) -> Tuple[Any, ...]: ...
+ def unpack_from(self, buffer: _BufferType, offset: int = ...) -> Tuple[Any, ...]: ...
+ if sys.version_info >= (3, 4):
+ def iter_unpack(self, buffer: _BufferType) -> Iterator[Tuple[Any, ...]]: ...
diff --git a/typeshed/stdlib/2and3/sunau.pyi b/typeshed/stdlib/2and3/sunau.pyi
new file mode 100644
index 0000000..920a0b0
--- /dev/null
+++ b/typeshed/stdlib/2and3/sunau.pyi
@@ -0,0 +1,88 @@
+# Stubs for sunau (Python 2 and 3)
+
+import sys
+from mypy_extensions import NoReturn
+from typing import Any, NamedTuple, Optional, Text, IO, Union, Tuple
+
+_File = Union[Text, IO[bytes]]
+
+class Error(Exception): ...
+
+AUDIO_FILE_MAGIC = ... # type: int
+AUDIO_FILE_ENCODING_MULAW_8 = ... # type: int
+AUDIO_FILE_ENCODING_LINEAR_8 = ... # type: int
+AUDIO_FILE_ENCODING_LINEAR_16 = ... # type: int
+AUDIO_FILE_ENCODING_LINEAR_24 = ... # type: int
+AUDIO_FILE_ENCODING_LINEAR_32 = ... # type: int
+AUDIO_FILE_ENCODING_FLOAT = ... # type: int
+AUDIO_FILE_ENCODING_DOUBLE = ... # type: int
+AUDIO_FILE_ENCODING_ADPCM_G721 = ... # type: int
+AUDIO_FILE_ENCODING_ADPCM_G722 = ... # type: int
+AUDIO_FILE_ENCODING_ADPCM_G723_3 = ... # type: int
+AUDIO_FILE_ENCODING_ADPCM_G723_5 = ... # type: int
+AUDIO_FILE_ENCODING_ALAW_8 = ... # type: int
+AUDIO_UNKNOWN_SIZE = ... # type: int
+
+if sys.version_info < (3, 0):
+ _sunau_params = Tuple[int, int, int, int, str, str]
+else:
+ _sunau_params = NamedTuple('_sunau_params', [
+ ('nchannels', int),
+ ('sampwidth', int),
+ ('framerate', int),
+ ('nframes', int),
+ ('comptype', str),
+ ('compname', str),
+ ])
+
+class Au_read:
+ def __init__(self, f: _File) -> None: ...
+ if sys.version_info >= (3, 3):
+ def __enter__(self) -> Au_read: ...
+ def __exit__(self, *args: Any) -> None: ...
+ def getfp(self) -> Optional[IO[bytes]]: ...
+ def rewind(self) -> None: ...
+ def close(self) -> None: ...
+ def tell(self) -> int: ...
+ def getnchannels(self) -> int: ...
+ def getnframes(self) -> int: ...
+ def getsampwidth(self) -> int: ...
+ def getframerate(self) -> int: ...
+ def getcomptype(self) -> str: ...
+ def getcompname(self) -> str: ...
+ def getparams(self) -> _sunau_params: ...
+ def getmarkers(self) -> None: ...
+ def getmark(self, id: Any) -> NoReturn: ...
+ def setpos(self, pos: int) -> None: ...
+ def readframes(self, nframes: int) -> Optional[bytes]: ...
+
+class Au_write:
+ def __init__(self, f: _File) -> None: ...
+ if sys.version_info >= (3, 3):
+ def __enter__(self) -> Au_write: ...
+ def __exit__(self, *args: Any) -> None: ...
+ def setnchannels(self, nchannels: int) -> None: ...
+ def getnchannels(self) -> int: ...
+ def setsampwidth(self, sampwidth: int) -> None: ...
+ def getsampwidth(self) -> int: ...
+ def setframerate(self, framerate: float) -> None: ...
+ def getframerate(self) -> int: ...
+ def setnframes(self, nframes: int) -> None: ...
+ def getnframes(self) -> int: ...
+ def setcomptype(self, comptype: str, compname: str) -> None: ...
+ def getcomptype(self) -> str: ...
+ def getcompname(self) -> str: ...
+ def setparams(self, params: _sunau_params) -> None: ...
+ def getparams(self) -> _sunau_params: ...
+ def setmark(self, id: Any, pos: Any, name: Any) -> NoReturn: ...
+ def getmark(self, id: Any) -> NoReturn: ...
+ def getmarkers(self) -> None: ...
+ def tell(self) -> int: ...
+ # should be any bytes-like object after 3.4, but we don't have a type for that
+ def writeframesraw(self, data: bytes) -> None: ...
+ def writeframes(self, data: bytes) -> None: ...
+ def close(self) -> None: ...
+
+# Returns a Au_read if mode is rb and Au_write if mode is wb
+def open(f: _File, mode: Optional[str] = ...) -> Any: ...
+openfp = open
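
Because the concrete class returned by sunau.open() depends on the mode string, the stub above falls back to Any; callers who want checked attribute access can narrow the result themselves. A small sketch that writes and re-reads a one-second silent file (the filename example.au is illustrative):

import sunau
from typing import cast

writer = cast(sunau.Au_write, sunau.open("example.au", "wb"))
writer.setnchannels(1)
writer.setsampwidth(2)
writer.setframerate(8000)
writer.writeframes(b"\x00\x00" * 8000)
writer.close()

reader = cast(sunau.Au_read, sunau.open("example.au", "rb"))
print(reader.getparams())
reader.close()
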
diff --git a/typeshed/stdlib/2and3/symtable.pyi b/typeshed/stdlib/2and3/symtable.pyi
new file mode 100644
index 0000000..9087263
--- /dev/null
+++ b/typeshed/stdlib/2and3/symtable.pyi
@@ -0,0 +1,45 @@
+# Stubs for symtable (Python 2 and 3)
+
+import sys
+from typing import List, Sequence, Tuple
+
+class SymbolTable(object):
+ def get_type(self) -> str: ...
+ def get_id(self) -> int: ...
+ def get_name(self) -> str: ...
+ def get_lineno(self) -> int: ...
+ def is_optimized(self) -> bool: ...
+ def is_nested(self) -> bool: ...
+ def has_children(self) -> bool: ...
+ def has_exec(self) -> bool: ...
+ if sys.version_info < (3, 0):
+ def has_import_star(self) -> bool: ...
+ def get_identifiers(self) -> Sequence[str]: ...
+ def lookup(self, name: str) -> Symbol: ...
+ def get_symbols(self) -> List[Symbol]: ...
+ def get_children(self) -> List[SymbolTable]: ...
+
+class Function(SymbolTable):
+ def get_parameters(self) -> Tuple[str, ...]: ...
+ def get_locals(self) -> Tuple[str, ...]: ...
+ def get_globals(self) -> Tuple[str, ...]: ...
+ def get_frees(self) -> Tuple[str, ...]: ...
+
+class Class(SymbolTable):
+ def get_methods(self) -> Tuple[str, ...]: ...
+
+class Symbol(object):
+ def get_name(self) -> str: ...
+ def is_referenced(self) -> bool: ...
+ def is_parameter(self) -> bool: ...
+ def is_global(self) -> bool: ...
+ def is_declared_global(self) -> bool: ...
+ def is_local(self) -> bool: ...
+ if sys.version_info >= (3, 6):
+ def is_annotated(self) -> bool: ...
+ def is_free(self) -> bool: ...
+ def is_imported(self) -> bool: ...
+ def is_assigned(self) -> bool: ...
+ def is_namespace(self) -> bool: ...
+ def get_namespaces(self) -> Sequence[SymbolTable]: ...
+ def get_namespace(self) -> SymbolTable: ...
diff --git a/typeshed/stdlib/2and3/sysconfig.pyi b/typeshed/stdlib/2and3/sysconfig.pyi
new file mode 100644
index 0000000..09fd946
--- /dev/null
+++ b/typeshed/stdlib/2and3/sysconfig.pyi
@@ -0,0 +1,19 @@
+# Stubs for sysconfig
+
+from typing import overload, Any, Dict, IO, List, Optional, Tuple, Union
+
+@overload
+def get_config_vars(*args: str) -> List[Any]: ...
+@overload
+def get_config_vars() -> Dict[str, Any]: ...
+def get_config_var(name: str) -> Optional[str]: ...
+def get_scheme_names() -> Tuple[str, ...]: ...
+def get_path_names() -> Tuple[str, ...]: ...
+def get_path(name: str, scheme: str = ..., vars: Optional[Dict[str, Any]] = ..., expand: bool = ...) -> Optional[str]: ...
+def get_paths(scheme: str = ..., vars: Optional[Dict[str, Any]] = ..., expand: bool = ...) -> Dict[str, str]: ...
+def get_python_version() -> str: ...
+def get_platform() -> str: ...
+def is_python_build() -> bool: ...
+def parse_config_h(fp: IO[Any], vars: Optional[Dict[str, Any]]) -> Dict[str, Any]: ...
+def get_config_h_filename() -> str: ...
+def get_makefile_filename() -> str: ...
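
The two get_config_vars() overloads above encode a value-dependent return type: called with no arguments it returns the full Dict[str, Any] of configuration variables, while called with names it returns a List[Any] of just those values. A short usage sketch:

import sysconfig

all_vars = sysconfig.get_config_vars()                 # Dict[str, Any]
prefix, version = sysconfig.get_config_vars("prefix", "py_version_short")
print(len(all_vars), prefix, version)
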
diff --git a/typeshed/stdlib/2and3/tabnanny.pyi b/typeshed/stdlib/2and3/tabnanny.pyi
new file mode 100644
index 0000000..5209ef8
--- /dev/null
+++ b/typeshed/stdlib/2and3/tabnanny.pyi
@@ -0,0 +1,22 @@
+# Stubs for tabnanny (Python 2 and 3)
+
+import os
+import sys
+from typing import Iterable, Tuple, Union
+
+if sys.version_info >= (3, 6):
+ _Path = Union[str, bytes, os.PathLike]
+else:
+ _Path = Union[str, bytes]
+
+verbose = ... # type: int
+filename_only = ... # type: int
+
+class NannyNag(Exception):
+ def __init__(self, lineno: int, msg: str, line: str) -> None: ...
+ def get_lineno(self) -> int: ...
+ def get_msg(self) -> str: ...
+ def get_line(self) -> str: ...
+
+def check(file: _Path) -> None: ...
+def process_tokens(tokens: Iterable[Tuple[int, str, Tuple[int, int], Tuple[int, int], str]]) -> None: ...
diff --git a/typeshed/stdlib/2and3/tarfile.pyi b/typeshed/stdlib/2and3/tarfile.pyi
index 32a7ca4..c164029 100644
--- a/typeshed/stdlib/2and3/tarfile.pyi
+++ b/typeshed/stdlib/2and3/tarfile.pyi
@@ -38,7 +38,8 @@ def open(name: Optional[str] = ..., mode: str = ...,
encoding: Optional[str] = ..., errors: str = ...,
pax_headers: Optional[Mapping[str, str]] = ...,
debug: Optional[int] = ...,
- errorlevel: Optional[int] = ...) -> TarFile: ...
+ errorlevel: Optional[int] = ...,
+ compresslevel: Optional[int] = ...) -> TarFile: ...
class TarFile(Iterable[TarInfo]):
@@ -64,7 +65,8 @@ class TarFile(Iterable[TarInfo]):
encoding: Optional[str] = ..., errors: str = ...,
pax_headers: Optional[Mapping[str, str]] = ...,
debug: Optional[int] = ...,
- errorlevel: Optional[int] = ...) -> None: ...
+ errorlevel: Optional[int] = ...,
+ compresslevel: Optional[int] = ...) -> None: ...
def __enter__(self) -> TarFile: ...
def __exit__(self,
exc_type: Optional[Type[BaseException]],
diff --git a/typeshed/stdlib/2and3/telnetlib.pyi b/typeshed/stdlib/2and3/telnetlib.pyi
new file mode 100644
index 0000000..e4e5dea
--- /dev/null
+++ b/typeshed/stdlib/2and3/telnetlib.pyi
@@ -0,0 +1,115 @@
+# Stubs for telnetlib (Python 2 and 3)
+
+import socket
+import sys
+from typing import Any, Callable, Match, Optional, Pattern, Sequence, Tuple, Union
+
+DEBUGLEVEL = ... # type: int
+TELNET_PORT = ... # type: int
+
+IAC = ... # type: bytes
+DONT = ... # type: bytes
+DO = ... # type: bytes
+WONT = ... # type: bytes
+WILL = ... # type: bytes
+theNULL = ... # type: bytes
+
+SE = ... # type: bytes
+NOP = ... # type: bytes
+DM = ... # type: bytes
+BRK = ... # type: bytes
+IP = ... # type: bytes
+AO = ... # type: bytes
+AYT = ... # type: bytes
+EC = ... # type: bytes
+EL = ... # type: bytes
+GA = ... # type: bytes
+SB = ... # type: bytes
+
+BINARY = ... # type: bytes
+ECHO = ... # type: bytes
+RCP = ... # type: bytes
+SGA = ... # type: bytes
+NAMS = ... # type: bytes
+STATUS = ... # type: bytes
+TM = ... # type: bytes
+RCTE = ... # type: bytes
+NAOL = ... # type: bytes
+NAOP = ... # type: bytes
+NAOCRD = ... # type: bytes
+NAOHTS = ... # type: bytes
+NAOHTD = ... # type: bytes
+NAOFFD = ... # type: bytes
+NAOVTS = ... # type: bytes
+NAOVTD = ... # type: bytes
+NAOLFD = ... # type: bytes
+XASCII = ... # type: bytes
+LOGOUT = ... # type: bytes
+BM = ... # type: bytes
+DET = ... # type: bytes
+SUPDUP = ... # type: bytes
+SUPDUPOUTPUT = ... # type: bytes
+SNDLOC = ... # type: bytes
+TTYPE = ... # type: bytes
+EOR = ... # type: bytes
+TUID = ... # type: bytes
+OUTMRK = ... # type: bytes
+TTYLOC = ... # type: bytes
+VT3270REGIME = ... # type: bytes
+X3PAD = ... # type: bytes
+NAWS = ... # type: bytes
+TSPEED = ... # type: bytes
+LFLOW = ... # type: bytes
+LINEMODE = ... # type: bytes
+XDISPLOC = ... # type: bytes
+OLD_ENVIRON = ... # type: bytes
+AUTHENTICATION = ... # type: bytes
+ENCRYPT = ... # type: bytes
+NEW_ENVIRON = ... # type: bytes
+
+TN3270E = ... # type: bytes
+XAUTH = ... # type: bytes
+CHARSET = ... # type: bytes
+RSP = ... # type: bytes
+COM_PORT_OPTION = ... # type: bytes
+SUPPRESS_LOCAL_ECHO = ... # type: bytes
+TLS = ... # type: bytes
+KERMIT = ... # type: bytes
+SEND_URL = ... # type: bytes
+FORWARD_X = ... # type: bytes
+PRAGMA_LOGON = ... # type: bytes
+SSPI_LOGON = ... # type: bytes
+PRAGMA_HEARTBEAT = ... # type: bytes
+EXOPL = ... # type: bytes
+NOOPT = ... # type: bytes
+
+class Telnet:
+ def __init__(self, host: Optional[str] = ..., port: int = ...,
+ timeout: int = ...) -> None: ...
+ def open(self, host: str, port: int = ..., timeout: int = ...) -> None: ...
+ def msg(self, msg: str, *args: Any) -> None: ...
+ def set_debuglevel(self, debuglevel: int) -> None: ...
+ def close(self) -> None: ...
+ def get_socket(self) -> socket.socket: ...
+ def fileno(self) -> int: ...
+ def write(self, buffer: bytes) -> None: ...
+ def read_until(self, match: bytes, timeout: Optional[int] = ...) -> bytes: ...
+ def read_all(self) -> bytes: ...
+ def read_some(self) -> bytes: ...
+ def read_very_eager(self) -> bytes: ...
+ def read_eager(self) -> bytes: ...
+ def read_lazy(self) -> bytes: ...
+ def read_very_lazy(self) -> bytes: ...
+ def read_sb_data(self) -> bytes: ...
+ def set_option_negotiation_callback(self, callback: Optional[Callable[[socket.socket, bytes, bytes], Any]]) -> None: ...
+ def process_rawq(self) -> None: ...
+ def rawq_getchar(self) -> bytes: ...
+ def fill_rawq(self) -> None: ...
+ def sock_avail(self) -> bool: ...
+ def interact(self) -> None: ...
+ def mt_interact(self) -> None: ...
+ def listener(self) -> None: ...
+ def expect(self, list: Sequence[Union[Pattern[bytes], bytes]], timeout: Optional[int] = ...) -> Tuple[int, Optional[Match[bytes]], bytes]: ...
+ if sys.version_info >= (3, 6):
+ def __enter__(self) -> Telnet: ...
+ def __exit__(self, type: Any, value: Any, traceback: Any) -> None: ...
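(For illustration, not part of the patch: the Telnet class above works purely in bytes. The host name here is a placeholder.)

    import telnetlib

    tn = telnetlib.Telnet('telnet.example.org', 23, timeout=10)
    try:
        tn.read_until(b'login: ', timeout=5)                    # -> bytes
        tn.write(b'guest\n')                                    # write() takes bytes
        index, match, text = tn.expect([b'Password: '], timeout=5)
    finally:
        tn.close()
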
diff --git a/typeshed/stdlib/2and3/timeit.pyi b/typeshed/stdlib/2and3/timeit.pyi
new file mode 100644
index 0000000..4fbb27c
--- /dev/null
+++ b/typeshed/stdlib/2and3/timeit.pyi
@@ -0,0 +1,33 @@
+# Stubs for timeit (Python 2 and 3)
+
+import sys
+from typing import Any, Callable, Dict, IO, List, Optional, Text, Tuple, Union
+
+_str = Union[str, Text]
+_Timer = Callable[[], float]
+_stmt = Union[_str, Callable[[], Any]]
+
+default_timer = ... # type: _Timer
+
+class Timer:
+ if sys.version_info >= (3, 5):
+ def __init__(self, stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ...,
+ globals: Optional[Dict[str, Any]] =...) -> None: ...
+ else:
+ def __init__(self, stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ...) -> None: ...
+ def print_exc(self, file: Optional[IO[str]] = ...) -> None: ...
+ def timeit(self, number: int = ...) -> float: ...
+ def repeat(self, repeat: int = ..., number: int = ...) -> List[float]: ...
+ if sys.version_info >= (3, 6):
+ def autorange(self, callback: Optional[Callable[[int, float], Any]] = ...) -> Tuple[int, float]: ...
+
+if sys.version_info >= (3, 5):
+ def timeit(stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ...,
+ number: int = ..., globals: Optional[Dict[str, Any]] =...) -> float: ...
+ def repeat(stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ...,
+ repeat: int = ..., number: int = ..., globals: Optional[Dict[str, Any]] =...) -> List[float]: ...
+else:
+ def timeit(stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ...,
+ number: int = ...) -> float: ...
+ def repeat(stmt: _stmt = ..., setup: _stmt = ..., timer: _Timer = ...,
+ repeat: int = ..., number: int = ...) -> List[float]: ...
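(For illustration, not part of the patch: _stmt above is a Union, so both source strings and zero-argument callables are accepted.)

    import timeit

    # callable statement
    print(timeit.timeit(lambda: sum(range(1000)), number=1000))

    t = timeit.Timer("sum(data)", setup="data = list(range(1000))")
    print(min(t.repeat(repeat=3, number=1000)))          # repeat() -> List[float]
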
diff --git a/typeshed/stdlib/2and3/token.pyi b/typeshed/stdlib/2and3/token.pyi
new file mode 100644
index 0000000..8d044bd
--- /dev/null
+++ b/typeshed/stdlib/2and3/token.pyi
@@ -0,0 +1,71 @@
+import sys
+from typing import Dict
+
+ENDMARKER = ... # type: int
+NAME = ... # type: int
+NUMBER = ... # type: int
+STRING = ... # type: int
+NEWLINE = ... # type: int
+INDENT = ... # type: int
+DEDENT = ... # type: int
+LPAR = ... # type: int
+RPAR = ... # type: int
+LSQB = ... # type: int
+RSQB = ... # type: int
+COLON = ... # type: int
+COMMA = ... # type: int
+SEMI = ... # type: int
+PLUS = ... # type: int
+MINUS = ... # type: int
+STAR = ... # type: int
+SLASH = ... # type: int
+VBAR = ... # type: int
+AMPER = ... # type: int
+LESS = ... # type: int
+GREATER = ... # type: int
+EQUAL = ... # type: int
+DOT = ... # type: int
+PERCENT = ... # type: int
+if sys.version_info < (3,):
+ BACKQUOTE = ... # type: int
+LBRACE = ... # type: int
+RBRACE = ... # type: int
+EQEQUAL = ... # type: int
+NOTEQUAL = ... # type: int
+LESSEQUAL = ... # type: int
+GREATEREQUAL = ... # type: int
+TILDE = ... # type: int
+CIRCUMFLEX = ... # type: int
+LEFTSHIFT = ... # type: int
+RIGHTSHIFT = ... # type: int
+DOUBLESTAR = ... # type: int
+PLUSEQUAL = ... # type: int
+MINEQUAL = ... # type: int
+STAREQUAL = ... # type: int
+SLASHEQUAL = ... # type: int
+PERCENTEQUAL = ... # type: int
+AMPEREQUAL = ... # type: int
+VBAREQUAL = ... # type: int
+CIRCUMFLEXEQUAL = ... # type: int
+LEFTSHIFTEQUAL = ... # type: int
+RIGHTSHIFTEQUAL = ... # type: int
+DOUBLESTAREQUAL = ... # type: int
+DOUBLESLASH = ... # type: int
+DOUBLESLASHEQUAL = ... # type: int
+AT = ... # type: int
+if sys.version_info >= (3,):
+ RARROW = ... # type: int
+ ELLIPSIS = ... # type: int
+if sys.version_info >= (3, 5):
+ ATEQUAL = ... # type: int
+ AWAIT = ... # type: int
+ ASYNC = ... # type: int
+OP = ... # type: int
+ERRORTOKEN = ... # type: int
+N_TOKENS = ... # type: int
+NT_OFFSET = ... # type: int
+tok_name = ... # type: Dict[int, str]
+
+def ISTERMINAL(x: int) -> bool: ...
+def ISNONTERMINAL(x: int) -> bool: ...
+def ISEOF(x: int) -> bool: ...
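(For illustration, not part of the patch: the constants above are plain ints keyed into tok_name.)

    import token

    print(token.tok_name[token.NAME])        # 'NAME'
    print(token.ISTERMINAL(token.NUMBER))    # True
    print(token.ISEOF(token.ENDMARKER))      # True
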
diff --git a/typeshed/stdlib/2and3/trace.pyi b/typeshed/stdlib/2and3/trace.pyi
new file mode 100644
index 0000000..af06d39
--- /dev/null
+++ b/typeshed/stdlib/2and3/trace.pyi
@@ -0,0 +1,35 @@
+# Stubs for trace (Python 2 and 3)
+
+import os
+import sys
+import types
+from typing import Any, Callable, Mapping, Optional, Sequence, Text, Tuple, TypeVar, Union
+
+_T = TypeVar('_T')
+_localtrace = Callable[[types.FrameType, str, Any], Callable[..., Any]]
+
+if sys.version_info >= (3, 6):
+ _Path = Union[Text, os.PathLike]
+else:
+ _Path = Text
+
+class CoverageResults:
+ def update(self, other: CoverageResults) -> None: ...
+ def write_results(self, show_missing: bool = ..., summary: bool = ..., coverdir: Optional[_Path] = ...) -> None: ...
+ def write_results_file(self, path: _Path, lines: Sequence[str], lnotab: Any, lines_hit: Mapping[int, int], encoding: Optional[str] = ...) -> Tuple[int, int]: ...
+
+class Trace:
+ def __init__(self, count: int = ..., trace: int = ..., countfuncs: int = ..., countcallers: int = ...,
+ ignoremods: Sequence[str] = ..., ignoredirs: Sequence[str] = ..., infile: Optional[_Path] = ...,
+ outfile: Optional[_Path] = ..., timing: bool = ...) -> None: ...
+ def run(self, cmd: Union[str, types.CodeType]) -> None: ...
+ def runctx(self, cmd: Union[str, types.CodeType], globals: Optional[Mapping[str, Any]] = ..., locals: Optional[Mapping[str, Any]] = ...) -> None: ...
+ def runfunc(self, func: Callable[..., _T], *args: Any, **kw: Any) -> _T: ...
+ def file_module_function_of(self, frame: types.FrameType) -> Tuple[str, Optional[str], str]: ...
+ def globaltrace_trackcallers(self, frame: types.FrameType, why: str, arg: Any) -> None: ...
+ def globaltrace_countfuncs(self, frame: types.FrameType, why: str, arg: Any) -> None: ...
+ def globaltrace_lt(self, frame: types.FrameType, why: str, arg: Any) -> None: ...
+ def localtrace_trace_and_count(self, frame: types.FrameType, why: str, arg: Any) -> _localtrace: ...
+ def localtrace_trace(self, frame: types.FrameType, why: str, arg: Any) -> _localtrace: ...
+ def localtrace_count(self, frame: types.FrameType, why: str, arg: Any) -> _localtrace: ...
+ def results(self) -> CoverageResults: ...
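(For illustration, not part of the patch: runfunc() above is generic in _T, so a traced call keeps its return type.)

    import trace

    tracer = trace.Trace(count=1, trace=0)
    result = tracer.runfunc(sorted, [3, 1, 2])     # inferred as List[int]
    print(result)
    tracer.results().write_results(summary=True, coverdir='.')
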
diff --git a/typeshed/stdlib/2and3/traceback.pyi b/typeshed/stdlib/2and3/traceback.pyi
index ff34ad4..2b83b5d 100644
--- a/typeshed/stdlib/2and3/traceback.pyi
+++ b/typeshed/stdlib/2and3/traceback.pyi
@@ -7,7 +7,7 @@ import sys
_PT = Tuple[str, int, str, Optional[str]]
-def print_tb(tb: TracebackType, limit: Optional[int] = ...,
+def print_tb(tb: Optional[TracebackType], limit: Optional[int] = ...,
file: Optional[IO[str]] = ...) -> None: ...
if sys.version_info >= (3,):
def print_exception(etype: Type[BaseException], value: BaseException,
@@ -28,7 +28,7 @@ else:
file: Optional[IO[str]] = ...) -> None: ...
def print_stack(f: Optional[FrameType] = ..., limit: Optional[int] = ...,
file: Optional[IO[str]] = ...) -> None: ...
-def extract_tb(tb: TracebackType, limit: Optional[int] = ...) -> List[_PT]: ...
+def extract_tb(tb: Optional[TracebackType], limit: Optional[int] = ...) -> List[_PT]: ...
def extract_stack(f: Optional[FrameType] = ...,
limit: Optional[int] = ...) -> List[_PT]: ...
def format_list(extracted_list: List[_PT]) -> List[str]: ...
@@ -44,14 +44,14 @@ else:
tb: TracebackType,
limit: Optional[int] = ...) -> List[str]: ...
def format_exc(limit: Optional[int] = ...) -> str: ...
-def format_tb(tb: TracebackType, limit: Optional[int] = ...) -> List[str]: ...
+def format_tb(tb: Optional[TracebackType], limit: Optional[int] = ...) -> List[str]: ...
def format_stack(f: Optional[FrameType] = ...,
limit: Optional[int] = ...) -> List[str]: ...
if sys.version_info >= (3, 4):
def clear_frames(tb: TracebackType) -> None: ...
if sys.version_info >= (3, 5):
def walk_stack(f: Optional[FrameType]) -> Iterator[Tuple[FrameType, int]]: ...
- def walk_tb(tb: TracebackType) -> Iterator[Tuple[FrameType, int]]: ...
+ def walk_tb(tb: Optional[TracebackType]) -> Iterator[Tuple[FrameType, int]]: ...
if sys.version_info < (3,):
def tb_lineno(tb: TracebackType) -> int: ...
diff --git a/typeshed/stdlib/2and3/tty.pyi b/typeshed/stdlib/2and3/tty.pyi
new file mode 100644
index 0000000..3414835
--- /dev/null
+++ b/typeshed/stdlib/2and3/tty.pyi
@@ -0,0 +1,13 @@
+# Stubs for tty (Python 3.6)
+
+# XXX: Undocumented integer constants
+IFLAG = ... # type: int
+OFLAG = ... # type: int
+CFLAG = ... # type: int
+LFLAG = ... # type: int
+ISPEED = ... # type: int
+OSPEED = ... # type: int
+CC = ... # type: int
+
+def setraw(fd: int, when: int = ...) -> None: ...
+def setcbreak(fd: int, when: int = ...) -> None: ...
diff --git a/typeshed/stdlib/2and3/unicodedata.pyi b/typeshed/stdlib/2and3/unicodedata.pyi
new file mode 100644
index 0000000..6da12c3
--- /dev/null
+++ b/typeshed/stdlib/2and3/unicodedata.pyi
@@ -0,0 +1,38 @@
+# Stubs for unicodedata (Python 2.7 and 3.4)
+from typing import Any, Text, TypeVar, Union
+
+ucd_3_2_0 = ... # type: UCD
+ucnhash_CAPI = ... # type: Any
+unidata_version = ... # type: str
+
+_default = TypeVar('_default')
+
+def bidirectional(__chr: Text) -> Text: ...
+def category(__chr: Text) -> Text: ...
+def combining(__chr: Text) -> int: ...
+def decimal(__chr: Text, __default: _default=...) -> Union[int, _default]: ...
+def decomposition(__chr: Text) -> Text: ...
+def digit(__chr: Text, __default: _default=...) -> Union[int, _default]: ...
+def east_asian_width(__chr: Text) -> Text: ...
+def lookup(__name: Union[Text, bytes]) -> Text: ...
+def mirrored(__chr: Text) -> int: ...
+def name(__chr: Text, __default: _default=...) -> Union[Text, _default]: ...
+def normalize(__form: Text, __unistr: Text) -> Text: ...
+def numeric(__chr: Text, __default: _default=...) -> Union[float, _default]: ...
+
+class UCD(object):
+ # The methods below are constructed from the same array in C
+ # (unicodedata_functions) and hence identical to the methods above.
+ unidata_version = ... # type: str
+ def bidirectional(self, __chr: Text) -> str: ...
+ def category(self, __chr: Text) -> str: ...
+ def combining(self, __chr: Text) -> int: ...
+ def decimal(self, __chr: Text, __default: _default=...) -> Union[int, _default]: ...
+ def decomposition(self, __chr: Text) -> str: ...
+ def digit(self, __chr: Text, __default: _default=...) -> Union[int, _default]: ...
+ def east_asian_width(self, __chr: Text) -> str: ...
+ def lookup(self, __name: Union[Text, bytes]) -> Text: ...
+ def mirrored(self, __chr: Text) -> int: ...
+ def name(self, __chr: Text, __default: _default=...) -> Union[Text, _default]: ...
+ def normalize(self, __form: Text, __unistr: Text) -> Text: ...
+ def numeric(self, __chr: Text, __default: _default=...) -> Union[float, _default]: ...
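(For illustration, not part of the patch: the _default TypeVar above lets a caller-supplied fallback value flow through into the return type.)

    import unicodedata

    print(unicodedata.name(u'\u00e9'))                 # 'LATIN SMALL LETTER E WITH ACUTE'
    print(unicodedata.category(u'A'))                  # 'Lu'
    print(unicodedata.numeric(u'x', None))             # None; return type is Union[float, None]
    print(unicodedata.normalize(u'NFC', u'e\u0301') == u'\u00e9')   # True
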
diff --git a/typeshed/stdlib/2and3/uu.pyi b/typeshed/stdlib/2and3/uu.pyi
new file mode 100644
index 0000000..ee01c35
--- /dev/null
+++ b/typeshed/stdlib/2and3/uu.pyi
@@ -0,0 +1,10 @@
+# Stubs for uu (Python 2 and 3)
+
+from typing import BinaryIO, Union, Optional, Text
+
+_File = Union[Text, BinaryIO]
+
+class Error(Exception): ...
+
+def encode(in_file: _File, out_file: _File, name: Optional[str] = ..., mode: Optional[int] = ...) -> None: ...
+def decode(in_file: _File, out_file: Optional[_File] = ..., mode: Optional[int] = ..., quiet: int = ...) -> None: ...
diff --git a/typeshed/stdlib/2and3/uuid.pyi b/typeshed/stdlib/2and3/uuid.pyi
new file mode 100644
index 0000000..f05ccf0
--- /dev/null
+++ b/typeshed/stdlib/2and3/uuid.pyi
@@ -0,0 +1,87 @@
+# Stubs for uuid
+
+import sys
+from typing import Tuple, Optional, Any
+
+# Because UUID has properties called int and bytes we need to rename these temporarily.
+_Int = int
+_Bytes = bytes
+_FieldsType = Tuple[int, int, int, int, int, int]
+
+class UUID:
+ def __init__(self, hex: Optional[str] = ..., bytes: Optional[_Bytes] = ...,
+ bytes_le: Optional[_Bytes] = ...,
+ fields: Optional[_FieldsType] = ...,
+ int: Optional[_Int] = ...,
+ version: Optional[_Int] = ...) -> None: ...
+ @property
+ def bytes(self) -> _Bytes: ...
+ @property
+ def bytes_le(self) -> _Bytes: ...
+ @property
+ def clock_seq(self) -> _Int: ...
+ @property
+ def clock_seq_hi_variant(self) -> _Int: ...
+ @property
+ def clock_seq_low(self) -> _Int: ...
+ @property
+ def fields(self) -> _FieldsType: ...
+ @property
+ def hex(self) -> str: ...
+ @property
+ def int(self) -> _Int: ...
+ @property
+ def node(self) -> _Int: ...
+ @property
+ def time(self) -> _Int: ...
+ @property
+ def time_hi_version(self) -> _Int: ...
+ @property
+ def time_low(self) -> _Int: ...
+ @property
+ def time_mid(self) -> _Int: ...
+ @property
+ def urn(self) -> str: ...
+ @property
+ def variant(self) -> str: ...
+ @property
+ def version(self) -> Optional[_Int]: ...
+
+ if sys.version_info >= (3,):
+ def __eq__(self, other: Any) -> bool: ...
+ def __lt__(self, other: Any) -> bool: ...
+ def __le__(self, other: Any) -> bool: ...
+ def __gt__(self, other: Any) -> bool: ...
+ def __ge__(self, other: Any) -> bool: ...
+ else:
+ def get_bytes(self) -> _Bytes: ...
+ def get_bytes_le(self) -> _Bytes: ...
+ def get_clock_seq(self) -> _Int: ...
+ def get_clock_seq_hi_variant(self) -> _Int: ...
+ def get_clock_seq_low(self) -> _Int: ...
+ def get_fields(self) -> _FieldsType: ...
+ def get_hex(self) -> str: ...
+ def get_node(self) -> _Int: ...
+ def get_time(self) -> _Int: ...
+ def get_time_hi_version(self) -> _Int: ...
+ def get_time_low(self) -> _Int: ...
+ def get_time_mid(self) -> _Int: ...
+ def get_urn(self) -> str: ...
+ def get_variant(self) -> str: ...
+ def get_version(self) -> Optional[_Int]: ...
+ def __cmp__(self, other: Any) -> _Int: ...
+
+def getnode() -> int: ...
+def uuid1(node: Optional[_Int] = ..., clock_seq: Optional[_Int] = ...) -> UUID: ...
+def uuid3(namespace: UUID, name: str) -> UUID: ...
+def uuid4() -> UUID: ...
+def uuid5(namespace: UUID, name: str) -> UUID: ...
+
+NAMESPACE_DNS = ... # type: UUID
+NAMESPACE_URL = ... # type: UUID
+NAMESPACE_OID = ... # type: UUID
+NAMESPACE_X500 = ... # type: UUID
+RESERVED_NCS = ... # type: str
+RFC_4122 = ... # type: str
+RESERVED_MICROSOFT = ... # type: str
+RESERVED_FUTURE = ... # type: str
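(For illustration, not part of the patch: the version branches above mean property access on Python 3 and get_*() accessor methods on Python 2.)

    import sys
    import uuid

    u = uuid.uuid5(uuid.NAMESPACE_DNS, 'example.org')
    print(u.hex, u.version)          # properties shadow the builtin names int/bytes
    if sys.version_info >= (3,):
        print(u.bytes)
    else:
        print(u.get_bytes())         # Python 2-only accessor
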
diff --git a/typeshed/stdlib/2and3/wave.pyi b/typeshed/stdlib/2and3/wave.pyi
new file mode 100644
index 0000000..d41b029
--- /dev/null
+++ b/typeshed/stdlib/2and3/wave.pyi
@@ -0,0 +1,75 @@
+# Stubs for wave (Python 2 and 3)
+
+import sys
+from mypy_extensions import NoReturn
+from typing import Any, NamedTuple, Optional, Text, BinaryIO, Union, Tuple
+
+_File = Union[Text, BinaryIO]
+
+class Error(Exception): ...
+
+WAVE_FORMAT_PCM = ... # type: int
+
+if sys.version_info < (3, 0):
+ _wave_params = Tuple[int, int, int, int, str, str]
+else:
+ _wave_params = NamedTuple('_wave_params', [
+ ('nchannels', int),
+ ('sampwidth', int),
+ ('framerate', int),
+ ('nframes', int),
+ ('comptype', str),
+ ('compname', str),
+ ])
+
+class Wave_read:
+ def __init__(self, f: _File) -> None: ...
+ if sys.version_info >= (3, 0):
+ def __enter__(self) -> Wave_read: ...
+ def __exit__(self, *args: Any) -> None: ...
+ def getfp(self) -> Optional[BinaryIO]: ...
+ def rewind(self) -> None: ...
+ def close(self) -> None: ...
+ def tell(self) -> int: ...
+ def getnchannels(self) -> int: ...
+ def getnframes(self) -> int: ...
+ def getsampwidth(self) -> int: ...
+ def getframerate(self) -> int: ...
+ def getcomptype(self) -> str: ...
+ def getcompname(self) -> str: ...
+ def getparams(self) -> _wave_params: ...
+ def getmarkers(self) -> None: ...
+ def getmark(self, id: Any) -> NoReturn: ...
+ def setpos(self, pos: int) -> None: ...
+ def readframes(self, nframes: int) -> bytes: ...
+
+class Wave_write:
+ def __init__(self, f: _File) -> None: ...
+ if sys.version_info >= (3, 0):
+ def __enter__(self) -> Wave_write: ...
+ def __exit__(self, *args: Any) -> None: ...
+ def setnchannels(self, nchannels: int) -> None: ...
+ def getnchannels(self) -> int: ...
+ def setsampwidth(self, sampwidth: int) -> None: ...
+ def getsampwidth(self) -> int: ...
+ def setframerate(self, framerate: float) -> None: ...
+ def getframerate(self) -> int: ...
+ def setnframes(self, nframes: int) -> None: ...
+ def getnframes(self) -> int: ...
+ def setcomptype(self, comptype: str, compname: str) -> None: ...
+ def getcomptype(self) -> str: ...
+ def getcompname(self) -> str: ...
+ def setparams(self, params: _wave_params) -> None: ...
+ def getparams(self) -> _wave_params: ...
+ def setmark(self, id: Any, pos: Any, name: Any) -> NoReturn: ...
+ def getmark(self, id: Any) -> NoReturn: ...
+ def getmarkers(self) -> None: ...
+ def tell(self) -> int: ...
+ # should be any bytes-like object after 3.4, but we don't have a type for that
+ def writeframesraw(self, data: bytes) -> None: ...
+ def writeframes(self, data: bytes) -> None: ...
+ def close(self) -> None: ...
+
+# Returns a Wave_read if mode is 'rb' and a Wave_write if mode is 'wb'
+def open(f: _File, mode: Optional[str] = ...) -> Any: ...
+openfp = open
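(For illustration, not part of the patch: open() above returns Any because the result depends on mode, so callers usually pin the concrete class. 'out.wav' is a placeholder.)

    import wave

    # open() comes back as Any; state the writer class with a type comment.
    w = wave.open('out.wav', 'wb')    # type: wave.Wave_write
    w.setnchannels(1)
    w.setsampwidth(2)                 # bytes per sample
    w.setframerate(8000)
    w.writeframes(b'\x00\x00' * 8000) # one second of silence
    w.close()
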
diff --git a/typeshed/stdlib/2and3/webbrowser.pyi b/typeshed/stdlib/2and3/webbrowser.pyi
index a478edf..f4ddb15 100644
--- a/typeshed/stdlib/2and3/webbrowser.pyi
+++ b/typeshed/stdlib/2and3/webbrowser.pyi
@@ -3,34 +3,34 @@
# NOTE: This dynamically typed stub was automatically generated by stubgen.
import sys
-from typing import Any, Optional, Callable, List
+from typing import Any, Optional, Callable, List, Text, Union, Sequence
class Error(Exception): ...
-def register(name: str, klass: Optional[Callable[[], BaseBrowser]], instance: BaseBrowser=..., update_tryorder: int=...) -> None: ...
-def get(using: str=...) -> BaseBrowser: ...
-def open(url: str, new: int=..., autoraise: bool=...) -> bool: ...
-def open_new(url: str) -> bool: ...
-def open_new_tab(url: str) -> bool: ...
+def register(name: Text, klass: Optional[Callable[[], BaseBrowser]], instance: BaseBrowser = ..., update_tryorder: int = ...) -> None: ...
+def get(using: Optional[Text] = ...) -> BaseBrowser: ...
+def open(url: Text, new: int = ..., autoraise: bool = ...) -> bool: ...
+def open_new(url: Text) -> bool: ...
+def open_new_tab(url: Text) -> bool: ...
class BaseBrowser:
args = ... # type: List[str]
name = ... # type: str
basename = ... # type: str
- def __init__(self, name: str=...) -> None: ...
- def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
- def open_new(self, url: str) -> bool: ...
- def open_new_tab(self, url: str) -> bool: ...
+ def __init__(self, name: Text = ...) -> None: ...
+ def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ...
+ def open_new(self, url: Text) -> bool: ...
+ def open_new_tab(self, url: Text) -> bool: ...
class GenericBrowser(BaseBrowser):
args = ... # type: List[str]
name = ... # type: str
basename = ... # type: str
- def __init__(self, name: str) -> None: ...
- def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+ def __init__(self, name: Union[Text, Sequence[Text]]) -> None: ...
+ def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ...
class BackgroundBrowser(GenericBrowser):
- def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+ def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ...
class UnixBrowser(BaseBrowser):
raise_opts = ... # type: List[str]
@@ -40,7 +40,7 @@ class UnixBrowser(BaseBrowser):
remote_action = ... # type: str
remote_action_newwin = ... # type: str
remote_action_newtab = ... # type: str
- def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+ def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ...
class Mozilla(UnixBrowser):
raise_opts = ... # type: List[str]
@@ -82,19 +82,19 @@ class Elinks(UnixBrowser):
redirect_stdout = ... # type: bool
class Konqueror(BaseBrowser):
- def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+ def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ...
class Grail(BaseBrowser):
- def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+ def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ...
class WindowsDefault(BaseBrowser):
- def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+ def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ...
class MacOSX(BaseBrowser):
name = ... # type: str
- def __init__(self, name: str) -> None: ...
- def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+ def __init__(self, name: Text) -> None: ...
+ def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ...
class MacOSXOSAScript(BaseBrowser):
- def __init__(self, name: str) -> None: ...
- def open(self, url: str, new: int=..., autoraise: bool=...) -> bool: ...
+ def __init__(self, name: Text) -> None: ...
+ def open(self, url: Text, new: int = ..., autoraise: bool = ...) -> bool: ...
diff --git a/typeshed/stdlib/2and3/xdrlib.pyi b/typeshed/stdlib/2and3/xdrlib.pyi
new file mode 100644
index 0000000..bbdaa58
--- /dev/null
+++ b/typeshed/stdlib/2and3/xdrlib.pyi
@@ -0,0 +1,56 @@
+# Stubs for xdrlib (Python 2 and 3)
+from typing import Callable, List, Sequence, TypeVar
+
+_T = TypeVar('_T')
+
+class Error(Exception):
+ msg = ... # type: str
+ def __init__(self, msg: str) -> None: ...
+
+class ConversionError(Error): ...
+
+class Packer:
+ def __init__(self) -> None: ...
+ def reset(self) -> None: ...
+ def get_buffer(self) -> bytes: ...
+ def get_buf(self) -> bytes: ...
+ def pack_uint(self, x: int) -> None: ...
+ def pack_int(self, x: int) -> None: ...
+ def pack_enum(self, x: int) -> None: ...
+ def pack_bool(self, x: bool) -> None: ...
+ def pack_uhyper(self, x: int) -> None: ...
+ def pack_hyper(self, x: int) -> None: ...
+ def pack_float(self, x: float) -> None: ...
+ def pack_double(self, x: float) -> None: ...
+ def pack_fstring(self, n: int, s: bytes) -> None: ...
+ def pack_fopaque(self, n: int, s: bytes) -> None: ...
+ def pack_string(self, s: bytes) -> None: ...
+ def pack_opaque(self, s: bytes) -> None: ...
+ def pack_bytes(self, s: bytes) -> None: ...
+ def pack_list(self, list: Sequence[_T], pack_item: Callable[[_T], None]) -> None: ...
+ def pack_farray(self, n: int, list: Sequence[_T], pack_item: Callable[[_T], None]) -> None: ...
+ def pack_array(self, list: Sequence[_T], pack_item: Callable[[_T], None]) -> None: ...
+
+class Unpacker:
+ def __init__(self, data: bytes) -> None: ...
+ def reset(self, data: bytes) -> None: ...
+ def get_position(self) -> int: ...
+ def set_position(self, position: int) -> None: ...
+ def get_buffer(self) -> bytes: ...
+ def done(self) -> None: ...
+ def unpack_uint(self) -> int: ...
+ def unpack_int(self) -> int: ...
+ def unpack_enum(self) -> int: ...
+ def unpack_bool(self) -> bool: ...
+ def unpack_uhyper(self) -> int: ...
+ def unpack_hyper(self) -> int: ...
+ def unpack_float(self) -> float: ...
+ def unpack_double(self) -> float: ...
+ def unpack_fstring(self, n: int) -> bytes: ...
+ def unpack_fopaque(self, n: int) -> bytes: ...
+ def unpack_string(self) -> bytes: ...
+ def unpack_opaque(self) -> bytes: ...
+ def unpack_bytes(self) -> bytes: ...
+ def unpack_list(self, unpack_item: Callable[[], _T]) -> List[_T]: ...
+ def unpack_farray(self, n: int, unpack_item: Callable[[], _T]) -> List[_T]: ...
+ def unpack_array(self, unpack_item: Callable[[], _T]) -> List[_T]: ...
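(For illustration, not part of the patch: a Packer/Unpacker round trip under the bytes-based annotations above.)

    import xdrlib

    p = xdrlib.Packer()
    p.pack_int(-7)
    p.pack_string(b'typeshed')               # strings are bytes in this API
    p.pack_list([1, 2, 3], p.pack_uint)

    u = xdrlib.Unpacker(p.get_buffer())
    print(u.unpack_int(), u.unpack_string(), u.unpack_list(u.unpack_uint))
    u.done()                                 # raises if any input is left over
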
diff --git a/typeshed/stdlib/2and3/xml/etree/ElementTree.pyi b/typeshed/stdlib/2and3/xml/etree/ElementTree.pyi
index fe93987..404726e 100644
--- a/typeshed/stdlib/2and3/xml/etree/ElementTree.pyi
+++ b/typeshed/stdlib/2and3/xml/etree/ElementTree.pyi
@@ -1,6 +1,6 @@
-# Stubs for xml.etree.ElementTree (Python 3.4)
+# Stubs for xml.etree.ElementTree
-from typing import Any, AnyStr, Union, IO, Callable, Dict, List, Tuple, Sequence, Iterator, TypeVar, Optional, KeysView, ItemsView, Generator
+from typing import Any, Union, IO, Callable, Dict, List, Tuple, Sequence, Iterator, TypeVar, Optional, KeysView, ItemsView, Generator, Text
import io
import sys
@@ -10,53 +10,80 @@ class ParseError(SyntaxError): ...
def iselement(element: 'Element') -> bool: ...
-_Ss = TypeVar('_Ss', str, bytes)
_T = TypeVar('_T')
-_str_or_bytes = Union[str, bytes]
+
+# Type for parser inputs. Parser will accept any unicode/str/bytes and coerce,
+# and this is true in py2 and py3 (even fromstringlist() in python3 can be
+# called with a heterogeneous list)
+_parser_input_type = Union[bytes, Text]
+
+# Type for individual tag/attr/ns/text values in args to most functions.
+# In py2, the library accepts str or unicode everywhere and coerces
+# aggressively.
+# In py3, bytes is not coerced to str and so use of bytes is probably an error,
+# so we exclude it. (why? the parser never produces bytes when it parses XML,
+# so e.g., element.get(b'name') will always return None for parsed XML, even if
+# there is a 'name' attribute.)
+_str_argument_type = Union[str, Text]
+
+# Type for return values from individual tag/attr/text values and serialization
+if sys.version_info >= (3,):
+ # note: in python3, everything comes out as str, yay:
+ _str_result_type = str
+ # unfortunately, tostring and tostringlist can return either bytes or str
+ # depending on the value of `encoding` parameter. Client code knows best:
+ _tostring_result_type = Any
+else:
+ # in python2, if the tag/attribute/text wasn't decode-able as ascii, it
+ # comes out as a unicode string; otherwise it comes out as str. (see
+ # _fixtext function in the source). Client code knows best:
+ _str_result_type = Any
+ # On the bright side, tostring and tostringlist always return bytes:
+ _tostring_result_type = bytes
class Element(Sequence['Element']):
- tag = ... # type: _str_or_bytes
- attrib = ... # type: Dict[_str_or_bytes, _str_or_bytes]
- text = ... # type: Optional[_str_or_bytes]
- tail = ... # type: Optional[_str_or_bytes]
- def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> None: ...
+ tag = ... # type: _str_result_type
+ attrib = ... # type: Dict[_str_result_type, _str_result_type]
+ text = ... # type: Optional[_str_result_type]
+ tail = ... # type: Optional[_str_result_type]
+ def __init__(self, tag: Union[_str_argument_type, Callable[..., 'Element']], attrib: Dict[_str_argument_type, _str_argument_type]=..., **extra: _str_argument_type) -> None: ...
def append(self, subelement: 'Element') -> None: ...
def clear(self) -> None: ...
def copy(self) -> 'Element': ...
def extend(self, elements: Sequence['Element']) -> None: ...
- def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional['Element']: ...
- def findall(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
- def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
- def get(self, key: AnyStr, default: _T=...) -> Union[AnyStr, _T]: ...
+ def find(self, path: _str_argument_type, namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> Optional['Element']: ...
+ def findall(self, path: _str_argument_type, namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> List['Element']: ...
+ def findtext(self, path: _str_argument_type, default: _T=..., namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> Union[_T, _str_result_type]: ...
+ def get(self, key: _str_argument_type, default: _T=...) -> Union[_str_result_type, _T]: ...
def getchildren(self) -> List['Element']: ...
- def getiterator(self, tag: Union[str, AnyStr]=...) -> List['Element']: ...
+ def getiterator(self, tag: _str_argument_type=...) -> List['Element']: ...
if sys.version_info >= (3, 2):
def insert(self, index: int, subelement: 'Element') -> None: ...
else:
def insert(self, index: int, element: 'Element') -> None: ...
- def items(self) -> ItemsView[AnyStr, AnyStr]: ...
- def iter(self, tag: Union[str, AnyStr]=...) -> Generator['Element', None, None]: ...
- def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
- def itertext(self) -> Generator[str, None, None]: ...
- def keys(self) -> KeysView[AnyStr]: ...
- def makeelement(self, tag: _Ss, attrib: Dict[_Ss, _Ss]) -> 'Element': ...
+ def items(self) -> ItemsView[_str_result_type, _str_result_type]: ...
+ def iter(self, tag: _str_argument_type=...) -> Generator['Element', None, None]: ...
+ def iterfind(self, path: _str_argument_type, namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> List['Element']: ...
+ def itertext(self) -> Generator[_str_result_type, None, None]: ...
+ def keys(self) -> KeysView[_str_result_type]: ...
+ def makeelement(self, tag: _str_argument_type, attrib: Dict[_str_argument_type, _str_argument_type]) -> 'Element': ...
def remove(self, subelement: 'Element') -> None: ...
- def set(self, key: AnyStr, value: AnyStr) -> None: ...
+ def set(self, key: _str_argument_type, value: _str_argument_type) -> None: ...
def __bool__(self) -> bool: ...
def __delitem__(self, index: int) -> None: ...
def __getitem__(self, index) -> 'Element': ...
def __len__(self) -> int: ...
def __setitem__(self, index: int, element: 'Element') -> None: ...
-def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> Element: ...
-def Comment(text: _str_or_bytes=...) -> Element: ...
-def ProcessingInstruction(target: str, text: str=...) -> Element: ...
+def SubElement(parent: Element, tag: _str_argument_type, attrib: Dict[_str_argument_type, _str_argument_type]=..., **extra: _str_argument_type) -> Element: ...
+def Comment(text: _str_argument_type=...) -> Element: ...
+def ProcessingInstruction(target: _str_argument_type, text: _str_argument_type=...) -> Element: ...
PI = ... # type: Callable[..., Element]
class QName:
text = ... # type: str
- def __init__(self, text_or_uri: str, tag: str=...) -> None: ...
+ def __init__(self, text_or_uri: _str_argument_type, tag: _str_argument_type=...) -> None: ...
_file_or_filename = Union[str, bytes, int, IO[Any]]
@@ -65,25 +92,25 @@ class ElementTree:
def __init__(self, element: Element=..., file: _file_or_filename=...) -> None: ...
def getroot(self) -> Element: ...
def parse(self, source: _file_or_filename, parser: 'XMLParser'=...) -> Element: ...
- def iter(self, tag: Union[str, AnyStr]=...) -> Generator[Element, None, None]: ...
- def getiterator(self, tag: Union[str, AnyStr]=...) -> List[Element]: ...
- def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
- def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
- def findall(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
- def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
+ def iter(self, tag: _str_argument_type=...) -> Generator[Element, None, None]: ...
+ def getiterator(self, tag: _str_argument_type=...) -> List[Element]: ...
+ def find(self, path: _str_argument_type, namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> Optional[Element]: ...
+ def findtext(self, path: _str_argument_type, default: _T=..., namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> Union[_T, _str_result_type]: ...
+ def findall(self, path: _str_argument_type, namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> List[Element]: ...
+ def iterfind(self, path: _str_argument_type, namespaces: Dict[_str_argument_type, _str_argument_type]=...) -> List[Element]: ...
if sys.version_info >= (3, 4):
- def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: str=..., method: str=..., *, short_empty_elements: bool=...) -> None: ...
+ def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: _str_argument_type=..., method: str=..., *, short_empty_elements: bool=...) -> None: ...
else:
- def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: str=..., method: str=...) -> None: ...
+ def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: _str_argument_type=..., method: str=...) -> None: ...
def write_c14n(self, file: _file_or_filename) -> None: ...
-def register_namespace(prefix: str, uri: str) -> None: ...
+def register_namespace(prefix: _str_argument_type, uri: _str_argument_type) -> None: ...
if sys.version_info >= (3, 4):
- def tostring(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> str: ...
- def tostringlist(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> List[str]: ...
+ def tostring(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> _tostring_result_type: ...
+ def tostringlist(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> List[_tostring_result_type]: ...
else:
- def tostring(element: Element, encoding: str=..., method: str=...) -> str: ...
- def tostringlist(element: Element, encoding: str=..., method: str=...) -> List[str]: ...
+ def tostring(element: Element, encoding: str=..., method: str=...) -> _tostring_result_type: ...
+ def tostringlist(element: Element, encoding: str=..., method: str=...) -> List[_tostring_result_type]: ...
def dump(elem: Element) -> None: ...
def parse(source: _file_or_filename, parser: 'XMLParser'=...) -> ElementTree: ...
def iterparse(source: _file_or_filename, events: Sequence[str]=..., parser: 'XMLParser'=...) -> Iterator[Tuple[str, Element]]: ...
@@ -95,20 +122,31 @@ if sys.version_info >= (3, 4):
def close(self) -> None: ...
def read_events(self) -> Iterator[Tuple[str, Element]]: ...
-def XML(text: AnyStr, parser: 'XMLParser'=...) -> Element: ...
-def XMLID(text: AnyStr, parser: 'XMLParser'=...) -> Tuple[Element, Dict[str, Element]]: ...
+def XML(text: _parser_input_type, parser: 'XMLParser'=...) -> Element: ...
+def XMLID(text: _parser_input_type, parser: 'XMLParser'=...) -> Tuple[Element, Dict[_str_result_type, Element]]: ...
-# TODO-improve this type
-fromstring = ... # type: Callable[..., Element]
+# This is aliased to XML in the source.
+fromstring = XML
-def fromstringlist(sequence: Sequence[AnyStr], parser: 'XMLParser'=...) -> Element: ...
+def fromstringlist(sequence: Sequence[_parser_input_type], parser: 'XMLParser'=...) -> Element: ...
+
+# This type is both not precise enough and too precise. The TreeBuilder
+# requires the elementfactory to accept tag and attrs in its args and produce
+# some kind of object that has .text and .tail properties.
+# I've chosen to constrain the ElementFactory to always produce an Element
+# because that is how almost everyone will use it.
+# Unfortunately, the type of the factory arguments is dependent on how
+# TreeBuilder is called by client code (they could pass strs, bytes or whatever);
+# but we don't want to use a too-broad type, or it would be too hard to write
+# elementfactories.
+_ElementFactory = Callable[[Any, Dict[Any, Any]], Element]
class TreeBuilder:
- def __init__(self, element_factory: Callable[[AnyStr, Dict[AnyStr, AnyStr]], Element]=...) -> None: ...
+ def __init__(self, element_factory: _ElementFactory=...) -> None: ...
def close(self) -> Element: ...
- def data(self, data: AnyStr) -> None: ...
- def start(self, tag: AnyStr, attrs: Dict[AnyStr, AnyStr]) -> Element: ...
- def end(self, tag: AnyStr) -> Element: ...
+ def data(self, data: _parser_input_type) -> None: ...
+ def start(self, tag: _parser_input_type, attrs: Dict[_parser_input_type, _parser_input_type]) -> Element: ...
+ def end(self, tag: _parser_input_type) -> Element: ...
class XMLParser:
parser = ... # type: Any
@@ -118,5 +156,5 @@ class XMLParser:
version = ... # type: str
def __init__(self, html: int=..., target: TreeBuilder=..., encoding: str=...) -> None: ...
def doctype(self, name: str, pubid: str, system: str) -> None: ...
- def close(self) -> Any: ... # TODO-most of the time, this will be Element, but it can be anything target.close() returns
- def feed(self, data: AnyStr)-> None: ...
+ def close(self) -> Element: ...
+ def feed(self, data: _parser_input_type) -> None: ...
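(For illustration, not part of the patch: what the tightened argument/result types above mean for client code.)

    from xml.etree import ElementTree as ET

    root = ET.fromstring('<root name="value"><child/></root>')   # fromstring is an alias of XML()
    print(root.get('name'))                       # 'value' (_str_result_type)
    # root.get(b'name') is now rejected on Python 3: parsed attributes are
    # always text, so a bytes key could never match anyway.
    print(ET.tostring(root))                      # bytes with the default encoding
    print(ET.tostring(root, encoding='unicode'))  # str when encoding='unicode'
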
diff --git a/typeshed/stdlib/2and3/xml/sax/__init__.pyi b/typeshed/stdlib/2and3/xml/sax/__init__.pyi
index 71534b0..93a0f9a 100644
--- a/typeshed/stdlib/2and3/xml/sax/__init__.pyi
+++ b/typeshed/stdlib/2and3/xml/sax/__init__.pyi
@@ -1,19 +1,20 @@
-from typing import List
+from typing import Any, List, Optional, Text, Union
+from mypy_extensions import NoReturn
import xml.sax
-from xml.sax.xmlreader import InputSource
+from xml.sax.xmlreader import InputSource, Locator
from xml.sax.handler import ContentHandler, ErrorHandler
class SAXException(Exception):
- def __init__(self, msg, exception=None): ...
- def getMessage(self): ...
- def getException(self): ...
- def __getitem__(self, ix): ...
+ def __init__(self, msg: str, exception: Optional[Exception] = ...) -> None: ...
+ def getMessage(self) -> str: ...
+ def getException(self) -> Exception: ...
+ def __getitem__(self, ix: Any) -> NoReturn: ...
class SAXParseException(SAXException):
- def __init__(self, msg, exception, locator): ...
- def getColumnNumber(self): ...
- def getLineNumber(self): ...
+ def __init__(self, msg: str, exception: Exception, locator: Locator) -> None: ...
+ def getColumnNumber(self) -> int: ...
+ def getLineNumber(self) -> int: ...
def getPublicId(self): ...
def getSystemId(self): ...
@@ -23,12 +24,12 @@ class SAXReaderNotAvailable(SAXNotSupportedException): ...
default_parser_list = ... # type: List[str]
-def make_parser(parser_list: List[str]) -> xml.sax.xmlreader.XMLReader: ...
+def make_parser(parser_list: List[str] = ...) -> xml.sax.xmlreader.XMLReader: ...
def parse(source: str, handler: xml.sax.handler.ContentHandler,
- errorHandler: xml.sax.handler.ErrorHandler=...): ...
+ errorHandler: xml.sax.handler.ErrorHandler = ...) -> None: ...
-def parseString(string: str, handler: xml.sax.handler.ContentHandler,
- errorHandler: xml.sax.handler.ErrorHandler=...): ...
+def parseString(string: Union[bytes, Text], handler: xml.sax.handler.ContentHandler,
+ errorHandler: Optional[xml.sax.handler.ErrorHandler] = ...) -> None: ...
def _create_parser(parser_name: str) -> xml.sax.xmlreader.XMLReader: ...
diff --git a/typeshed/stdlib/3/zlib.pyi b/typeshed/stdlib/2and3/zlib.pyi
similarity index 61%
rename from typeshed/stdlib/3/zlib.pyi
rename to typeshed/stdlib/2and3/zlib.pyi
index 39db843..34aaf1b 100644
--- a/typeshed/stdlib/3/zlib.pyi
+++ b/typeshed/stdlib/2and3/zlib.pyi
@@ -1,10 +1,9 @@
# Stubs for zlib
+import sys
DEFLATED = ... # type: int
-DEF_BUF_SIZE = ... # type: int
DEF_MEM_LEVEL = ... # type: int
MAX_WBITS = ... # type: int
-ZLIB_RUNTIME_VERSION = ... # type: str
ZLIB_VERSION = ... # type: str
Z_BEST_COMPRESSION = ... # type: int
Z_BEST_SPEED = ... # type: int
@@ -16,7 +15,9 @@ Z_FULL_FLUSH = ... # type: int
Z_HUFFMAN_ONLY = ... # type: int
Z_NO_FLUSH = ... # type: int
Z_SYNC_FLUSH = ... # type: int
-
+if sys.version_info >= (3,):
+ DEF_BUF_SIZE = ... # type: int
+ ZLIB_RUNTIME_VERSION = ... # type: str
class error(Exception): ...
@@ -30,7 +31,8 @@ class Compress:
class Decompress:
unused_data = ... # type: bytes
unconsumed_tail = ... # type: bytes
- eof = ... # type: bool
+ if sys.version_info >= (3,):
+ eof = ... # type: bool
def decompress(self, data: bytes, max_length: int = ...) -> bytes: ...
def flush(self, length: int = ...) -> bytes: ...
def copy(self) -> "Decompress": ...
@@ -38,9 +40,16 @@ class Decompress:
def adler32(data: bytes, value: int = ...) -> int: ...
def compress(data: bytes, level: int = ...) -> bytes: ...
-def compressobj(level: int = ..., method: int = ..., wbits: int = ...,
- memlevel: int = ..., strategy: int = ...,
- zdict: bytes = ...) -> Compress: ...
+if sys.version_info >= (3,):
+ def compressobj(level: int = ..., method: int = ..., wbits: int = ...,
+ memLevel: int = ..., strategy: int = ...,
+ zdict: bytes = ...) -> Compress: ...
+else:
+ def compressobj(level: int = ..., method: int = ..., wbits: int = ...,
+ memlevel: int = ..., strategy: int = ...) -> Compress: ...
def crc32(data: bytes, value: int = ...) -> int: ...
def decompress(data: bytes, wbits: int = ..., bufsize: int = ...) -> bytes: ...
-def decompressobj(wbits: int = ..., zdict: bytes = ...) -> Decompress: ...
+if sys.version_info >= (3,):
+ def decompressobj(wbits: int = ..., zdict: bytes = ...) -> Decompress: ...
+else:
+ def decompressobj(wbits: int = ...) -> Decompress: ...
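(For illustration, not part of the patch: the compressobj()/decompressobj() signatures above are version-gated, so portable code branches on sys.version_info much like the stub does.)

    import sys
    import zlib

    data = b'typeshed ' * 100
    if sys.version_info >= (3,):
        c = zlib.compressobj(level=6, zdict=b'typeshed ')   # zdict exists only on Python 3
        d = zlib.decompressobj(zdict=b'typeshed ')
    else:
        c = zlib.compressobj(6)
        d = zlib.decompressobj()
    print(d.decompress(c.compress(data) + c.flush()) == data)   # True
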
diff --git a/typeshed/stdlib/3.4/asyncio/__init__.pyi b/typeshed/stdlib/3.4/asyncio/__init__.pyi
index 3f88dac..60a7dde 100644
--- a/typeshed/stdlib/3.4/asyncio/__init__.pyi
+++ b/typeshed/stdlib/3.4/asyncio/__init__.pyi
@@ -1,5 +1,6 @@
"""The asyncio package, tracking PEP 3156."""
+import socket
import sys
from typing import Type
@@ -40,6 +41,7 @@ from asyncio.futures import (
CancelledError as CancelledError,
TimeoutError as TimeoutError,
InvalidStateError as InvalidStateError,
+ wrap_future as wrap_future,
)
from asyncio.tasks import (
FIRST_COMPLETED as FIRST_COMPLETED,
@@ -47,6 +49,7 @@ from asyncio.tasks import (
ALL_COMPLETED as ALL_COMPLETED,
as_completed as as_completed,
ensure_future as ensure_future,
+ ensure_future as async,
gather as gather,
run_coroutine_threadsafe as run_coroutine_threadsafe,
shield as shield,
@@ -85,6 +88,11 @@ from asyncio.locks import (
if sys.version_info < (3, 5):
from asyncio.queues import JoinableQueue as JoinableQueue
+if sys.platform != 'win32':
+ from asyncio.streams import (
+ open_unix_connection as open_unix_connection,
+ start_unix_server as start_unix_server,
+ )
# TODO: It should be possible to instantiate these classes, but mypy
# currently disallows this.
diff --git a/typeshed/stdlib/3.4/asyncio/events.pyi b/typeshed/stdlib/3.4/asyncio/events.pyi
index 2e87f70..e20a023 100644
--- a/typeshed/stdlib/3.4/asyncio/events.pyi
+++ b/typeshed/stdlib/3.4/asyncio/events.pyi
@@ -1,13 +1,22 @@
+from socket import socket
+import ssl
import sys
-from typing import Any, Awaitable, TypeVar, List, Callable, Tuple, Union, Dict, Generator, overload, Optional
+from typing import Any, Awaitable, Callable, Dict, Generator, List, Optional, Sequence, Tuple, TypeVar, Union, overload
from abc import ABCMeta, abstractmethod
from asyncio.futures import Future
from asyncio.coroutines import coroutine
+from asyncio.protocols import BaseProtocol
from asyncio.tasks import Task
+from asyncio.transports import BaseTransport
__all__ = ... # type: str
_T = TypeVar('_T')
+_Context = Dict[str, Any]
+_ExceptionHandler = Callable[[AbstractEventLoop, _Context], Any]
+_ProtocolFactory = Callable[[], BaseProtocol]
+_SSLContext = Union[bool, None, ssl.SSLContext]
+_TransProtPair = Tuple[BaseTransport, BaseProtocol]
PIPE = ... # type: Any # from subprocess.PIPE
@@ -26,7 +35,7 @@ class Handle:
class AbstractServer:
def close(self) -> None: ...
@coroutine
- def wait_closed(self) -> Generator[Any, Any, None]: ...
+ def wait_closed(self) -> Generator[Any, None, None]: ...
class AbstractEventLoop(metaclass=ABCMeta):
@abstractmethod
@@ -35,7 +44,7 @@ class AbstractEventLoop(metaclass=ABCMeta):
# Can't use a union, see mypy issue # 1873.
@overload
@abstractmethod
- def run_until_complete(self, future: Generator[Any, Any, _T]) -> _T: ...
+ def run_until_complete(self, future: Generator[Any, None, _T]) -> _T: ...
@overload
@abstractmethod
def run_until_complete(self, future: Awaitable[_T]) -> _T: ...
@@ -45,7 +54,13 @@ class AbstractEventLoop(metaclass=ABCMeta):
@abstractmethod
def is_running(self) -> bool: ...
@abstractmethod
+ def is_closed(self) -> bool: ...
+ @abstractmethod
def close(self) -> None: ...
+ if sys.version_info >= (3, 6):
+ @abstractmethod
+ @coroutine
+ def shutdown_asyncgens(self) -> Generator[Any, None, None]: ...
# Methods scheduling callbacks. All these return Handles.
@abstractmethod
def call_soon(self, callback: Callable[..., Any], *args: Any) -> Handle: ...
@@ -70,77 +85,105 @@ class AbstractEventLoop(metaclass=ABCMeta):
@abstractmethod
def call_soon_threadsafe(self, callback: Callable[..., Any], *args: Any) -> Handle: ...
@abstractmethod
+ @coroutine
def run_in_executor(self, executor: Any,
- callback: Callable[[], Any], *args: Any) -> Future[Any]: ...
+ callback: Callable[..., _T], *args: Any) -> Generator[Any, None, _T]: ...
@abstractmethod
def set_default_executor(self, executor: Any) -> None: ...
# Network I/O methods returning Futures.
@abstractmethod
+ @coroutine
+ # TODO the "Tuple[Any, ...]" should be "Union[Tuple[str, int], Tuple[str, int, int, int]]" but that triggers
+ # https://github.com/python/mypy/issues/2509
def getaddrinfo(self, host: str, port: int, *,
- family: int = ..., type: int = ..., proto: int = ..., flags: int = ...) -> Future[List[Tuple[int, int, int, str, tuple]]]: ...
+ family: int = ..., type: int = ..., proto: int = ..., flags: int = ...) -> Generator[Any, None, List[Tuple[int, int, int, str, Tuple[Any, ...]]]]: ...
@abstractmethod
- def getnameinfo(self, sockaddr: tuple, flags: int = ...) -> Future[Tuple[str, int]]: ...
+ @coroutine
+ def getnameinfo(self, sockaddr: tuple, flags: int = ...) -> Generator[Any, None, Tuple[str, int]]: ...
@abstractmethod
- def create_connection(self, protocol_factory: Any, host: str = ..., port: int = ..., *,
- ssl: Any = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: Any = ...,
- local_addr: str = ..., server_hostname: str = ...) -> tuple: ...
+ @coroutine
+ def create_connection(self, protocol_factory: _ProtocolFactory, host: str = ..., port: int = ..., *,
+ ssl: _SSLContext = ..., family: int = ..., proto: int = ..., flags: int = ..., sock: Optional[socket] = ...,
+ local_addr: str = ..., server_hostname: str = ...) -> Generator[Any, None, _TransProtPair]: ...
@abstractmethod
- def create_server(self, protocol_factory: Any, host: str = ..., port: int = ..., *,
+ @coroutine
+ def create_server(self, protocol_factory: _ProtocolFactory, host: Union[str, Sequence[str]] = ..., port: int = ..., *,
family: int = ..., flags: int = ...,
- sock: Any = ..., backlog: int = ..., ssl: Any = ..., reuse_address: Any = ...) -> Any: ...
+ sock: Optional[socket] = ..., backlog: int = ..., ssl: _SSLContext = ...,
+ reuse_address: Optional[bool] = ...,
+ reuse_port: Optional[bool] = ...) -> Generator[Any, None, AbstractServer]: ...
@abstractmethod
- def create_unix_connection(self, protocol_factory: Any, path: str, *,
- ssl: Any = ..., sock: Any = ...,
- server_hostname: str = ...) -> tuple: ...
+ @coroutine
+ def create_unix_connection(self, protocol_factory: _ProtocolFactory, path: str, *,
+ ssl: _SSLContext = ..., sock: Optional[socket] = ...,
+ server_hostname: str = ...) -> Generator[Any, None, _TransProtPair]: ...
@abstractmethod
- def create_unix_server(self, protocol_factory: Any, path: str, *,
- sock: Any = ..., backlog: int = ..., ssl: Any = ...) -> Any: ...
+ @coroutine
+ def create_unix_server(self, protocol_factory: _ProtocolFactory, path: str, *,
+ sock: Optional[socket] = ..., backlog: int = ..., ssl: _SSLContext = ...) -> Generator[Any, None, AbstractServer]: ...
@abstractmethod
- def create_datagram_endpoint(self, protocol_factory: Any,
+ @coroutine
+ def create_datagram_endpoint(self, protocol_factory: _ProtocolFactory,
local_addr: str = ..., remote_addr: str = ..., *,
- family: int = ..., proto: int = ..., flags: int = ...) -> tuple: ...
+ family: int = ..., proto: int = ..., flags: int = ...,
+ reuse_address: Optional[bool] = ..., reuse_port: Optional[bool] = ...,
+ allow_broadcast: Optional[bool] = ...,
+ sock: Optional[socket] = ...) -> Generator[Any, None, _TransProtPair]: ...
+ @abstractmethod
+ @coroutine
+ def connect_accepted_socket(self, protocol_factory: _ProtocolFactory, sock: socket, *, ssl: _SSLContext = ...) -> Generator[Any, None, _TransProtPair]: ...
# Pipes and subprocesses.
@abstractmethod
- def connect_read_pipe(self, protocol_factory: Any, pipe: Any) -> tuple: ...
+ @coroutine
+ def connect_read_pipe(self, protocol_factory: _ProtocolFactory, pipe: Any) -> Generator[Any, None, _TransProtPair]: ...
@abstractmethod
- def connect_write_pipe(self, protocol_factory: Any, pipe: Any) -> tuple: ...
+ @coroutine
+ def connect_write_pipe(self, protocol_factory: _ProtocolFactory, pipe: Any) -> Generator[Any, None, _TransProtPair]: ...
@abstractmethod
- def subprocess_shell(self, protocol_factory: Any, cmd: Union[bytes, str], *, stdin: Any = ...,
+ @coroutine
+ def subprocess_shell(self, protocol_factory: _ProtocolFactory, cmd: Union[bytes, str], *, stdin: Any = ...,
stdout: Any = ..., stderr: Any = ...,
- **kwargs: Any) -> tuple: ...
+ **kwargs: Any) -> Generator[Any, None, _TransProtPair]: ...
@abstractmethod
- def subprocess_exec(self, protocol_factory: Any, *args: List[Any], stdin: Any = ...,
+ @coroutine
+ def subprocess_exec(self, protocol_factory: _ProtocolFactory, *args: List[Any], stdin: Any = ...,
stdout: Any = ..., stderr: Any = ...,
- **kwargs: Any) -> tuple: ...
+ **kwargs: Any) -> Generator[Any, None, _TransProtPair]: ...
@abstractmethod
- def add_reader(self, fd: int, callback: Callable[[], Any], *args: List[Any]) -> None: ...
+ def add_reader(self, fd: int, callback: Callable[..., Any], *args: List[Any]) -> None: ...
@abstractmethod
def remove_reader(self, fd: int) -> None: ...
@abstractmethod
- def add_writer(self, fd: int, callback: Callable[[], Any], *args: List[Any]) -> None: ...
+ def add_writer(self, fd: int, callback: Callable[..., Any], *args: List[Any]) -> None: ...
@abstractmethod
def remove_writer(self, fd: int) -> None: ...
# Completion based I/O methods returning Futures.
@abstractmethod
- def sock_recv(self, sock: Any, nbytes: int) -> Any: ... # TODO
+ @coroutine
+ def sock_recv(self, sock: socket, nbytes: int) -> Generator[Any, None, bytes]: ...
@abstractmethod
- def sock_sendall(self, sock: Any, data: bytes) -> None: ... # TODO
+ @coroutine
+ def sock_sendall(self, sock: socket, data: bytes) -> Generator[Any, None, None]: ...
@abstractmethod
- def sock_connect(self, sock: Any, address: str) -> Any: ... # TODO
+ @coroutine
+ def sock_connect(self, sock: socket, address: str) -> Generator[Any, None, None]: ...
@abstractmethod
- def sock_accept(self, sock: Any) -> Any: ...
+ @coroutine
+ def sock_accept(self, sock: socket) -> Generator[Any, None, Tuple[socket, Any]]: ...
# Signal handling.
@abstractmethod
- def add_signal_handler(self, sig: int, callback: Callable[[], Any], *args: List[Any]) -> None: ...
+ def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: List[Any]) -> None: ...
@abstractmethod
def remove_signal_handler(self, sig: int) -> None: ...
# Error handlers.
@abstractmethod
- def set_exception_handler(self, handler: Callable[[], Any]) -> None: ...
+ def set_exception_handler(self, handler: _ExceptionHandler) -> None: ...
+ @abstractmethod
+ def get_exception_handler(self) -> _ExceptionHandler: ...
@abstractmethod
- def default_exception_handler(self, context: Any) -> None: ...
+ def default_exception_handler(self, context: _Context) -> None: ...
@abstractmethod
- def call_exception_handler(self, context: Any) -> None: ...
+ def call_exception_handler(self, context: _Context) -> None: ...
# Debug flag management.
@abstractmethod
def get_debug(self) -> bool: ...
diff --git a/typeshed/stdlib/3.4/asyncio/futures.pyi b/typeshed/stdlib/3.4/asyncio/futures.pyi
index b880093..f651c66 100644
--- a/typeshed/stdlib/3.4/asyncio/futures.pyi
+++ b/typeshed/stdlib/3.4/asyncio/futures.pyi
@@ -6,6 +6,7 @@ from concurrent.futures._base import (
from concurrent.futures import (
CancelledError as CancelledError,
TimeoutError as TimeoutError,
+ Future as ConcurrentFuture,
)
__all__ = ... # type: str
@@ -44,3 +45,5 @@ class Future(Iterable[_T], Awaitable[_T], Generic[_T]):
def _copy_state(self, other: Any) -> None: ...
def __iter__(self) -> Generator[Any, None, _T]: ...
def __await__(self) -> Generator[Any, None, _T]: ...
+
+def wrap_future(f: Union[ConcurrentFuture[_T], Future[_T]]) -> Future[_T]: ...
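
A short illustrative sketch of the call pattern the new wrap_future signature describes, bridging a concurrent.futures future into asyncio (names are illustrative):

    import asyncio
    from concurrent.futures import ThreadPoolExecutor

    def blocking_work() -> int:
        return 42

    pool = ThreadPoolExecutor(max_workers=1)
    cf = pool.submit(blocking_work)      # concurrent.futures.Future[int]
    af = asyncio.wrap_future(cf)         # asyncio.Future[int], per the stub above
    loop = asyncio.get_event_loop()
    print(loop.run_until_complete(af))   # 42
    pool.shutdown()
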
diff --git a/typeshed/stdlib/3.4/asyncio/locks.pyi b/typeshed/stdlib/3.4/asyncio/locks.pyi
index 2467ffe..559effd 100644
--- a/typeshed/stdlib/3.4/asyncio/locks.pyi
+++ b/typeshed/stdlib/3.4/asyncio/locks.pyi
@@ -1,10 +1,10 @@
-from typing import Any, Callable, Generator, Iterable, Iterator, TypeVar, Union
+from typing import Any, Callable, Generator, Iterable, Iterator, TypeVar, Union, Optional
from .coroutines import coroutine
from .events import AbstractEventLoop
from .futures import Future
-T = TypeVar('T')
+_T = TypeVar('_T')
__all__ = ... # type: str
@@ -24,7 +24,7 @@ class Lock(_ContextManagerMixin):
def __init__(self, *, loop: AbstractEventLoop = None) -> None: ...
def locked(self) -> bool: ...
@coroutine
- def acquire(self) -> Future[bool]: ...
+ def acquire(self) -> Generator[Any, None, bool]: ...
def release(self) -> None: ...
class Event:
@@ -33,27 +33,27 @@ class Event:
def set(self) -> None: ...
def clear(self) -> None: ...
@coroutine
- def wait(self) -> bool: ...
+ def wait(self) -> Generator[Any, None, bool]: ...
class Condition(_ContextManagerMixin):
def __init__(self, lock: Lock = None, *, loop: AbstractEventLoop = None) -> None: ...
def locked(self) -> bool: ...
@coroutine
- def acquire(self) -> Future[bool]: ...
+ def acquire(self) -> Generator[Any, None, bool]: ...
def release(self) -> None: ...
@coroutine
- def wait(self) -> Future[bool]: ...
+ def wait(self) -> Generator[Any, None, bool]: ...
@coroutine
- def wait_for(self, predicate: Callable[[], T]) -> Future[T]: ...
+ def wait_for(self, predicate: Callable[[], _T]) -> Generator[Any, None, _T]: ...
def notify(self, n: int = 1) -> None: ...
def notify_all(self) -> None: ...
class Semaphore(_ContextManagerMixin):
- def __init__(self, value: int = 1, *, loop: AbstractEventLoop = None) -> None: ...
+ def __init__(self, value: int = ..., *, loop: Optional[AbstractEventLoop] = ...) -> None: ...
def locked(self) -> bool: ...
@coroutine
- def acquire(self) -> Future[bool]: ...
+ def acquire(self) -> Generator[Any, None, bool]: ...
def release(self) -> None: ...
class BoundedSemaphore(Semaphore):
- def __init__(self, value=1, *, loop: AbstractEventLoop = None) -> None: ...
+ def __init__(self, value: int = ..., *, loop: Optional[AbstractEventLoop] = ...) -> None: ...
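
For context, a minimal sketch (illustrative names) of why acquire/wait are now typed as Generator[Any, None, bool] rather than Future[bool]: they are generator-based coroutines consumed with `yield from`:

    import asyncio

    @asyncio.coroutine
    def guarded(lock: asyncio.Lock):
        acquired = yield from lock.acquire()   # bool, per the new return type
        try:
            pass  # critical section
        finally:
            lock.release()
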
diff --git a/typeshed/stdlib/3.4/asyncio/queues.pyi b/typeshed/stdlib/3.4/asyncio/queues.pyi
index 2901218..847f02b 100644
--- a/typeshed/stdlib/3.4/asyncio/queues.pyi
+++ b/typeshed/stdlib/3.4/asyncio/queues.pyi
@@ -2,7 +2,7 @@ import sys
from asyncio.events import AbstractEventLoop
from .coroutines import coroutine
from .futures import Future
-from typing import TypeVar, Generic
+from typing import Any, Generator, Generic, TypeVar
__all__ = ... # type: str
@@ -10,13 +10,13 @@ __all__ = ... # type: str
class QueueEmpty(Exception): ...
class QueueFull(Exception): ...
-T = TypeVar('T')
+_T = TypeVar('_T')
-class Queue(Generic[T]):
+class Queue(Generic[_T]):
def __init__(self, maxsize: int = ..., *, loop: AbstractEventLoop = ...) -> None: ...
def _init(self, maxsize: int) -> None: ...
- def _get(self) -> T: ...
- def _put(self, item: T) -> None: ...
+ def _get(self) -> _T: ...
+ def _put(self, item: _T) -> None: ...
def __repr__(self) -> str: ...
def __str__(self) -> str: ...
def _format(self) -> str: ...
@@ -28,24 +28,24 @@ class Queue(Generic[T]):
def empty(self) -> bool: ...
def full(self) -> bool: ...
@coroutine
- def put(self, item: T) -> Future[None]: ...
- def put_nowait(self, item: T) -> None: ...
+ def put(self, item: _T) -> Generator[Any, None, None]: ...
+ def put_nowait(self, item: _T) -> None: ...
@coroutine
- def get(self) -> Future[T]: ...
- def get_nowait(self) -> T: ...
+ def get(self) -> Generator[Any, None, _T]: ...
+ def get_nowait(self) -> _T: ...
if sys.version_info >= (3, 4):
@coroutine
- def join(self) -> None: ...
+ def join(self) -> Generator[Any, None, bool]: ...
def task_done(self) -> None: ...
-class PriorityQueue(Queue): ...
+class PriorityQueue(Queue[_T]): ...
-class LifoQueue(Queue): ...
+class LifoQueue(Queue[_T]): ...
if sys.version_info < (3, 5):
- class JoinableQueue(Queue):
+ class JoinableQueue(Queue[_T]):
def task_done(self) -> None: ...
@coroutine
- def join(self) -> None: ...
+ def join(self) -> Generator[Any, None, bool]: ...
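
A small illustrative sketch of the now-generic Queue[_T] and its coroutine get/put methods; the string annotation sidesteps runtime subscripting of asyncio.Queue (names are illustrative):

    import asyncio

    @asyncio.coroutine
    def worker(q: 'asyncio.Queue[int]'):
        item = yield from q.get()     # item: int
        yield from q.put(item + 1)    # put() is Generator[Any, None, None]
        return item
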
diff --git a/typeshed/stdlib/3.4/asyncio/streams.pyi b/typeshed/stdlib/3.4/asyncio/streams.pyi
index 82893f0..fd677ab 100644
--- a/typeshed/stdlib/3.4/asyncio/streams.pyi
+++ b/typeshed/stdlib/3.4/asyncio/streams.pyi
@@ -1,4 +1,4 @@
-import socket
+import sys
from typing import Any, Awaitable, Callable, Generator, Iterable, Optional, Tuple
from . import coroutines
@@ -38,7 +38,7 @@ def start_server(
**kwds: Any
) -> Generator[Any, None, events.AbstractServer]: ...
-if hasattr(socket, 'AF_UNIX'):
+if sys.platform != 'win32':
@coroutines.coroutine
def open_unix_connection(
path: str = ...,
@@ -84,7 +84,7 @@ class StreamWriter:
def close(self) -> None: ...
def get_extra_info(self, name: str, default: Any = ...) -> Any: ...
@coroutines.coroutine
- def drain(self) -> None: ...
+ def drain(self) -> Generator[Any, None, None]: ...
class StreamReader:
def __init__(self,
diff --git a/typeshed/stdlib/3.4/asyncio/subprocess.pyi b/typeshed/stdlib/3.4/asyncio/subprocess.pyi
index b492506..bd7fd94 100644
--- a/typeshed/stdlib/3.4/asyncio/subprocess.pyi
+++ b/typeshed/stdlib/3.4/asyncio/subprocess.pyi
@@ -3,7 +3,7 @@ from asyncio import protocols
from asyncio import streams
from asyncio import transports
from asyncio.coroutines import coroutine
-from typing import Any, AnyStr, Optional, Tuple, Union
+from typing import Any, AnyStr, Generator, Optional, Tuple, Union
__all__ = ... # type: str
@@ -13,6 +13,9 @@ DEVNULL = ... # type: int
class SubprocessStreamProtocol(streams.FlowControlMixin,
protocols.SubprocessProtocol):
+ stdin = ... # type: Optional[streams.StreamWriter]
+ stdout = ... # type: Optional[streams.StreamReader]
+ stderr = ... # type: Optional[streams.StreamReader]
def __init__(self, limit: int, loop: events.AbstractEventLoop) -> None: ...
def connection_made(self, transport: transports.BaseTransport) -> None: ...
def pipe_data_received(self, fd: int, data: AnyStr) -> None: ...
@@ -21,6 +24,10 @@ class SubprocessStreamProtocol(streams.FlowControlMixin,
class Process:
+ stdin = ... # type: Optional[streams.StreamWriter]
+ stdout = ... # type: Optional[streams.StreamReader]
+ stderr = ... # type: Optional[streams.StreamReader]
+ pid = ... # type: int
def __init__(self,
transport: transports.BaseTransport,
protocol: protocols.BaseProtocol,
@@ -28,12 +35,12 @@ class Process:
@property
def returncode(self) -> int: ...
@coroutine
- def wait(self) -> int: ...
+ def wait(self) -> Generator[Any, None, int]: ...
def send_signal(self, signal: int) -> None: ...
def terminate(self) -> None: ...
def kill(self) -> None: ...
@coroutine
- def communicate(self, input: Optional[bytes] = ...) -> Tuple[bytes, bytes]: ...
+ def communicate(self, input: Optional[bytes] = ...) -> Generator[Any, None, Tuple[bytes, bytes]]: ...
@coroutine
@@ -45,7 +52,7 @@ def create_subprocess_shell(
loop: events.AbstractEventLoop = ...,
limit: int = ...,
**kwds: Any
-): ...
+) -> Generator[Any, None, Process]: ...
@coroutine
def create_subprocess_exec(
@@ -57,4 +64,4 @@ def create_subprocess_exec(
loop: events.AbstractEventLoop = ...,
limit: int = ...,
**kwds: Any
-) -> Process: ...
+) -> Generator[Any, None, Process]: ...
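
For context, a minimal sketch (illustrative command and names) of the coroutine-typed subprocess helpers above:

    import asyncio

    @asyncio.coroutine
    def run_echo():
        proc = yield from asyncio.create_subprocess_exec(
            'echo', 'hello', stdout=asyncio.subprocess.PIPE)
        out, err = yield from proc.communicate()   # Tuple[bytes, bytes] per the stub
        rc = yield from proc.wait()                # int
        return out, rc
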
diff --git a/typeshed/stdlib/3.4/asyncio/tasks.pyi b/typeshed/stdlib/3.4/asyncio/tasks.pyi
index e95d758..bf69df8 100644
--- a/typeshed/stdlib/3.4/asyncio/tasks.pyi
+++ b/typeshed/stdlib/3.4/asyncio/tasks.pyi
@@ -15,7 +15,7 @@ FIRST_COMPLETED = 'FIRST_COMPLETED'
ALL_COMPLETED = 'ALL_COMPLETED'
def as_completed(fs: Sequence[_FutureT[_T]], *, loop: AbstractEventLoop = ...,
- timeout=None) -> Iterator[Generator[Any, None, _T]]: ...
+ timeout: Optional[float] = ...) -> Iterator[Generator[Any, None, _T]]: ...
def ensure_future(coro_or_future: _FutureT[_T],
*, loop: AbstractEventLoop = ...) -> Future[_T]: ...
# TODO: gather() should use variadic type vars instead of _TAny.
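
A brief illustrative sketch of as_completed with the now-typed optional float timeout (names are illustrative):

    import asyncio

    @asyncio.coroutine
    def gather_first(futs):
        results = []
        for fut in asyncio.as_completed(futs, timeout=2.5):   # timeout: Optional[float]
            results.append((yield from fut))
        return results
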
diff --git a/typeshed/stdlib/3.4/enum.pyi b/typeshed/stdlib/3.4/enum.pyi
index 3b97e1b..a7e0fd6 100644
--- a/typeshed/stdlib/3.4/enum.pyi
+++ b/typeshed/stdlib/3.4/enum.pyi
@@ -1,12 +1,19 @@
-# FIXME: Stub incomplete, omissions include:
-# * the metaclass
-# * _sunder_ methods with their transformations
-
import sys
-from typing import List, Any, TypeVar, Union
+from typing import List, Any, TypeVar, Union, Iterable, Iterator, TypeVar, Generic, Type, Sized, Reversible, Container, Mapping
+
+_T = TypeVar('_T', bound=Enum)
+_S = TypeVar('_S', bound=Type[Enum])
-class Enum:
- def __new__(cls, value: Any) -> None: ...
+class EnumMeta(type, Iterable[Enum], Sized, Reversible[Enum], Container[Enum]):
+ def __iter__(self: Type[_T]) -> Iterator[_T]: ... # type: ignore
+ def __reversed__(self: Type[_T]) -> Iterator[_T]: ...
+ def __contains__(self, member: Any) -> bool: ...
+ def __getitem__(self: Type[_T], name: str) -> _T: ...
+ @property
+ def __members__(self: Type[_T]) -> Mapping[str, _T]: ...
+
+class Enum(metaclass=EnumMeta):
+ def __new__(cls: Type[_T], value: Any) -> _T: ...
def __repr__(self) -> str: ...
def __str__(self) -> str: ...
def __dir__(self) -> List[str]: ...
@@ -20,9 +27,7 @@ class Enum:
class IntEnum(int, Enum):
value = ... # type: int
-_T = TypeVar('_T')
-
-def unique(enumeration: _T) -> _T: ...
+def unique(enumeration: _S) -> _S: ...
if sys.version_info >= (3, 6):
_auto_null = ... # type: Any
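
For context, a small sketch of the class-level behaviour the new EnumMeta stub captures: iteration, reversal, lookup by name and __members__ (the Color enum is illustrative):

    from enum import Enum

    class Color(Enum):
        RED = 1
        GREEN = 2

    members = list(Color)            # List[Color] via EnumMeta.__iter__
    red = Color['RED']               # Color via EnumMeta.__getitem__
    names = Color.__members__        # Mapping[str, Color]
    rev = list(reversed(Color))      # [Color.GREEN, Color.RED]
    again = Color(1)                 # Color.RED, via the typed __new__
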
diff --git a/typeshed/stdlib/3.4/pathlib.pyi b/typeshed/stdlib/3.4/pathlib.pyi
index 67ad853..0051266 100644
--- a/typeshed/stdlib/3.4/pathlib.pyi
+++ b/typeshed/stdlib/3.4/pathlib.pyi
@@ -1,13 +1,13 @@
# Stubs for pathlib (Python 3.4)
-from typing import Any, Generator, IO, Optional, Sequence, Tuple, Type, TypeVar, Union
+from typing import Any, Generator, IO, Optional, Sequence, Tuple, Type, TypeVar, Union, List
import os
import sys
_P = TypeVar('_P', bound='PurePath')
if sys.version_info >= (3, 6):
- _PurePathBase = os.PathLike
+ _PurePathBase = os.PathLike[str]
else:
_PurePathBase = object
@@ -22,8 +22,10 @@ class PurePath(_PurePathBase):
stem = ... # type: str
if sys.version_info < (3, 5):
def __init__(self, *pathsegments: str) -> None: ...
- else:
+ elif sys.version_info < (3, 6):
def __new__(cls: Type[_P], *args: Union[str, PurePath]) -> _P: ...
+ else:
+ def __new__(cls: Type[_P], *args: Union[str, os.PathLike[str]]) -> _P: ...
def __hash__(self) -> int: ...
def __lt__(self, other: PurePath) -> bool: ...
def __le__(self, other: PurePath) -> bool: ...
@@ -67,7 +69,7 @@ class Path(PurePath):
def iterdir(self) -> Generator[Path, None, None]: ...
def lchmod(self, mode: int) -> None: ...
def lstat(self) -> os.stat_result: ...
- if sys.version_info <= (3, 4):
+ if sys.version_info < (3, 5):
def mkdir(self, mode: int = ...,
parents: bool = ...) -> None: ...
else:
@@ -79,7 +81,7 @@ class Path(PurePath):
def owner(self) -> str: ...
def rename(self, target: Union[str, PurePath]) -> None: ...
def replace(self, target: Union[str, PurePath]) -> None: ...
- if sys.version_info <= (3, 5):
+ if sys.version_info < (3, 6):
def resolve(self: _P) -> _P: ...
else:
def resolve(self: _P, strict: bool = ...) -> _P: ...
@@ -93,8 +95,12 @@ class Path(PurePath):
if sys.version_info >= (3, 5):
@classmethod
def home(cls: Type[_P]) -> _P: ...
- def __new__(cls: Type[_P], *args: Union[str, PurePath],
- **kwargs: Any) -> _P: ...
+ if sys.version_info < (3, 6):
+ def __new__(cls: Type[_P], *args: Union[str, PurePath],
+ **kwargs: Any) -> _P: ...
+ else:
+ def __new__(cls: Type[_P], *args: Union[str, os.PathLike[str]],
+ **kwargs: Any) -> _P: ...
def absolute(self: _P) -> _P: ...
def expanduser(self: _P) -> _P: ...
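
An illustrative sketch of the 3.6 os.PathLike acceptance that the new __new__ overloads describe (the Wrapper class is illustrative):

    import os
    import sys
    from pathlib import Path

    if sys.version_info >= (3, 6):
        class Wrapper(os.PathLike):
            def __init__(self, raw: str) -> None:
                self.raw = raw
            def __fspath__(self) -> str:
                return self.raw

        p = Path(Wrapper('/tmp'), 'file.txt')   # accepted as os.PathLike[str]
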
diff --git a/typeshed/stdlib/3.4/statistics.pyi b/typeshed/stdlib/3.4/statistics.pyi
new file mode 100644
index 0000000..d9116e5
--- /dev/null
+++ b/typeshed/stdlib/3.4/statistics.pyi
@@ -0,0 +1,24 @@
+# Stubs for statistics
+
+from decimal import Decimal
+from fractions import Fraction
+import sys
+from typing import Iterable, Optional, TypeVar
+
+# Most functions in this module accept homogeneous collections of one of these types
+_Number = TypeVar('_Number', float, Decimal, Fraction)
+
+class StatisticsError(ValueError): ...
+
+def mean(data: Iterable[_Number]) -> _Number: ...
+if sys.version_info >= (3, 6):
+ def harmonic_mean(data: Iterable[_Number]) -> _Number: ...
+def median(data: Iterable[_Number]) -> _Number: ...
+def median_low(data: Iterable[_Number]) -> _Number: ...
+def median_high(data: Iterable[_Number]) -> _Number: ...
+def median_grouped(data: Iterable[_Number]) -> _Number: ...
+def mode(data: Iterable[_Number]) -> _Number: ...
+def pstdev(data: Iterable[_Number], mu: Optional[_Number] = ...) -> _Number: ...
+def pvariance(data: Iterable[_Number], mu: Optional[_Number] = ...) -> _Number: ...
+def stdev(data: Iterable[_Number], xbar: Optional[_Number] = ...) -> _Number: ...
+def variance(data: Iterable[_Number], xbar: Optional[_Number] = ...) -> _Number: ...
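
A short illustrative sketch of the constrained _Number type variable: each call operates on a homogeneous collection of one numeric type and returns that same type:

    import statistics
    from fractions import Fraction

    m1 = statistics.mean([1.0, 2.0, 3.0])                   # float
    m2 = statistics.mean([Fraction(1, 3), Fraction(2, 3)])  # Fraction
    sd = statistics.stdev([1.0, 2.0, 4.0], xbar=m1)         # float
    # Mixing float and Fraction in one call matches none of the _Number constraints.
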
diff --git a/typeshed/stdlib/3.4/tracemalloc.pyi b/typeshed/stdlib/3.4/tracemalloc.pyi
new file mode 100644
index 0000000..57b8f6c
--- /dev/null
+++ b/typeshed/stdlib/3.4/tracemalloc.pyi
@@ -0,0 +1,65 @@
+# Stubs for tracemalloc (Python 3.4+)
+
+import sys
+from typing import Any, List, Optional, Sequence, Tuple, Union
+
+def clear_traces() -> None: ...
+def get_object_traceback(obj: object) -> Optional[Traceback]: ...
+def get_traceback_limit() -> int: ...
+def get_traced_memory() -> Tuple[int, int]: ...
+def get_tracemalloc_memory() -> int: ...
+def is_tracing() -> bool: ...
+def start(nframe: int = ...) -> None: ...
+def stop() -> None: ...
+def take_snapshot() -> Snapshot: ...
+
+if sys.version_info >= (3, 6):
+ class DomainFilter:
+ inclusive = ... # type: bool
+ domain = ... # type: int
+ def __init__(self, inclusive: bool, domain: int) -> None: ...
+
+class Filter:
+ if sys.version_info >= (3, 6):
+ domain = ... # type: Optional[int]
+ inclusive = ... # type: bool
+ lineno = ... # type: Optional[int]
+ filename_pattern = ... # type: str
+ all_frames = ... # type: bool
+ def __init__(self, inclusive: bool, filename_pattern: str, lineno: Optional[int] = ..., all_frames: bool = ..., domain: Optional[int] = ...) -> None: ...
+
+class Frame:
+ filename = ... # type: str
+ lineno = ... # type: int
+
+class Snapshot:
+ def compare_to(self, old_snapshot: Snapshot, key_type: str, cumulative: bool = ...) -> List[StatisticDiff]: ...
+ def dump(self, filename: str) -> None: ...
+ if sys.version_info >= (3, 6):
+ def filter_traces(self, filters: Sequence[Union[DomainFilter, Filter]]) -> Snapshot: ...
+ else:
+ def filter_traces(self, filters: Sequence[Filter]) -> Snapshot: ...
+ @classmethod
+ def load(cls, filename: str) -> Snapshot: ...
+ def statistics(self, key_type: str, cumulative: bool = ...) -> List[Statistic]: ...
+ traceback_limit = ... # type: int
+ traces = ... # type: Sequence[Trace]
+
+class Statistic:
+ count = ... # type: int
+ size = ... # type: int
+ traceback = ... # type: Traceback
+
+class StatisticDiff:
+ count = ... # type: int
+ count_diff = ... # type: int
+ size = ... # type: int
+ size_diff = ... # type: int
+ traceback = ... # type: Traceback
+
+class Trace:
+ size = ... # type: int
+ traceback = ... # type: Traceback
+
+class Traceback(Sequence[Frame]):
+ def format(self, limit: Optional[int] = ...) -> List[str]: ...
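
For context, a minimal runnable sketch of the tracemalloc API the new stub covers (variable names are illustrative):

    import tracemalloc

    tracemalloc.start()
    data = [bytes(1000) for _ in range(100)]
    snapshot = tracemalloc.take_snapshot()            # Snapshot
    for stat in snapshot.statistics('lineno')[:3]:    # List[Statistic]
        print(stat.size, stat.count, stat.traceback)
    tracemalloc.stop()
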
diff --git a/typeshed/stdlib/3.5/zipapp.pyi b/typeshed/stdlib/3.5/zipapp.pyi
new file mode 100644
index 0000000..9fac5a0
--- /dev/null
+++ b/typeshed/stdlib/3.5/zipapp.pyi
@@ -0,0 +1,11 @@
+# Stubs for zipapp (Python 3.5+)
+
+from pathlib import Path
+from typing import BinaryIO, Optional, Union
+
+_Path = Union[str, Path, BinaryIO]
+
+class ZipAppError(Exception): ...
+
+def create_archive(source: _Path, target: Optional[_Path] = ..., interpreter: Optional[str] = ..., main: Optional[str] = ...) -> None: ...
+def get_interpreter(archive: _Path) -> str: ...
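
A brief illustrative sketch of the zipapp stub, assuming a directory named 'myapp' containing cli.py with a main() callable and no __main__.py:

    import zipapp

    # str, pathlib.Path and binary file objects all satisfy _Path
    zipapp.create_archive('myapp', target='myapp.pyz',
                          interpreter='/usr/bin/env python3',
                          main='cli:main')
    print(zipapp.get_interpreter('myapp.pyz'))   # '/usr/bin/env python3'
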
diff --git a/typeshed/stdlib/3/_codecs.pyi b/typeshed/stdlib/3/_codecs.pyi
deleted file mode 100644
index 8d4fb3c..0000000
--- a/typeshed/stdlib/3/_codecs.pyi
+++ /dev/null
@@ -1,51 +0,0 @@
-"""Stub file for the '_codecs' module."""
-
-from typing import Any, AnyStr, Callable, Tuple, Optional, Dict
-
-import codecs
-
-# For convenience:
-_Handler = Callable[[Exception], Tuple[str, int]]
-
-def register(search_function: Callable[[str], Any]) -> None: ...
-def register_error(errors: str, handler: _Handler) -> None: ...
-def lookup(a: str) -> codecs.CodecInfo: ...
-def lookup_error(a: str) -> _Handler: ...
-def decode(obj: Any, encoding: str = ..., errors: str = ...) -> Any: ...
-def encode(obj: Any, encoding: str = ..., errors: str = ...) -> Any: ...
-def charmap_build(a: str) -> Dict[int, int]: ...
-
-def ascii_decode(data: AnyStr, errors: str = ...) -> Tuple[str, int]: ...
-def ascii_encode(data: AnyStr, errors: str = ...) -> Tuple[bytes, int]: ...
-def charbuffer_encode(data: AnyStr, errors: str = ...) -> Tuple[bytes, int]: ...
-def charmap_decode(data: AnyStr, errors: str = ..., mapping: Optional[Dict[int, int]] = ...) -> Tuple[str, int]: ...
-def charmap_encode(data: AnyStr, errors: str, mapping: Optional[Dict[int, int]] = ...) -> Tuple[bytes, int]: ...
-def escape_decode(data: AnyStr, errors: str = ...) -> Tuple[str, int]: ...
-def escape_encode(data: AnyStr, errors: str = ...) -> Tuple[bytes, int]: ...
-def latin_1_decode(data: AnyStr, errors: str = ...) -> Tuple[str, int]: ...
-def latin_1_encode(data: AnyStr, errors: str = ...) -> Tuple[bytes, int]: ...
-def raw_unicode_escape_decode(data: AnyStr, errors: str = ...) -> Tuple[str, int]: ...
-def raw_unicode_escape_encode(data: AnyStr, errors: str = ...) -> Tuple[bytes, int]: ...
-def readbuffer_encode(data: AnyStr, errors: str = ...) -> Tuple[bytes, int]: ...
-def unicode_escape_decode(data: AnyStr, errors: str = ...) -> Tuple[str, int]: ...
-def unicode_escape_encode(data: AnyStr, errors: str = ...) -> Tuple[bytes, int]: ...
-def unicode_internal_decode(data: AnyStr, errors: str = ...) -> Tuple[str, int]: ...
-def unicode_internal_encode(data: AnyStr, errors: str = ...) -> Tuple[bytes, int]: ...
-def utf_16_be_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_16_be_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[bytes, int]: ...
-def utf_16_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_16_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[bytes, int]: ...
-def utf_16_ex_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_16_le_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_16_le_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[bytes, int]: ...
-def utf_32_be_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_32_be_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[bytes, int]: ...
-def utf_32_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_32_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[bytes, int]: ...
-def utf_32_ex_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_32_le_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_32_le_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[bytes, int]: ...
-def utf_7_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_7_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[bytes, int]: ...
-def utf_8_decode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[str, int]: ...
-def utf_8_encode(data: AnyStr, errors: str = ..., final: int = ...) -> Tuple[bytes, int]: ...
diff --git a/typeshed/stdlib/3/_imp.pyi b/typeshed/stdlib/3/_imp.pyi
new file mode 100644
index 0000000..7015b3b
--- /dev/null
+++ b/typeshed/stdlib/3/_imp.pyi
@@ -0,0 +1,22 @@
+# Stubs for _imp (Python 3.6)
+
+import sys
+import types
+from typing import Any, List
+
+if sys.version_info >= (3, 5):
+ from importlib.machinery import ModuleSpec
+ def create_builtin(spec: ModuleSpec) -> types.ModuleType: ...
+ def create_dynamic(spec: ModuleSpec, file: Any = ...) -> None: ...
+
+def acquire_lock() -> None: ...
+def exec_builtin(mod: types.ModuleType) -> int: ...
+def exec_dynamic(mod: types.ModuleType) -> int: ...
+def extension_suffixes() -> List[str]: ...
+def get_frozen_object(name: str) -> types.CodeType: ...
+def init_frozen(name: str) -> types.ModuleType: ...
+def is_builtin(name: str) -> int: ...
+def is_frozen(name: str) -> bool: ...
+def is_frozen_package(name: str) -> bool: ...
+def lock_held() -> bool: ...
+def release_lock() -> None: ...
diff --git a/typeshed/stdlib/3/_importlib_modulespec.pyi b/typeshed/stdlib/3/_importlib_modulespec.pyi
index 1acd9f1..bfd27ad 100644
--- a/typeshed/stdlib/3/_importlib_modulespec.pyi
+++ b/typeshed/stdlib/3/_importlib_modulespec.pyi
@@ -7,7 +7,7 @@
from abc import ABCMeta
import sys
-from typing import Any, Optional
+from typing import Dict, Any, Optional
if sys.version_info >= (3, 4):
class ModuleSpec:
@@ -26,11 +26,12 @@ if sys.version_info >= (3, 4):
class ModuleType:
__name__ = ... # type: str
__file__ = ... # type: str
+ __dict__ = ... # type: Dict[str, Any]
if sys.version_info >= (3, 4):
__loader__ = ... # type: Optional[Loader]
__package__ = ... # type: Optional[str]
__spec__ = ... # type: Optional[ModuleSpec]
- def __init__(self, name: str, doc: str) -> None: ...
+ def __init__(self, name: str, doc: Optional[str] = ...) -> None: ...
class Loader(metaclass=ABCMeta):
def load_module(self, fullname: str) -> ModuleType: ...
diff --git a/typeshed/stdlib/3/_markupbase.pyi b/typeshed/stdlib/3/_markupbase.pyi
index 129b49b..58d1070 100644
--- a/typeshed/stdlib/3/_markupbase.pyi
+++ b/typeshed/stdlib/3/_markupbase.pyi
@@ -1,6 +1,6 @@
from typing import Tuple
-class ParserBase(object):
+class ParserBase:
def __init__(self) -> None: ...
def error(self, message: str) -> None: ...
def reset(self) -> None: ...
diff --git a/typeshed/stdlib/3/_operator.pyi b/typeshed/stdlib/3/_operator.pyi
index 0f64f95..99262e5 100644
--- a/typeshed/stdlib/3/_operator.pyi
+++ b/typeshed/stdlib/3/_operator.pyi
@@ -1,71 +1,65 @@
# Stubs for _operator (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-def _compare_digest(*args, **kwargs): ...
-def abs(a): ...
-def add(a, b): ...
-def and_(a, b): ...
-def concat(a, b): ...
-def contains(a, b): ...
-def countOf(a, b): ...
-def delitem(a, b): ...
-def eq(a, b): ...
-def floordiv(a, b): ...
-def ge(a, b): ...
-def getitem(a, b): ...
-def gt(a, b): ...
-def iadd(*args, **kwargs): ...
-def iand(*args, **kwargs): ...
-def iconcat(*args, **kwargs): ...
-def ifloordiv(*args, **kwargs): ...
-def ilshift(*args, **kwargs): ...
-def imatmul(*args, **kwargs): ...
-def imod(*args, **kwargs): ...
-def imul(*args, **kwargs): ...
-def index(a): ...
-def indexOf(a, b): ...
-def inv(a): ...
-def invert(a): ...
-def ior(*args, **kwargs): ...
-def ipow(*args, **kwargs): ...
-def irshift(*args, **kwargs): ...
-def is_(a, b): ...
-def is_not(a, b): ...
-def isub(*args, **kwargs): ...
-def itruediv(*args, **kwargs): ...
-def ixor(*args, **kwargs): ...
-def le(a, b): ...
-def length_hint(obj, default=0): ...
-def lshift(a, b): ...
-def lt(a, b): ...
-def matmul(a, b): ...
-def mod(a, b): ...
-def mul(a, b): ...
-def ne(a, b): ...
-def neg(a): ...
-def not_(a): ...
-def or_(a, b): ...
-def pos(a): ...
-def pow(a, b): ...
-def rshift(a, b): ...
-def setitem(a, b, c): ...
-def sub(a, b): ...
-def truediv(a, b): ...
-def truth(a): ...
-def xor(a, b): ...
+import sys
+from typing import AnyStr
-class attrgetter:
- def __init__(self, *args, **kwargs): ...
- def __call__(self, *args, **kwargs): ...
- def __reduce__(self): ...
+# In reality the import is the other way around, but this way we can keep the operator stub in 2and3
+from operator import (
+ truth as truth,
+ contains as contains,
+ indexOf as indexOf,
+ countOf as countOf,
+ is_ as is_,
+ is_not as is_not,
+ index as index,
+ add as add,
+ sub as sub,
+ mul as mul,
+ floordiv as floordiv,
+ truediv as truediv,
+ mod as mod,
+ neg as neg,
+ pos as pos,
+ abs as abs,
+ inv as inv,
+ invert as invert,
+ lshift as lshift,
+ rshift as rshift,
+ not_ as not_,
+ and_ as and_,
+ xor as xor,
+ or_ as or_,
+ iadd as iadd,
+ isub as isub,
+ imul as imul,
+ ifloordiv as ifloordiv,
+ itruediv as itruediv,
+ imod as imod,
+ ilshift as ilshift,
+ irshift as irshift,
+ iand as iand,
+ ixor as ixor,
+ ior as ior,
+ concat as concat,
+ iconcat as iconcat,
+ getitem as getitem,
+ setitem as setitem,
+ delitem as delitem,
+ pow as pow,
+ ipow as ipow,
+ eq as eq,
+ ne as ne,
+ lt as lt,
+ le as le,
+ gt as gt,
+ ge as ge,
+ itemgetter as itemgetter,
+ attrgetter as attrgetter,
+ methodcaller as methodcaller,
+)
+if sys.version_info >= (3, 5):
+ from operator import matmul as matmul, imatmul as imatmul
+if sys.version_info >= (3, 4):
+ from operator import length_hint as length_hint
-class itemgetter:
- def __init__(self, *args, **kwargs): ...
- def __call__(self, *args, **kwargs): ...
- def __reduce__(self): ...
-
-class methodcaller:
- def __init__(self, *args, **kwargs): ...
- def __call__(self, *args, **kwargs): ...
- def __reduce__(self): ...
+def _compare_digest(a: AnyStr, b: AnyStr) -> bool: ...
diff --git a/typeshed/stdlib/3/_posixsubprocess.pyi b/typeshed/stdlib/3/_posixsubprocess.pyi
index a048a10..67b7d7c 100644
--- a/typeshed/stdlib/3/_posixsubprocess.pyi
+++ b/typeshed/stdlib/3/_posixsubprocess.pyi
@@ -2,12 +2,13 @@
# NOTE: These are incomplete!
-from typing import Tuple, Sequence
+from typing import Tuple, Sequence, Callable
def cloexec_pipe() -> Tuple[int, int]: ...
def fork_exec(args: Sequence[str],
- executable_list, close_fds, fds_to_keep, cwd: str, env_list,
+ executable_list: Sequence[bytes], close_fds: bool, fds_to_keep: Sequence[int],
+ cwd: str, env_list: Sequence[bytes],
p2cread: int, p2cwrite: int, c2pred: int, c2pwrite: int,
errread: int, errwrite: int, errpipe_read: int,
- errpipe_write: int, restore_signals, start_new_session,
- preexec_fn) -> int: ...
+ errpipe_write: int, restore_signals: int, start_new_session: int,
+ preexec_fn: Callable[[], None]) -> int: ...
diff --git a/typeshed/stdlib/3/_random.pyi b/typeshed/stdlib/3/_random.pyi
deleted file mode 100644
index b3fcdb4..0000000
--- a/typeshed/stdlib/3/_random.pyi
+++ /dev/null
@@ -1,12 +0,0 @@
-# Stubs for _random
-
-# NOTE: These are incomplete!
-
-from typing import Any
-
-class Random:
- def seed(self, x: Any = ...) -> None: ...
- def getstate(self) -> tuple: ...
- def setstate(self, state: tuple) -> None: ...
- def random(self) -> float: ...
- def getrandbits(self, k: int) -> int: ...
diff --git a/typeshed/stdlib/3/_warnings.pyi b/typeshed/stdlib/3/_warnings.pyi
index b1f0431..4d890e5 100644
--- a/typeshed/stdlib/3/_warnings.pyi
+++ b/typeshed/stdlib/3/_warnings.pyi
@@ -1,11 +1,11 @@
-from typing import Any, List
+from typing import Any, List, Optional, Type
_defaultaction = ... # type: str
_onceregistry = ... # type: dict
filters = ... # type: List[tuple]
-def warn(message: Warning, category: type = ..., stacklevel: int = ...) -> None: ...
-def warn_explicit(message: Warning, category: type,
+def warn(message: Warning, category: Optional[Type[Warning]] = ..., stacklevel: int = ...) -> None: ...
+def warn_explicit(message: Warning, category: Optional[Type[Warning]],
filename: str, lineno: int,
module: Any = ..., registry: dict = ...,
module_globals: dict = ...) -> None: ...
diff --git a/typeshed/stdlib/3/abc.pyi b/typeshed/stdlib/3/abc.pyi
index 80287e1..f50f790 100644
--- a/typeshed/stdlib/3/abc.pyi
+++ b/typeshed/stdlib/3/abc.pyi
@@ -1,13 +1,24 @@
-from typing import Any
+from typing import Any, Callable, Type, TypeVar
import sys
# Stubs for abc.
-# These definitions have special processing in type checker.
+_T = TypeVar('_T')
+_FuncT = TypeVar('_FuncT', bound=Callable[..., Any])
+
+# These definitions have special processing in mypy
class ABCMeta(type):
- def register(cls: "ABCMeta", subclass: Any) -> None: ...
-abstractmethod = object()
-abstractproperty = object()
+ if sys.version_info >= (3, 3):
+ def register(cls: "ABCMeta", subclass: Type[_T]) -> Type[_T]: ...
+ else:
+ def register(cls: "ABCMeta", subclass: Type[Any]) -> None: ...
+
+def abstractmethod(callable: _FuncT) -> _FuncT: ...
+def abstractproperty(callable: _FuncT) -> _FuncT: ...
+# These two are deprecated and not supported by mypy
+def abstractstaticmethod(callable: _FuncT) -> _FuncT: ...
+def abstractclassmethod(callable: _FuncT) -> _FuncT: ...
if sys.version_info >= (3, 4):
class ABC(metaclass=ABCMeta):
pass
+ def get_cache_token() -> object: ...
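
For context, a small sketch of what the tightened abc signatures give you: abstractmethod preserves the decorated function's type, and ABCMeta.register (3.3+) returns the class so it can be used as a decorator (the Reader/FileReader classes are illustrative):

    from abc import ABCMeta, abstractmethod

    class Reader(metaclass=ABCMeta):
        @abstractmethod
        def read(self, n: int) -> bytes: ...

    @Reader.register            # returns the registered class per the new stub
    class FileReader:
        def read(self, n: int) -> bytes:
            return b''
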
diff --git a/typeshed/stdlib/3/binascii.pyi b/typeshed/stdlib/3/binascii.pyi
deleted file mode 100644
index edbd970..0000000
--- a/typeshed/stdlib/3/binascii.pyi
+++ /dev/null
@@ -1,26 +0,0 @@
-# Stubs for binascii
-
-# Based on http://docs.python.org/3.2/library/binascii.html
-
-from typing import Union
-
-def a2b_uu(string: Union[str, bytes]) -> bytes: ...
-def b2a_uu(data: bytes) -> bytes: ...
-def a2b_base64(string: Union[str, bytes]) -> bytes: ...
-def b2a_base64(data: bytes) -> bytes: ...
-def a2b_qp(string: Union[str, bytes], header: bool = ...) -> bytes: ...
-def b2a_qp(data: bytes, quotetabs: bool = ..., istext: bool = ...,
- header: bool = ...) -> bytes: ...
-def a2b_hqx(string: Union[str, bytes]) -> bytes: ...
-def rledecode_hqx(data: bytes) -> bytes: ...
-def rlecode_hqx(data: bytes) -> bytes: ...
-def b2a_hqx(data: bytes) -> bytes: ...
-def crc_hqx(data: bytes, crc: int) -> int: ...
-def crc32(data: bytes, crc: int = ...) -> int: ...
-def b2a_hex(data: bytes) -> bytes: ...
-def hexlify(data: bytes) -> bytes: ...
-def a2b_hex(hexstr: Union[str, bytes]) -> bytes: ...
-def unhexlify(hexlify: Union[str, bytes]) -> bytes: ...
-
-class Error(Exception): ...
-class Incomplete(Exception): ...
diff --git a/typeshed/stdlib/3/builtins.pyi b/typeshed/stdlib/3/builtins.pyi
index 952cb3d..3cd9da4 100644
--- a/typeshed/stdlib/3/builtins.pyi
+++ b/typeshed/stdlib/3/builtins.pyi
@@ -5,7 +5,7 @@ from typing import (
Sequence, MutableSequence, Mapping, MutableMapping, Tuple, List, Any, Dict, Callable, Generic,
Set, AbstractSet, FrozenSet, MutableSet, Sized, Reversible, SupportsInt, SupportsFloat,
SupportsBytes, SupportsAbs, SupportsRound, IO, Union, ItemsView, KeysView, ValuesView,
- ByteString, Optional
+ ByteString, Optional, AnyStr, Type,
)
from abc import abstractmethod, ABCMeta
from types import TracebackType
@@ -24,16 +24,17 @@ _T1 = TypeVar('_T1')
_T2 = TypeVar('_T2')
_T3 = TypeVar('_T3')
_T4 = TypeVar('_T4')
+_T5 = TypeVar('_T5')
_TT = TypeVar('_TT', bound='type')
-class staticmethod: pass # Special, only valid as a decorator.
-class classmethod: pass # Special, only valid as a decorator.
-
class object:
__doc__ = ... # type: Optional[str]
__class__ = ... # type: type
__dict__ = ... # type: Dict[str, Any]
__slots__ = ... # type: Optional[Union[str, Iterable[str]]]
+ __module__ = ... # type: str
+ if sys.version_info >= (3, 6):
+ __annotations__ = ... # type: Dict[str, Any]
def __init__(self) -> None: ...
def __new__(cls) -> Any: ...
@@ -47,10 +48,28 @@ class object:
def __getattribute__(self, name: str) -> Any: ...
def __delattr__(self, name: str) -> None: ...
def __sizeof__(self) -> int: ...
+ def __reduce__(self) -> tuple: ...
+ def __reduce_ex__(self, protocol: int) -> tuple: ...
if sys.version_info >= (3, 6):
def __init_subclass__(cls) -> None: ...
+class staticmethod: # Special, only valid as a decorator.
+ __func__ = ... # type: function
+ __isabstractmethod__ = ... # type: bool
+
+ def __init__(self, f: function) -> None: ...
+ def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ...
+ def __get__(self, obj: _T, type: Optional[Type[_T]]=...) -> function: ...
+
+class classmethod: # Special, only valid as a decorator.
+ __func__ = ... # type: function
+ __isabstractmethod__ = ... # type: bool
+
+ def __init__(self, f: function) -> None: ...
+ def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ...
+ def __get__(self, obj: _T, type: Optional[Type[_T]]=...) -> function: ...
+
class type:
__bases__ = ... # type: Tuple[type, ...]
__name__ = ... # type: str
@@ -75,7 +94,7 @@ class type:
def __instancecheck__(self, instance: Any) -> bool: ...
def __subclasscheck__(self, subclass: type) -> bool: ...
-class super(object):
+class super:
@overload
def __init__(self, t: Any, obj: Any) -> None: ...
@overload
@@ -128,7 +147,7 @@ class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
def __str__(self) -> str: ...
def __float__(self) -> float: ...
- def __int__(self) -> int: return self
+ def __int__(self) -> int: ...
def __abs__(self) -> int: ...
def __hash__(self) -> int: ...
def __bool__(self) -> bool: ...
@@ -215,15 +234,15 @@ class str(Sequence[str]):
def __init__(self, o: bytes, encoding: str = ..., errors: str = 'strict') -> None: ...
def capitalize(self) -> str: ...
def center(self, width: int, fillchar: str = ' ') -> str: ...
- def count(self, x: str) -> int: ...
+ def count(self, x: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def encode(self, encoding: str = 'utf-8', errors: str = 'strict') -> bytes: ...
def endswith(self, suffix: Union[str, Tuple[str, ...]], start: int = None,
end: int = None) -> bool: ...
def expandtabs(self, tabsize: int = 8) -> str: ...
- def find(self, sub: str, start: int = 0, end: int = 0) -> int: ...
+ def find(self, sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def format(self, *args: Any, **kwargs: Any) -> str: ...
def format_map(self, map: Mapping[str, Any]) -> str: ...
- def index(self, sub: str, start: int = 0, end: int = 0) -> int: ...
+ def index(self, sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def isalnum(self) -> bool: ...
def isalpha(self) -> bool: ...
def isdecimal(self) -> bool: ...
@@ -241,8 +260,8 @@ class str(Sequence[str]):
def lstrip(self, chars: str = None) -> str: ...
def partition(self, sep: str) -> Tuple[str, str, str]: ...
def replace(self, old: str, new: str, count: int = -1) -> str: ...
- def rfind(self, sub: str, start: int = 0, end: int = 0) -> int: ...
- def rindex(self, sub: str, start: int = 0, end: int = 0) -> int: ...
+ def rfind(self, sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
+ def rindex(self, sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
def rjust(self, width: int, fillchar: str = ' ') -> str: ...
def rpartition(self, sep: str) -> Tuple[str, str, str]: ...
def rsplit(self, sep: str = None, maxsplit: int = -1) -> List[str]: ...
@@ -268,7 +287,7 @@ class str(Sequence[str]):
def __add__(self, s: str) -> str: ...
def __mul__(self, n: int) -> str: ...
def __rmul__(self, n: int) -> str: ...
- def __mod__(self, *args: Any) -> str: ...
+ def __mod__(self, value: Any) -> str: ...
def __eq__(self, x: object) -> bool: ...
def __ne__(self, x: object) -> bool: ...
def __lt__(self, x: str) -> bool: ...
@@ -279,7 +298,7 @@ class str(Sequence[str]):
def __len__(self) -> int: ...
def __contains__(self, s: object) -> bool: ...
def __iter__(self) -> Iterator[str]: ...
- def __str__(self) -> str: return self
+ def __str__(self) -> str: ...
def __repr__(self) -> str: ...
def __int__(self) -> int: ...
def __float__(self) -> float: ...
@@ -299,14 +318,23 @@ class bytes(ByteString):
def __init__(self, o: SupportsBytes) -> None: ...
def capitalize(self) -> bytes: ...
def center(self, width: int, fillchar: bytes = ...) -> bytes: ...
- def count(self, x: bytes) -> int: ...
+ if sys.version_info >= (3, 3):
+ def count(self, sub: Union[bytes, int], start: int = None, end: int = None) -> int: ...
+ else:
+ def count(self, sub: bytes, start: int = None, end: int = None) -> int: ...
def decode(self, encoding: str = 'utf-8', errors: str = 'strict') -> str: ...
def endswith(self, suffix: Union[bytes, Tuple[bytes, ...]]) -> bool: ...
def expandtabs(self, tabsize: int = 8) -> bytes: ...
- def find(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+ if sys.version_info >= (3, 3):
+ def find(self, sub: Union[bytes, int], start: int = None, end: int = None) -> int: ...
+ else:
+ def find(self, sub: bytes, start: int = None, end: int = None) -> int: ...
if sys.version_info >= (3, 5):
def hex(self) -> str: ...
- def index(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+ if sys.version_info >= (3, 3):
+ def index(self, sub: Union[bytes, int], start: int = None, end: int = None) -> int: ...
+ else:
+ def index(self, sub: bytes, start: int = None, end: int = None) -> int: ...
def isalnum(self) -> bool: ...
def isalpha(self) -> bool: ...
def isdigit(self) -> bool: ...
@@ -320,8 +348,14 @@ class bytes(ByteString):
def lstrip(self, chars: bytes = None) -> bytes: ...
def partition(self, sep: bytes) -> Tuple[bytes, bytes, bytes]: ...
def replace(self, old: bytes, new: bytes, count: int = -1) -> bytes: ...
- def rfind(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
- def rindex(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+ if sys.version_info >= (3, 3):
+ def rfind(self, sub: Union[bytes, int], start: int = None, end: int = None) -> int: ...
+ else:
+ def rfind(self, sub: bytes, start: int = None, end: int = None) -> int: ...
+ if sys.version_info >= (3, 3):
+ def rindex(self, sub: Union[bytes, int], start: int = None, end: int = None) -> int: ...
+ else:
+ def rindex(self, sub: bytes, start: int = None, end: int = None) -> int: ...
def rjust(self, width: int, fillchar: bytes = ...) -> bytes: ...
def rpartition(self, sep: bytes) -> Tuple[bytes, bytes, bytes]: ...
def rsplit(self, sep: bytes = None, maxsplit: int = -1) -> List[bytes]: ...
@@ -354,6 +388,8 @@ class bytes(ByteString):
def __add__(self, s: bytes) -> bytes: ...
def __mul__(self, n: int) -> bytes: ...
def __rmul__(self, n: int) -> bytes: ...
+ if sys.version_info >= (3, 5):
+ def __mod__(self, value: Any) -> bytes: ...
def __contains__(self, o: object) -> bool: ...
def __eq__(self, x: object) -> bool: ...
def __ne__(self, x: object) -> bool: ...
@@ -373,14 +409,23 @@ class bytearray(MutableSequence[int], ByteString):
def __init__(self) -> None: ...
def capitalize(self) -> bytearray: ...
def center(self, width: int, fillchar: bytes = ...) -> bytearray: ...
- def count(self, x: bytes) -> int: ...
+ if sys.version_info >= (3, 3):
+ def count(self, sub: Union[bytes, int], start: int = None, end: int = None) -> int: ...
+ else:
+ def count(self, sub: bytes, start: int = None, end: int = None) -> int: ...
def decode(self, encoding: str = 'utf-8', errors: str = 'strict') -> str: ...
def endswith(self, suffix: bytes) -> bool: ...
def expandtabs(self, tabsize: int = 8) -> bytearray: ...
- def find(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+ if sys.version_info >= (3, 3):
+ def find(self, sub: Union[bytes, int], start: int = None, end: int = None) -> int: ...
+ else:
+ def find(self, sub: bytes, start: int = None, end: int = None) -> int: ...
if sys.version_info >= (3, 5):
def hex(self) -> str: ...
- def index(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+ if sys.version_info >= (3, 3):
+ def index(self, sub: Union[bytes, int], start: int = None, end: int = None) -> int: ...
+ else:
+ def index(self, sub: bytes, start: int = None, end: int = None) -> int: ...
def insert(self, index: int, object: int) -> None: ...
def isalnum(self) -> bool: ...
def isalpha(self) -> bool: ...
@@ -395,8 +440,14 @@ class bytearray(MutableSequence[int], ByteString):
def lstrip(self, chars: bytes = None) -> bytearray: ...
def partition(self, sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ...
def replace(self, old: bytes, new: bytes, count: int = -1) -> bytearray: ...
- def rfind(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
- def rindex(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+ if sys.version_info >= (3, 3):
+ def rfind(self, sub: Union[bytes, int], start: int = None, end: int = None) -> int: ...
+ else:
+ def rfind(self, sub: bytes, start: int = None, end: int = None) -> int: ...
+ if sys.version_info >= (3, 3):
+ def rindex(self, sub: Union[bytes, int], start: int = None, end: int = None) -> int: ...
+ else:
+ def rindex(self, sub: bytes, start: int = None, end: int = None) -> int: ...
def rjust(self, width: int, fillchar: bytes = ...) -> bytearray: ...
def rpartition(self, sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ...
def rsplit(self, sep: bytes = None, maxsplit: int = -1) -> List[bytearray]: ...
@@ -436,6 +487,8 @@ class bytearray(MutableSequence[int], ByteString):
def __mul__(self, n: int) -> bytearray: ...
def __rmul__(self, n: int) -> bytearray: ...
def __imul__(self, n: int) -> bytearray: ...
+ if sys.version_info >= (3, 5):
+ def __mod__(self, value: Any) -> bytes: ...
def __contains__(self, o: object) -> bool: ...
def __eq__(self, x: object) -> bool: ...
def __ne__(self, x: object) -> bool: ...
@@ -487,9 +540,10 @@ class slice:
step = ... # type: Optional[int]
stop = ... # type: Optional[int]
@overload
- def __init__(self, stop: int = None) -> None: ...
+ def __init__(self, stop: Optional[int]) -> None: ...
@overload
- def __init__(self, start: int = None, stop: int = None, step: int = None) -> None: ...
+ def __init__(self, start: Optional[int], stop: Optional[int], step: int = None) -> None: ...
+ def indices(self, len: int) -> Tuple[int, int, int]: ...
class tuple(Sequence[_T_co], Generic[_T_co]):
def __init__(self, iterable: Iterable[_T_co] = ...) -> None: ...
@@ -519,6 +573,7 @@ class function:
__qualname__ = ... # type: str
__module__ = ... # type: str
__code__ = ... # type: Any
+ __annotations__ = ... # type: Dict[str, Any]
class list(MutableSequence[_T], Generic[_T]):
@overload
@@ -571,15 +626,17 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
def __init__(self, map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
@overload
def __init__(self, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
+
+ def __new__(cls: Type[_T1], *args: Any, **kwargs: Any) -> _T1: ...
+
def clear(self) -> None: ...
def copy(self) -> Dict[_KT, _VT]: ...
- def pop(self, k: _KT, default: _VT = None) -> _VT: ...
def popitem(self) -> Tuple[_KT, _VT]: ...
def setdefault(self, k: _KT, default: _VT = None) -> _VT: ...
@overload
- def update(self, m: Mapping[_KT, _VT]) -> None: ...
+ def update(self, m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
@overload
- def update(self, m: Iterable[Tuple[_KT, _VT]]) -> None: ...
+ def update(self, m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
def keys(self) -> KeysView[_KT]: ...
def values(self) -> ValuesView[_VT]: ...
def items(self) -> ItemsView[_KT, _VT]: ...
@@ -600,57 +657,57 @@ class set(MutableSet[_T], Generic[_T]):
def __init__(self, iterable: Iterable[_T] = ...) -> None: ...
def add(self, element: _T) -> None: ...
def clear(self) -> None: ...
- def copy(self) -> set[_T]: ...
- def difference(self, *s: Iterable[Any]) -> set[_T]: ...
+ def copy(self) -> Set[_T]: ...
+ def difference(self, *s: Iterable[Any]) -> Set[_T]: ...
def difference_update(self, *s: Iterable[Any]) -> None: ...
def discard(self, element: _T) -> None: ...
- def intersection(self, *s: Iterable[Any]) -> set[_T]: ...
+ def intersection(self, *s: Iterable[Any]) -> Set[_T]: ...
def intersection_update(self, *s: Iterable[Any]) -> None: ...
def isdisjoint(self, s: Iterable[Any]) -> bool: ...
def issubset(self, s: Iterable[Any]) -> bool: ...
def issuperset(self, s: Iterable[Any]) -> bool: ...
def pop(self) -> _T: ...
def remove(self, element: _T) -> None: ...
- def symmetric_difference(self, s: Iterable[_T]) -> set[_T]: ...
+ def symmetric_difference(self, s: Iterable[_T]) -> Set[_T]: ...
def symmetric_difference_update(self, s: Iterable[_T]) -> None: ...
- def union(self, *s: Iterable[_T]) -> set[_T]: ...
+ def union(self, *s: Iterable[_T]) -> Set[_T]: ...
def update(self, *s: Iterable[_T]) -> None: ...
def __len__(self) -> int: ...
def __contains__(self, o: object) -> bool: ...
def __iter__(self) -> Iterator[_T]: ...
def __str__(self) -> str: ...
- def __and__(self, s: AbstractSet[Any]) -> set[_T]: ...
- def __iand__(self, s: AbstractSet[Any]) -> set[_T]: ...
- def __or__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
- def __ior__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
- def __sub__(self, s: AbstractSet[Any]) -> set[_T]: ...
- def __isub__(self, s: AbstractSet[Any]) -> set[_T]: ...
- def __xor__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
- def __ixor__(self, s: AbstractSet[_S]) -> set[Union[_T, _S]]: ...
+ def __and__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+ def __iand__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+ def __or__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
+ def __ior__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
+ def __sub__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+ def __isub__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+ def __xor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
+ def __ixor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
def __le__(self, s: AbstractSet[Any]) -> bool: ...
def __lt__(self, s: AbstractSet[Any]) -> bool: ...
def __ge__(self, s: AbstractSet[Any]) -> bool: ...
def __gt__(self, s: AbstractSet[Any]) -> bool: ...
# TODO more set operations
-class frozenset(FrozenSet[_T], Generic[_T]):
+class frozenset(AbstractSet[_T], Generic[_T]):
def __init__(self, iterable: Iterable[_T] = ...) -> None: ...
- def copy(self) -> frozenset[_T]: ...
- def difference(self, *s: Iterable[Any]) -> frozenset[_T]: ...
- def intersection(self, *s: Iterable[Any]) -> frozenset[_T]: ...
+ def copy(self) -> FrozenSet[_T]: ...
+ def difference(self, *s: Iterable[Any]) -> FrozenSet[_T]: ...
+ def intersection(self, *s: Iterable[Any]) -> FrozenSet[_T]: ...
def isdisjoint(self, s: Iterable[_T]) -> bool: ...
def issubset(self, s: Iterable[Any]) -> bool: ...
def issuperset(self, s: Iterable[Any]) -> bool: ...
- def symmetric_difference(self, s: Iterable[_T]) -> frozenset[_T]: ...
- def union(self, *s: Iterable[_T]) -> frozenset[_T]: ...
+ def symmetric_difference(self, s: Iterable[_T]) -> FrozenSet[_T]: ...
+ def union(self, *s: Iterable[_T]) -> FrozenSet[_T]: ...
def __len__(self) -> int: ...
def __contains__(self, o: object) -> bool: ...
def __iter__(self) -> Iterator[_T]: ...
def __str__(self) -> str: ...
- def __and__(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
- def __or__(self, s: AbstractSet[_S]) -> frozenset[Union[_T, _S]]: ...
- def __sub__(self, s: AbstractSet[_T]) -> frozenset[_T]: ...
- def __xor__(self, s: AbstractSet[_S]) -> frozenset[Union[_T, _S]]: ...
+ def __and__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ...
+ def __or__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ...
+ def __sub__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ...
+ def __xor__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ...
def __le__(self, s: AbstractSet[Any]) -> bool: ...
def __lt__(self, s: AbstractSet[Any]) -> bool: ...
def __ge__(self, s: AbstractSet[Any]) -> bool: ...
@@ -678,12 +735,6 @@ class range(Sequence[int]):
def __repr__(self) -> str: ...
def __reversed__(self) -> Iterator[int]: ...
-class module:
- # TODO not defined in builtins!
- __name__ = ... # type: str
- __file__ = ... # type: str
- __dict__ = ... # type: Dict[str, Any]
-
class property:
def __init__(self, fget: Callable[[Any], Any] = None,
fset: Callable[[Any, Any], None] = None,
@@ -694,6 +745,9 @@ class property:
def __get__(self, obj: Any, type: type=None) -> Any: ...
def __set__(self, obj: Any, value: Any) -> None: ...
def __delete__(self, obj: Any) -> None: ...
+ def fget(self) -> Any: ...
+ def fset(self, value: Any) -> None: ...
+ def fdel(self) -> None: ...
NotImplemented = ... # type: Any
@@ -716,7 +770,7 @@ def eval(source: str, globals: Dict[str, Any] = None,
locals: Mapping[str, Any] = None) -> Any: ... # TODO code object as source
def exec(object: str, globals: Dict[str, Any] = None,
locals: Mapping[str, Any] = None) -> Any: ... # TODO code object as source
-def exit(code: int = None) -> NoReturn: ...
+def exit(code: Any = ...) -> NoReturn: ...
@overload
def filter(function: Callable[[_T], Any], iterable: Iterable[_T]) -> Iterator[_T]: ...
@overload
@@ -734,8 +788,8 @@ def input(prompt: Any = None) -> str: ...
def iter(iterable: Iterable[_T]) -> Iterator[_T]: ...
@overload
def iter(function: Callable[[], _T], sentinel: _T) -> Iterator[_T]: ...
-def isinstance(o: object, t: Union[type, Tuple[type, ...]]) -> bool: ...
-def issubclass(cls: type, classinfo: Union[type, Tuple[type, ...]]) -> bool: ...
+def isinstance(o: object, t: Union[type, Tuple[Union[type, Tuple], ...]]) -> bool: ...
+def issubclass(cls: type, classinfo: Union[type, Tuple[Union[type, Tuple], ...]]) -> bool: ...
def len(o: Sized) -> int: ...
def license() -> None: ...
def locals() -> Dict[str, Any]: ...
@@ -759,8 +813,13 @@ def next(i: Iterator[_T], default: _VT) -> Union[_T, _VT]: ...
def oct(i: int) -> str: ... # TODO __index__
if sys.version_info >= (3, 6):
- from pathlib import Path
- def open(file: Union[str, bytes, int, Path], mode: str = 'r', buffering: int = -1, encoding: str = None,
+ # This class is to be exported as PathLike from os,
+ # but we define it here as _PathLike to avoid import cycle issues.
+ # See https://github.com/python/typeshed/pull/991#issuecomment-288160993
+ class _PathLike(Generic[AnyStr]):
+ def __fspath__(self) -> AnyStr: ...
+
+ def open(file: Union[str, bytes, int, _PathLike], mode: str = 'r', buffering: int = -1, encoding: str = None,
errors: str = None, newline: str = None, closefd: bool = ...) -> IO[Any]: ...
else:
def open(file: Union[str, bytes, int], mode: str = 'r', buffering: int = -1, encoding: str = None,
@@ -806,7 +865,15 @@ def zip(iter1: Iterable[_T1], iter2: Iterable[_T2],
@overload
def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3],
iter4: Iterable[_T4]) -> Iterator[Tuple[_T1, _T2,
- _T3, _T4]]: ... # TODO more than four iterables
+ _T3, _T4]]: ...
+@overload
+def zip(iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3],
+ iter4: Iterable[_T4], iter5: Iterable[_T5]) -> Iterator[Tuple[_T1, _T2,
+ _T3, _T4, _T5]]: ...
+@overload
+def zip(iter1: Iterable[Any], iter2: Iterable[Any], iter3: Iterable[Any],
+ iter4: Iterable[Any], iter5: Iterable[Any], iter6: Iterable[Any],
+ *iterables: Iterable[Any]) -> Iterator[Tuple[Any, ...]]: ...
def __import__(name: str, globals: Dict[str, Any] = ..., locals: Dict[str, Any] = ...,
fromlist: List[str] = ..., level: int = -1) -> Any: ...
@@ -848,6 +915,8 @@ class EOFError(Exception): ...
class FloatingPointError(ArithmeticError): ...
class IOError(EnvironmentError): ...
class ImportError(Exception): ...
+if sys.version_info >= (3, 6):
+ class ModuleNotFoundError(ImportError): ...
class IndexError(LookupError): ...
class KeyError(LookupError): ...
class MemoryError(Exception): ...
@@ -885,6 +954,7 @@ class SyntaxError(Exception):
lineno = ... # type: int
offset = ... # type: int
text = ... # type: str
+ filename = ... # type: str
class IndentationError(SyntaxError): ...
class TabError(IndentationError): ...
class SystemError(Exception): ...
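
A couple of illustrative one-liners for the builtins changes above, namely the new zip overloads up to five typed iterables and the optional start/end parameters on str.find:

    fives = zip([1, 2], ['a', 'b'], [1.0, 2.0], [True, False], [b'x', b'y'])
    # Iterator[Tuple[int, str, float, bool, bytes]] under the five-iterable overload
    print(list(fives))

    print('banana'.find('an', 2))   # optional __start argument; prints 3
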
diff --git a/typeshed/stdlib/3/calendar.pyi b/typeshed/stdlib/3/calendar.pyi
index a9ce686..957f8ce 100644
--- a/typeshed/stdlib/3/calendar.pyi
+++ b/typeshed/stdlib/3/calendar.pyi
@@ -19,7 +19,7 @@ def leapdays(y1: int, y2: int) -> int: ...
def weekday(year: int, month: int, day: int) -> int: ...
def monthrange(year: int, month: int) -> Tuple[int, int]: ...
-class Calendar(object):
+class Calendar:
def __init__(self, firstweekday: int = 0) -> None: ...
def getfirstweekday(self) -> int: ...
def setfirstweekday(self, firstweekday: int) -> None: ...
diff --git a/typeshed/stdlib/3/cgi.pyi b/typeshed/stdlib/3/cgi.pyi
deleted file mode 100644
index 16f5598..0000000
--- a/typeshed/stdlib/3/cgi.pyi
+++ /dev/null
@@ -1,4 +0,0 @@
-from typing import Dict, Tuple
-
-def escape(s: str, quote: bool = ...) -> str: ...
-def parse_header(s: str) -> Tuple[str, Dict[str, str]]: ...
diff --git a/typeshed/stdlib/3/codecs.pyi b/typeshed/stdlib/3/codecs.pyi
deleted file mode 100644
index cc56eff..0000000
--- a/typeshed/stdlib/3/codecs.pyi
+++ /dev/null
@@ -1,194 +0,0 @@
-# Better codecs stubs hand-written by o11c.
-# https://docs.python.org/3/library/codecs.html
-from typing import (
- BinaryIO,
- Callable,
- Iterable,
- Iterator,
- List,
- Tuple,
- Union,
-)
-
-from abc import abstractmethod
-
-
-# TODO: this only satisfies the most common interface, where
-# bytes is the raw form and str is the cooked form.
-# In the long run, both should become template parameters maybe?
-# There *are* bytes->bytes and str->str encodings in the standard library.
-# Python 3.5 supposedly might change something there.
-
-_decoded = str
-_encoded = bytes
-
-# TODO: It is not possible to specify these signatures correctly, because
-# they have an optional positional or keyword argument for errors=.
-_encode_type = Callable[[_decoded], _encoded] # signature of Codec().encode
-_decode_type = Callable[[_encoded], _decoded] # signature of Codec().decode
-_stream_reader_type = Callable[[BinaryIO], 'StreamReader'] # signature of StreamReader __init__
-_stream_writer_type = Callable[[BinaryIO], 'StreamWriter'] # signature of StreamWriter __init__
-_incremental_encoder_type = Callable[[], 'IncrementalEncoder'] # signature of IncrementalEncoder __init__
-_incremental_decoder_type = Callable[[], 'IncrementalDecoder'] # signature of IncrementalDecoder __init__
-
-
-def encode(obj: _decoded, encoding: str = ..., errors: str = ...) -> _encoded:
- ...
-def decode(obj: _encoded, encoding: str = ..., errors: str = ...) -> _decoded:
- ...
-
-def lookup(encoding: str) -> 'CodecInfo':
- ...
-class CodecInfo(Tuple[_encode_type, _decode_type, _stream_reader_type, _stream_writer_type]):
- def __init__(self, encode: _encode_type, decode: _decode_type, streamreader: _stream_reader_type = ..., streamwriter: _stream_writer_type = ..., incrementalencoder: _incremental_encoder_type = ..., incrementaldecoder: _incremental_decoder_type = ..., name: str = ...) -> None:
- self.encode = encode
- self.decode = decode
- self.streamreader = streamreader
- self.streamwriter = streamwriter
- self.incrementalencoder = incrementalencoder
- self.incrementaldecoder = incrementaldecoder
- self.name = name
-
-def getencoder(encoding: str) -> _encode_type:
- ...
-def getdecoder(encoding: str) -> _decode_type:
- ...
-def getincrementalencoder(encoding: str) -> _incremental_encoder_type:
- ...
-def getincrementaldecoder(encoding: str) -> _incremental_decoder_type:
- ...
-def getreader(encoding: str) -> _stream_reader_type:
- ...
-def getwriter(encoding: str) -> _stream_writer_type:
- ...
-
-def register(search_function: Callable[[str], CodecInfo]) -> None:
- ...
-
-def open(filename: str, mode: str = ..., encoding: str = ..., errors: str = ..., buffering: int = ...) -> StreamReaderWriter:
- ...
-
-def EncodedFile(file: BinaryIO, data_encoding: str, file_encoding: str = ..., errors: str = ...) -> 'StreamRecoder':
- ...
-
-def iterencode(iterator: Iterable[_decoded], encoding: str, errors: str = ...) -> Iterator[_encoded]:
- ...
-def iterdecode(iterator: Iterable[_encoded], encoding: str, errors: str = ...) -> Iterator[_decoded]:
- ...
-
-BOM = b''
-BOM_BE = b''
-BOM_LE = b''
-BOM_UTF8 = b''
-BOM_UTF16 = b''
-BOM_UTF16_BE = b''
-BOM_UTF16_LE = b''
-BOM_UTF32 = b''
-BOM_UTF32_BE = b''
-BOM_UTF32_LE = b''
-
-# It is expected that different actions be taken depending on which of the
-# three subclasses of `UnicodeError` is actually ...ed. However, the Union
-# is still needed for at least one of the cases.
-def register_error(name: str, error_handler: Callable[[UnicodeError], Tuple[Union[str, bytes], int]]) -> None:
- ...
-def lookup_error(name: str) -> Callable[[UnicodeError], Tuple[Union[str, bytes], int]]:
- ...
-
-def strict_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]:
- ...
-def replace_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]:
- ...
-def ignore_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]:
- ...
-def xmlcharrefreplace_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]:
- ...
-def backslashreplace_errors(exception: UnicodeError) -> Tuple[Union[str, bytes], int]:
- ...
-
-class Codec:
- # These are sort of @abstractmethod but sort of not.
- # The StreamReader and StreamWriter subclasses only implement one.
- def encode(self, input: _decoded, errors: str = ...) -> Tuple[_encoded, int]:
- ...
- def decode(self, input: _encoded, errors: str = ...) -> Tuple[_decoded, int]:
- ...
-
-class IncrementalEncoder:
- def __init__(self, errors: str = ...) -> None:
- self.errors = errors
- @abstractmethod
- def encode(self, object: _decoded, final: bool = ...) -> _encoded:
- ...
- def reset(self) -> None:
- ...
- # documentation says int but str is needed for the subclass.
- def getstate(self) -> Union[int, _decoded]:
- ...
- def setstate(self, state: Union[int, _decoded]) -> None:
- ...
-
-class IncrementalDecoder:
- def __init__(self, errors: str = ...) -> None:
- self.errors = errors
- @abstractmethod
- def decode(self, object: _encoded, final: bool = ...) -> _decoded:
- ...
- def reset(self) -> None:
- ...
- def getstate(self) -> Tuple[_encoded, int]:
- ...
- def setstate(self, state: Tuple[_encoded, int]) -> None:
- ...
-
-# These are not documented but used in encodings/*.py implementations.
-class BufferedIncrementalEncoder(IncrementalEncoder):
- def __init__(self, errors: str = ...) -> None:
- IncrementalEncoder.__init__(self, errors)
- self.buffer = ''
- @abstractmethod
- def _buffer_encode(self, input: _decoded, errors: str, final: bool) -> _encoded:
- ...
- def encode(self, input: _decoded, final: bool = ...) -> _encoded:
- ...
-class BufferedIncrementalDecoder(IncrementalDecoder):
- def __init__(self, errors: str = ...) -> None:
- IncrementalDecoder.__init__(self, errors)
- self.buffer = b''
- @abstractmethod
- def _buffer_decode(self, input: _encoded, errors: str, final: bool) -> Tuple[_decoded, int]:
- ...
- def decode(self, object: _encoded, final: bool = ...) -> _decoded:
- ...
-
-# TODO: it is not possible to specify the requirement that all other
-# attributes and methods are passed-through from the stream.
-class StreamWriter(Codec):
- def __init__(self, stream: BinaryIO, errors: str = ...) -> None:
- self.errors = errors
- def write(self, obj: _decoded) -> None:
- ...
- def writelines(self, list: List[str]) -> None:
- ...
- def reset(self) -> None:
- ...
-
-class StreamReader(Codec):
- def __init__(self, stream: BinaryIO, errors: str = ...) -> None:
- self.errors = errors
- def read(self, size: int = ..., chars: int = ..., firstline: bool = ...) -> _decoded:
- ...
- def readline(self, size: int = ..., keepends: bool = ...) -> _decoded:
- ...
- def readlines(self, sizehint: int = ..., keepends: bool = ...) -> List[_decoded]:
- ...
- def reset(self) -> None:
- ...
-
-class StreamReaderWriter:
- def __init__(self, stream: BinaryIO, Reader: _stream_reader_type, Writer: _stream_writer_type, errors: str = ...) -> None:
- ...
-
-class StreamRecoder(BinaryIO):
- def __init__(self, stream: BinaryIO, encode: _encode_type, decode: _decode_type, Reader: _stream_reader_type, Writer: _stream_writer_type, errors: str = ...) -> None:
- ...
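
As a quick orientation for the interfaces typed above, a minimal sketch of how lookup() and the incremental codec classes behave at runtime (UTF-8 is chosen only for illustration):

    import codecs

    info = codecs.lookup("utf-8")            # CodecInfo: (encode, decode, streamreader, streamwriter, ...)
    decoder = info.incrementaldecoder()      # IncrementalDecoder(errors='strict')
    # A multi-byte character split across two chunks; state is kept between calls.
    text = decoder.decode(b"caf\xc3", final=False) + decoder.decode(b"\xa9", final=True)
    assert text == "café"
    data, consumed = info.encode("café")     # Codec-style encode returns (encoded, length consumed)
    assert data == b"caf\xc3\xa9" and consumed == 4
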
diff --git a/typeshed/stdlib/3/collections/__init__.pyi b/typeshed/stdlib/3/collections/__init__.pyi
index 5cb89f7..4e460b8 100644
--- a/typeshed/stdlib/3/collections/__init__.pyi
+++ b/typeshed/stdlib/3/collections/__init__.pyi
@@ -2,17 +2,18 @@
# Based on http://docs.python.org/3.2/library/collections.html
-# TODO more abstract base classes (interfaces in mypy)
-
# These are not exported.
import sys
+import typing
from typing import (
TypeVar, Generic, Dict, overload, List, Tuple,
- Callable, Any, Type, Optional, Union
+ Any, Type, Optional, Union
)
# These are exported.
-# TODO reexport more.
+from . import abc
+
from typing import (
+ Callable as Callable,
Container as Container,
Hashable as Hashable,
Iterable as Iterable,
@@ -20,10 +21,6 @@ from typing import (
Sized as Sized,
Generator as Generator,
ByteString as ByteString,
- Awaitable as Awaitable,
- Coroutine as Coroutine,
- AsyncIterable as AsyncIterable,
- AsyncIterator as AsyncIterator,
Reversible as Reversible,
Mapping as Mapping,
MappingView as MappingView,
@@ -37,7 +34,17 @@ from typing import (
AbstractSet as Set,
)
if sys.version_info >= (3, 6):
- from typing import AsyncGenerator as AsyncGenerator
+ from typing import (
+ Collection as Collection,
+ AsyncGenerator as AsyncGenerator,
+ )
+if sys.version_info >= (3, 5):
+ from typing import (
+ Awaitable as Awaitable,
+ Coroutine as Coroutine,
+ AsyncIterable as AsyncIterable,
+ AsyncIterator as AsyncIterator,
+ )
_T = TypeVar('_T')
_KT = TypeVar('_KT')
@@ -124,25 +131,25 @@ class Counter(Dict[_T, int], Generic[_T]):
# Dict.update. Not sure if we should use '# type: ignore' instead
# and omit the type from the union.
@overload
- def update(self, m: Mapping[_T, int]) -> None: ...
+ def update(self, m: Mapping[_T, int], **kwargs: int) -> None: ...
@overload
- def update(self, m: Union[Iterable[_T], Iterable[Tuple[_T, int]]]) -> None: ...
+ def update(self, m: Union[Iterable[_T], Iterable[Tuple[_T, int]]], **kwargs: int) -> None: ...
- def __add__(self, other: Counter[_T]) -> Counter[_T]: ...
- def __sub__(self, other: Counter[_T]) -> Counter[_T]: ...
- def __and__(self, other: Counter[_T]) -> Counter[_T]: ...
- def __or__(self, other: Counter[_T]) -> Counter[_T]: ...
- def __pos__(self) -> Counter[_T]: ...
- def __neg__(self) -> Counter[_T]: ...
- def __iadd__(self, other: Counter[_T]) -> Counter[_T]: ...
- def __isub__(self, other: Counter[_T]) -> Counter[_T]: ...
- def __iand__(self, other: Counter[_T]) -> Counter[_T]: ...
- def __ior__(self, other: Counter[_T]) -> Counter[_T]: ...
+ def __add__(self, other: typing.Counter[_T]) -> typing.Counter[_T]: ...
+ def __sub__(self, other: typing.Counter[_T]) -> typing.Counter[_T]: ...
+ def __and__(self, other: typing.Counter[_T]) -> typing.Counter[_T]: ...
+ def __or__(self, other: typing.Counter[_T]) -> typing.Counter[_T]: ...
+ def __pos__(self) -> typing.Counter[_T]: ...
+ def __neg__(self) -> typing.Counter[_T]: ...
+ def __iadd__(self, other: typing.Counter[_T]) -> typing.Counter[_T]: ...
+ def __isub__(self, other: typing.Counter[_T]) -> typing.Counter[_T]: ...
+ def __iand__(self, other: typing.Counter[_T]) -> typing.Counter[_T]: ...
+ def __ior__(self, other: typing.Counter[_T]) -> typing.Counter[_T]: ...
-class OrderedDict(Dict[_KT, _VT], Generic[_KT, _VT]):
+class OrderedDict(Dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]):
def popitem(self, last: bool = ...) -> Tuple[_KT, _VT]: ...
def move_to_end(self, key: _KT, last: bool = ...) -> None: ...
-
+ def __reversed__(self) -> Iterator[_KT]: ...
class defaultdict(Dict[_KT, _VT], Generic[_KT, _VT]):
default_factory = ... # type: Callable[[], _VT]
@@ -176,7 +183,13 @@ if sys.version_info >= (3, 3):
@property
def maps(self) -> List[Mapping[_KT, _VT]]: ...
- def new_child(self, m: Mapping[_KT, _VT] = ...) -> ChainMap[_KT, _VT]: ...
+ def new_child(self, m: Mapping[_KT, _VT] = ...) -> typing.ChainMap[_KT, _VT]: ...
@property
- def parents(self) -> ChainMap[_KT, _VT]: ...
+ def parents(self) -> typing.ChainMap[_KT, _VT]: ...
+
+ def __setitem__(self, k: _KT, v: _VT) -> None: ...
+ def __delitem__(self, v: _KT) -> None: ...
+ def __getitem__(self, k: _KT) -> _VT: ...
+ def __iter__(self) -> Iterator[_KT]: ...
+ def __len__(self) -> int: ...
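
For context, a small sketch of the runtime behaviour the tightened Counter/ChainMap/OrderedDict annotations describe (the values are arbitrary):

    from collections import ChainMap, Counter, OrderedDict

    total = Counter(a=2) + Counter(a=1, b=5)          # arithmetic returns a new Counter
    assert total == Counter(a=3, b=5)
    total.update({"c": 1}, d=2)                       # update() also accepts keyword counts
    assert total["d"] == 2

    child = ChainMap({"x": 1}).new_child({"y": 2})    # new_child() returns another ChainMap
    assert child["y"] == 2 and child.parents["x"] == 1

    od = OrderedDict([("a", 1), ("b", 2)])
    assert list(reversed(od)) == ["b", "a"]           # OrderedDict is Reversible
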
diff --git a/typeshed/stdlib/3/collections/abc.pyi b/typeshed/stdlib/3/collections/abc.pyi
index 5106ec5..03e7086 100644
--- a/typeshed/stdlib/3/collections/abc.pyi
+++ b/typeshed/stdlib/3/collections/abc.pyi
@@ -34,6 +34,7 @@ if sys.version_info >= (3, 5):
if sys.version_info >= (3, 6):
from . import (
+ Collection as Collection,
Reversible as Reversible,
AsyncGenerator as AsyncGenerator,
)
diff --git a/typeshed/stdlib/3/compileall.pyi b/typeshed/stdlib/3/compileall.pyi
new file mode 100644
index 0000000..d8093bf
--- /dev/null
+++ b/typeshed/stdlib/3/compileall.pyi
@@ -0,0 +1,18 @@
+# Stubs for compileall (Python 3)
+
+import os
+import sys
+from typing import Optional, Union, Pattern
+
+if sys.version_info < (3, 6):
+ _Path = Union[str, bytes]
+else:
+ _Path = Union[str, bytes, os.PathLike]
+
+# fx can be any object with a 'search' method; once we have Protocols we can change the type
+if sys.version_info < (3, 5):
+ def compile_dir(dir: _Path, maxlevels: int = ..., ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern] = ..., quiet: int = ..., legacy: bool = ..., optimize: int = ...) -> None: ...
+else:
+ def compile_dir(dir: _Path, maxlevels: int = ..., ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern] = ..., quiet: int = ..., legacy: bool = ..., optimize: int = ..., workers: int = ...) -> None: ...
+def compile_file(fullname: _Path, ddir: Optional[_Path] = ..., force: bool = ..., rx: Optional[Pattern] = ..., quiet: int = ..., legacy: bool = ..., optimize: int = ...) -> None: ...
+def compile_path(skip_curdir: bool = ..., maxlevels: int = ..., force: bool = ..., quiet: int = ..., legacy: bool = ..., optimize: int = ...) -> None: ...
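
A minimal sketch exercising compile_dir() as typed above (the temporary directory is purely illustrative):

    import compileall, os, tempfile

    d = tempfile.mkdtemp()
    with open(os.path.join(d, "m.py"), "w") as f:
        f.write("x = 1\n")
    compileall.compile_dir(d, quiet=1)        # byte-compiles into d/__pycache__/
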
diff --git a/typeshed/stdlib/3/concurrent/futures/_base.pyi b/typeshed/stdlib/3/concurrent/futures/_base.pyi
index 27c2711..00ef5b6 100644
--- a/typeshed/stdlib/3/concurrent/futures/_base.pyi
+++ b/typeshed/stdlib/3/concurrent/futures/_base.pyi
@@ -27,10 +27,10 @@ class Future(Generic[_T]):
def done(self) -> bool: ...
def add_done_callback(self, fn: Callable[[Future], Any]) -> None: ...
def result(self, timeout: Optional[float] = ...) -> _T: ...
- def exception(self, timeout: Optional[float] = ...) -> Exception: ...
+ def exception(self, timeout: Optional[float] = ...) -> BaseException: ...
def set_running_or_notify_cancel(self) -> None: ...
def set_result(self, result: _T) -> None: ...
- def set_exception(self, exception: Exception) -> None: ...
+ def set_exception(self, exception: BaseException) -> None: ...
class Executor:
def submit(self, fn: Callable[..., _T], *args: Any, **kwargs: Any) -> Future[_T]: ...
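
The Exception -> BaseException widening matters because a future can carry errors such as KeyboardInterrupt that are not Exception subclasses; a small sketch:

    from concurrent.futures import Future

    f = Future()
    f.set_exception(KeyboardInterrupt())          # BaseException, not an Exception subclass
    assert isinstance(f.exception(), KeyboardInterrupt)
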
diff --git a/typeshed/stdlib/3/copy.pyi b/typeshed/stdlib/3/copy.pyi
deleted file mode 100644
index 0661cb7..0000000
--- a/typeshed/stdlib/3/copy.pyi
+++ /dev/null
@@ -1,10 +0,0 @@
-# Stubs for copy
-
-# NOTE: These are incomplete!
-
-from typing import TypeVar, Dict, Any
-
-_T = TypeVar('_T')
-
-def deepcopy(x: _T, memo: Dict[Any, Any] = ...) -> _T: ...
-def copy(x: _T) -> _T: ...
diff --git a/typeshed/stdlib/3/datetime.pyi b/typeshed/stdlib/3/datetime.pyi
index 7925226..a894ab8 100644
--- a/typeshed/stdlib/3/datetime.pyi
+++ b/typeshed/stdlib/3/datetime.pyi
@@ -31,7 +31,7 @@ class date:
max = ... # type: date
resolution = ... # type: timedelta
- def __init__(self, year: int, month: int = ..., day: int = ...) -> None: ...
+ def __init__(self, year: int, month: int, day: int) -> None: ...
@classmethod
def fromtimestamp(cls, t: float) -> date: ...
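
The signature change above simply mirrors the runtime constructor, where month and day are mandatory:

    from datetime import date

    d = date(2017, 6, 24)      # fine
    # date(2017) raises TypeError at runtime and is now rejected by the checker too.
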
diff --git a/typeshed/stdlib/3/difflib.pyi b/typeshed/stdlib/3/difflib.pyi
index eaf068e..f2fb7a8 100644
--- a/typeshed/stdlib/3/difflib.pyi
+++ b/typeshed/stdlib/3/difflib.pyi
@@ -47,7 +47,7 @@ def ndiff(a: Sequence[str], b: Sequence[str],
charjunk: Callable[[str], bool] = ...
) -> Iterator[str]: ...
-class HtmlDiff(object):
+class HtmlDiff:
def __init__(self, tabsize: int = ..., wrapcolumn: int = ...,
linejunk: Callable[[str], bool] = ...,
charjunk: Callable[[str], bool] = ...
diff --git a/typeshed/stdlib/3/dis.pyi b/typeshed/stdlib/3/dis.pyi
deleted file mode 100644
index c26afc1..0000000
--- a/typeshed/stdlib/3/dis.pyi
+++ /dev/null
@@ -1,63 +0,0 @@
-from typing import List, Union, Iterator, Tuple, Optional, Any, IO, NamedTuple
-
-from opcode import (hasconst, hasname, hasjrel, hasjabs, haslocal, hascompare,
- hasfree, hasnargs, cmp_op, opname, opmap, HAVE_ARGUMENT,
- EXTENDED_ARG, stack_effect)
-
-import types
-
-_have_code = Union[types.MethodType, types.FunctionType, types.CodeType, type]
-_have_code_or_string = Union[_have_code, str, bytes]
-
-
-Instruction = NamedTuple(
- "Instruction",
- [
- ('opname', str),
- ('opcode', int),
- ('arg', Optional[int]),
- ('argval', Any),
- ('argrepr', str),
- ('offset', int),
- ('starts_line', Optional[int]),
- ('is_jump_target', bool)
- ]
-)
-
-
-# if sys.version_info >= (3, 4):
-class Bytecode:
- codeobj = ... # type: types.CodeType
- first_line = ... # type: int
- def __init__(self, x: _have_code_or_string, *, first_line: int=...,
- current_offset: int=...) -> None: ...
- def __iter__(self) -> Iterator[Instruction]: ...
- def __repr__(self) -> str: ...
- def info(self) -> str: ...
- def dis(self) -> str: ...
-
- @classmethod
- def from_traceback(cls, tb: types.TracebackType) -> Bytecode: ...
-
-
-COMPILER_FLAG_NAMES = ... # type: Dict[int, str]
-
-
-def pretty_flags(flags: int) -> str: ...
-def findlabels(code: _have_code) -> List[int]: ...
-def findlinestarts(code: _have_code) -> Iterator[Tuple[int, int]]: ...
-
-# Signature changes are not allowed by mypy
-# 'All conditional function variants must have identical signatures'
-# TODO: mypy issue #698
-
-# if sys.version_info >= (3, 2):
-def code_info(x: _have_code_or_string) -> str: ...
-
-# `file` parameter requires sys.version_info >= (3, 4):
-def dis(x: _have_code_or_string = ..., *, file: IO[str] = None) -> None: ...
-def distb(tb: types.TracebackType = ..., *, file: IO[str] = None) -> None: ...
-def disassemble(co: _have_code, lasti: int = ..., *, file: IO[str] = None) -> None: ...
-def show_code(co: _have_code, *, file: IO[str] = None) -> None: ...
-
-def get_instructions(x: _have_code, *, first_line: int = ...) -> Iterator[Instruction]: ...
diff --git a/typeshed/stdlib/3/email/message.pyi b/typeshed/stdlib/3/email/message.pyi
index d2068f6..3acd111 100644
--- a/typeshed/stdlib/3/email/message.pyi
+++ b/typeshed/stdlib/3/email/message.pyi
@@ -118,7 +118,7 @@ class MIMEPart:
**kw: Any) -> None: ...
def clear(self) -> None: ...
def clear_content(self) -> None: ...
- if sys.version_info >= (3, 4, 2):
+ if sys.version_info >= (3, 4):
def is_attachment(self) -> bool: ...
else:
@property
diff --git a/typeshed/stdlib/3/encodings/utf_8.pyi b/typeshed/stdlib/3/encodings/utf_8.pyi
index 3be496a..0111184 100644
--- a/typeshed/stdlib/3/encodings/utf_8.pyi
+++ b/typeshed/stdlib/3/encodings/utf_8.pyi
@@ -9,6 +9,6 @@ class StreamWriter(codecs.StreamWriter):
class StreamReader(codecs.StreamReader):
pass
-def getregentry() -> codecs.CodecInfo: pass
-def encode(input: str, errors: str = ...) -> bytes: pass
-def decode(input: bytes, errors: str = ...) -> str: pass
+def getregentry() -> codecs.CodecInfo: ...
+def encode(input: str, errors: str = ...) -> bytes: ...
+def decode(input: bytes, errors: str = ...) -> str: ...
diff --git a/typeshed/stdlib/3/fileinput.pyi b/typeshed/stdlib/3/fileinput.pyi
index dead939..9ae4c0f 100644
--- a/typeshed/stdlib/3/fileinput.pyi
+++ b/typeshed/stdlib/3/fileinput.pyi
@@ -1,4 +1,13 @@
-from typing import Iterable, Callable, IO, AnyStr, Generic, Any, Union, Iterator
+from typing import Iterable, Callable, IO, AnyStr, Generic, Any, Union, Iterator, Optional
+
+import os
+import sys
+
+if sys.version_info >= (3, 6):
+ _Path = Union[str, bytes, os.PathLike[Any]]
+else:
+ _Path = Union[str, bytes]
+_Opener = Callable[[_Path, str], IO[AnyStr]]
def input(
@@ -20,12 +29,12 @@ def isstdin() -> bool: ...
class FileInput(Iterable[AnyStr], Generic[AnyStr]):
def __init__(
self,
- files: Union[str, Iterable[str]]=None,
- inplace: bool=...,
- backup: str=...,
- bufsize: int=...,
- mode: str=...,
- openhook: Callable[[str, str], IO[AnyStr]]=...
+ files: Union[None, _Path, Iterable[_Path]] = ...,
+ inplace: bool = ...,
+ backup: str = ...,
+ bufsize: int = ...,
+ mode: str = ...,
+ openhook: _Opener[AnyStr] = ...
) -> None: ...
def __del__(self) -> None: ...
@@ -34,7 +43,7 @@ class FileInput(Iterable[AnyStr], Generic[AnyStr]):
def __exit__(self, type: Any, value: Any, traceback: Any) -> None: ...
def __iter__(self) -> Iterator[AnyStr]: ...
def __next__(self) -> AnyStr: ...
- def __getitem__(self, i) -> AnyStr: ...
+ def __getitem__(self, i: int) -> AnyStr: ...
def nextfile(self) -> None: ...
def readline(self) -> AnyStr: ...
def filename(self) -> str: ...
@@ -44,5 +53,5 @@ class FileInput(Iterable[AnyStr], Generic[AnyStr]):
def isfirstline(self) -> bool: ...
def isstdin(self) -> bool: ...
-def hook_compressed(filename: str, mode: str) -> IO[AnyStr]: ...
-def hook_encoded(encoding: str) -> IO[AnyStr]: ...
+def hook_compressed(filename: _Path, mode: str) -> IO[Any]: ...
+def hook_encoded(encoding: str, errors: Optional[str] = ...) -> _Opener[Any]: ...
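
A short sketch of FileInput with hook_encoded(), matching the _Opener shape above (the temporary file is only illustrative):

    import fileinput, os, tempfile

    fd, path = tempfile.mkstemp()
    os.close(fd)
    with open(path, "w", encoding="utf-8") as f:
        f.write("héllo\n")
    with fileinput.FileInput(files=[path], openhook=fileinput.hook_encoded("utf-8")) as fi:
        assert [line.rstrip("\n") for line in fi] == ["héllo"]
    os.unlink(path)
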
diff --git a/typeshed/stdlib/3/hashlib.pyi b/typeshed/stdlib/3/hashlib.pyi
index 4d3709b..fd47015 100644
--- a/typeshed/stdlib/3/hashlib.pyi
+++ b/typeshed/stdlib/3/hashlib.pyi
@@ -1,7 +1,9 @@
# Stubs for hashlib
from abc import abstractmethod, ABCMeta
-from typing import AbstractSet
+from typing import AbstractSet, Union
+
+_DataType = Union[bytes, bytearray, memoryview]
class Hash(metaclass=ABCMeta):
digest_size = ... # type: int
@@ -13,7 +15,7 @@ class Hash(metaclass=ABCMeta):
name = ... # type: str
@abstractmethod
- def update(self, arg: bytes) -> None: ...
+ def update(self, arg: _DataType) -> None: ...
@abstractmethod
def digest(self) -> bytes: ...
@abstractmethod
@@ -21,20 +23,18 @@ class Hash(metaclass=ABCMeta):
@abstractmethod
def copy(self) -> 'Hash': ...
-def md5(arg: bytes = ...) -> Hash: ...
-def sha1(arg: bytes = ...) -> Hash: ...
-def sha224(arg: bytes = ...) -> Hash: ...
-def sha256(arg: bytes = ...) -> Hash: ...
-def sha384(arg: bytes = ...) -> Hash: ...
-def sha512(arg: bytes = ...) -> Hash: ...
+def md5(arg: _DataType = ...) -> Hash: ...
+def sha1(arg: _DataType = ...) -> Hash: ...
+def sha224(arg: _DataType = ...) -> Hash: ...
+def sha256(arg: _DataType = ...) -> Hash: ...
+def sha384(arg: _DataType = ...) -> Hash: ...
+def sha512(arg: _DataType = ...) -> Hash: ...
-def new(name: str, data: bytes = ...) -> Hash: ...
+def new(name: str, data: _DataType = ...) -> Hash: ...
# New in version 3.2
algorithms_guaranteed = ... # type: AbstractSet[str]
algorithms_available = ... # type: AbstractSet[str]
# New in version 3.4
-# TODO The documentation says "password and salt are interpreted as buffers of
-# bytes", should we declare something other than bytes here?
-def pbkdf2_hmac(name: str, password: bytes, salt: bytes, rounds: int, dklen: int = ...) -> bytes: ...
+def pbkdf2_hmac(name: str, password: _DataType, salt: _DataType, rounds: int, dklen: int = ...) -> bytes: ...
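
The new _DataType alias reflects that any bytes-like buffer is accepted at runtime:

    import hashlib

    h = hashlib.sha256()
    h.update(b"abc")
    h.update(bytearray(b"def"))
    h.update(memoryview(b"ghi"))
    assert h.hexdigest() == hashlib.sha256(b"abcdefghi").hexdigest()
    key = hashlib.pbkdf2_hmac("sha256", memoryview(b"pw"), bytearray(b"salt"), 1000)
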
diff --git a/typeshed/stdlib/3/http/__init__.pyi b/typeshed/stdlib/3/http/__init__.pyi
index a9a77da..a72ccba 100644
--- a/typeshed/stdlib/3/http/__init__.pyi
+++ b/typeshed/stdlib/3/http/__init__.pyi
@@ -9,60 +9,60 @@ if sys.version_info >= (3, 5):
self.phrase = ... # type: str
self.description = ... # type: str
- CONTINUE = ... # type: object
- SWITCHING_PROTOCOLS = ... # type: object
- PROCESSING = ... # type: object
- OK = ... # type: object
- CREATED = ... # type: object
- ACCEPTED = ... # type: object
- NON_AUTHORITATIVE_INFORMATION = ... # type: object
- NO_CONTENT = ... # type: object
- RESET_CONTENT = ... # type: object
- PARTIAL_CONTENT = ... # type: object
- MULTI_STATUS = ... # type: object
- ALREADY_REPORTED = ... # type: object
- IM_USED = ... # type: object
- MULTIPLE_CHOICES = ... # type: object
- MOVED_PERMANENTLY = ... # type: object
- FOUND = ... # type: object
- SEE_OTHER = ... # type: object
- NOT_MODIFIED = ... # type: object
- USE_PROXY = ... # type: object
- TEMPORARY_REDIRECT = ... # type: object
- PERMANENT_REDIRECT = ... # type: object
- BAD_REQUEST = ... # type: object
- UNAUTHORIZED = ... # type: object
- PAYMENT_REQUIRED = ... # type: object
- FORBIDDEN = ... # type: object
- NOT_FOUND = ... # type: object
- METHOD_NOT_ALLOWED = ... # type: object
- NOT_ACCEPTABLE = ... # type: object
- PROXY_AUTHENTICATION_REQUIRED = ... # type: object
- REQUEST_TIMEOUT = ... # type: object
- CONFLICT = ... # type: object
- GONE = ... # type: object
- LENGTH_REQUIRED = ... # type: object
- PRECONDITION_FAILED = ... # type: object
- REQUEST_ENTITY_TOO_LARGE = ... # type: object
- REQUEST_URI_TOO_LONG = ... # type: object
- UNSUPPORTED_MEDIA_TYPE = ... # type: object
- REQUESTED_RANGE_NOT_SATISFIABLE = ... # type: object
- EXPECTATION_FAILED = ... # type: object
- UNPROCESSABLE_ENTITY = ... # type: object
- LOCKED = ... # type: object
- FAILED_DEPENDENCY = ... # type: object
- UPGRADE_REQUIRED = ... # type: object
- PRECONDITION_REQUIRED = ... # type: object
- TOO_MANY_REQUESTS = ... # type: object
- REQUEST_HEADER_FIELDS_TOO_LARGE = ... # type: object
- INTERNAL_SERVER_ERROR = ... # type: object
- NOT_IMPLEMENTED = ... # type: object
- BAD_GATEWAY = ... # type: object
- SERVICE_UNAVAILABLE = ... # type: object
- GATEWAY_TIMEOUT = ... # type: object
- HTTP_VERSION_NOT_SUPPORTED = ... # type: object
- VARIANT_ALSO_NEGOTIATES = ... # type: object
- INSUFFICIENT_STORAGE = ... # type: object
- LOOP_DETECTED = ... # type: object
- NOT_EXTENDED = ... # type: object
- NETWORK_AUTHENTICATION_REQUIRED = ... # type: object
+ CONTINUE = ... # type: HTTPStatus
+ SWITCHING_PROTOCOLS = ... # type: HTTPStatus
+ PROCESSING = ... # type: HTTPStatus
+ OK = ... # type: HTTPStatus
+ CREATED = ... # type: HTTPStatus
+ ACCEPTED = ... # type: HTTPStatus
+ NON_AUTHORITATIVE_INFORMATION = ... # type: HTTPStatus
+ NO_CONTENT = ... # type: HTTPStatus
+ RESET_CONTENT = ... # type: HTTPStatus
+ PARTIAL_CONTENT = ... # type: HTTPStatus
+ MULTI_STATUS = ... # type: HTTPStatus
+ ALREADY_REPORTED = ... # type: HTTPStatus
+ IM_USED = ... # type: HTTPStatus
+ MULTIPLE_CHOICES = ... # type: HTTPStatus
+ MOVED_PERMANENTLY = ... # type: HTTPStatus
+ FOUND = ... # type: HTTPStatus
+ SEE_OTHER = ... # type: HTTPStatus
+ NOT_MODIFIED = ... # type: HTTPStatus
+ USE_PROXY = ... # type: HTTPStatus
+ TEMPORARY_REDIRECT = ... # type: HTTPStatus
+ PERMANENT_REDIRECT = ... # type: HTTPStatus
+ BAD_REQUEST = ... # type: HTTPStatus
+ UNAUTHORIZED = ... # type: HTTPStatus
+ PAYMENT_REQUIRED = ... # type: HTTPStatus
+ FORBIDDEN = ... # type: HTTPStatus
+ NOT_FOUND = ... # type: HTTPStatus
+ METHOD_NOT_ALLOWED = ... # type: HTTPStatus
+ NOT_ACCEPTABLE = ... # type: HTTPStatus
+ PROXY_AUTHENTICATION_REQUIRED = ... # type: HTTPStatus
+ REQUEST_TIMEOUT = ... # type: HTTPStatus
+ CONFLICT = ... # type: HTTPStatus
+ GONE = ... # type: HTTPStatus
+ LENGTH_REQUIRED = ... # type: HTTPStatus
+ PRECONDITION_FAILED = ... # type: HTTPStatus
+ REQUEST_ENTITY_TOO_LARGE = ... # type: HTTPStatus
+ REQUEST_URI_TOO_LONG = ... # type: HTTPStatus
+ UNSUPPORTED_MEDIA_TYPE = ... # type: HTTPStatus
+ REQUESTED_RANGE_NOT_SATISFIABLE = ... # type: HTTPStatus
+ EXPECTATION_FAILED = ... # type: HTTPStatus
+ UNPROCESSABLE_ENTITY = ... # type: HTTPStatus
+ LOCKED = ... # type: HTTPStatus
+ FAILED_DEPENDENCY = ... # type: HTTPStatus
+ UPGRADE_REQUIRED = ... # type: HTTPStatus
+ PRECONDITION_REQUIRED = ... # type: HTTPStatus
+ TOO_MANY_REQUESTS = ... # type: HTTPStatus
+ REQUEST_HEADER_FIELDS_TOO_LARGE = ... # type: HTTPStatus
+ INTERNAL_SERVER_ERROR = ... # type: HTTPStatus
+ NOT_IMPLEMENTED = ... # type: HTTPStatus
+ BAD_GATEWAY = ... # type: HTTPStatus
+ SERVICE_UNAVAILABLE = ... # type: HTTPStatus
+ GATEWAY_TIMEOUT = ... # type: HTTPStatus
+ HTTP_VERSION_NOT_SUPPORTED = ... # type: HTTPStatus
+ VARIANT_ALSO_NEGOTIATES = ... # type: HTTPStatus
+ INSUFFICIENT_STORAGE = ... # type: HTTPStatus
+ LOOP_DETECTED = ... # type: HTTPStatus
+ NOT_EXTENDED = ... # type: HTTPStatus
+ NETWORK_AUTHENTICATION_REQUIRED = ... # type: HTTPStatus
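
Typing the members as HTTPStatus (rather than object) matches the runtime enum:

    from http import HTTPStatus

    s = HTTPStatus.NOT_FOUND
    assert isinstance(s, HTTPStatus)
    assert s.value == 404 and s.phrase == "Not Found"
    assert s == 404                     # IntEnum members also compare equal to plain ints
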
diff --git a/typeshed/stdlib/3/http/client.pyi b/typeshed/stdlib/3/http/client.pyi
index 6a55f5c..340f090 100644
--- a/typeshed/stdlib/3/http/client.pyi
+++ b/typeshed/stdlib/3/http/client.pyi
@@ -7,6 +7,7 @@ from typing import (
)
import email.message
import io
+from socket import socket
import sys
import ssl
import types
@@ -87,6 +88,8 @@ if sys.version_info >= (3, 5):
closed = ... # type: bool
status = ... # type: int
reason = ... # type: str
+ def __init__(self, sock: socket, debuglevel: int = ...,
+ method: Optional[str] = ..., url: Optional[str] = ...) -> None: ...
def read(self, amt: Optional[int] = ...) -> bytes: ...
def readinto(self, b: bytearray) -> int: ...
@overload
@@ -95,6 +98,7 @@ if sys.version_info >= (3, 5):
def getheader(self, name: str, default: _T) -> Union[str, _T]: ...
def getheaders(self) -> List[Tuple[str, str]]: ...
def fileno(self) -> int: ...
+ def isclosed(self) -> bool: ...
def __iter__(self) -> Iterator[bytes]: ...
def __enter__(self) -> 'HTTPResponse': ...
def __exit__(self, exc_type: Optional[type],
diff --git a/typeshed/stdlib/3/imp.pyi b/typeshed/stdlib/3/imp.pyi
index 3abe628..3344091 100644
--- a/typeshed/stdlib/3/imp.pyi
+++ b/typeshed/stdlib/3/imp.pyi
@@ -1,10 +1,55 @@
-# Stubs for imp
+# Stubs for imp (Python 3.6)
-# NOTE: These are incomplete!
+import os
+import sys
+import types
+from typing import Any, IO, List, Optional, Tuple, TypeVar, Union
-from typing import TypeVar
+from _imp import (lock_held as lock_held, acquire_lock as acquire_lock, release_lock as release_lock,
+ get_frozen_object as get_frozen_object, is_frozen_package as is_frozen_package,
+ init_frozen as init_frozen, is_builtin as is_builtin, is_frozen as is_frozen)
+
+if sys.version_info >= (3, 5):
+ from _imp import create_dynamic as create_dynamic
_T = TypeVar('_T')
-def cache_from_source(path: str, debug_override: bool = ...) -> str: ...
-def reload(module: _T) -> _T: ... # TODO imprecise signature
+if sys.version_info >= (3, 6):
+ _Path = Union[str, os.PathLike[str]]
+else:
+ _Path = str
+
+SEARCH_ERROR: int
+PY_SOURCE: int
+PY_COMPILED: int
+C_EXTENSION: int
+PY_RESOURCE: int
+PKG_DIRECTORY: int
+C_BUILTIN: int
+PY_FROZEN: int
+PY_CODERESOURCE: int
+IMP_HOOK: int
+
+def new_module(name: str) -> types.ModuleType: ...
+def get_magic() -> bytes: ...
+def get_tag() -> str: ...
+def cache_from_source(path: _Path, debug_override: Optional[bool] = ...) -> str: ...
+def source_from_cache(path: _Path) -> str: ...
+def get_suffixes() -> List[Tuple[str, str, int]]: ...
+
+class NullImporter:
+ def __init__(self, path: _Path) -> None: ...
+ def find_module(self, fullname: Any) -> None: ...
+
+# PathLike doesn't work for the pathname argument here
+def load_source(name: str, pathname: str, file: Optional[IO[Any]] = ...) -> types.ModuleType: ...
+def load_compiled(name: str, pathname: str, file: Optional[IO[Any]] = ...) -> types.ModuleType: ...
+def load_package(name: str, path: _Path) -> types.ModuleType: ...
+def load_module(name: str, file: IO[Any], filename: str, details: Tuple[str, str, int]) -> types.ModuleType: ...
+if sys.version_info >= (3, 6):
+ def find_module(name: str, path: Union[None, List[str], List[os.PathLike[str]], List[_Path]] = ...) -> Tuple[str, str, Tuple[IO[Any], str, int]]: ...
+else:
+ def find_module(name: str, path: Optional[List[str]] = ...) -> Tuple[str, str, Tuple[IO[Any], str, int]]: ...
+def reload(module: types.ModuleType) -> types.ModuleType: ...
+def init_builtin(name: str) -> Optional[types.ModuleType]: ...
+def load_dynamic(name: str, path: str, file: Optional[IO[Any]] = ...) -> types.ModuleType: ...
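
A couple of the imp entry points typed above, for orientation (imp itself is deprecated in favour of importlib and importing it may emit a DeprecationWarning):

    import imp, types

    mod = imp.new_module("scratch")              # a bare, empty module object
    assert isinstance(mod, types.ModuleType)
    assert isinstance(imp.get_magic(), bytes)    # bytecode magic tag
    assert any(suffix == ".py" for suffix, _, _ in imp.get_suffixes())
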
diff --git a/typeshed/stdlib/3/inspect.pyi b/typeshed/stdlib/3/inspect.pyi
index 5fc5b19..ddf28b9 100644
--- a/typeshed/stdlib/3/inspect.pyi
+++ b/typeshed/stdlib/3/inspect.pyi
@@ -95,7 +95,7 @@ class Signature:
follow_wrapped: bool = True) -> 'Signature': ...
# The name is the same as the enum's name in CPython
-class _ParameterKind: pass
+class _ParameterKind: ...
class Parameter:
def __init__(self,
@@ -172,7 +172,7 @@ def getargvalues(frame: FrameType) -> ArgInfo: ...
def formatargspec(args: List[str],
varargs: Optional[str] = ...,
varkw: Optional[str] = ...,
- defaults: Optional[Tuple[Any]] = ...,
+ defaults: Optional[Tuple[Any, ...]] = ...,
kwonlyargs: Optional[List[str]] = ...,
kwonlydefaults: Optional[Dict[str, Any]] = ...,
annotations: Optional[Dict[str, Any]] = ...,
diff --git a/typeshed/stdlib/3/io.pyi b/typeshed/stdlib/3/io.pyi
index be70e54..1673853 100644
--- a/typeshed/stdlib/3/io.pyi
+++ b/typeshed/stdlib/3/io.pyi
@@ -1,7 +1,7 @@
# Stubs for io
from typing import (
- List, BinaryIO, TextIO, Iterator, Union, Optional, Callable, Tuple, Any, IO
+ List, BinaryIO, TextIO, Iterator, Union, Optional, Callable, Tuple, Any, IO, Iterable
)
import builtins
import codecs
@@ -47,7 +47,7 @@ class IOBase:
def tell(self) -> int: ...
def truncate(self, size: Optional[int] = ...) -> int: ...
def writable(self) -> bool: ...
- def writelines(self, lines: List[Union[bytes, bytearray]]) -> None: ...
+ def writelines(self, lines: Iterable[Union[bytes, bytearray]]) -> None: ...
if sys.version_info >= (3, 4):
def readline(self, size: int = ...) -> bytes: ...
def __del__(self) -> None: ...
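
writelines() accepting any Iterable (not only a list) is easy to confirm:

    import io

    buf = io.BytesIO()
    buf.writelines(bytes([b]) * 2 for b in b"ab")   # a generator works, no list needed
    assert buf.getvalue() == b"aabb"
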
diff --git a/typeshed/stdlib/3/itertools.pyi b/typeshed/stdlib/3/itertools.pyi
index 889d583..774d68f 100644
--- a/typeshed/stdlib/3/itertools.pyi
+++ b/typeshed/stdlib/3/itertools.pyi
@@ -3,7 +3,7 @@
# Based on http://docs.python.org/3.2/library/itertools.html
from typing import (Iterator, TypeVar, Iterable, overload, Any, Callable, Tuple,
- Union, Sequence, Generic, Optional)
+ Generic, Optional)
_T = TypeVar('_T')
_S = TypeVar('_S')
@@ -44,20 +44,18 @@ def islice(iterable: Iterable[_T], stop: int) -> Iterator[_T]: ...
def islice(iterable: Iterable[_T], start: int, stop: Optional[int],
step: int = ...) -> Iterator[_T]: ...
-def starmap(func: Any, iterable: Iterable[Any]) -> Iterator[Any]: ...
+def starmap(func: Callable[..., _S], iterable: Iterable[Iterable[Any]]) -> Iterator[_S]: ...
def takewhile(predicate: Callable[[_T], Any],
iterable: Iterable[_T]) -> Iterator[_T]: ...
-def tee(iterable: Iterable[Any], n: int = ...) -> Iterator[Any]: ...
+def tee(iterable: Iterable[_T], n: int = ...) -> Tuple[Iterator[_T], ...]: ...
def zip_longest(*p: Iterable[Any],
fillvalue: Any = ...) -> Iterator[Any]: ...
-# TODO: Return type should be Iterator[Tuple[..]], but unknown tuple shape.
-# Iterator[Sequence[_T]] loses this type information.
-def product(*p: Iterable[_T], repeat: int = ...) -> Iterator[Sequence[_T]]: ...
+def product(*p: Iterable[_T], repeat: int = ...) -> Iterator[Tuple[_T, ...]]: ...
def permutations(iterable: Iterable[_T],
- r: Union[int, None] = ...) -> Iterator[Sequence[_T]]: ...
+ r: Optional[int] = ...) -> Iterator[Tuple[_T, ...]]: ...
def combinations(iterable: Iterable[_T],
- r: int) -> Iterable[Sequence[_T]]: ...
+ r: int) -> Iterable[Tuple[_T, ...]]: ...
def combinations_with_replacement(iterable: Iterable[_T],
- r: int) -> Iterable[Sequence[_T]]: ...
+ r: int) -> Iterable[Tuple[_T, ...]]: ...
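
The sharper return types above reflect that these functions yield plain tuples; a quick check:

    from itertools import combinations, permutations, product, tee

    assert list(product("ab", repeat=2)) == [("a", "a"), ("a", "b"), ("b", "a"), ("b", "b")]
    assert list(permutations([1, 2], 2)) == [(1, 2), (2, 1)]
    assert list(combinations("abc", 2)) == [("a", "b"), ("a", "c"), ("b", "c")]
    first, second = tee(range(3))                    # tee() returns a tuple of iterators
    assert list(first) == list(second) == [0, 1, 2]
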
diff --git a/typeshed/stdlib/3/json.pyi b/typeshed/stdlib/3/json.pyi
deleted file mode 100644
index 368cae4..0000000
--- a/typeshed/stdlib/3/json.pyi
+++ /dev/null
@@ -1,88 +0,0 @@
-from typing import Any, IO, Iterator, Optional, Tuple, Callable, Dict, List, Union
-
-class JSONDecodeError(ValueError):
- def dumps(self, obj: Any) -> str: ...
- def dump(self, obj: Any, fp: IO[str], *args: Any, **kwds: Any) -> None: ...
- def loads(self, s: str) -> Any: ...
- def load(self, fp: IO[str]) -> Any: ...
-
-def dumps(obj: Any,
- skipkeys: bool = ...,
- ensure_ascii: bool = ...,
- check_circular: bool = ...,
- allow_nan: bool = ...,
- cls: Any = ...,
- indent: Union[None, int, str] = ...,
- separators: Optional[Tuple[str, str]] = ...,
- default: Optional[Callable[[Any], Any]] = ...,
- sort_keys: bool = ...,
- **kwds: Any) -> str: ...
-
-def dump(obj: Any,
- fp: IO[str],
- skipkeys: bool = ...,
- ensure_ascii: bool = ...,
- check_circular: bool = ...,
- allow_nan: bool = ...,
- cls: Any = ...,
- indent: Union[None, int, str] = ...,
- separators: Optional[Tuple[str, str]] = ...,
- default: Optional[Callable[[Any], Any]] = ...,
- sort_keys: bool = ...,
- **kwds: Any) -> None: ...
-
-def loads(s: str,
- encoding: Any = ..., # ignored and deprecated
- cls: Any = ...,
- object_hook: Optional[Callable[[Dict], Any]] = ...,
- parse_float: Optional[Callable[[str], Any]] = ...,
- parse_int: Optional[Callable[[str], Any]] = ...,
- parse_constant: Optional[Callable[[str], Any]] = ...,
- object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ...,
- **kwds: Any) -> Any: ...
-
-def load(fp: IO[str],
- cls: Any = ...,
- object_hook: Optional[Callable[[Dict], Any]] = ...,
- parse_float: Optional[Callable[[str], Any]] = ...,
- parse_int: Optional[Callable[[str], Any]] = ...,
- parse_constant: Optional[Callable[[str], Any]] = ...,
- object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ...,
- **kwds: Any) -> Any: ...
-
-class JSONEncoder(object):
- item_separator = ... # type: str
- key_separator = ... # type: str
-
- skipkeys = ... # type: bool
- ensure_ascii = ... # type: bool
- check_circular = ... # type: bool
- allow_nan = ... # type: bool
- sort_keys = ... # type: bool
- indent = None # type: int
-
- def __init__(self, skipkeys: bool=..., ensure_ascii: bool=...,
- check_circular: bool=..., allow_nan: bool=..., sort_keys: bool=...,
- indent: int=None, separators: Tuple[str, str]=None, default: Callable=None) -> None: ...
-
- def default(self, o: Any) -> Any: ...
- def encode(self, o: Any) -> str: ...
- def iterencode(self, o: Any, _one_shot: bool=False) -> Iterator[str]: ...
-
-class JSONDecoder(object):
-
- object_hook = None # type: Callable[[Dict[str, Any]], Any]
- parse_float = ... # Callable[[str], Any]
- parse_int = ... # Callable[[str], Any]
- parse_constant = ... # Callable[[str], Any]
- strict = ... # type: bool
- object_pairs_hook = None # type: Callable[[List[Tuple[str, Any]]], Any]
-
- def __init__(self, object_hook: Callable[[Dict[str, Any]], Any]=None,
- parse_float: Callable[[str], Any]=None,
- parse_int: Callable[[str], Any]=None,
- parse_constant: Callable[[str], Any]=None,
- strict: bool=True,
- object_pairs_hook: Callable[[List[Tuple[str, Any]]], Any]=None) -> None: ...
- def decode(self, s: str) -> Any: ...
- def raw_decode(self, s: str, idx: int=...) -> Tuple[Any, int]: ...
diff --git a/typeshed/stdlib/3/json/__init__.pyi b/typeshed/stdlib/3/json/__init__.pyi
new file mode 100644
index 0000000..ccd121d
--- /dev/null
+++ b/typeshed/stdlib/3/json/__init__.pyi
@@ -0,0 +1,51 @@
+import sys
+from typing import Any, IO, Optional, Tuple, Callable, Dict, List, Union
+
+from .decoder import JSONDecoder
+from .encoder import JSONEncoder
+if sys.version_info >= (3, 5):
+ from .decoder import JSONDecodeError
+
+def dumps(obj: Any,
+ skipkeys: bool = ...,
+ ensure_ascii: bool = ...,
+ check_circular: bool = ...,
+ allow_nan: bool = ...,
+ cls: Any = ...,
+ indent: Union[None, int, str] = ...,
+ separators: Optional[Tuple[str, str]] = ...,
+ default: Optional[Callable[[Any], Any]] = ...,
+ sort_keys: bool = ...,
+ **kwds: Any) -> str: ...
+
+def dump(obj: Any,
+ fp: IO[str],
+ skipkeys: bool = ...,
+ ensure_ascii: bool = ...,
+ check_circular: bool = ...,
+ allow_nan: bool = ...,
+ cls: Any = ...,
+ indent: Union[None, int, str] = ...,
+ separators: Optional[Tuple[str, str]] = ...,
+ default: Optional[Callable[[Any], Any]] = ...,
+ sort_keys: bool = ...,
+ **kwds: Any) -> None: ...
+
+def loads(s: Union[str, bytes, bytearray],
+ encoding: Any = ..., # ignored and deprecated
+ cls: Any = ...,
+ object_hook: Optional[Callable[[Dict], Any]] = ...,
+ parse_float: Optional[Callable[[str], Any]] = ...,
+ parse_int: Optional[Callable[[str], Any]] = ...,
+ parse_constant: Optional[Callable[[str], Any]] = ...,
+ object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ...,
+ **kwds: Any) -> Any: ...
+
+def load(fp: IO[str],
+ cls: Any = ...,
+ object_hook: Optional[Callable[[Dict], Any]] = ...,
+ parse_float: Optional[Callable[[str], Any]] = ...,
+ parse_int: Optional[Callable[[str], Any]] = ...,
+ parse_constant: Optional[Callable[[str], Any]] = ...,
+ object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ...,
+ **kwds: Any) -> Any: ...
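
The new package-style stub mirrors the real json package layout (json/__init__, json.decoder, json.encoder); the main signature change is loads() accepting bytes on 3.6+:

    import json

    assert json.loads('{"a": 1}') == {"a": 1}
    assert json.loads(b'{"a": 1}') == {"a": 1}       # bytes input, Python 3.6+
    blob = json.dumps({"b": 2, "a": 1}, sort_keys=True, indent=2)
    assert json.loads(blob) == {"a": 1, "b": 2}
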
diff --git a/typeshed/stdlib/3/json/decoder.pyi b/typeshed/stdlib/3/json/decoder.pyi
new file mode 100644
index 0000000..919039e
--- /dev/null
+++ b/typeshed/stdlib/3/json/decoder.pyi
@@ -0,0 +1,28 @@
+import sys
+from typing import Any, Callable, Dict, List, Tuple
+
+if sys.version_info >= (3, 5):
+ class JSONDecodeError(ValueError):
+ msg: str
+ doc: str
+ pos: int
+ lineno: int
+ colno: int
+ def __init__(self, msg: str, doc: str, pos: int) -> None: ...
+
+class JSONDecoder:
+ object_hook = None # type: Callable[[Dict[str, Any]], Any]
+ parse_float = ... # Callable[[str], Any]
+ parse_int = ... # Callable[[str], Any]
+ parse_constant = ... # Callable[[str], Any]
+ strict = ... # type: bool
+ object_pairs_hook = None # type: Callable[[List[Tuple[str, Any]]], Any]
+
+ def __init__(self, object_hook: Callable[[Dict[str, Any]], Any]=None,
+ parse_float: Callable[[str], Any]=None,
+ parse_int: Callable[[str], Any]=None,
+ parse_constant: Callable[[str], Any]=None,
+ strict: bool=True,
+ object_pairs_hook: Callable[[List[Tuple[str, Any]]], Any]=None) -> None: ...
+ def decode(self, s: str) -> Any: ...
+ def raw_decode(self, s: str, idx: int=...) -> Tuple[Any, int]: ...
diff --git a/typeshed/stdlib/3/json/encoder.pyi b/typeshed/stdlib/3/json/encoder.pyi
new file mode 100644
index 0000000..f75304d
--- /dev/null
+++ b/typeshed/stdlib/3/json/encoder.pyi
@@ -0,0 +1,20 @@
+from typing import Any, Callable, Iterator, Tuple
+
+class JSONEncoder:
+ item_separator = ... # type: str
+ key_separator = ... # type: str
+
+ skipkeys = ... # type: bool
+ ensure_ascii = ... # type: bool
+ check_circular = ... # type: bool
+ allow_nan = ... # type: bool
+ sort_keys = ... # type: bool
+ indent = None # type: int
+
+ def __init__(self, skipkeys: bool=..., ensure_ascii: bool=...,
+ check_circular: bool=..., allow_nan: bool=..., sort_keys: bool=...,
+ indent: int=None, separators: Tuple[str, str]=None, default: Callable=None) -> None: ...
+
+ def default(self, o: Any) -> Any: ...
+ def encode(self, o: Any) -> str: ...
+ def iterencode(self, o: Any, _one_shot: bool=False) -> Iterator[str]: ...
diff --git a/typeshed/stdlib/3/linecache.pyi b/typeshed/stdlib/3/linecache.pyi
index a77de88..4f14f6a 100644
--- a/typeshed/stdlib/3/linecache.pyi
+++ b/typeshed/stdlib/3/linecache.pyi
@@ -1,5 +1,5 @@
from typing import Any
-def getline(filename: str, lineno: int, module_globals: Any=...) -> str: pass
-def clearcache() -> None: pass
-def getlines(filename: str, module_globals: Any=...) -> None: pass
+def getline(filename: str, lineno: int, module_globals: Any=...) -> str: ...
+def clearcache() -> None: ...
+def getlines(filename: str, module_globals: Any=...) -> None: ...
diff --git a/typeshed/stdlib/3/macpath.pyi b/typeshed/stdlib/3/macpath.pyi
new file mode 100644
index 0000000..7d8bb09
--- /dev/null
+++ b/typeshed/stdlib/3/macpath.pyi
@@ -0,0 +1,46 @@
+# Stubs for os.path
+# Ron Murawski <ron@horizonchess.com>
+
+# based on http://docs.python.org/3.2/library/os.path.html
+
+from typing import Any, List, Tuple, IO
+
+# ----- os.path variables -----
+supports_unicode_filenames = False
+
+# ----- os.path function stubs -----
+def abspath(path: str) -> str: ...
+def basename(path) -> str: ...
+def commonprefix(list: List[str]) -> str: ...
+def dirname(path: str) -> str: ...
+def exists(path: str) -> bool: ...
+def lexists(path: str) -> bool: ...
+def expanduser(path: str) -> str: ...
+def expandvars(path: str) -> str: ...
+def getatime(path: str) -> int:
+ ... # return float if os.stat_float_times() returns True
+def getmtime(path: str) -> int:
+ ... # return float if os.stat_float_times() returns True
+def getctime(path: str) -> int:
+ ... # return float if os.stat_float_times() returns True
+def getsize(path: str) -> int: ...
+def isabs(path: str) -> bool: ...
+def isfile(path: str) -> bool: ...
+def isdir(path: str) -> bool: ...
+def islink(path: str) -> bool: ...
+def ismount(path: str) -> bool: ...
+def join(path: str, *paths: str) -> str: ...
+def normcase(path: str) -> str: ...
+def normpath(path: str) -> str: ...
+def realpath(path: str) -> str: ...
+def relpath(path: str, start: str = ...) -> str: ...
+def samefile(path1: str, path2: str) -> bool: ...
+
+def sameopenfile(fp1: IO[Any], fp2: IO[Any]) -> bool: ...
+
+# def samestat(stat1: stat_result, stat2: stat_result) -> bool:
+# ... # Unix only
+def split(path: str) -> Tuple[str, str]: ...
+def splitdrive(path: str) -> Tuple[str, str]: ...
+def splitext(path: str) -> Tuple[str, str]: ...
+# def splitunc(path: str) -> Tuple[str, str] : ... # Windows only, deprecated
diff --git a/typeshed/stdlib/3/nntplib.pyi b/typeshed/stdlib/3/nntplib.pyi
new file mode 100644
index 0000000..1bf2f41
--- /dev/null
+++ b/typeshed/stdlib/3/nntplib.pyi
@@ -0,0 +1,104 @@
+# Stubs for nntplib (Python 3)
+
+import datetime
+import socket
+import ssl
+import sys
+from typing import Any, Dict, IO, Iterable, List, NamedTuple, Optional, Tuple, TypeVar, Union
+
+_SelfT = TypeVar('_SelfT', bound=_NNTPBase)
+_File = Union[IO[bytes], bytes, str, None]
+
+
+class NNTPError(Exception):
+ response: str
+class NNTPReplyError(NNTPError): ...
+class NNTPTemporaryError(NNTPError): ...
+class NNTPPermanentError(NNTPError): ...
+class NNTPProtocolError(NNTPError): ...
+class NNTPDataError(NNTPError): ...
+
+NNTP_PORT: int
+NNTP_SSL_PORT: int
+
+GroupInfo = NamedTuple('GroupInfo', [
+ ('group', str),
+ ('last', str),
+ ('first', str),
+ ('flag', str),
+])
+ArticleInfo = NamedTuple('ArticleInfo', [
+ ('number', int),
+ ('message_id', str),
+ ('lines', List[bytes]),
+])
+
+def decode_header(header_str: str) -> str: ...
+
+class _NNTPBase:
+ encoding: str
+ errors: str
+
+ host: str
+ file: IO[bytes]
+ debugging: int
+ welcome: str
+ readermode_afterauth: bool
+ tls_on: bool
+ authenticated: bool
+ nntp_implementation: str
+ nntp_version: int
+
+ def __init__(self, file: IO[bytes], host: str,
+ readermode: Optional[bool] = ..., timeout: float = ...) -> None: ...
+ if sys.version_info >= (3, 3):
+ def __enter__(self: _SelfT) -> _SelfT: ...
+ def __exit__(self, *args: Any) -> None: ...
+ def getwelcome(self) -> str: ...
+ def getcapabilities(self) -> Dict[str, List[str]]: ...
+ def set_debuglevel(self, level: int) -> None: ...
+ def debug(self, level: int) -> None: ...
+ def capabilities(self) -> Tuple[str, Dict[str, List[str]]]: ...
+ def newgroups(self, date: Union[datetime.date, datetime.datetime], *, file: _File = ...) -> Tuple[str, List[str]]: ...
+ def newnews(self, group: str, date: Union[datetime.date, datetime.datetime], *, file: _File = ...) -> Tuple[str, List[str]]: ...
+ def list(self, group_pattern: Optional[str] = ..., *, file: _File = ...) -> Tuple[str, List[str]]: ...
+ def description(self, group: str) -> str: ...
+ def descriptions(self, group_pattern: str) -> Tuple[str, Dict[str, str]]: ...
+ def group(self, name: str) -> Tuple[str, int, int, int, str]: ...
+ def help(self, *, file: _File = ...) -> Tuple[str, List[str]]: ...
+ def stat(self, message_spec: Any = ...) -> Tuple[str, int, str]: ...
+ def next(self) -> Tuple[str, int, str]: ...
+ def last(self) -> Tuple[str, int, str]: ...
+ def head(self, message_spec: Any = ..., *, file: _File = ...) -> Tuple[str, ArticleInfo]: ...
+ def body(self, message_spec: Any = ..., *, file: _File = ...) -> Tuple[str, ArticleInfo]: ...
+ def article(self, message_spec: Any = ..., *, file: _File = ...) -> Tuple[str, ArticleInfo]: ...
+ def slave(self) -> str: ...
+ def xhdr(self, hdr: str, str: Any, *, file: _File = ...) -> Tuple[str, List[str]]: ...
+ def xover(self, start: int, end: int, *, file: _File = ...) -> Tuple[str, List[Tuple[int, Dict[str, str]]]]: ...
+ def over(self, message_spec: Union[None, str, List[Any], Tuple[Any, ...]], *, file: _File = ...) -> Tuple[str, List[Tuple[int, Dict[str, str]]]]: ...
+ def xgtitle(self, group: str, *, file: _File = ...) -> Tuple[str, List[Tuple[str, str]]]: ...
+ def xpath(self, id: Any) -> Tuple[str, str]: ...
+ def date(self) -> Tuple[str, datetime.datetime]: ...
+ def post(self, data: Union[bytes, Iterable[bytes]]) -> str: ...
+ def ihave(self, message_id: Any, data: Union[bytes, Iterable[bytes]]) -> str: ...
+ def quit(self) -> str: ...
+ def login(self, user: Optional[str] = ..., password: Optional[str] = ..., usenetrc: bool = ...) -> None: ...
+ def starttls(self, ssl_context: Optional[ssl.SSLContext] = ...) -> None: ...
+
+
+class NNTP(_NNTPBase):
+ port: int
+ sock: socket.socket
+
+ def __init__(self, host: str, port: int = ..., user: Optional[str] = ..., password: Optional[str] = ...,
+ readermode: Optional[bool] = ..., usenetrc: bool = ...,
+ timeout: float = ...) -> None: ...
+
+
+class NNTP_SSL(_NNTPBase):
+ sock: socket.socket
+
+ def __init__(self, host: str, port: int = ..., user: Optional[str] = ..., password: Optional[str] = ...,
+ ssl_context: Optional[ssl.SSLContext] = ...,
+ readermode: Optional[bool] = ..., usenetrc: bool = ...,
+ timeout: float = ...) -> None: ...
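
Most of nntplib needs a live server, but the record types and decode_header() above can be exercised standalone (the group name is made up):

    from nntplib import GroupInfo, decode_header

    g = GroupInfo(group="comp.lang.python", last="1000", first="1", flag="y")
    assert g.group == "comp.lang.python"
    assert decode_header("=?utf-8?q?caf=C3=A9?=") == "café"
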
diff --git a/typeshed/stdlib/3/ntpath.pyi b/typeshed/stdlib/3/ntpath.pyi
new file mode 100644
index 0000000..7d8bb09
--- /dev/null
+++ b/typeshed/stdlib/3/ntpath.pyi
@@ -0,0 +1,46 @@
+# Stubs for os.path
+# Ron Murawski <ron@horizonchess.com>
+
+# based on http://docs.python.org/3.2/library/os.path.html
+
+from typing import Any, List, Tuple, IO
+
+# ----- os.path variables -----
+supports_unicode_filenames = False
+
+# ----- os.path function stubs -----
+def abspath(path: str) -> str: ...
+def basename(path) -> str: ...
+def commonprefix(list: List[str]) -> str: ...
+def dirname(path: str) -> str: ...
+def exists(path: str) -> bool: ...
+def lexists(path: str) -> bool: ...
+def expanduser(path: str) -> str: ...
+def expandvars(path: str) -> str: ...
+def getatime(path: str) -> int:
+ ... # return float if os.stat_float_times() returns True
+def getmtime(path: str) -> int:
+ ... # return float if os.stat_float_times() returns True
+def getctime(path: str) -> int:
+ ... # return float if os.stat_float_times() returns True
+def getsize(path: str) -> int: ...
+def isabs(path: str) -> bool: ...
+def isfile(path: str) -> bool: ...
+def isdir(path: str) -> bool: ...
+def islink(path: str) -> bool: ...
+def ismount(path: str) -> bool: ...
+def join(path: str, *paths: str) -> str: ...
+def normcase(path: str) -> str: ...
+def normpath(path: str) -> str: ...
+def realpath(path: str) -> str: ...
+def relpath(path: str, start: str = ...) -> str: ...
+def samefile(path1: str, path2: str) -> bool: ...
+
+def sameopenfile(fp1: IO[Any], fp2: IO[Any]) -> bool: ...
+
+# def samestat(stat1: stat_result, stat2: stat_result) -> bool:
+# ... # Unix only
+def split(path: str) -> Tuple[str, str]: ...
+def splitdrive(path: str) -> Tuple[str, str]: ...
+def splitext(path: str) -> Tuple[str, str]: ...
+# def splitunc(path: str) -> Tuple[str, str] : ... # Windows only, deprecated
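
ntpath (like macpath above) implements the Windows path rules but is importable on any platform, so the pure-string helpers can be sanity-checked anywhere:

    import ntpath

    assert ntpath.splitdrive(r"C:\tmp\x.txt") == ("C:", r"\tmp\x.txt")
    assert ntpath.splitext("archive.tar.gz") == ("archive.tar", ".gz")
    assert ntpath.join("dir", "file.txt") == r"dir\file.txt"
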
diff --git a/typeshed/stdlib/3/nturl2path.pyi b/typeshed/stdlib/3/nturl2path.pyi
new file mode 100644
index 0000000..b8ad8d6
--- /dev/null
+++ b/typeshed/stdlib/3/nturl2path.pyi
@@ -0,0 +1,2 @@
+def url2pathname(url: str) -> str: ...
+def pathname2url(p: str) -> str: ...
diff --git a/typeshed/stdlib/3/os/__init__.pyi b/typeshed/stdlib/3/os/__init__.pyi
index ecf894e..827a817 100644
--- a/typeshed/stdlib/3/os/__init__.pyi
+++ b/typeshed/stdlib/3/os/__init__.pyi
@@ -8,15 +8,23 @@ from io import TextIOWrapper as _TextIOWrapper
import sys
from typing import (
Mapping, MutableMapping, Dict, List, Any, Tuple, Iterator, overload, Union, AnyStr,
- Optional, Generic, Set, Callable
+ Optional, Generic, Set, Callable, Text, Sequence, NamedTuple, TypeVar
)
-from . import path
+from . import path as path
from mypy_extensions import NoReturn
+_T = TypeVar('_T')
+
# ----- os variables -----
supports_bytes_environ = False # TODO: True when bytes implemented?
+if sys.version_info >= (3, 3):
+ supports_dir_fd = ... # type: Set[Callable[..., Any]]
+ supports_fd = ... # type: Set[Callable[..., Any]]
+ supports_effective_ids = ... # type: Set[Callable[..., Any]]
+ supports_follow_symlinks = ... # type: Set[Callable[..., Any]]
+
SEEK_SET = 0
SEEK_CUR = 0
SEEK_END = 0
@@ -108,17 +116,31 @@ TMP_MAX = 0 # Undocumented, but used by tempfile
# ----- os classes (structures) -----
if sys.version_info >= (3, 6):
- class PathLike:
- def __fspath__(self) -> AnyStr: ...
+ from builtins import _PathLike as PathLike # See comment in builtins
+_PathType = path._PathType
-if sys.version_info >= (3, 5):
- class DirEntry:
+if sys.version_info >= (3, 6):
+ class DirEntry(PathLike[AnyStr]):
+ # This is what the scandir interator yields
+ # The constructor is hidden
+
+ name = ... # type: AnyStr
+ path = ... # type: AnyStr
+ def inode(self) -> int: ...
+ def is_dir(self, follow_symlinks: bool = ...) -> bool: ...
+ def is_file(self, follow_symlinks: bool = ...) -> bool: ...
+ def is_symlink(self) -> bool: ...
+ def stat(self) -> stat_result: ...
+
+ def __fspath__(self) -> AnyStr: ...
+elif sys.version_info >= (3, 5):
+ class DirEntry(Generic[AnyStr]):
# This is what the scandir interator yields
# The constructor is hidden
- name = ''
- path = ''
+ name = ... # type: AnyStr
+ path = ... # type: AnyStr
def inode(self) -> int: ...
def is_dir(self, follow_symlinks: bool = ...) -> bool: ...
def is_file(self, follow_symlinks: bool = ...) -> bool: ...
@@ -182,8 +204,24 @@ class statvfs_result: # Unix only
f_namemax = 0
# ----- os function stubs -----
-def fsencode(filename: str) -> bytes: ...
-def fsdecode(filename: bytes) -> str: ...
+if sys.version_info >= (3, 6):
+ def fsencode(filename: Union[str, bytes, PathLike]) -> bytes: ...
+else:
+ def fsencode(filename: Union[str, bytes]) -> bytes: ...
+
+if sys.version_info >= (3, 6):
+ def fsdecode(filename: Union[str, bytes, PathLike]) -> str: ...
+else:
+ def fsdecode(filename: Union[str, bytes]) -> str: ...
+
+if sys.version_info >= (3, 6):
+ @overload
+ def fspath(path: str) -> str: ...
+ @overload
+ def fspath(path: bytes) -> bytes: ...
+ @overload
+ def fspath(path: PathLike) -> Any: ...
+
def get_exec_path(env: Optional[Mapping[str, str]] = ...) -> List[str]: ...
# NOTE: get_exec_path(): returns List[bytes] when env not None
def ctermid() -> str: ... # Unix only
@@ -200,27 +238,31 @@ def getppid() -> int: ...
def getresuid() -> Tuple[int, int, int]: ... # Unix only
def getresgid() -> Tuple[int, int, int]: ... # Unix only
def getuid() -> int: ... # Unix only
-def getenv(key: str, default: str = ...) -> str: ...
-def getenvb(key: bytes, default: bytes = ...) -> bytes: ...
-# TODO mixed str/bytes putenv arguments
-def putenv(key: AnyStr, value: AnyStr) -> None: ...
def setegid(egid: int) -> None: ... # Unix only
def seteuid(euid: int) -> None: ... # Unix only
def setgid(gid: int) -> None: ... # Unix only
-def setgroups(groups: List[int]) -> None: ... # Unix only
-def setpgrp() -> int: ... # Unix only
-def setpgid(pid: int, pgrp: int) -> int: ... # Unix only
+def setgroups(groups: Sequence[int]) -> None: ... # Unix only
+def setpgrp() -> None: ... # Unix only
+def setpgid(pid: int, pgrp: int) -> None: ... # Unix only
def setregid(rgid: int, egid: int) -> None: ... # Unix only
def setresgid(rgid: int, egid: int, sgid: int) -> None: ... # Unix only
def setresuid(ruid: int, euid: int, suid: int) -> None: ... # Unix only
def setreuid(ruid: int, euid: int) -> None: ... # Unix only
def getsid(pid: int) -> int: ... # Unix only
-def setsid() -> int: ... # Unix only
+def setsid() -> None: ... # Unix only
def setuid(uid: int) -> None: ... # Unix only
def strerror(code: int) -> str: ...
def umask(mask: int) -> int: ...
def uname() -> Tuple[str, str, str, str, str]: ... # Unix only
-def unsetenv(key: AnyStr) -> None: ...
+
+@overload
+def getenv(key: Text) -> Optional[str]: ...
+@overload
+def getenv(key: Text, default: _T) -> Union[str, _T]: ...
+def getenvb(key: bytes, default: bytes = ...) -> bytes: ...
+def putenv(key: Union[bytes, Text], value: Union[bytes, Text]) -> None: ...
+def unsetenv(key: Union[bytes, Text]) -> None: ...
+
# Return IO or TextIO
def fdopen(fd: int, mode: str = ..., buffering: int = ..., encoding: str = ...,
errors: str = ..., newline: str = ..., closefd: bool = ...) -> Any: ...
@@ -232,14 +274,14 @@ def dup2(fd: int, fd2: int) -> None: ...
def fchmod(fd: int, mode: int) -> None: ... # Unix only
def fchown(fd: int, uid: int, gid: int) -> None: ... # Unix only
def fdatasync(fd: int) -> None: ... # Unix only, not Mac
-def fpathconf(fd: int, name: str) -> int: ... # Unix only
+def fpathconf(fd: int, name: Union[str, int]) -> int: ... # Unix only
def fstat(fd: int) -> stat_result: ...
def fstatvfs(fd: int) -> statvfs_result: ... # Unix only
def fsync(fd: int) -> None: ...
def ftruncate(fd: int, length: int) -> None: ... # Unix only
def isatty(fd: int) -> bool: ... # Unix only
def lseek(fd: int, pos: int, how: int) -> int: ...
-def open(file: AnyStr, flags: int, mode: int = ...) -> int: ...
+def open(file: _PathType, flags: int, mode: int = ...) -> int: ...
def openpty() -> Tuple[int, int]: ... # some flavors of Unix
def pipe() -> Tuple[int, int]: ...
def read(fd: int, n: int) -> bytes: ...
@@ -247,74 +289,79 @@ def tcgetpgrp(fd: int) -> int: ... # Unix only
def tcsetpgrp(fd: int, pg: int) -> None: ... # Unix only
def ttyname(fd: int) -> str: ... # Unix only
def write(fd: int, string: bytes) -> int: ...
-def access(path: AnyStr, mode: int) -> bool: ...
-def chdir(path: AnyStr) -> None: ...
+def access(path: _PathType, mode: int) -> bool: ...
+def chdir(path: _PathType) -> None: ...
def fchdir(fd: int) -> None: ...
def getcwd() -> str: ...
def getcwdb() -> bytes: ...
-def chflags(path: str, flags: int) -> None: ... # Unix only
-def chroot(path: str) -> None: ... # Unix only
-def chmod(path: AnyStr, mode: int) -> None: ...
-def chown(path: AnyStr, uid: int, gid: int) -> None: ... # Unix only
-def lchflags(path: str, flags: int) -> None: ... # Unix only
-def lchmod(path: str, mode: int) -> None: ... # Unix only
-def lchown(path: str, uid: int, gid: int) -> None: ... # Unix only
-def link(src: AnyStr, link_name: AnyStr) -> None: ...
+def chflags(path: _PathType, flags: int) -> None: ... # Unix only
+def chroot(path: _PathType) -> None: ... # Unix only
+def chmod(path: _PathType, mode: int) -> None: ...
+def chown(path: _PathType, uid: int, gid: int) -> None: ... # Unix only
+def lchflags(path: _PathType, flags: int) -> None: ... # Unix only
+def lchmod(path: _PathType, mode: int) -> None: ... # Unix only
+def lchown(path: _PathType, uid: int, gid: int) -> None: ... # Unix only
+def link(src: _PathType, link_name: _PathType) -> None: ...
@overload
def listdir(path: str = ...) -> List[str]: ...
@overload
def listdir(path: bytes) -> List[bytes]: ...
-def lstat(path: AnyStr) -> stat_result: ...
-def mkfifo(path: str, mode: int = ...) -> None: ... # Unix only
-def mknod(filename: AnyStr, mode: int = ..., device: int = ...) -> None: ...
+def lstat(path: _PathType) -> stat_result: ...
+def mkfifo(path: _PathType, mode: int = ...) -> None: ... # Unix only
+def mknod(filename: _PathType, mode: int = ..., device: int = ...) -> None: ...
def major(device: int) -> int: ...
def minor(device: int) -> int: ...
def makedev(major: int, minor: int) -> int: ...
-def mkdir(path: AnyStr, mode: int = ...) -> None: ...
-def makedirs(path: AnyStr, mode: int = ...,
+def mkdir(path: _PathType, mode: int = ...) -> None: ...
+def makedirs(path: _PathType, mode: int = ...,
exist_ok: bool = ...) -> None: ...
-def pathconf(path: str, name: str) -> int: ... # Unix only
+def pathconf(path: _PathType, name: Union[str, int]) -> int: ... # Unix only
def readlink(path: AnyStr) -> AnyStr: ...
-def remove(path: AnyStr) -> None: ...
-def removedirs(path: AnyStr) -> None: ...
-def rename(src: AnyStr, dst: AnyStr) -> None: ...
-def renames(old: AnyStr, new: AnyStr) -> None: ...
+def remove(path: _PathType) -> None: ...
+def removedirs(path: _PathType) -> None: ...
+def rename(src: _PathType, dst: _PathType) -> None: ...
+def renames(old: _PathType, new: _PathType) -> None: ...
if sys.version_info >= (3, 3):
- def replace(src: AnyStr, dst: AnyStr) -> None: ...
-def rmdir(path: AnyStr) -> None: ...
+ def replace(src: _PathType, dst: _PathType) -> None: ...
+def rmdir(path: _PathType) -> None: ...
if sys.version_info >= (3, 5):
@overload
- def scandir(path: str = ...) -> Iterator[DirEntry]: ...
+ def scandir(path: str = ...) -> Iterator[DirEntry[str]]: ...
@overload
- def scandir(path: bytes) -> Iterator[DirEntry]: ...
-def stat(path: AnyStr) -> stat_result: ...
+ def scandir(path: bytes) -> Iterator[DirEntry[bytes]]: ...
+def stat(path: _PathType) -> stat_result: ...
def stat_float_times(newvalue: Union[bool, None] = ...) -> bool: ...
-def statvfs(path: str) -> statvfs_result: ... # Unix only
-def symlink(source: AnyStr, link_name: AnyStr,
+def statvfs(path: _PathType) -> statvfs_result: ... # Unix only
+def symlink(source: _PathType, link_name: _PathType,
target_is_directory: bool = ...) -> None:
... # final argument in Windows only
-def unlink(path: AnyStr) -> None: ...
-def utime(path: AnyStr, times: Union[Tuple[int, int], Tuple[float, float]] = ...) -> None: ...
+def unlink(path: _PathType) -> None: ...
+def utime(path: _PathType, times: Optional[Tuple[float, float]] = ...) -> None: ...
# TODO onerror: function from OSError to void
def walk(top: AnyStr, topdown: bool = ..., onerror: Any = ...,
followlinks: bool = ...) -> Iterator[Tuple[AnyStr, List[AnyStr],
List[AnyStr]]]: ...
-def abort() -> 'None': ...
-def execl(path: AnyStr, arg0: AnyStr, *args: AnyStr) -> None: ...
-def execle(path: AnyStr, arg0: AnyStr,
- *args: Any) -> None: ... # Imprecise signature
-def execlp(path: AnyStr, arg0: AnyStr, *args: AnyStr) -> None: ...
-def execlpe(path: AnyStr, arg0: AnyStr,
- *args: Any) -> None: ... # Imprecise signature
-def execv(path: AnyStr, args: Union[Tuple[AnyStr], List[AnyStr]]) -> None: ...
-def execve(path: AnyStr, args: Union[Tuple[AnyStr], List[AnyStr]], env: Mapping[AnyStr, AnyStr]) -> None: ...
-def execvp(file: AnyStr, args: Union[Tuple[AnyStr], List[AnyStr]]) -> None: ...
-def execvpe(file: AnyStr, args: Union[Tuple[AnyStr], List[AnyStr]],
- env: Mapping[str, str]) -> None: ...
+def abort() -> NoReturn: ...
+# These are defined as execl(file, *args) but the first *arg is mandatory.
+def execl(file: _PathType, __arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> NoReturn: ...
+def execlp(file: _PathType, __arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> NoReturn: ...
+
+# These are: execle(file, *args, env), but env is pulled from the last element of args.
+def execle(file: _PathType, __arg0: Union[bytes, Text], *args: Any) -> NoReturn: ...
+def execlpe(file: _PathType, __arg0: Union[bytes, Text], *args: Any) -> NoReturn: ...
+
+# The docs say `args: tuple or list of strings`
+# The implementation enforces tuple or list so we can't use Sequence.
+_ExecVArgs = Union[Tuple[Union[bytes, Text], ...], List[bytes], List[Text], List[Union[bytes, Text]]]
+def execv(path: _PathType, args: _ExecVArgs) -> None: ...
+def execve(path: _PathType, args: _ExecVArgs, env: Mapping[str, str]) -> None: ...
+def execvp(file: _PathType, args: _ExecVArgs) -> None: ...
+def execvpe(file: _PathType, args: _ExecVArgs, env: Mapping[str, str]) -> None: ...
+
def _exit(n: int) -> NoReturn: ...
def fork() -> int: ... # Unix only
def forkpty() -> Tuple[int, int]: ... # some flavors of Unix
@@ -329,39 +376,38 @@ class popen(_TextIOWrapper):
bufsize: int = ...) -> None: ...
def close(self) -> Any: ... # may return int
-def spawnl(mode: int, path: AnyStr, arg0: AnyStr, *args: AnyStr) -> int: ...
-def spawnle(mode: int, path: AnyStr, arg0: AnyStr,
+def spawnl(mode: int, path: _PathType, arg0: Union[bytes, Text], *args: Union[bytes, Text]) -> int: ...
+def spawnle(mode: int, path: _PathType, arg0: Union[bytes, Text],
*args: Any) -> int: ... # Imprecise sig
-def spawnlp(mode: int, file: AnyStr, arg0: AnyStr,
- *args: AnyStr) -> int: ... # Unix only TODO
-def spawnlpe(mode: int, file: AnyStr, arg0: AnyStr, *args: Any) -> int:
+def spawnlp(mode: int, file: _PathType, arg0: Union[bytes, Text],
+ *args: Union[bytes, Text]) -> int: ... # Unix only TODO
+def spawnlpe(mode: int, file: _PathType, arg0: Union[bytes, Text], *args: Any) -> int:
... # Imprecise signature; Unix only TODO
-def spawnv(mode: int, path: AnyStr, args: List[AnyStr]) -> int: ...
-def spawnve(mode: int, path: AnyStr, args: List[AnyStr],
+def spawnv(mode: int, path: _PathType, args: List[Union[bytes, Text]]) -> int: ...
+def spawnve(mode: int, path: _PathType, args: List[Union[bytes, Text]],
env: Mapping[str, str]) -> int: ...
-def spawnvp(mode: int, file: AnyStr, args: List[AnyStr]) -> int: ... # Unix only
-def spawnvpe(mode: int, file: AnyStr, args: List[AnyStr],
+def spawnvp(mode: int, file: _PathType, args: List[Union[bytes, Text]]) -> int: ... # Unix only
+def spawnvpe(mode: int, file: _PathType, args: List[Union[bytes, Text]],
env: Mapping[str, str]) -> int:
... # Unix only
-def startfile(path: str, operation: Union[str, None] = ...) -> None: ... # Windows only
-def system(command: AnyStr) -> int: ...
+def startfile(path: _PathType, operation: Optional[str] = ...) -> None: ... # Windows only
+def system(command: _PathType) -> int: ...
def times() -> Tuple[float, float, float, float, float]: ...
def wait() -> Tuple[int, int]: ... # Unix only
def waitpid(pid: int, options: int) -> Tuple[int, int]: ...
-def wait3(options: Union[int, None] = ...) -> Tuple[int, int, Any]: ... # Unix only
-def wait4(pid: int, options: int) -> Tuple[int, int, Any]:
- ... # Unix only
+def wait3(options: int) -> Tuple[int, int, Any]: ... # Unix only
+def wait4(pid: int, options: int) -> Tuple[int, int, Any]: ... # Unix only
def WCOREDUMP(status: int) -> bool: ... # Unix only
def WIFCONTINUED(status: int) -> bool: ... # Unix only
def WIFSTOPPED(status: int) -> bool: ... # Unix only
def WIFSIGNALED(status: int) -> bool: ... # Unix only
def WIFEXITED(status: int) -> bool: ... # Unix only
-def WEXITSTATUS(status: int) -> bool: ... # Unix only
-def WSTOPSIG(status: int) -> bool: ... # Unix only
-def WTERMSIG(status: int) -> bool: ... # Unix only
-def confstr(name: str) -> str: ... # Unix only
+def WEXITSTATUS(status: int) -> int: ... # Unix only
+def WSTOPSIG(status: int) -> int: ... # Unix only
+def WTERMSIG(status: int) -> int: ... # Unix only
+def confstr(name: Union[str, int]) -> Optional[str]: ... # Unix only
def getloadavg() -> Tuple[float, float, float]: ... # Unix only
-def sysconf(name: str) -> int: ... # Unix only
+def sysconf(name: Union[str, int]) -> int: ... # Unix only
def urandom(n: int) -> bytes: ...
def sched_getaffinity(id: int) -> Set[int]: ...
@@ -375,14 +421,15 @@ WNOWAIT = 0
if sys.version_info >= (3, 3):
def sync() -> None: ... # Unix only
- def truncate(path: Union[AnyStr, int], length: int) -> None: ... # Unix only up to version 3.4
+ def truncate(path: Union[_PathType, int], length: int) -> None: ... # Unix only up to version 3.4
def fwalk(top: AnyStr = ..., topdown: bool = ...,
onerror: Callable = ..., *, follow_symlinks: bool = ...,
dir_fd: int = ...) -> Iterator[Tuple[AnyStr, List[AnyStr],
List[AnyStr], int]]: ... # Unix only
- def get_terminal_size(fd: int = ...) -> Tuple[int, int]: ...
+ terminal_size = NamedTuple('terminal_size', [('columns', int), ('lines', int)])
+ def get_terminal_size(fd: int = ...) -> terminal_size: ...
if sys.version_info >= (3, 4):
def cpu_count() -> Optional[int]: ...
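
A minimal sketch of how the reworked os annotations above are exercised; the paths are illustrative, and the exec call is shown commented out because it would replace the current process:

    import os
    import sys

    # stat() accepts any _PathType value: text or bytes (and path-like objects on 3.6+).
    print(os.stat(".").st_mode, os.stat(b".").st_size)

    # get_terminal_size() returns a named tuple instead of a plain Tuple[int, int].
    if sys.stdout.isatty():
        size = os.get_terminal_size()
        print(size.columns, size.lines)

    # execvp() takes a tuple or list of strings (the _ExecVArgs alias) and never returns.
    # os.execvp("ls", ["ls", "-l"])
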
diff --git a/typeshed/stdlib/3/os/path.pyi b/typeshed/stdlib/3/os/path.pyi
index c695c57..eb930c7 100644
--- a/typeshed/stdlib/3/os/path.pyi
+++ b/typeshed/stdlib/3/os/path.pyi
@@ -2,9 +2,20 @@
# Ron Murawski <ron at horizonchess.com>
# based on http://docs.python.org/3.2/library/os.path.html
-
+# adapted for 2.7 by Michal Pokorny
import sys
-from typing import overload, List, Any, AnyStr, Sequence, Tuple, BinaryIO, TextIO
+from typing import (
+ overload, List, Any, AnyStr, Sequence, Tuple, BinaryIO, TextIO,
+ TypeVar, Union, Text, Callable
+)
+
+_T = TypeVar('_T')
+
+if sys.version_info >= (3, 6):
+ from builtins import _PathLike
+ _PathType = Union[bytes, Text, _PathLike]
+else:
+ _PathType = Union[bytes, Text]
# ----- os.path variables -----
supports_unicode_filenames = False
@@ -25,26 +36,29 @@ def basename(path: AnyStr) -> AnyStr: ...
if sys.version_info >= (3, 5):
def commonpath(paths: Sequence[AnyStr]) -> AnyStr: ...
-# NOTE: Empty List[bytes] results in '' (str) => fall back to Any return type.
-def commonprefix(list: List[AnyStr]) -> Any: ...
+# NOTE: An empty list results in '' (str) regardless of the contained type.
+# Also, in Python 2, mixed sequences of Text and bytes result in either Text or bytes,
+# so fall back to Any.
+def commonprefix(list: Sequence[AnyStr]) -> Any: ...
+
def dirname(path: AnyStr) -> AnyStr: ...
-def exists(path: AnyStr) -> bool: ...
-def lexists(path: AnyStr) -> bool: ...
+def exists(path: _PathType) -> bool: ...
+def lexists(path: _PathType) -> bool: ...
def expanduser(path: AnyStr) -> AnyStr: ...
def expandvars(path: AnyStr) -> AnyStr: ...
+# These return float if os.stat_float_times() == True,
+# but int is a subclass of float.
+def getatime(path: _PathType) -> float: ...
+def getmtime(path: _PathType) -> float: ...
+def getctime(path: _PathType) -> float: ...
-# These return float if os.stat_float_times() == True
-def getatime(path: AnyStr) -> Any: ...
-def getmtime(path: AnyStr) -> Any: ...
-def getctime(path: AnyStr) -> Any: ...
-
-def getsize(path: AnyStr) -> int: ...
-def isabs(path: AnyStr) -> bool: ...
-def isfile(path: AnyStr) -> bool: ...
-def isdir(path: AnyStr) -> bool: ...
-def islink(path: AnyStr) -> bool: ...
-def ismount(path: AnyStr) -> bool: ...
+def getsize(path: _PathType) -> int: ...
+def isabs(path: _PathType) -> bool: ...
+def isfile(path: _PathType) -> bool: ...
+def isdir(path: _PathType) -> bool: ...
+def islink(path: _PathType) -> bool: ...
+def ismount(path: _PathType) -> bool: ...
def join(path: AnyStr, *paths: AnyStr) -> AnyStr: ...
@@ -53,8 +67,9 @@ def normpath(path: AnyStr) -> AnyStr: ...
def realpath(path: AnyStr) -> AnyStr: ...
def relpath(path: AnyStr, start: AnyStr = ...) -> AnyStr: ...
-def samefile(path1: AnyStr, path2: AnyStr) -> bool: ...
+def samefile(path1: _PathType, path2: _PathType) -> bool: ...
def sameopenfile(fp1: int, fp2: int) -> bool: ...
+# TODO
# def samestat(stat1: stat_result,
# stat2: stat_result) -> bool: ... # Unix only
@@ -62,4 +77,7 @@ def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ...
def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ...
def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ...
-# def splitunc(path: str) -> Tuple[str, str]: ... # Windows only, deprecated
+def splitunc(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ... # Windows only, deprecated
+
+if sys.version_info < (3,):
+ def walk(path: AnyStr, visit: Callable[[_T, AnyStr, List[AnyStr]], Any], arg: _T) -> None: ...
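
A short sketch of the os.path signatures above; the file name is illustrative and results depend on the platform:

    import os.path

    # Predicates such as exists() accept str or bytes (and path-like objects on 3.6+),
    # while AnyStr-based helpers like join() keep str and bytes separate.
    print(os.path.exists("setup.py"), os.path.exists(b"setup.py"))
    print(os.path.join("a", "b"))      # 'a/b' on POSIX
    print(os.path.join(b"a", b"b"))    # b'a/b' on POSIX
    print(os.path.getmtime("."))       # float; an int would also satisfy the annotation
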
diff --git a/typeshed/stdlib/3/queue.pyi b/typeshed/stdlib/3/queue.pyi
index f85490d..9827bc7 100644
--- a/typeshed/stdlib/3/queue.pyi
+++ b/typeshed/stdlib/3/queue.pyi
@@ -18,7 +18,7 @@ class Queue(Generic[_T]):
def put_nowait(self, item: _T) -> None: ...
def join(self) -> None: ...
def qsize(self) -> int: ...
- def task_done(self) -> None: pass
+ def task_done(self) -> None: ...
class PriorityQueue(Queue): ...
class LifoQueue(Queue): ...
diff --git a/typeshed/stdlib/3/reprlib.pyi b/typeshed/stdlib/3/reprlib.pyi
new file mode 100644
index 0000000..4622518
--- /dev/null
+++ b/typeshed/stdlib/3/reprlib.pyi
@@ -0,0 +1,37 @@
+# Stubs for reprlib (Python 3)
+
+from array import array
+from typing import Any, Callable, Deque, Dict, FrozenSet, List, Set, Tuple
+
+_ReprFunc = Callable[[Any], str]
+
+def recursive_repr(fillvalue: str = ...) -> Callable[[_ReprFunc], _ReprFunc]: ...
+
+class Repr:
+ maxlevel: int
+ maxdict: int
+ maxlist: int
+ maxtuple: int
+ maxset: int
+ maxfrozenset: int
+ maxdeque: int
+ maxarray: int
+ maxlong: int
+ maxstring: int
+ maxother: int
+ def __init__(self) -> None: ...
+ def repr(self, x: Any) -> str: ...
+ def repr1(self, x: Any, level: int) -> str: ...
+ def repr_tuple(self, x: Tuple[Any, ...], level: int) -> str: ...
+ def repr_list(self, x: List[Any], level: int) -> str: ...
+ def repr_array(self, x: array, level: int) -> str: ...
+ def repr_set(self, x: Set[Any], level: int) -> str: ...
+ def repr_frozenset(self, x: FrozenSet[Any], level: int) -> str: ...
+ def repr_deque(self, x: Deque[Any], level: int) -> str: ...
+ def repr_dict(self, x: Dict[Any, Any], level: int) -> str: ...
+ def repr_str(self, x: str, level: int) -> str: ...
+ def repr_int(self, x: int, level: int) -> str: ...
+ def repr_instance(self, x: Any, level: int) -> str: ...
+
+aRepr: Repr
+def repr(x: object) -> str: ...
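
A quick sketch of the reprlib API described by the new stub:

    import reprlib

    # The module-level helper truncates long containers (maxlist defaults to 6).
    print(reprlib.repr(list(range(100))))   # '[0, 1, 2, 3, 4, 5, ...]'

    # A customised Repr instance keeps at most two list elements in its output.
    short = reprlib.Repr()
    short.maxlist = 2
    print(short.repr([1, 2, 3, 4]))         # '[1, 2, ...]'
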
diff --git a/typeshed/stdlib/3/shlex.pyi b/typeshed/stdlib/3/shlex.pyi
index db99fc6..ed23d5a 100644
--- a/typeshed/stdlib/3/shlex.pyi
+++ b/typeshed/stdlib/3/shlex.pyi
@@ -2,7 +2,7 @@
# Based on http://docs.python.org/3.2/library/shlex.html
-from typing import List, Tuple, Any, TextIO
+from typing import List, Tuple, Any, TextIO, Union, Optional
def split(s: str, comments: bool = ...,
posix: bool = ...) -> List[str]: ...
@@ -26,7 +26,7 @@ class shlex:
token = ... # type: str
eof = ... # type: str
- def __init__(self, instream=..., infile=...,
+ def __init__(self, instream: Union[str, TextIO] = ..., infile: Optional[str] = ...,
posix: bool = ...) -> None: ...
def get_token(self) -> str: ...
def push_token(self, tok: str) -> None: ...
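
A small sketch of the shlex constructor and split() signatures above:

    import shlex

    # split() takes a plain str and returns a List[str].
    print(shlex.split("echo 'hello world'"))   # ['echo', 'hello world']

    # The typed constructor accepts a string or a text stream as instream.
    lexer = shlex.shlex("a b 'c d'", posix=True)
    print(list(lexer))                          # ['a', 'b', 'c d']
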
diff --git a/typeshed/stdlib/3/shutil.pyi b/typeshed/stdlib/3/shutil.pyi
index 1b917e6..2939a37 100644
--- a/typeshed/stdlib/3/shutil.pyi
+++ b/typeshed/stdlib/3/shutil.pyi
@@ -1,4 +1,5 @@
# Stubs for shutil
+import os
import sys
# Based on http://docs.python.org/3.2/library/shutil.html
@@ -7,40 +8,96 @@ import sys
# sometimes they only work partially (broken exception messages), and the test
# cases don't use them.
-from typing import List, Iterable, Callable, Any, Tuple, Sequence, IO, AnyStr, Optional
+from typing import (
+ List, Iterable, Callable, Any, Tuple, Sequence, NamedTuple, IO,
+ AnyStr, Optional, Union
+)
+
+if sys.version_info >= (3, 6):
+ _Path = Union[str, os.PathLike[str]]
+    # Return type of functions that may return either the path-like object that
+    # was passed in or a string
+ _PathReturn = Any
+else:
+ _Path = str
+ _PathReturn = str
def copyfileobj(fsrc: IO[AnyStr], fdst: IO[AnyStr],
length: int = ...) -> None: ...
-def copyfile(src: str, dst: str) -> None: ...
-def copymode(src: str, dst: str) -> None: ...
-def copystat(src: str, dst: str) -> None: ...
-def copy(src: str, dst: str) -> None: ...
-def copy2(src: str, dst: str) -> None: ...
-def ignore_patterns(*patterns: str) -> Callable[[str, List[str]],
- Iterable[str]]: ...
-def copytree(src: str, dst: str, symlinks: bool = ...,
- ignore: Optional[Callable[[str, List[str]], Iterable[str]]] = ...,
- copy_function: Callable[[str, str], None] = ...,
- ignore_dangling_symlinks: bool = ...) -> None: ...
-def rmtree(path: str, ignore_errors: bool = ...,
- onerror: Callable[[Any, str, Any], None] = ...) -> None: ...
-def move(src: str, dst: str) -> None: ...
+
+if sys.version_info >= (3, 3):
+ def copyfile(src: _Path, dst: _Path, *,
+ follow_symlinks: bool = ...) -> _PathReturn: ...
+ def copymode(src: _Path, dst: _Path, *,
+ follow_symlinks: bool = ...) -> None: ...
+ def copystat(src: _Path, dst: _Path, *,
+ follow_symlinks: bool = ...) -> None: ...
+ def copy(src: _Path, dst: _Path, *,
+ follow_symlinks: bool = ...) -> _PathReturn: ...
+ def copy2(src: _Path, dst: _Path, *,
+ follow_symlinks: bool = ...) -> _PathReturn: ...
+else:
+ def copyfile(src: _Path, dst: _Path) -> None: ...
+ def copymode(src: _Path, dst: _Path) -> None: ...
+ def copystat(src: _Path, dst: _Path) -> None: ...
+ def copy(src: _Path, dst: _Path) -> None: ...
+ def copy2(src: _Path, dst: _Path) -> None: ...
+
+def ignore_patterns(*patterns: _Path) -> Callable[[_Path, List[str]],
+ Iterable[str]]: ...
+
+_IgnoreFn = Union[None, Callable[[str, List[str]], Iterable[str]], Callable[[_Path, List[str]], Iterable[str]]]
+if sys.version_info >= (3, 3):
+ def copytree(src: _Path, dst: _Path, symlinks: bool = ...,
+ ignore: _IgnoreFn = ...,
+ copy_function: Callable[[str, str], None] = ...,
+ ignore_dangling_symlinks: bool = ...) -> _PathReturn: ...
+else:
+ def copytree(src: str, dst: str, symlinks: bool = ...,
+ ignore: _IgnoreFn = ...,
+ copy_function: Callable[[str, str], None] = ...,
+ ignore_dangling_symlinks: bool = ...) -> None: ...
+
+def rmtree(path: _Path, ignore_errors: bool = ...,
+ onerror: Callable[[Any, Any, Any], None] = ...) -> None: ...
+
+if sys.version_info >= (3, 5):
+ def move(src: _Path, dst: _Path,
+ copy_function: Union[Callable[[str, str], None], Callable[[_Path, _Path], None]] = ...) -> _PathReturn: ...
+elif sys.version_info >= (3, 3):
+ def move(src: _Path, dst: _Path) -> str: ...
+else:
+ def move(src: _Path, dst: _Path) -> None: ...
+
+if sys.version_info >= (3, 3):
+ _ntuple_diskusage = NamedTuple('usage', [('total', int),
+ ('used', int),
+ ('free', int)])
+ def disk_usage(path: _Path) -> _ntuple_diskusage: ...
+ def chown(path: _Path, user: Optional[str] = ...,
+ group: Optional[str] = ...) -> None: ...
+ def which(cmd: _Path, mode: int = ...,
+ path: Optional[_Path] = ...) -> Optional[str]: ...
class Error(Exception): ...
if sys.version_info >= (3, 4):
class SameFileError(Error): ...
-def make_archive(base_name: str, format: str, root_dir: str = ...,
- base_dir: str = ..., verbose: bool = ...,
+def make_archive(base_name: str, format: str, root_dir: _Path = ...,
+ base_dir: _Path = ..., verbose: bool = ...,
dry_run: bool = ..., owner: str = ..., group: str = ...,
logger: Any = ...) -> str: ...
def get_archive_formats() -> List[Tuple[str, str]]: ...
+
+# TODO function is a callback that receives keyword arguments; should make it not use Any
+# once we have support for callable types with keyword args
def register_archive_format(name: str, function: Any,
extra_args: Sequence[Tuple[str, Any]] = ...,
description: str = ...) -> None: ...
def unregister_archive_format(name: str) -> None: ...
-def unpack_archive(filename: str, extract_dir: str = ...,
+# Should be _Path once http://bugs.python.org/issue30218 is fixed
+def unpack_archive(filename: str, extract_dir: _Path = ...,
format: str = ...) -> None: ...
def register_unpack_format(name: str, extensions: List[str], function: Any,
extra_args: Sequence[Tuple[str, Any]] = ...,
@@ -48,4 +105,5 @@ def register_unpack_format(name: str, extensions: List[str], function: Any,
def unregister_unpack_format(name: str) -> None: ...
def get_unpack_formats() -> List[Tuple[str, List[str], str]]: ...
-def which(cmd: str, mode: int = ..., path: str = ...) -> Optional[str]: ...
+if sys.version_info >= (3, 3):
+ def get_terminal_size(fallback: Tuple[int, int] = ...) -> os.terminal_size: ...
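
A sketch exercising the 3.3+ shutil additions above; the copytree() call is commented out because its source and destination directories are hypothetical:

    import shutil

    # disk_usage() returns a named tuple; which() returns Optional[str].
    usage = shutil.disk_usage(".")
    print(usage.total, usage.used, usage.free)
    print(shutil.which("python3"))

    # get_terminal_size() returns os.terminal_size and honours the fallback argument.
    print(shutil.get_terminal_size(fallback=(80, 24)))

    # copytree() accepts an ignore callback built by ignore_patterns().
    # shutil.copytree("src_dir", "dst_dir", ignore=shutil.ignore_patterns("*.pyc"))
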
diff --git a/typeshed/stdlib/3/sqlite3/dbapi2.pyi b/typeshed/stdlib/3/sqlite3/dbapi2.pyi
index ef46085..2c47843 100644
--- a/typeshed/stdlib/3/sqlite3/dbapi2.pyi
+++ b/typeshed/stdlib/3/sqlite3/dbapi2.pyi
@@ -1,11 +1,13 @@
# Filip Hron <filip.hron at gmail.com>
# based heavily on Andrey Vlasovskikh's python-skeletons https://github.com/JetBrains/python-skeletons/blob/master/sqlite3.py
-from typing import Any, Union, List, AnyStr, Iterator, Optional
+from typing import Any, Union, List, Iterator, Optional
from numbers import Integral
from datetime import time, datetime
from collections import Iterable
+import sys
+
paramstyle = ... # type: str
threadsafety = ... # type: int
apilevel = ... # type: str
@@ -66,13 +68,10 @@ version = ... # type: str
# TODO: adapt needs to get probed
def adapt(obj, protocol, alternate): ...
def complete_statement(sql: str) -> bool: ...
-def connect(database: Union[bytes, AnyStr],
- timeout: float = ...,
- detect_types: int = ...,
- isolation_level: Union[str, None] = ...,
- check_same_thread: bool = ...,
- factory: Union[Connection, None] = ...,
- cached_statements: int = ...) -> Connection: ...
+if sys.version_info >= (3, 4):
+ def connect(database: Union[bytes, str], timeout: float = ..., detect_types: int = ..., isolation_level: Union[str, None] = ..., check_same_thread: bool = ..., factory: Union[Connection, None] = ..., cached_statements: int = ..., uri: bool = ...) -> Connection: ...
+else:
+ def connect(database: Union[bytes, str], timeout: float = ..., detect_types: int = ..., isolation_level: Union[str, None] = ..., check_same_thread: bool = ..., factory: Union[Connection, None] = ..., cached_statements: int = ...) -> Connection: ...
def enable_callback_tracebacks(flag: bool) -> None: ...
def enable_shared_cache(do_enable: int) -> None: ...
def register_adapter(type: type, callable: Any) -> None: ...
@@ -110,7 +109,7 @@ class Connection:
def execute(self, sql: str, parameters: Iterable = ...) -> Cursor: ...
# TODO: please check in executemany() if seq_of_parameters type is possible like this
def executemany(self, sql: str, seq_of_parameters: Iterable[Iterable]) -> Cursor: ...
- def executescript(self, sql_script: Union[bytes, AnyStr]) -> Cursor: ...
+ def executescript(self, sql_script: Union[bytes, str]) -> Cursor: ...
def interrupt(self, *args, **kwargs) -> None: ...
def iterdump(self, *args, **kwargs) -> None: ...
def rollback(self, *args, **kwargs) -> None: ...
@@ -139,7 +138,7 @@ class Cursor(Iterator[Any]):
def close(self, *args, **kwargs): ...
def execute(self, sql: str, parameters: Iterable = ...) -> Cursor: ...
def executemany(self, sql: str, seq_of_parameters: Iterable[Iterable]): ...
- def executescript(self, sql_script: Union[bytes, AnyStr]) -> Cursor: ...
+ def executescript(self, sql_script: Union[bytes, str]) -> Cursor: ...
def fetchall(self) -> List[Any]: ...
def fetchmany(self, size: Integral = ...) -> List[Any]: ...
def fetchone(self) -> Any: ...
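
A minimal sketch of the split connect() signatures above; the URI form is 3.4+ only and the database file name is illustrative:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript("CREATE TABLE t (x INTEGER); INSERT INTO t VALUES (1);")
    print(conn.execute("SELECT x FROM t").fetchone())   # (1,)
    conn.close()

    # Read-only URI connection, available on 3.4+.
    # sqlite3.connect("file:example.db?mode=ro", uri=True)
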
diff --git a/typeshed/stdlib/3/ssl.pyi b/typeshed/stdlib/3/ssl.pyi
index 250034e..87ddd65 100644
--- a/typeshed/stdlib/3/ssl.pyi
+++ b/typeshed/stdlib/3/ssl.pyi
@@ -46,7 +46,6 @@ if sys.version_info >= (3, 4):
capath: Optional[str] = ...,
cadata: Optional[str] = ...) -> 'SSLContext': ...
-if sys.version_info >= (3, 4, 3):
def _create_unverified_context(protocol: int = ..., *,
cert_reqs: int = ...,
check_hostname: bool = ...,
@@ -94,7 +93,6 @@ if sys.version_info >= (3, 4):
VERIFY_CRL_CHECK_LEAF = ... # type: int
VERIFY_CRL_CHECK_CHAIN = ... # type: int
VERIFY_X509_STRICT = ... # type: int
-if sys.version_info >= (3, 4, 4):
VERIFY_X509_TRUSTED_FIRST = ... # type: int
PROTOCOL_SSLv23 = ... # type: int
@@ -229,7 +227,7 @@ class SSLContext:
def wrap_socket(self, sock: socket.socket, server_side: bool = ...,
do_handshake_on_connect: bool = ...,
suppress_ragged_eofs: bool = ...,
- server_hostname: Optional[str] = ...) -> 'SSLContext': ...
+ server_hostname: Optional[str] = ...) -> SSLSocket: ...
if sys.version_info >= (3, 5):
def wrap_bio(self, incoming: 'MemoryBIO', outgoing: 'MemoryBIO',
server_side: bool = ...,
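
A sketch showing why wrap_socket() should return SSLSocket rather than SSLContext; it needs network access, and example.com is just an illustrative host:

    import socket
    import ssl

    context = ssl.create_default_context()
    with socket.create_connection(("example.com", 443), timeout=10) as sock:
        with context.wrap_socket(sock, server_hostname="example.com") as tls_sock:
            # These methods exist on SSLSocket, not on SSLContext.
            print(tls_sock.cipher())
            print(tls_sock.getpeercert()["subject"])
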
diff --git a/typeshed/stdlib/3/string.pyi b/typeshed/stdlib/3/string.pyi
index 365449e..c07c186 100644
--- a/typeshed/stdlib/3/string.pyi
+++ b/typeshed/stdlib/3/string.pyi
@@ -2,7 +2,7 @@
# Based on http://docs.python.org/3.2/library/string.html
-from typing import Mapping, Sequence, Any, Optional, Union, List, Tuple, Iterable, AnyStr
+from typing import Mapping, Sequence, Any, Optional, Union, List, Tuple, Iterable
ascii_letters = ... # type: str
ascii_lowercase = ... # type: str
@@ -25,8 +25,8 @@ class Template:
**kwds: str) -> str: ...
# TODO(MichalPokorny): This is probably badly and/or loosely typed.
-class Formatter(object):
- def format(self, format_string: str, *args, **kwargs) -> str: ...
+class Formatter:
+ def format(self, format_string: str, *args: Any, **kwargs: Any) -> str: ...
def vformat(self, format_string: str, args: Sequence[Any],
kwargs: Mapping[str, Any]) -> str: ...
def parse(self, format_string: str) -> Iterable[Tuple[str, Optional[str], Optional[str], Optional[str]]]: ...
diff --git a/typeshed/stdlib/3/struct.pyi b/typeshed/stdlib/3/struct.pyi
deleted file mode 100644
index f539610..0000000
--- a/typeshed/stdlib/3/struct.pyi
+++ /dev/null
@@ -1,30 +0,0 @@
-# Stubs for struct
-
-# Based on http://docs.python.org/3.2/library/struct.html
-
-from typing import overload, Any, Tuple
-
-class error(Exception): ...
-
-def pack(fmt: str, *v: Any) -> bytes: ...
-# TODO buffer type
-def pack_into(fmt: str, buffer: Any, offset: int, *v: Any) -> None: ...
-
-# TODO buffer type
-def unpack(fmt: str, buffer: Any) -> Tuple[Any, ...]: ...
-def unpack_from(fmt: str, buffer: Any, offset: int = ...) -> Tuple[Any, ...]: ...
-
-def calcsize(fmt: str) -> int: ...
-
-class Struct:
- format = b''
- size = 0
-
- def __init__(self, format: str) -> None: ...
-
- def pack(self, *v: Any) -> bytes: ...
- # TODO buffer type
- def pack_into(self, buffer: Any, offset: int, *v: Any) -> None: ...
- # TODO buffer type
- def unpack(self, buffer: Any) -> Tuple[Any, ...]: ...
- def unpack_from(self, buffer: Any, offset: int = ...) -> Tuple[Any, ...]: ...
diff --git a/typeshed/stdlib/3/subprocess.pyi b/typeshed/stdlib/3/subprocess.pyi
index ebc7084..36e5c5c 100644
--- a/typeshed/stdlib/3/subprocess.pyi
+++ b/typeshed/stdlib/3/subprocess.pyi
@@ -1,59 +1,91 @@
# Stubs for subprocess
-# Based on http://docs.python.org/3.5/library/subprocess.html
+# Based on http://docs.python.org/3.6/library/subprocess.html
import sys
-from typing import Sequence, Any, AnyStr, Mapping, Callable, Tuple, IO, Optional, Union, List, Type
+from typing import Sequence, Any, Mapping, Callable, Tuple, IO, Optional, Union, List, Type, Text
from types import TracebackType
+_FILE = Union[int, IO[Any]]
+_TXT = Union[bytes, Text]
+_CMD = Union[_TXT, Sequence[_TXT]]
+_ENV = Union[Mapping[bytes, _TXT], Mapping[Text, _TXT]]
if sys.version_info >= (3, 5):
class CompletedProcess:
- args = ... # type: Union[Sequence[str], str]
+ # morally: _CMD
+ args = ... # type: Any
returncode = ... # type: int
+ # morally: Optional[_TXT]
stdout = ... # type: Any
stderr = ... # type: Any
- def __init__(self, args: Union[List, str],
+ def __init__(self, args: _CMD,
returncode: int,
- stdout: Union[str, bytes, None] = ...,
- stderr: Union[str, bytes, None] = ...) -> None: ...
+ stdout: Optional[_TXT] = ...,
+ stderr: Optional[_TXT] = ...) -> None: ...
def check_returncode(self) -> None: ...
- # Nearly same args as Popen.__init__ except for timeout, input, and check
- def run(args: Union[str, Sequence[str]],
- timeout: float = ...,
- input: Union[str, bytes] = ...,
- check: bool = ...,
- bufsize: int = ...,
- executable: str = ...,
- stdin: Any = ...,
- stdout: Any = ...,
- stderr: Any = ...,
- preexec_fn: Callable[[], Any] = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: str = ...,
- env: Mapping[str, str] = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...) -> CompletedProcess: ...
+ if sys.version_info >= (3, 6):
+ # Nearly same args as Popen.__init__ except for timeout, input, and check
+ def run(args: _CMD,
+ timeout: float = ...,
+ input: _TXT = ...,
+ check: bool = ...,
+ bufsize: int = ...,
+ executable: _TXT = ...,
+ stdin: _FILE = ...,
+ stdout: _FILE = ...,
+ stderr: _FILE = ...,
+ preexec_fn: Callable[[], Any] = ...,
+ close_fds: bool = ...,
+ shell: bool = ...,
+ cwd: _TXT = ...,
+ env: _ENV = ...,
+ universal_newlines: bool = ...,
+ startupinfo: Any = ...,
+ creationflags: int = ...,
+ restore_signals: bool = ...,
+ start_new_session: bool = ...,
+ pass_fds: Any = ...,
+ *,
+ encoding: Optional[str] = ...,
+ errors: Optional[str] = ...) -> CompletedProcess: ...
+ else:
+ # Nearly same args as Popen.__init__ except for timeout, input, and check
+ def run(args: _CMD,
+ timeout: float = ...,
+ input: _TXT = ...,
+ check: bool = ...,
+ bufsize: int = ...,
+ executable: _TXT = ...,
+ stdin: _FILE = ...,
+ stdout: _FILE = ...,
+ stderr: _FILE = ...,
+ preexec_fn: Callable[[], Any] = ...,
+ close_fds: bool = ...,
+ shell: bool = ...,
+ cwd: _TXT = ...,
+ env: _ENV = ...,
+ universal_newlines: bool = ...,
+ startupinfo: Any = ...,
+ creationflags: int = ...,
+ restore_signals: bool = ...,
+ start_new_session: bool = ...,
+ pass_fds: Any = ...) -> CompletedProcess: ...
# Same args as Popen.__init__
if sys.version_info >= (3, 3):
# 3.3 added timeout
- def call(args: Union[str, Sequence[str]],
+ def call(args: _CMD,
bufsize: int = ...,
- executable: str = ...,
- stdin: Any = ...,
- stdout: Any = ...,
- stderr: Any = ...,
+ executable: _TXT = ...,
+ stdin: _FILE = ...,
+ stdout: _FILE = ...,
+ stderr: _FILE = ...,
preexec_fn: Callable[[], Any] = ...,
close_fds: bool = ...,
shell: bool = ...,
- cwd: str = ...,
- env: Mapping[str, str] = ...,
+ cwd: _TXT = ...,
+ env: _ENV = ...,
universal_newlines: bool = ...,
startupinfo: Any = ...,
creationflags: int = ...,
@@ -62,17 +94,17 @@ if sys.version_info >= (3, 3):
pass_fds: Any = ...,
timeout: float = ...) -> int: ...
else:
- def call(args: Union[str, Sequence[str]],
+ def call(args: _CMD,
bufsize: int = ...,
- executable: str = ...,
- stdin: Any = ...,
- stdout: Any = ...,
- stderr: Any = ...,
+ executable: _TXT = ...,
+ stdin: _FILE = ...,
+ stdout: _FILE = ...,
+ stderr: _FILE = ...,
preexec_fn: Callable[[], Any] = ...,
close_fds: bool = ...,
shell: bool = ...,
- cwd: str = ...,
- env: Mapping[str, str] = ...,
+ cwd: _TXT = ...,
+ env: _ENV = ...,
universal_newlines: bool = ...,
startupinfo: Any = ...,
creationflags: int = ...,
@@ -83,17 +115,17 @@ else:
# Same args as Popen.__init__
if sys.version_info >= (3, 3):
# 3.3 added timeout
- def check_call(args: Union[str, Sequence[str]],
+ def check_call(args: _CMD,
bufsize: int = ...,
- executable: str = ...,
- stdin: Any = ...,
- stdout: Any = ...,
- stderr: Any = ...,
+ executable: _TXT = ...,
+ stdin: _FILE = ...,
+ stdout: _FILE = ...,
+ stderr: _FILE = ...,
preexec_fn: Callable[[], Any] = ...,
close_fds: bool = ...,
shell: bool = ...,
- cwd: str = ...,
- env: Mapping[str, str] = ...,
+ cwd: _TXT = ...,
+ env: _ENV = ...,
universal_newlines: bool = ...,
startupinfo: Any = ...,
creationflags: int = ...,
@@ -102,17 +134,17 @@ if sys.version_info >= (3, 3):
pass_fds: Any = ...,
timeout: float = ...) -> int: ...
else:
- def check_call(args: Union[str, Sequence[str]],
+ def check_call(args: _CMD,
bufsize: int = ...,
- executable: str = ...,
- stdin: Any = ...,
- stdout: Any = ...,
- stderr: Any = ...,
+ executable: _TXT = ...,
+ stdin: _FILE = ...,
+ stdout: _FILE = ...,
+ stderr: _FILE = ...,
preexec_fn: Callable[[], Any] = ...,
close_fds: bool = ...,
shell: bool = ...,
- cwd: str = ...,
- env: Mapping[str, str] = ...,
+ cwd: _TXT = ...,
+ env: _ENV = ...,
universal_newlines: bool = ...,
startupinfo: Any = ...,
creationflags: int = ...,
@@ -120,18 +152,42 @@ else:
start_new_session: bool = ...,
pass_fds: Any = ...) -> int: ...
-if sys.version_info >= (3, 4):
+if sys.version_info >= (3, 6):
+ # 3.6 added encoding and errors
+ def check_output(args: _CMD,
+ bufsize: int = ...,
+ executable: _TXT = ...,
+ stdin: _FILE = ...,
+ stderr: _FILE = ...,
+ preexec_fn: Callable[[], Any] = ...,
+ close_fds: bool = ...,
+ shell: bool = ...,
+ cwd: _TXT = ...,
+ env: _ENV = ...,
+ universal_newlines: bool = ...,
+ startupinfo: Any = ...,
+ creationflags: int = ...,
+ restore_signals: bool = ...,
+ start_new_session: bool = ...,
+ pass_fds: Any = ...,
+ *,
+ timeout: float = ...,
+ input: _TXT = ...,
+ encoding: Optional[str] = ...,
+ errors: Optional[str] = ...,
+ ) -> Any: ... # morally: -> _TXT
+elif sys.version_info >= (3, 4):
# 3.4 added input
- def check_output(args: Union[str, Sequence[str]],
+ def check_output(args: _CMD,
bufsize: int = ...,
- executable: str = ...,
- stdin: Any = ...,
- stderr: Any = ...,
+ executable: _TXT = ...,
+ stdin: _FILE = ...,
+ stderr: _FILE = ...,
preexec_fn: Callable[[], Any] = ...,
close_fds: bool = ...,
shell: bool = ...,
- cwd: str = ...,
- env: Mapping[str, str] = ...,
+ cwd: _TXT = ...,
+ env: _ENV = ...,
universal_newlines: bool = ...,
startupinfo: Any = ...,
creationflags: int = ...,
@@ -139,44 +195,47 @@ if sys.version_info >= (3, 4):
start_new_session: bool = ...,
pass_fds: Any = ...,
timeout: float = ...,
- input: Union[str, bytes] = ...) -> Any: ...
+ input: _TXT = ...,
+ ) -> Any: ... # morally: -> _TXT
elif sys.version_info >= (3, 3):
# 3.3 added timeout
- def check_output(args: Union[str, Sequence[str]],
+ def check_output(args: _CMD,
bufsize: int = ...,
- executable: str = ...,
- stdin: Any = ...,
- stderr: Any = ...,
+ executable: _TXT = ...,
+ stdin: _FILE = ...,
+ stderr: _FILE = ...,
preexec_fn: Callable[[], Any] = ...,
close_fds: bool = ...,
shell: bool = ...,
- cwd: str = ...,
- env: Mapping[str, str] = ...,
+ cwd: _TXT = ...,
+ env: _ENV = ...,
universal_newlines: bool = ...,
startupinfo: Any = ...,
creationflags: int = ...,
restore_signals: bool = ...,
start_new_session: bool = ...,
pass_fds: Any = ...,
- timeout: float = ...) -> Any: ...
+ timeout: float = ...,
+ ) -> Any: ... # morally: -> _TXT
else:
# Same args as Popen.__init__, except for stdout
- def check_output(args: Union[str, Sequence[str]],
+ def check_output(args: _CMD,
bufsize: int = ...,
- executable: str = ...,
- stdin: Any = ...,
- stderr: Any = ...,
+ executable: _TXT = ...,
+ stdin: _FILE = ...,
+ stderr: _FILE = ...,
preexec_fn: Callable[[], Any] = ...,
close_fds: bool = ...,
shell: bool = ...,
- cwd: str = ...,
- env: Mapping[str, str] = ...,
+ cwd: _TXT = ...,
+ env: _ENV = ...,
universal_newlines: bool = ...,
startupinfo: Any = ...,
creationflags: int = ...,
restore_signals: bool = ...,
start_new_session: bool = ...,
- pass_fds: Any = ...) -> Any: ...
+ pass_fds: Any = ...,
+ ) -> Any: ... # morally: -> _TXT
# TODO types
@@ -190,15 +249,21 @@ if sys.version_info >= (3, 3):
class CalledProcessError(Exception):
returncode = 0
- cmd = ... # type: str
- output = b'' # May be None
+ # morally: _CMD
+ cmd = ... # type: Any
+ # morally: Optional[_TXT]
+ output = ... # type: Any
if sys.version_info >= (3, 5):
- stdout = b''
- stderr = b''
+ # morally: Optional[_TXT]
+ stdout = ... # type: Any
+ stderr = ... # type: Any
- def __init__(self, returncode: int, cmd: str, output: Optional[str] = ...,
- stderr: Optional[str] = ...) -> None: ...
+ def __init__(self,
+ returncode: int,
+ cmd: _CMD,
+ output: Optional[_TXT] = ...,
+ stderr: Optional[_TXT] = ...) -> None: ...
class Popen:
stdin = ... # type: IO[Any]
@@ -207,24 +272,47 @@ class Popen:
pid = 0
returncode = 0
- def __init__(self,
- args: Union[str, Sequence[str]],
- bufsize: int = ...,
- executable: Optional[str] = ...,
- stdin: Optional[Any] = ...,
- stdout: Optional[Any] = ...,
- stderr: Optional[Any] = ...,
- preexec_fn: Optional[Callable[[], Any]] = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: Optional[str] = ...,
- env: Optional[Mapping[str, str]] = ...,
- universal_newlines: bool = ...,
- startupinfo: Optional[Any] = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...) -> None: ...
+ if sys.version_info >= (3, 6):
+ def __init__(self,
+ args: _CMD,
+ bufsize: int = ...,
+ executable: Optional[_TXT] = ...,
+ stdin: Optional[_FILE] = ...,
+ stdout: Optional[_FILE] = ...,
+ stderr: Optional[_FILE] = ...,
+ preexec_fn: Optional[Callable[[], Any]] = ...,
+ close_fds: bool = ...,
+ shell: bool = ...,
+ cwd: Optional[_TXT] = ...,
+ env: Optional[_ENV] = ...,
+ universal_newlines: bool = ...,
+ startupinfo: Optional[Any] = ...,
+ creationflags: int = ...,
+ restore_signals: bool = ...,
+ start_new_session: bool = ...,
+ pass_fds: Any = ...,
+ *,
+ encoding: Optional[str] = ...,
+ errors: Optional[str] = ...) -> None: ...
+ else:
+ def __init__(self,
+ args: _CMD,
+ bufsize: int = ...,
+ executable: Optional[_TXT] = ...,
+ stdin: Optional[_FILE] = ...,
+ stdout: Optional[_FILE] = ...,
+ stderr: Optional[_FILE] = ...,
+ preexec_fn: Optional[Callable[[], Any]] = ...,
+ close_fds: bool = ...,
+ shell: bool = ...,
+ cwd: Optional[_TXT] = ...,
+ env: Optional[_ENV] = ...,
+ universal_newlines: bool = ...,
+ startupinfo: Optional[Any] = ...,
+ creationflags: int = ...,
+ restore_signals: bool = ...,
+ start_new_session: bool = ...,
+ pass_fds: Any = ...) -> None: ...
def poll(self) -> int: ...
if sys.version_info >= (3, 3):
@@ -234,16 +322,24 @@ class Popen:
def wait(self) ->int: ...
# Return str/bytes
if sys.version_info >= (3, 3):
- def communicate(self, input: Optional[AnyStr] = ..., timeout: Optional[float] = ...) -> Tuple[Any, Any]: ...
+ def communicate(self,
+ input: Optional[_TXT] = ...,
+ timeout: Optional[float] = ...,
+ # morally: -> Tuple[Optional[_TXT], Optional[_TXT]]
+ ) -> Tuple[Any, Any]: ...
else:
- def communicate(self, input: Optional[AnyStr] = ...) -> Tuple[Any, Any]: ...
+ def communicate(self,
+ input: Optional[_TXT] = ...,
+ # morally: -> Tuple[Optional[_TXT], Optional[_TXT]]
+ ) -> Tuple[Any, Any]: ...
def send_signal(self, signal: int) -> None: ...
def terminate(self) -> None: ...
def kill(self) -> None: ...
def __enter__(self) -> 'Popen': ...
def __exit__(self, type: Optional[Type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType]) -> bool: ...
-def getstatusoutput(cmd: str) -> Tuple[int, str]: ...
-def getoutput(cmd: str) -> str: ...
+# The result really is always a str.
+def getstatusoutput(cmd: _TXT) -> Tuple[int, str]: ...
+def getoutput(cmd: _TXT) -> str: ...
# Windows-only: STARTUPINFO etc.
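
A short sketch of the subprocess aliases above; the command is illustrative and assumes a python3 executable on PATH:

    import subprocess

    # run() (3.5+) accepts _CMD arguments; stdout is bytes when no encoding is given.
    result = subprocess.run(["python3", "-c", "print('hi')"],
                            stdout=subprocess.PIPE, check=True)
    print(result.returncode, result.stdout)    # 0 b'hi\n'

    # getoutput() always returns str, whichever _TXT form the command takes.
    print(subprocess.getoutput("echo done"))   # 'done'
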
diff --git a/typeshed/stdlib/3/symbol.pyi b/typeshed/stdlib/3/symbol.pyi
new file mode 100644
index 0000000..82f5e2c
--- /dev/null
+++ b/typeshed/stdlib/3/symbol.pyi
@@ -0,0 +1,98 @@
+# Stubs for symbol (Python 3)
+
+import sys
+from typing import Dict
+
+single_input = ... # type: int
+file_input = ... # type: int
+eval_input = ... # type: int
+decorator = ... # type: int
+decorators = ... # type: int
+decorated = ... # type: int
+if sys.version_info >= (3, 5):
+ async_funcdef = ... # type: int
+funcdef = ... # type: int
+parameters = ... # type: int
+typedargslist = ... # type: int
+tfpdef = ... # type: int
+varargslist = ... # type: int
+vfpdef = ... # type: int
+stmt = ... # type: int
+simple_stmt = ... # type: int
+small_stmt = ... # type: int
+expr_stmt = ... # type: int
+if sys.version_info >= (3, 6):
+ annassign = ... # type: int
+testlist_star_expr = ... # type: int
+augassign = ... # type: int
+del_stmt = ... # type: int
+pass_stmt = ... # type: int
+flow_stmt = ... # type: int
+break_stmt = ... # type: int
+continue_stmt = ... # type: int
+return_stmt = ... # type: int
+yield_stmt = ... # type: int
+raise_stmt = ... # type: int
+import_stmt = ... # type: int
+import_name = ... # type: int
+import_from = ... # type: int
+import_as_name = ... # type: int
+dotted_as_name = ... # type: int
+import_as_names = ... # type: int
+dotted_as_names = ... # type: int
+dotted_name = ... # type: int
+global_stmt = ... # type: int
+nonlocal_stmt = ... # type: int
+assert_stmt = ... # type: int
+compound_stmt = ... # type: int
+if sys.version_info >= (3, 5):
+ async_stmt = ... # type: int
+if_stmt = ... # type: int
+while_stmt = ... # type: int
+for_stmt = ... # type: int
+try_stmt = ... # type: int
+with_stmt = ... # type: int
+with_item = ... # type: int
+except_clause = ... # type: int
+suite = ... # type: int
+test = ... # type: int
+test_nocond = ... # type: int
+lambdef = ... # type: int
+lambdef_nocond = ... # type: int
+or_test = ... # type: int
+and_test = ... # type: int
+not_test = ... # type: int
+comparison = ... # type: int
+comp_op = ... # type: int
+star_expr = ... # type: int
+expr = ... # type: int
+xor_expr = ... # type: int
+and_expr = ... # type: int
+shift_expr = ... # type: int
+arith_expr = ... # type: int
+term = ... # type: int
+factor = ... # type: int
+power = ... # type: int
+if sys.version_info >= (3, 5):
+ atom_expr = ... # type: int
+atom = ... # type: int
+testlist_comp = ... # type: int
+trailer = ... # type: int
+subscriptlist = ... # type: int
+subscript = ... # type: int
+sliceop = ... # type: int
+exprlist = ... # type: int
+testlist = ... # type: int
+dictorsetmaker = ... # type: int
+classdef = ... # type: int
+arglist = ... # type: int
+argument = ... # type: int
+comp_iter = ... # type: int
+comp_for = ... # type: int
+comp_if = ... # type: int
+encoding_decl = ... # type: int
+yield_expr = ... # type: int
+if sys.version_info >= (3, 3):
+ yield_arg = ... # type: int
+
+sym_name = ... # type: Dict[int, str]
diff --git a/typeshed/stdlib/3/sysconfig.pyi b/typeshed/stdlib/3/sysconfig.pyi
deleted file mode 100644
index 8d7ab2c..0000000
--- a/typeshed/stdlib/3/sysconfig.pyi
+++ /dev/null
@@ -1,8 +0,0 @@
-# Stubs for sysconfig
-
-# NOTE: These are incomplete!
-
-import typing
-
-def get_config_var(name: str) -> str: ...
-def is_python_build() -> bool: ...
diff --git a/typeshed/stdlib/3/tempfile.pyi b/typeshed/stdlib/3/tempfile.pyi
index ea5207a..ab8c380 100644
--- a/typeshed/stdlib/3/tempfile.pyi
+++ b/typeshed/stdlib/3/tempfile.pyi
@@ -3,49 +3,88 @@
# based on http://docs.python.org/3.3/library/tempfile.html
+import sys
from types import TracebackType
-from typing import BinaryIO, Optional, Tuple, Type
+from typing import Any, AnyStr, Generic, IO, Optional, Tuple, Type
# global variables
-tempdir = ... # type: str
+tempdir = ... # type: Optional[str]
template = ... # type: str
-# TODO text files
-
-# function stubs
-def TemporaryFile(
- mode: str = ..., buffering: int = ..., encoding: str = ...,
- newline: str = ..., suffix: str = ..., prefix: str = ...,
- dir: str = ...
-) -> BinaryIO:
- ...
-def NamedTemporaryFile(
- mode: str = ..., buffering: int = ..., encoding: str = ...,
- newline: str = ..., suffix: str = ..., prefix: str = ...,
- dir: str = ..., delete: bool =...
-) -> BinaryIO:
- ...
-def SpooledTemporaryFile(
- max_size: int = ..., mode: str = ..., buffering: int = ...,
- encoding: str = ..., newline: str = ..., suffix: str = ...,
- prefix: str = ..., dir: str = ...
-) -> BinaryIO:
- ...
-
-class TemporaryDirectory:
- name = ... # type: str
- def __init__(self, suffix: str = ..., prefix: str = ...,
- dir: str = ...) -> None: ...
- def cleanup(self) -> None: ...
- def __enter__(self) -> str: ...
- def __exit__(self, exc_type: Optional[Type[BaseException]],
- exc_val: Optional[Exception],
- exc_tb: Optional[TracebackType]) -> bool: ...
-
-def mkstemp(suffix: str = ..., prefix: str = ..., dir: str = ...,
- text: bool = ...) -> Tuple[int, str]: ...
-def mkdtemp(suffix: str = ..., prefix: str = ...,
- dir: str = ...) -> str: ...
-def mktemp(suffix: str = ..., prefix: str = ..., dir: str = ...) -> str: ...
+
+if sys.version_info >= (3, 5):
+ def TemporaryFile(
+ mode: str = ..., buffering: int = ..., encoding: str = ...,
+ newline: str = ..., suffix: Optional[AnyStr]= ..., prefix: Optional[AnyStr] = ...,
+ dir: Optional[AnyStr] = ...
+ ) -> IO[Any]:
+ ...
+ def NamedTemporaryFile(
+ mode: str = ..., buffering: int = ..., encoding: str = ...,
+ newline: str = ..., suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ...,
+ dir: Optional[AnyStr] = ..., delete: bool =...
+ ) -> IO[Any]:
+ ...
+ def SpooledTemporaryFile(
+ max_size: int = ..., mode: str = ..., buffering: int = ...,
+ encoding: str = ..., newline: str = ..., suffix: Optional[AnyStr] = ...,
+ prefix: Optional[AnyStr] = ..., dir: Optional[AnyStr] = ...
+ ) -> IO[Any]:
+ ...
+
+ class TemporaryDirectory(Generic[AnyStr]):
+ name = ... # type: str
+ def __init__(self, suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ...,
+ dir: Optional[AnyStr] = ...) -> None: ...
+ def cleanup(self) -> None: ...
+ def __enter__(self) -> AnyStr: ...
+ def __exit__(self, exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType]) -> bool: ...
+
+ def mkstemp(suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[AnyStr] = ...,
+ text: bool = ...) -> Tuple[int, AnyStr]: ...
+ def mkdtemp(suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ...,
+ dir: Optional[str] = ...) -> AnyStr: ...
+ def mktemp(suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[AnyStr] = ...) -> AnyStr: ...
+
+ def gettempdirb() -> bytes: ...
+ def gettempprefixb() -> bytes: ...
+else:
+ def TemporaryFile(
+ mode: str = ..., buffering: int = ..., encoding: str = ...,
+ newline: str = ..., suffix: str = ..., prefix: str = ...,
+ dir: Optional[str] = ...
+ ) -> IO[Any]:
+ ...
+ def NamedTemporaryFile(
+ mode: str = ..., buffering: int = ..., encoding: str = ...,
+ newline: str = ..., suffix: str = ..., prefix: str = ...,
+ dir: Optional[str] = ..., delete: bool =...
+ ) -> IO[Any]:
+ ...
+ def SpooledTemporaryFile(
+ max_size: int = ..., mode: str = ..., buffering: int = ...,
+ encoding: str = ..., newline: str = ..., suffix: str = ...,
+ prefix: str = ..., dir: Optional[str] = ...
+ ) -> IO[Any]:
+ ...
+
+ class TemporaryDirectory:
+ name = ... # type: str
+ def __init__(self, suffix: str = ..., prefix: str = ...,
+ dir: Optional[str] = ...) -> None: ...
+ def cleanup(self) -> None: ...
+ def __enter__(self) -> str: ...
+ def __exit__(self, exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType]) -> bool: ...
+
+ def mkstemp(suffix: str = ..., prefix: str = ..., dir: Optional[str] = ...,
+ text: bool = ...) -> Tuple[int, str]: ...
+ def mkdtemp(suffix: str = ..., prefix: str = ...,
+ dir: Optional[str] = ...) -> str: ...
+ def mktemp(suffix: str = ..., prefix: str = ..., dir: Optional[str] = ...) -> str: ...
+
def gettempdir() -> str: ...
def gettempprefix() -> str: ...
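
A sketch of the AnyStr-parametrised tempfile helpers above; bytes suffixes require 3.5+:

    import os
    import tempfile

    # mkstemp() returns (fd, path); the path type follows the suffix/prefix type.
    fd, path = tempfile.mkstemp(suffix=".txt")
    os.close(fd)
    os.remove(path)

    fd_b, path_b = tempfile.mkstemp(suffix=b".bin")   # bytes path on 3.5+
    os.close(fd_b)
    os.remove(path_b)

    # TemporaryDirectory is generic over the same AnyStr; here __enter__ yields str.
    with tempfile.TemporaryDirectory(prefix="demo-") as tmpdir:
        print(type(tmpdir))    # <class 'str'>
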
diff --git a/typeshed/stdlib/3/time.pyi b/typeshed/stdlib/3/time.pyi
index edcb9d6..57884b2 100644
--- a/typeshed/stdlib/3/time.pyi
+++ b/typeshed/stdlib/3/time.pyi
@@ -74,11 +74,11 @@ if sys.platform != 'win32':
def tzset() -> None: ... # Unix only
if sys.version_info >= (3, 3):
- def get_clock_info(str) -> SimpleNamespace: ...
+ def get_clock_info(name: str) -> SimpleNamespace: ...
def monotonic() -> float: ...
def perf_counter() -> float: ...
def process_time() -> float: ...
if sys.platform != 'win32':
- def clock_getres(int) -> float: ... # Unix only
- def clock_gettime(int) -> float: ... # Unix only
- def clock_settime(int, struct_time) -> float: ... # Unix only
+ def clock_getres(clk_id: int) -> float: ... # Unix only
+ def clock_gettime(clk_id: int) -> float: ... # Unix only
+ def clock_settime(clk_id: int, time: struct_time) -> float: ... # Unix only
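
A one-screen sketch of the clock functions whose parameters are now named above; the clock_* calls are Unix-only:

    import sys
    import time

    print(time.get_clock_info("monotonic"))
    if sys.platform != "win32":
        print(time.clock_getres(time.CLOCK_MONOTONIC))
        print(time.clock_gettime(time.CLOCK_MONOTONIC))
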
diff --git a/typeshed/stdlib/3/token.pyi b/typeshed/stdlib/3/token.pyi
deleted file mode 100644
index 24e6ea3..0000000
--- a/typeshed/stdlib/3/token.pyi
+++ /dev/null
@@ -1,63 +0,0 @@
-from typing import Dict
-
-ENDMARKER = 0
-NAME = 0
-NUMBER = 0
-STRING = 0
-NEWLINE = 0
-INDENT = 0
-DEDENT = 0
-LPAR = 0
-RPAR = 0
-LSQB = 0
-RSQB = 0
-COLON = 0
-COMMA = 0
-SEMI = 0
-PLUS = 0
-MINUS = 0
-STAR = 0
-SLASH = 0
-VBAR = 0
-AMPER = 0
-LESS = 0
-GREATER = 0
-EQUAL = 0
-DOT = 0
-PERCENT = 0
-LBRACE = 0
-RBRACE = 0
-EQEQUAL = 0
-NOTEQUAL = 0
-LESSEQUAL = 0
-GREATEREQUAL = 0
-TILDE = 0
-CIRCUMFLEX = 0
-LEFTSHIFT = 0
-RIGHTSHIFT = 0
-DOUBLESTAR = 0
-PLUSEQUAL = 0
-MINEQUAL = 0
-STAREQUAL = 0
-SLASHEQUAL = 0
-PERCENTEQUAL = 0
-AMPEREQUAL = 0
-VBAREQUAL = 0
-CIRCUMFLEXEQUAL = 0
-LEFTSHIFTEQUAL = 0
-RIGHTSHIFTEQUAL = 0
-DOUBLESTAREQUAL = 0
-DOUBLESLASH = 0
-DOUBLESLASHEQUAL = 0
-AT = 0
-RARROW = 0
-ELLIPSIS = 0
-OP = 0
-ERRORTOKEN = 0
-N_TOKENS = 0
-NT_OFFSET = 0
-tok_name = ... # type: Dict[int, str]
-
-def ISTERMINAL(x: int) -> bool: pass
-def ISNONTERMINAL(x: int) -> bool: pass
-def ISEOF(x: int) -> bool: pass
diff --git a/typeshed/stdlib/3/tokenize.pyi b/typeshed/stdlib/3/tokenize.pyi
index 9849f68..8e8fc13 100644
--- a/typeshed/stdlib/3/tokenize.pyi
+++ b/typeshed/stdlib/3/tokenize.pyi
@@ -2,34 +2,47 @@
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
-from typing import Any, Union, TextIO
+from typing import Any, Callable, Generator, Iterable, List, NamedTuple, Optional, Union, Sequence, TextIO, Tuple
from builtins import open as _builtin_open
from token import * # noqa: F403
-COMMENT = ... # type: Any
-NL = ... # type: Any
-ENCODING = ... # type: Any
+COMMENT = ... # type: int
+NL = ... # type: int
+ENCODING = ... # type: int
-class TokenInfo:
+_Position = Tuple[int, int]
+
+_TokenInfo = NamedTuple('TokenInfo', [
+ ('type', int),
+ ('string', str),
+ ('start', _Position),
+ ('end', _Position),
+ ('line', str)
+])
+
+class TokenInfo(_TokenInfo):
@property
- def exact_type(self): ...
+ def exact_type(self) -> int: ...
+
+# Backwards compatible tokens can be sequences of a shorter length too
+_Token = Union[TokenInfo, Sequence[Union[int, str, _Position]]]
class TokenError(Exception): ...
class StopTokenizing(Exception): ...
class Untokenizer:
- tokens = ... # type: Any
- prev_row = ... # type: Any
- prev_col = ... # type: Any
- encoding = ... # type: Any
- def __init__(self): ...
- def add_whitespace(self, start): ...
- def untokenize(self, iterable): ...
- def compat(self, token, iterable): ...
+ tokens = ... # type: List[str]
+ prev_row = ... # type: int
+ prev_col = ... # type: int
+ encoding = ... # type: Optional[str]
+ def __init__(self) -> None: ...
+ def add_whitespace(self, start: _Position) -> None: ...
+ def untokenize(self, iterable: Iterable[_Token]) -> str: ...
+ def compat(self, token: Sequence[Union[int, str]], iterable: Iterable[_Token]) -> None: ...
-def untokenize(iterable): ...
-def detect_encoding(readline): ...
-def tokenize(readline): ...
+def untokenize(iterable: Iterable[_Token]) -> Any: ...
+def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ...
+def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ...
def open(filename: Union[str, bytes, int]) -> TextIO: ...
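
A sketch of the tokenize types above: tokenize() consumes a bytes readline callable and yields TokenInfo named tuples:

    import io
    import tokenize

    source = b"x = 1 + 2\n"
    for tok in tokenize.tokenize(io.BytesIO(source).readline):
        print(tok.type, tok.string, tok.start, tok.end)

    # detect_encoding() returns the encoding name and the lines it consumed.
    encoding, consumed = tokenize.detect_encoding(io.BytesIO(source).readline)
    print(encoding)    # 'utf-8'
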
diff --git a/typeshed/stdlib/3/types.pyi b/typeshed/stdlib/3/types.pyi
index 4c9535b..084a1b4 100644
--- a/typeshed/stdlib/3/types.pyi
+++ b/typeshed/stdlib/3/types.pyi
@@ -129,7 +129,7 @@ class FrameType:
f_locals = ... # type: Dict[str, Any]
f_trace = ... # type: Callable[[], None]
- def clear(self) -> None: pass
+ def clear(self) -> None: ...
class GetSetDescriptorType:
__name__ = ... # type: str
diff --git a/typeshed/stdlib/3/typing.pyi b/typeshed/stdlib/3/typing.pyi
index 9c7ad8c..e57bcc1 100644
--- a/typeshed/stdlib/3/typing.pyi
+++ b/typeshed/stdlib/3/typing.pyi
@@ -2,6 +2,7 @@
import sys
from abc import abstractmethod, ABCMeta
+from types import CodeType, FrameType
# Definitions of special type checking related constructs. Their definition
# are not used, so their value does not matter.
@@ -32,6 +33,11 @@ List = TypeAlias(object)
Dict = TypeAlias(object)
DefaultDict = TypeAlias(object)
Set = TypeAlias(object)
+FrozenSet = TypeAlias(object)
+Counter = TypeAlias(object)
+Deque = TypeAlias(object)
+if sys.version_info >= (3, 3):
+ ChainMap = TypeAlias(object)
# Predefined type variables.
AnyStr = TypeVar('AnyStr', str, bytes)
@@ -59,11 +65,11 @@ class SupportsFloat(metaclass=ABCMeta):
class SupportsComplex(metaclass=ABCMeta):
@abstractmethod
- def __complex__(self) -> complex: pass
+ def __complex__(self) -> complex: ...
class SupportsBytes(metaclass=ABCMeta):
@abstractmethod
- def __bytes__(self) -> bytes: pass
+ def __bytes__(self) -> bytes: ...
class SupportsAbs(Generic[_T]):
@abstractmethod
@@ -115,6 +121,11 @@ class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]):
@abstractmethod
def __iter__(self) -> 'Generator[_T_co, _T_contra, _V_co]': ...
+ gi_code = ... # type: CodeType
+ gi_frame = ... # type: FrameType
+ gi_running = ... # type: bool
+ gi_yieldfrom = ... # type: Optional[Generator]
+
# TODO: Several types should only be defined if sys.python_version >= (3, 5):
# Awaitable, AsyncIterator, AsyncIterable, Coroutine, Collection, ContextManager.
# See https: //github.com/python/typeshed/issues/655 for why this is not easy.
@@ -170,11 +181,23 @@ if sys.version_info >= (3, 6):
@abstractmethod
def __aiter__(self) -> 'AsyncGenerator[_T_co, _T_contra]': ...
+ ag_await = ... # type: Any
+ ag_code = ... # type: CodeType
+ ag_frame = ... # type: FrameType
+ ag_running = ... # type: bool
+
class Container(Generic[_T_co]):
@abstractmethod
def __contains__(self, x: object) -> bool: ...
-class Sequence(Iterable[_T_co], Container[_T_co], Sized, Reversible[_T_co], Generic[_T_co]):
+
+if sys.version_info >= (3, 6):
+ class Collection(Sized, Iterable[_T_co], Container[_T_co], Generic[_T_co]): ...
+ _Collection = Collection
+else:
+ class _Collection(Sized, Iterable[_T_co], Container[_T_co], Generic[_T_co]): ...
+
+class Sequence(_Collection[_T_co], Reversible[_T_co], Generic[_T_co]):
@overload
@abstractmethod
def __getitem__(self, i: int) -> _T_co: ...
@@ -214,7 +237,7 @@ class MutableSequence(Sequence[_T], Generic[_T]):
def remove(self, object: _T) -> None: ...
def __iadd__(self, x: Iterable[_T]) -> MutableSequence[_T]: ...
-class AbstractSet(Iterable[_T_co], Container[_T_co], Sized, Generic[_T_co]):
+class AbstractSet(_Collection[_T_co], Generic[_T_co]):
@abstractmethod
def __contains__(self, x: object) -> bool: ...
# Mixin methods
@@ -229,8 +252,6 @@ class AbstractSet(Iterable[_T_co], Container[_T_co], Sized, Generic[_T_co]):
# TODO: Argument can be a more general ABC?
def isdisjoint(self, s: AbstractSet[Any]) -> bool: ...
-class FrozenSet(AbstractSet[_T_co], Generic[_T_co]): ...
-
class MutableSet(AbstractSet[_T], Generic[_T]):
@abstractmethod
def add(self, x: _T) -> None: ...
@@ -262,7 +283,7 @@ class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]):
# TODO: ContextManager (only if contextlib.AbstractContextManager exists)
-class Mapping(Iterable[_KT], Container[_KT], Sized, Generic[_KT, _VT_co]):
+class Mapping(_Collection[_KT], Generic[_KT, _VT_co]):
# TODO: We wish the key type could also be covariant, but that doesn't work,
# see discussion in https: //github.com/python/typing/pull/273.
@abstractmethod
@@ -285,7 +306,10 @@ class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]):
def __delitem__(self, v: _KT) -> None: ...
def clear(self) -> None: ...
- def pop(self, k: _KT, default: _VT = ...) -> _VT: ...
+ @overload
+ def pop(self, k: _KT) -> _VT: ...
+ @overload
+ def pop(self, k: _KT, default: Union[_VT, _T] = ...) -> Union[_VT, _T]: ...
def popitem(self) -> Tuple[_KT, _VT]: ...
def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
# 'update' used to take a Union, but using overloading is better.
@@ -299,9 +323,9 @@ class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]):
# known to be a Mapping with unknown type parameters, which is closer
# to the behavior we want. See mypy issue #1430.
@overload
- def update(self, m: Mapping[_KT, _VT]) -> None: ...
+ def update(self, m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
@overload
- def update(self, m: Iterable[Tuple[_KT, _VT]]) -> None: ...
+ def update(self, m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
Text = str
@@ -339,9 +363,8 @@ class IO(Iterator[AnyStr], Generic[AnyStr]):
def seekable(self) -> bool: ...
@abstractmethod
def tell(self) -> int: ...
- # TODO None should not be compatible with int
@abstractmethod
- def truncate(self, size: int = ...) -> int: ...
+ def truncate(self, size: Optional[int] = ...) -> int: ...
@abstractmethod
def writable(self) -> bool: ...
# TODO buffer objects
@@ -421,10 +444,12 @@ class Match(Generic[AnyStr]):
def start(self, group: Union[int, str] = ...) -> int: ...
def end(self, group: Union[int, str] = ...) -> int: ...
def span(self, group: Union[int, str] = ...) -> Tuple[int, int]: ...
+ if sys.version_info >= (3, 6):
+ def __getitem__(self, g: Union[int, str]) -> AnyStr: ...
class Pattern(Generic[AnyStr]):
flags = 0
- groupindex = 0
+ groupindex = ... # type: Mapping[str, int]
groups = 0
pattern = ... # type: AnyStr
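
A sketch of two of the typing refinements above: the pop() overloads and the 3.6-only Match subscription:

    import re
    import sys
    from typing import MutableMapping, Optional

    # With the overloads, a default of a different type shows up in the result type.
    def first_or_none(d: MutableMapping[str, int], key: str) -> Optional[int]:
        return d.pop(key, None)

    print(first_or_none({"a": 1}, "a"), first_or_none({}, "a"))   # 1 None

    # groupindex is a Mapping[str, int]; 3.6+ match objects also support m[...].
    match = re.match(r"(?P<word>\w+)", "hello world")
    assert match is not None
    print(match.re.groupindex["word"], match.group("word"))
    if sys.version_info >= (3, 6):
        print(match["word"])
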
diff --git a/typeshed/stdlib/3/unicodedata.pyi b/typeshed/stdlib/3/unicodedata.pyi
deleted file mode 100644
index 07b5c47..0000000
--- a/typeshed/stdlib/3/unicodedata.pyi
+++ /dev/null
@@ -1,37 +0,0 @@
-# Stubs for unicodedata (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-ucd_3_2_0 = ... # type: Any
-ucnhash_CAPI = ... # type: Any
-unidata_version = ... # type: str
-
-def bidirectional(unichr): ...
-def category(unichr): ...
-def combining(unichr): ...
-def decimal(chr, default=...): ...
-def decomposition(unichr): ...
-def digit(chr, default=...): ...
-def east_asian_width(unichr): ...
-def lookup(name): ...
-def mirrored(unichr): ...
-def name(chr, default=...): ...
-def normalize(form, unistr): ...
-def numeric(chr, default=...): ...
-
-class UCD:
- unidata_version = ... # type: Any
- def bidirectional(self, unichr): ...
- def category(self, unichr): ...
- def combining(self, unichr): ...
- def decimal(self, chr, default=...): ...
- def decomposition(self, unichr): ...
- def digit(self, chr, default=...): ...
- def east_asian_width(self, unichr): ...
- def lookup(self, name): ...
- def mirrored(self, unichr): ...
- def name(self, chr, default=...): ...
- def normalize(self, form, unistr): ...
- def numeric(self, chr, default=...): ...
diff --git a/typeshed/stdlib/3/unittest/__init__.pyi b/typeshed/stdlib/3/unittest/__init__.pyi
index 15ebff6..f27ee66 100644
--- a/typeshed/stdlib/3/unittest/__init__.pyi
+++ b/typeshed/stdlib/3/unittest/__init__.pyi
@@ -2,7 +2,7 @@
from typing import (
Any, Callable, Dict, Iterable, Iterator, List, Optional, Pattern, Sequence,
- Set, TextIO, Tuple, Type, TypeVar, Union,
+ Set, FrozenSet, TextIO, Tuple, Type, TypeVar, Union, Generic,
overload,
)
import logging
@@ -12,13 +12,15 @@ from contextlib import ContextManager
_T = TypeVar('_T')
-_FT = TypeVar('_FT', Callable[[Any], Any])
+_FT = TypeVar('_FT', bound=Callable[[Any], Any])
+_E = TypeVar('_E', bound=Exception)
-def skip(reason: str) -> Callable[[_FT], _FT]: ...
-def skipIf(condition: object, reason: str) -> Callable[[_FT], _FT]: ...
-def skipUnless(condition: object, reason: str) -> Callable[[_FT], _FT]: ...
def expectedFailure(func: _FT) -> _FT: ...
+# TODO: Once python/mypy#1551 is fixed, the following need _FT instead of Any
+def skip(reason: str) -> Callable[[Any], Any]: ...
+def skipIf(condition: object, reason: str) -> Callable[[Any], Any]: ...
+def skipUnless(condition: object, reason: str) -> Callable[[Any], Any]: ...
class SkipTest(Exception):
def __init__(self, reason: str) -> None: ...
@@ -70,25 +72,25 @@ class TestCase:
@overload
def assertRaises(self, # type: ignore
exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
- callable: Callable[..., Any] = ...,
+ callable: Callable[..., Any],
*args: Any, **kwargs: Any) -> None: ...
@overload
def assertRaises(self,
- exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
- msg: Any = ...) -> _AssertRaisesContext: ...
+ exception: Union[Type[_E], Tuple[Type[_E], ...]],
+ msg: Any = ...) -> _AssertRaisesContext[_E]: ...
@overload
def assertRaisesRegex(self, # type: ignore
exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
- callable: Callable[..., Any] = ...,
+ callable: Callable[..., Any],
*args: Any, **kwargs: Any) -> None: ...
@overload
def assertRaisesRegex(self,
- exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
- msg: Any = ...) -> _AssertRaisesContext: ...
+ exception: Union[Type[_E], Tuple[Type[_E], ...]],
+ msg: Any = ...) -> _AssertRaisesContext[_E]: ...
@overload
def assertWarns(self, # type: ignore
exception: Union[Type[Warning], Tuple[Type[Warning], ...]],
- callable: Callable[..., Any] = ...,
+ callable: Callable[..., Any],
*args: Any, **kwargs: Any) -> None: ...
@overload
def assertWarns(self,
@@ -97,7 +99,7 @@ class TestCase:
@overload
def assertWarnsRegex(self, # type: ignore
exception: Union[Type[Warning], Tuple[Type[Warning], ...]],
- callable: Callable[..., Any] = ...,
+ callable: Callable[..., Any],
*args: Any, **kwargs: Any) -> None: ...
@overload
def assertWarnsRegex(self,
@@ -117,7 +119,7 @@ class TestCase:
msg: Any = ...) -> None: ...
def assertNotRegex(self, text: str, regex: Union[str, Pattern[str]],
msg: Any = ...) -> None: ...
- def assertCountEqual(self, first: Sequence[Any], second: Sequence[Any],
+ def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any],
msg: Any = ...) -> None: ...
def addTypeEqualityFunc(self, typeobj: Type[Any],
function: Callable[..., None]) -> None: ...
@@ -130,8 +132,8 @@ class TestCase:
msg: Any = ...) -> None: ...
def assertTupleEqual(self, first: Tuple[Any, ...], second: Tuple[Any, ...],
msg: Any = ...) -> None: ...
- def assertSetEqual(self, first: Set[Any], second: Set[Any],
- msg: Any = ...) -> None: ...
+ def assertSetEqual(self, first: Union[Set[Any], FrozenSet[Any]],
+ second: Union[Set[Any], FrozenSet[Any]], msg: Any = ...) -> None: ...
def assertDictEqual(self, first: Dict[Any, Any], second: Dict[Any, Any],
msg: Any = ...) -> None: ...
def fail(self, msg: Any = ...) -> None: ...
@@ -159,8 +161,8 @@ class TestCase:
*args: Any, **kwargs: Any) -> None: ...
@overload
def failUnlessRaises(self,
- exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
- msg: Any = ...) -> _AssertRaisesContext: ...
+ exception: Union[Type[_E], Tuple[Type[_E], ...]],
+ msg: Any = ...) -> _AssertRaisesContext[_E]: ...
def failUnlessAlmostEqual(self, first: float, second: float,
places: int = ..., msg: Any = ...) -> None: ...
def assertAlmostEquals(self, first: float, second: float, places: int = ...,
@@ -179,8 +181,8 @@ class TestCase:
*args: Any, **kwargs: Any) -> None: ...
@overload
def assertRaisesRegexp(self,
- exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
- msg: Any = ...) -> _AssertRaisesContext: ...
+ exception: Union[Type[_E], Tuple[Type[_E], ...]],
+ msg: Any = ...) -> _AssertRaisesContext[_E]: ...
class FunctionTestCase(TestCase):
def __init__(self, testFunc: Callable[[], None],
@@ -188,9 +190,9 @@ class FunctionTestCase(TestCase):
tearDown: Optional[Callable[[], None]] = ...,
description: Optional[str] = ...) -> None: ...
-class _AssertRaisesContext:
- exception = ... # type: Exception
- def __enter__(self) -> _AssertRaisesContext: ...
+class _AssertRaisesContext(Generic[_E]):
+ exception = ... # type: _E
+ def __enter__(self) -> _AssertRaisesContext[_E]: ...
def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception],
exc_tb: Optional[TracebackType]) -> bool: ...
diff --git a/typeshed/stdlib/3/unittest/mock.pyi b/typeshed/stdlib/3/unittest/mock.pyi
index 3d54fe9..84fe40e 100644
--- a/typeshed/stdlib/3/unittest/mock.pyi
+++ b/typeshed/stdlib/3/unittest/mock.pyi
@@ -10,35 +10,34 @@ if sys.version_info >= (3, 3):
class _SentinelObject:
name = ... # type: Any
- def __init__(self, name): ...
+ def __init__(self, name: Any) -> None: ...
class _Sentinel:
- def __init__(self): ...
- def __getattr__(self, name): ...
+ def __init__(self) -> None: ...
+ def __getattr__(self, name: str) -> Any: ...
sentinel = ... # type: Any
DEFAULT = ... # type: Any
class _CallList(list):
- def __contains__(self, value): ...
+ def __contains__(self, value: Any) -> bool: ...
class _MockIter:
obj = ... # type: Any
- def __init__(self, obj): ...
- def __iter__(self): ...
- def __next__(self): ...
+ def __init__(self, obj: Any) -> None: ...
+ def __iter__(self) -> Any: ...
+ def __next__(self) -> Any: ...
class Base:
- def __init__(self, *args, **kwargs): ...
+ def __init__(self, *args: Any, **kwargs: Any) -> None: ...
- class NonCallableMock(Base):
- def __new__(cls, *args, **kw): ...
- def __init__(self, spec=None, wraps=None, name=None, spec_set=None, parent=None, _spec_state=None, _new_name='', _new_parent=None, _spec_as_instance=False, _eat_self=None, unsafe=False, **kwargs): ...
- def attach_mock(self, mock, attribute): ...
- def mock_add_spec(self, spec, spec_set=False): ...
+ class NonCallableMock(Any):
+ def __new__(cls, *args: Any, **kw: Any) -> Any: ...
+ def __init__(self, spec: Any = None, wraps: Any = None, name: Any = None, spec_set: Any = None, parent: Any = None, _spec_state: Any = None, _new_name: Any ='', _new_parent: Any = None, _spec_as_instance: Any = False, _eat_self: Any = None, unsafe: Any = False, **kwargs: Any) -> None: ...
+ def attach_mock(self, mock: Any, attribute: Any) -> Any: ...
+ def mock_add_spec(self, spec: Any, spec_set: Any = False) -> Any: ...
return_value = ... # type: Any
- @property
- def __class__(self): ...
+ __class__ = ... # type: type
called = ... # type: Any
call_count = ... # type: Any
call_args = ... # type: Any
@@ -46,24 +45,25 @@ if sys.version_info >= (3, 3):
mock_calls = ... # type: Any
side_effect = ... # type: Any
method_calls = ... # type: Any
- def reset_mock(self, visited=None): ...
- def configure_mock(self, **kwargs): ...
- def __getattr__(self, name): ...
- def __dir__(self): ...
- def __setattr__(self, name, value): ...
- def __delattr__(self, name): ...
- def assert_not_called(_mock_self): ...
- def assert_called_with(_mock_self, *args, **kwargs): ...
- def assert_called_once_with(_mock_self, *args, **kwargs): ...
- def assert_has_calls(self, calls, any_order=False): ...
- def assert_any_call(self, *args, **kwargs): ...
+ def reset_mock(self, visited: bool = None) -> None: ...
+ def configure_mock(self, **kwargs: Any) -> None: ...
+ def __getattr__(self, name: Any) -> Any: ...
+ def __dir__(self) -> Any: ...
+ def __setattr__(self, name: Any, value: Any) -> None: ...
+ def __delattr__(self, name: Any) -> None: ...
+ def assert_not_called(_mock_self) -> None: ...
+ def assert_called_with(_mock_self, *args: Any, **kwargs: Any) -> None: ...
+ def assert_called_once_with(_mock_self, *args: Any, **kwargs: Any) -> None: ...
+ def assert_has_calls(self, calls: Any, any_order: bool = False) -> None: ...
+ def assert_any_call(self, *args: Any, **kwargs: Any) -> None: ...
class CallableMixin(Base):
side_effect = ... # type: Any
- def __init__(self, spec=None, side_effect=None, return_value=..., wraps=None, name=None, spec_set=None, parent=None, _spec_state=None, _new_name='', _new_parent=None, **kwargs): ...
- def __call__(_mock_self, *args, **kwargs): ...
+ def __init__(self, spec: Any = None, side_effect: Any = None, return_value: Any = ..., wraps: Any = None, name: Any = None, spec_set: Any = None, parent: Any = None, _spec_state: Any = None, _new_name: Any = '', _new_parent: Any = None, **kwargs: Any) -> None: ...
+ def __call__(_mock_self, *args: Any, **kwargs: Any) -> Any: ...
- class Mock(CallableMixin, NonCallableMock): ...
+ class Mock(CallableMixin, NonCallableMock):
+ def __init__(self) -> None: ...
class _patch:
attribute_name = ... # type: Any
@@ -78,74 +78,80 @@ if sys.version_info >= (3, 3):
autospec = ... # type: Any
kwargs = ... # type: Any
additional_patchers = ... # type: Any
- def __init__(self, getter, attribute, new, spec, create, spec_set, autospec, new_callable, kwargs): ...
- def copy(self): ...
- def __call__(self, func): ...
- def decorate_class(self, klass): ...
- def decorate_callable(self, func): ...
- def get_original(self): ...
+ def __init__(self, getter: Any, attribute: Any, new: Any, spec: Any, create: Any, spec_set: Any, autospec: Any, new_callable: Any, kwargs: Any) -> None: ...
+ def copy(self) -> Any: ...
+ def __call__(self, func: Any) -> Any: ...
+ def decorate_class(self, klass: Any) -> Any: ...
+ def decorate_callable(self, func: Any) -> Any: ...
+ def get_original(self) -> Any: ...
target = ... # type: Any
temp_original = ... # type: Any
is_local = ... # type: Any
- def __enter__(self): ...
- def __exit__(self, *exc_info): ...
- def start(self): ...
- def stop(self): ...
+ def __enter__(self) -> Any: ...
+ def __exit__(self, *exc_info: Any) -> Any: ...
+ def start(self) -> Any: ...
+ def stop(self) -> Any: ...
- def patch(target, new=..., spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs): ...
+ class _patcher:
+ def __call__(self, target: Any, new: Any = None, spec: Any = None, create: Any = False, spec_set: Any = None, autospec: Any = None, new_callable: Any = None, **kwargs: Any) -> Any: ...
+ def object(self, target: Any, attribute: str, new: Any = None, spec: Any = None, create: Any = False, spec_set: Any = None, autospec: Any = None, new_callable: Any = None, **kwargs: Any) -> _patch: ...
+
+ patch = ... # type: _patcher
class _patch_dict:
in_dict = ... # type: Any
values = ... # type: Any
clear = ... # type: Any
- def __init__(self, in_dict, values=..., clear=False, **kwargs): ...
- def __call__(self, f): ...
- def decorate_class(self, klass): ...
- def __enter__(self): ...
- def __exit__(self, *args): ...
+ def __init__(self, in_dict: Any, values: Any = ..., clear: Any = False, **kwargs: Any) -> None: ...
+ def __call__(self, f: Any) -> Any: ...
+ def decorate_class(self, klass: Any) -> Any: ...
+ def __enter__(self) -> Any: ...
+ def __exit__(self, *args: Any) -> Any: ...
start = ... # type: Any
stop = ... # type: Any
class MagicMixin:
- def __init__(self, *args, **kw): ...
+ def __init__(self, *args: Any, **kw: Any) -> None: ...
class NonCallableMagicMock(MagicMixin, NonCallableMock):
- def mock_add_spec(self, spec, spec_set=False): ...
+ def __init__(self) -> None: ...
+ def mock_add_spec(self, spec: Any, spec_set: Any = False) -> Any: ...
class MagicMock(MagicMixin, Mock):
- def mock_add_spec(self, spec, spec_set=False): ...
+ def __init__(self) -> None: ...
+ def mock_add_spec(self, spec: Any, spec_set: Any = False) -> Any: ...
class MagicProxy:
name = ... # type: Any
parent = ... # type: Any
- def __init__(self, name, parent): ...
- def __call__(self, *args, **kwargs): ...
- def create_mock(self): ...
- def __get__(self, obj, _type=None): ...
+ def __init__(self, name: Any, parent: Any) -> None: ...
+ def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
+ def create_mock(self) -> Any: ...
+ def __get__(self, obj: Any, _type: Any = None) -> Any: ...
class _ANY:
- def __eq__(self, other): ...
- def __ne__(self, other): ...
+ def __eq__(self, other: Any) -> bool: ...
+ def __ne__(self, other: Any) -> bool: ...
ANY = ... # type: Any
class _Call(tuple):
- def __new__(cls, value=..., name=None, parent=None, two=False, from_kall=True): ...
+ def __new__(cls, value: Any = ..., name: Any = None, parent: Any = None, two: bool = False, from_kall: bool = True) -> Any: ...
name = ... # type: Any
parent = ... # type: Any
from_kall = ... # type: Any
- def __init__(self, value=..., name=None, parent=None, two=False, from_kall=True): ...
- def __eq__(self, other): ...
+ def __init__(self, value: Any = ..., name: Any = None, parent: Any = None, two: bool = False, from_kall: bool = True) -> None: ...
+ def __eq__(self, other: Any) -> bool: ...
__ne__ = ... # type: Any
- def __call__(self, *args, **kwargs): ...
- def __getattr__(self, attr): ...
- def count(self, *args, **kwargs): ...
- def index(self, *args, **kwargs): ...
- def call_list(self): ...
+ def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
+ def __getattr__(self, attr: Any) -> Any: ...
+ def count(self, *args: Any, **kwargs: Any) -> Any: ...
+ def index(self, *args: Any, **kwargs: Any) -> Any: ...
+ def call_list(self) -> Any: ...
call = ... # type: Any
- def create_autospec(spec, spec_set=False, instance=False, _parent=None, _name=None, **kwargs): ...
+ def create_autospec(spec: Any, spec_set: Any = False, instance: Any = False, _parent: Any = None, _name: Any = None, **kwargs: Any) -> Any: ...
class _SpecState:
spec = ... # type: Any
@@ -154,10 +160,10 @@ if sys.version_info >= (3, 3):
parent = ... # type: Any
instance = ... # type: Any
name = ... # type: Any
- def __init__(self, spec, spec_set=False, parent=None, name=None, ids=None, instance=False): ...
+ def __init__(self, spec: Any, spec_set: Any = False, parent: Any = None, name: Any = None, ids: Any = None, instance: Any = False) -> None: ...
- def mock_open(mock=None, read_data=''): ...
+ def mock_open(mock: Any = None, read_data: Any = '') -> Any: ...
class PropertyMock(Mock):
- def __get__(self, obj, obj_type): ...
- def __set__(self, obj, val): ...
+ def __get__(self, obj: Any, obj_type: Any) -> Any: ...
+ def __set__(self, obj: Any, val: Any) -> Any: ...
diff --git a/typeshed/stdlib/3/urllib/parse.pyi b/typeshed/stdlib/3/urllib/parse.pyi
index 25aec92..637fa5f 100644
--- a/typeshed/stdlib/3/urllib/parse.pyi
+++ b/typeshed/stdlib/3/urllib/parse.pyi
@@ -2,6 +2,8 @@
from typing import Any, List, Dict, Tuple, AnyStr, Generic, overload, Sequence, Mapping, Union, NamedTuple, Callable
import sys
+_Str = Union[bytes, str]
+
__all__ = (
'urlparse',
'urlunparse',
@@ -97,25 +99,26 @@ class SplitResultBytes(_SplitResultBytesBase, _NetlocResultMixinBytes): ...
class ParseResultBytes(_ParseResultBytesBase, _NetlocResultMixinBytes): ...
-def parse_qs(qs: str, keep_blank_values: bool = ..., strict_parsing: bool = ..., encoding: str = ..., errors: str = ...) -> Dict[str, List[str]]: ...
+def parse_qs(qs: AnyStr, keep_blank_values: bool = ..., strict_parsing: bool = ..., encoding: str = ..., errors: str = ...) -> Dict[AnyStr, List[AnyStr]]: ...
+
+def parse_qsl(qs: AnyStr, keep_blank_values: bool = ..., strict_parsing: bool = ..., encoding: str = ..., errors: str = ...) -> List[Tuple[AnyStr, AnyStr]]: ...
-def parse_qsl(qs: str, keep_blank_values: bool = ..., strict_parsing: bool = ..., encoding: str = ..., errors: str = ...) -> List[Tuple[str, str]]: ...
@overload
-def quote(string: str, safe: AnyStr = ..., encoding: str = ..., errors: str = ...) -> str: ...
+def quote(string: str, safe: _Str = ..., encoding: str = ..., errors: str = ...) -> str: ...
@overload
-def quote(string: bytes, safe: AnyStr = ...) -> str: ...
+def quote(string: bytes, safe: _Str = ...) -> str: ...
-def quote_from_bytes(bs: bytes, safe: AnyStr = ...) -> str: ...
+def quote_from_bytes(bs: bytes, safe: _Str = ...) -> str: ...
@overload
-def quote_plus(string: str, safe: AnyStr = ..., encoding: str = ..., errors: str = ...) -> str: ...
+def quote_plus(string: str, safe: _Str = ..., encoding: str = ..., errors: str = ...) -> str: ...
@overload
-def quote_plus(string: bytes, safe: AnyStr = ...) -> str: ...
+def quote_plus(string: bytes, safe: _Str = ...) -> str: ...
def unquote(string: str, encoding: str = ..., errors: str = ...) -> str: ...
-def unquote_to_bytes(string: AnyStr) -> bytes: ...
+def unquote_to_bytes(string: _Str) -> bytes: ...
def unquote_plus(string: str, encoding: str = ..., errors: str = ...) -> str: ...
diff --git a/typeshed/stdlib/3/urllib/request.pyi b/typeshed/stdlib/3/urllib/request.pyi
index b8d2190..dca209f 100644
--- a/typeshed/stdlib/3/urllib/request.pyi
+++ b/typeshed/stdlib/3/urllib/request.pyi
@@ -30,6 +30,7 @@ def url2pathname(path: str) -> str: ...
def pathname2url(path: str) -> str: ...
def getproxies() -> Dict[str, str]: ...
def parse_http_list(s: str) -> List[str]: ...
+def parse_keqv_list(l: List[str]) -> Dict[str, str]: ...
class Request:
if sys.version_info >= (3, 4):
@@ -108,7 +109,7 @@ class HTTPPasswordMgr:
def find_user_password(self, realm: str, authuri: str) \
-> Tuple[Optional[str], Optional[str]]: ...
-class HTTPPasswordMgrWithDefaultRealm:
+class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):
def add_password(self, realm: str, uri: Union[str, Sequence[str]],
user: str, passwd: str) -> None: ...
def find_user_password(self, realm: str, authuri: str) \
@@ -138,10 +139,15 @@ class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
hdrs: Mapping[str, str]) -> Optional[_UrlopenRet]: ...
class AbstractDigestAuthHandler:
- def __init__(self,
- password_mgr: Optional[HTTPPasswordMgr] = ...) -> None: ...
+ def __init__(self, passwd: Optional[HTTPPasswordMgr] = ...) -> None: ...
+ def reset_retry_count(self) -> None: ...
def http_error_auth_reqed(self, auth_header: str, host: str, req: Request,
headers: Mapping[str, str]) -> None: ...
+ def retry_http_digest_auth(self, req: Request, auth: str) -> Optional[_UrlopenRet]: ...
+ def get_cnonce(self, nonce: str) -> str: ...
+ def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str: ...
+ def get_algorithm_impls(self, algorithm: str) -> Tuple[Callable[[str], str], Callable[[str, str], str]]: ...
+ def get_entity_digest(self, data: Optional[bytes], chal: Mapping[str, str]) -> Optional[str]: ...
class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
def http_error_401(self, req: Request, fp: IO[str], code: int, msg: int,
diff --git a/typeshed/stdlib/3/urllib/response.pyi b/typeshed/stdlib/3/urllib/response.pyi
index 033bbaa..2c16cb6 100644
--- a/typeshed/stdlib/3/urllib/response.pyi
+++ b/typeshed/stdlib/3/urllib/response.pyi
@@ -4,5 +4,8 @@ from typing import BinaryIO, Mapping, Optional
from types import TracebackType
class addinfourl(BinaryIO):
+ headers = ... # type: Mapping[str, str]
+ url = ... # type: str
+ code = ... # type: int
def info(self) -> Mapping[str, str]: ...
def geturl(self) -> str: ...
diff --git a/typeshed/stdlib/3/uuid.pyi b/typeshed/stdlib/3/uuid.pyi
deleted file mode 100644
index efd48e5..0000000
--- a/typeshed/stdlib/3/uuid.pyi
+++ /dev/null
@@ -1,73 +0,0 @@
-# Stubs for uuid
-
-from typing import Tuple
-
-Int = __builtins__.int
-Bytes = __builtins__.bytes
-FieldsType = Tuple[Int, Int, Int, Int, Int, Int]
-
-class UUID:
- def __init__(self, hex: str=..., bytes: Bytes=..., bytes_le: Bytes=..., fields: FieldsType=..., int: Int=..., version: Int=...) -> None: ...
-
- @property
- def bytes(self) -> Bytes: ...
-
- @property
- def bytes_le(self) -> Bytes: ...
-
- @property
- def clock_seq(self) -> Int: ...
-
- @property
- def clock_seq_hi_variant(self) -> Int: ...
-
- @property
- def clock_seq_low(self) -> Int: ...
-
- @property
- def fields(self) -> FieldsType: ...
-
- @property
- def hex(self) -> str: ...
-
- @property
- def int(self) -> Int: ...
-
- @property
- def node(self) -> Int: ...
-
- @property
- def time(self) -> Int: ...
-
- @property
- def time_hi_version(self) -> Int: ...
-
- @property
- def time_low(self) -> Int: ...
-
- @property
- def time_mid(self) -> Int: ...
-
- @property
- def urn(self) -> str: ...
-
- @property
- def variant(self) -> str: ...
-
- @property
- def version(self) -> str: ...
-
-def getnode() -> Int: ...
-def uuid1(node: Int=..., clock_seq: Int=...) -> UUID: ...
-def uuid3(namespace: UUID, name: str) -> UUID: ...
-def uuid4() -> UUID: ...
-def uuid5(namespace: UUID, name: str) -> UUID: ...
-
-NAMESPACE_DNS = ... # type: UUID
-NAMESPACE_URL = ... # type: UUID
-NAMESPACE_OID = ... # type: UUID
-NAMESPACE_X500 = ... # type: UUID
-RESERVED_NCS = ... # type: str
-RFC_4122 = ... # type: str
-RESERVED_MICROSOFT = ... # type: str
-RESERVED_FUTURE = ... # type: str
diff --git a/typeshed/tests/mypy_selftest.py b/typeshed/tests/mypy_selftest.py
new file mode 100755
index 0000000..3dc3928
--- /dev/null
+++ b/typeshed/tests/mypy_selftest.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python3
+"""Script to run mypy's test suite against this version of typeshed."""
+
+from pathlib import Path
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+if __name__ == '__main__':
+ with tempfile.TemporaryDirectory() as tempdir:
+ dirpath = Path(tempdir)
+ subprocess.run(['git', 'clone', '--depth', '1', 'git://github.com/python/mypy',
+ str(dirpath / 'mypy')], check=True)
+ subprocess.run([sys.executable, '-m', 'pip', 'install', '-U', '-r',
+ str(dirpath / 'mypy/test-requirements.txt')], check=True)
+ shutil.copytree('stdlib', str(dirpath / 'mypy/typeshed/stdlib'))
+ shutil.copytree('third_party', str(dirpath / 'mypy/typeshed/third_party'))
+ try:
+ subprocess.run(['./runtests.py'], cwd=str(dirpath / 'mypy'), check=True)
+ except subprocess.CalledProcessError as e:
+ print('mypy tests failed', file=sys.stderr)
+ sys.exit(e.returncode)
+ else:
+ print('mypy tests succeeded', file=sys.stderr)
+ sys.exit(0)
diff --git a/typeshed/tests/mypy_test.py b/typeshed/tests/mypy_test.py
index d474009..841d067 100755
--- a/typeshed/tests/mypy_test.py
+++ b/typeshed/tests/mypy_test.py
@@ -83,7 +83,7 @@ def main():
print("Cannot import mypy. Did you install it?")
sys.exit(1)
- versions = [(3, 6), (3, 5), (3, 4), (3, 3), (3, 2), (2, 7)]
+ versions = [(3, 6), (3, 5), (3, 4), (3, 3), (2, 7)]
if args.python_version:
versions = [v for v in versions
if any(('%d.%d' % v).startswith(av) for av in args.python_version)]
@@ -124,8 +124,6 @@ def main():
runs += 1
flags = ['--python-version', '%d.%d' % (major, minor)]
flags.append('--strict-optional')
- if (major, minor) >= (3, 6):
- flags.append('--fast-parser')
# flags.append('--warn-unused-ignores') # Fast parser and regular parser disagree.
sys.argv = ['mypy'] + flags + files
if args.verbose:
diff --git a/typeshed/third_party/2/concurrent/futures/__init__.pyi b/typeshed/third_party/2/concurrent/futures/__init__.pyi
index 8e915ae..49c0de6 100644
--- a/typeshed/third_party/2/concurrent/futures/__init__.pyi
+++ b/typeshed/third_party/2/concurrent/futures/__init__.pyi
@@ -1,19 +1,23 @@
-from typing import TypeVar, Generic, Any, Iterable, Iterator, Callable, Tuple, Union
+from typing import TypeVar, Generic, Any, Iterable, Iterator, Callable, Optional, Set, Tuple, Union
_T = TypeVar('_T')
+class Error(Exception): ...
+class CancelledError(Error): ...
+class TimeoutError(Error): ...
+
class Future(Generic[_T]):
def cancel(self) -> bool: ...
def cancelled(self) -> bool: ...
def running(self) -> bool: ...
def done(self) -> bool: ...
def result(self, timeout: float = ...) -> _T: ...
- def exception(self, timeout: float = ...) -> Exception: ...
+ def exception(self, timeout: float = ...) -> Any: ...
def add_done_callback(self, fn: Callable[[Future], Any]) -> None: ...
- def set_running_or_notify_cancel(self) -> None: ...
+ def set_running_or_notify_cancel(self) -> bool: ...
def set_result(self, result: _T) -> None: ...
- def set_exception(self, exception: Exception) -> None: ...
+ def set_exception(self, exception: Any) -> None: ...
class Executor:
def submit(self, fn: Callable[..., _T], *args: Any, **kwargs: Any) -> Future[_T]: ...
@@ -28,7 +32,7 @@ class ThreadPoolExecutor(Executor):
class ProcessPoolExecutor(Executor):
def __init__(self, max_workers: Union[int, None] = ...) -> None: ...
-def wait(fs: Iterable[Future], timeout: float = ..., return_when: str = ...) -> Tuple[Iterable[Future], Iterable[Future]]: ...
+def wait(fs: Iterable[Future], timeout: Optional[float] = ..., return_when: str = ...) -> Tuple[Set[Future], Set[Future]]: ...
FIRST_COMPLETED = ... # type: str
FIRST_EXCEPTION = ... # type: str
diff --git a/typeshed/third_party/2/dateutil/relativedelta.pyi b/typeshed/third_party/2/dateutil/relativedelta.pyi
index 3a99eaf..ea6592b 100644
--- a/typeshed/third_party/2/dateutil/relativedelta.pyi
+++ b/typeshed/third_party/2/dateutil/relativedelta.pyi
@@ -1,8 +1,11 @@
-from typing import Any, Optional, Union
+from typing import overload, Any, List, Optional, SupportsFloat, TypeVar, Union
from datetime import date, datetime, timedelta
__all__ = ... # type: List[str]
+_SelfT = TypeVar('_SelfT', bound=relativedelta)
+_DateT = TypeVar('_DateT', date, datetime)
+
class weekday(object):
def __init__(self, weekday: int, n: Optional[int]=...) -> None: ...
@@ -49,38 +52,40 @@ class relativedelta(object):
@weeks.setter
def weeks(self, value: int) -> None: ...
- def normalized(self) -> 'relativedelta': ...
-
- def __add__(
- self,
- other: Union['relativedelta', timedelta, date, datetime]) -> 'relativedelta': ...
-
- def __radd__(
- self,
- other: Any) -> 'relativedelta': ...
-
- def __rsub__(
- self,
- other: Any) -> 'relativedelta': ...
-
- def __sub__(self, other: 'relativedelta') -> 'relativedelta': ...
-
- def __neg__(self) -> 'relativedelta': ...
+ def normalized(self: _SelfT) -> _SelfT: ...
+
+ @overload
+ def __add__(self: _SelfT, other: relativedelta) -> _SelfT: ...
+ @overload
+ def __add__(self: _SelfT, other: timedelta) -> _SelfT: ...
+ @overload
+ def __add__(self, other: _DateT) -> _DateT: ...
+ @overload
+ def __radd__(self: _SelfT, other: timedelta) -> _SelfT: ...
+ @overload
+ def __radd__(self, other: _DateT) -> _DateT: ...
+ @overload
+ def __rsub__(self: _SelfT, other: timedelta) -> _SelfT: ...
+ @overload
+ def __rsub__(self, other: _DateT) -> _DateT: ...
+ def __sub__(self: _SelfT, other: relativedelta) -> _SelfT: ...
+
+ def __neg__(self: _SelfT) -> _SelfT: ...
def __bool__(self) -> bool: ...
def __nonzero__(self) -> bool: ...
- def __mul__(self, other: float) -> 'relativedelta': ...
+ def __mul__(self: _SelfT, other: SupportsFloat) -> _SelfT: ...
- def __rmul__(self, other: float) -> 'relativedelta': ...
+ def __rmul__(self: _SelfT, other: SupportsFloat) -> _SelfT: ...
def __eq__(self, other) -> bool: ...
def __ne__(self, other: object) -> bool: ...
- def __div__(self, other: float) -> 'relativedelta': ...
+ def __div__(self: _SelfT, other: SupportsFloat) -> _SelfT: ...
- def __truediv__(self, other: float) -> 'relativedelta': ...
+ def __truediv__(self: _SelfT, other: SupportsFloat) -> _SelfT: ...
def __repr__(self) -> str: ...
diff --git a/typeshed/third_party/2/dateutil/tz/tz.pyi b/typeshed/third_party/2/dateutil/tz/tz.pyi
index ccd3819..5027e62 100644
--- a/typeshed/third_party/2/dateutil/tz/tz.pyi
+++ b/typeshed/third_party/2/dateutil/tz/tz.pyi
@@ -2,7 +2,7 @@
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
-from typing import Any, Optional, Union, IO, Tuple
+from typing import Any, IO, List, Optional, Tuple, Union
import datetime
from ._common import tzname_in_python2 as tzname_in_python2, _tzinfo as _tzinfo
from ._common import tzrangebase as tzrangebase, enfold as enfold
diff --git a/typeshed/third_party/2/google/protobuf/descriptor.pyi b/typeshed/third_party/2/google/protobuf/descriptor.pyi
index 7e9bc67..1d26f98 100644
--- a/typeshed/third_party/2/google/protobuf/descriptor.pyi
+++ b/typeshed/third_party/2/google/protobuf/descriptor.pyi
@@ -13,7 +13,7 @@ class DescriptorMetaclass(type):
def __instancecheck__(cls, obj): ...
class DescriptorBase:
- __metaclass__ = ... # type: Any
+ __metaclass__ = DescriptorMetaclass
has_options = ... # type: Any
def __init__(self, options, options_class_name) -> None: ...
def GetOptions(self): ...
diff --git a/typeshed/third_party/2/requests/__init__.pyi b/typeshed/third_party/2/requests/__init__.pyi
deleted file mode 100644
index 51e4131..0000000
--- a/typeshed/third_party/2/requests/__init__.pyi
+++ /dev/null
@@ -1,39 +0,0 @@
-# Stubs for requests (based on version 2.6.0, Python 3)
-
-from typing import Any
-from requests import models
-from requests import api
-from requests import sessions
-from requests import status_codes
-from requests import exceptions
-import logging
-
-__title__ = ... # type: Any
-__build__ = ... # type: Any
-__license__ = ... # type: Any
-__copyright__ = ... # type: Any
-__version__ = ... # type: Any
-
-Request = models.Request
-Response = models.Response
-PreparedRequest = models.PreparedRequest
-request = api.request
-get = api.get
-head = api.head
-post = api.post
-patch = api.patch
-put = api.put
-delete = api.delete
-options = api.options
-session = sessions.session
-Session = sessions.Session
-codes = status_codes.codes
-RequestException = exceptions.RequestException
-Timeout = exceptions.Timeout
-URLRequired = exceptions.URLRequired
-TooManyRedirects = exceptions.TooManyRedirects
-HTTPError = exceptions.HTTPError
-ConnectionError = exceptions.ConnectionError
-
-class NullHandler(logging.Handler):
- def emit(self, record): ...
diff --git a/typeshed/third_party/2/requests/adapters.pyi b/typeshed/third_party/2/requests/adapters.pyi
deleted file mode 100644
index 00cc000..0000000
--- a/typeshed/third_party/2/requests/adapters.pyi
+++ /dev/null
@@ -1,72 +0,0 @@
-# Stubs for requests.adapters (Python 3)
-
-from typing import Any, Container, Union, Tuple
-from . import models
-from .packages.urllib3 import poolmanager
-from .packages.urllib3 import response
-from .packages.urllib3.util import retry
-from . import compat
-from . import utils
-from . import structures
-from .packages.urllib3 import exceptions as urllib3_exceptions
-from . import cookies
-from . import exceptions
-from . import auth
-
-PreparedRequest = models.PreparedRequest
-Response = models.Response
-PoolManager = poolmanager.PoolManager
-proxy_from_url = poolmanager.proxy_from_url
-HTTPResponse = response.HTTPResponse
-Retry = retry.Retry
-DEFAULT_CA_BUNDLE_PATH = utils.DEFAULT_CA_BUNDLE_PATH
-get_encoding_from_headers = utils.get_encoding_from_headers
-prepend_scheme_if_needed = utils.prepend_scheme_if_needed
-get_auth_from_url = utils.get_auth_from_url
-urldefragauth = utils.urldefragauth
-CaseInsensitiveDict = structures.CaseInsensitiveDict
-ConnectTimeoutError = urllib3_exceptions.ConnectTimeoutError
-MaxRetryError = urllib3_exceptions.MaxRetryError
-ProtocolError = urllib3_exceptions.ProtocolError
-ReadTimeoutError = urllib3_exceptions.ReadTimeoutError
-ResponseError = urllib3_exceptions.ResponseError
-extract_cookies_to_jar = cookies.extract_cookies_to_jar
-ConnectionError = exceptions.ConnectionError
-ConnectTimeout = exceptions.ConnectTimeout
-ReadTimeout = exceptions.ReadTimeout
-SSLError = exceptions.SSLError
-ProxyError = exceptions.ProxyError
-RetryError = exceptions.RetryError
-
-DEFAULT_POOLBLOCK = ... # type: Any
-DEFAULT_POOLSIZE = ... # type: Any
-DEFAULT_RETRIES = ... # type: Any
-
-class BaseAdapter:
- def __init__(self) -> None: ...
- def send(self, request: PreparedRequest, stream=False,
- timeout: Union[None, float, Tuple[float, float]]=None,
- verify=False,
- cert: Union[None, Union[str, unicode, bytes], Container[Union[str, unicode]]]=None) -> Response: ...
- def close(self) -> None: ...
-
-class HTTPAdapter(BaseAdapter):
- __attrs__ = ... # type: Any
- max_retries = ... # type: Any
- config = ... # type: Any
- proxy_manager = ... # type: Any
- def __init__(self, pool_connections=..., pool_maxsize=..., max_retries=...,
- pool_block=...): ...
- poolmanager = ... # type: Any
- def init_poolmanager(self, connections, maxsize, block=..., **pool_kwargs): ...
- def proxy_manager_for(self, proxy, **proxy_kwargs): ...
- def cert_verify(self, conn, url, verify, cert): ...
- def build_response(self, req, resp): ...
- def get_connection(self, url, proxies=...): ...
- def close(self): ...
- def request_url(self, request, proxies): ...
- def add_headers(self, request, **kwargs): ...
- def proxy_headers(self, proxy): ...
- # TODO: "request" is not actually optional, modified to please mypy.
- def send(self, request=..., stream=..., timeout=..., verify=..., cert=...,
- proxies=...): ...
diff --git a/typeshed/third_party/2/requests/api.pyi b/typeshed/third_party/2/requests/api.pyi
deleted file mode 100644
index 9f041d2..0000000
--- a/typeshed/third_party/2/requests/api.pyi
+++ /dev/null
@@ -1,26 +0,0 @@
-# Stubs for requests.api (Python 2)
-
-from typing import Union, Optional, Iterable, Mapping, Tuple
-
-from .models import Response
-
-ParamsMappingValueType = Union[str, unicode, int, float, Iterable[Union[str, unicode, int, float]]]
-
-def request(method: str, url: str, **kwargs) -> Response: ...
-def get(url: Union[str, unicode],
- params: Optional[
- Union[Mapping[Union[str, unicode, int, float], ParamsMappingValueType],
- Union[str, unicode],
- Tuple[Union[str, unicode, int, float], ParamsMappingValueType],
- Mapping[str, ParamsMappingValueType],
- Mapping[unicode, ParamsMappingValueType],
- Mapping[int, ParamsMappingValueType],
- Mapping[float, ParamsMappingValueType]]] = None,
- **kwargs) -> Response: ...
-def options(url: Union[str, unicode], **kwargs) -> Response: ...
-def head(url: Union[str, unicode], **kwargs) -> Response: ...
-def post(url: Union[str, unicode], data=..., json=...,
- **kwargs) -> Response: ...
-def put(url: Union[str, unicode], data=..., **kwargs) -> Response: ...
-def patch(url: Union[str, unicode], data=..., **kwargs) -> Response: ...
-def delete(url: Union[str, unicode], **kwargs) -> Response: ...
diff --git a/typeshed/third_party/2/requests/cookies.pyi b/typeshed/third_party/2/requests/cookies.pyi
deleted file mode 100644
index 6f56c82..0000000
--- a/typeshed/third_party/2/requests/cookies.pyi
+++ /dev/null
@@ -1,61 +0,0 @@
-# Stubs for requests.cookies (Python 3)
-
-from typing import Any, MutableMapping
-import collections
-from . import compat
-
-class MockRequest:
- type = ... # type: Any
- def __init__(self, request) -> None: ...
- def get_type(self): ...
- def get_host(self): ...
- def get_origin_req_host(self): ...
- def get_full_url(self): ...
- def is_unverifiable(self): ...
- def has_header(self, name): ...
- def get_header(self, name, default=...): ...
- def add_header(self, key, val): ...
- def add_unredirected_header(self, name, value): ...
- def get_new_headers(self): ...
- @property
- def unverifiable(self): ...
- @property
- def origin_req_host(self): ...
- @property
- def host(self): ...
-
-class MockResponse:
- def __init__(self, headers) -> None: ...
- def info(self): ...
- def getheaders(self, name): ...
-
-def extract_cookies_to_jar(jar, request, response): ...
-def get_cookie_header(jar, request): ...
-def remove_cookie_by_name(cookiejar, name, domain=..., path=...): ...
-
-class CookieConflictError(RuntimeError): ...
-
-class RequestsCookieJar(MutableMapping):
- def get(self, name, default=..., domain=..., path=...): ...
- def set(self, name, value, **kwargs): ...
- def iterkeys(self): ...
- def keys(self): ...
- def itervalues(self): ...
- def values(self): ...
- def iteritems(self): ...
- def items(self): ...
- def list_domains(self): ...
- def list_paths(self): ...
- def multiple_domains(self): ...
- def get_dict(self, domain=..., path=...): ...
- def __getitem__(self, name): ...
- def __setitem__(self, name, value): ...
- def __delitem__(self, name): ...
- def set_cookie(self, cookie, *args, **kwargs): ...
- def update(self, other): ...
- def copy(self): ...
-
-def create_cookie(name, value, **kwargs): ...
-def morsel_to_cookie(morsel): ...
-def cookiejar_from_dict(cookie_dict, cookiejar=..., overwrite=...): ...
-def merge_cookies(cookiejar, cookies): ...
diff --git a/typeshed/third_party/2/requests/models.pyi b/typeshed/third_party/2/requests/models.pyi
deleted file mode 100644
index a9f2b59..0000000
--- a/typeshed/third_party/2/requests/models.pyi
+++ /dev/null
@@ -1,135 +0,0 @@
-# Stubs for requests.models (Python 3)
-
-from typing import Any, List, MutableMapping, Iterator, Dict
-import datetime
-
-from . import hooks
-from . import structures
-from . import auth
-from . import cookies
-from .cookies import RequestsCookieJar
-from .packages.urllib3 import fields
-from .packages.urllib3 import filepost
-from .packages.urllib3 import util
-from .packages.urllib3 import exceptions as urllib3_exceptions
-from . import exceptions
-from . import utils
-from . import compat
-from . import status_codes
-
-from typing import Optional, Union
-
-default_hooks = hooks.default_hooks
-CaseInsensitiveDict = structures.CaseInsensitiveDict
-HTTPBasicAuth = auth.HTTPBasicAuth
-cookiejar_from_dict = cookies.cookiejar_from_dict
-get_cookie_header = cookies.get_cookie_header
-RequestField = fields.RequestField
-encode_multipart_formdata = filepost.encode_multipart_formdata
-DecodeError = urllib3_exceptions.DecodeError
-ReadTimeoutError = urllib3_exceptions.ReadTimeoutError
-ProtocolError = urllib3_exceptions.ProtocolError
-LocationParseError = urllib3_exceptions.LocationParseError
-HTTPError = exceptions.HTTPError
-MissingSchema = exceptions.MissingSchema
-InvalidURL = exceptions.InvalidURL
-ChunkedEncodingError = exceptions.ChunkedEncodingError
-ContentDecodingError = exceptions.ContentDecodingError
-ConnectionError = exceptions.ConnectionError
-StreamConsumedError = exceptions.StreamConsumedError
-guess_filename = utils.guess_filename
-get_auth_from_url = utils.get_auth_from_url
-requote_uri = utils.requote_uri
-stream_decode_response_unicode = utils.stream_decode_response_unicode
-to_key_val_list = utils.to_key_val_list
-parse_header_links = utils.parse_header_links
-iter_slices = utils.iter_slices
-guess_json_utf = utils.guess_json_utf
-super_len = utils.super_len
-to_native_string = utils.to_native_string
-codes = status_codes.codes
-
-REDIRECT_STATI = ... # type: Any
-DEFAULT_REDIRECT_LIMIT = ... # type: Any
-CONTENT_CHUNK_SIZE = ... # type: Any
-ITER_CHUNK_SIZE = ... # type: Any
-json_dumps = ... # type: Any
-
-class RequestEncodingMixin:
- @property
- def path_url(self): ...
-
-class RequestHooksMixin:
- def register_hook(self, event, hook): ...
- def deregister_hook(self, event, hook): ...
-
-class Request(RequestHooksMixin):
- hooks = ... # type: Any
- method = ... # type: Any
- url = ... # type: Any
- headers = ... # type: Any
- files = ... # type: Any
- data = ... # type: Any
- json = ... # type: Any
- params = ... # type: Any
- auth = ... # type: Any
- cookies = ... # type: Any
- def __init__(self, method=..., url=..., headers=..., files=..., data=..., params=...,
- auth=..., cookies=..., hooks=..., json=...): ...
- def prepare(self): ...
-
-class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
- method = ... # type: Optional[Union[str, unicode]]
- url = ... # type: Optional[Union[str, unicode]]
- headers = ... # type: CaseInsensitiveDict
- body = ... # type: Optional[Union[str, unicode]]
- hooks = ... # type: Any
- def __init__(self) -> None: ...
- def prepare(self, method=..., url=..., headers=..., files=..., data=..., params=...,
- auth=..., cookies=..., hooks=..., json=...): ...
- def copy(self): ...
- def prepare_method(self, method): ...
- def prepare_url(self, url, params): ...
- def prepare_headers(self, headers): ...
- def prepare_body(self, data, files, json=...): ...
- def prepare_content_length(self, body): ...
- def prepare_auth(self, auth, url=...): ...
- def prepare_cookies(self, cookies): ...
- def prepare_hooks(self, hooks): ...
-
-class Response:
- __attrs__ = ... # type: Any
- status_code = ... # type: int
- headers = ... # type: MutableMapping[str, str]
- raw = ... # type: Any
- url = ... # type: str
- encoding = ... # type: str
- history = ... # type: List[Response]
- reason = ... # type: str
- cookies = ... # type: RequestsCookieJar
- elapsed = ... # type: datetime.timedelta
- request = ... # type: PreparedRequest
- def __init__(self) -> None: ...
- def __bool__(self) -> bool: ...
- def __nonzero__(self) -> bool: ...
- def __iter__(self) -> Iterator[str]: ...
- @property
- def ok(self) -> bool: ...
- @property
- def is_redirect(self) -> bool: ...
- @property
- def is_permanent_redirect(self) -> bool: ...
- @property
- def apparent_encoding(self) -> str: ...
- def iter_content(self, chunk_size: int = ...,
- decode_unicode: bool = ...) -> Iterator[Any]: ...
- def iter_lines(self, chunk_size=..., decode_unicode=..., delimiter=...): ...
- @property
- def content(self) -> str: ...
- @property
- def text(self) -> str: ...
- def json(self, **kwargs) -> Any: ...
- @property
- def links(self) -> Dict[Any, Any]: ...
- def raise_for_status(self) -> None: ...
- def close(self) -> None: ...
diff --git a/typeshed/third_party/2/requests/packages/urllib3/__init__.pyi b/typeshed/third_party/2/requests/packages/urllib3/__init__.pyi
deleted file mode 100644
index 38cf672..0000000
--- a/typeshed/third_party/2/requests/packages/urllib3/__init__.pyi
+++ /dev/null
@@ -1,12 +0,0 @@
-# Stubs for requests.packages.urllib3 (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-import logging
-
-class NullHandler(logging.Handler):
- def emit(self, record): ...
-
-def add_stderr_logger(level=...): ...
-def disable_warnings(category=...): ...
diff --git a/typeshed/third_party/2/requests/packages/urllib3/connection.pyi b/typeshed/third_party/2/requests/packages/urllib3/connection.pyi
deleted file mode 100644
index 289fd18..0000000
--- a/typeshed/third_party/2/requests/packages/urllib3/connection.pyi
+++ /dev/null
@@ -1,51 +0,0 @@
-# Stubs for requests.packages.urllib3.connection (Python 3.4)
-
-from typing import Any
-from httplib import HTTPException
-from . import packages
-from . import exceptions
-from . import util
-
-class DummyConnection: ...
-
-ConnectTimeoutError = exceptions.ConnectTimeoutError
-SystemTimeWarning = exceptions.SystemTimeWarning
-SecurityWarning = exceptions.SecurityWarning
-
-port_by_scheme = ... # type: Any
-RECENT_DATE = ... # type: Any
-
-class HTTPConnection(object):
- default_port = ... # type: Any
- default_socket_options = ... # type: Any
- is_verified = ... # type: Any
- source_address = ... # type: Any
- socket_options = ... # type: Any
- def __init__(self, *args, **kw) -> None: ...
- def connect(self): ...
-
-class HTTPSConnection(HTTPConnection):
- default_port = ... # type: Any
- key_file = ... # type: Any
- cert_file = ... # type: Any
- def __init__(self, host, port=..., key_file=..., cert_file=..., strict=..., timeout=..., **kw) -> None: ...
- sock = ... # type: Any
- def connect(self): ...
-
-class VerifiedHTTPSConnection(HTTPSConnection):
- cert_reqs = ... # type: Any
- ca_certs = ... # type: Any
- ssl_version = ... # type: Any
- assert_fingerprint = ... # type: Any
- key_file = ... # type: Any
- cert_file = ... # type: Any
- assert_hostname = ... # type: Any
- def set_cert(self, key_file=..., cert_file=..., cert_reqs=..., ca_certs=..., assert_hostname=..., assert_fingerprint=...): ...
- sock = ... # type: Any
- auto_open = ... # type: Any
- is_verified = ... # type: Any
- def connect(self): ...
-
-UnverifiedHTTPSConnection = ... # type: Any
-
-class ConnectionError(Exception): pass
diff --git a/typeshed/third_party/2/requests/packages/urllib3/connectionpool.pyi b/typeshed/third_party/2/requests/packages/urllib3/connectionpool.pyi
deleted file mode 100644
index 03c3140..0000000
--- a/typeshed/third_party/2/requests/packages/urllib3/connectionpool.pyi
+++ /dev/null
@@ -1,87 +0,0 @@
-# Stubs for requests.packages.urllib3.connectionpool (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-from ssl import SSLError as BaseSSLError
-from . import exceptions
-from .packages import ssl_match_hostname
-from . import packages
-from . import connection
-from . import request
-from . import response
-from .util import connection as _connection
-from .util import retry
-from .util import timeout
-from .util import url
-
-ClosedPoolError = exceptions.ClosedPoolError
-ProtocolError = exceptions.ProtocolError
-EmptyPoolError = exceptions.EmptyPoolError
-HostChangedError = exceptions.HostChangedError
-LocationValueError = exceptions.LocationValueError
-MaxRetryError = exceptions.MaxRetryError
-ProxyError = exceptions.ProxyError
-ReadTimeoutError = exceptions.ReadTimeoutError
-SSLError = exceptions.SSLError
-TimeoutError = exceptions.TimeoutError
-InsecureRequestWarning = exceptions.InsecureRequestWarning
-CertificateError = ssl_match_hostname.CertificateError
-port_by_scheme = connection.port_by_scheme
-DummyConnection = connection.DummyConnection
-HTTPConnection = connection.HTTPConnection
-HTTPSConnection = connection.HTTPSConnection
-VerifiedHTTPSConnection = connection.VerifiedHTTPSConnection
-HTTPException = connection.HTTPException
-ConnectionError = connection.ConnectionError
-RequestMethods = request.RequestMethods
-HTTPResponse = response.HTTPResponse
-is_connection_dropped = _connection.is_connection_dropped
-Retry = retry.Retry
-Timeout = timeout.Timeout
-get_host = url.get_host
-
-xrange = ... # type: Any
-log = ... # type: Any
-
-class ConnectionPool:
- scheme = ... # type: Any
- QueueCls = ... # type: Any
- host = ... # type: Any
- port = ... # type: Any
- def __init__(self, host, port=...) -> None: ...
- def __enter__(self): ...
- def __exit__(self, exc_type, exc_val, exc_tb): ...
- def close(self): ...
-
-class HTTPConnectionPool(ConnectionPool, RequestMethods):
- scheme = ... # type: Any
- ConnectionCls = ... # type: Any
- strict = ... # type: Any
- timeout = ... # type: Any
- retries = ... # type: Any
- pool = ... # type: Any
- block = ... # type: Any
- proxy = ... # type: Any
- proxy_headers = ... # type: Any
- num_connections = ... # type: Any
- num_requests = ... # type: Any
- conn_kw = ... # type: Any
- def __init__(self, host, port=..., strict=..., timeout=..., maxsize=..., block=..., headers=..., retries=..., _proxy=..., _proxy_headers=..., **conn_kw) -> None: ...
- def close(self): ...
- def is_same_host(self, url): ...
- def urlopen(self, method, url, body=..., headers=..., retries=..., redirect=..., assert_same_host=..., timeout=..., pool_timeout=..., release_conn=..., **response_kw): ...
-
-class HTTPSConnectionPool(HTTPConnectionPool):
- scheme = ... # type: Any
- ConnectionCls = ... # type: Any
- key_file = ... # type: Any
- cert_file = ... # type: Any
- cert_reqs = ... # type: Any
- ca_certs = ... # type: Any
- ssl_version = ... # type: Any
- assert_hostname = ... # type: Any
- assert_fingerprint = ... # type: Any
- def __init__(self, host, port=..., strict=..., timeout=..., maxsize=..., block=..., headers=..., retries=..., _proxy=..., _proxy_headers=..., key_file=..., cert_file=..., cert_reqs=..., ca_certs=..., ssl_version=..., assert_hostname=..., assert_fingerprint=..., **conn_kw) -> None: ...
-
-def connection_from_url(url, **kw): ...
diff --git a/typeshed/third_party/2/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi b/typeshed/third_party/2/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi
deleted file mode 100644
index 05c03dc..0000000
--- a/typeshed/third_party/2/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi
+++ /dev/null
@@ -1 +0,0 @@
-class CertificateError(ValueError): pass
diff --git a/typeshed/third_party/2/requests/packages/urllib3/response.pyi b/typeshed/third_party/2/requests/packages/urllib3/response.pyi
deleted file mode 100644
index 9e310d2..0000000
--- a/typeshed/third_party/2/requests/packages/urllib3/response.pyi
+++ /dev/null
@@ -1,58 +0,0 @@
-# Stubs for requests.packages.urllib3.response (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any, IO
-import io
-from . import _collections
-from . import exceptions
-# from .packages import six
-from . import connection
-from .util import response
-
-HTTPHeaderDict = _collections.HTTPHeaderDict
-ProtocolError = exceptions.ProtocolError
-DecodeError = exceptions.DecodeError
-ReadTimeoutError = exceptions.ReadTimeoutError
-binary_type = str # six.binary_type
-PY3 = True # six.PY3
-is_fp_closed = response.is_fp_closed
-
-class DeflateDecoder:
- def __init__(self) -> None: ...
- def __getattr__(self, name): ...
- def decompress(self, data): ...
-
-class GzipDecoder:
- def __init__(self) -> None: ...
- def __getattr__(self, name): ...
- def decompress(self, data): ...
-
-class HTTPResponse(IO[Any]):
- CONTENT_DECODERS = ... # type: Any
- REDIRECT_STATUSES = ... # type: Any
- headers = ... # type: Any
- status = ... # type: Any
- version = ... # type: Any
- reason = ... # type: Any
- strict = ... # type: Any
- decode_content = ... # type: Any
- def __init__(self, body=..., headers=..., status=..., version=..., reason=..., strict=..., preload_content=..., decode_content=..., original_response=..., pool=..., connection=...) -> None: ...
- def get_redirect_location(self): ...
- def release_conn(self): ...
- @property
- def data(self): ...
- def tell(self): ...
- def read(self, amt=..., decode_content=..., cache_content=...): ...
- def stream(self, amt=..., decode_content=...): ...
- @classmethod
- def from_httplib(ResponseCls, r, **response_kw): ...
- def getheaders(self): ...
- def getheader(self, name, default=...): ...
- def close(self): ...
- @property
- def closed(self): ...
- def fileno(self): ...
- def flush(self): ...
- def readable(self): ...
- def readinto(self, b): ...
diff --git a/typeshed/third_party/2/requests/packages/urllib3/util/__init__.pyi b/typeshed/third_party/2/requests/packages/urllib3/util/__init__.pyi
deleted file mode 100644
index 0a2c9ba..0000000
--- a/typeshed/third_party/2/requests/packages/urllib3/util/__init__.pyi
+++ /dev/null
@@ -1,6 +0,0 @@
-# Stubs for requests.packages.urllib3.util (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from . import connection
-from . import request
diff --git a/typeshed/third_party/2/requests/sessions.pyi b/typeshed/third_party/2/requests/sessions.pyi
deleted file mode 100644
index 9cc60d2..0000000
--- a/typeshed/third_party/2/requests/sessions.pyi
+++ /dev/null
@@ -1,106 +0,0 @@
-# Stubs for requests.sessions (Python 2.7)
-
-from typing import Any, Union, MutableMapping, Text, Optional, IO, Tuple, Callable
-from . import adapters
-from . import auth
-from . import compat
-from . import cookies
-from . import models
-from .models import Response
-from . import hooks
-from . import utils
-from . import exceptions
-from .packages.urllib3 import _collections
-from . import structures
-from . import adapters
-from . import status_codes
-
-BaseAdapter = adapters.BaseAdapter
-OrderedDict = compat.OrderedDict
-cookiejar_from_dict = cookies.cookiejar_from_dict
-extract_cookies_to_jar = cookies.extract_cookies_to_jar
-RequestsCookieJar = cookies.RequestsCookieJar
-merge_cookies = cookies.merge_cookies
-Request = models.Request
-PreparedRequest = models.PreparedRequest
-DEFAULT_REDIRECT_LIMIT = models.DEFAULT_REDIRECT_LIMIT
-default_hooks = hooks.default_hooks
-dispatch_hook = hooks.dispatch_hook
-to_key_val_list = utils.to_key_val_list
-default_headers = utils.default_headers
-to_native_string = utils.to_native_string
-TooManyRedirects = exceptions.TooManyRedirects
-InvalidSchema = exceptions.InvalidSchema
-ChunkedEncodingError = exceptions.ChunkedEncodingError
-ContentDecodingError = exceptions.ContentDecodingError
-RecentlyUsedContainer = _collections.RecentlyUsedContainer
-CaseInsensitiveDict = structures.CaseInsensitiveDict
-HTTPAdapter = adapters.HTTPAdapter
-requote_uri = utils.requote_uri
-get_environ_proxies = utils.get_environ_proxies
-get_netrc_auth = utils.get_netrc_auth
-should_bypass_proxies = utils.should_bypass_proxies
-get_auth_from_url = utils.get_auth_from_url
-codes = status_codes.codes
-REDIRECT_STATI = models.REDIRECT_STATI
-
-REDIRECT_CACHE_SIZE = ... # type: Any
-
-def merge_setting(request_setting, session_setting, dict_class=...): ...
-def merge_hooks(request_hooks, session_hooks, dict_class=...): ...
-
-class SessionRedirectMixin:
- def resolve_redirects(self, resp, req, stream=..., timeout=..., verify=..., cert=...,
- proxies=...): ...
- def rebuild_auth(self, prepared_request, response): ...
- def rebuild_proxies(self, prepared_request, proxies): ...
-
-class Session(SessionRedirectMixin):
- __attrs__ = ... # type: Any
- headers = ... # type: Optional[MutableMapping[Text, Text]]
- auth = ... # type: Union[None, Tuple[Text, Text], Callable[[Request], Request]]
- proxies = ... # type: Optional[MutableMapping[Text, Text]]
- hooks = ... # type: Optional[MutableMapping[Text, Callable[[Request], Any]]]
- params = ... # type: Union[None, bytes, MutableMapping[Text, Text]]
- stream = ... # type: bool
- verify = ... # type: bool
- cert = ... # type: Union[None, Text, Tuple[Text, Text]]
- max_redirects = ... # type: int
- trust_env = ... # type: bool
- cookies = ... # type: Union[None, RequestsCookieJar, MutableMapping[Text, Text]]
- adapters = ... # type: MutableMapping
- redirect_cache = ... # type: RecentlyUsedContainer
- def __init__(self) -> None: ...
- def __enter__(self) -> 'Session': ...
- def __exit__(self, *args) -> None: ...
- def prepare_request(self, request): ...
- def request(self, method: str, url: str,
- params, # type: Union[None, bytes, MutableMapping[Text, Text]]
- data, # type: Union[None, bytes, MutableMapping[Text, Text], IO]
- headers, # type: Optional[MutableMapping[Text, Text]]
- cookies, # type: Union[None, RequestsCookieJar, MutableMapping[Text, Text]]
- files, # type: Optional[MutableMapping[Text, IO]]
- auth, # type: Union[None, Tuple[Text, Text], Callable[[Request], Request]]
- timeout, # type: Union[None, float, Tuple[float, float]]
- allow_redirects, # type: Optional[bool]
- proxies, # type: Optional[MutableMapping[Text, Text]]
- hooks, # type: Optional[MutableMapping[Text, Callable[[Request], Any]]]
- stream, # type: Optional[bool]
- verify, # type: Optional[bool]
- cert, # type: Union[Text, Tuple[Text, Text], None]
- json # type: Optional[MutableMapping]
- ) -> Response: ...
- def get(self, url: str, **kwargs) -> Response: ...
- def options(self, url: str, **kwargs) -> Response: ...
- def head(self, url: str, **kwargs) -> Response: ...
- def post(self, url: str, data=..., json=..., **kwargs) -> Response: ...
- def put(self, url: str, data=..., **kwargs) -> Response: ...
- def patch(self, url: str, data=..., **kwargs) -> Response: ...
- def delete(self, url: str, **kwargs) -> Response: ...
- def send(self, request, **kwargs): ...
- def merge_environment_settings(self, url, proxies, stream, verify, cert): ...
- def get_adapter(self, url): ...
- def close(self) -> None: ...
- def mount(self, prefix: str, adapter: BaseAdapter) -> None: ...
-
-def session() -> Session: ...
diff --git a/typeshed/third_party/2/six/moves/__init__.pyi b/typeshed/third_party/2/six/moves/__init__.pyi
index 2e09f98..5747172 100644
--- a/typeshed/third_party/2/six/moves/__init__.pyi
+++ b/typeshed/third_party/2/six/moves/__init__.pyi
@@ -1,6 +1,7 @@
-# Provisional stubs for six.moves (Python 2.7)
-
-import Cookie as http_cookies
+# Stubs for six.moves
+#
+# Note: Commented out items means they weren't implemented at the time.
+# Uncomment them when the modules have been added to the typeshed.
from cStringIO import StringIO as cStringIO
from itertools import ifilter as filter
from itertools import ifilterfalse as filterfalse
@@ -20,12 +21,46 @@ from UserString import UserString as UserString
from __builtin__ import xrange as xrange
from itertools import izip as zip
from itertools import izip_longest as zip_longest
-import six.moves.cPickle as cPickle
-import HTMLParser as html_parser
+import __builtin__ as builtins
+import ConfigParser as configparser
+# import copy_reg as copyreg
+# import gdbm as dbm_gnu
+# import dummy_thread as _dummy_thread
+import cookielib as http_cookiejar
+import Cookie as http_cookies
import htmlentitydefs as html_entities
+import HTMLParser as html_parser
import httplib as http_client
-
-import six.moves.urllib_parse as urllib_parse
-import six.moves.urllib_error as urllib_error
+# import email.MIMEMultipart as email_mime_multipart
+# import email.MIMENonMultipart as email_mime_nonmultipart
+import email.MIMEText as email_mime_text
+# import email.MIMEBase as email_mime_base
+import BaseHTTPServer as BaseHTTPServer
+# import CGIHTTPServer as CGIHTTPServer
+# import SimpleHTTPServer as SimpleHTTPServer
+import cPickle as cPickle
+import Queue as queue
+import repr as reprlib
+import SocketServer as socketserver
+import thread as _thread
+# import Tkinter as tkinter
+# import Dialog as tkinter_dialog
+# import FileDialog as tkinter_filedialog
+# import ScrolledText as tkinter_scrolledtext
+# import SimpleDialog as tkinter_simpledialog
+# import Tix as tkinter_tix
+# import ttk as tkinter_ttk
+# import Tkconstants as tkinter_constants
+# import Tkdnd as tkinter_dnd
+# import tkColorChooser as tkinter_colorchooser
+# import tkCommonDialog as tkinter_commondialog
+# import tkFileDialog as tkinter_tkfiledialog
+# import tkFont as tkinter_font
+# import tkMessageBox as tkinter_messagebox
+# import tkSimpleDialog as tkinter_tksimpledialog
+import six.moves.urllib.parse as urllib_parse
+import six.moves.urllib.error as urllib_error
import six.moves.urllib as urllib
-import six.moves.urllib_robotparser as urllib_robotparser
+import robotparser as urllib_robotparser
+# import xmlrpclib as xmlrpc_client
+# import SimpleXMLRPCServer as xmlrpc_server
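The rewritten six.moves stub above maps each alias straight onto its Python 2 module instead of routing through intermediate six.moves.* stub files. For orientation only, and as a hedged sketch rather than part of the patch, the following is the kind of 2/3-portable code these aliases let mypy check:

    from six.moves import http_client
    from six.moves.urllib.parse import urlencode, urljoin

    def fetch(host, path, params):
        # type: (str, str, dict) -> bytes
        conn = http_client.HTTPConnection(host)            # resolves to httplib on Python 2
        conn.request('GET', path + '?' + urlencode(params))
        return conn.getresponse().read()

    print(urljoin('https://example.com/api/', 'status'))   # urlparse.urljoin on Python 2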
diff --git a/typeshed/third_party/2/six/moves/cPickle.pyi b/typeshed/third_party/2/six/moves/cPickle.pyi
deleted file mode 100644
index 3feedc6..0000000
--- a/typeshed/third_party/2/six/moves/cPickle.pyi
+++ /dev/null
@@ -1,6 +0,0 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.cPickle (Python 2.7)
-
-from cPickle import * # noqa: F403
diff --git a/typeshed/third_party/2/six/moves/urllib/__init__.pyi b/typeshed/third_party/2/six/moves/urllib/__init__.pyi
index 71523cd..d08209c 100644
--- a/typeshed/third_party/2/six/moves/urllib/__init__.pyi
+++ b/typeshed/third_party/2/six/moves/urllib/__init__.pyi
@@ -1,8 +1,3 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib (Python 2.7)
-
import six.moves.urllib.error as error
import six.moves.urllib.parse as parse
import six.moves.urllib.request as request
diff --git a/typeshed/third_party/2/six/moves/urllib/error.pyi b/typeshed/third_party/2/six/moves/urllib/error.pyi
index 05eda84..044327e 100644
--- a/typeshed/third_party/2/six/moves/urllib/error.pyi
+++ b/typeshed/third_party/2/six/moves/urllib/error.pyi
@@ -1,8 +1,3 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib.error (Python 2.7)
-
from urllib2 import URLError as URLError
from urllib2 import HTTPError as HTTPError
from urllib import ContentTooShortError as ContentTooShortError
diff --git a/typeshed/third_party/2/six/moves/urllib/parse.pyi b/typeshed/third_party/2/six/moves/urllib/parse.pyi
index 25351fb..4096c27 100644
--- a/typeshed/third_party/2/six/moves/urllib/parse.pyi
+++ b/typeshed/third_party/2/six/moves/urllib/parse.pyi
@@ -1,30 +1,24 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib.parse (Python 2.7)
-
-from six.moves.urllib_parse import (
- ParseResult as ParseResult,
- SplitResult as SplitResult,
- parse_qs as parse_qs,
- parse_qsl as parse_qsl,
- urldefrag as urldefrag,
- urljoin as urljoin,
- urlparse as urlparse,
- urlsplit as urlsplit,
- urlunparse as urlunparse,
- urlunsplit as urlunsplit,
- quote as quote,
- quote_plus as quote_plus,
- unquote as unquote,
- unquote_plus as unquote_plus,
- urlencode as urlencode,
- splitquery as splitquery,
- splittag as splittag,
- splituser as splituser,
- uses_fragment as uses_fragment,
- uses_netloc as uses_netloc,
- uses_params as uses_params,
- uses_query as uses_query,
- uses_relative as uses_relative,
-)
+# Stubs for six.moves.urllib.parse
+from urlparse import ParseResult as ParseResult
+from urlparse import SplitResult as SplitResult
+from urlparse import parse_qs as parse_qs
+from urlparse import parse_qsl as parse_qsl
+from urlparse import urldefrag as urldefrag
+from urlparse import urljoin as urljoin
+from urlparse import urlparse as urlparse
+from urlparse import urlsplit as urlsplit
+from urlparse import urlunparse as urlunparse
+from urlparse import urlunsplit as urlunsplit
+from urllib import quote as quote
+from urllib import quote_plus as quote_plus
+from urllib import unquote as unquote
+from urllib import unquote_plus as unquote_plus
+from urllib import urlencode as urlencode
+from urllib import splitquery as splitquery
+from urllib import splittag as splittag
+from urllib import splituser as splituser
+from urlparse import uses_fragment as uses_fragment
+from urlparse import uses_netloc as uses_netloc
+from urlparse import uses_params as uses_params
+from urlparse import uses_query as uses_query
+from urlparse import uses_relative as uses_relative
diff --git a/typeshed/third_party/2/six/moves/urllib/request.pyi b/typeshed/third_party/2/six/moves/urllib/request.pyi
index 109cda8..0b8ad8c 100644
--- a/typeshed/third_party/2/six/moves/urllib/request.pyi
+++ b/typeshed/third_party/2/six/moves/urllib/request.pyi
@@ -1,8 +1,4 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib.request (Python 2.7)
-
+# Stubs for six.moves.urllib.request
from urllib2 import urlopen as urlopen
from urllib2 import install_opener as install_opener
from urllib2 import build_opener as build_opener
diff --git a/typeshed/third_party/2/six/moves/urllib/response.pyi b/typeshed/third_party/2/six/moves/urllib/response.pyi
index d778514..83e117f 100644
--- a/typeshed/third_party/2/six/moves/urllib/response.pyi
+++ b/typeshed/third_party/2/six/moves/urllib/response.pyi
@@ -1,8 +1,4 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib.response (Python 2.7)
-
+# Stubs for six.moves.urllib.response
from urllib import addbase as addbase
from urllib import addclosehook as addclosehook
from urllib import addinfo as addinfo
diff --git a/typeshed/third_party/2/six/moves/urllib/robotparser.pyi b/typeshed/third_party/2/six/moves/urllib/robotparser.pyi
index 3b33758..11eef50 100644
--- a/typeshed/third_party/2/six/moves/urllib/robotparser.pyi
+++ b/typeshed/third_party/2/six/moves/urllib/robotparser.pyi
@@ -1,6 +1 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib.robotparser (Python 2.7)
-
from robotparser import RobotFileParser as RobotFileParser
diff --git a/typeshed/third_party/2/six/moves/urllib_error.pyi b/typeshed/third_party/2/six/moves/urllib_error.pyi
deleted file mode 100644
index 4872659..0000000
--- a/typeshed/third_party/2/six/moves/urllib_error.pyi
+++ /dev/null
@@ -1,10 +0,0 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib_error (Python 2.7)
-
-from six.moves.urllib.error import (
- URLError as URLError,
- HTTPError as HTTPError,
- ContentTooShortError as ContentTooShortError,
-)
diff --git a/typeshed/third_party/2/six/moves/urllib_parse.pyi b/typeshed/third_party/2/six/moves/urllib_parse.pyi
deleted file mode 100644
index 2416b96..0000000
--- a/typeshed/third_party/2/six/moves/urllib_parse.pyi
+++ /dev/null
@@ -1,28 +0,0 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib_parse (Python 2.7)
-
-from urlparse import ParseResult as ParseResult
-from urlparse import SplitResult as SplitResult
-from urlparse import parse_qs as parse_qs
-from urlparse import parse_qsl as parse_qsl
-from urlparse import urldefrag as urldefrag
-from urlparse import urljoin as urljoin
-from urlparse import urlparse as urlparse
-from urlparse import urlsplit as urlsplit
-from urlparse import urlunparse as urlunparse
-from urlparse import urlunsplit as urlunsplit
-from urllib import quote as quote
-from urllib import quote_plus as quote_plus
-from urllib import unquote as unquote
-from urllib import unquote_plus as unquote_plus
-from urllib import urlencode as urlencode
-from urllib import splitquery as splitquery
-from urllib import splittag as splittag
-from urllib import splituser as splituser
-from urlparse import uses_fragment as uses_fragment
-from urlparse import uses_netloc as uses_netloc
-from urlparse import uses_params as uses_params
-from urlparse import uses_query as uses_query
-from urlparse import uses_relative as uses_relative
diff --git a/typeshed/third_party/2/six/moves/urllib_request.pyi b/typeshed/third_party/2/six/moves/urllib_request.pyi
deleted file mode 100644
index 832055a..0000000
--- a/typeshed/third_party/2/six/moves/urllib_request.pyi
+++ /dev/null
@@ -1,40 +0,0 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib_request (Python 2.7)
-
-from six.moves.urllib.request import (
- urlopen as urlopen,
- install_opener as install_opener,
- build_opener as build_opener,
- pathname2url as pathname2url,
- url2pathname as url2pathname,
- getproxies as getproxies,
- Request as Request,
- OpenerDirector as OpenerDirector,
- HTTPDefaultErrorHandler as HTTPDefaultErrorHandler,
- HTTPRedirectHandler as HTTPRedirectHandler,
- HTTPCookieProcessor as HTTPCookieProcessor,
- ProxyHandler as ProxyHandler,
- BaseHandler as BaseHandler,
- HTTPPasswordMgr as HTTPPasswordMgr,
- HTTPPasswordMgrWithDefaultRealm as HTTPPasswordMgrWithDefaultRealm,
- AbstractBasicAuthHandler as AbstractBasicAuthHandler,
- HTTPBasicAuthHandler as HTTPBasicAuthHandler,
- ProxyBasicAuthHandler as ProxyBasicAuthHandler,
- AbstractDigestAuthHandler as AbstractDigestAuthHandler,
- HTTPDigestAuthHandler as HTTPDigestAuthHandler,
- ProxyDigestAuthHandler as ProxyDigestAuthHandler,
- HTTPHandler as HTTPHandler,
- HTTPSHandler as HTTPSHandler,
- FileHandler as FileHandler,
- FTPHandler as FTPHandler,
- CacheFTPHandler as CacheFTPHandler,
- UnknownHandler as UnknownHandler,
- HTTPErrorProcessor as HTTPErrorProcessor,
- urlretrieve as urlretrieve,
- urlcleanup as urlcleanup,
- URLopener as URLopener,
- FancyURLopener as FancyURLopener,
- proxy_bypass as proxy_bypass,
-)
diff --git a/typeshed/third_party/2/six/moves/urllib_response.pyi b/typeshed/third_party/2/six/moves/urllib_response.pyi
deleted file mode 100644
index ca00492..0000000
--- a/typeshed/third_party/2/six/moves/urllib_response.pyi
+++ /dev/null
@@ -1,11 +0,0 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib_response (Python 2.7)
-
-from six.moves.urllib.response import (
- addbase as addbase,
- addclosehook as addclosehook,
- addinfo as addinfo,
- addinfourl as addinfourl,
-)
diff --git a/typeshed/third_party/2/six/moves/urllib_robotparser.pyi b/typeshed/third_party/2/six/moves/urllib_robotparser.pyi
deleted file mode 100644
index d990bb5..0000000
--- a/typeshed/third_party/2/six/moves/urllib_robotparser.pyi
+++ /dev/null
@@ -1,8 +0,0 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib_robotparser (Python 2.7)
-
-from six.moves.urllib.robotparser import (
- RobotFileParser as RobotFileParser,
-)
diff --git a/typeshed/third_party/2/werkzeug/wrappers.pyi b/typeshed/third_party/2/werkzeug/wrappers.pyi
index 7924bcb..982650c 100644
--- a/typeshed/third_party/2/werkzeug/wrappers.pyi
+++ b/typeshed/third_party/2/werkzeug/wrappers.pyi
@@ -2,78 +2,85 @@
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
-from typing import Any
+from typing import (
+ Any, Iterable, Mapping, Optional, Sequence, Tuple, Type, Union,
+)
+
+from .datastructures import (
+ CombinedMultiDict, EnvironHeaders, Headers, ImmutableMultiDict,
+ MultiDict, TypeConversionDict,
+)
class BaseRequest:
- charset = ... # type: Any
- encoding_errors = ... # type: Any
- max_content_length = ... # type: Any
- max_form_memory_size = ... # type: Any
- parameter_storage_class = ... # type: Any
- list_storage_class = ... # type: Any
- dict_storage_class = ... # type: Any
- form_data_parser_class = ... # type: Any
- trusted_hosts = ... # type: Any
+ charset = ... # type: str
+ encoding_errors = ... # type: str
+ max_content_length = ... # type: Union[int, long]
+ max_form_memory_size = ... # type: Union[int, long]
+ parameter_storage_class = ... # type: Type
+ list_storage_class = ... # type: Type
+ dict_storage_class = ... # type: Type
+ form_data_parser_class = ... # type: Type
+ trusted_hosts = ... # type: Optional[Sequence[unicode]]
disable_data_descriptor = ... # type: Any
- environ = ... # type: Any
+ environ = ... # type: Mapping[str, object]
shallow = ... # type: Any
- def __init__(self, environ, populate_request=True, shallow=False): ...
+ def __init__(self, environ: Mapping[basestring, object], populate_request: bool=True, shallow: bool=False) -> None: ...
@property
- def url_charset(self): ...
+ def url_charset(self) -> str: ...
@classmethod
- def from_values(cls, *args, **kwargs): ...
+ def from_values(cls, *args, **kwargs) -> 'BaseRequest': ...
@classmethod
def application(cls, f): ...
@property
def want_form_data_parsed(self): ...
def make_form_data_parser(self): ...
- def close(self): ...
+ def close(self) -> None: ...
def __enter__(self): ...
def __exit__(self, exc_type, exc_value, tb): ...
def stream(self): ...
input_stream = ... # type: Any
- def args(self): ...
+ args = ... # type: ImmutableMultiDict
def data(self): ...
- def get_data(self, cache=True, as_text=False, parse_form_data=False): ...
- def form(self): ...
- def values(self): ...
- def files(self): ...
- def cookies(self): ...
- def headers(self): ...
- def path(self): ...
- def full_path(self): ...
- def script_root(self): ...
- def url(self): ...
- def base_url(self): ...
- def url_root(self): ...
- def host_url(self): ...
- def host(self): ...
- query_string = ... # type: Any
- method = ... # type: Any
+ def get_data(self, cache: bool=True, as_text: bool=False, parse_form_data: bool=False) -> str: ...
+ form = ... # type: ImmutableMultiDict
+ values = ... # type: CombinedMultiDict
+ files = ... # type: MultiDict
+ cookies = ... # type: TypeConversionDict
+ headers = ... # type: EnvironHeaders
+ path = ... # type: unicode
+ full_path = ... # type: unicode
+ script_root = ... # type: unicode
+ url = ... # type: unicode
+ base_url = ... # type: unicode
+ url_root = ... # type: unicode
+ host_url = ... # type: unicode
+ host = ... # type: unicode
+ query_string = ... # type: str
+ method = ... # type: str
def access_route(self): ...
@property
- def remote_addr(self): ...
- remote_user = ... # type: Any
- scheme = ... # type: Any
- is_xhr = ... # type: Any
- is_secure = ... # type: Any
- is_multithread = ... # type: Any
- is_multiprocess = ... # type: Any
- is_run_once = ... # type: Any
+ def remote_addr(self) -> str: ...
+ remote_user = ... # type: unicode
+ scheme = ... # type: str
+ is_xhr = ... # type: bool
+ is_secure = ... # type: bool
+ is_multithread = ... # type: bool
+ is_multiprocess = ... # type: bool
+ is_run_once = ... # type: bool
class BaseResponse:
- charset = ... # type: Any
- default_status = ... # type: Any
- default_mimetype = ... # type: Any
- implicit_sequence_conversion = ... # type: Any
- autocorrect_location_header = ... # type: Any
- automatically_set_content_length = ... # type: Any
- headers = ... # type: Any
- status_code = ... # type: Any
- status = ... # type: Any
- direct_passthrough = ... # type: Any
- response = ... # type: Any
- def __init__(self, response=None, status=None, headers=None, mimetype=None, content_type=None, direct_passthrough=False): ...
+ charset = ... # type: str
+ default_status = ... # type: int
+ default_mimetype = ... # type: str
+ implicit_sequence_conversion = ... # type: bool
+ autocorrect_location_header = ... # type: bool
+ automatically_set_content_length = ... # type: bool
+ headers = ... # type: Headers
+ status_code = ... # type: int
+ status = ... # type: str
+ direct_passthrough = ... # type: bool
+ response = ... # type: Iterable[str]
+ def __init__(self, response: Union[Iterable[str], str]=None, status: Union[basestring, int]=None, headers: Union[Headers, Mapping[basestring, basestring], Sequence[Tuple[basestring, basestring]]]=None, mimetype: basestring=None, content_type: basestring=None, direct_passthrough: bool=False) -> None: ...
def call_on_close(self, func): ...
@classmethod
def force_type(cls, response, environ=None): ...
@@ -88,10 +95,10 @@ class BaseResponse:
def set_cookie(self, key, value='', max_age=None, expires=None, path='', domain=None, secure=False, httponly=False): ...
def delete_cookie(self, key, path='', domain=None): ...
@property
- def is_streamed(self): ...
+ def is_streamed(self) -> bool: ...
@property
- def is_sequence(self): ...
- def close(self): ...
+ def is_sequence(self) -> bool: ...
+ def close(self) -> None: ...
def __enter__(self): ...
def __exit__(self, exc_type, exc_value, tb): ...
def freeze(self): ...
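The werkzeug wrapper attributes above now carry concrete types (ImmutableMultiDict, Headers, int, and so on) instead of Any. A hedged sketch, illustrative only and not taken from the patch, of the kind of application code this lets mypy check more strictly:

    from werkzeug.wrappers import Request, Response

    def handle(environ):
        req = Request(environ)
        name = req.args.get('name', 'world')       # args is an ImmutableMultiDict
        resp = Response('Hello %s' % name, status=200)
        resp.headers['X-Debug'] = '1'               # headers is a Headers instance
        return resp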
diff --git a/typeshed/third_party/2and3/boto/kms/__init__.pyi b/typeshed/third_party/2and3/boto/kms/__init__.pyi
new file mode 100644
index 0000000..7382eca
--- /dev/null
+++ b/typeshed/third_party/2and3/boto/kms/__init__.pyi
@@ -0,0 +1,9 @@
+# Stubs for boto.kms (Python 3.6)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import List
+import boto
+
+def regions() -> List[boto.regioninfo.RegionInfo]: ...
+def connect_to_region(region_name, **kw_params): ...
diff --git a/typeshed/third_party/2and3/boto/kms/exceptions.pyi b/typeshed/third_party/2and3/boto/kms/exceptions.pyi
new file mode 100644
index 0000000..e09b895
--- /dev/null
+++ b/typeshed/third_party/2and3/boto/kms/exceptions.pyi
@@ -0,0 +1,21 @@
+# Stubs for boto.kms.exceptions (Python 3.6)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from boto.exception import BotoServerError
+
+class InvalidGrantTokenException(BotoServerError): ...
+class DisabledException(BotoServerError): ...
+class LimitExceededException(BotoServerError): ...
+class DependencyTimeoutException(BotoServerError): ...
+class InvalidMarkerException(BotoServerError): ...
+class AlreadyExistsException(BotoServerError): ...
+class InvalidCiphertextException(BotoServerError): ...
+class KeyUnavailableException(BotoServerError): ...
+class InvalidAliasNameException(BotoServerError): ...
+class UnsupportedOperationException(BotoServerError): ...
+class InvalidArnException(BotoServerError): ...
+class KMSInternalException(BotoServerError): ...
+class InvalidKeyUsageException(BotoServerError): ...
+class MalformedPolicyDocumentException(BotoServerError): ...
+class NotFoundException(BotoServerError): ...
diff --git a/typeshed/third_party/2and3/boto/kms/layer1.pyi b/typeshed/third_party/2and3/boto/kms/layer1.pyi
new file mode 100644
index 0000000..777f2f7
--- /dev/null
+++ b/typeshed/third_party/2and3/boto/kms/layer1.pyi
@@ -0,0 +1,41 @@
+# Stubs for boto.kms.layer1 (Python 3.6)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, Dict, List, Mapping, Optional, Type
+from boto.connection import AWSQueryConnection
+
+class KMSConnection(AWSQueryConnection):
+ APIVersion = ... # type: str
+ DefaultRegionName = ... # type: str
+ DefaultRegionEndpoint = ... # type: str
+ ServiceName = ... # type: str
+ TargetPrefix = ... # type: str
+ ResponseError = ... # type: Type[Exception]
+ region = ... # type: Any
+ def __init__(self, **kwargs) -> None: ...
+ def create_alias(self, alias_name: str, target_key_id: str) -> Optional[Dict[str, Any]]: ...
+ def create_grant(self, key_id: str, grantee_principal: str, retiring_principal: Optional[str] = ..., operations: Optional[List[str]] = ..., constraints: Optional[Dict[str, Dict[str, str]]] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ...
+ def create_key(self, policy: Optional[str] = ..., description: Optional[str] = ..., key_usage: Optional[str] = ...) -> Optional[Dict[str, Any]]: ...
+ def decrypt(self, ciphertext_blob: bytes, encryption_context: Optional[Mapping[str, Any]] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ...
+ def delete_alias(self, alias_name: str) -> Optional[Dict[str, Any]]: ...
+ def describe_key(self, key_id: str) -> Optional[Dict[str, Any]]: ...
+ def disable_key(self, key_id: str) -> Optional[Dict[str, Any]]: ...
+ def disable_key_rotation(self, key_id: str) -> Optional[Dict[str, Any]]: ...
+ def enable_key(self, key_id: str) -> Optional[Dict[str, Any]]: ...
+ def enable_key_rotation(self, key_id: str) -> Optional[Dict[str, Any]]: ...
+ def encrypt(self, key_id: str, plaintext: bytes, encryption_context: Optional[Mapping[str, Any]] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ...
+ def generate_data_key(self, key_id: str, encryption_context: Optional[Mapping[str, Any]] = ..., number_of_bytes: Optional[int] = ..., key_spec: Optional[str] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ...
+ def generate_data_key_without_plaintext(self, key_id: str, encryption_context: Optional[Mapping[str, Any]] = ..., key_spec: Optional[str] = ..., number_of_bytes: Optional[int] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ...
+ def generate_random(self, number_of_bytes: Optional[int] = ...) -> Optional[Dict[str, Any]]: ...
+ def get_key_policy(self, key_id: str, policy_name: str) -> Optional[Dict[str, Any]]: ...
+ def get_key_rotation_status(self, key_id: str) -> Optional[Dict[str, Any]]: ...
+ def list_aliases(self, limit: Optional[int] = ..., marker: Optional[str] = ...) -> Optional[Dict[str, Any]]: ...
+ def list_grants(self, key_id: str, limit: Optional[int] = ..., marker: Optional[str] = ...) -> Optional[Dict[str, Any]]: ...
+ def list_key_policies(self, key_id: str, limit: Optional[int] = ..., marker: Optional[str] = ...) -> Optional[Dict[str, Any]]: ...
+ def list_keys(self, limit: Optional[int] = ..., marker: Optional[str] = ...) -> Optional[Dict[str, Any]]: ...
+ def put_key_policy(self, key_id: str, policy_name: str, policy: str) -> Optional[Dict[str, Any]]: ...
+ def re_encrypt(self, ciphertext_blob: bytes, destination_key_id: str, source_encryption_context: Optional[Mapping[str, Any]] = ..., destination_encryption_context: Optional[Mapping[str, Any]] = ..., grant_tokens: Optional[List[str]] = ...) -> Optional[Dict[str, Any]]: ...
+ def retire_grant(self, grant_token: str) -> Optional[Dict[str, Any]]: ...
+ def revoke_grant(self, key_id: str, grant_id: str) -> Optional[Dict[str, Any]]: ...
+ def update_key_description(self, key_id: str, description: str) -> Optional[Dict[str, Any]]: ...
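A short usage sketch for the new boto.kms stubs (hedged: the region and key id below are placeholders, and the calls need valid AWS credentials at runtime):

    import boto.kms

    conn = boto.kms.connect_to_region('us-east-1')   # may return None for unknown regions
    if conn is not None:
        created = conn.create_key(description='example key')   # Optional[Dict[str, Any]]
        data_key = conn.generate_data_key('alias/example',     # placeholder key id
                                          number_of_bytes=32)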
diff --git a/typeshed/third_party/2and3/boto/s3/bucket.pyi b/typeshed/third_party/2and3/boto/s3/bucket.pyi
index 3b6efb6..beb1d27 100644
--- a/typeshed/third_party/2and3/boto/s3/bucket.pyi
+++ b/typeshed/third_party/2and3/boto/s3/bucket.pyi
@@ -6,7 +6,7 @@ from .bucketlistresultset import BucketListResultSet
from .connection import S3Connection
from .key import Key
-from typing import Any, Dict, Optional, Text, Type
+from typing import Any, Dict, Optional, Text, Type, List
class S3WebsiteEndpointTranslate:
trans_region = ... # type: Dict[str, str]
diff --git a/typeshed/third_party/2and3/characteristic/__init__.pyi b/typeshed/third_party/2and3/characteristic/__init__.pyi
index 07e0f60..f2903b9 100644
--- a/typeshed/third_party/2and3/characteristic/__init__.pyi
+++ b/typeshed/third_party/2and3/characteristic/__init__.pyi
@@ -9,7 +9,7 @@ def strip_leading_underscores(attribute_name: AnyStr) -> AnyStr: ...
NOTHING = Any
-T = TypeVar('T')
+_T = TypeVar('_T')
def attributes(
attrs: Sequence[Union[AnyStr, Attribute]],
@@ -18,7 +18,7 @@ def attributes(
apply_with_repr: bool = True,
apply_immutable: bool = False,
store_attributes: Optional[Callable[[type, Attribute], Any]] = None,
- **kw: Optional[dict]) -> Callable[[Type[T]], Type[T]]: ...
+ **kw: Optional[dict]) -> Callable[[Type[_T]], Type[_T]]: ...
class Attribute:
def __init__(
diff --git a/typeshed/third_party/2and3/jinja2/environment.pyi b/typeshed/third_party/2and3/jinja2/environment.pyi
index 559369a..b3ea1d7 100644
--- a/typeshed/third_party/2and3/jinja2/environment.pyi
+++ b/typeshed/third_party/2and3/jinja2/environment.pyi
@@ -71,6 +71,16 @@ class Environment:
def from_string(self, source: Text, globals: Optional[Dict[str, Any]] = ..., template_class: Optional[Type[Template]] = ...) -> Template: ...
def make_globals(self, d: Optional[Dict[str, Any]]) -> Dict[str, Any]: ...
+ # Frequently added extensions are included here:
+ # from InternationalizationExtension:
+ def install_gettext_translations(self, translations: Any, newstyle: Optional[bool]): ...
+ def install_null_translations(self, newstyle: Optional[bool]): ...
+ def install_gettext_callables(self, gettext: Callable, ngettext: Callable,
+ newstyle: Optional[bool]): ...
+ def uninstall_gettext_translations(self, translations: Any): ...
+ def extract_translations(self, source: Any, gettext_functions: Any): ...
+ newstyle_gettext = ... # type: bool
+
class Template:
def __new__(cls, source, block_start_string: Any = ..., block_end_string: Any = ..., variable_start_string: Any = ..., variable_end_string: Any = ..., comment_start_string: Any = ..., comment_end_string: Any = ..., line_statement_prefix: Any = ..., line_comment_prefix: Any = ..., trim_blocks: Any = ..., lstrip_blocks: Any = ..., newline_sequence: Any = ..., keep_trailing_newline: Any = ..., extensions: Any = ..., optimized: bool = ..., undefined: Any = ..., finalize: Optional[Any] = [...]
@classmethod
diff --git a/typeshed/third_party/2and3/mypy_extensions.pyi b/typeshed/third_party/2and3/mypy_extensions.pyi
index 8b2ef3f..9810914 100644
--- a/typeshed/third_party/2and3/mypy_extensions.pyi
+++ b/typeshed/third_party/2and3/mypy_extensions.pyi
@@ -1,9 +1,15 @@
-from typing import Dict, Type, TypeVar, Union
+from typing import Dict, Type, TypeVar, Optional, Union
-T = TypeVar('T')
+_T = TypeVar('_T')
+def TypedDict(typename: str, fields: Dict[str, Type[_T]]) -> Type[dict]: ...
-def TypedDict(typename: str, fields: Dict[str, Type[T]]) -> Type[dict]: ...
+def Arg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
+def DefaultArg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
+def NamedArg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
+def DefaultNamedArg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
+def VarArg(type: _T = ...) -> _T: ...
+def KwArg(type: _T = ...) -> _T: ...
# Return type that indicates a function does not return.
# This type is equivalent to the None type, but the no-op Union is necessary to
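The new Arg/DefaultArg/NamedArg/VarArg/KwArg helpers are the mypy_extensions argument specifiers for writing precise Callable types. A minimal sketch of their intended use, not part of the diff itself:

    from typing import Callable
    from mypy_extensions import Arg, DefaultArg, VarArg, KwArg

    # A callable with one required int, an optional str, then *args: str and **kwargs: int.
    Handler = Callable[[Arg(int, 'code'), DefaultArg(str, 'msg'), VarArg(str), KwArg(int)], None]

    def dispatch(handler: Handler) -> None:
        handler(404, 'not found')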
diff --git a/typeshed/third_party/2and3/pytz/__init__.pyi b/typeshed/third_party/2and3/pytz/__init__.pyi
index 6bc917e..2c2accd 100644
--- a/typeshed/third_party/2and3/pytz/__init__.pyi
+++ b/typeshed/third_party/2and3/pytz/__init__.pyi
@@ -1,7 +1,7 @@
# Stubs for pytz (Python 3.5)
-import datetime as dt
-from typing import Optional, List, Set, Dict # NOQA
+import datetime
+from typing import Optional, List, Set, Dict, Union
all_timezones = ... # type: List
all_timezones_set = ... # type: Set
@@ -11,16 +11,29 @@ country_timezones = ... # type: Dict
country_names = ... # type: Dict
-class _UTCclass(dt.tzinfo):
+class _UTCclass(datetime.tzinfo):
zone = ... # type: str
- def fromutc(self, dt: dt.datetime) -> dt.datetime: ...
- def utcoffset(self, dt: Optional[dt.datetime]) -> dt.timedelta: ... # type: ignore
- def tzname(self, dt: Optional[dt.datetime]) -> str: ...
- def dst(self, dt: Optional[dt.datetime]) -> dt.timedelta: ... # type: ignore
- def localize(self, dt: dt.datetime, is_dst: bool=...) -> dt.datetime: ...
- def normalize(self, dt: dt.datetime, is_dst: bool=...) -> dt.datetime: ...
+ def fromutc(self, dt: datetime.datetime) -> datetime.datetime: ...
+ def utcoffset(self, dt: Optional[datetime.datetime]) -> datetime.timedelta: ... # type: ignore
+ def tzname(self, dt: Optional[datetime.datetime]) -> str: ...
+ def dst(self, dt: Optional[datetime.datetime]) -> datetime.timedelta: ... # type: ignore
+ def localize(self, dt: datetime.datetime, is_dst: bool = ...) -> datetime.datetime: ...
+ def normalize(self, dt: datetime.datetime, is_dst: bool = ...) -> datetime.datetime: ...
utc = ... # type: _UTCclass
UTC = ... # type: _UTCclass
-def timezone(zone: str) -> dt.tzinfo: ...
+
+class _BaseTzInfo(datetime.tzinfo):
+ zone = ... # type: str
+
+ def fromutc(self, dt: datetime.datetime) -> datetime.datetime: ...
+ def localize(self, dt: datetime.datetime, is_dst: Optional[bool] = ...) -> datetime.datetime: ...
+ def normalize(self, dt: datetime.datetime) -> datetime.datetime: ...
+
+
+class _StaticTzInfo(_BaseTzInfo):
+ def normalize(self, dt: datetime.datetime, is_dst: Optional[bool] = ...) -> datetime.datetime: ...
+
+
+def timezone(zone: str) -> _BaseTzInfo: ...
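With timezone() now returning the new _BaseTzInfo, the pytz-specific localize() and normalize() methods become visible to mypy. A brief hedged example of that API:

    import datetime
    import pytz

    paris = pytz.timezone('Europe/Paris')
    aware = paris.localize(datetime.datetime(2017, 6, 24, 12, 0))   # attach the zone, DST-aware
    print(aware.astimezone(pytz.utc))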
diff --git a/typeshed/third_party/3/requests/__init__.pyi b/typeshed/third_party/2and3/requests/__init__.pyi
similarity index 100%
rename from typeshed/third_party/3/requests/__init__.pyi
rename to typeshed/third_party/2and3/requests/__init__.pyi
diff --git a/typeshed/third_party/3/requests/adapters.pyi b/typeshed/third_party/2and3/requests/adapters.pyi
similarity index 91%
rename from typeshed/third_party/3/requests/adapters.pyi
rename to typeshed/third_party/2and3/requests/adapters.pyi
index d896f1a..81d0212 100644
--- a/typeshed/third_party/3/requests/adapters.pyi
+++ b/typeshed/third_party/2and3/requests/adapters.pyi
@@ -1,6 +1,6 @@
# Stubs for requests.adapters (Python 3)
-from typing import Any, Container, Union, Tuple
+from typing import Any, Container, Union, Text, Tuple
from . import models
from .packages.urllib3 import poolmanager
from .packages.urllib3 import response
@@ -44,10 +44,10 @@ DEFAULT_RETRIES = ... # type: Any
class BaseAdapter:
def __init__(self) -> None: ...
- def send(self, request: PreparedRequest, stream=False,
+ def send(self, request: PreparedRequest, stream: bool=False,
timeout: Union[None, float, Tuple[float, float]]=None,
- verify=False,
- cert: Union[None, Union[str, bytes], Container[Union[str, bytes]]]=None
+ verify: bool=False,
+ cert: Union[None, Union[bytes, Text], Container[Union[bytes, Text]]]=None
) -> Response: ...
def close(self) -> None: ...
class HTTPAdapter(BaseAdapter):
diff --git a/typeshed/third_party/2and3/requests/api.pyi b/typeshed/third_party/2and3/requests/api.pyi
new file mode 100644
index 0000000..85afdc5
--- /dev/null
+++ b/typeshed/third_party/2and3/requests/api.pyi
@@ -0,0 +1,26 @@
+# Stubs for requests.api (Python 3)
+
+from typing import Optional, Union, Any, Iterable, Mapping, MutableMapping, Tuple, IO, Text
+
+from .models import Response
+
+_ParamsMappingValueType = Union[Text, bytes, int, float, Iterable[Union[Text, bytes, int, float]]]
+_Data = Union[None, bytes, MutableMapping[Text, Text], IO]
+
+def request(method: str, url: str, **kwargs) -> Response: ...
+def get(url: Union[Text, bytes],
+ params: Optional[
+ Union[Mapping[Union[Text, bytes, int, float], _ParamsMappingValueType],
+ Union[Text, bytes],
+ Tuple[Union[Text, bytes, int, float], _ParamsMappingValueType],
+ Mapping[Text, _ParamsMappingValueType],
+ Mapping[bytes, _ParamsMappingValueType],
+ Mapping[int, _ParamsMappingValueType],
+ Mapping[float, _ParamsMappingValueType]]] = None,
+ **kwargs) -> Response: ...
+def options(url: Union[str, Text], **kwargs) -> Response: ...
+def head(url: Union[str, Text], **kwargs) -> Response: ...
+def post(url: Union[str, Text], data: _Data = ..., json: Optional[MutableMapping] = ..., **kwargs) -> Response: ...
+def put(url: Union[str, Text], data: _Data = ..., **kwargs) -> Response: ...
+def patch(url: Union[str, Text], data: _Data = ..., **kwargs) -> Response: ...
+def delete(url: Union[str, Text], **kwargs) -> Response: ...
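The kind of call the new requests.api stub is meant to cover, shown as a minimal sketch with placeholder URLs (not part of the patch):

    import requests

    r = requests.get('https://httpbin.org/get', params={'q': 'mypy', 'page': 1})
    r2 = requests.post('https://httpbin.org/post', data={'key': 'value'})
    print(r.status_code, r2.status_code)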
diff --git a/typeshed/third_party/2/requests/auth.pyi b/typeshed/third_party/2and3/requests/auth.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/auth.pyi
rename to typeshed/third_party/2and3/requests/auth.pyi
diff --git a/typeshed/third_party/2/requests/compat.pyi b/typeshed/third_party/2and3/requests/compat.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/compat.pyi
rename to typeshed/third_party/2and3/requests/compat.pyi
diff --git a/typeshed/third_party/3/requests/cookies.pyi b/typeshed/third_party/2and3/requests/cookies.pyi
similarity index 91%
rename from typeshed/third_party/3/requests/cookies.pyi
rename to typeshed/third_party/2and3/requests/cookies.pyi
index f3bd57e..6665021 100644
--- a/typeshed/third_party/3/requests/cookies.pyi
+++ b/typeshed/third_party/2and3/requests/cookies.pyi
@@ -1,12 +1,14 @@
# Stubs for requests.cookies (Python 3)
+import sys
from typing import Any, MutableMapping
-# import cookielib
-from http import cookiejar as cookielib
import collections
from . import compat
-# cookielib = compat.cookielib
+if sys.version_info < (3, 0):
+ from cookielib import CookieJar
+else:
+ from http.cookiejar import CookieJar
class MockRequest:
type = ... # type: Any
@@ -39,7 +41,7 @@ def remove_cookie_by_name(cookiejar, name, domain=..., path=...): ...
class CookieConflictError(RuntimeError): ...
-class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
+class RequestsCookieJar(CookieJar, MutableMapping):
def get(self, name, default=..., domain=..., path=...): ...
def set(self, name, value, **kwargs): ...
def iterkeys(self): ...
diff --git a/typeshed/third_party/2/requests/exceptions.pyi b/typeshed/third_party/2and3/requests/exceptions.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/exceptions.pyi
rename to typeshed/third_party/2and3/requests/exceptions.pyi
diff --git a/typeshed/third_party/2/requests/hooks.pyi b/typeshed/third_party/2and3/requests/hooks.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/hooks.pyi
rename to typeshed/third_party/2and3/requests/hooks.pyi
diff --git a/typeshed/third_party/3/requests/models.pyi b/typeshed/third_party/2and3/requests/models.pyi
similarity index 95%
rename from typeshed/third_party/3/requests/models.pyi
rename to typeshed/third_party/2and3/requests/models.pyi
index 7789899..265670f 100644
--- a/typeshed/third_party/3/requests/models.pyi
+++ b/typeshed/third_party/2and3/requests/models.pyi
@@ -1,6 +1,6 @@
# Stubs for requests.models (Python 3)
-from typing import Any, List, MutableMapping, Iterator, Dict
+from typing import Any, List, MutableMapping, Iterator, Dict, Text
import datetime
from . import hooks
@@ -80,10 +80,10 @@ class Request(RequestHooksMixin):
def prepare(self): ...
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
- method = ... # type: Optional[str]
- url = ... # type: Optional[str]
+ method = ... # type: Optional[Union[str, Text]]
+ url = ... # type: Optional[Union[str, Text]]
headers = ... # type: CaseInsensitiveDict
- body = ... # type: Optional[Union[str, bytes]]
+ body = ... # type: Optional[Union[bytes, Text]]
hooks = ... # type: Any
def __init__(self) -> None: ...
def prepare(self, method=..., url=..., headers=..., files=..., data=..., params=...,
diff --git a/typeshed/third_party/2/requests/packages/__init__.pyi b/typeshed/third_party/2and3/requests/packages/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/__init__.pyi
rename to typeshed/third_party/2and3/requests/packages/__init__.pyi
diff --git a/typeshed/third_party/3/requests/packages/urllib3/__init__.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/__init__.pyi
similarity index 100%
rename from typeshed/third_party/3/requests/packages/urllib3/__init__.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/__init__.pyi
diff --git a/typeshed/third_party/2/requests/packages/urllib3/_collections.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/_collections.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/urllib3/_collections.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/_collections.pyi
diff --git a/typeshed/third_party/3/requests/packages/urllib3/connection.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/connection.pyi
similarity index 81%
rename from typeshed/third_party/3/requests/packages/urllib3/connection.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/connection.pyi
index 77e4c42..3d656d6 100644
--- a/typeshed/third_party/3/requests/packages/urllib3/connection.pyi
+++ b/typeshed/third_party/2and3/requests/packages/urllib3/connection.pyi
@@ -1,21 +1,28 @@
# Stubs for requests.packages.urllib3.connection (Python 3.4)
+import sys
from typing import Any
from . import packages
-from http.client import HTTPConnection as _HTTPConnection
import ssl
-# from httplib import HTTPConnection as _HTTPConnection # python 2
from . import exceptions
from .packages import ssl_match_hostname
from .util import ssl_
from . import util
-import http.client
+
+if sys.version_info < (3, 0):
+ from httplib import HTTPConnection as _HTTPConnection
+ from httplib import HTTPException as HTTPException
+
+ class ConnectionError(Exception): ...
+else:
+ from http.client import HTTPConnection as _HTTPConnection
+ from http.client import HTTPException as HTTPException
+ from builtins import ConnectionError as ConnectionError
+
class DummyConnection: ...
BaseSSLError = ssl.SSLError
-ConnectionError = __builtins__.ConnectionError
-HTTPException = http.client.HTTPException
ConnectTimeoutError = exceptions.ConnectTimeoutError
SystemTimeWarning = exceptions.SystemTimeWarning
@@ -61,4 +68,4 @@ class VerifiedHTTPSConnection(HTTPSConnection):
is_verified = ... # type: Any
def connect(self): ...
-UnverifiedHTTPSConnection = ... # type: Any
+UnverifiedHTTPSConnection = HTTPSConnection
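The sys.version_info branch above is the usual idiom for stubs shared between Python 2 and 3, and the same pattern works in ordinary code that mypy checks under both versions. A small hedged sketch:

    import sys

    if sys.version_info < (3, 0):
        from httplib import HTTPConnection        # Python 2 name
    else:
        from http.client import HTTPConnection    # Python 3 name

    conn = HTTPConnection('example.com')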
diff --git a/typeshed/third_party/3/requests/packages/urllib3/connectionpool.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/connectionpool.pyi
similarity index 100%
rename from typeshed/third_party/3/requests/packages/urllib3/connectionpool.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/connectionpool.pyi
diff --git a/typeshed/third_party/2/requests/packages/urllib3/contrib/__init__.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/contrib/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/urllib3/contrib/__init__.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/contrib/__init__.pyi
diff --git a/typeshed/third_party/2/requests/packages/urllib3/exceptions.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/exceptions.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/urllib3/exceptions.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/exceptions.pyi
diff --git a/typeshed/third_party/2/requests/packages/urllib3/fields.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/fields.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/urllib3/fields.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/fields.pyi
diff --git a/typeshed/third_party/2/requests/packages/urllib3/filepost.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/filepost.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/urllib3/filepost.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/filepost.pyi
diff --git a/typeshed/third_party/2/requests/packages/urllib3/packages/__init__.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/packages/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/urllib3/packages/__init__.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/packages/__init__.pyi
diff --git a/typeshed/third_party/3/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi
similarity index 100%
rename from typeshed/third_party/3/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi
diff --git a/typeshed/third_party/2/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi
diff --git a/typeshed/third_party/2/requests/packages/urllib3/poolmanager.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/poolmanager.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/urllib3/poolmanager.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/poolmanager.pyi
diff --git a/typeshed/third_party/2/requests/packages/urllib3/request.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/request.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/urllib3/request.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/request.pyi
diff --git a/typeshed/third_party/3/requests/packages/urllib3/response.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/response.pyi
similarity index 95%
rename from typeshed/third_party/3/requests/packages/urllib3/response.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/response.pyi
index ba2a801..b6f418a 100644
--- a/typeshed/third_party/3/requests/packages/urllib3/response.pyi
+++ b/typeshed/third_party/2and3/requests/packages/urllib3/response.pyi
@@ -2,11 +2,9 @@
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
-from typing import Any
-import io
+from typing import Any, IO
from . import _collections
from . import exceptions
-# from .packages import six
from .connection import HTTPException as HTTPException, BaseSSLError as BaseSSLError
from .util import response
@@ -28,7 +26,7 @@ class GzipDecoder:
def __getattr__(self, name): ...
def decompress(self, data): ...
-class HTTPResponse(io.IOBase):
+class HTTPResponse(IO[Any]):
CONTENT_DECODERS = ... # type: Any
REDIRECT_STATUSES = ... # type: Any
headers = ... # type: Any
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/__init__.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/util/__init__.pyi
similarity index 100%
rename from typeshed/third_party/3/requests/packages/urllib3/util/__init__.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/util/__init__.pyi
diff --git a/typeshed/third_party/2/requests/packages/urllib3/util/connection.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/util/connection.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/urllib3/util/connection.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/util/connection.pyi
diff --git a/typeshed/third_party/2/requests/packages/urllib3/util/request.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/util/request.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/urllib3/util/request.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/util/request.pyi
diff --git a/typeshed/third_party/2/requests/packages/urllib3/util/response.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/util/response.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/urllib3/util/response.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/util/response.pyi
diff --git a/typeshed/third_party/2/requests/packages/urllib3/util/retry.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/util/retry.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/urllib3/util/retry.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/util/retry.pyi
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/ssl_.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/util/ssl_.pyi
similarity index 100%
rename from typeshed/third_party/3/requests/packages/urllib3/util/ssl_.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/util/ssl_.pyi
diff --git a/typeshed/third_party/2/requests/packages/urllib3/util/timeout.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/util/timeout.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/urllib3/util/timeout.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/util/timeout.pyi
diff --git a/typeshed/third_party/2/requests/packages/urllib3/util/url.pyi b/typeshed/third_party/2and3/requests/packages/urllib3/util/url.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/packages/urllib3/util/url.pyi
rename to typeshed/third_party/2and3/requests/packages/urllib3/util/url.pyi
diff --git a/typeshed/third_party/3/requests/sessions.pyi b/typeshed/third_party/2and3/requests/sessions.pyi
similarity index 68%
rename from typeshed/third_party/3/requests/sessions.pyi
rename to typeshed/third_party/2and3/requests/sessions.pyi
index d1b79fc..252f7ee 100644
--- a/typeshed/third_party/3/requests/sessions.pyi
+++ b/typeshed/third_party/2and3/requests/sessions.pyi
@@ -55,6 +55,8 @@ class SessionRedirectMixin:
def rebuild_auth(self, prepared_request, response): ...
def rebuild_proxies(self, prepared_request, proxies): ...
+_Data = Union[None, bytes, MutableMapping[Text, Text], IO]
+
class Session(SessionRedirectMixin):
__attrs__ = ... # type: Any
headers = ... # type: Optional[MutableMapping[Text, Text]]
@@ -75,34 +77,34 @@ class Session(SessionRedirectMixin):
def __exit__(self, *args) -> None: ...
def prepare_request(self, request): ...
def request(self, method: str, url: str,
- params, # type: Union[None, bytes, MutableMapping[Text, Text]]
- data, # type: Union[None, bytes, MutableMapping[Text, Text], IO]
- headers, # type: Optional[MutableMapping[Text, Text]]
- cookies, # type: Union[None, RequestsCookieJar, MutableMapping[Text, Text]]
- files, # type: Optional[MutableMapping[Text, IO]]
- auth, # type: Union[None, Tuple[Text, Text], Callable[[Request], Request]]
- timeout, # type: Union[None, float, Tuple[float, float]]
- allow_redirects, # type: Optional[bool]
- proxies, # type: Optional[MutableMapping[Text, Text]]
- hooks, # type: Optional[MutableMapping[Text, Callable[[Request], Any]]]
- stream, # type: Optional[bool]
- verify, # type: Optional[bool]
- cert, # type: Union[Text, Tuple[Text, Text], None]
- json # type: Optional[MutableMapping]
+ params: Union[None, bytes, MutableMapping[Text, Text]] = ...,
+ data: _Data = ...,
+ headers: Optional[MutableMapping[Text, Text]] = ...,
+ cookies: Union[None, RequestsCookieJar, MutableMapping[Text, Text]] = ...,
+ files: Optional[MutableMapping[Text, IO]] = ...,
+ auth: Union[None, Tuple[Text, Text], Callable[[Request], Request]] = ...,
+ timeout: Union[None, float, Tuple[float, float]] = ...,
+ allow_redirects: Optional[bool] = ...,
+ proxies: Optional[MutableMapping[Text, Text]] = ...,
+ hooks: Optional[MutableMapping[Text, Callable[[Request], Any]]] = ...,
+ stream: Optional[bool] = ...,
+ verify: Optional[bool] = ...,
+ cert: Union[Text, Tuple[Text, Text], None] = ...,
+ json: Optional[MutableMapping] = ...,
) -> Response: ...
- def get(self, url: Union[str, bytes], **kwargs) -> Response: ...
- def options(self, url: Union[str, bytes], **kwargs) -> Response: ...
- def head(self, url: Union[str, bytes], **kwargs) -> Response: ...
- def post(self, url: Union[str, bytes], data=..., json=..., **kwargs) -> Response: ...
- def put(self, url: Union[str, bytes], data=..., **kwargs) -> Response: ...
- def patch(self, url: Union[str, bytes], data=..., **kwargs) -> Response: ...
- def delete(self, url: Union[str, bytes], **kwargs) -> Response: ...
+ def get(self, url: Union[Text, bytes], **kwargs) -> Response: ...
+ def options(self, url: Union[Text, bytes], **kwargs) -> Response: ...
+ def head(self, url: Union[Text, bytes], **kwargs) -> Response: ...
+ def post(self, url: Union[Text, bytes], data: _Data = ..., json: Optional[MutableMapping] = ..., **kwargs) -> Response: ...
+ def put(self, url: Union[Text, bytes], data: _Data = ..., **kwargs) -> Response: ...
+ def patch(self, url: Union[Text, bytes], data: _Data = ..., **kwargs) -> Response: ...
+ def delete(self, url: Union[Text, bytes], **kwargs) -> Response: ...
def send(self, request, **kwargs): ...
def merge_environment_settings(self, url, proxies, stream, verify, cert): ...
def get_adapter(self, url): ...
def close(self) -> None: ...
def mount(self, prefix:
- Union[str, bytes],
+ Union[Text, bytes],
adapter: BaseAdapter) -> None: ...
def session() -> Session: ...
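With Session.request() now declaring its parameters as keyword arguments with defaults, rather than positional comment-typed parameters, calls that pass only a few options type-check cleanly. An illustrative sketch with a placeholder URL:

    import requests

    with requests.Session() as s:
        resp = s.get('https://httpbin.org/get', timeout=(3.05, 10.0), allow_redirects=True)
        print(resp.status_code)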
diff --git a/typeshed/third_party/2/requests/status_codes.pyi b/typeshed/third_party/2and3/requests/status_codes.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/status_codes.pyi
rename to typeshed/third_party/2and3/requests/status_codes.pyi
diff --git a/typeshed/third_party/2/requests/structures.pyi b/typeshed/third_party/2and3/requests/structures.pyi
similarity index 51%
rename from typeshed/third_party/2/requests/structures.pyi
rename to typeshed/third_party/2and3/requests/structures.pyi
index a7f77b2..53af72d 100644
--- a/typeshed/third_party/2/requests/structures.pyi
+++ b/typeshed/third_party/2and3/requests/structures.pyi
@@ -1,9 +1,9 @@
# Stubs for requests.structures (Python 3)
-from typing import Any, Iterator, MutableMapping, Tuple, Union
+from typing import Any, Iterator, MutableMapping, Text, Tuple, Union
-class CaseInsensitiveDict(MutableMapping[str, Union[str, unicode]]):
- def lower_items(self) -> Iterator[Tuple[str, Union[str, unicode]]]: ...
+class CaseInsensitiveDict(MutableMapping[str, Union[Text, bytes]]):
+ def lower_items(self) -> Iterator[Tuple[str, Union[Text, bytes]]]: ...
class LookupDict(dict):
name = ... # type: Any
diff --git a/typeshed/third_party/2/requests/utils.pyi b/typeshed/third_party/2and3/requests/utils.pyi
similarity index 100%
rename from typeshed/third_party/2/requests/utils.pyi
rename to typeshed/third_party/2and3/requests/utils.pyi
diff --git a/typeshed/third_party/2and3/sqlalchemy/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/__init__.pyi
deleted file mode 100644
index 8d58e25..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/__init__.pyi
+++ /dev/null
@@ -1,124 +0,0 @@
-# Stubs for sqlalchemy (Python 2)
-
-from .sql import (
- alias,
- and_,
- asc,
- between,
- bindparam,
- case,
- cast,
- collate,
- column,
- delete,
- desc,
- distinct,
- except_,
- except_all,
- exists,
- extract,
- false,
- func,
- funcfilter,
- insert,
- intersect,
- intersect_all,
- join,
- literal,
- literal_column,
- modifier,
- not_,
- null,
- or_,
- outerjoin,
- outparam,
- over,
- select,
- subquery,
- table,
- text,
- true,
- tuple_,
- type_coerce,
- union,
- union_all,
- update,
-)
-
-from .types import (
- BIGINT,
- BINARY,
- BLOB,
- BOOLEAN,
- BigInteger,
- Binary,
- Boolean,
- CHAR,
- CLOB,
- DATE,
- DATETIME,
- DECIMAL,
- Date,
- DateTime,
- Enum,
- FLOAT,
- Float,
- INT,
- INTEGER,
- Integer,
- Interval,
- LargeBinary,
- NCHAR,
- NVARCHAR,
- NUMERIC,
- Numeric,
- PickleType,
- REAL,
- SMALLINT,
- SmallInteger,
- String,
- TEXT,
- TIME,
- TIMESTAMP,
- Text,
- Time,
- TypeDecorator,
- Unicode,
- UnicodeText,
- VARBINARY,
- VARCHAR,
-)
-
-from .schema import (
- CheckConstraint,
- Column,
- ColumnDefault,
- Constraint,
- DefaultClause,
- FetchedValue,
- ForeignKey,
- ForeignKeyConstraint,
- Index,
- MetaData,
- PassiveDefault,
- PrimaryKeyConstraint,
- Sequence,
- Table,
- ThreadLocalMetaData,
- UniqueConstraint,
- DDL,
-)
-
-from . import sql as sql
-from . import schema as schema
-from . import types as types
-from . import exc as exc
-from . import dialects as dialects
-from . import pool as pool
-# This should re-export orm but orm is totally broken right now
-# from . import orm as orm
-
-from .inspection import inspect
-from .engine import create_engine, engine_from_config
-
-__version__ = ... # type: int
diff --git a/typeshed/third_party/2and3/sqlalchemy/databases/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/databases/__init__.pyi
deleted file mode 100644
index b1ac4a4..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/databases/__init__.pyi
+++ /dev/null
@@ -1,12 +0,0 @@
-# Stubs for sqlalchemy.databases (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-# Names in __all__ with no definition:
-# firebird
-# mssql
-# mysql
-# oracle
-# postgresql
-# sqlite
-# sybase
diff --git a/typeshed/third_party/2and3/sqlalchemy/databases/mysql.pyi b/typeshed/third_party/2and3/sqlalchemy/databases/mysql.pyi
deleted file mode 100644
index 4cda14f..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/databases/mysql.pyi
+++ /dev/null
@@ -1 +0,0 @@
-from sqlalchemy.dialects.mysql.base import * # noqa: F403
diff --git a/typeshed/third_party/2and3/sqlalchemy/dialects/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/dialects/__init__.pyi
deleted file mode 100644
index 2d261de..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/dialects/__init__.pyi
+++ /dev/null
@@ -1,12 +0,0 @@
-# Stubs for sqlalchemy.dialects (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-# Names in __all__ with no definition:
-# firebird
-# mssql
-# mysql
-# oracle
-# postgresql
-# sqlite
-# sybase
diff --git a/typeshed/third_party/2and3/sqlalchemy/dialects/mysql/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/dialects/mysql/__init__.pyi
deleted file mode 100644
index 5a0a4bf..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/dialects/mysql/__init__.pyi
+++ /dev/null
@@ -1,40 +0,0 @@
-# Stubs for sqlalchemy.dialects.mysql (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from . import base
-
-BIGINT = base.BIGINT
-BINARY = base.BINARY
-BIT = base.BIT
-BLOB = base.BLOB
-BOOLEAN = base.BOOLEAN
-CHAR = base.CHAR
-DATE = base.DATE
-DATETIME = base.DATETIME
-DECIMAL = base.DECIMAL
-DOUBLE = base.DOUBLE
-ENUM = base.ENUM
-FLOAT = base.FLOAT
-INTEGER = base.INTEGER
-LONGBLOB = base.LONGBLOB
-LONGTEXT = base.LONGTEXT
-MEDIUMBLOB = base.MEDIUMBLOB
-MEDIUMINT = base.MEDIUMINT
-MEDIUMTEXT = base.MEDIUMTEXT
-NCHAR = base.NCHAR
-NVARCHAR = base.NVARCHAR
-NUMERIC = base.NUMERIC
-SET = base.SET
-SMALLINT = base.SMALLINT
-REAL = base.REAL
-TEXT = base.TEXT
-TIME = base.TIME
-TIMESTAMP = base.TIMESTAMP
-TINYBLOB = base.TINYBLOB
-TINYINT = base.TINYINT
-TINYTEXT = base.TINYTEXT
-VARBINARY = base.VARBINARY
-VARCHAR = base.VARCHAR
-YEAR = base.YEAR
-# dialect = base.dialect
diff --git a/typeshed/third_party/2and3/sqlalchemy/dialects/mysql/base.pyi b/typeshed/third_party/2and3/sqlalchemy/dialects/mysql/base.pyi
deleted file mode 100644
index 51b8b26..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/dialects/mysql/base.pyi
+++ /dev/null
@@ -1,413 +0,0 @@
-# Stubs for sqlalchemy.dialects.mysql.base (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-from ... import sql
-from ... import engine
-from ... import util
-from ... import types
-
-sqltypes = sql.sqltypes
-# compiler = sql.compiler
-# reflection = engine.reflection
-# default = engine.default
-# topological = util.topological
-DATE = types.DATE
-BOOLEAN = types.BOOLEAN
-BLOB = types.BLOB
-BINARY = types.BINARY
-VARBINARY = types.VARBINARY
-
-RESERVED_WORDS = ... # type: Any
-AUTOCOMMIT_RE = ... # type: Any
-SET_RE = ... # type: Any
-
-class _NumericType:
- unsigned = ... # type: Any
- zerofill = ... # type: Any
- def __init__(self, unsigned=..., zerofill=..., **kw) -> None: ...
-
-class _FloatType(_NumericType,
- # sqltypes.Float
- ):
- scale = ... # type: Any
- def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
-
-class _IntegerType(_NumericType,
- # sqltypes.Integer
- ):
- display_width = ... # type: Any
- def __init__(self, display_width=..., **kw) -> None: ...
-
-class _StringType(object,
- # sqltypes.String
- ):
- charset = ... # type: Any
- ascii = ... # type: Any
- unicode = ... # type: Any
- binary = ... # type: Any
- national = ... # type: Any
- def __init__(self, charset=..., collation=..., ascii=..., binary=..., unicode=..., national=..., **kw) -> None: ...
-
-class _MatchType(object,
- # sqltypes.Float,
- # sqltypes.MatchType
- ):
- def __init__(self, **kw) -> None: ...
-
-class NUMERIC(_NumericType,
- # sqltypes.NUMERIC
- ):
- __visit_name__ = ... # type: Any
- def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
-
-class DECIMAL(_NumericType,
- # sqltypes.DECIMAL
- ):
- __visit_name__ = ... # type: Any
- def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
-
-class DOUBLE(_FloatType):
- __visit_name__ = ... # type: Any
- def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
-
-class REAL(_FloatType,
- # sqltypes.REAL
- ):
- __visit_name__ = ... # type: Any
- def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
-
-class FLOAT(_FloatType,
- # sqltypes.FLOAT
- ):
- __visit_name__ = ... # type: Any
- def __init__(self, precision=..., scale=..., asdecimal=..., **kw) -> None: ...
- def bind_processor(self, dialect): ...
-
-class INTEGER(_IntegerType,
- # sqltypes.INTEGER
- ):
- __visit_name__ = ... # type: Any
- def __init__(self, display_width=..., **kw) -> None: ...
-
-class BIGINT(_IntegerType,
- # sqltypes.BIGINT
- ):
- __visit_name__ = ... # type: Any
- def __init__(self, display_width=..., **kw) -> None: ...
-
-class MEDIUMINT(_IntegerType):
- __visit_name__ = ... # type: Any
- def __init__(self, display_width=..., **kw) -> None: ...
-
-class TINYINT(_IntegerType):
- __visit_name__ = ... # type: Any
- def __init__(self, display_width=..., **kw) -> None: ...
-
-class SMALLINT(_IntegerType,
- # sqltypes.SMALLINT
- ):
- __visit_name__ = ... # type: Any
- def __init__(self, display_width=..., **kw) -> None: ...
-
-class BIT(object,
- # sqltypes.TypeEngine
- ):
- __visit_name__ = ... # type: Any
- length = ... # type: Any
- def __init__(self, length=...) -> None: ...
- def result_processor(self, dialect, coltype): ...
-
-class TIME(object,
- # sqltypes.TIME
- ):
- __visit_name__ = ... # type: Any
- fsp = ... # type: Any
- def __init__(self, timezone=..., fsp=...) -> None: ...
- def result_processor(self, dialect, coltype): ...
-
-class TIMESTAMP(object,
- # sqltypes.TIMESTAMP
- ):
- __visit_name__ = ... # type: Any
- fsp = ... # type: Any
- def __init__(self, timezone=..., fsp=...) -> None: ...
-
-class DATETIME(object,
- # sqltypes.DATETIME
- ):
- __visit_name__ = ... # type: Any
- fsp = ... # type: Any
- def __init__(self, timezone=..., fsp=...) -> None: ...
-
-class YEAR(object,
- # sqltypes.TypeEngine
- ):
- __visit_name__ = ... # type: Any
- display_width = ... # type: Any
- def __init__(self, display_width=...) -> None: ...
-
-class TEXT(_StringType,
- # sqltypes.TEXT
- ):
- __visit_name__ = ... # type: Any
- def __init__(self, length=..., **kw) -> None: ...
-
-class TINYTEXT(_StringType):
- __visit_name__ = ... # type: Any
- def __init__(self, **kwargs) -> None: ...
-
-class MEDIUMTEXT(_StringType):
- __visit_name__ = ... # type: Any
- def __init__(self, **kwargs) -> None: ...
-
-class LONGTEXT(_StringType):
- __visit_name__ = ... # type: Any
- def __init__(self, **kwargs) -> None: ...
-
-class VARCHAR(_StringType,
- # sqltypes.VARCHAR
- ):
- __visit_name__ = ... # type: Any
- def __init__(self, length=..., **kwargs) -> None: ...
-
-class CHAR(_StringType,
- # sqltypes.CHAR
- ):
- __visit_name__ = ... # type: Any
- def __init__(self, length=..., **kwargs) -> None: ...
-
-class NVARCHAR(_StringType,
- # sqltypes.NVARCHAR
- ):
- __visit_name__ = ... # type: Any
- def __init__(self, length=..., **kwargs) -> None: ...
-
-class NCHAR(_StringType,
- # sqltypes.NCHAR
- ):
- __visit_name__ = ... # type: Any
- def __init__(self, length=..., **kwargs) -> None: ...
-
-class TINYBLOB(object,
- # sqltypes._Binary
- ):
- __visit_name__ = ... # type: Any
-
-class MEDIUMBLOB(object,
- # sqltypes._Binary
- ):
- __visit_name__ = ... # type: Any
-
-class LONGBLOB(object,
- # sqltypes._Binary
- ):
- __visit_name__ = ... # type: Any
-
-class _EnumeratedValues(_StringType): ...
-
-class ENUM( # sqltypes.Enum,
- _EnumeratedValues
-):
- __visit_name__ = ... # type: Any
- strict = ... # type: Any
- def __init__(self, *enums, **kw) -> None: ...
- def bind_processor(self, dialect): ...
- def adapt(self, cls, **kw): ...
-
-class SET(_EnumeratedValues):
- __visit_name__ = ... # type: Any
- retrieve_as_bitwise = ... # type: Any
- values = ... # type: Any
- def __init__(self, *values, **kw) -> None: ...
- def column_expression(self, colexpr): ...
- def result_processor(self, dialect, coltype): ...
- def bind_processor(self, dialect): ...
- def adapt(self, impltype, **kw): ...
-
-MSTime = ... # type: Any
-MSSet = ... # type: Any
-MSEnum = ... # type: Any
-MSLongBlob = ... # type: Any
-MSMediumBlob = ... # type: Any
-MSTinyBlob = ... # type: Any
-MSBlob = ... # type: Any
-MSBinary = ... # type: Any
-MSVarBinary = ... # type: Any
-MSNChar = ... # type: Any
-MSNVarChar = ... # type: Any
-MSChar = ... # type: Any
-MSString = ... # type: Any
-MSLongText = ... # type: Any
-MSMediumText = ... # type: Any
-MSTinyText = ... # type: Any
-MSText = ... # type: Any
-MSYear = ... # type: Any
-MSTimeStamp = ... # type: Any
-MSBit = ... # type: Any
-MSSmallInteger = ... # type: Any
-MSTinyInteger = ... # type: Any
-MSMediumInteger = ... # type: Any
-MSBigInteger = ... # type: Any
-MSNumeric = ... # type: Any
-MSDecimal = ... # type: Any
-MSDouble = ... # type: Any
-MSReal = ... # type: Any
-MSFloat = ... # type: Any
-MSInteger = ... # type: Any
-colspecs = ... # type: Any
-ischema_names = ... # type: Any
-
-class MySQLExecutionContext(object,
- # default.DefaultExecutionContext
- ):
- def should_autocommit_text(self, statement): ...
-
-class MySQLCompiler(object,
- # compiler.SQLCompiler
- ):
- render_table_with_column_in_update_from = ... # type: Any
- extract_map = ... # type: Any
- def visit_random_func(self, fn, **kw): ...
- def visit_utc_timestamp_func(self, fn, **kw): ...
- def visit_sysdate_func(self, fn, **kw): ...
- def visit_concat_op_binary(self, binary, operator, **kw): ...
- def visit_match_op_binary(self, binary, operator, **kw): ...
- def get_from_hint_text(self, table, text): ...
- def visit_typeclause(self, typeclause, type_=...): ...
- def visit_cast(self, cast, **kwargs): ...
- def render_literal_value(self, value, type_): ...
- def visit_true(self, element, **kw): ...
- def visit_false(self, element, **kw): ...
- def get_select_precolumns(self, select, **kw): ...
- def visit_join(self, join, asfrom=..., **kwargs): ...
- def for_update_clause(self, select, **kw): ...
- def limit_clause(self, select, **kw): ...
- def update_limit_clause(self, update_stmt): ...
- def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): ...
- def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw): ...
-
-class MySQLDDLCompiler(object,
- # compiler.DDLCompiler
- ):
- def create_table_constraints(self, table, **kw): ...
- def get_column_specification(self, column, **kw): ...
- def post_create_table(self, table): ...
- def visit_create_index(self, create): ...
- def visit_primary_key_constraint(self, constraint): ...
- def visit_drop_index(self, drop): ...
- def visit_drop_constraint(self, drop): ...
- def define_constraint_match(self, constraint): ...
-
-class MySQLTypeCompiler(object,
- # compiler.GenericTypeCompiler
- ):
- def visit_NUMERIC(self, type_, **kw): ...
- def visit_DECIMAL(self, type_, **kw): ...
- def visit_DOUBLE(self, type_, **kw): ...
- def visit_REAL(self, type_, **kw): ...
- def visit_FLOAT(self, type_, **kw): ...
- def visit_INTEGER(self, type_, **kw): ...
- def visit_BIGINT(self, type_, **kw): ...
- def visit_MEDIUMINT(self, type_, **kw): ...
- def visit_TINYINT(self, type_, **kw): ...
- def visit_SMALLINT(self, type_, **kw): ...
- def visit_BIT(self, type_, **kw): ...
- def visit_DATETIME(self, type_, **kw): ...
- def visit_DATE(self, type_, **kw): ...
- def visit_TIME(self, type_, **kw): ...
- def visit_TIMESTAMP(self, type_, **kw): ...
- def visit_YEAR(self, type_, **kw): ...
- def visit_TEXT(self, type_, **kw): ...
- def visit_TINYTEXT(self, type_, **kw): ...
- def visit_MEDIUMTEXT(self, type_, **kw): ...
- def visit_LONGTEXT(self, type_, **kw): ...
- def visit_VARCHAR(self, type_, **kw): ...
- def visit_CHAR(self, type_, **kw): ...
- def visit_NVARCHAR(self, type_, **kw): ...
- def visit_NCHAR(self, type_, **kw): ...
- def visit_VARBINARY(self, type_, **kw): ...
- def visit_large_binary(self, type_, **kw): ...
- def visit_enum(self, type_, **kw): ...
- def visit_BLOB(self, type_, **kw): ...
- def visit_TINYBLOB(self, type_, **kw): ...
- def visit_MEDIUMBLOB(self, type_, **kw): ...
- def visit_LONGBLOB(self, type_, **kw): ...
- def visit_ENUM(self, type_, **kw): ...
- def visit_SET(self, type_, **kw): ...
- def visit_BOOLEAN(self, type, **kw): ...
-
-class MySQLIdentifierPreparer(object,
- # compiler.IdentifierPreparer
- ):
- reserved_words = ... # type: Any
- def __init__(self, dialect, server_ansiquotes=..., **kw) -> None: ...
-
-class MySQLDialect(object,
- # default.DefaultDialect
- ):
- name = ... # type: Any
- supports_alter = ... # type: Any
- supports_native_boolean = ... # type: Any
- max_identifier_length = ... # type: Any
- max_index_name_length = ... # type: Any
- supports_native_enum = ... # type: Any
- supports_sane_rowcount = ... # type: Any
- supports_sane_multi_rowcount = ... # type: Any
- supports_multivalues_insert = ... # type: Any
- default_paramstyle = ... # type: Any
- colspecs = ... # type: Any
- statement_compiler = ... # type: Any
- ddl_compiler = ... # type: Any
- type_compiler = ... # type: Any
- ischema_names = ... # type: Any
- preparer = ... # type: Any
- construct_arguments = ... # type: Any
- isolation_level = ... # type: Any
- def __init__(self, isolation_level=..., **kwargs) -> None: ...
- def on_connect(self): ...
- def set_isolation_level(self, connection, level): ...
- def get_isolation_level(self, connection): ...
- def do_commit(self, dbapi_connection): ...
- def do_rollback(self, dbapi_connection): ...
- def do_begin_twophase(self, connection, xid): ...
- def do_prepare_twophase(self, connection, xid): ...
- def do_rollback_twophase(self, connection, xid, is_prepared=..., recover=...): ...
- def do_commit_twophase(self, connection, xid, is_prepared=..., recover=...): ...
- def do_recover_twophase(self, connection): ...
- def is_disconnect(self, e, connection, cursor): ...
- def has_table(self, connection, table_name, schema=...): ...
- identifier_preparer = ... # type: Any
- def initialize(self, connection): ...
- def get_schema_names(self, connection, **kw): ...
- def get_table_names(self, connection, schema=..., **kw): ...
- def get_view_names(self, connection, schema=..., **kw): ...
- def get_table_options(self, connection, table_name, schema=..., **kw): ...
- def get_columns(self, connection, table_name, schema=..., **kw): ...
- def get_pk_constraint(self, connection, table_name, schema=..., **kw): ...
- def get_foreign_keys(self, connection, table_name, schema=..., **kw): ...
- def get_indexes(self, connection, table_name, schema=..., **kw): ...
- def get_unique_constraints(self, connection, table_name, schema=..., **kw): ...
- def get_view_definition(self, connection, view_name, schema=..., **kw): ...
-
-class ReflectedState:
- columns = ... # type: Any
- table_options = ... # type: Any
- table_name = ... # type: Any
- keys = ... # type: Any
- constraints = ... # type: Any
- def __init__(self) -> None: ...
-
-class MySQLTableDefinitionParser:
- dialect = ... # type: Any
- preparer = ... # type: Any
- def __init__(self, dialect, preparer) -> None: ...
- def parse(self, show_create, charset): ...
-
-class _DecodingRowProxy:
- rowproxy = ... # type: Any
- charset = ... # type: Any
- def __init__(self, rowproxy, charset) -> None: ...
- def __getitem__(self, index): ...
- def __getattr__(self, attr): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/engine/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/engine/__init__.pyi
deleted file mode 100644
index 49eca6d..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/engine/__init__.pyi
+++ /dev/null
@@ -1,11 +0,0 @@
-# Stubs for sqlalchemy.engine (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from .base import Connection as Connection
-from .base import Engine as Engine
-from .base import RowProxy as RowProxy
-from .base import Transaction as Transaction
-
-def create_engine(*args, **kwargs): ...
-def engine_from_config(configuration, prefix=..., **kwargs): ...
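
As a point of reference, a small sketch (not part of the diff) exercising the two functions declared in the stub just removed; the 'db.' prefix and the SQLite URLs are arbitrary choices for the example:

    from sqlalchemy import create_engine
    from sqlalchemy.engine import engine_from_config

    engine = create_engine('sqlite://')   # in-memory SQLite, no server required

    config = {'db.url': 'sqlite://', 'db.echo': 'false'}
    engine2 = engine_from_config(config, prefix='db.')

Both calls return an Engine, which the stub above re-exports from .base.
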
diff --git a/typeshed/third_party/2and3/sqlalchemy/engine/base.pyi b/typeshed/third_party/2and3/sqlalchemy/engine/base.pyi
deleted file mode 100644
index 71f6e19..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/engine/base.pyi
+++ /dev/null
@@ -1,21 +0,0 @@
-from typing import Any, List, Tuple
-
-# Dummy until I figure out something better.
-class Connectable:
- pass
-
-class Connection:
- def begin(self): ...
- def execute(self, object, *multiparams, **params): ...
-
-class Engine(object): ...
-
-class RowProxy:
- def items(self) -> List[Tuple[Any, Any]]: ...
- def keys(self) -> List[Any]: ...
- def values(self) -> List[Any]: ...
- def __getitem__(self, key: str): ...
-
-class Transaction:
- def commit(self): ...
- def rollback(self): ...
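
A brief illustrative sketch of the Connection, Transaction and RowProxy surface stubbed above; the table name and SQL are invented for the example:

    from sqlalchemy import create_engine
    from sqlalchemy.sql import text

    engine = create_engine('sqlite://')
    conn = engine.connect()      # Connection
    trans = conn.begin()         # Transaction
    conn.execute(text('CREATE TABLE t (x INTEGER)'))
    conn.execute(text('INSERT INTO t (x) VALUES (1)'))
    trans.commit()

    for row in conn.execute(text('SELECT x FROM t')):   # each row is a RowProxy
        print(row.keys(), row.items(), row['x'])
    conn.close()
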
diff --git a/typeshed/third_party/2and3/sqlalchemy/engine/strategies.pyi b/typeshed/third_party/2and3/sqlalchemy/engine/strategies.pyi
deleted file mode 100644
index 06c2f00..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/engine/strategies.pyi
+++ /dev/null
@@ -1,39 +0,0 @@
-# Stubs for sqlalchemy.engine.strategies (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-from . import base
-
-strategies = ... # type: Any
-
-class EngineStrategy:
- def __init__(self) -> None: ...
- def create(self, *args, **kwargs): ...
-
-class DefaultEngineStrategy(EngineStrategy):
- def create(self, name_or_url, **kwargs): ...
-
-class PlainEngineStrategy(DefaultEngineStrategy):
- name = ... # type: Any
- engine_cls = ... # type: Any
-
-class ThreadLocalEngineStrategy(DefaultEngineStrategy):
- name = ... # type: Any
- engine_cls = ... # type: Any
-
-class MockEngineStrategy(EngineStrategy):
- name = ... # type: Any
- def create(self, name_or_url, executor, **kwargs): ...
- class MockConnection(base.Connectable):
- def __init__(self, dialect, execute) -> None: ...
- engine = ... # type: Any
- dialect = ... # type: Any
- name = ... # type: Any
- def contextual_connect(self, **kwargs): ...
- def execution_options(self, **kw): ...
- def compiler(self, statement, parameters, **kwargs): ...
- def create(self, entity, **kwargs): ...
- def drop(self, entity, **kwargs): ...
- def execute(self, object, *multiparams, **params): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/engine/url.pyi b/typeshed/third_party/2and3/sqlalchemy/engine/url.pyi
deleted file mode 100644
index 76dd1ef..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/engine/url.pyi
+++ /dev/null
@@ -1,27 +0,0 @@
-# Stubs for sqlalchemy.engine.url (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-from .. import dialects
-
-# registry = dialects.registry
-
-class URL:
- drivername = ... # type: Any
- username = ... # type: Any
- password = ... # type: Any
- host = ... # type: Any
- port = ... # type: Any
- database = ... # type: Any
- query = ... # type: Any
- def __init__(self, drivername, username=..., password=..., host=..., port=..., database=..., query=...) -> None: ...
- def __to_string__(self, hide_password=...): ...
- def __hash__(self): ...
- def __eq__(self, other): ...
- def get_backend_name(self): ...
- def get_driver_name(self): ...
- def get_dialect(self): ...
- def translate_connect_args(self, names=..., **kw): ...
-
-def make_url(name_or_url): ...
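
For illustration, make_url() parses a database URL string into the URL object stubbed above; the credentials and host below are invented:

    from sqlalchemy.engine.url import make_url

    url = make_url('mysql+pymysql://scott:tiger@localhost:3306/testdb')
    print(url.drivername)          # 'mysql+pymysql'
    print(url.username, url.host)  # 'scott' 'localhost'
    print(url.port, url.database)  # 3306 'testdb'
    print(url.get_backend_name())  # 'mysql'
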
diff --git a/typeshed/third_party/2and3/sqlalchemy/exc.pyi b/typeshed/third_party/2and3/sqlalchemy/exc.pyi
deleted file mode 100644
index b87b9cd..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/exc.pyi
+++ /dev/null
@@ -1,77 +0,0 @@
-# Stubs for sqlalchemy.exc (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-class SQLAlchemyError(Exception): ...
-class ArgumentError(SQLAlchemyError): ...
-class NoSuchModuleError(ArgumentError): ...
-class NoForeignKeysError(ArgumentError): ...
-class AmbiguousForeignKeysError(ArgumentError): ...
-
-class CircularDependencyError(SQLAlchemyError):
- cycles = ... # type: Any
- edges = ... # type: Any
- def __init__(self, message, cycles, edges, msg=...) -> None: ...
- def __reduce__(self): ...
-
-class CompileError(SQLAlchemyError): ...
-
-class UnsupportedCompilationError(CompileError):
- def __init__(self, compiler, element_type) -> None: ...
-
-class IdentifierError(SQLAlchemyError): ...
-class DisconnectionError(SQLAlchemyError): ...
-class TimeoutError(SQLAlchemyError): ...
-class InvalidRequestError(SQLAlchemyError): ...
-class NoInspectionAvailable(InvalidRequestError): ...
-class ResourceClosedError(InvalidRequestError): ...
-class NoSuchColumnError(KeyError, InvalidRequestError): ...
-class NoReferenceError(InvalidRequestError): ...
-
-class NoReferencedTableError(NoReferenceError):
- table_name = ... # type: Any
- def __init__(self, message, tname) -> None: ...
- def __reduce__(self): ...
-
-class NoReferencedColumnError(NoReferenceError):
- table_name = ... # type: Any
- column_name = ... # type: Any
- def __init__(self, message, tname, cname) -> None: ...
- def __reduce__(self): ...
-
-class NoSuchTableError(InvalidRequestError): ...
-class UnboundExecutionError(InvalidRequestError): ...
-class DontWrapMixin: ...
-
-UnmappedColumnError = ... # type: Any
-
-class StatementError(SQLAlchemyError):
- statement = ... # type: Any
- params = ... # type: Any
- orig = ... # type: Any
- detail = ... # type: Any
- def __init__(self, message, statement, params, orig) -> None: ...
- def add_detail(self, msg): ...
- def __reduce__(self): ...
- def __unicode__(self): ...
-
-class DBAPIError(StatementError):
- @classmethod
- def instance(cls, statement, params, orig, dbapi_base_err, connection_invalidated=..., dialect=...): ...
- def __reduce__(self): ...
- connection_invalidated = ... # type: Any
- def __init__(self, statement, params, orig, connection_invalidated=...) -> None: ...
-
-class InterfaceError(DBAPIError): ...
-class DatabaseError(DBAPIError): ...
-class DataError(DatabaseError): ...
-class OperationalError(DatabaseError): ...
-class IntegrityError(DatabaseError): ...
-class InternalError(DatabaseError): ...
-class ProgrammingError(DatabaseError): ...
-class NotSupportedError(DatabaseError): ...
-class SADeprecationWarning(DeprecationWarning): ...
-class SAPendingDeprecationWarning(PendingDeprecationWarning): ...
-class SAWarning(RuntimeWarning): ...
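
A hedged sketch of the exception hierarchy above in use; it forces a duplicate-key error on an in-memory SQLite table invented for the example:

    from sqlalchemy import create_engine
    from sqlalchemy.exc import IntegrityError, SQLAlchemyError

    engine = create_engine('sqlite://')
    engine.execute('CREATE TABLE t (x INTEGER PRIMARY KEY)')
    engine.execute('INSERT INTO t (x) VALUES (1)')
    try:
        engine.execute('INSERT INTO t (x) VALUES (1)')   # duplicate primary key
    except IntegrityError as exc:
        # IntegrityError -> DatabaseError -> DBAPIError -> StatementError -> SQLAlchemyError
        assert isinstance(exc, SQLAlchemyError)
        print(exc.statement, exc.params)
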
diff --git a/typeshed/third_party/2and3/sqlalchemy/inspection.pyi b/typeshed/third_party/2and3/sqlalchemy/inspection.pyi
deleted file mode 100644
index 2d550cd..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/inspection.pyi
+++ /dev/null
@@ -1,5 +0,0 @@
-# Stubs for sqlalchemy.inspection (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-def inspect(subject, raiseerr=...): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/log.pyi b/typeshed/third_party/2and3/sqlalchemy/log.pyi
deleted file mode 100644
index 9e73bd9..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/log.pyi
+++ /dev/null
@@ -1,14 +0,0 @@
-import logging
-from typing import Any
-
-rootlogger = ... # type: Any
-
-class Identified(object):
- def _should_log_debug(self) -> bool: ...
- def _should_log_info(self) -> bool: ...
-
-class InstanceLogger(object): ...
-
-def instance_logger(instance, echoflag) -> None: ...
-
-class echo_property(object): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/orm/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/orm/__init__.pyi
deleted file mode 100644
index 2abfdfc..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/orm/__init__.pyi
+++ /dev/null
@@ -1,95 +0,0 @@
-# Stubs for sqlalchemy.orm (Python 2 and 3)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-# from . import mapper
-# from . import interfaces
-# from . import deprecated_interfaces
-from . import util
-# from . import properties
-# from . import relationships
-# from . import descriptor_props
-from . import session
-# from . import scoping
-# from . import query
-from ..util import langhelpers
-# from . import strategy_options
-
-# Mapper = mapper.Mapper
-# class_mapper = mapper.class_mapper
-# configure_mappers = mapper.configure_mappers
-# reconstructor = mapper.reconstructor
-# validates = mapper.validates
-# EXT_CONTINUE = interfaces.EXT_CONTINUE
-# EXT_STOP = interfaces.EXT_STOP
-# PropComparator = interfaces.PropComparator
-# MapperExtension = deprecated_interfaces.MapperExtension
-# SessionExtension = deprecated_interfaces.SessionExtension
-# AttributeExtension = deprecated_interfaces.AttributeExtension
-aliased = util.aliased
-# join = util.join
-# object_mapper = util.object_mapper
-# outerjoin = util.outerjoin
-# polymorphic_union = util.polymorphic_union
-# was_deleted = util.was_deleted
-# with_parent = util.with_parent
-# with_polymorphic = util.with_polymorphic
-# ColumnProperty = properties.ColumnProperty
-# RelationshipProperty = relationships.RelationshipProperty
-# ComparableProperty = descriptor_props.ComparableProperty
-# CompositeProperty = descriptor_props.CompositeProperty
-# SynonymProperty = descriptor_props.SynonymProperty
-# foreign = relationships.foreign
-# remote = relationships.remote
-Session = session.Session
-object_session = Session.object_session
-sessionmaker = session.sessionmaker
-# make_transient = session.make_transient
-# make_transient_to_detached = session.make_transient_to_detached
-# scoped_session = scoping.scoped_session
-# AliasOption = query.AliasOption
-# Query = query.Query
-# Bundle = query.Bundle
-public_factory = langhelpers.public_factory
-
-def create_session(bind=..., **kwargs): ...
-
-relationship = ... # type: Any
-
-def relation(*arg, **kw): ...
-def dynamic_loader(argument, **kw): ...
-
-column_property = ... # type: Any
-composite = ... # type: Any
-
-def backref(name, **kwargs): ...
-def deferred(*columns, **kw): ...
-
-synonym = ... # type: Any
-comparable_property = ... # type: Any
-
-def compile_mappers(): ...
-def clear_mappers(): ...
-
-joinedload = ... # type: Any
-joinedload_all = ... # type: Any
-contains_eager = ... # type: Any
-defer = ... # type: Any
-undefer = ... # type: Any
-undefer_group = ... # type: Any
-load_only = ... # type: Any
-lazyload = ... # type: Any
-lazyload_all = ... # type: Any
-subqueryload = ... # type: Any
-subqueryload_all = ... # type: Any
-immediateload = ... # type: Any
-noload = ... # type: Any
-defaultload = ... # type: Any
-
-# Load = strategy_options.Load
-
-def eagerload(*args, **kwargs): ...
-def eagerload_all(*args, **kwargs): ...
-
-contains_alias = ... # type: Any
diff --git a/typeshed/third_party/2and3/sqlalchemy/orm/session.pyi b/typeshed/third_party/2and3/sqlalchemy/orm/session.pyi
deleted file mode 100644
index 8e7d396..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/orm/session.pyi
+++ /dev/null
@@ -1,93 +0,0 @@
-# Stubs for sqlalchemy.orm.session (Python 2 and 3)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-class _SessionClassMethods:
- @classmethod
- def close_all(cls): ...
- @classmethod
- def identity_key(cls, orm_util, *args, **kwargs): ...
- @classmethod
- def object_session(cls, instance): ...
-
-class SessionTransaction:
- session = ... # type: Any
- nested = ... # type: Any
- def __init__(self, session, parent=..., nested=...) -> None: ...
- @property
- def is_active(self): ...
- def connection(self, bindkey, execution_options=..., **kwargs): ...
- def prepare(self): ...
- def commit(self): ...
- def rollback(self, _capture_exception=...): ...
- def close(self, invalidate=...): ...
- def __enter__(self): ...
- def __exit__(self, type, value, traceback): ...
-
-class Session(_SessionClassMethods):
- public_methods = ... # type: Any
- identity_map = ... # type: Any
- bind = ... # type: Any
- transaction = ... # type: Any
- hash_key = ... # type: Any
- autoflush = ... # type: Any
- autocommit = ... # type: Any
- expire_on_commit = ... # type: Any
- twophase = ... # type: Any
- def __init__(self, bind=..., autoflush=..., expire_on_commit=..., _enable_transaction_accounting=..., autocommit=..., twophase=..., weak_identity_map=..., binds=..., extension=..., info=..., query_cls=...) -> None: ...
- connection_callable = ... # type: Any
- def info(self): ...
- def begin(self, subtransactions=..., nested=...): ...
- def begin_nested(self): ...
- def rollback(self): ...
- def commit(self): ...
- def prepare(self): ...
- def connection(self, mapper=..., clause=..., bind=..., close_with_result=..., execution_options=..., **kw): ...
- def execute(self, clause, params=..., mapper=..., bind=..., **kw): ...
- def scalar(self, clause, params=..., mapper=..., bind=..., **kw): ...
- def close(self): ...
- def invalidate(self): ...
- def expunge_all(self): ...
- def bind_mapper(self, mapper, bind): ...
- def bind_table(self, table, bind): ...
- def get_bind(self, mapper=..., clause=...): ...
- def query(self, *entities, **kwargs): ...
- @property
- def no_autoflush(self): ...
- def refresh(self, instance, attribute_names=..., lockmode=...): ...
- def expire_all(self): ...
- def expire(self, instance, attribute_names=...): ...
- def prune(self): ...
- def expunge(self, instance): ...
- def add(self, instance, _warn=...): ...
- def add_all(self, instances): ...
- def delete(self, instance): ...
- def merge(self, instance, load=...): ...
- def enable_relationship_loading(self, obj): ...
- def __contains__(self, instance): ...
- def __iter__(self): ...
- def flush(self, objects=...): ...
- def bulk_save_objects(self, objects, return_defaults=..., update_changed_only=...): ...
- def bulk_insert_mappings(self, mapper, mappings, return_defaults=...): ...
- def bulk_update_mappings(self, mapper, mappings): ...
- def is_modified(self, instance, include_collections=..., passive=...): ...
- @property
- def is_active(self): ...
- @property
- def dirty(self): ...
- @property
- def deleted(self): ...
- @property
- def new(self): ...
-
-class sessionmaker(_SessionClassMethods):
- kw = ... # type: Any
- class_ = ... # type: Any
- def __init__(self, bind=..., class_=..., autoflush=..., autocommit=..., expire_on_commit=..., info=..., **kw) -> None: ...
- def __call__(self, **local_kw): ...
- def configure(self, **new_kw): ...
-
-# Names in __all__ with no definition:
-# SessionExtension
diff --git a/typeshed/third_party/2and3/sqlalchemy/orm/util.pyi b/typeshed/third_party/2and3/sqlalchemy/orm/util.pyi
deleted file mode 100644
index d3c000c..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/orm/util.pyi
+++ /dev/null
@@ -1,12 +0,0 @@
-# Stubs for sqlalchemy.orm.util (Python 2 and 3)
-from typing import Optional, Any, Text
-
-from ..sql.selectable import FromClause
-
-class AliasedClass(object):
- def __init__(self, cls: Any, alias: Optional[FromClause] =None, name: Optional[Text] =None, flat: bool =False, adapt_on_names: bool =False,
- with_polymorphic_mappers: Any =(), with_polymorphic_discriminator: Any =None, base_alias: Any =None, use_mapper_path: bool =False) -> None: ...
- def __getattr__(self, key): ...
- def __repr__(self): ...
-
-def aliased(element: Any, alias: Optional[FromClause] =None, name: Optional[Text] =None, flat: bool =False, adapt_on_names: bool =False) -> AliasedClass: ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/pool.pyi b/typeshed/third_party/2and3/sqlalchemy/pool.pyi
deleted file mode 100644
index a278d6e..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/pool.pyi
+++ /dev/null
@@ -1,118 +0,0 @@
-# Stubs for sqlalchemy.pool (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-from . import log
-from . import util
-
-threading = util.threading
-memoized_property = util.memoized_property
-chop_traceback = util.chop_traceback
-
-proxies = ... # type: Any
-
-def manage(module, **params): ...
-def clear_managers(): ...
-
-reset_rollback = ... # type: Any
-reset_commit = ... # type: Any
-reset_none = ... # type: Any
-
-class _ConnDialect:
- def do_rollback(self, dbapi_connection): ...
- def do_commit(self, dbapi_connection): ...
- def do_close(self, dbapi_connection): ...
-
-class Pool(log.Identified):
- logging_name = ... # type: Any
- echo = ... # type: Any
- def __init__(self, creator, recycle=..., echo=..., use_threadlocal=..., logging_name=..., reset_on_return=..., listeners=..., events=..., _dispatch=..., _dialect=...) -> None: ...
- def add_listener(self, listener): ...
- def unique_connection(self): ...
- def recreate(self): ...
- def dispose(self): ...
- def connect(self): ...
- def status(self): ...
-
- _threadconns = ... # type: Any
- _creator = ... # type: Any
- _recycle = ... # type: Any
- _invalidate_time = ... # type: Any
- dispatch = ... # type: Any
- _dialect = ... # type: Any
- _orig_logging_name = ... # type: Any
- _reset_on_return = ... # type: Any
- _use_threadlocal = ... # type: Any
-
-class _ConnectionRecord:
- connection = ... # type: Any
- finalize_callback = ... # type: Any
- def __init__(self, pool) -> None: ...
- def info(self): ...
- @classmethod
- def checkout(cls, pool): ...
- fairy_ref = ... # type: Any
- def checkin(self): ...
- def close(self): ...
- def invalidate(self, e=..., soft=...): ...
- def get_connection(self): ...
-
-class _ConnectionFairy:
- connection = ... # type: Any
- def __init__(self, dbapi_connection, connection_record, echo) -> None: ...
- @property
- def is_valid(self): ...
- def info(self): ...
- def invalidate(self, e=..., soft=...): ...
- def cursor(self, *args, **kwargs): ...
- def __getattr__(self, key): ...
- def detach(self): ...
- def close(self): ...
-
-class SingletonThreadPool(Pool):
- size = ... # type: Any
- def __init__(self, creator, pool_size=..., **kw) -> None: ...
- def recreate(self): ...
- def dispose(self): ...
- def status(self): ...
-
-class QueuePool(Pool):
- def __init__(self, creator, pool_size=..., max_overflow=..., timeout=..., **kw) -> None: ...
- def recreate(self): ...
- def dispose(self): ...
- def status(self): ...
- def size(self): ...
- def checkedin(self): ...
- def overflow(self): ...
- def checkedout(self): ...
-
-class NullPool(Pool):
- def status(self): ...
- def recreate(self): ...
- def dispose(self): ...
-
-class StaticPool(Pool):
- def connection(self): ...
- def status(self): ...
- def dispose(self): ...
- def recreate(self): ...
-
-class AssertionPool(Pool):
- def __init__(self, *args, **kw) -> None: ...
- def status(self): ...
- def dispose(self): ...
- def recreate(self): ...
-
-class _DBProxy:
- module = ... # type: Any
- kw = ... # type: Any
- poolclass = ... # type: Any
- pools = ... # type: Any
- def __init__(self, module, poolclass=..., **kw) -> None: ...
- def close(self): ...
- def __del__(self): ...
- def __getattr__(self, key): ...
- def get_pool(self, *args, **kw): ...
- def connect(self, *args, **kw): ...
- def dispose(self, *args, **kw): ...
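
As a usage note, applications rarely instantiate these pool classes directly; a pool implementation is usually selected through create_engine(). A hedged sketch (the database file name is invented):

    from sqlalchemy import create_engine
    from sqlalchemy.pool import NullPool, QueuePool

    # QueuePool is the default pool for most dialects; the keyword arguments
    # below are forwarded to QueuePool.__init__ as stubbed above.
    engine = create_engine('sqlite:///example.db', poolclass=QueuePool,
                           pool_size=5, max_overflow=10)

    # NullPool opens and closes a DBAPI connection per checkout (no pooling).
    engine_no_pool = create_engine('sqlite:///example.db', poolclass=NullPool)
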
diff --git a/typeshed/third_party/2and3/sqlalchemy/schema.pyi b/typeshed/third_party/2and3/sqlalchemy/schema.pyi
deleted file mode 100644
index f788897..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/schema.pyi
+++ /dev/null
@@ -1,50 +0,0 @@
-# Stubs for sqlalchemy.schema (Python 2)
-
-from .sql import base
-from .sql import schema
-from .sql import naming
-from .sql import ddl
-from .sql import elements
-
-SchemaVisitor = base.SchemaVisitor
-CheckConstraint = schema.CheckConstraint
-Column = schema.Column
-ColumnDefault = schema.ColumnDefault
-Constraint = schema.Constraint
-DefaultClause = schema.DefaultClause
-DefaultGenerator = schema.DefaultGenerator
-FetchedValue = schema.FetchedValue
-ForeignKey = schema.ForeignKey
-ForeignKeyConstraint = schema.ForeignKeyConstraint
-Index = schema.Index
-MetaData = schema.MetaData
-PassiveDefault = schema.PassiveDefault
-PrimaryKeyConstraint = schema.PrimaryKeyConstraint
-SchemaItem = schema.SchemaItem
-Sequence = schema.Sequence
-Table = schema.Table
-ThreadLocalMetaData = schema.ThreadLocalMetaData
-UniqueConstraint = schema.UniqueConstraint
-_get_table_key = schema._get_table_key
-ColumnCollectionConstraint = schema.ColumnCollectionConstraint
-ColumnCollectionMixin = schema.ColumnCollectionMixin
-conv = elements.conv
-DDL = ddl.DDL
-CreateTable = ddl.CreateTable
-DropTable = ddl.DropTable
-CreateSequence = ddl.CreateSequence
-DropSequence = ddl.DropSequence
-CreateIndex = ddl.CreateIndex
-DropIndex = ddl.DropIndex
-CreateSchema = ddl.CreateSchema
-DropSchema = ddl.DropSchema
-_DropView = ddl._DropView
-CreateColumn = ddl.CreateColumn
-AddConstraint = ddl.AddConstraint
-DropConstraint = ddl.DropConstraint
-DDLBase = ddl.DDLBase
-DDLElement = ddl.DDLElement
-_CreateDropBase = ddl._CreateDropBase
-_DDLCompiles = ddl._DDLCompiles
-sort_tables = ddl.sort_tables
-sort_tables_and_constraints = ddl.sort_tables_and_constraints
diff --git a/typeshed/third_party/2and3/sqlalchemy/sql/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/__init__.pyi
deleted file mode 100644
index 91d06d8..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/sql/__init__.pyi
+++ /dev/null
@@ -1,66 +0,0 @@
-# Stubs for sqlalchemy.sql (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from . import expression
-from . import visitors
-
-Alias = expression.Alias
-ClauseElement = expression.ClauseElement
-ColumnCollection = expression.ColumnCollection
-ColumnElement = expression.ColumnElement
-CompoundSelect = expression.CompoundSelect
-Delete = expression.Delete
-FromClause = expression.FromClause
-Insert = expression.Insert
-Join = expression.Join
-Select = expression.Select
-Selectable = expression.Selectable
-TableClause = expression.TableClause
-Update = expression.Update
-alias = expression.alias
-and_ = expression.and_
-asc = expression.asc
-between = expression.between
-bindparam = expression.bindparam
-case = expression.case
-cast = expression.cast
-collate = expression.collate
-column = expression.column
-delete = expression.delete
-desc = expression.desc
-distinct = expression.distinct
-except_ = expression.except_
-except_all = expression.except_all
-exists = expression.exists
-extract = expression.extract
-false = expression.false
-False_ = expression.False_
-func = expression.func
-funcfilter = expression.funcfilter
-insert = expression.insert
-intersect = expression.intersect
-intersect_all = expression.intersect_all
-join = expression.join
-label = expression.label
-literal = expression.literal
-literal_column = expression.literal_column
-modifier = expression.modifier
-not_ = expression.not_
-null = expression.null
-or_ = expression.or_
-outerjoin = expression.outerjoin
-outparam = expression.outparam
-over = expression.over
-select = expression.select
-subquery = expression.subquery
-table = expression.table
-text = expression.text
-true = expression.true
-True_ = expression.True_
-tuple_ = expression.tuple_
-type_coerce = expression.type_coerce
-union = expression.union
-union_all = expression.union_all
-update = expression.update
-ClauseVisitor = visitors.ClauseVisitor
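
For context, a small sketch combining a few of the constructs re-exported above (select, and_, or_); the table and columns are invented for the example:

    from sqlalchemy import Column, Integer, MetaData, String, Table
    from sqlalchemy.sql import and_, or_, select

    metadata = MetaData()
    users = Table('users', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('name', String(50)),
                  Column('age', Integer))

    stmt = select([users.c.name]).where(
        and_(users.c.age >= 18,
             or_(users.c.name == 'alice', users.c.name == 'bob')))
    print(stmt)   # renders the SELECT ... WHERE ... statement as SQL text
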
diff --git a/typeshed/third_party/2and3/sqlalchemy/sql/annotation.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/annotation.pyi
deleted file mode 100644
index ba0aba4..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/sql/annotation.pyi
+++ /dev/null
@@ -1,11 +0,0 @@
-class Annotated(object):
- def __new__(cls, *args): ...
- def __init__(self, element, values): ...
- def _annotate(self, values): ...
- def _with_annotations(self, values): ...
- def _deannotate(self, values=..., clone: bool=...): ...
- def _compiler_dispatch(self, visitor, **kw): ...
- def _constructor(self): ...
- def _clone(self): ...
- def __hash__(self): ...
- def __eq__(self): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/sql/base.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/base.pyi
deleted file mode 100644
index 48e68c7..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/sql/base.pyi
+++ /dev/null
@@ -1,42 +0,0 @@
-from typing import Any, Iterable
-
-from .visitors import ClauseVisitor
-from .. import util
-
-class Immutable(object):
- def unique_params(self, *optionaldict, **kwargs): ...
- def params(self, *optionaldict, **kwargs): ...
- def _clone(self) -> Immutable: ...
-
-class DialectKWArgs(object):
- def argument_for(cls, dialect_name, argument_name, default): ...
- def kwargs(self): ...
- def dialect_options(self): ...
-
-class Generative(object): ...
-
-class Executable(Generative):
- def execution_options(self, **kw): ...
- def execute(self, *multiparams, **params): ...
- def scalar(self, *multiparams, **params): ...
-
- @property
- def bind(self): ...
-
-class SchemaEventTarget(object): ...
-class SchemaVisitor(ClauseVisitor): ...
-class ColumnCollection(util.OrderedProperties):
- def replace(self, column): ...
- def add(self, column): ...
- def clear(self): ...
- def remove(self, column): ...
- def update(self, iter: Iterable[Any]): ...
- def extend(self, iter: Iterable[Any]): ...
- def contains_column(self, col): ...
- def as_immutable(self): ...
-
-class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection): ...
-
-class ColumnSet(util.ordered_column_set): ...
-
-def _bind_or_error(schemaitem, msg): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/sql/ddl.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/ddl.pyi
deleted file mode 100644
index 06ac96a..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/sql/ddl.pyi
+++ /dev/null
@@ -1,25 +0,0 @@
-from .elements import ClauseElement
-from .base import Executable, SchemaVisitor
-
-class _DDLCompiles(ClauseElement): ...
-class DDLElement(Executable, _DDLCompiles): ...
-class DDL(DDLElement): ...
-class _CreateDropBase(DDLElement): ...
-class CreateSchema(_CreateDropBase): ...
-class DropSchema(_CreateDropBase): ...
-class CreateTable(_CreateDropBase): ...
-class _DropView(_CreateDropBase): ...
-class CreateColumn(_DDLCompiles): ...
-class DropTable(_CreateDropBase): ...
-class CreateSequence(_CreateDropBase): ...
-class DropSequence(_CreateDropBase): ...
-class CreateIndex(_CreateDropBase): ...
-class DropIndex(_CreateDropBase): ...
-class AddConstraint(_CreateDropBase): ...
-class DropConstraint(_CreateDropBase): ...
-class DDLBase(SchemaVisitor): ...
-class SchemaGenerator(DDLBase): ...
-class SchemaDropper(DDLBase): ...
-
-def sort_tables(tables, skip_fn=..., extra_dependencies=...): ...
-def sort_tables_and_constraints(tables, filter_fn=..., extra_dependencies=...): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/sql/dml.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/dml.pyi
deleted file mode 100644
index 79cb201..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/sql/dml.pyi
+++ /dev/null
@@ -1,20 +0,0 @@
-from typing import AnyStr
-
-from .base import Executable, DialectKWArgs
-from .elements import ClauseElement
-from .selectable import HasPrefixes
-
-class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement):
- def params(self, *arg, **kw): ...
- @property
- def bind(self): ...
- def returning(self, *cols): ...
- def with_hint(self, text, selectable=..., dialect_name: AnyStr=...): ...
-
-class ValuesBase(UpdateBase):
- def values(self, *args, **kwargs): ...
- def return_defaults(self, *cols): ...
-
-class Insert(ValuesBase): ...
-class Update(ValuesBase): ...
-class Delete(UpdateBase): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/sql/elements.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/elements.pyi
deleted file mode 100644
index 4b585c6..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/sql/elements.pyi
+++ /dev/null
@@ -1,93 +0,0 @@
-# Stubs for sqlalchemy.sql.elements (Python 2 and 3)
-from typing import Text, Any
-
-from .visitors import Visitable
-from .annotation import Annotated
-from .base import Executable, Immutable
-from .operators import ColumnOperators
-from .. import util
-
-class ClauseElement(Visitable): ...
-
-class ColumnElement(ColumnOperators, ClauseElement):
- __visit_name__ = 'column' # type: Text
- primary_key = False # type: Any
- foreign_keys = [] # type: Any
- _label = None # type: Any
- _key_label = key = None # type: Any
- _alt_names = () # type: Any
- def self_group(self, against=None): ...
- def _negate(self): ...
- @util.memoized_property
- def type(self): ...
- @util.memoized_property
- def comparator(self): ...
- def __getattr__(self, key): ...
- def operate(self, op, *other, **kwargs): ...
- def reverse_operate(self, op, other, **kwargs): ...
- def _bind_param(self, operator, obj): ...
- @property
- def expression(self): ...
- @property
- def _select_iterable(self): ...
- @util.memoized_property
- def base_columns(self): ...
- @util.memoized_property
- def proxy_set(self): ...
- def shares_lineage(self, othercolumn): ...
- def _compare_name_for_result(self, other): ...
- def _make_proxy(self, selectable, name=None, name_is_truncatable=False, **kw): ...
- def compare(self, other, use_proxies=False, equivalents=None, **kw): ...
- def label(self, name): ...
- @util.memoized_property
- def anon_label(self): ...
-
-class BindParameter(ColumnElement): ...
-class BinaryExpression(ColumnElement): ...
-
-class TypeClause(ClauseElement): ...
-class TextClause(Executable, ClauseElement): ...
-
-class Null(ColumnElement): ...
-class False_(ColumnElement): ...
-class True_(ColumnElement): ...
-
-class ClauseList(ClauseElement): ...
-class BooleanClauseList(ClauseList, ColumnElement): ...
-class Tuple(ClauseList, ColumnElement): ...
-class Case(ColumnElement): ...
-class Cast(ColumnElement): ...
-class Extract(ColumnElement): ...
-class _label_reference(ColumnElement): ...
-
-class _textual_label_reference(ColumnElement): ...
-class UnaryExpression(ColumnElement): ...
-class AsBoolean(UnaryExpression): ...
-class Grouping(ColumnElement): ...
-class Over(ColumnElement): ...
-class FunctionFilter(ColumnElement): ...
-class Label(ColumnElement): ...
-class ColumnClause(Immutable, ColumnElement): ...
-class _IdentifiedClause(Executable, ClauseElement): ...
-class SavepointClause(_IdentifiedClause): ...
-class RollbackToSavepointClause(_IdentifiedClause): ...
-class ReleaseSavepointClause(_IdentifiedClause): ...
-class quoted_name(util.MemoizedSlots, util.text_type): ...
-class _truncated_label(quoted_name): ...
-class conv(_truncated_label): ...
-class _defer_name(_truncated_label): ...
-class _defer_none_name(_defer_name): ...
-class _anonymous_label(_truncated_label): ...
-class AnnotatedColumnElement(Annotated): ...
-
-def _clone(element, **kw): ...
-def _type_from_args(args): ...
-def _literal_as_binds(element, name, type_=None): ...
-
-def collate(expression, collation) -> BinaryExpression: ...
-def between(expr, lower_bound, upper_bound, symmetric: bool=...): ...
-def literal(value, type_=None) -> BindParameter: ...
-def outparam(key, type_=None) -> BindParameter: ...
-def type_coerce(expression, type_): ...
-def not_(clause): ...
-def literal_column(text, type_=None): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/sql/expression.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/expression.pyi
deleted file mode 100644
index 48fc3b8..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/sql/expression.pyi
+++ /dev/null
@@ -1,87 +0,0 @@
-# Stubs for sqlalchemy.sql.expression (Python 2)
-
-from typing import Any
-from . import functions
-from . import elements
-from . import base
-from . import selectable
-from . import dml
-
-from .visitors import Visitable
-
-from .elements import ClauseElement, ColumnElement,\
- BindParameter, UnaryExpression, BooleanClauseList, \
- Label, Cast, Case, ColumnClause, TextClause, Over, Null, \
- True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \
- Grouping, not_, \
- collate, literal_column, between,\
- literal, outparam, type_coerce, ClauseList, FunctionFilter
-from .elements import SavepointClause, RollbackToSavepointClause, \
- ReleaseSavepointClause
-from .base import ColumnCollection, Generative, Executable
-from .selectable import Alias, Join, Select, Selectable, TableClause, \
- CompoundSelect, CTE, FromClause, FromGrouping, SelectBase, \
- alias, GenerativeSelect, \
- subquery, HasPrefixes, HasSuffixes, Exists, ScalarSelect, TextAsFrom
-from .dml import Insert, Update, Delete, UpdateBase, ValuesBase
-
-func = functions.func # type: functions._FunctionGenerator
-modifier = functions.modifier # type: functions._FunctionGenerator
-
-and_ = ... # type: Any
-or_ = ... # type: Any
-bindparam = ... # type: Any
-select = ... # type: Any
-text = ... # type: Any
-table = ... # type: Any
-column = ... # type: Any
-over = ... # type: Any
-label = ... # type: Any
-case = ... # type: Any
-cast = ... # type: Any
-extract = ... # type: Any
-tuple_ = ... # type: Any
-except_ = ... # type: Any
-except_all = ... # type: Any
-intersect = ... # type: Any
-intersect_all = ... # type: Any
-union = ... # type: Any
-union_all = ... # type: Any
-exists = ... # type: Any
-nullsfirst = ... # type: Any
-nullslast = ... # type: Any
-asc = ... # type: Any
-desc = ... # type: Any
-distinct = ... # type: Any
-true = ... # type: Any
-false = ... # type: Any
-null = ... # type: Any
-join = ... # type: Any
-outerjoin = ... # type: Any
-insert = ... # type: Any
-update = ... # type: Any
-delete = ... # type: Any
-funcfilter = ... # type: Any
-
-# old names for compatibility
-_Executable = Executable
-_BindParamClause = BindParameter
-_Label = Label
-_SelectBase = SelectBase
-_BinaryExpression = BinaryExpression
-_Cast = Cast
-_Null = Null
-_False = False_
-_True = True_
-_TextClause = TextClause
-_UnaryExpression = UnaryExpression
-_Case = Case
-_Tuple = Tuple
-_Over = Over
-_Generative = Generative
-_TypeClause = TypeClause
-_Extract = Extract
-_Exists = Exists
-_Grouping = Grouping
-_FromGrouping = FromGrouping
-_ScalarSelect = ScalarSelect
diff --git a/typeshed/third_party/2and3/sqlalchemy/sql/functions.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/functions.pyi
deleted file mode 100644
index b656456..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/sql/functions.pyi
+++ /dev/null
@@ -1,47 +0,0 @@
-
-from .base import Executable, ColumnCollection
-from .elements import ClauseList, Cast, Extract, _literal_as_binds, \
- literal_column, _type_from_args, ColumnElement, _clone,\
- Over, BindParameter, FunctionFilter
-from .selectable import FromClause, Select, Alias
-from .visitors import VisitableType
-
-class FunctionElement(Executable, ColumnElement, FromClause): ...
-
-class _FunctionGenerator(object):
- def __init__(self, **opts): ...
- def __getattr__(self, name): ...
- def __call__(self, *c, **kwargs) -> Function: ...
-
-func = ... # type: _FunctionGenerator
-modifier = ... # type: _FunctionGenerator
-
-class Function(FunctionElement): ...
-
-class _GenericMeta(VisitableType): ...
-# TODO: Use GenericFunction(util.with_metaclass(_GenericMeta, Function))
-class GenericFunction(_GenericMeta, Function): ...
-class next_value(GenericFunction): ...
-
-class AnsiFunction(GenericFunction): ...
-class ReturnTypeFromArgs(GenericFunction): ...
-
-class coalesce(ReturnTypeFromArgs): ...
-class max(ReturnTypeFromArgs): ...
-class min(ReturnTypeFromArgs): ...
-class sum(ReturnTypeFromArgs): ...
-class now(GenericFunction): ...
-class concat(GenericFunction): ...
-
-class char_length(GenericFunction): ...
-class random(GenericFunction): ...
-class count(GenericFunction): ...
-class current_date(AnsiFunction): ...
-class current_time(AnsiFunction): ...
-class current_timestamp(AnsiFunction): ...
-class current_user(AnsiFunction): ...
-class localtime(AnsiFunction): ...
-class localtimestamp(AnsiFunction): ...
-class session_user(AnsiFunction): ...
-class sysdate(AnsiFunction): ...
-class user(AnsiFunction): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/sql/naming.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/naming.pyi
deleted file mode 100644
index d9172c4..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/sql/naming.pyi
+++ /dev/null
@@ -1 +0,0 @@
-class ConventionDict(object): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/sql/operators.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/operators.pyi
deleted file mode 100644
index 5ae39cd..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/sql/operators.pyi
+++ /dev/null
@@ -1,99 +0,0 @@
-from typing import Any, AnyStr, Callable
-
-class Operators(object):
- def op(self, opstring: AnyStr, precedence: int, is_comparison: bool): ...
- def operate(self, op: Callable[[Any], Any], *other, **kwargs): ...
- def reverse_operate(self, op: Callable[[Any], Any], *other, **kwargs): ...
- def __and__(self, other): ...
- def __or__(self, other): ...
- def __invert__(self): ...
-
-
-class ColumnOperators(Operators):
- def concat(self, other): ...
- def like(self, other, escape=None): ...
- def ilike(self, other, escape=None): ...
- def notlike(self, other, escape=None): ...
- def notilike(self, other, escape=None): ...
- def in_(self, other): ...
- def notin_(self, other): ...
- def is_(self, other): ...
- def startswith(self, other, **kwargs): ...
- def endswith(self, other, **kwargs): ...
- def contains(self, other, **kwargs): ...
- def match(self, other, **kwargs): ...
- def desc(self): ...
- def asc(self): ...
- def nullsfirst(self): ...
- def nullslast(self): ...
- def collate(self, collation): ...
- def between(self, cleft, cright, symmetric: bool = ...): ...
- def distinct(self): ...
-
- def __lt__(self, other): ...
- def __le__(self, other): ...
- def __eq__(self, other): ...
- def __ne__(self, other): ...
- def __gt__(self, other): ...
- def __ge__(self, other): ...
- def __neg__(self): ...
- def __getitem__(self, index): ...
- def __lshift__(self, other): ...
- def __rshift__(self, other): ...
-
- def __radd__(self, other): ...
- def __rsub__(self, other): ...
- def __rmul__(self, other): ...
- def __rdiv__(self, other): ...
- def __rmod__(self, other): ...
- def __add__(self, other): ...
- def __sub__(self, other): ...
- def __mul__(self, other): ...
- def __div__(self, other): ...
- def __mod__(self, other): ...
- def __truediv__(self, other): ...
- def __rtruediv__(self, other): ...
-
-def from_(): ...
-def as_(): ...
-def exists(): ...
-def istrue(a): ...
-def isfalse(a): ...
-def is_(a, b): ...
-def isnot(a, b): ...
-def collate(a, b): ...
-def op(a, opstring, b): ...
-
-def like_op(a, b, escape=None): ...
-def notlike_op(a, b, escape=None): ...
-def ilike_op(a, b, escape=None): ...
-def notilike_op(a, b, escape=None): ...
-def between_op(a, b, symmetric: bool): ...
-def notbetween_op(a, b, symmetric: bool): ...
-
-def in_op(a, b): ...
-def notin_op(a, b): ...
-def distinct_op(a): ...
-
-def startswith_op(a, b, escape=None): ...
-def notstartswith_op(a, b, escape=None): ...
-def endswith_op(a, b, escape=None): ...
-def notendswith_op(a, b, escape=None): ...
-def contains_op(a, b, escape=None): ...
-def notcontains_op(a, b, escape=None): ...
-
-def match_op(a, b, **kw): ...
-def notmatch_op(a, b, **kw): ...
-
-def comma_op(a, b): ...
-def concat_op(a, b): ...
-
-def desc_op(a): ...
-def asc_op(a): ...
-def nullsfirst_op(a): ...
-def nullslast_op(a): ...
-
-def is_comparison(op): ...
-def is_commutative(op): ...
-def is_ordering_modified(op): ...
-def is_precedent(operator, against): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/sql/schema.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/schema.pyi
deleted file mode 100644
index b2b3e4c..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/sql/schema.pyi
+++ /dev/null
@@ -1,126 +0,0 @@
-from typing import Any, AnyStr, Set
-
-from .base import SchemaEventTarget, DialectKWArgs
-from .base import ColumnCollection
-from .elements import ClauseElement, ColumnClause, TextClause, \
- ColumnElement
-from .selectable import TableClause
-
-from . import visitors
-
-class SchemaItem(SchemaEventTarget, visitors.Visitable):
- def _execute_on_connection(self, connection, multiparams, params): ...
- @property
- def info(self): ...
- @property
- def quote(self): ...
- def get_children(self, **kwargs): ...
- def _init_items(self, *args): ...
- def _schema_item_copy(self, schema_item): ...
- def __repr__(self): ...
-
-
-class Table(DialectKWArgs, SchemaItem, TableClause):
- def __init__(self, name, metadata, *args, **kwargs): ...
- @property
- def key(self): ...
- @property
- def primary_key(self): ...
- def __repr__(self): ...
- def __str__(self): ...
- def append_column(self, column): ...
- def append_constraint(self, constraint): ...
- def append_ddl_listener(self, event, listener): ...
- def get_children(self, column_collections=True, schema_visitor=False, **kwargs): ...
- def exists(self, bind=None): ...
- def create(self, bind=None, checkfirst=False): ...
- def drop(self, bind=None, checkfirst=False): ...
- def tometadata(self, metadata, schema=None): ...
- c = ... # type: ColumnCollection
- constraints = ... # type: Set[Constraint]
-
-
-class Column(SchemaItem, ColumnClause):
- primary_key = ... # type: Any
- def __init__(self, *args, **kwargs): ...
- def references(self, column): ...
- def append_foreign_key(self, fk): ...
- def __repr__(self): ...
- def _set_parent(self, table): ...
- def _setup_on_memoized_fks(self, fn): ...
- def _on_table_attach(self, fn): ...
- def copy(self, **kw): ...
- def _make_proxy(self, selectable, name=None, key=None,
- name_is_truncatable=False, **kw): ...
- def get_children(self, schema_visitor=False, **kwargs): ...
-
-
-class ForeignKey(DialectKWArgs, SchemaItem):
- def __init__(self, column, _constraint=None, use_alter=False, name=None,
- onupdate=None, ondelete=None, deferrable=None,
- initially=None, link_to_name=False, match=None,
- info=None, **dialect_kw) -> None: ...
- def __repr__(self): ...
- def copy(self, schema=None): ...
- def _get_colspec(self, schema=None, table_name=None): ...
- @property
- def _referred_schema(self): ...
- def _table_key(self): ...
- def references(self, table): ...
- def get_referent(self, table): ...
- @property
- def _column_tokens(self): ...
- def _resolve_col_tokens(self): ...
- def _link_to_col_by_colstring(self, parenttable, table, colname): ...
- def _set_target_column(self, column): ...
- @property
- def column(self): ...
- def _set_parent(self, column): ...
- def _set_remote_table(self, table): ...
- def _remove_from_metadata(self, metadata): ...
- def _set_table(self, column, table): ...
-
-class _NotAColumnExpr(object): ...
-class DefaultGenerator(_NotAColumnExpr, SchemaItem): ...
-class ColumnDefault(DefaultGenerator): ...
-class Sequence(DefaultGenerator): ...
-class FetchedValue(_NotAColumnExpr, SchemaEventTarget): ...
-class DefaultClause(FetchedValue): ...
-class PassiveDefault(DefaultClause): ...
-
-class Constraint(DialectKWArgs, SchemaItem):
- def __init__(self, name=None, deferrable=None, initially=None): ...
- def __contains__(self, x): ...
- def contains_column(self, col): ...
- def keys(self): ...
- def __add__(self, other): ...
- def __iter__(self): ...
- def __len__(self): ...
- def copy(self, **kw): ...
-
-class ColumnCollectionMixin(object):
- columns = ... # type: Any
- def __init__(self, *columns, **kw): ...
- @classmethod
- def _extract_col_expression_collection(cls, expressions): ...
- def _check_attach(self, evt=False): ...
- def _set_parent(self, table): ...
-
-class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint):
- def __init__(self, *columns, **kw): ...
- def _set_parent(self, table): ...
- def __contains__(self, x): ...
- def copy(self, **kw): ...
- def contains_column(self, col): ...
- def __iter__(self): ...
- def __len__(self): ...
-
-class CheckConstraint(ColumnCollectionConstraint): ...
-class ForeignKeyConstraint(ColumnCollectionConstraint): ...
-class PrimaryKeyConstraint(ColumnCollectionConstraint): ...
-class UniqueConstraint(ColumnCollectionConstraint): ...
-class Index(DialectKWArgs, ColumnCollectionMixin, SchemaItem): ...
-class MetaData(SchemaItem): ...
-class ThreadLocalMetaData(MetaData): ...
-
-def _get_table_key(name: AnyStr, schema: AnyStr): ...
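
A minimal sketch of the Core schema constructs declared in the stub above (MetaData, Table, Column, ForeignKey, UniqueConstraint); all identifiers are invented for the example:

    from sqlalchemy import (Column, ForeignKey, Integer, MetaData, String,
                            Table, UniqueConstraint, create_engine)

    metadata = MetaData()

    users = Table(
        'users', metadata,
        Column('id', Integer, primary_key=True),
        Column('email', String(255), nullable=False),
        UniqueConstraint('email', name='uq_users_email'),
    )

    addresses = Table(
        'addresses', metadata,
        Column('id', Integer, primary_key=True),
        Column('user_id', Integer, ForeignKey('users.id')),
        Column('city', String(100)),
    )

    engine = create_engine('sqlite://')
    metadata.create_all(engine)   # emits CREATE TABLE statements
    print(list(users.c))          # Table.c is a ColumnCollection
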
diff --git a/typeshed/third_party/2and3/sqlalchemy/sql/selectable.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/selectable.pyi
deleted file mode 100644
index 543349d..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/sql/selectable.pyi
+++ /dev/null
@@ -1,76 +0,0 @@
-from typing import Any
-
-from .base import Immutable, Executable, \
- ColumnCollection, ColumnSet, Generative
-from .elements import ClauseElement, TextClause, ClauseList, \
- Grouping, UnaryExpression, BindParameter
-from .annotation import Annotated
-from .visitors import Visitable
-from .. import util
-
-def subquery(alias, *args, **kwargs): ...
-def alias(selectable, name=..., flat: bool=...): ...
-
-class Selectable(ClauseElement):
- def selectable(self): ...
-
-class HasPrefixes(object):
- def prefix_with(self, *expr, **kw): ...
-
-class HasSuffixes(object):
- def suffix_with(self, *expr, **kw): ...
-
-class FromClause(Selectable):
- def count(self, functions, whereclause=None, **params): ...
- def select(self, whereclause=None, **params): ...
- def join(self, right, onclause=None, isouter: bool=False): ...
- def outerjoin(self, right, onclause=None): ...
- def alias(self, name=None, flat: bool=False): ...
- def is_derived_from(self, fromclause): ...
- def _is_lexical_equivalent(self, other): ...
- def replace_selectable(self, sqlutil, old, alias): ...
- def correspond_on_equivalents(self, column, equivalents): ...
- def corresponding_column(self, column, require_embedded: bool=False): ...
- @property
- def description(self): ...
- def _reset_exported(self): ...
- @property
- def columns(self): ...
- @property
- def primary_key(self) -> Any: ...
- @property
- def foreign_keys(self) -> Any: ...
- def _init_collections(self): ...
- @property
- def _cols_populated(self): ...
- def _populate_column_collection(self): ...
- def _refresh_for_new_column(self, column): ...
-
-class Join(FromClause): ...
-class Alias(FromClause): ...
-class CTE(Generative, HasSuffixes, Alias): ...
-class FromGrouping(FromClause): ...
-
-class TableClause(Immutable, FromClause):
- def __init__(self, name, *columns): ...
- def _export_columns(self): ...
- @util.memoized_property
- def description(self): ...
- def append_column(self, c): ...
- def get_children(self, **kwargs): ...
- def count(self, whereclause=None, **params): ...
- def insert(self, values=None, inline=False, **kwargs): ...
- def update(self, whereclause=None, values=None, inline=False, **kwargs): ...
- def delete(self, whereclause=None, **kwargs): ...
- @property
- def _from_objects(self): ...
-
-class ForUpdateArg(ClauseElement): ...
-class SelectBase(Executable, FromClause): ...
-class GenerativeSelect(SelectBase): ...
-class CompoundSelect(GenerativeSelect): ...
-class Select(HasPrefixes, HasSuffixes, GenerativeSelect): ...
-class ScalarSelect(Generative, Grouping): ...
-class Exists(UnaryExpression): ...
-class TextAsFrom(SelectBase): ...
-class AnnotatedFromClause(Annotated): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/sql/sqltypes.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/sqltypes.pyi
deleted file mode 100644
index 6c51ef7..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/sql/sqltypes.pyi
+++ /dev/null
@@ -1,57 +0,0 @@
-from .type_api import TypeEngine, TypeDecorator
-from .base import SchemaEventTarget
-
-class _DateAffinity(object): ...
-class Concatenable(object): ...
-class String(TypeEngine, Concatenable):
- def __init__(self, length=None, collation=None,
- convert_unicode=False,
- unicode_error=None,
- _warn_on_bytestring=False): ...
-
-class Text(String): ...
-class Unicode(String): ...
-class UnicodeText(Text): ...
-class Integer(TypeEngine, _DateAffinity): ...
-class SmallInteger(Integer): ...
-class BigInteger(Integer): ...
-class Numeric(TypeEngine, _DateAffinity): ...
-class Float(Numeric): ...
-class DateTime(TypeEngine, _DateAffinity):
- def __init__(self, timezone=None): ...
-class Date(TypeEngine, _DateAffinity): ...
-class Time(TypeEngine, _DateAffinity): ...
-class _Binary(TypeEngine): ...
-class LargeBinary(_Binary): ...
-class Binary(LargeBinary): ...
-class SchemaType(SchemaEventTarget): ...
-class Enum(String, SchemaType): ...
-class PickleType(TypeDecorator): ...
-class Boolean(TypeEngine, SchemaType): ...
-class Interval(_DateAffinity, TypeDecorator): ...
-
-class REAL(Float): ...
-class FLOAT(Float): ...
-class NUMERIC(Numeric): ...
-class DECIMAL(Numeric): ...
-class INTEGER(Integer): ...
-# In code it's INT=INTEGER
-class INT(Integer): ...
-class SMALLINT(SmallInteger): ...
-class BIGINT(BigInteger): ...
-class TIMESTAMP(DateTime): ...
-class DATETIME(DateTime): ...
-class DATE(Date): ...
-class TIME(Time): ...
-class TEXT(Text): ...
-class CLOB(Text): ...
-class VARCHAR(String): ...
-class NVARCHAR(Unicode): ...
-class CHAR(String): ...
-class NCHAR(Unicode): ...
-class BLOB(LargeBinary): ...
-class BINARY(_Binary): ...
-class VARBINARY(_Binary): ...
-class BOOLEAN(Boolean): ...
-class NullType(TypeEngine): ...
-class MatchType(Boolean): ...
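Note on the deleted sqltypes stub: the "# In code it's INT=INTEGER" comment records that at runtime INT is a plain alias for INTEGER, while the stub models it as an empty subclass. A minimal sketch of the two spellings (placeholder names, not the real SQLAlchemy classes):

class INTEGER: ...

# Runtime spelling noted in the deleted comment: a plain alias.
INT = INTEGER

# Stub-style spelling: an empty subclass, which still satisfies
# issubclass(..., INTEGER) but gives INT its own declaration line.
class INT_as_subclass(INTEGER): ...

assert INT is INTEGER and issubclass(INT_as_subclass, INTEGER)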
diff --git a/typeshed/third_party/2and3/sqlalchemy/sql/type_api.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/type_api.pyi
deleted file mode 100644
index 9b8d0e8..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/sql/type_api.pyi
+++ /dev/null
@@ -1,16 +0,0 @@
-from .. import util
-from .visitors import Visitable, VisitableType
-
-class TypeEngine(Visitable):
- @property
- def python_type(self): ...
- def get_dbapi_type(self, dbapi): ...
- def literal_processor(self, dialect): ...
- def bind_processor(self, dialect): ...
- def result_processor(self, dialect, coltype): ...
-
-class VisitableCheckKWArg(util.EnsureKWArgType, VisitableType): ...
-# TODO: class UserDefinedType(util.with_metaclass(VisitableCheckKWArg, TypeEngine)):
-class UserDefinedType(VisitableCheckKWArg, TypeEngine): ...
-class TypeDecorator(TypeEngine): ...
-class Variant(TypeDecorator): ...
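The deleted type_api stub carries a TODO because the runtime class is built with util.with_metaclass, a dynamic base that stub syntax cannot express, so the stub approximates it with plain multiple inheritance. A generic sketch of the six-style pattern (illustrative only, not SQLAlchemy's actual helper):

def with_metaclass(meta, *bases):
    # simplified six-style helper: build a throwaway class whose metaclass is
    # `meta`, so classes derived from the result also get `meta` as metaclass
    return meta("temporary_class", bases, {})

class VisitableCheckKWArg(type): ...
class TypeEngine: ...

class UserDefinedType(with_metaclass(VisitableCheckKWArg, TypeEngine)):
    # runtime pattern that the stub line
    # "class UserDefinedType(VisitableCheckKWArg, TypeEngine)" only approximates
    pass

assert type(UserDefinedType) is VisitableCheckKWArg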
diff --git a/typeshed/third_party/2and3/sqlalchemy/sql/visitors.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/visitors.pyi
deleted file mode 100644
index 6fc58e7..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/sql/visitors.pyi
+++ /dev/null
@@ -1,33 +0,0 @@
-# Stubs for sqlalchemy.sql.visitors (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-class VisitableType(type):
- def __init__(cls, clsname, bases, clsdict) -> None: ...
-
-class Visitable: ...
-
-class ClauseVisitor:
- __traverse_options__ = ... # type: Any
- def traverse_single(self, obj, **kw): ...
- def iterate(self, obj): ...
- def traverse(self, obj): ...
- def chain(self, visitor): ...
-
-class CloningVisitor(ClauseVisitor):
- def copy_and_process(self, list_): ...
- def traverse(self, obj): ...
-
-class ReplacingCloningVisitor(CloningVisitor):
- def replace(self, elem): ...
- def traverse(self, obj): ...
-
-def iterate(obj, opts): ...
-def iterate_depthfirst(obj, opts): ...
-def traverse_using(iterator, obj, visitors): ...
-def traverse(obj, opts, visitors): ...
-def traverse_depthfirst(obj, opts, visitors): ...
-def cloned_traverse(obj, opts, visitors): ...
-def replacement_traverse(obj, opts, replace): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/types.pyi b/typeshed/third_party/2and3/sqlalchemy/types.pyi
deleted file mode 100644
index 7aa160c..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/types.pyi
+++ /dev/null
@@ -1,51 +0,0 @@
-# Stubs for sqlalchemy.types (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from .sql import type_api
-from .sql import sqltypes
-
-TypeEngine = type_api.TypeEngine
-TypeDecorator = type_api.TypeDecorator
-UserDefinedType = type_api.UserDefinedType
-BIGINT = sqltypes.BIGINT
-BINARY = sqltypes.BINARY
-BLOB = sqltypes.BLOB
-BOOLEAN = sqltypes.BOOLEAN
-BigInteger = sqltypes.BigInteger
-Binary = sqltypes.Binary
-Boolean = sqltypes.Boolean
-CHAR = sqltypes.CHAR
-CLOB = sqltypes.CLOB
-Concatenable = sqltypes.Concatenable
-DATE = sqltypes.DATE
-DATETIME = sqltypes.DATETIME
-DECIMAL = sqltypes.DECIMAL
-Date = sqltypes.Date
-DateTime = sqltypes.DateTime
-Enum = sqltypes.Enum
-FLOAT = sqltypes.FLOAT
-Float = sqltypes.Float
-INT = sqltypes.INT
-INTEGER = sqltypes.INTEGER
-Integer = sqltypes.Integer
-Interval = sqltypes.Interval
-LargeBinary = sqltypes.LargeBinary
-NCHAR = sqltypes.NCHAR
-NVARCHAR = sqltypes.NVARCHAR
-NUMERIC = sqltypes.NUMERIC
-Numeric = sqltypes.Numeric
-PickleType = sqltypes.PickleType
-REAL = sqltypes.REAL
-SMALLINT = sqltypes.SMALLINT
-SmallInteger = sqltypes.SmallInteger
-String = sqltypes.String
-TEXT = sqltypes.TEXT
-TIME = sqltypes.TIME
-TIMESTAMP = sqltypes.TIMESTAMP
-Text = sqltypes.Text
-Time = sqltypes.Time
-Unicode = sqltypes.Unicode
-UnicodeText = sqltypes.UnicodeText
-VARBINARY = sqltypes.VARBINARY
-VARCHAR = sqltypes.VARCHAR
diff --git a/typeshed/third_party/2and3/sqlalchemy/util/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/util/__init__.pyi
deleted file mode 100644
index a42c2ce..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/util/__init__.pyi
+++ /dev/null
@@ -1,133 +0,0 @@
-# Stubs for sqlalchemy.util (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from . import compat
-from . import _collections
-from . import langhelpers
-from . import deprecations
-
-callable = compat.callable
-cmp = compat.cmp
-reduce = compat.reduce
-threading = compat.threading
-py3k = compat.py3k
-py33 = compat.py33
-py2k = compat.py2k
-jython = compat.jython
-pypy = compat.pypy
-cpython = compat.cpython
-win32 = compat.win32
-pickle = compat.pickle
-dottedgetter = compat.dottedgetter
-parse_qsl = compat.parse_qsl
-namedtuple = compat.namedtuple
-next = compat.next
-reraise = compat.reraise
-raise_from_cause = compat.raise_from_cause
-text_type = compat.text_type
-safe_kwarg = compat.safe_kwarg
-string_types = compat.string_types
-int_types = compat.int_types
-binary_type = compat.binary_type
-nested = compat.nested
-quote_plus = compat.quote_plus
-with_metaclass = compat.with_metaclass
-print_ = compat.print_
-itertools_filterfalse = compat.itertools_filterfalse
-u = compat.u
-ue = compat.ue
-b = compat.b
-unquote_plus = compat.unquote_plus
-unquote = compat.unquote
-b64decode = compat.b64decode
-b64encode = compat.b64encode
-byte_buffer = compat.byte_buffer
-itertools_filter = compat.itertools_filter
-iterbytes = compat.iterbytes
-StringIO = compat.StringIO
-inspect_getargspec = compat.inspect_getargspec
-zip_longest = compat.zip_longest
-KeyedTuple = _collections.KeyedTuple
-ImmutableContainer = _collections.ImmutableContainer
-immutabledict = _collections.immutabledict
-Properties = _collections.Properties
-OrderedProperties = _collections.OrderedProperties
-ImmutableProperties = _collections.ImmutableProperties
-OrderedDict = _collections.OrderedDict
-OrderedSet = _collections.OrderedSet
-IdentitySet = _collections.IdentitySet
-OrderedIdentitySet = _collections.OrderedIdentitySet
-column_set = _collections.column_set
-column_dict = _collections.column_dict
-ordered_column_set = _collections.ordered_column_set
-populate_column_dict = _collections.populate_column_dict
-unique_list = _collections.unique_list
-UniqueAppender = _collections.UniqueAppender
-PopulateDict = _collections.PopulateDict
-EMPTY_SET = _collections.EMPTY_SET
-to_list = _collections.to_list
-to_set = _collections.to_set
-to_column_set = _collections.to_column_set
-update_copy = _collections.update_copy
-flatten_iterator = _collections.flatten_iterator
-has_intersection = _collections.has_intersection
-LRUCache = _collections.LRUCache
-ScopedRegistry = _collections.ScopedRegistry
-ThreadLocalRegistry = _collections.ThreadLocalRegistry
-WeakSequence = _collections.WeakSequence
-coerce_generator_arg = _collections.coerce_generator_arg
-lightweight_named_tuple = _collections.lightweight_named_tuple
-iterate_attributes = langhelpers.iterate_attributes
-class_hierarchy = langhelpers.class_hierarchy
-portable_instancemethod = langhelpers.portable_instancemethod
-unbound_method_to_callable = langhelpers.unbound_method_to_callable
-getargspec_init = langhelpers.getargspec_init
-format_argspec_init = langhelpers.format_argspec_init
-format_argspec_plus = langhelpers.format_argspec_plus
-get_func_kwargs = langhelpers.get_func_kwargs
-get_cls_kwargs = langhelpers.get_cls_kwargs
-decorator = langhelpers.decorator
-as_interface = langhelpers.as_interface
-memoized_property = langhelpers.memoized_property
-memoized_instancemethod = langhelpers.memoized_instancemethod
-md5_hex = langhelpers.md5_hex
-group_expirable_memoized_property = langhelpers.group_expirable_memoized_property
-dependencies = langhelpers.dependencies
-decode_slice = langhelpers.decode_slice
-monkeypatch_proxied_specials = langhelpers.monkeypatch_proxied_specials
-asbool = langhelpers.asbool
-bool_or_str = langhelpers.bool_or_str
-coerce_kw_type = langhelpers.coerce_kw_type
-duck_type_collection = langhelpers.duck_type_collection
-assert_arg_type = langhelpers.assert_arg_type
-symbol = langhelpers.symbol
-dictlike_iteritems = langhelpers.dictlike_iteritems
-classproperty = langhelpers.classproperty
-set_creation_order = langhelpers.set_creation_order
-warn_exception = langhelpers.warn_exception
-warn = langhelpers.warn
-NoneType = langhelpers.NoneType
-constructor_copy = langhelpers.constructor_copy
-methods_equivalent = langhelpers.methods_equivalent
-chop_traceback = langhelpers.chop_traceback
-asint = langhelpers.asint
-generic_repr = langhelpers.generic_repr
-counter = langhelpers.counter
-PluginLoader = langhelpers.PluginLoader
-hybridproperty = langhelpers.hybridproperty
-hybridmethod = langhelpers.hybridmethod
-safe_reraise = langhelpers.safe_reraise
-get_callable_argspec = langhelpers.get_callable_argspec
-only_once = langhelpers.only_once
-attrsetter = langhelpers.attrsetter
-ellipses_string = langhelpers.ellipses_string
-warn_limited = langhelpers.warn_limited
-map_bits = langhelpers.map_bits
-MemoizedSlots = langhelpers.MemoizedSlots
-EnsureKWArgType = langhelpers.EnsureKWArgType
-warn_deprecated = deprecations.warn_deprecated
-warn_pending_deprecation = deprecations.warn_pending_deprecation
-deprecated = deprecations.deprecated
-pending_deprecation = deprecations.pending_deprecation
-inject_docstring_text = deprecations.inject_docstring_text
diff --git a/typeshed/third_party/2and3/sqlalchemy/util/_collections.pyi b/typeshed/third_party/2and3/sqlalchemy/util/_collections.pyi
deleted file mode 100644
index 58ca80a..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/util/_collections.pyi
+++ /dev/null
@@ -1,214 +0,0 @@
-# Stubs for sqlalchemy.util._collections (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-from . import compat
-
-threading = compat.threading
-itertools_filterfalse = compat.itertools_filterfalse
-string_types = compat.string_types
-
-EMPTY_SET = ... # type: Any
-
-class AbstractKeyedTuple(tuple):
- def keys(self): ...
-
-class KeyedTuple(AbstractKeyedTuple):
- def __new__(cls, vals, labels=...): ...
- def __setattr__(self, key, value): ...
-
-class _LW(AbstractKeyedTuple):
- def __new__(cls, vals): ...
- def __reduce__(self): ...
-
-class ImmutableContainer:
- __delitem__ = ... # type: Any
-
-class immutabledict(ImmutableContainer, dict):
- clear = ... # type: Any
- def __new__(cls, *args): ...
- def __init__(self, *args) -> None: ...
- def __reduce__(self): ...
- def union(self, d): ...
-
-class Properties:
- def __init__(self, data) -> None: ...
- def __len__(self): ...
- def __iter__(self): ...
- def __add__(self, other): ...
- def __setitem__(self, key, object): ...
- def __getitem__(self, key): ...
- def __delitem__(self, key): ...
- def __setattr__(self, key, obj): ...
- def __getattr__(self, key): ...
- def __contains__(self, key): ...
- def as_immutable(self): ...
- def update(self, value): ...
- def get(self, key, default=...): ...
- def keys(self): ...
- def values(self): ...
- def items(self): ...
- def has_key(self, key): ...
- def clear(self): ...
-
-class OrderedProperties(Properties):
- def __init__(self) -> None: ...
-
-class ImmutableProperties(ImmutableContainer, Properties): ...
-
-class OrderedDict(dict):
- def __reduce__(self): ...
- def __init__(self, ____sequence=..., **kwargs) -> None: ...
- def clear(self): ...
- def copy(self): ...
- def __copy__(self): ...
- def sort(self, *arg, **kw): ...
- def update(self, ____sequence=..., **kwargs): ...
- def setdefault(self, key, value): ...
- def __iter__(self): ...
- def keys(self): ...
- def values(self): ...
- def items(self): ...
- def itervalues(self): ...
- def iterkeys(self): ...
- def iteritems(self): ...
- def __setitem__(self, key, object): ...
- def __delitem__(self, key): ...
- def pop(self, key, *default): ...
- def popitem(self): ...
-
-class OrderedSet(set):
- def __init__(self, d=...) -> None: ...
- def add(self, element): ...
- def remove(self, element): ...
- def insert(self, pos, element): ...
- def discard(self, element): ...
- def clear(self): ...
- def __getitem__(self, key): ...
- def __iter__(self): ...
- def __add__(self, other): ...
- def update(self, iterable): ...
- __ior__ = ... # type: Any
- def union(self, other): ...
- __or__ = ... # type: Any
- def intersection(self, other): ...
- __and__ = ... # type: Any
- def symmetric_difference(self, other): ...
- __xor__ = ... # type: Any
- def difference(self, other): ...
- __sub__ = ... # type: Any
- def intersection_update(self, other): ...
- __iand__ = ... # type: Any
- def symmetric_difference_update(self, other): ...
- __ixor__ = ... # type: Any
- def difference_update(self, other): ...
- __isub__ = ... # type: Any
-
-class IdentitySet:
- def __init__(self, iterable=...) -> None: ...
- def add(self, value): ...
- def __contains__(self, value): ...
- def remove(self, value): ...
- def discard(self, value): ...
- def pop(self): ...
- def clear(self): ...
- def __cmp__(self, other): ...
- def __eq__(self, other): ...
- def __ne__(self, other): ...
- def issubset(self, iterable): ...
- def __le__(self, other): ...
- def __lt__(self, other): ...
- def issuperset(self, iterable): ...
- def __ge__(self, other): ...
- def __gt__(self, other): ...
- def union(self, iterable): ...
- def __or__(self, other): ...
- def update(self, iterable): ...
- def __ior__(self, other): ...
- def difference(self, iterable): ...
- def __sub__(self, other): ...
- def difference_update(self, iterable): ...
- def __isub__(self, other): ...
- def intersection(self, iterable): ...
- def __and__(self, other): ...
- def intersection_update(self, iterable): ...
- def __iand__(self, other): ...
- def symmetric_difference(self, iterable): ...
- def __xor__(self, other): ...
- def symmetric_difference_update(self, iterable): ...
- def __ixor__(self, other): ...
- def copy(self): ...
- __copy__ = ... # type: Any
- def __len__(self): ...
- def __iter__(self): ...
- def __hash__(self): ...
-
-class WeakSequence:
- def __init__(self, __elements=...) -> None: ...
- def append(self, item): ...
- def __len__(self): ...
- def __iter__(self): ...
- def __getitem__(self, index): ...
-
-class OrderedIdentitySet(IdentitySet):
- class _working_set(OrderedSet):
- __sa_hash_exempt__ = ... # type: Any
- def __init__(self, iterable=...) -> None: ...
-
-class PopulateDict(dict):
- creator = ... # type: Any
- def __init__(self, creator) -> None: ...
- def __missing__(self, key): ...
-
-column_set = set
-column_dict = dict
-ordered_column_set = OrderedSet
-populate_column_dict = PopulateDict
-
-def unique_list(seq, hashfunc=...): ...
-
-class UniqueAppender:
- data = ... # type: Any
- def __init__(self, data, via=...) -> None: ...
- def append(self, item): ...
- def __iter__(self): ...
-
-def coerce_generator_arg(arg): ...
-def to_list(x, default=...): ...
-def has_intersection(set_, iterable): ...
-def to_set(x): ...
-def to_column_set(x): ...
-def update_copy(d, _new=..., **kw): ...
-def flatten_iterator(x): ...
-
-class LRUCache(dict):
- capacity = ... # type: Any
- threshold = ... # type: Any
- def __init__(self, capacity=..., threshold=...) -> None: ...
- def get(self, key, default=...): ...
- def __getitem__(self, key): ...
- def values(self): ...
- def setdefault(self, key, value): ...
- def __setitem__(self, key, value): ...
-
-def lightweight_named_tuple(name, fields): ...
-
-class ScopedRegistry:
- createfunc = ... # type: Any
- scopefunc = ... # type: Any
- registry = ... # type: Any
- def __init__(self, createfunc, scopefunc) -> None: ...
- def __call__(self): ...
- def has(self): ...
- def set(self, obj): ...
- def clear(self): ...
-
-class ThreadLocalRegistry(ScopedRegistry):
- createfunc = ... # type: Any
- registry = ... # type: Any
- def __init__(self, createfunc) -> None: ...
- def __call__(self): ...
- def has(self): ...
- def set(self, obj): ...
- def clear(self): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/util/compat.pyi b/typeshed/third_party/2and3/sqlalchemy/util/compat.pyi
deleted file mode 100644
index 191222c..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/util/compat.pyi
+++ /dev/null
@@ -1,67 +0,0 @@
-# Stubs for sqlalchemy.util.compat (Python 2)
-
-from typing import Any, Text
-from collections import namedtuple
-
-import threading
-import pickle
-from six.moves.urllib.parse import (quote_plus, unquote_plus,
- parse_qsl, quote, unquote)
-# import configparser
-from six.moves import StringIO
-
-from io import BytesIO as byte_buffer
-
-from operator import attrgetter as dottedgetter
-
-from six.moves import zip_longest
-
-py33 = ... # type: Any
-py32 = ... # type: Any
-py3k = ... # type: Any
-py2k = ... # type: Any
-py265 = ... # type: Any
-jython = ... # type: Any
-pypy = ... # type: Any
-win32 = ... # type: Any
-cpython = ... # type: Any
-next = ... # type: Any
-safe_kwarg = ... # type: Any
-
-ArgSpec = namedtuple('ArgSpec', ['args', 'varargs', 'keywords', 'defaults'])
-
-def inspect_getargspec(func): ...
-
-string_types = ... # type: Any
-binary_type = ... # type: Any
-text_type = Text
-int_types = ... # type: Any
-
-def callable(fn): ...
-def cmp(a, b): ...
-
-itertools_filterfalse = ... # type: Any
-itertools_filter = ... # type: Any
-itertools_imap = ... # type: Any
-
-def b64encode(x): ...
-def b64decode(x): ...
-
-def iterbytes(buf): ...
-def u(s): ...
-def ue(s): ...
-def b(s): ...
-def import_(*args): ...
-
-reduce = ... # type: Any
-
-def print_(*args, **kwargs): ...
-
-time_func = ... # type: Any
-
-def reraise(tp, value, tb=..., cause=...): ...
-def raise_from_cause(exception, exc_info=...): ...
-
-def exec_(func_text, globals_, lcl=...): ...
-def with_metaclass(meta, *bases): ...
-def nested(*managers): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/util/deprecations.pyi b/typeshed/third_party/2and3/sqlalchemy/util/deprecations.pyi
deleted file mode 100644
index 49940cf..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/util/deprecations.pyi
+++ /dev/null
@@ -1,13 +0,0 @@
-# Stubs for sqlalchemy.util.deprecations (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from . import langhelpers
-
-decorator = langhelpers.decorator
-
-def warn_deprecated(msg, stacklevel=...): ...
-def warn_pending_deprecation(msg, stacklevel=...): ...
-def deprecated(version, message=..., add_deprecation_to_docstring=...): ...
-def pending_deprecation(version, message=..., add_deprecation_to_docstring=...): ...
-def inject_docstring_text(doctext, injecttext, pos): ...
diff --git a/typeshed/third_party/2and3/sqlalchemy/util/langhelpers.pyi b/typeshed/third_party/2and3/sqlalchemy/util/langhelpers.pyi
deleted file mode 100644
index 2dadc07..0000000
--- a/typeshed/third_party/2and3/sqlalchemy/util/langhelpers.pyi
+++ /dev/null
@@ -1,134 +0,0 @@
-# Stubs for sqlalchemy.util.langhelpers (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-from . import compat
-
-def md5_hex(x): ...
-
-class safe_reraise:
- def __enter__(self): ...
- def __exit__(self, type_, value, traceback): ...
-
-def decode_slice(slc): ...
-def map_bits(fn, n): ...
-def decorator(target): ...
-def public_factory(target, location): ...
-
-class PluginLoader:
- group = ... # type: Any
- impls = ... # type: Any
- auto_fn = ... # type: Any
- def __init__(self, group, auto_fn=...) -> None: ...
- def load(self, name): ...
- def register(self, name, modulepath, objname): ...
-
-def get_cls_kwargs(cls, _set=...): ...
-def inspect_func_args(fn): ...
-def get_func_kwargs(func): ...
-def get_callable_argspec(fn, no_self=..., _is_init=...): ...
-def format_argspec_plus(fn, grouped=...): ...
-def format_argspec_init(method, grouped=...): ...
-def getargspec_init(method): ...
-def unbound_method_to_callable(func_or_cls): ...
-def generic_repr(obj, additional_kw=..., to_inspect=..., omit_kwarg=...): ...
-
-class portable_instancemethod:
- target = ... # type: Any
- name = ... # type: Any
- def __init__(self, meth) -> None: ...
- def __call__(self, *arg, **kw): ...
-
-def class_hierarchy(cls): ...
-def iterate_attributes(cls): ...
-def monkeypatch_proxied_specials(into_cls, from_cls, skip=..., only=..., name=..., from_instance=...): ...
-def methods_equivalent(meth1, meth2): ...
-def as_interface(obj, cls=..., methods=..., required=...): ...
-
-class memoized_property:
- fget = ... # type: Any
- __name__ = ... # type: Any
- def __init__(self, fget, doc=...) -> None: ...
- def __get__(self, obj, cls): ...
- @classmethod
- def reset(cls, obj, name): ...
-
-def memoized_instancemethod(fn): ...
-
-class group_expirable_memoized_property:
- attributes = ... # type: Any
- def __init__(self, attributes=...) -> None: ...
- def expire_instance(self, instance): ...
- def __call__(self, fn): ...
- def method(self, fn): ...
-
-class MemoizedSlots:
- def __getattr__(self, key): ...
-
-def dependency_for(modulename): ...
-
-class dependencies:
- import_deps = ... # type: Any
- def __init__(self, *deps) -> None: ...
- def __call__(self, fn): ...
- @classmethod
- def resolve_all(cls, path): ...
- class _importlater:
- def __new__(cls, path, addtl): ...
- def __init__(self, path, addtl) -> None: ...
- def module(self): ...
- def __getattr__(self, key): ...
-
-def asbool(obj): ...
-def bool_or_str(*text): ...
-def asint(value): ...
-def coerce_kw_type(kw, key, type_, flexi_bool=...): ...
-def constructor_copy(obj, cls, *args, **kw): ...
-def counter(): ...
-def duck_type_collection(specimen, default=...): ...
-def assert_arg_type(arg, argtype, name): ...
-def dictlike_iteritems(dictlike): ...
-
-class classproperty:
- def __init__(self, fget, *arg, **kw) -> None: ...
- def __get__(desc, self, cls): ...
-
-class hybridproperty:
- func = ... # type: Any
- def __init__(self, func) -> None: ...
- def __get__(self, instance, owner): ...
-
-class hybridmethod:
- func = ... # type: Any
- def __init__(self, func) -> None: ...
- def __get__(self, instance, owner): ...
-
-class _symbol(int):
- def __new__(self, name, doc=..., canonical=...): ...
- def __reduce__(self): ...
-
-class symbol:
- symbols = ... # type: Any
- def __new__(cls, name, doc=..., canonical=...): ...
-
-def set_creation_order(instance): ...
-def warn_exception(func, *args, **kwargs): ...
-def ellipses_string(value, len_=...): ...
-
-class _hash_limit_string(compat.text_type):
- def __new__(cls, value, num, args): ...
- def __hash__(self): ...
- def __eq__(self, other): ...
-
-def warn(msg): ...
-def warn_limited(msg, args): ...
-def only_once(fn): ...
-def chop_traceback(tb, exclude_prefix=..., exclude_suffix=...): ...
-
-NoneType = ... # type: Any
-
-def attrsetter(attrname): ...
-
-class EnsureKWArgType(type):
- def __init__(cls, clsname, bases, clsdict) -> None: ...
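Among the deleted langhelpers entries, memoized_property describes a caching descriptor (an fget attribute, __get__, and a class-level reset). A generic sketch of such a descriptor matching the stub's interface, not copied from SQLAlchemy:

from typing import Any, Callable, Optional

class memoized_property:
    """Cache the result of `fget` in the instance dict on first access."""
    def __init__(self, fget: Callable[[Any], Any], doc: Optional[str] = None) -> None:
        self.fget = fget
        self.__doc__ = doc or fget.__doc__
        self.__name__ = fget.__name__
    def __get__(self, obj: Any, cls: Optional[type] = None) -> Any:
        if obj is None:
            return self
        # Storing under the same attribute name shadows the descriptor on later lookups.
        obj.__dict__[self.__name__] = value = self.fget(obj)
        return value
    @classmethod
    def reset(cls, obj: Any, name: str) -> None:
        # Drop the cached value so the next access recomputes it.
        obj.__dict__.pop(name, None)

class Example:
    @memoized_property
    def expensive(self) -> int:
        print("computed once")
        return 42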
diff --git a/typeshed/third_party/2/yaml/__init__.pyi b/typeshed/third_party/2and3/yaml/__init__.pyi
similarity index 98%
rename from typeshed/third_party/2/yaml/__init__.pyi
rename to typeshed/third_party/2and3/yaml/__init__.pyi
index 6efac20..d209770 100644
--- a/typeshed/third_party/2/yaml/__init__.pyi
+++ b/typeshed/third_party/2and3/yaml/__init__.pyi
@@ -40,7 +40,7 @@ class YAMLObjectMetaclass(type):
def __init__(cls, name, bases, kwds) -> None: ...
class YAMLObject:
- __metaclass__ = ... # type: Any
+ __metaclass__ = YAMLObjectMetaclass
yaml_loader = ... # type: Any
yaml_dumper = ... # type: Any
yaml_tag = ... # type: Any
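The yaml stub change points __metaclass__ at YAMLObjectMetaclass instead of Any; __metaclass__ is the Python 2 spelling of what Python 3 writes with the metaclass= keyword. A small Python 3 illustration of the relationship the stub now records (a sketch, not PyYAML's real implementation):

class YAMLObjectMetaclass(type):
    def __init__(cls, name, bases, kwds):
        super().__init__(name, bases, kwds)
        # PyYAML's real metaclass hooks in here to register loaders/dumpers
        # when yaml_tag is set on the class.

class YAMLObject(metaclass=YAMLObjectMetaclass):  # Py2 code exposes this as __metaclass__
    yaml_tag = None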
diff --git a/typeshed/third_party/2/yaml/composer.pyi b/typeshed/third_party/2and3/yaml/composer.pyi
similarity index 100%
rename from typeshed/third_party/2/yaml/composer.pyi
rename to typeshed/third_party/2and3/yaml/composer.pyi
diff --git a/typeshed/third_party/2/yaml/constructor.pyi b/typeshed/third_party/2and3/yaml/constructor.pyi
similarity index 100%
rename from typeshed/third_party/2/yaml/constructor.pyi
rename to typeshed/third_party/2and3/yaml/constructor.pyi
diff --git a/typeshed/third_party/2/yaml/dumper.pyi b/typeshed/third_party/2and3/yaml/dumper.pyi
similarity index 100%
rename from typeshed/third_party/2/yaml/dumper.pyi
rename to typeshed/third_party/2and3/yaml/dumper.pyi
diff --git a/typeshed/third_party/2/yaml/emitter.pyi b/typeshed/third_party/2and3/yaml/emitter.pyi
similarity index 100%
rename from typeshed/third_party/2/yaml/emitter.pyi
rename to typeshed/third_party/2and3/yaml/emitter.pyi
diff --git a/typeshed/third_party/2/yaml/error.pyi b/typeshed/third_party/2and3/yaml/error.pyi
similarity index 100%
rename from typeshed/third_party/2/yaml/error.pyi
rename to typeshed/third_party/2and3/yaml/error.pyi
diff --git a/typeshed/third_party/2/yaml/events.pyi b/typeshed/third_party/2and3/yaml/events.pyi
similarity index 100%
rename from typeshed/third_party/2/yaml/events.pyi
rename to typeshed/third_party/2and3/yaml/events.pyi
diff --git a/typeshed/third_party/2/yaml/loader.pyi b/typeshed/third_party/2and3/yaml/loader.pyi
similarity index 100%
rename from typeshed/third_party/2/yaml/loader.pyi
rename to typeshed/third_party/2and3/yaml/loader.pyi
diff --git a/typeshed/third_party/2/yaml/nodes.pyi b/typeshed/third_party/2and3/yaml/nodes.pyi
similarity index 100%
rename from typeshed/third_party/2/yaml/nodes.pyi
rename to typeshed/third_party/2and3/yaml/nodes.pyi
diff --git a/typeshed/third_party/2/yaml/parser.pyi b/typeshed/third_party/2and3/yaml/parser.pyi
similarity index 100%
rename from typeshed/third_party/2/yaml/parser.pyi
rename to typeshed/third_party/2and3/yaml/parser.pyi
diff --git a/typeshed/third_party/2/yaml/reader.pyi b/typeshed/third_party/2and3/yaml/reader.pyi
similarity index 100%
rename from typeshed/third_party/2/yaml/reader.pyi
rename to typeshed/third_party/2and3/yaml/reader.pyi
diff --git a/typeshed/third_party/2/yaml/representer.pyi b/typeshed/third_party/2and3/yaml/representer.pyi
similarity index 100%
rename from typeshed/third_party/2/yaml/representer.pyi
rename to typeshed/third_party/2and3/yaml/representer.pyi
diff --git a/typeshed/third_party/2/yaml/resolver.pyi b/typeshed/third_party/2and3/yaml/resolver.pyi
similarity index 100%
rename from typeshed/third_party/2/yaml/resolver.pyi
rename to typeshed/third_party/2and3/yaml/resolver.pyi
diff --git a/typeshed/third_party/2/yaml/scanner.pyi b/typeshed/third_party/2and3/yaml/scanner.pyi
similarity index 100%
rename from typeshed/third_party/2/yaml/scanner.pyi
rename to typeshed/third_party/2and3/yaml/scanner.pyi
diff --git a/typeshed/third_party/2/yaml/serializer.pyi b/typeshed/third_party/2and3/yaml/serializer.pyi
similarity index 100%
rename from typeshed/third_party/2/yaml/serializer.pyi
rename to typeshed/third_party/2and3/yaml/serializer.pyi
diff --git a/typeshed/third_party/2/yaml/tokens.pyi b/typeshed/third_party/2and3/yaml/tokens.pyi
similarity index 100%
rename from typeshed/third_party/2/yaml/tokens.pyi
rename to typeshed/third_party/2and3/yaml/tokens.pyi
diff --git a/typeshed/third_party/3.6/click/core.pyi b/typeshed/third_party/3.6/click/core.pyi
index fe44668..abd4681 100644
--- a/typeshed/third_party/3.6/click/core.pyi
+++ b/typeshed/third_party/3.6/click/core.pyi
@@ -239,8 +239,8 @@ class Command(BaseCommand):
...
-T = TypeVar('T')
-Decorator = Callable[[T], T]
+_T = TypeVar('_T')
+_Decorator = Callable[[_T], _T]
class MultiCommand(Command):
@@ -264,7 +264,7 @@ class MultiCommand(Command):
def resultcallback(
self, replace: bool = False
- ) -> Decorator:
+ ) -> _Decorator:
...
def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None:
@@ -293,10 +293,10 @@ class Group(MultiCommand):
def add_command(self, cmd: Command, name: str = None):
...
- def command(self, *args, **kwargs) -> Decorator:
+ def command(self, *args, **kwargs) -> _Decorator:
...
- def group(self, *args, **kwargs) -> Decorator:
+ def group(self, *args, **kwargs) -> _Decorator:
...
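The click stubs rename module-level helpers like T and Decorator to _T and _Decorator; the leading underscore marks them as private to the stub rather than part of click's public API. A minimal sketch of the aliasing pattern and the identity-shaped decorator annotation it describes (placeholder code; only the annotation shape matches the stubs):

from typing import Callable, TypeVar

_T = TypeVar('_T')                # leading underscore: private to the stub, not re-exported
_Decorator = Callable[[_T], _T]   # a callable returning a value of the same type it receives

def preserve_type(func: _T) -> _T:
    # placeholder decorator, only to show the _T -> _T shape used by
    # safecall/pass_context/pass_obj in the stubs; click's real helpers do more
    return func

@preserve_type
def greet(name: str) -> str:
    return "hello " + name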
diff --git a/typeshed/third_party/3.6/click/decorators.pyi b/typeshed/third_party/3.6/click/decorators.pyi
index a3dcddd..8cd1616 100644
--- a/typeshed/third_party/3.6/click/decorators.pyi
+++ b/typeshed/third_party/3.6/click/decorators.pyi
@@ -1,23 +1,24 @@
+from distutils.version import Version
from typing import Any, Callable, Dict, List, TypeVar, Union
from click.core import Command, Group, Argument, Option, Parameter, Context
from click.types import ParamType
-T = TypeVar('T')
-Decorator = Callable[[T], T]
+_T = TypeVar('_T')
+_Decorator = Callable[[_T], _T]
-def pass_context(T) -> T:
+def pass_context(_T) -> _T:
...
-def pass_obj(T) -> T:
+def pass_obj(_T) -> _T:
...
def make_pass_decorator(
object_type: type, ensure: bool = False
-) -> Callable[[T], T]:
+) -> Callable[[_T], _T]:
...
@@ -33,7 +34,7 @@ def command(
short_help: str = None,
options_metavar: str = '[OPTIONS]',
add_help_option: bool = True,
-) -> Decorator:
+) -> _Decorator:
...
@@ -56,7 +57,9 @@ def group(
short_help: str = None,
options_metavar: str = '[OPTIONS]',
add_help_option: bool = True,
-) -> Decorator:
+ # User-defined
+ **kwargs: Any,
+) -> _Decorator:
...
@@ -74,7 +77,7 @@ def argument(
expose_value: bool = True,
is_eager: bool = False,
envvar: Union[str, List[str]] = None
-) -> Decorator:
+) -> _Decorator:
...
@@ -101,7 +104,7 @@ def option(
expose_value: bool = True,
is_eager: bool = False,
envvar: Union[str, List[str]] = None
-) -> Decorator:
+) -> _Decorator:
...
@@ -129,7 +132,7 @@ def confirmation_option(
expose_value: bool = False,
is_eager: bool = False,
envvar: Union[str, List[str]] = None
-) -> Decorator:
+) -> _Decorator:
...
@@ -157,16 +160,17 @@ def password_option(
expose_value: bool = True,
is_eager: bool = False,
envvar: Union[str, List[str]] = None
-) -> Decorator:
+) -> _Decorator:
...
# Defaults copied from the decorator body.
def version_option(
- version: str = None,
+ version: Union[str, Version] = None,
*param_decls: str,
cls: type = Option,
# Option
+ prog_name: str = None,
show_default: bool = False,
prompt: bool = False,
confirmation_prompt: bool = False,
@@ -186,7 +190,7 @@ def version_option(
expose_value: bool = False,
is_eager: bool = True,
envvar: Union[str, List[str]] = None
-) -> Decorator:
+) -> _Decorator:
...
@@ -214,5 +218,5 @@ def help_option(
expose_value: bool = False,
is_eager: bool = True,
envvar: Union[str, List[str]] = None
-) -> Decorator:
+) -> _Decorator:
...
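The decorators stub adds **kwargs: Any (flagged "# User-defined") to group(). A hedged sketch of why that matters: click forwards unrecognized keyword arguments to the command class, so a custom Group subclass can accept extra parameters (TaggedGroup and the tag argument below are hypothetical):

import click

class TaggedGroup(click.Group):
    # hypothetical subclass accepting one extra, user-defined keyword argument
    def __init__(self, *args, tag=None, **kwargs):
        super().__init__(*args, **kwargs)
        self.tag = tag

@click.group(cls=TaggedGroup, tag="admin")  # "tag" travels through **kwargs to TaggedGroup
def cli():
    """Toy command group."""

if __name__ == "__main__":
    cli()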
diff --git a/typeshed/third_party/3.6/click/termui.pyi b/typeshed/third_party/3.6/click/termui.pyi
index 33ea7f4..5e0d407 100644
--- a/typeshed/third_party/3.6/click/termui.pyi
+++ b/typeshed/third_party/3.6/click/termui.pyi
@@ -58,18 +58,18 @@ def echo_via_pager(text: str, color: bool = None) -> None:
...
-T = TypeVar('T')
+_T = TypeVar('_T')
@contextmanager
def progressbar(
- iterable=Iterable[T],
+ iterable: Optional[Iterable[_T]] = None,
length: int = None,
label: str = None,
show_eta: bool = True,
show_percent: bool = None,
show_pos: bool = False,
- item_show_func: Callable[[T], str] = None,
+ item_show_func: Callable[[_T], str] = None,
fill_char: str = '#',
empty_char: str = '-',
bar_template: str = '%(label)s [%(bar)s] %(info)s',
@@ -77,7 +77,7 @@ def progressbar(
width: int = 36,
file: IO = None,
color: bool = None,
-) -> Generator[T, None, None]:
+) -> Generator[_T, None, None]:
...
diff --git a/typeshed/third_party/3.6/click/types.pyi b/typeshed/third_party/3.6/click/types.pyi
index 9d06a6f..16eab86 100644
--- a/typeshed/third_party/3.6/click/types.pyi
+++ b/typeshed/third_party/3.6/click/types.pyi
@@ -119,14 +119,14 @@ class File(ParamType):
...
-F = TypeVar('F') # result of the function
-Func = Callable[[Optional[str]], F]
+_F = TypeVar('_F') # result of the function
+_Func = Callable[[Optional[str]], _F]
class FuncParamType(ParamType):
- func: Func
+ func: _Func
- def __init__(self, func: Func) -> None:
+ def __init__(self, func: _Func) -> None:
...
def __call__(
@@ -134,7 +134,7 @@ class FuncParamType(ParamType):
value: Optional[str],
param: Parameter = None,
ctx: Context = None,
- ) -> F:
+ ) -> _F:
...
def convert(
@@ -142,7 +142,7 @@ class FuncParamType(ParamType):
value: str,
param: Optional[Parameter],
ctx: Optional[Context],
- ) -> F:
+ ) -> _F:
...
@@ -171,7 +171,7 @@ class IntRange(IntParamType):
...
-PathType = TypeVar('PathType', str, bytes)
+_PathType = TypeVar('_PathType', str, bytes)
class Path(ParamType):
@@ -184,11 +184,11 @@ class Path(ParamType):
readable: bool = True,
resolve_path: bool = False,
allow_dash: bool = False,
- path_type: PathType = None,
+ path_type: _PathType = None,
) -> None:
...
- def coerce_path_result(self, rv: Union[str, bytes]) -> PathType:
+ def coerce_path_result(self, rv: Union[str, bytes]) -> _PathType:
...
def __call__(
@@ -196,7 +196,7 @@ class Path(ParamType):
value: Optional[str],
param: Parameter = None,
ctx: Context = None,
- ) -> PathType:
+ ) -> _PathType:
...
def convert(
@@ -204,7 +204,7 @@ class Path(ParamType):
value: str,
param: Optional[Parameter],
ctx: Optional[Context],
- ) -> PathType:
+ ) -> _PathType:
...
class StringParamType(ParamType):
diff --git a/typeshed/third_party/3.6/click/utils.pyi b/typeshed/third_party/3.6/click/utils.pyi
index 389659f..3658295 100644
--- a/typeshed/third_party/3.6/click/utils.pyi
+++ b/typeshed/third_party/3.6/click/utils.pyi
@@ -1,15 +1,15 @@
from typing import Any, Callable, Iterator, IO, List, Optional, TypeVar, Union
-T = TypeVar('T')
-Decorator = Callable[[T], T]
+_T = TypeVar('_T')
+_Decorator = Callable[[_T], _T]
def _posixify(name: str) -> str:
...
-def safecall(func: T) -> T:
+def safecall(func: _T) -> _T:
...
@@ -57,7 +57,7 @@ class LazyFile:
...
-class KeepOpenFile(object):
+class KeepOpenFile:
_file: IO
def __init__(self, file: IO) -> None:
diff --git a/typeshed/third_party/3/dateutil/parser.pyi b/typeshed/third_party/3/dateutil/parser.pyi
index 32df996..fcd9775 100644
--- a/typeshed/third_party/3/dateutil/parser.pyi
+++ b/typeshed/third_party/3/dateutil/parser.pyi
@@ -1,10 +1,10 @@
-from typing import List, Tuple, Optional, Callable, Union, IO, Any
+from typing import List, Tuple, Optional, Callable, Union, IO, Any, Dict
from datetime import datetime
__all__ = ... # type: List[str]
-class parserinfo(object):
+class parserinfo:
JUMP = ... # type: List[str]
WEEKDAYS = ... # type: List[Tuple[str, str]]
MONTHS = ... # type: List[Tuple[str, str]]
@@ -27,7 +27,7 @@ class parserinfo(object):
def validate(self, year: datetime) -> bool: ...
-class parser(object):
+class parser:
def __init__(self, info: parserinfo=...) -> None: ...
def parse(
@@ -41,11 +41,11 @@ class parser(object):
DEFAULTPARSER = ... # type: parser
-def parse(timestr, parserinfo: parserinfo=..., **kwargs) -> datetime:
+def parse(timestr: Union[str, bytes, IO[Any]], parserinfo: parserinfo=..., **kwargs) -> datetime:
...
-class _tzparser(object):
+class _tzparser:
...
diff --git a/typeshed/third_party/3/dateutil/relativedelta.pyi b/typeshed/third_party/3/dateutil/relativedelta.pyi
index 35e6d07..f221a20 100644
--- a/typeshed/third_party/3/dateutil/relativedelta.pyi
+++ b/typeshed/third_party/3/dateutil/relativedelta.pyi
@@ -1,10 +1,10 @@
-from typing import Optional, overload, Union
+from typing import Optional, overload, Union, List
from datetime import date, datetime, timedelta
__all__ = ... # type: List[str]
-class weekday(object):
+class weekday:
def __init__(self, weekday: int, n: Optional[int]=...) -> None: ...
def __call__(self, n: int) -> 'weekday': ...
@@ -25,7 +25,7 @@ SA = ... # type: weekday
SU = ... # type: weekday
-class relativedelta(object):
+class relativedelta:
def __init__(self,
dt1: Optional[date]=...,
dt2: Optional[date]=...,
diff --git a/typeshed/third_party/3/dateutil/tz/_common.pyi b/typeshed/third_party/3/dateutil/tz/_common.pyi
index e0856d2..a190c54 100644
--- a/typeshed/third_party/3/dateutil/tz/_common.pyi
+++ b/typeshed/third_party/3/dateutil/tz/_common.pyi
@@ -6,7 +6,7 @@ from typing import Any, Optional
from datetime import datetime, tzinfo, timedelta
def tzname_in_python2(namefunc): ...
-def enfold(dt, fold: int = ...): ...
+def enfold(dt: datetime, fold: int = ...): ...
class _DatetimeWithFold(datetime):
@property
diff --git a/typeshed/third_party/3/dateutil/tz/tz.pyi b/typeshed/third_party/3/dateutil/tz/tz.pyi
index 42bf05a..8564270 100644
--- a/typeshed/third_party/3/dateutil/tz/tz.pyi
+++ b/typeshed/third_party/3/dateutil/tz/tz.pyi
@@ -2,7 +2,7 @@
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
-from typing import Any, Optional, Union, IO, Tuple
+from typing import Any, Optional, Union, IO, Tuple, List
import datetime
from ._common import tzname_in_python2 as tzname_in_python2, _tzinfo as _tzinfo
from ._common import tzrangebase as tzrangebase, enfold as enfold
@@ -70,7 +70,7 @@ class tzrange(tzrangebase):
class tzstr(tzrange):
hasdst = ... # type: bool
- def __init__(self, s, posix_offset: bool = ...) -> None: ...
+ def __init__(self, s: Union[bytes, str, IO[str]], posix_offset: bool = ...) -> None: ...
class tzical:
def __init__(self, fileobj: Union[str, IO[str]]) -> None: ...
diff --git a/typeshed/third_party/3/enum.pyi b/typeshed/third_party/3/enum.pyi
index 3b97e1b..a7e0fd6 100644
--- a/typeshed/third_party/3/enum.pyi
+++ b/typeshed/third_party/3/enum.pyi
@@ -1,12 +1,19 @@
-# FIXME: Stub incomplete, ommissions include:
-# * the metaclass
-# * _sunder_ methods with their transformations
-
import sys
-from typing import List, Any, TypeVar, Union
+from typing import List, Any, TypeVar, Union, Iterable, Iterator, TypeVar, Generic, Type, Sized, Reversible, Container, Mapping
+
+_T = TypeVar('_T', bound=Enum)
+_S = TypeVar('_S', bound=Type[Enum])
-class Enum:
- def __new__(cls, value: Any) -> None: ...
+class EnumMeta(type, Iterable[Enum], Sized, Reversible[Enum], Container[Enum]):
+ def __iter__(self: Type[_T]) -> Iterator[_T]: ... # type: ignore
+ def __reversed__(self: Type[_T]) -> Iterator[_T]: ...
+ def __contains__(self, member: Any) -> bool: ...
+ def __getitem__(self: Type[_T], name: str) -> _T: ...
+ @property
+ def __members__(self: Type[_T]) -> Mapping[str, _T]: ...
+
+class Enum(metaclass=EnumMeta):
+ def __new__(cls: Type[_T], value: Any) -> _T: ...
def __repr__(self) -> str: ...
def __str__(self) -> str: ...
def __dir__(self) -> List[str]: ...
@@ -20,9 +27,7 @@ class Enum:
class IntEnum(int, Enum):
value = ... # type: int
-_T = TypeVar('_T')
-
-def unique(enumeration: _T) -> _T: ...
+def unique(enumeration: _S) -> _S: ...
if sys.version_info >= (3, 6):
_auto_null = ... # type: Any
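The rewritten enum stub adds a typed EnumMeta so operations on an Enum class keep the concrete member type, and constrains unique() to enum classes. A short illustration of what the new annotations let a type checker infer (runtime behaviour is unchanged; the inferred types follow the revised stub):

from enum import Enum, unique

@unique                       # unique(enumeration: _S) -> _S keeps the class type as Color
class Color(Enum):
    RED = 1
    GREEN = 2

first = list(Color)[0]        # EnumMeta.__iter__ -> Iterator[Color], so `first` is Color
by_name = Color['RED']        # EnumMeta.__getitem__ -> Color
members = Color.__members__   # Mapping[str, Color]
assert first is Color.RED and by_name is Color.RED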
diff --git a/typeshed/third_party/3/itsdangerous.pyi b/typeshed/third_party/3/itsdangerous.pyi
index efa51df..2305bb4 100644
--- a/typeshed/third_party/3/itsdangerous.pyi
+++ b/typeshed/third_party/3/itsdangerous.pyi
@@ -11,10 +11,11 @@ int_to_byte = Callable[[int], bytes]
number_types = (int, float)
izip = zip
-bytes_like = Union[bytearray, bytes]
-str_like = Union[str, bytes]
-can_become_bytes = Union[str, bytes, bytearray]
-comparable_bytes = TypeVar('comparable_bytes', str, Union[bytes, bytearray])
+_bytes_like = Union[bytearray, bytes]
+_str_like = Union[str, bytes]
+_can_become_bytes = Union[str, bytes, bytearray]
+_comparable_bytes = TypeVar('_comparable_bytes', str, _bytes_like)
+_serializer = Any # must be an object that has "dumps" and "loads" attributes (e.g. the json module)
class _CompactJSON:
def loads(self, payload: Text) -> Any: ...
@@ -23,9 +24,9 @@ class _CompactJSON:
compact_json = _CompactJSON
EPOCH = ... # type: int
-def want_bytes(s: can_become_bytes, encoding='', errors='') -> bytes: ...
-def is_text_serializer(serializer: Any) -> bool: ...
-def constant_time_compare(val1: comparable_bytes, val2: comparable_bytes) -> bool: ...
+def want_bytes(s: _can_become_bytes, encoding: str='', errors: str='') -> bytes: ...
+def is_text_serializer(serializer: _serializer) -> bool: ...
+def constant_time_compare(val1: _comparable_bytes, val2: _comparable_bytes) -> bool: ...
class BadData(Exception):
message = ... # type: str
@@ -41,7 +42,7 @@ class BadSignature(BadData):
class BadTimeSignature(BadSignature):
date_signed = ... # type: Optional[int]
- def __init__(self, message, payload: Optional[Any]=None, date_signed: Optional[int]=None) -> None: ...
+ def __init__(self, message: str, payload: Optional[Any]=None, date_signed: Optional[int]=None) -> None: ...
class BadHeader(BadSignature):
header = ... # type: Any
@@ -50,75 +51,75 @@ class BadHeader(BadSignature):
class SignatureExpired(BadTimeSignature): ...
-def base64_encode(string: can_become_bytes) -> bytes: ...
-def base64_decode(string: can_become_bytes) -> bytes: ...
+def base64_encode(string: _can_become_bytes) -> bytes: ...
+def base64_decode(string: _can_become_bytes) -> bytes: ...
def int_to_bytes(num: int) -> bytes: ...
-def bytes_to_int(bytestr: can_become_bytes) -> bytes: ...
+def bytes_to_int(bytestr: _can_become_bytes) -> bytes: ...
class SigningAlgorithm:
- def get_signature(self, key: bytes_like, value: bytes_like) -> bytes: ...
- def verify_signature(self, key: bytes_like, value: bytes_like, sig: can_become_bytes) -> bool: ...
+ def get_signature(self, key: _bytes_like, value: _bytes_like) -> bytes: ...
+ def verify_signature(self, key: _bytes_like, value: _bytes_like, sig: _can_become_bytes) -> bool: ...
class NoneAlgorithm(SigningAlgorithm):
- def get_signature(self, key: bytes_like, value: bytes_like) -> bytes: ...
+ def get_signature(self, key: _bytes_like, value: _bytes_like) -> bytes: ...
class HMACAlgorithm(SigningAlgorithm):
default_digest_method = ... # type: Callable
digest_method = ... # type: Callable
def __init__(self, digest_method: Optional[Callable]=None) -> None: ...
- def get_signature(self, key: bytes_like, value: bytes_like) -> bytes: ...
+ def get_signature(self, key: _bytes_like, value: _bytes_like) -> bytes: ...
class Signer:
default_digest_method = ... # type: Callable
default_key_derivation = ... # type: str
- secret_key = ... # type: can_become_bytes
- sep = ... # type: can_become_bytes
- salt = ... # type: can_become_bytes
+ secret_key = ... # type: _can_become_bytes
+ sep = ... # type: _can_become_bytes
+ salt = ... # type: _can_become_bytes
key_derivation = ... # type: str
digest_method = ... # type: Callable
algorithm = ... # type: SigningAlgorithm
- def __init__(self, secret_key: can_become_bytes, salt: Optional[can_become_bytes]=None, sep: Optional[can_become_bytes]='',
+ def __init__(self, secret_key: _can_become_bytes, salt: Optional[_can_become_bytes]=None, sep: Optional[_can_become_bytes]='',
key_derivation: Optional[str]=None,
digest_method: Optional[Callable]=None,
algorithm: Optional[SigningAlgorithm]=None) -> None: ...
def derive_key(self) -> bytes: ...
- def get_signature(self, value: bytes_like) -> bytes: ...
- def sign(self, value: bytes_like) -> bytes: ...
- def verify_signature(self, value: bytes_like, sig: can_become_bytes) -> bool: ...
- def unsign(self, signed_value: can_become_bytes) -> str: ...
- def validate(self, signed_value: can_become_bytes) -> bool: ...
+ def get_signature(self, value: _bytes_like) -> bytes: ...
+ def sign(self, value: _bytes_like) -> bytes: ...
+ def verify_signature(self, value: _bytes_like, sig: _can_become_bytes) -> bool: ...
+ def unsign(self, signed_value: _can_become_bytes) -> str: ...
+ def validate(self, signed_value: _can_become_bytes) -> bool: ...
class TimestampSigner(Signer):
def get_timestamp(self) -> int: ...
def timestamp_to_datetime(self, ts: int) -> datetime: ...
- def sign(self, value: bytes_like) -> bytes: ...
- def unsign(self, value: can_become_bytes, max_age: Optional[int]=None, return_timestamp=False) -> Any: ...
- def validate(self, signed_value: can_become_bytes, max_age: Optional[int]=None) -> bool: ...
+ def sign(self, value: _bytes_like) -> bytes: ...
+ def unsign(self, value: _can_become_bytes, max_age: Optional[int]=None, return_timestamp: bool=False) -> Any: ...
+ def validate(self, signed_value: _can_become_bytes, max_age: Optional[int]=None) -> bool: ...
class Serializer:
- default_serializer = ... # type: Any
+ default_serializer = ... # type: _serializer
default_signer = ... # type: Callable[..., Signer]
secret_key = ... # type: Any
- salt = ... # type: can_become_bytes
- serializer = ... # type: Any
+ salt = ... # type: _can_become_bytes
+ serializer = ... # type: _serializer
is_text_serializer = ... # type: bool
signer = ... # type: Signer
signer_kwargs = ... # type: MutableMapping
- def __init__(self, secret_key: can_become_bytes, salt: Optional[can_become_bytes]=b'', serializer=None, signer: Optional[Callable[..., Signer]]=None, signer_kwargs: Optional[MutableMapping]=None) -> None: ...
- def load_payload(self, payload: Any, serializer=None) -> Any: ...
+ def __init__(self, secret_key: _can_become_bytes, salt: Optional[_can_become_bytes]=b'', serializer: _serializer=None, signer: Optional[Callable[..., Signer]]=None, signer_kwargs: Optional[MutableMapping]=None) -> None: ...
+ def load_payload(self, payload: Any, serializer: _serializer=None) -> Any: ...
def dump_payload(self, *args, **kwargs) -> bytes: ...
- def make_signer(self, salt: Optional[can_become_bytes]=None) -> Signer: ...
- def dumps(self, obj: Any, salt: Optional[can_become_bytes]=None) -> str_like: ...
- def dump(self, obj: Any, f: IO, salt: Optional[can_become_bytes]=None) -> None: ...
- def loads(self, s: can_become_bytes, salt: Optional[can_become_bytes]=None) -> Any: ...
- def load(self, f: IO, salt: Optional[can_become_bytes]=None): ...
- def loads_unsafe(self, s: can_become_bytes, salt: Optional[can_become_bytes]=None) -> Tuple[bool, Any]: ...
+ def make_signer(self, salt: Optional[_can_become_bytes]=None) -> Signer: ...
+ def dumps(self, obj: Any, salt: Optional[_can_become_bytes]=None) -> _str_like: ...
+ def dump(self, obj: Any, f: IO, salt: Optional[_can_become_bytes]=None) -> None: ...
+ def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes]=None) -> Any: ...
+ def load(self, f: IO, salt: Optional[_can_become_bytes]=None): ...
+ def loads_unsafe(self, s: _can_become_bytes, salt: Optional[_can_become_bytes]=None) -> Tuple[bool, Any]: ...
def load_unsafe(self, f: IO, *args, **kwargs) -> Tuple[bool, Any]: ...
class TimedSerializer(Serializer):
default_signer = ... # type: Callable[..., TimestampSigner]
- def loads(self, s: can_become_bytes, salt: Optional[can_become_bytes]=None, max_age: Optional[int]=None, return_timestamp=False) -> Any: ...
- def loads_unsafe(self, s: can_become_bytes, salt: Optional[can_become_bytes]=None, max_age: Optional[int]=None) -> Tuple[bool, Any]: ...
+ def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes]=None, max_age: Optional[int]=None, return_timestamp: bool=False) -> Any: ...
+ def loads_unsafe(self, s: _can_become_bytes, salt: Optional[_can_become_bytes]=None, max_age: Optional[int]=None) -> Tuple[bool, Any]: ...
class JSONWebSignatureSerializer(Serializer):
jws_algorithms = ... # type: MutableMapping[str, SigningAlgorithm]
@@ -126,22 +127,22 @@ class JSONWebSignatureSerializer(Serializer):
default_serializer = ... # type: Any
algorithm_name = ... # type: str
algorithm = ... # type: Any
- def __init__(self, secret_key: can_become_bytes, salt: Optional[can_become_bytes]=None, serializer=None, signer: Optional[Callable[..., Signer]]=None, signer_kwargs: Optional[MutableMapping]=None, algorithm_name: Optional[str]=None) -> None: ...
- def load_payload(self, payload: Any, return_header=False) -> Any: ...
+ def __init__(self, secret_key: _can_become_bytes, salt: Optional[_can_become_bytes]=None, serializer: _serializer=None, signer: Optional[Callable[..., Signer]]=None, signer_kwargs: Optional[MutableMapping]=None, algorithm_name: Optional[str]=None) -> None: ...
+ def load_payload(self, payload: Any, serializer: _serializer = None, return_header: bool=False) -> Any: ...
def dump_payload(self, *args, **kwargs) -> bytes: ...
def make_algorithm(self, algorithm_name: str) -> SigningAlgorithm: ...
- def make_signer(self, salt: Optional[can_become_bytes]=None, algorithm_name: Optional[str]=None) -> Signer: ...
+ def make_signer(self, salt: Optional[_can_become_bytes]=None, algorithm_name: Optional[str]=None) -> Signer: ...
def make_header(self, header_fields=Optional[MutableMapping]) -> MutableMapping: ...
- def dumps(self, obj: Any, salt: Optional[can_become_bytes]=None, header_fields=Optional[MutableMapping]) -> str: ...
- def loads(self, s: can_become_bytes, salt: Optional[can_become_bytes]=None, return_header=False) -> Any: ...
- def loads_unsafe(self, s: can_become_bytes, salt: Optional[can_become_bytes]=None, return_header=False) -> Tuple[bool, Any]: ...
+ def dumps(self, obj: Any, salt: Optional[_can_become_bytes]=None, header_fields: Optional[MutableMapping]=...) -> str: ...
+ def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes]=None, return_header: bool=False) -> Any: ...
+ def loads_unsafe(self, s: _can_become_bytes, salt: Optional[_can_become_bytes]=None, return_header: bool=False) -> Tuple[bool, Any]: ...
class TimedJSONWebSignatureSerializer(JSONWebSignatureSerializer):
DEFAULT_EXPIRES_IN = ... # type: int
expires_in = ... # type: int
- def __init__(self, secret_key: can_become_bytes, expires_in: Optional[int]=None, **kwargs) -> None: ...
+ def __init__(self, secret_key: _can_become_bytes, expires_in: Optional[int]=None, **kwargs) -> None: ...
def make_header(self, header_fields=Optional[MutableMapping]) -> MutableMapping: ...
- def loads(self, s: can_become_bytes, salt: Optional[can_become_bytes]=None, return_header=False) -> Any: ...
+ def loads(self, s: _can_become_bytes, salt: Optional[_can_become_bytes]=None, return_header: bool=False) -> Any: ...
def get_issue_date(self, header: MutableMapping) -> Optional[datetime]: ...
def now(self) -> int: ...
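The itsdangerous stub introduces a _serializer alias (currently Any) whose comment spells out the duck type: any object exposing dumps and loads, such as the json module. A hypothetical serializer satisfying that contract:

import json
from typing import Any

class CompactJSON:
    """Hypothetical serializer: anything with dumps()/loads() works, e.g. the json module itself."""
    def dumps(self, obj: Any) -> str:
        return json.dumps(obj, separators=(',', ':'))
    def loads(self, payload: str) -> Any:
        return json.loads(payload)

# Either of these could be passed where the stub expects a _serializer:
serializer_a = json
serializer_b = CompactJSON()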
diff --git a/typeshed/third_party/3/lxml/etree.pyi b/typeshed/third_party/3/lxml/etree.pyi
index 39e5077..e1bab54 100644
--- a/typeshed/third_party/3/lxml/etree.pyi
+++ b/typeshed/third_party/3/lxml/etree.pyi
@@ -11,34 +11,32 @@ from typing import Iterable, Iterator, SupportsBytes
# unnecessary constraint. It seems reasonable to constrain each
# List/Dict argument to use one type consistently, though, and it is
# necessary in order to keep these brief.
-AnyStr = Union[str, bytes]
-ListAnyStr = Union[List[str], List[bytes]]
-DictAnyStr = Union[Dict[str, str], Dict[bytes, bytes]]
-Dict_Tuple2AnyStr_Any = Union[Dict[Tuple[str, str], Any], Tuple[bytes, bytes], Any]
+_AnyStr = Union[str, bytes]
+_ListAnyStr = Union[List[str], List[bytes]]
+_DictAnyStr = Union[Dict[str, str], Dict[bytes, bytes]]
+_Dict_Tuple2AnyStr_Any = Union[Dict[Tuple[str, str], Any], Tuple[bytes, bytes], Any]
-class ElementChildIterator(Iterator['_Element;']):
+class ElementChildIterator(Iterator['_Element']):
def __iter__(self) -> 'ElementChildIterator': ...
def __next__(self) -> '_Element': ...
class _Element(Iterable['_Element']):
- def addprevious(self, element: '_Element') -> None:
- pass
+ def addprevious(self, element: '_Element') -> None: ...
attrib = ... # type: MutableMapping[str, str]
- text = ... # type: AnyStr
+ text = ... # type: _AnyStr
tag = ... # type: str
def append(self, element: '_Element') -> '_Element': ...
def __iter__(self) -> ElementChildIterator: ...
-class ElementBase(_Element):
- pass
+class ElementBase(_Element): ...
class _ElementTree:
def write(self,
- file: Union[AnyStr, typing.IO],
- encoding: AnyStr = ...,
- method: AnyStr = ...,
+ file: Union[_AnyStr, typing.IO],
+ encoding: _AnyStr = ...,
+ method: _AnyStr = ...,
pretty_print: bool = ...,
xml_declaration: Any = ...,
with_tail: Any = ...,
@@ -46,80 +44,54 @@ class _ElementTree:
compression: int = ...,
exclusive: bool = ...,
with_comments: bool = ...,
- inclusive_ns_prefixes: ListAnyStr = ...) -> None:
- pass
+ inclusive_ns_prefixes: _ListAnyStr = ...) -> None: ...
-class _XSLTResultTree(SupportsBytes):
- pass
+class _XSLTResultTree(SupportsBytes): ...
-class _XSLTQuotedStringParam:
- pass
+class _XSLTQuotedStringParam: ...
-class XMLParser:
- pass
+class XMLParser: ...
class XMLSchema:
def __init__(self,
etree: Union[_Element, _ElementTree] = ...,
- file: Union[AnyStr, typing.IO] = ...) -> None:
- pass
+ file: Union[_AnyStr, typing.IO] = ...) -> None: ...
+ def assertValid(self, etree: Union[_Element, _ElementTree]) -> None: ...
- def assertValid(self,
- etree: Union[_Element, _ElementTree]) -> None:
- pass
-
-class XSLTAccessControl:
- pass
+class XSLTAccessControl: ...
class XSLT:
def __init__(self,
xslt_input: Union[_Element, _ElementTree],
- extensions: Dict_Tuple2AnyStr_Any = ...,
+ extensions: _Dict_Tuple2AnyStr_Any = ...,
regexp: bool = ...,
- access_control: XSLTAccessControl = ...) -> None:
- pass
-
+ access_control: XSLTAccessControl = ...) -> None: ...
def __call__(self,
_input: Union[_Element, _ElementTree],
profile_run: bool = ...,
- **kwargs: Union[AnyStr, _XSLTQuotedStringParam]) -> _XSLTResultTree:
- pass
-
+ **kwargs: Union[_AnyStr, _XSLTQuotedStringParam]) -> _XSLTResultTree: ...
@staticmethod
- def strparam(s: AnyStr) -> _XSLTQuotedStringParam:
- pass
-
-def Element(_tag: AnyStr,
- attrib: DictAnyStr = ...,
- nsmap: DictAnyStr = ...,
- **extra: AnyStr) -> _Element:
- pass
-
-def SubElement(_parent: _Element, _tag: AnyStr,
- attrib: DictAnyStr = ...,
- nsmap: DictAnyStr = ...,
- **extra: AnyStr) -> _Element:
- pass
-
+ def strparam(s: _AnyStr) -> _XSLTQuotedStringParam: ...
+
+def Element(_tag: _AnyStr,
+ attrib: _DictAnyStr = ...,
+ nsmap: _DictAnyStr = ...,
+ **extra: _AnyStr) -> _Element: ...
+def SubElement(_parent: _Element, _tag: _AnyStr,
+ attrib: _DictAnyStr = ...,
+ nsmap: _DictAnyStr = ...,
+ **extra: _AnyStr) -> _Element: ...
def ElementTree(element: _Element = ...,
- file: Union[AnyStr, typing.IO] = ...,
- parser: XMLParser = ...) -> _ElementTree:
- pass
-
-def ProcessingInstruction(target: AnyStr, text: AnyStr = ...) -> _Element:
- pass
-
-def parse(source: Union[AnyStr, typing.IO],
+ file: Union[_AnyStr, typing.IO] = ...,
+ parser: XMLParser = ...) -> _ElementTree: ...
+def ProcessingInstruction(target: _AnyStr, text: _AnyStr = ...) -> _Element: ...
+def parse(source: Union[_AnyStr, typing.IO],
parser: XMLParser = ...,
- base_url: AnyStr = ...) -> _ElementTree:
- pass
-
-
-def fromstring(text: AnyStr,
+ base_url: _AnyStr = ...) -> _ElementTree: ...
+def fromstring(text: _AnyStr,
parser: XMLParser = ...,
*,
- base_url: AnyStr = ...) -> _Element: ...
-
+ base_url: _AnyStr = ...) -> _Element: ...
def tostring(element_or_tree: Union[_Element, _ElementTree],
encoding: Union[str, type] = ...,
method: str = ...,
@@ -130,39 +102,26 @@ def tostring(element_or_tree: Union[_Element, _ElementTree],
doctype: str = ...,
exclusive: bool = ...,
with_comments: bool = ...,
- inclusive_ns_prefixes: Any = ...) -> AnyStr: ...
-
-
-class _ErrorLog:
- pass
+ inclusive_ns_prefixes: Any = ...) -> _AnyStr: ...
+class _ErrorLog: ...
-class Error(Exception):
- pass
+class Error(Exception): ...
class LxmlError(Error):
def __init__(self, message: Any, error_log: _ErrorLog = ...) -> None: ...
error_log = ... # type: _ErrorLog
-class DocumentInvalid(LxmlError):
- pass
-
-class LxmlSyntaxError(LxmlError, SyntaxError):
- pass
-
-class ParseError(LxmlSyntaxError):
- pass
-
-class XMLSyntaxError(ParseError):
- pass
-
+class DocumentInvalid(LxmlError): ...
+class LxmlSyntaxError(LxmlError, SyntaxError): ...
+class ParseError(LxmlSyntaxError): ...
+class XMLSyntaxError(ParseError): ...
-class _Validator:
- pass
+class _Validator: ...
class DTD(_Validator):
def __init__(self,
- file: Union[AnyStr, typing.IO] = ...,
+ file: Union[_AnyStr, typing.IO] = ...,
*,
external_id: Any = ...) -> None: ...
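
The lxml hunk above swaps the typing.AnyStr type variable for a private Union alias (_AnyStr = Union[str, bytes]). A minimal sketch of the difference, not part of the diff or the upstream stubs: a Union checks each argument on its own, while the AnyStr TypeVar forces every use within one call to resolve to the same type, which is rarely what a standalone attribute such as _Element.text needs.

    from typing import AnyStr, Union

    _AnyStr = Union[str, bytes]

    def with_union(tag: _AnyStr, text: _AnyStr) -> None:
        # Each parameter is checked independently; mixing str and bytes is fine.
        ...

    def with_typevar(tag: AnyStr, text: AnyStr) -> AnyStr:
        # AnyStr must resolve to a single type per call, and the return type
        # follows it; mixing str and bytes here is a type error.
        return tag

    with_union("root", b"payload")    # accepted by mypy
    with_typevar("root", b"payload")  # rejected: AnyStr cannot be both types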
diff --git a/typeshed/third_party/3/requests/api.pyi b/typeshed/third_party/3/requests/api.pyi
deleted file mode 100644
index 120f695..0000000
--- a/typeshed/third_party/3/requests/api.pyi
+++ /dev/null
@@ -1,26 +0,0 @@
-# Stubs for requests.api (Python 3)
-
-from typing import Optional, Union, Any, Iterable, Mapping, Tuple
-
-from .models import Response
-
-ParamsMappingValueType = Union[str, bytes, int, float, Iterable[Union[str, bytes, int, float]]]
-
-def request(method: str, url: str, **kwargs) -> Response: ...
-def get(url: Union[str, bytes],
- params: Optional[
- Union[
- Mapping[Union[str, bytes, int, float], ParamsMappingValueType],
- Union[str, bytes],
- Tuple[Union[str, bytes, int, float], ParamsMappingValueType],
- Mapping[str, ParamsMappingValueType],
- Mapping[bytes, ParamsMappingValueType],
- Mapping[int, ParamsMappingValueType],
- Mapping[float, ParamsMappingValueType]]]=None,
- **kwargs) -> Response: ...
-def options(url: str, **kwargs) -> Response: ...
-def head(url: str, **kwargs) -> Response: ...
-def post(url: str, data=..., json=..., **kwargs) -> Response: ...
-def put(url: str, data=..., **kwargs) -> Response: ...
-def patch(url: str, data=..., **kwargs) -> Response: ...
-def delete(url: str, **kwargs) -> Response: ...
diff --git a/typeshed/third_party/3/requests/auth.pyi b/typeshed/third_party/3/requests/auth.pyi
deleted file mode 100644
index 8eea2b0..0000000
--- a/typeshed/third_party/3/requests/auth.pyi
+++ /dev/null
@@ -1,41 +0,0 @@
-# Stubs for requests.auth (Python 3)
-
-from typing import Any
-from . import compat
-from . import cookies
-from . import utils
-from . import status_codes
-
-extract_cookies_to_jar = cookies.extract_cookies_to_jar
-parse_dict_header = utils.parse_dict_header
-to_native_string = utils.to_native_string
-codes = status_codes.codes
-
-CONTENT_TYPE_FORM_URLENCODED = ... # type: Any
-CONTENT_TYPE_MULTI_PART = ... # type: Any
-
-class AuthBase:
- def __call__(self, r): ...
-
-class HTTPBasicAuth(AuthBase):
- username = ... # type: Any
- password = ... # type: Any
- def __init__(self, username, password) -> None: ...
- def __call__(self, r): ...
-
-class HTTPProxyAuth(HTTPBasicAuth):
- def __call__(self, r): ...
-
-class HTTPDigestAuth(AuthBase):
- username = ... # type: Any
- password = ... # type: Any
- last_nonce = ... # type: Any
- nonce_count = ... # type: Any
- chal = ... # type: Any
- pos = ... # type: Any
- num_401_calls = ... # type: Any
- def __init__(self, username, password) -> None: ...
- def build_digest_header(self, method, url): ...
- def handle_redirect(self, r, **kwargs): ...
- def handle_401(self, r, **kwargs): ...
- def __call__(self, r): ...
diff --git a/typeshed/third_party/3/requests/compat.pyi b/typeshed/third_party/3/requests/compat.pyi
deleted file mode 100644
index 63b92f6..0000000
--- a/typeshed/third_party/3/requests/compat.pyi
+++ /dev/null
@@ -1,6 +0,0 @@
-# Stubs for requests.compat (Python 3.4)
-
-from typing import Any
-import collections
-
-OrderedDict = collections.OrderedDict
diff --git a/typeshed/third_party/3/requests/exceptions.pyi b/typeshed/third_party/3/requests/exceptions.pyi
deleted file mode 100644
index ff0c328..0000000
--- a/typeshed/third_party/3/requests/exceptions.pyi
+++ /dev/null
@@ -1,26 +0,0 @@
-# Stubs for requests.exceptions (Python 3)
-
-from typing import Any
-from .packages.urllib3.exceptions import HTTPError as BaseHTTPError
-
-class RequestException(IOError):
- response = ... # type: Any
- request = ... # type: Any
- def __init__(self, *args, **kwargs) -> None: ...
-
-class HTTPError(RequestException): ...
-class ConnectionError(RequestException): ...
-class ProxyError(ConnectionError): ...
-class SSLError(ConnectionError): ...
-class Timeout(RequestException): ...
-class ConnectTimeout(ConnectionError, Timeout): ...
-class ReadTimeout(Timeout): ...
-class URLRequired(RequestException): ...
-class TooManyRedirects(RequestException): ...
-class MissingSchema(RequestException, ValueError): ...
-class InvalidSchema(RequestException, ValueError): ...
-class InvalidURL(RequestException, ValueError): ...
-class ChunkedEncodingError(RequestException): ...
-class ContentDecodingError(RequestException, BaseHTTPError): ...
-class StreamConsumedError(RequestException, TypeError): ...
-class RetryError(RequestException): ...
diff --git a/typeshed/third_party/3/requests/hooks.pyi b/typeshed/third_party/3/requests/hooks.pyi
deleted file mode 100644
index 3367d9a..0000000
--- a/typeshed/third_party/3/requests/hooks.pyi
+++ /dev/null
@@ -1,8 +0,0 @@
-# Stubs for requests.hooks (Python 3)
-
-from typing import Any
-
-HOOKS = ... # type: Any
-
-def default_hooks(): ...
-def dispatch_hook(key, hooks, hook_data, **kwargs): ...
diff --git a/typeshed/third_party/3/requests/packages/__init__.pyi b/typeshed/third_party/3/requests/packages/__init__.pyi
deleted file mode 100644
index 2b1bff8..0000000
--- a/typeshed/third_party/3/requests/packages/__init__.pyi
+++ /dev/null
@@ -1,8 +0,0 @@
-# Stubs for requests.packages (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-class VendorAlias:
- def __init__(self, package_names) -> None: ...
- def find_module(self, fullname, path=...): ...
- def load_module(self, name): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/_collections.pyi b/typeshed/third_party/3/requests/packages/urllib3/_collections.pyi
deleted file mode 100644
index 58aa944..0000000
--- a/typeshed/third_party/3/requests/packages/urllib3/_collections.pyi
+++ /dev/null
@@ -1,51 +0,0 @@
-# Stubs for requests.packages.urllib3._collections (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-from collections import MutableMapping
-
-class RLock:
- def __enter__(self): ...
- def __exit__(self, exc_type, exc_value, traceback): ...
-
-class RecentlyUsedContainer(MutableMapping):
- ContainerCls = ... # type: Any
- dispose_func = ... # type: Any
- lock = ... # type: Any
- def __init__(self, maxsize=..., dispose_func=...) -> None: ...
- def __getitem__(self, key): ...
- def __setitem__(self, key, value): ...
- def __delitem__(self, key): ...
- def __len__(self): ...
- def __iter__(self): ...
- def clear(self): ...
- def keys(self): ...
-
-class HTTPHeaderDict(dict):
- def __init__(self, headers=..., **kwargs) -> None: ...
- def __setitem__(self, key, val): ...
- def __getitem__(self, key): ...
- def __delitem__(self, key): ...
- def __contains__(self, key): ...
- def __eq__(self, other): ...
- def __ne__(self, other): ...
- values = ... # type: Any
- get = ... # type: Any
- update = ... # type: Any
- iterkeys = ... # type: Any
- itervalues = ... # type: Any
- def pop(self, key, default=...): ...
- def discard(self, key): ...
- def add(self, key, val): ...
- def extend(*args, **kwargs): ...
- def getlist(self, key): ...
- getheaders = ... # type: Any
- getallmatchingheaders = ... # type: Any
- iget = ... # type: Any
- def copy(self): ...
- def iteritems(self): ...
- def itermerged(self): ...
- def items(self): ...
- @classmethod
- def from_httplib(cls, message, duplicates=...): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/contrib/__init__.pyi b/typeshed/third_party/3/requests/packages/urllib3/contrib/__init__.pyi
deleted file mode 100644
index 69e47f7..0000000
--- a/typeshed/third_party/3/requests/packages/urllib3/contrib/__init__.pyi
+++ /dev/null
@@ -1,3 +0,0 @@
-# Stubs for requests.packages.urllib3.contrib (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
diff --git a/typeshed/third_party/3/requests/packages/urllib3/exceptions.pyi b/typeshed/third_party/3/requests/packages/urllib3/exceptions.pyi
deleted file mode 100644
index 3e7d0f6..0000000
--- a/typeshed/third_party/3/requests/packages/urllib3/exceptions.pyi
+++ /dev/null
@@ -1,54 +0,0 @@
-# Stubs for requests.packages.urllib3.exceptions (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-class HTTPError(Exception): ...
-class HTTPWarning(Warning): ...
-
-class PoolError(HTTPError):
- pool = ... # type: Any
- def __init__(self, pool, message) -> None: ...
- def __reduce__(self): ...
-
-class RequestError(PoolError):
- url = ... # type: Any
- def __init__(self, pool, url, message) -> None: ...
- def __reduce__(self): ...
-
-class SSLError(HTTPError): ...
-class ProxyError(HTTPError): ...
-class DecodeError(HTTPError): ...
-class ProtocolError(HTTPError): ...
-
-ConnectionError = ... # type: Any
-
-class MaxRetryError(RequestError):
- reason = ... # type: Any
- def __init__(self, pool, url, reason=...) -> None: ...
-
-class HostChangedError(RequestError):
- retries = ... # type: Any
- def __init__(self, pool, url, retries=...) -> None: ...
-
-class TimeoutStateError(HTTPError): ...
-class TimeoutError(HTTPError): ...
-class ReadTimeoutError(TimeoutError, RequestError): ...
-class ConnectTimeoutError(TimeoutError): ...
-class EmptyPoolError(PoolError): ...
-class ClosedPoolError(PoolError): ...
-class LocationValueError(ValueError, HTTPError): ...
-
-class LocationParseError(LocationValueError):
- location = ... # type: Any
- def __init__(self, location) -> None: ...
-
-class ResponseError(HTTPError):
- GENERIC_ERROR = ... # type: Any
- SPECIFIC_ERROR = ... # type: Any
-
-class SecurityWarning(HTTPWarning): ...
-class InsecureRequestWarning(SecurityWarning): ...
-class SystemTimeWarning(SecurityWarning): ...
-class InsecurePlatformWarning(SecurityWarning): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/fields.pyi b/typeshed/third_party/3/requests/packages/urllib3/fields.pyi
deleted file mode 100644
index cdc7734..0000000
--- a/typeshed/third_party/3/requests/packages/urllib3/fields.pyi
+++ /dev/null
@@ -1,16 +0,0 @@
-# Stubs for requests.packages.urllib3.fields (Python 3.4)
-
-from typing import Any
-from . import packages
-
-def guess_content_type(filename, default=...): ...
-def format_header_param(name, value): ...
-
-class RequestField:
- data = ... # type: Any
- headers = ... # type: Any
- def __init__(self, name, data, filename=..., headers=...) -> None: ...
- @classmethod
- def from_tuples(cls, fieldname, value): ...
- def render_headers(self): ...
- def make_multipart(self, content_disposition=..., content_type=..., content_location=...): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/filepost.pyi b/typeshed/third_party/3/requests/packages/urllib3/filepost.pyi
deleted file mode 100644
index f1ce517..0000000
--- a/typeshed/third_party/3/requests/packages/urllib3/filepost.pyi
+++ /dev/null
@@ -1,19 +0,0 @@
-# Stubs for requests.packages.urllib3.filepost (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-from . import packages
-# from .packages import six
-from . import fields
-
-# six = packages.six
-# b = six.b
-RequestField = fields.RequestField
-
-writer = ... # type: Any
-
-def choose_boundary(): ...
-def iter_field_objects(fields): ...
-def iter_fields(fields): ...
-def encode_multipart_formdata(fields, boundary=...): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/packages/__init__.pyi b/typeshed/third_party/3/requests/packages/urllib3/packages/__init__.pyi
deleted file mode 100644
index 67507f9..0000000
--- a/typeshed/third_party/3/requests/packages/urllib3/packages/__init__.pyi
+++ /dev/null
@@ -1,3 +0,0 @@
-# Stubs for requests.packages.urllib3.packages (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
diff --git a/typeshed/third_party/3/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi b/typeshed/third_party/3/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi
deleted file mode 100644
index 5abbc9d..0000000
--- a/typeshed/third_party/3/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi
+++ /dev/null
@@ -1,7 +0,0 @@
-# Stubs for requests.packages.urllib3.packages.ssl_match_hostname._implementation (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-class CertificateError(ValueError): ...
-
-def match_hostname(cert, hostname): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/poolmanager.pyi b/typeshed/third_party/3/requests/packages/urllib3/poolmanager.pyi
deleted file mode 100644
index a65f664..0000000
--- a/typeshed/third_party/3/requests/packages/urllib3/poolmanager.pyi
+++ /dev/null
@@ -1,31 +0,0 @@
-# Stubs for requests.packages.urllib3.poolmanager (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-from .request import RequestMethods
-
-class PoolManager(RequestMethods):
- proxy = ... # type: Any
- connection_pool_kw = ... # type: Any
- pools = ... # type: Any
- def __init__(self, num_pools=..., headers=..., **connection_pool_kw) -> None: ...
- def __enter__(self): ...
- def __exit__(self, exc_type, exc_val, exc_tb): ...
- def clear(self): ...
- def connection_from_host(self, host, port=..., scheme=...): ...
- def connection_from_url(self, url): ...
- # TODO: This was the original signature -- copied another one from base class to fix complaint.
- # def urlopen(self, method, url, redirect=True, **kw): ...
- def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ...
-
-class ProxyManager(PoolManager):
- proxy = ... # type: Any
- proxy_headers = ... # type: Any
- def __init__(self, proxy_url, num_pools=..., headers=..., proxy_headers=..., **connection_pool_kw) -> None: ...
- def connection_from_host(self, host, port=..., scheme=...): ...
- # TODO: This was the original signature -- copied another one from base class to fix complaint.
- # def urlopen(self, method, url, redirect=True, **kw): ...
- def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ...
-
-def proxy_from_url(url, **kw): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/request.pyi b/typeshed/third_party/3/requests/packages/urllib3/request.pyi
deleted file mode 100644
index 788c759..0000000
--- a/typeshed/third_party/3/requests/packages/urllib3/request.pyi
+++ /dev/null
@@ -1,13 +0,0 @@
-# Stubs for requests.packages.urllib3.request (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-class RequestMethods:
- headers = ... # type: Any
- def __init__(self, headers=...) -> None: ...
- def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ...
- def request(self, method, url, fields=..., headers=..., **urlopen_kw): ...
- def request_encode_url(self, method, url, fields=..., **urlopen_kw): ...
- def request_encode_body(self, method, url, fields=..., headers=..., encode_multipart=..., multipart_boundary=..., **urlopen_kw): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/connection.pyi b/typeshed/third_party/3/requests/packages/urllib3/util/connection.pyi
deleted file mode 100644
index cd67309..0000000
--- a/typeshed/third_party/3/requests/packages/urllib3/util/connection.pyi
+++ /dev/null
@@ -1,11 +0,0 @@
-# Stubs for requests.packages.urllib3.util.connection (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-poll = ... # type: Any
-select = ... # type: Any
-
-def is_connection_dropped(conn): ...
-def create_connection(address, timeout=..., source_address=..., socket_options=...): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/request.pyi b/typeshed/third_party/3/requests/packages/urllib3/util/request.pyi
deleted file mode 100644
index 0be64df..0000000
--- a/typeshed/third_party/3/requests/packages/urllib3/util/request.pyi
+++ /dev/null
@@ -1,12 +0,0 @@
-# Stubs for requests.packages.urllib3.util.request (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-# from ..packages import six
-
-# b = six.b
-
-ACCEPT_ENCODING = ... # type: Any
-
-def make_headers(keep_alive=..., accept_encoding=..., user_agent=..., basic_auth=..., proxy_basic_auth=..., disable_cache=...): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/response.pyi b/typeshed/third_party/3/requests/packages/urllib3/util/response.pyi
deleted file mode 100644
index 761a006..0000000
--- a/typeshed/third_party/3/requests/packages/urllib3/util/response.pyi
+++ /dev/null
@@ -1,5 +0,0 @@
-# Stubs for requests.packages.urllib3.util.response (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-def is_fp_closed(obj): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/retry.pyi b/typeshed/third_party/3/requests/packages/urllib3/util/retry.pyi
deleted file mode 100644
index e958d90..0000000
--- a/typeshed/third_party/3/requests/packages/urllib3/util/retry.pyi
+++ /dev/null
@@ -1,36 +0,0 @@
-# Stubs for requests.packages.urllib3.util.retry (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-from .. import exceptions
-from .. import packages
-
-ConnectTimeoutError = exceptions.ConnectTimeoutError
-MaxRetryError = exceptions.MaxRetryError
-ProtocolError = exceptions.ProtocolError
-ReadTimeoutError = exceptions.ReadTimeoutError
-ResponseError = exceptions.ResponseError
-
-log = ... # type: Any
-
-class Retry:
- DEFAULT_METHOD_WHITELIST = ... # type: Any
- BACKOFF_MAX = ... # type: Any
- total = ... # type: Any
- connect = ... # type: Any
- read = ... # type: Any
- redirect = ... # type: Any
- status_forcelist = ... # type: Any
- method_whitelist = ... # type: Any
- backoff_factor = ... # type: Any
- raise_on_redirect = ... # type: Any
- def __init__(self, total=..., connect=..., read=..., redirect=..., method_whitelist=..., status_forcelist=..., backoff_factor=..., raise_on_redirect=..., _observed_errors=...) -> None: ...
- def new(self, **kw): ...
- @classmethod
- def from_int(cls, retries, redirect=..., default=...): ...
- def get_backoff_time(self): ...
- def sleep(self): ...
- def is_forced_retry(self, method, status_code): ...
- def is_exhausted(self): ...
- def increment(self, method=..., url=..., response=..., error=..., _pool=..., _stacktrace=...): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/timeout.pyi b/typeshed/third_party/3/requests/packages/urllib3/util/timeout.pyi
deleted file mode 100644
index 0a7653c..0000000
--- a/typeshed/third_party/3/requests/packages/urllib3/util/timeout.pyi
+++ /dev/null
@@ -1,24 +0,0 @@
-# Stubs for requests.packages.urllib3.util.timeout (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-from .. import exceptions
-
-TimeoutStateError = exceptions.TimeoutStateError
-
-def current_time(): ...
-
-class Timeout:
- DEFAULT_TIMEOUT = ... # type: Any
- total = ... # type: Any
- def __init__(self, total=..., connect=..., read=...) -> None: ...
- @classmethod
- def from_float(cls, timeout): ...
- def clone(self): ...
- def start_connect(self): ...
- def get_connect_duration(self): ...
- @property
- def connect_timeout(self): ...
- @property
- def read_timeout(self): ...
diff --git a/typeshed/third_party/3/requests/packages/urllib3/util/url.pyi b/typeshed/third_party/3/requests/packages/urllib3/util/url.pyi
deleted file mode 100644
index 9877b4a..0000000
--- a/typeshed/third_party/3/requests/packages/urllib3/util/url.pyi
+++ /dev/null
@@ -1,26 +0,0 @@
-# Stubs for requests.packages.urllib3.util.url (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-from .. import exceptions
-
-LocationParseError = exceptions.LocationParseError
-
-url_attrs = ... # type: Any
-
-class Url:
- slots = ... # type: Any
- def __new__(cls, scheme=..., auth=..., host=..., port=..., path=..., query=..., fragment=...): ...
- @property
- def hostname(self): ...
- @property
- def request_uri(self): ...
- @property
- def netloc(self): ...
- @property
- def url(self): ...
-
-def split_first(s, delims): ...
-def parse_url(url): ...
-def get_host(url): ...
diff --git a/typeshed/third_party/3/requests/status_codes.pyi b/typeshed/third_party/3/requests/status_codes.pyi
deleted file mode 100644
index e3035eb..0000000
--- a/typeshed/third_party/3/requests/status_codes.pyi
+++ /dev/null
@@ -1,8 +0,0 @@
-# Stubs for requests.status_codes (Python 3)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-from .structures import LookupDict
-
-codes = ... # type: Any
diff --git a/typeshed/third_party/3/requests/structures.pyi b/typeshed/third_party/3/requests/structures.pyi
deleted file mode 100644
index af4273b..0000000
--- a/typeshed/third_party/3/requests/structures.pyi
+++ /dev/null
@@ -1,12 +0,0 @@
-# Stubs for requests.structures (Python 3)
-
-from typing import Any, Iterator, MutableMapping, Tuple, Union
-
-class CaseInsensitiveDict(MutableMapping[str, Union[str, bytes]]):
- def lower_items(self) -> Iterator[Tuple[str, Union[str, bytes]]]: ...
-
-class LookupDict(dict):
- name = ... # type: Any
- def __init__(self, name=...) -> None: ...
- def __getitem__(self, key): ...
- def get(self, key, default=...): ...
diff --git a/typeshed/third_party/3/requests/utils.pyi b/typeshed/third_party/3/requests/utils.pyi
deleted file mode 100644
index 945277a..0000000
--- a/typeshed/third_party/3/requests/utils.pyi
+++ /dev/null
@@ -1,52 +0,0 @@
-# Stubs for requests.utils (Python 3)
-
-from typing import Any
-from . import compat
-from . import cookies
-from . import structures
-from . import exceptions
-
-OrderedDict = compat.OrderedDict
-RequestsCookieJar = cookies.RequestsCookieJar
-cookiejar_from_dict = cookies.cookiejar_from_dict
-CaseInsensitiveDict = structures.CaseInsensitiveDict
-InvalidURL = exceptions.InvalidURL
-
-NETRC_FILES = ... # type: Any
-DEFAULT_CA_BUNDLE_PATH = ... # type: Any
-
-def dict_to_sequence(d): ...
-def super_len(o): ...
-def get_netrc_auth(url): ...
-def guess_filename(obj): ...
-def from_key_val_list(value): ...
-def to_key_val_list(value): ...
-def parse_list_header(value): ...
-def parse_dict_header(value): ...
-def unquote_header_value(value, is_filename=...): ...
-def dict_from_cookiejar(cj): ...
-def add_dict_to_cookiejar(cj, cookie_dict): ...
-def get_encodings_from_content(content): ...
-def get_encoding_from_headers(headers): ...
-def stream_decode_response_unicode(iterator, r): ...
-def iter_slices(string, slice_length): ...
-def get_unicode_from_response(r): ...
-
-UNRESERVED_SET = ... # type: Any
-
-def unquote_unreserved(uri): ...
-def requote_uri(uri): ...
-def address_in_network(ip, net): ...
-def dotted_netmask(mask): ...
-def is_ipv4_address(string_ip): ...
-def is_valid_cidr(string_network): ...
-def should_bypass_proxies(url): ...
-def get_environ_proxies(url): ...
-def default_user_agent(name=...): ...
-def default_headers(): ...
-def parse_header_links(value): ...
-def guess_json_utf(data): ...
-def prepend_scheme_if_needed(url, new_scheme): ...
-def get_auth_from_url(url): ...
-def to_native_string(string, encoding=...): ...
-def urldefragauth(url): ...
diff --git a/typeshed/third_party/3/six/moves/__init__.pyi b/typeshed/third_party/3/six/moves/__init__.pyi
index 859a7eb..744c95d 100644
--- a/typeshed/third_party/3/six/moves/__init__.pyi
+++ b/typeshed/third_party/3/six/moves/__init__.pyi
@@ -1,9 +1,9 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
+# Stubs for six.moves
#
-# Stubs for six.moves (Python 3.2)
+# Note: Commented-out items mean they weren't implemented at the time.
+# Uncomment them when the modules have been added to the typeshed.
+import sys
-from http import cookies as http_cookies
from io import StringIO as cStringIO
from builtins import filter as filter
from itertools import filterfalse as filterfalse
@@ -13,7 +13,6 @@ from builtins import map as map
from os import getcwd as getcwd
from os import getcwdb as getcwdb
from builtins import range as range
-from imp import reload as reload_module
from functools import reduce as reduce
from shlex import quote as shlex_quote
from io import StringIO as StringIO
@@ -23,12 +22,51 @@ from collections import UserString as UserString
from builtins import range as xrange
from builtins import zip as zip
from itertools import zip_longest as zip_longest
-import six.moves.cPickle as cPickle
-import html.parser as html_parser
+import builtins as builtins
+import configparser as configparser
+# import copyreg as copyreg
+# import dbm.gnu as dbm_gnu
+import _dummy_thread as _dummy_thread
+import http.cookiejar as http_cookiejar
+import http.cookies as http_cookies
import html.entities as html_entities
+import html.parser as html_parser
import http.client as http_client
-
-import six.moves.urllib_parse as urllib_parse
-import six.moves.urllib_error as urllib_error
+import email.mime.multipart as email_mime_multipart
+import email.mime.nonmultipart as email_mime_nonmultipart
+import email.mime.text as email_mime_text
+import email.mime.base as email_mime_base
+import http.server as BaseHTTPServer
+import http.server as CGIHTTPServer
+import http.server as SimpleHTTPServer
+import pickle as cPickle
+import queue as queue
+# import reprlib as reprlib
+import socketserver as socketserver
+import _thread as _thread
+import tkinter as tkinter
+# import tkinter.dialog as tkinter_dialog
+# import tkinter.filedialog as tkinter_filedialog
+# import tkinter.scrolledtext as tkinter_scrolledtext
+# import tkinter.simpledialog as tkinter_simpledialog
+# import tkinter.tix as tkinter_tix
+import tkinter.ttk as tkinter_ttk
+import tkinter.constants as tkinter_constants
+# import tkinter.dnd as tkinter_dnd
+# import tkinter.colorchooser as tkinter_colorchooser
+# import tkinter.commondialog as tkinter_commondialog
+# import tkinter.filedialog as tkinter_tkfiledialog
+# import tkinter.font as tkinter_font
+# import tkinter.messagebox as tkinter_messagebox
+# import tkinter.simpledialog as tkinter_tksimpledialog
+import urllib.parse as urllib_parse
+import urllib.error as urllib_error
import six.moves.urllib as urllib
-import six.moves.urllib_robotparser as urllib_robotparser
+import urllib.robotparser as urllib_robotparser
+# import xmlrpc.client as xmlrpc_client
+# import xmlrpc.server as xmlrpc_server
+
+if sys.version_info >= (3, 4):
+ from importlib import reload as reload_module
+else:
+ from imp import reload as reload_module
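
The six.moves stub above ends with a sys.version_info check so that reload_module comes from importlib on 3.4+ and from the deprecated imp module otherwise. mypy evaluates such version checks statically against the configured --python-version, so only the matching branch is type-checked. A small sketch of the same pattern in ordinary code, not taken from the diff:

    import sys

    if sys.version_info >= (3, 4):
        from importlib import reload as reload_module
    else:
        from imp import reload as reload_module

    def refresh(module):
        # Re-import an already-loaded module object using whichever
        # reload implementation the running interpreter provides.
        return reload_module(module)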
diff --git a/typeshed/third_party/3/six/moves/cPickle.pyi b/typeshed/third_party/3/six/moves/cPickle.pyi
deleted file mode 100644
index f2e2e12..0000000
--- a/typeshed/third_party/3/six/moves/cPickle.pyi
+++ /dev/null
@@ -1,6 +0,0 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.cPickle (Python 3.2)
-
-from pickle import * # noqa: F403
diff --git a/typeshed/third_party/3/six/moves/urllib/__init__.pyi b/typeshed/third_party/3/six/moves/urllib/__init__.pyi
index 298b049..d08209c 100644
--- a/typeshed/third_party/3/six/moves/urllib/__init__.pyi
+++ b/typeshed/third_party/3/six/moves/urllib/__init__.pyi
@@ -1,8 +1,3 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib (Python 3.2)
-
import six.moves.urllib.error as error
import six.moves.urllib.parse as parse
import six.moves.urllib.request as request
diff --git a/typeshed/third_party/3/six/moves/urllib/error.pyi b/typeshed/third_party/3/six/moves/urllib/error.pyi
index a45b0f7..83f0d22 100644
--- a/typeshed/third_party/3/six/moves/urllib/error.pyi
+++ b/typeshed/third_party/3/six/moves/urllib/error.pyi
@@ -1,8 +1,3 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib.error (Python 3.2)
-
from urllib.error import URLError as URLError
from urllib.error import HTTPError as HTTPError
from urllib.error import ContentTooShortError as ContentTooShortError
diff --git a/typeshed/third_party/3/six/moves/urllib/parse.pyi b/typeshed/third_party/3/six/moves/urllib/parse.pyi
index c640af2..8b4310a 100644
--- a/typeshed/third_party/3/six/moves/urllib/parse.pyi
+++ b/typeshed/third_party/3/six/moves/urllib/parse.pyi
@@ -1,22 +1,27 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
+# Stubs for six.moves.urllib.parse
#
-# Stubs for six.moves.urllib.parse (Python 3.2)
-
-from six.moves.urllib_parse import (
- ParseResult as ParseResult,
- SplitResult as SplitResult,
- parse_qs as parse_qs,
- parse_qsl as parse_qsl,
- urldefrag as urldefrag,
- urljoin as urljoin,
- urlparse as urlparse,
- urlsplit as urlsplit,
- urlunparse as urlunparse,
- urlunsplit as urlunsplit,
- quote as quote,
- quote_plus as quote_plus,
- unquote as unquote,
- unquote_plus as unquote_plus,
- urlencode as urlencode,
-)
+# Note: Commented-out items mean they weren't implemented at the time.
+# Uncomment them when the modules have been added to the typeshed.
+from urllib.parse import ParseResult as ParseResult
+from urllib.parse import SplitResult as SplitResult
+from urllib.parse import parse_qs as parse_qs
+from urllib.parse import parse_qsl as parse_qsl
+from urllib.parse import urldefrag as urldefrag
+from urllib.parse import urljoin as urljoin
+from urllib.parse import urlparse as urlparse
+from urllib.parse import urlsplit as urlsplit
+from urllib.parse import urlunparse as urlunparse
+from urllib.parse import urlunsplit as urlunsplit
+from urllib.parse import quote as quote
+from urllib.parse import quote_plus as quote_plus
+from urllib.parse import unquote as unquote
+from urllib.parse import unquote_plus as unquote_plus
+from urllib.parse import urlencode as urlencode
+# from urllib.parse import splitquery as splitquery
+# from urllib.parse import splittag as splittag
+# from urllib.parse import splituser as splituser
+from urllib.parse import uses_fragment as uses_fragment
+from urllib.parse import uses_netloc as uses_netloc
+from urllib.parse import uses_params as uses_params
+from urllib.parse import uses_query as uses_query
+from urllib.parse import uses_relative as uses_relative
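
With the hunk above, six.moves.urllib.parse re-exports the standard urllib.parse names directly instead of going through the now-removed urllib_parse shim, so six-based code picks up the stdlib signatures. A brief usage sketch (assuming six is installed), not part of the diff:

    from six.moves.urllib.parse import urlencode, urlparse

    parts = urlparse("https://example.org/search?q=mypy")
    print(parts.netloc)              # example.org
    print(urlencode({"q": "mypy"}))  # q=mypy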
diff --git a/typeshed/third_party/3/six/moves/urllib/request.pyi b/typeshed/third_party/3/six/moves/urllib/request.pyi
index b15ced7..718a819 100644
--- a/typeshed/third_party/3/six/moves/urllib/request.pyi
+++ b/typeshed/third_party/3/six/moves/urllib/request.pyi
@@ -1,40 +1,37 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
+# Stubs for six.moves.urllib.request
#
-# Stubs for six.moves.urllib.request (Python 3.2)
-
-from urllib.request import BaseHandler as BaseHandler
-from urllib.request import HTTPRedirectHandler as HTTPRedirectHandler
-from urllib.request import OpenerDirector as OpenerDirector
-
+# Note: Commented-out items mean they weren't implemented at the time.
+# Uncomment them when the modules have been added to the typeshed.
+from urllib.request import urlopen as urlopen
from urllib.request import install_opener as install_opener
from urllib.request import build_opener as build_opener
-
-# from urllib.request import urlopen as urlopen
-# from urllib.request import pathname2url as pathname2url
-# from urllib.request import url2pathname as url2pathname
-# from urllib.request import getproxies as getproxies
-# from urllib.request import Request as Request
-# from urllib.request import HTTPDefaultErrorHandler as HTTPDefaultErrorHandler
-# from urllib.request import HTTPCookieProcessor as HTTPCookieProcessor
-# from urllib.request import ProxyHandler as ProxyHandler
-# from urllib.request import HTTPPasswordMgr as HTTPPasswordMgr
-# from urllib.request import HTTPPasswordMgrWithDefaultRealm as HTTPPasswordMgrWithDefaultRealm
-# from urllib.request import AbstractBasicAuthHandler as AbstractBasicAuthHandler
-# from urllib.request import HTTPBasicAuthHandler as HTTPBasicAuthHandler
-# from urllib.request import ProxyBasicAuthHandler as ProxyBasicAuthHandler
-# from urllib.request import AbstractDigestAuthHandler as AbstractDigestAuthHandler
-# from urllib.request import HTTPDigestAuthHandler as HTTPDigestAuthHandler
-# from urllib.request import ProxyDigestAuthHandler as ProxyDigestAuthHandler
-# from urllib.request import HTTPHandler as HTTPHandler
-# from urllib.request import HTTPSHandler as HTTPSHandler
-# from urllib.request import FileHandler as FileHandler
-# from urllib.request import FTPHandler as FTPHandler
-# from urllib.request import CacheFTPHandler as CacheFTPHandler
-# from urllib.request import UnknownHandler as UnknownHandler
-# from urllib.request import HTTPErrorProcessor as HTTPErrorProcessor
-# from urllib.request import urlretrieve as urlretrieve
-# from urllib.request import urlcleanup as urlcleanup
-# from urllib.request import URLopener as URLopener
-# from urllib.request import FancyURLopener as FancyURLopener
+from urllib.request import pathname2url as pathname2url
+from urllib.request import url2pathname as url2pathname
+from urllib.request import getproxies as getproxies
+from urllib.request import Request as Request
+from urllib.request import OpenerDirector as OpenerDirector
+from urllib.request import HTTPDefaultErrorHandler as HTTPDefaultErrorHandler
+from urllib.request import HTTPRedirectHandler as HTTPRedirectHandler
+from urllib.request import HTTPCookieProcessor as HTTPCookieProcessor
+from urllib.request import ProxyHandler as ProxyHandler
+from urllib.request import BaseHandler as BaseHandler
+from urllib.request import HTTPPasswordMgr as HTTPPasswordMgr
+from urllib.request import HTTPPasswordMgrWithDefaultRealm as HTTPPasswordMgrWithDefaultRealm
+from urllib.request import AbstractBasicAuthHandler as AbstractBasicAuthHandler
+from urllib.request import HTTPBasicAuthHandler as HTTPBasicAuthHandler
+from urllib.request import ProxyBasicAuthHandler as ProxyBasicAuthHandler
+from urllib.request import AbstractDigestAuthHandler as AbstractDigestAuthHandler
+from urllib.request import HTTPDigestAuthHandler as HTTPDigestAuthHandler
+from urllib.request import ProxyDigestAuthHandler as ProxyDigestAuthHandler
+from urllib.request import HTTPHandler as HTTPHandler
+from urllib.request import HTTPSHandler as HTTPSHandler
+from urllib.request import FileHandler as FileHandler
+from urllib.request import FTPHandler as FTPHandler
+from urllib.request import CacheFTPHandler as CacheFTPHandler
+from urllib.request import UnknownHandler as UnknownHandler
+from urllib.request import HTTPErrorProcessor as HTTPErrorProcessor
+from urllib.request import urlretrieve as urlretrieve
+from urllib.request import urlcleanup as urlcleanup
+from urllib.request import URLopener as URLopener
+from urllib.request import FancyURLopener as FancyURLopener
# from urllib.request import proxy_bypass as proxy_bypass
diff --git a/typeshed/third_party/3/six/moves/urllib/response.pyi b/typeshed/third_party/3/six/moves/urllib/response.pyi
index c3b34a8..9f681ea 100644
--- a/typeshed/third_party/3/six/moves/urllib/response.pyi
+++ b/typeshed/third_party/3/six/moves/urllib/response.pyi
@@ -1 +1,8 @@
+# Stubs for six.moves.urllib.response
+#
+# Note: Commented-out items mean they weren't implemented at the time.
+# Uncomment them when the modules have been added to the typeshed.
+# from urllib.response import addbase as addbase
+# from urllib.response import addclosehook as addclosehook
+# from urllib.response import addinfo as addinfo
from urllib.response import addinfourl as addinfourl
diff --git a/typeshed/third_party/3/six/moves/urllib/robotparser.pyi b/typeshed/third_party/3/six/moves/urllib/robotparser.pyi
index 6690355..bccda14 100644
--- a/typeshed/third_party/3/six/moves/urllib/robotparser.pyi
+++ b/typeshed/third_party/3/six/moves/urllib/robotparser.pyi
@@ -1,6 +1 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib.robotparser (Python 3.2)
-
from urllib.robotparser import RobotFileParser as RobotFileParser
diff --git a/typeshed/third_party/3/six/moves/urllib_error.pyi b/typeshed/third_party/3/six/moves/urllib_error.pyi
deleted file mode 100644
index 1368664..0000000
--- a/typeshed/third_party/3/six/moves/urllib_error.pyi
+++ /dev/null
@@ -1,10 +0,0 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib_error (Python 3.2)
-
-from six.moves.urllib.error import (
- URLError as URLError,
- HTTPError as HTTPError,
- ContentTooShortError as ContentTooShortError,
-)
diff --git a/typeshed/third_party/3/six/moves/urllib_parse.pyi b/typeshed/third_party/3/six/moves/urllib_parse.pyi
deleted file mode 100644
index 96f6207..0000000
--- a/typeshed/third_party/3/six/moves/urllib_parse.pyi
+++ /dev/null
@@ -1,20 +0,0 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib_parse (Python 3.2)
-
-from urllib.parse import ParseResult as ParseResult
-from urllib.parse import SplitResult as SplitResult
-from urllib.parse import parse_qs as parse_qs
-from urllib.parse import parse_qsl as parse_qsl
-from urllib.parse import urldefrag as urldefrag
-from urllib.parse import urljoin as urljoin
-from urllib.parse import urlparse as urlparse
-from urllib.parse import urlsplit as urlsplit
-from urllib.parse import urlunparse as urlunparse
-from urllib.parse import urlunsplit as urlunsplit
-from urllib.parse import quote as quote
-from urllib.parse import quote_plus as quote_plus
-from urllib.parse import unquote as unquote
-from urllib.parse import unquote_plus as unquote_plus
-from urllib.parse import urlencode as urlencode
diff --git a/typeshed/third_party/3/six/moves/urllib_request.pyi b/typeshed/third_party/3/six/moves/urllib_request.pyi
deleted file mode 100644
index 168f635..0000000
--- a/typeshed/third_party/3/six/moves/urllib_request.pyi
+++ /dev/null
@@ -1,41 +0,0 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib_request (Python 3.2)
-
-from six.moves.urllib.request import (
- install_opener as install_opener,
- build_opener as build_opener,
- BaseHandler as BaseHandler,
- OpenerDirector as OpenerDirector,
- HTTPRedirectHandler as HTTPRedirectHandler,
-
- # urlopen as urlopen,
- # pathname2url as pathname2url,
- # url2pathname as url2pathname,
- # getproxies as getproxies,
- # Request as Request,
- # HTTPDefaultErrorHandler as HTTPDefaultErrorHandler,
- # HTTPCookieProcessor as HTTPCookieProcessor,
- # ProxyHandler as ProxyHandler,
- # HTTPPasswordMgr as HTTPPasswordMgr,
- # HTTPPasswordMgrWithDefaultRealm as HTTPPasswordMgrWithDefaultRealm,
- # AbstractBasicAuthHandler as AbstractBasicAuthHandler,
- # HTTPBasicAuthHandler as HTTPBasicAuthHandler,
- # ProxyBasicAuthHandler as ProxyBasicAuthHandler,
- # AbstractDigestAuthHandler as AbstractDigestAuthHandler,
- # HTTPDigestAuthHandler as HTTPDigestAuthHandler,
- # ProxyDigestAuthHandler as ProxyDigestAuthHandler,
- # HTTPHandler as HTTPHandler,
- # HTTPSHandler as HTTPSHandler,
- # FileHandler as FileHandler,
- # FTPHandler as FTPHandler,
- # CacheFTPHandler as CacheFTPHandler,
- # UnknownHandler as UnknownHandler,
- # HTTPErrorProcessor as HTTPErrorProcessor,
- # urlretrieve as urlretrieve,
- # urlcleanup as urlcleanup,
- # URLopener as URLopener,
- # FancyURLopener as FancyURLopener,
- # proxy_bypass as proxy_bypass,
-)
diff --git a/typeshed/third_party/3/six/moves/urllib_response.pyi b/typeshed/third_party/3/six/moves/urllib_response.pyi
deleted file mode 100644
index 1574d7d..0000000
--- a/typeshed/third_party/3/six/moves/urllib_response.pyi
+++ /dev/null
@@ -1 +0,0 @@
-from six.moves.urllib.response import addinfourl as addinfourl
diff --git a/typeshed/third_party/3/six/moves/urllib_robotparser.pyi b/typeshed/third_party/3/six/moves/urllib_robotparser.pyi
deleted file mode 100644
index 8b4ca73..0000000
--- a/typeshed/third_party/3/six/moves/urllib_robotparser.pyi
+++ /dev/null
@@ -1,8 +0,0 @@
-# Generated by stubtool 0.1, DO NOT EDIT
-# See https://github.com/o11c/stubtool
-#
-# Stubs for six.moves.urllib_robotparser (Python 3.2)
-
-from six.moves.urllib.robotparser import (
- RobotFileParser as RobotFileParser,
-)
diff --git a/typeshed/third_party/3/typed_ast/__init__.pyi b/typeshed/third_party/3/typed_ast/__init__.pyi
index 92e1216..f260032 100644
--- a/typeshed/third_party/3/typed_ast/__init__.pyi
+++ b/typeshed/third_party/3/typed_ast/__init__.pyi
@@ -1,2 +1,2 @@
-# This module is a fork of the CPython 2.7 and 3.5 ast modules with PEP 484 support.
-# See: https://github.com/dropbox/typed_ast
+# This module is a fork of the CPython 2 and 3 ast modules with PEP 484 support.
+# See: https://github.com/python/typed_ast
diff --git a/typeshed/third_party/3/typed_ast/ast35.pyi b/typeshed/third_party/3/typed_ast/ast3.pyi
similarity index 93%
rename from typeshed/third_party/3/typed_ast/ast35.pyi
rename to typeshed/third_party/3/typed_ast/ast3.pyi
index 71ac1ba..1e96daf 100644
--- a/typeshed/third_party/3/typed_ast/ast35.pyi
+++ b/typeshed/third_party/3/typed_ast/ast3.pyi
@@ -8,7 +8,10 @@ class NodeVisitor():
class NodeTransformer(NodeVisitor):
def generic_visit(self, node: AST) -> None: ...
-def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> AST: ...
+def parse(source: Union[str, bytes],
+ filename: Union[str, bytes] = ...,
+ mode: str = ...,
+ feature_version: int = ...) -> AST: ...
def copy_location(new_node: AST, old_node: AST) -> AST: ...
def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ...
def fix_missing_locations(node: AST) -> AST: ...
@@ -86,15 +89,20 @@ class Delete(stmt):
class Assign(stmt):
targets = ... # type: typing.List[expr]
- value = ... # type: Optional[expr]
+ value = ... # type: expr
type_comment = ... # type: Optional[str]
- annotation = ... # type: Optional[expr]
class AugAssign(stmt):
target = ... # type: expr
op = ... # type: operator
value = ... # type: expr
+class AnnAssign(stmt):
+ target = ... # type: expr
+ annotation = ... # type: expr
+ value = ... # type: Optional[expr]
+ simple = ... # type: int
+
class For(stmt):
target = ... # type: expr
iter = ... # type: expr
@@ -107,6 +115,7 @@ class AsyncFor(stmt):
iter = ... # type: expr
body = ... # type: typing.List[stmt]
orelse = ... # type: typing.List[stmt]
+ type_comment = ... # type: Optional[str]
class While(stmt):
test = ... # type: expr
@@ -126,6 +135,7 @@ class With(stmt):
class AsyncWith(stmt):
items = ... # type: typing.List[withitem]
body = ... # type: typing.List[stmt]
+ type_comment = ... # type: Optional[str]
class Raise(stmt):
exc = ... # type: Optional[expr]
@@ -255,6 +265,14 @@ class Num(expr):
class Str(expr):
s = ... # type: str
+class FormattedValue(expr):
+ value = ... # type: expr
+ conversion = ... # type: typing.Optional[int]
+ format_spec = ... # type: typing.Optional[expr]
+
+class JoinedStr(expr):
+ values = ... # type: typing.List[expr]
+
class Bytes(expr):
s = ... # type: bytes
@@ -351,6 +369,7 @@ class comprehension(AST):
target = ... # type: expr
iter = ... # type: expr
ifs = ... # type: typing.List[expr]
+ is_async = ... # type: int
class ExceptHandler(AST):
@@ -374,6 +393,7 @@ class arg(AST):
annotation = ... # type: Optional[expr]
lineno = ... # type: int
col_offset = ... # type: int
+ type_comment = ... # type: typing.Optional[str]
class keyword(AST):
arg = ... # type: Optional[identifier]
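
The renamed ast3 stub above adds a feature_version parameter to parse() and new node classes such as AnnAssign, FormattedValue and JoinedStr. A hedged sketch of how a consumer might exercise these, assuming the typed_ast package matches the stub and that feature_version takes the Python 3 minor version (e.g. 6 for 3.6 syntax):

    from typed_ast import ast3

    # Parse a PEP 526 variable annotation; the module body holds an AnnAssign node.
    tree = ast3.parse("x: int = 1\n", feature_version=6)
    node = tree.body[0]
    if isinstance(node, ast3.AnnAssign):
        print(ast3.dump(node.annotation))  # the Name node for 'int'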
diff --git a/typeshed/third_party/3/typed_ast/conversions.pyi b/typeshed/third_party/3/typed_ast/conversions.pyi
index 53fcc32..d5f1829 100644
--- a/typeshed/third_party/3/typed_ast/conversions.pyi
+++ b/typeshed/third_party/3/typed_ast/conversions.pyi
@@ -1,4 +1,4 @@
from . import ast27
-from . import ast35
+from . import ast3
-def py2to3(ast: ast27.AST) -> ast35.AST: ...
+def py2to3(ast: ast27.AST) -> ast3.AST: ...
diff --git a/typeshed/third_party/3/werkzeug/wrappers.pyi b/typeshed/third_party/3/werkzeug/wrappers.pyi
index 9bc5151..f6b6770 100644
--- a/typeshed/third_party/3/werkzeug/wrappers.pyi
+++ b/typeshed/third_party/3/werkzeug/wrappers.pyi
@@ -2,78 +2,85 @@
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
-from typing import Any
+from typing import (
+ Any, Iterable, Mapping, Optional, Sequence, Tuple, Type, Union,
+)
+
+from .datastructures import (
+ CombinedMultiDict, EnvironHeaders, Headers, ImmutableMultiDict,
+ MultiDict, TypeConversionDict,
+)
class BaseRequest:
- charset = ... # type: Any
- encoding_errors = ... # type: Any
- max_content_length = ... # type: Any
- max_form_memory_size = ... # type: Any
- parameter_storage_class = ... # type: Any
- list_storage_class = ... # type: Any
- dict_storage_class = ... # type: Any
- form_data_parser_class = ... # type: Any
- trusted_hosts = ... # type: Any
+ charset = ... # type: str
+ encoding_errors = ... # type: str
+ max_content_length = ... # type: int
+ max_form_memory_size = ... # type: int
+ parameter_storage_class = ... # type: Type
+ list_storage_class = ... # type: Type
+ dict_storage_class = ... # type: Type
+ form_data_parser_class = ... # type: Type
+ trusted_hosts = ... # type: Optional[Sequence[str]]
disable_data_descriptor = ... # type: Any
- environ = ... # type: Any
+ environ = ... # type: Mapping[str, object]
shallow = ... # type: Any
- def __init__(self, environ, populate_request=True, shallow=False): ...
+ def __init__(self, environ: Mapping[str, object], populate_request: bool=True, shallow: bool=False) -> None: ...
@property
- def url_charset(self): ...
+ def url_charset(self) -> str: ...
@classmethod
- def from_values(cls, *args, **kwargs): ...
+ def from_values(cls, *args, **kwargs) -> 'BaseRequest': ...
@classmethod
def application(cls, f): ...
@property
def want_form_data_parsed(self): ...
def make_form_data_parser(self): ...
- def close(self): ...
+ def close(self) -> None: ...
def __enter__(self): ...
def __exit__(self, exc_type, exc_value, tb): ...
def stream(self): ...
input_stream = ... # type: Any
- def args(self): ...
+ args = ... # type: ImmutableMultiDict
def data(self): ...
- def get_data(self, cache=True, as_text=False, parse_form_data=False): ...
- def form(self): ...
- def values(self): ...
- def files(self): ...
- def cookies(self): ...
- def headers(self): ...
- def path(self): ...
- def full_path(self): ...
- def script_root(self): ...
- def url(self): ...
- def base_url(self): ...
- def url_root(self): ...
- def host_url(self): ...
- def host(self): ...
- query_string = ... # type: Any
- method = ... # type: Any
+ def get_data(self, cache: bool=True, as_text: bool=False, parse_form_data: bool=False) -> bytes: ...
+ form = ... # type: ImmutableMultiDict
+ values = ... # type: CombinedMultiDict
+ files = ... # type: MultiDict
+ cookies = ... # type: TypeConversionDict
+ headers = ... # type: EnvironHeaders
+ path = ... # type: str
+ full_path = ... # type: str
+ script_root = ... # type: str
+ url = ... # type: str
+ base_url = ... # type: str
+ url_root = ... # type: str
+ host_url = ... # type: str
+ host = ... # type: str
+ query_string = ... # type: bytes
+ method = ... # type: str
def access_route(self): ...
@property
- def remote_addr(self): ...
- remote_user = ... # type: Any
- scheme = ... # type: Any
- is_xhr = ... # type: Any
- is_secure = ... # type: Any
- is_multithread = ... # type: Any
- is_multiprocess = ... # type: Any
- is_run_once = ... # type: Any
+ def remote_addr(self) -> str: ...
+ remote_user = ... # type: str
+ scheme = ... # type: str
+ is_xhr = ... # type: bool
+ is_secure = ... # type: bool
+ is_multithread = ... # type: bool
+ is_multiprocess = ... # type: bool
+ is_run_once = ... # type: bool
class BaseResponse:
- charset = ... # type: Any
- default_status = ... # type: Any
- default_mimetype = ... # type: Any
- implicit_sequence_conversion = ... # type: Any
- autocorrect_location_header = ... # type: Any
- automatically_set_content_length = ... # type: Any
- headers = ... # type: Any
- status_code = ... # type: Any
- status = ... # type: Any
- direct_passthrough = ... # type: Any
- response = ... # type: Any
- def __init__(self, response=None, status=None, headers=None, mimetype=None, content_type=None, direct_passthrough=False): ...
+ charset = ... # type: str
+ default_status = ... # type: int
+ default_mimetype = ... # type: str
+ implicit_sequence_conversion = ... # type: bool
+ autocorrect_location_header = ... # type: bool
+ automatically_set_content_length = ... # type: bool
+ headers = ... # type: Headers
+ status_code = ... # type: int
+ status = ... # type: str
+ direct_passthrough = ... # type: bool
+ response = ... # type: Iterable[bytes]
+ def __init__(self, response: Union[Iterable[bytes], bytes]=None, status: Union[str, int]=None, headers: Union[Headers, Mapping[str, str], Sequence[Tuple[str, str]]]=None, mimetype: str=None, content_type: str=None, direct_passthrough: bool=False) -> None: ...
def call_on_close(self, func): ...
@classmethod
def force_type(cls, response, environ=None): ...
@@ -88,10 +95,10 @@ class BaseResponse:
def set_cookie(self, key, value='', max_age=None, expires=None, path='', domain=None, secure=False, httponly=False): ...
def delete_cookie(self, key, path='', domain=None): ...
@property
- def is_streamed(self): ...
+ def is_streamed(self) -> bool: ...
@property
- def is_sequence(self): ...
- def close(self): ...
+ def is_sequence(self) -> bool: ...
+ def close(self) -> None: ...
def __enter__(self): ...
def __exit__(self, exc_type, exc_value, tb): ...
def freeze(self, **kwargs): ...
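
The werkzeug hunk above replaces many Any annotations with concrete types (method: str, query_string: bytes, headers: EnvironHeaders, and so on). A hedged sketch of the kind of mistake the stricter stub lets mypy catch, assuming werkzeug is installed and behaves as the stub describes:

    from werkzeug.wrappers import BaseRequest

    def log_query(q: str) -> None:
        print("query:", q)

    def handle(request: BaseRequest) -> None:
        log_query(request.method)                        # ok: method is str
        log_query(request.query_string)                  # error under the new
                                                         # stub: bytes, not str
        log_query(request.query_string.decode("utf-8"))  # ok after decoding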
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/mypy.git