[med-svn] [Git][python-team/packages/mypy][upstream] New upstream version 1.20.2
Michael R. Crusoe (@crusoe)
gitlab at salsa.debian.org
Thu Apr 23 16:00:37 BST 2026
Michael R. Crusoe pushed to branch upstream at Debian Python Team / packages / mypy
Commits:
2c374ff5 by Michael R. Crusoe at 2026-04-21T20:24:29+02:00
New upstream version 1.20.2
- - - - -
29 changed files:
- PKG-INFO
- mypy-requirements.txt
- mypy.egg-info/PKG-INFO
- mypy.egg-info/requires.txt
- mypy/build.py
- mypy/checker.py
- mypy/main.py
- mypy/meet.py
- mypy/metastore.py
- mypy/test/testtypes.py
- mypy/typeanal.py
- mypy/version.py
- mypyc/build.py
- mypyc/codegen/emitmodule.py
- mypyc/irbuild/match.py
- mypyc/lib-rt/CPy.h
- mypyc/lib-rt/misc_ops.c
- mypyc/lib-rt/mypyc_util.h
- mypyc/test-data/commandline.test
- mypyc/test-data/irbuild-match.test
- mypyc/test-data/run-base64.test
- mypyc/test-data/run-match.test
- mypyc/test-data/run-misc.test
- mypyc/test-data/run-multimodule.test
- mypyc/test-data/run-python312.test
- pyproject.toml
- test-data/unit/check-isinstance.test
- test-data/unit/check-narrowing.test
- test-data/unit/check-python310.test
Changes:
=====================================
PKG-INFO
=====================================
@@ -1,6 +1,6 @@
Metadata-Version: 2.4
Name: mypy
-Version: 1.20.1
+Version: 1.20.2
Summary: Optional static typing for Python
Author-email: Jukka Lehtosalo <jukka.lehtosalo at iki.fi>
License-Expression: MIT
@@ -24,7 +24,8 @@ Requires-Python: >=3.10
Description-Content-Type: text/x-rst
License-File: LICENSE
License-File: mypy/typeshed/LICENSE
-Requires-Dist: typing_extensions>=4.6.0
+Requires-Dist: typing_extensions>=4.6.0; python_version < "3.15"
+Requires-Dist: typing_extensions>=4.14.0; python_version >= "3.15"
Requires-Dist: mypy_extensions>=1.0.0
Requires-Dist: pathspec>=1.0.0
Requires-Dist: tomli>=1.1.0; python_version < "3.11"
=====================================
mypy-requirements.txt
=====================================
@@ -1,6 +1,7 @@
# NOTE: this needs to be kept in sync with the "requires" list in pyproject.toml
# and the pins in setup.py
-typing_extensions>=4.6.0
+typing_extensions>=4.6.0; python_version<'3.15'
+typing_extensions>=4.14.0; python_version>='3.15'
mypy_extensions>=1.0.0
pathspec>=1.0.0
tomli>=1.1.0; python_version<'3.11'
=====================================
mypy.egg-info/PKG-INFO
=====================================
@@ -1,6 +1,6 @@
Metadata-Version: 2.4
Name: mypy
-Version: 1.20.1
+Version: 1.20.2
Summary: Optional static typing for Python
Author-email: Jukka Lehtosalo <jukka.lehtosalo at iki.fi>
License-Expression: MIT
@@ -24,7 +24,8 @@ Requires-Python: >=3.10
Description-Content-Type: text/x-rst
License-File: LICENSE
License-File: mypy/typeshed/LICENSE
-Requires-Dist: typing_extensions>=4.6.0
+Requires-Dist: typing_extensions>=4.6.0; python_version < "3.15"
+Requires-Dist: typing_extensions>=4.14.0; python_version >= "3.15"
Requires-Dist: mypy_extensions>=1.0.0
Requires-Dist: pathspec>=1.0.0
Requires-Dist: tomli>=1.1.0; python_version < "3.11"
=====================================
mypy.egg-info/requires.txt
=====================================
@@ -1,4 +1,3 @@
-typing_extensions>=4.6.0
mypy_extensions>=1.0.0
pathspec>=1.0.0
@@ -8,6 +7,12 @@ librt>=0.8.0
[:python_version < "3.11"]
tomli>=1.1.0
+[:python_version < "3.15"]
+typing_extensions>=4.6.0
+
+[:python_version >= "3.15"]
+typing_extensions>=4.14.0
+
[dmypy]
psutil>=4.0
=====================================
mypy/build.py
=====================================
@@ -874,7 +874,7 @@ class BuildManager:
]
)
- self.metastore = create_metastore(options)
+ self.metastore = create_metastore(options, parallel_worker=parallel_worker)
# a mapping from source files to their corresponding shadow files
# for efficient lookup
@@ -1613,10 +1613,12 @@ def exclude_from_backups(target_dir: str) -> None:
pass
-def create_metastore(options: Options) -> MetadataStore:
+def create_metastore(options: Options, parallel_worker: bool) -> MetadataStore:
"""Create the appropriate metadata store."""
if options.sqlite_cache:
- mds: MetadataStore = SqliteMetadataStore(_cache_dir_prefix(options))
+ mds: MetadataStore = SqliteMetadataStore(
+ _cache_dir_prefix(options), set_journal_mode=not parallel_worker
+ )
else:
mds = FilesystemMetadataStore(_cache_dir_prefix(options))
return mds
=====================================
mypy/checker.py
=====================================
@@ -6957,10 +6957,6 @@ class TypeChecker(NodeVisitor[None], TypeCheckerSharedApi, SplittingVisitor):
def propagate_up_typemap_info(self, new_types: TypeMap) -> TypeMap:
"""Attempts refining parent expressions of any MemberExpr or IndexExprs in new_types.
- Specifically, this function accepts two mappings of expression to original types:
- the original mapping (existing_types), and a new mapping (new_types) intended to
- update the original.
-
This function iterates through new_types and attempts to use the information to try
refining any parent types that happen to be unions.
@@ -6979,23 +6975,12 @@ class TypeChecker(NodeVisitor[None], TypeCheckerSharedApi, SplittingVisitor):
We return the newly refined map. This map is guaranteed to be a superset of 'new_types'.
"""
- output_map = {}
+ all_mappings = [new_types]
for expr, expr_type in new_types.items():
- # The original inferred type should always be present in the output map, of course
- output_map[expr] = expr_type
-
- # Next, try using this information to refine the parent types, if applicable.
- new_mapping = self.refine_parent_types(expr, expr_type)
- for parent_expr, proposed_parent_type in new_mapping.items():
- # We don't try inferring anything if we've already inferred something for
- # the parent expression.
- # TODO: Consider picking the narrower type instead of always discarding this?
- if parent_expr in new_types:
- continue
- output_map[parent_expr] = proposed_parent_type
- return output_map
+ all_mappings.append(self.refine_parent_types(expr, expr_type))
+ return reduce_and_conditional_type_maps(all_mappings, use_meet=True)
- def refine_parent_types(self, expr: Expression, expr_type: Type) -> Mapping[Expression, Type]:
+ def refine_parent_types(self, expr: Expression, expr_type: Type) -> TypeMap:
"""Checks if the given expr is a 'lookup operation' into a union and iteratively refines
the parent types based on the 'expr_type'.
@@ -8426,12 +8411,12 @@ def conditional_types(
proposed_type: Type
remaining_type: Type
- proper_type = get_proper_type(current_type)
+ p_current_type = get_proper_type(current_type)
# factorize over union types: isinstance(A|B, C) -> yes = A_yes | B_yes
- if isinstance(proper_type, UnionType):
+ if isinstance(p_current_type, UnionType):
yes_items: list[Type] = []
no_items: list[Type] = []
- for union_item in proper_type.items:
+ for union_item in p_current_type.items:
yes_type, no_type = conditional_types(
union_item,
proposed_type_ranges,
@@ -8457,7 +8442,7 @@ def conditional_types(
items[i] = item
proposed_type = get_proper_type(UnionType.make_union(items))
- if isinstance(proper_type, AnyType):
+ if isinstance(p_current_type, AnyType):
return proposed_type, current_type
if isinstance(proposed_type, AnyType):
# We don't really know much about the proposed type, so we shouldn't
@@ -8508,6 +8493,11 @@ def conditional_types(
proposed_precise_type,
consider_runtime_isinstance=consider_runtime_isinstance,
)
+
+ # Avoid widening the type
+ if is_proper_subtype(p_current_type, proposed_type, ignore_promotions=True):
+ proposed_type = default if default is not None else current_type
+
return proposed_type, remaining_type
@@ -8740,6 +8730,8 @@ def reduce_and_conditional_type_maps(ms: list[TypeMap], *, use_meet: bool) -> Ty
return ms[0]
result = ms[0]
for m in ms[1:]:
+ if not m:
+ continue # this is a micro-optimisation
result = and_conditional_maps(result, m, use_meet=use_meet)
return result
=====================================
mypy/main.py
=====================================
@@ -189,6 +189,12 @@ def main(
list([res]) # noqa: C410
+class BuildResultThunk:
+ # We pass this around so that we avoid freeing memory, which is slow
+ def __init__(self, build_result: build.BuildResult | None) -> None:
+ self._result = build_result
+
+
def run_build(
sources: list[BuildSource],
options: Options,
@@ -196,7 +202,7 @@ def run_build(
t0: float,
stdout: TextIO,
stderr: TextIO,
-) -> tuple[build.BuildResult | None, list[str], bool]:
+) -> tuple[BuildResultThunk | None, list[str], bool]:
formatter = util.FancyFormatter(
stdout, stderr, options.hide_error_codes, hide_success=bool(options.output)
)
@@ -227,8 +233,12 @@ def run_build(
blockers = True
if not e.use_stdout:
serious = True
+
+ if res:
+ res.manager.metastore.close()
+
maybe_write_junit_xml(time.time() - t0, serious, messages, messages_by_file, options)
- return res, messages, blockers
+ return BuildResultThunk(res), messages, blockers
def show_messages(
=====================================
mypy/meet.py
=====================================
@@ -541,6 +541,9 @@ def is_overlapping_types(
return False
if isinstance(left, CallableType) and isinstance(right, CallableType):
+ # We run is_callable_compatible in both directions, similar to the logic
+ # in is_unsafe_overlapping_overload_signatures
+ # See comments in https://github.com/python/mypy/pull/5476
return is_callable_compatible(
left,
right,
@@ -548,6 +551,14 @@ def is_overlapping_types(
is_proper_subtype=False,
ignore_pos_arg_names=not overlap_for_overloads,
allow_partial_overlap=True,
+ ) or is_callable_compatible(
+ right,
+ left,
+ is_compat=_is_overlapping_types,
+ is_proper_subtype=False,
+ ignore_pos_arg_names=not overlap_for_overloads,
+ check_args_covariantly=True,
+ allow_partial_overlap=True,
)
call = None
=====================================
mypy/metastore.py
=====================================
@@ -154,7 +154,7 @@ CREATE INDEX IF NOT EXISTS path_idx on files2(path);
"""
-def connect_db(db_file: str) -> sqlite3.Connection:
+def connect_db(db_file: str, set_journal_mode: bool) -> sqlite3.Connection:
import sqlite3.dbapi2
db = sqlite3.dbapi2.connect(db_file)
@@ -162,21 +162,23 @@ def connect_db(db_file: str) -> sqlite3.Connection:
# but without this flag, commits are *very* slow, especially when using HDDs,
# see https://www.sqlite.org/faq.html#q19 for details.
db.execute("PRAGMA synchronous=OFF")
+ if set_journal_mode:
+ db.execute("PRAGMA journal_mode=WAL")
db.executescript(SCHEMA)
return db
class SqliteMetadataStore(MetadataStore):
- def __init__(self, cache_dir_prefix: str) -> None:
+ def __init__(self, cache_dir_prefix: str, set_journal_mode: bool = False) -> None:
# We check startswith instead of equality because the version
# will have already been appended by the time the cache dir is
# passed here.
+ self.db = None
if cache_dir_prefix.startswith(os.devnull):
- self.db = None
return
os.makedirs(cache_dir_prefix, exist_ok=True)
- self.db = connect_db(os_path_join(cache_dir_prefix, "cache.db"))
+ self.db = connect_db(os_path_join(cache_dir_prefix, "cache.db"), set_journal_mode)
def _query(self, name: str, field: str) -> Any:
# Raises FileNotFound for consistency with the file system version
=====================================
mypy/test/testtypes.py
=====================================
@@ -8,7 +8,7 @@ from unittest import TestCase, skipUnless
from mypy.erasetype import erase_type, remove_instance_last_known_values
from mypy.indirection import TypeIndirectionVisitor
from mypy.join import join_types
-from mypy.meet import meet_types, narrow_declared_type
+from mypy.meet import is_overlapping_types, meet_types, narrow_declared_type
from mypy.nodes import (
ARG_NAMED,
ARG_OPT,
@@ -645,6 +645,20 @@ class TypeOpsSuite(Suite):
assert_equal(make_simplified_union(original), union)
assert_equal(make_simplified_union(list(reversed(original))), union)
+ def test_generic_callable_overlap_is_symmetric(self) -> None:
+ any_type = AnyType(TypeOfAny.from_omitted_generics)
+ outer_t = TypeVarType("T", "T", TypeVarId(1), [], self.fx.o, any_type)
+ outer_s = TypeVarType("S", "S", TypeVarId(2), [], self.fx.o, any_type)
+ generic_t = TypeVarType("T", "T", TypeVarId(-1), [], self.fx.o, any_type)
+
+ callable_type = CallableType([outer_t], [ARG_POS], [None], outer_s, self.fx.function)
+ generic_identity = CallableType(
+ [generic_t], [ARG_POS], [None], generic_t, self.fx.function, variables=[generic_t]
+ )
+
+ assert is_overlapping_types(callable_type, generic_identity)
+ assert is_overlapping_types(generic_identity, callable_type)
+
# Helpers
def tuple(self, *a: Type) -> TupleType:
=====================================
mypy/typeanal.py
=====================================
@@ -2120,7 +2120,7 @@ def fix_instance(
t.args = tuple(args)
fix_type_var_tuple_argument(t)
if not t.type.has_type_var_tuple_type:
- with state.strict_optional_set(options.strict_optional):
+ with state.strict_optional_set(True):
fixed = expand_type(t, env)
assert isinstance(fixed, Instance)
t.args = fixed.args
=====================================
mypy/version.py
=====================================
@@ -8,7 +8,7 @@ from mypy import git
# - Release versions have the form "1.2.3".
# - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440).
# - Before 1.0 we had the form "0.NNN".
-__version__ = "1.20.1"
+__version__ = "1.20.2"
base_version = __version__
mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
=====================================
mypyc/build.py
=====================================
@@ -215,9 +215,7 @@ def get_mypy_config(
mypyc_sources = all_sources
if compiler_options.separate:
- mypyc_sources = [
- src for src in mypyc_sources if src.path and not src.path.endswith("__init__.py")
- ]
+ mypyc_sources = [src for src in mypyc_sources if src.path]
if not mypyc_sources:
return mypyc_sources, all_sources, options
@@ -243,6 +241,10 @@ def get_mypy_config(
return mypyc_sources, all_sources, options
+def is_package_source(source: BuildSource) -> bool:
+ return source.path is not None and os.path.split(source.path)[1] == "__init__.py"
+
+
def generate_c_extension_shim(
full_module_name: str, module_name: str, dir_name: str, group_name: str
) -> str:
@@ -387,7 +389,7 @@ def build_using_shared_lib(
# since this seems to be needed for it to end up in the right place.
full_module_name = source.module
assert source.path
- if os.path.split(source.path)[1] == "__init__.py":
+ if is_package_source(source):
full_module_name += ".__init__"
extensions.append(
get_extension()(
@@ -528,6 +530,7 @@ def mypyc_build(
use_shared_lib = (
len(mypyc_sources) > 1
or any("." in x.module for x in mypyc_sources)
+ or any(is_package_source(x) for x in mypyc_sources)
or always_use_shared_lib
)
=====================================
mypyc/codegen/emitmodule.py
=====================================
@@ -46,6 +46,7 @@ from mypyc.codegen.emitwrapper import (
)
from mypyc.codegen.literals import Literals
from mypyc.common import (
+ EXT_SUFFIX,
IS_FREE_THREADED,
MODULE_PREFIX,
PREFIX,
@@ -133,7 +134,7 @@ class MypycPlugin(Plugin):
self.group_map[id] = (name, modules)
self.compiler_options = compiler_options
- self.metastore = create_metastore(options)
+ self.metastore = create_metastore(options, parallel_worker=False)
def report_config_data(self, ctx: ReportConfigContext) -> tuple[str | None, list[str]] | None:
# The config data we report is the group map entry for the module.
@@ -1258,11 +1259,42 @@ class GroupGenerator:
f"if (unlikely({module_static} == NULL))",
" goto fail;",
)
+
+ emitter.emit_line(f'modname = PyUnicode_FromString("{module_name}");')
+ emitter.emit_line("if (modname == NULL) CPyError_OutOfMemory();")
+ emitter.emit_line("int rv = 0;")
+ if self.group_name:
+ shared_lib_mod_name = shared_lib_name(self.group_name)
+ emitter.emit_line("PyObject *mod_dict = PyImport_GetModuleDict();")
+ emitter.emit_line("PyObject *shared_lib = NULL;")
+ emitter.emit_line(
+ f'rv = PyDict_GetItemStringRef(mod_dict, "{shared_lib_mod_name}", &shared_lib);'
+ )
+ emitter.emit_line("if (rv < 0) goto fail;")
+ emitter.emit_line(
+ 'PyObject *shared_lib_file = PyObject_GetAttrString(shared_lib, "__file__");'
+ )
+ emitter.emit_line("if (shared_lib_file == NULL) goto fail;")
+ else:
+ emitter.emit_line(
+ f'PyObject *shared_lib_file = PyUnicode_FromString("{module_name + EXT_SUFFIX}");'
+ )
+ emitter.emit_line("if (shared_lib_file == NULL) CPyError_OutOfMemory();")
+ emitter.emit_line(f'PyObject *ext_suffix = PyUnicode_FromString("{EXT_SUFFIX}");')
+ emitter.emit_line("if (ext_suffix == NULL) CPyError_OutOfMemory();")
+ is_pkg = int(self.source_paths[module_name].endswith("__init__.py"))
+ emitter.emit_line(f"Py_ssize_t is_pkg = {is_pkg};")
+
+ emitter.emit_line(
+ f"rv = CPyImport_SetDunderAttrs({module_static}, modname, shared_lib_file, ext_suffix, is_pkg);"
+ )
+ emitter.emit_line("Py_DECREF(ext_suffix);")
+ emitter.emit_line("Py_DECREF(shared_lib_file);")
+ emitter.emit_line("if (rv < 0) goto fail;")
+
# Register in sys.modules early so that circular imports via
# CPyImport_ImportNative can detect that this module is already
# being initialized and avoid re-executing the module body.
- emitter.emit_line(f'modname = PyUnicode_FromString("{module_name}");')
- emitter.emit_line("if (modname == NULL) CPyError_OutOfMemory();")
emitter.emit_line(
f"if (PyObject_SetItem(PyImport_GetModuleDict(), modname, {module_static}) < 0)"
)
=====================================
mypyc/irbuild/match.py
=====================================
@@ -108,20 +108,25 @@ class MatchVisitor(TraverserVisitor):
self.builder.add_bool_branch(cond, self.code_block, self.next_block)
def visit_or_pattern(self, pattern: OrPattern) -> None:
- backup_block = self.next_block
- self.next_block = BasicBlock()
+ code_block = self.code_block
+ next_block = self.next_block
for p in pattern.patterns:
+ self.code_block = BasicBlock()
+ self.next_block = BasicBlock()
+
# Hack to ensure the as pattern is bound to each pattern in the
# "or" pattern, but not every subpattern
backup = self.as_pattern
p.accept(self)
self.as_pattern = backup
+ self.builder.activate_block(self.code_block)
+ self.builder.goto(code_block)
self.builder.activate_block(self.next_block)
- self.next_block = BasicBlock()
- self.next_block = backup_block
+ self.code_block = code_block
+ self.next_block = next_block
self.builder.goto(self.next_block)
def visit_class_pattern(self, pattern: ClassPattern) -> None:
=====================================
mypyc/lib-rt/CPy.h
=====================================
@@ -967,6 +967,8 @@ PyObject *CPyImport_ImportNative(PyObject *module_name,
CPyModule **module_static,
PyObject *shared_lib_file, PyObject *ext_suffix,
Py_ssize_t is_package);
+int CPyImport_SetDunderAttrs(PyObject *module, PyObject *module_name, PyObject *shared_lib_file,
+ PyObject *ext_suffix, Py_ssize_t is_package);
PyObject *CPySingledispatch_RegisterFunction(PyObject *singledispatch_func, PyObject *cls,
PyObject *func);
=====================================
mypyc/lib-rt/misc_ops.c
=====================================
@@ -1157,6 +1157,9 @@ void CPyTrace_LogEvent(const char *location, const char *line, const char *op, c
typedef struct {
PyObject_HEAD
PyObject *name;
+#if CPY_3_15_FEATURES
+ PyObject *qualname;
+#endif
PyObject *type_params;
PyObject *compute_value;
PyObject *value;
@@ -1221,6 +1224,47 @@ static int CPyImport_InitSpecClasses(void) {
return 0;
}
+// Set __package__ before executing the module body so it is available
+// during module initialization. For a package, __package__ is the module
+// name itself. For a non-package submodule "a.b.c", it is "a.b". For a
+// top-level non-package module, it is "".
+static int CPyImport_SetModulePackage(PyObject *modobj, PyObject *module_name,
+ Py_ssize_t is_package) {
+ PyObject *pkg = NULL;
+ int rc = PyObject_GetOptionalAttrString(modobj, "__package__", &pkg);
+ if (rc < 0) {
+ return -1;
+ }
+ if (pkg != NULL && pkg != Py_None) {
+ Py_DECREF(pkg);
+ return 0;
+ }
+ Py_XDECREF(pkg);
+
+ PyObject *package_name = NULL;
+ if (is_package) {
+ package_name = module_name;
+ Py_INCREF(package_name);
+ } else {
+ Py_ssize_t name_len = PyUnicode_GetLength(module_name);
+ if (name_len < 0) {
+ return -1;
+ }
+ Py_ssize_t dot = PyUnicode_FindChar(module_name, '.', 0, name_len, -1);
+ if (dot >= 0) {
+ package_name = PyUnicode_Substring(module_name, 0, dot);
+ } else {
+ package_name = PyUnicode_FromString("");
+ }
+ }
+ if (package_name == NULL) {
+ return -1;
+ }
+ rc = PyObject_SetAttrString(modobj, "__package__", package_name);
+ Py_DECREF(package_name);
+ return rc;
+}
+
// Derive and set __file__ on modobj from the shared library path, module name,
// and extension suffix. Returns 0 on success, -1 on error.
static int CPyImport_SetModuleFile(PyObject *modobj, PyObject *module_name,
@@ -1505,47 +1549,7 @@ PyObject *CPyImport_ImportNative(PyObject *module_name,
goto fail;
}
- // Set __package__ before executing the module body so it is available
- // during module initialization. For a package, __package__ is the module
- // name itself. For a non-package submodule "a.b.c", it is "a.b". For a
- // top-level non-package module, it is "".
- {
- PyObject *pkg = NULL;
- if (PyObject_GetOptionalAttrString(modobj, "__package__", &pkg) < 0) {
- goto fail;
- }
- if (pkg == NULL || pkg == Py_None) {
- Py_XDECREF(pkg);
- PyObject *package_name;
- if (is_package) {
- package_name = module_name;
- Py_INCREF(package_name);
- } else if (dot >= 0) {
- package_name = PyUnicode_Substring(module_name, 0, dot);
- } else {
- package_name = PyUnicode_FromString("");
- if (package_name == NULL) {
- CPyError_OutOfMemory();
- }
- }
- if (PyObject_SetAttrString(modobj, "__package__", package_name) < 0) {
- Py_DECREF(package_name);
- goto fail;
- }
- Py_DECREF(package_name);
- } else {
- Py_DECREF(pkg);
- }
- }
-
- if (CPyImport_SetModuleFile(modobj, module_name, shared_lib_file, ext_suffix,
- is_package) < 0) {
- goto fail;
- }
- if (is_package && CPyImport_SetModulePath(modobj) < 0) {
- goto fail;
- }
- if (CPyImport_SetModuleSpec(modobj, module_name, is_package) < 0) {
+ if (CPyImport_SetDunderAttrs(modobj, module_name, shared_lib_file, ext_suffix, is_package) < 0) {
goto fail;
}
@@ -1573,10 +1577,34 @@ fail:
PyErr_Restore(exc_type, exc_val, exc_tb);
Py_XDECREF(parent_module);
Py_XDECREF(child_name);
- Py_DECREF(modobj);
+ Py_CLEAR(*module_static);
return NULL;
}
+int CPyImport_SetDunderAttrs(PyObject *module, PyObject *module_name, PyObject *shared_lib_file,
+ PyObject *ext_suffix, Py_ssize_t is_package)
+{
+ int res = CPyImport_SetModulePackage(module, module_name, is_package);
+ if (res < 0) {
+ return res;
+ }
+
+ res = CPyImport_SetModuleFile(module, module_name, shared_lib_file, ext_suffix,
+ is_package);
+ if (res < 0) {
+ return res;
+ }
+
+ if (is_package) {
+ res = CPyImport_SetModulePath(module);
+ if (res < 0) {
+ return res;
+ }
+ }
+
+ return CPyImport_SetModuleSpec(module, module_name, is_package);
+}
+
#if CPY_3_14_FEATURES
#include "internal/pycore_object.h"
=====================================
mypyc/lib-rt/mypyc_util.h
=====================================
@@ -160,6 +160,7 @@ static inline CPyTagged CPyTagged_ShortFromSsize_t(Py_ssize_t x) {
#define CPY_3_11_FEATURES (PY_VERSION_HEX >= 0x030b0000)
#define CPY_3_12_FEATURES (PY_VERSION_HEX >= 0x030c0000)
#define CPY_3_14_FEATURES (PY_VERSION_HEX >= 0x030e0000)
+#define CPY_3_15_FEATURES (PY_VERSION_HEX >= 0x030f0000)
#if CPY_3_12_FEATURES
=====================================
mypyc/test-data/commandline.test
=====================================
@@ -313,6 +313,20 @@ print(type(Eggs(obj1=pkg1.A.B())["obj1"]).__module__)
B
pkg2.mod2
+[case testCompilePackageOnlyInitPy]
+# cmd: pkg/__init__.py
+import os.path
+import pkg
+
+print(pkg.x)
+assert os.path.splitext(pkg.__file__)[1] != ".py"
+
+[file pkg/__init__.py]
+x: int = 1
+
+[out]
+1
+
[case testStrictBytesRequired]
# cmd: --no-strict-bytes a.py
=====================================
mypyc/test-data/irbuild-match.test
=====================================
@@ -48,13 +48,17 @@ def f():
r8, r9 :: object
L0:
r0 = int_eq 246, 246
- if r0 goto L3 else goto L1 :: bool
+ if r0 goto L1 else goto L2 :: bool
L1:
- r1 = int_eq 246, 912
- if r1 goto L3 else goto L2 :: bool
+ goto L5
L2:
- goto L4
+ r1 = int_eq 246, 912
+ if r1 goto L3 else goto L4 :: bool
L3:
+ goto L5
+L4:
+ goto L6
+L5:
r2 = 'matched'
r3 = builtins :: module
r4 = 'print'
@@ -63,9 +67,9 @@ L3:
r7 = load_address r6
r8 = PyObject_Vectorcall(r5, r7, 1, 0)
keep_alive r2
- goto L5
-L4:
-L5:
+ goto L7
+L6:
+L7:
r9 = box(None, 1)
return r9
@@ -86,19 +90,27 @@ def f():
r10, r11 :: object
L0:
r0 = int_eq 2, 2
- if r0 goto L5 else goto L1 :: bool
+ if r0 goto L1 else goto L2 :: bool
L1:
- r1 = int_eq 2, 4
- if r1 goto L5 else goto L2 :: bool
+ goto L9
L2:
- r2 = int_eq 2, 6
- if r2 goto L5 else goto L3 :: bool
+ r1 = int_eq 2, 4
+ if r1 goto L3 else goto L4 :: bool
L3:
- r3 = int_eq 2, 8
- if r3 goto L5 else goto L4 :: bool
+ goto L9
L4:
- goto L6
+ r2 = int_eq 2, 6
+ if r2 goto L5 else goto L6 :: bool
L5:
+ goto L9
+L6:
+ r3 = int_eq 2, 8
+ if r3 goto L7 else goto L8 :: bool
+L7:
+ goto L9
+L8:
+ goto L10
+L9:
r4 = 'matched'
r5 = builtins :: module
r6 = 'print'
@@ -107,9 +119,9 @@ L5:
r9 = load_address r8
r10 = PyObject_Vectorcall(r7, r9, 1, 0)
keep_alive r4
- goto L7
-L6:
-L7:
+ goto L11
+L10:
+L11:
r11 = box(None, 1)
return r11
@@ -280,16 +292,20 @@ L1:
r6 = load_address r5
r7 = PyObject_Vectorcall(r4, r6, 1, 0)
keep_alive r1
- goto L9
+ goto L11
L2:
r8 = int_eq 246, 4
- if r8 goto L5 else goto L3 :: bool
+ if r8 goto L3 else goto L4 :: bool
L3:
- r9 = int_eq 246, 6
- if r9 goto L5 else goto L4 :: bool
+ goto L7
L4:
- goto L6
+ r9 = int_eq 246, 6
+ if r9 goto L5 else goto L6 :: bool
L5:
+ goto L7
+L6:
+ goto L8
+L7:
r10 = 'here 2 | 3'
r11 = builtins :: module
r12 = 'print'
@@ -298,11 +314,11 @@ L5:
r15 = load_address r14
r16 = PyObject_Vectorcall(r13, r15, 1, 0)
keep_alive r10
- goto L9
-L6:
+ goto L11
+L8:
r17 = int_eq 246, 246
- if r17 goto L7 else goto L8 :: bool
-L7:
+ if r17 goto L9 else goto L10 :: bool
+L9:
r18 = 'here 123'
r19 = builtins :: module
r20 = 'print'
@@ -311,9 +327,9 @@ L7:
r23 = load_address r22
r24 = PyObject_Vectorcall(r21, r23, 1, 0)
keep_alive r18
- goto L9
-L8:
-L9:
+ goto L11
+L10:
+L11:
r25 = box(None, 1)
return r25
@@ -456,15 +472,19 @@ def f():
r10, r11 :: object
L0:
r0 = int_eq 2, 2
- if r0 goto L3 else goto L1 :: bool
+ if r0 goto L1 else goto L2 :: bool
L1:
+ goto L5
+L2:
r1 = load_address PyLong_Type
r2 = object 1
r3 = CPy_TypeCheck(r2, r1)
- if r3 goto L3 else goto L2 :: bool
-L2:
- goto L4
+ if r3 goto L3 else goto L4 :: bool
L3:
+ goto L5
+L4:
+ goto L6
+L5:
r4 = 'matched'
r5 = builtins :: module
r6 = 'print'
@@ -473,9 +493,9 @@ L3:
r9 = load_address r8
r10 = PyObject_Vectorcall(r7, r9, 1, 0)
keep_alive r4
- goto L5
-L4:
-L5:
+ goto L7
+L6:
+L7:
r11 = box(None, 1)
return r11
@@ -532,15 +552,19 @@ L0:
r0 = int_eq 2, 2
r1 = object 1
x = r1
- if r0 goto L3 else goto L1 :: bool
+ if r0 goto L1 else goto L2 :: bool
L1:
+ goto L5
+L2:
r2 = int_eq 2, 4
r3 = object 2
x = r3
- if r2 goto L3 else goto L2 :: bool
-L2:
- goto L4
+ if r2 goto L3 else goto L4 :: bool
L3:
+ goto L5
+L4:
+ goto L6
+L5:
r4 = builtins :: module
r5 = 'print'
r6 = CPyObject_GetAttr(r4, r5)
@@ -548,9 +572,9 @@ L3:
r8 = load_address r7
r9 = PyObject_Vectorcall(r6, r8, 1, 0)
keep_alive x
- goto L5
-L4:
-L5:
+ goto L7
+L6:
+L7:
r10 = box(None, 1)
return r10
@@ -809,7 +833,7 @@ L0:
r1 = PyObject_IsInstance(x, r0)
r2 = r1 >= 0 :: signed
r3 = truncate r1: i32 to builtins.bool
- if r3 goto L1 else goto L5 :: bool
+ if r3 goto L1 else goto L7 :: bool
L1:
r4 = 'num'
r5 = CPyObject_GetAttr(x, r4)
@@ -818,17 +842,21 @@ L1:
r8 = PyObject_IsTrue(r7)
r9 = r8 >= 0 :: signed
r10 = truncate r8: i32 to builtins.bool
- if r10 goto L4 else goto L2 :: bool
+ if r10 goto L2 else goto L3 :: bool
L2:
+ goto L6
+L3:
r11 = object 2
r12 = PyObject_RichCompare(r5, r11, 2)
r13 = PyObject_IsTrue(r12)
r14 = r13 >= 0 :: signed
r15 = truncate r13: i32 to builtins.bool
- if r15 goto L4 else goto L3 :: bool
-L3:
- goto L5
+ if r15 goto L4 else goto L5 :: bool
L4:
+ goto L6
+L5:
+ goto L7
+L6:
r16 = 'matched'
r17 = builtins :: module
r18 = 'print'
@@ -837,11 +865,12 @@ L4:
r21 = load_address r20
r22 = PyObject_Vectorcall(r19, r21, 1, 0)
keep_alive r16
- goto L6
-L5:
-L6:
+ goto L8
+L7:
+L8:
r23 = box(None, 1)
return r23
+
[case testAsPatternDoesntBleedIntoSubPatterns_python3_10]
class C:
__match_args__ = ("a", "b")
=====================================
mypyc/test-data/run-base64.test
=====================================
@@ -204,6 +204,11 @@ def test_urlsafe_b64decode_errors() -> None:
for b in b"eA", b"eA=", b"eHk":
with assertRaises(ValueError):
b64decode(b)
+[out version>=3.15]
+driver.py:28: FutureWarning: invalid character '+' in URL-safe Base64 data will be discarded in future Python versions
+ test_func()
+driver.py:28: FutureWarning: invalid character '/' in URL-safe Base64 data will be discarded in future Python versions
+ test_func()
[case testBase64UsedAtTopLevelOnly_librt]
from librt.base64 import b64encode
=====================================
mypyc/test-data/run-match.test
=====================================
@@ -230,6 +230,27 @@ test 21 ('')
test 21 (' as well')
test sequence final
test final
+
+[case testMatchOrSequencePattern_python3_10]
+def f(x: tuple[str, str]) -> str:
+ match x:
+ case ("X", "Y") | ("X", "Z"):
+ return "THERE"
+ case _:
+ return "OTHER"
+
+[file driver.py]
+from native import f
+
+print(f(("X", "Y")))
+print(f(("X", "Z")))
+print(f(("X", "A")))
+
+[out]
+THERE
+THERE
+OTHER
+
[case testCustomMappingAndSequenceObjects_python3_10]
def f(x: object) -> None:
match x:
=====================================
mypyc/test-data/run-misc.test
=====================================
@@ -971,7 +971,10 @@ print(z)
[case testCheckVersion]
import sys
-if sys.version_info[:2] == (3, 14):
+if sys.version_info[:2] == (3, 15):
+ def version() -> int:
+ return 15
+elif sys.version_info[:2] == (3, 14):
def version() -> int:
return 14
elif sys.version_info[:2] == (3, 13):
=====================================
mypyc/test-data/run-multimodule.test
=====================================
@@ -473,6 +473,38 @@ globals()['A'] = None
[file driver.py]
import other_main
+[case testNonNativeImportInPackageFile]
+# The import is really non-native only in separate compilation mode where __init__.py and
+# other_cache.py are in different libraries and the import uses the standard Python procedure.
+# Python imports are resolved using __path__ and __spec__ from the package file so this checks
+# that they are set up correctly.
+[file other/__init__.py]
+from other.other_cache import Cache
+
+x = 1
+[file other/other_cache.py]
+class Cache:
+ pass
+
+[file driver.py]
+import other
+
+[case testRelativeImportInPackageFile]
+# Relative imports from a compiled package __init__ depend on package metadata being
+# available while the package module body is executing.
+[file other/__init__.py]
+assert __package__ == "other"
+from .other_cache import Cache
+
+x = 1
+[file other/other_cache.py]
+class Cache:
+ pass
+
+[file driver.py]
+import other
+assert other.Cache.__name__ == "Cache"
+
[case testMultiModuleSameNames]
# Use same names in both modules
import other
=====================================
mypyc/test-data/run-python312.test
=====================================
@@ -199,6 +199,7 @@ EnumLiteralAlias3 = Literal[SomeEnum.AVALUE] | None
[typing fixtures/typing-full.pyi]
[case testPEP695GenericTypeAlias]
+import sys
from typing import Callable
from types import GenericAlias
@@ -208,24 +209,36 @@ type A[T] = list[T]
def test_generic_alias() -> None:
assert type(A[str]) is GenericAlias
- assert str(A[str]) == "A[str]"
+ if sys.version_info >= (3, 15): # type: ignore[operator]
+ assert str(A[str]) == "_frozen_importlib.A[str]"
+ else:
+ assert str(A[str]) == "A[str]"
assert str(getattr(A, "__value__")) == "list[T]"
type B[T, S] = dict[S, T]
def test_generic_alias_with_two_args() -> None:
- assert str(B[str, int]) == "B[str, int]"
+ if sys.version_info >= (3, 15): # type: ignore[operator]
+ assert str(B[str, int]) == "_frozen_importlib.B[str, int]"
+ else:
+ assert str(B[str, int]) == "B[str, int]"
assert str(getattr(B, "__value__")) == "dict[S, T]"
type C[*Ts] = tuple[*Ts]
def test_type_var_tuple_type_alias() -> None:
- assert str(C[int, str]) == "C[int, str]"
+ if sys.version_info >= (3, 15): # type: ignore[operator]
+ assert str(C[int, str]) == "_frozen_importlib.C[int, str]"
+ else:
+ assert str(C[int, str]) == "C[int, str]"
assert str(getattr(C, "__value__")) == "tuple[typing.Unpack[Ts]]"
type D[**P] = Callable[P, int]
def test_param_spec_type_alias() -> None:
- assert str(D[[int, str]]) == "D[[int, str]]"
+ if sys.version_info >= (3, 15): # type: ignore[operator]
+ assert str(D[[int, str]]) == "_frozen_importlib.D[[int, str]]"
+ else:
+ assert str(D[[int, str]]) == "D[[int, str]]"
assert str(getattr(D, "__value__")) == "typing.Callable[P, int]"
[typing fixtures/typing-full.pyi]
=====================================
pyproject.toml
=====================================
@@ -5,7 +5,8 @@ requires = [
# self-typechecking :/
"setuptools >= 77.0.3",
# the following is from mypy-requirements.txt/setup.py
- "typing_extensions>=4.6.0",
+ "typing_extensions>=4.6.0; python_version<'3.15'",
+ "typing_extensions>=4.14.0; python_version>='3.15'",
"mypy_extensions>=1.0.0",
"pathspec>=1.0.0",
"tomli>=1.1.0; python_version<'3.11'",
@@ -49,7 +50,8 @@ classifiers = [
requires-python = ">=3.10"
dependencies = [
# When changing this, also update build-system.requires and mypy-requirements.txt
- "typing_extensions>=4.6.0",
+ "typing_extensions>=4.6.0; python_version<'3.15'",
+ "typing_extensions>=4.14.0; python_version>='3.15'",
"mypy_extensions>=1.0.0",
"pathspec>=1.0.0",
"tomli>=1.1.0; python_version<'3.11'",
=====================================
test-data/unit/check-isinstance.test
=====================================
@@ -1563,6 +1563,61 @@ def f(x: Union[int, str], typ: type) -> None:
reveal_type(x) # N: Revealed type is "builtins.int | builtins.str"
[builtins fixtures/isinstancelist.pyi]
+[case testIsInstanceWithUnknownTypeMultipleNarrowing]
+# flags: --strict-equality --warn-unreachable --python-version 3.10
+from __future__ import annotations
+from typing import Iterable
+from typing_extensions import TypeAlias
+import types
+
+# Regression test for https://github.com/python/mypy/issues/21181
+# We don't have the same type context as with the real stubs, so sort of fake it
+_ClassInfoLike: TypeAlias = "type | tuple[_ClassInfoLike, ...]"
+
+class A: ...
+class B(A): ...
+
+def fake_type_context(ts: list[type[A]]) -> _ClassInfoLike:
+ return tuple(ts) # E: Too many arguments for "tuple"
+
+
+def f1(x: A | None) -> None:
+ if x is not None:
+ reveal_type(x) # N: Revealed type is "__main__.A"
+ if isinstance(x, object):
+ reveal_type(x) # N: Revealed type is "__main__.A"
+
+
+def f2(x: A | None, ts: list[type[A]]) -> None:
+ if x is not None:
+ reveal_type(x) # N: Revealed type is "__main__.A"
+ if isinstance(x, fake_type_context(ts)):
+ reveal_type(x) # N: Revealed type is "__main__.A"
+
+
+def f3(x: A | None, t: type | type[A]) -> None:
+ if x is not None:
+ reveal_type(x) # N: Revealed type is "__main__.A"
+ if isinstance(x, t):
+ reveal_type(x) # N: Revealed type is "__main__.A"
+
+
+def f4(x: A | None, t: type) -> None:
+ if x is not None:
+ reveal_type(x) # N: Revealed type is "__main__.A"
+ if isinstance(x, t):
+ reveal_type(x) # N: Revealed type is "__main__.A"
+
+
+def f5(x: object | None, ta: type[A], tb: type[B]) -> None:
+ if x is not None:
+ reveal_type(x) # N: Revealed type is "builtins.object"
+ if isinstance(x, ta):
+ reveal_type(x) # N: Revealed type is "__main__.A"
+ if isinstance(x, tb):
+ reveal_type(x) # N: Revealed type is "__main__.B"
+[builtins fixtures/isinstancelist.pyi]
+
[case testIsInstanceWithBoundedType]
# flags: --warn-unreachable
from typing import Union, Type
@@ -3050,7 +3105,7 @@ if hasattr(mod, "y"):
def __getattr__(attr: str) -> str: ...
[builtins fixtures/module.pyi]
-[case testMultipleHasAttr-xfail]
+[case testMultipleHasAttr]
# flags: --warn-unreachable
# https://github.com/python/mypy/issues/20596
from __future__ import annotations
=====================================
test-data/unit/check-narrowing.test
=====================================
@@ -3993,3 +3993,44 @@ def f2(func: Callable[..., T], arg: str) -> T:
return func(arg)
return func(arg)
[builtins fixtures/primitives.pyi]
+
+
+[case testNarrowGenericCallableEquality]
+# flags: --strict-equality --warn-unreachable
+from typing import Callable, TypeVar
+
+S = TypeVar("S")
+T = TypeVar("T")
+
+def identity(x: T) -> T:
+ return x
+
+def msg(cmp_property: Callable[[T], S]) -> None:
+ if cmp_property == identity:
+ # TODO: the swapping of these reveals is not ideal
+ reveal_type(cmp_property) # N: Revealed type is "def [T] (x: T`-1) -> T`-1"
+ reveal_type(identity) # N: Revealed type is "def (T`-1) -> S`-2"
+ return
+[builtins fixtures/primitives.pyi]
+
+
+[case testPropagatedParentNarrowingMeet]
+# flags: --strict-equality --warn-unreachable
+from __future__ import annotations
+
+class A:
+ tag: int
+
+class B:
+ tag: int
+ name = "b"
+
+class C:
+ tag: str
+
+def stringify(value: A | B | C) -> str:
+ if isinstance(value.tag, int) and isinstance(value, B):
+ reveal_type(value) # N: Revealed type is "__main__.B"
+ return value.name
+ return ""
+[builtins fixtures/tuple.pyi]
=====================================
test-data/unit/check-python310.test
=====================================
@@ -3560,9 +3560,9 @@ class B(TypedDict):
num: int
d: A | B
-match d["tag"]: # E: Match statement has unhandled case for values of type "Literal['b']" \
+match d["tag"]: # E: Match statement has unhandled case for values of type "B" \
# N: If match statement is intended to be non-exhaustive, add `case _: pass` \
- # E: Match statement has unhandled case for values of type "B"
+ # E: Match statement has unhandled case for values of type "Literal['b']"
case "a":
reveal_type(d) # N: Revealed type is "TypedDict('__main__.A', {'tag': Literal['a'], 'name': builtins.str})"
reveal_type(d["name"]) # N: Revealed type is "builtins.str"
View it on GitLab: https://salsa.debian.org/python-team/packages/mypy/-/commit/2c374ff5839b6213d6944cbc4523cc8b4a7271b0
--
View it on GitLab: https://salsa.debian.org/python-team/packages/mypy/-/commit/2c374ff5839b6213d6944cbc4523cc8b4a7271b0
You're receiving this email because of your account on salsa.debian.org. Manage all notifications: https://salsa.debian.org/-/profile/notifications | Help: https://salsa.debian.org/help
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-med-commit/attachments/20260423/c32ef65b/attachment-0001.htm>
More information about the debian-med-commit
mailing list