Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 6 additions & 22 deletions mypy/checker.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,7 @@
RefExpr,
ReturnStmt,
SetExpr,
SplittingVisitor,
StarExpr,
Statement,
StrExpr,
Expand Down Expand Up @@ -319,7 +320,7 @@ def __exit__(self, exc_type: object, exc_val: object, exc_tb: object) -> Literal
return False


class TypeChecker(NodeVisitor[None], TypeCheckerSharedApi):
class TypeChecker(NodeVisitor[None], TypeCheckerSharedApi, SplittingVisitor):
"""Mypy type checker.

Type check mypy source files that have been semantically analyzed.
Expand Down Expand Up @@ -454,9 +455,6 @@ def __init__(
or self.path in self.msg.errors.ignored_files
or (self.options.test_env and self.is_typeshed_stub)
)

# If True, process function definitions. If False, don't. This is used
# for processing module top levels in fine-grained incremental mode.
self.recurse_into_functions = True
# This internal flag is used to track whether we a currently type-checking
# a final declaration (assignment), so that some errors should be suppressed.
Expand Down Expand Up @@ -719,23 +717,10 @@ def accept_loop(
# Definitions
#

@contextmanager
def set_recurse_into_functions(self) -> Iterator[None]:
"""Temporarily set recurse_into_functions to True.

This is used to process top-level functions/methods as a whole.
"""
old_recurse_into_functions = self.recurse_into_functions
self.recurse_into_functions = True
try:
yield
finally:
self.recurse_into_functions = old_recurse_into_functions

def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
# If a function/method can infer variable types, it should be processed as part
# of the module top level (i.e. module interface).
if not self.recurse_into_functions and not defn.can_infer_vars:
if not self.recurse_into_functions and not defn.def_or_infer_vars:
return
with self.tscope.function_scope(defn), self.set_recurse_into_functions():
self._visit_overloaded_func_def(defn)
Expand Down Expand Up @@ -1211,7 +1196,7 @@ def get_generator_return_type(self, return_type: Type, is_coroutine: bool) -> Ty
return NoneType()

def visit_func_def(self, defn: FuncDef) -> None:
if not self.recurse_into_functions and not defn.can_infer_vars:
if not self.recurse_into_functions and not defn.def_or_infer_vars:
return
with self.tscope.function_scope(defn), self.set_recurse_into_functions():
self.check_func_item(defn, name=defn.name)
Expand Down Expand Up @@ -1452,8 +1437,7 @@ def check_func_def(
not self.can_skip_diagnostics
or self.options.preserve_asts
or not isinstance(defn, FuncDef)
or defn.has_self_attr_def
or defn.can_infer_vars
or defn.def_or_infer_vars
):
self.accept(item.body)
unreachable = self.binder.is_unreachable()
Expand Down Expand Up @@ -5620,7 +5604,7 @@ def visit_decorator(self, e: Decorator) -> None:
def visit_decorator_inner(
self, e: Decorator, allow_empty: bool = False, skip_first_item: bool = False
) -> None:
if self.recurse_into_functions or e.func.can_infer_vars:
if self.recurse_into_functions or e.func.def_or_infer_vars:
with self.tscope.function_scope(e.func), self.set_recurse_into_functions():
self.check_func_item(e.func, name=e.func.name, allow_empty=allow_empty)

Expand Down
32 changes: 25 additions & 7 deletions mypy/nodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from abc import abstractmethod
from collections import defaultdict
from collections.abc import Callable, Iterator, Sequence
from contextlib import contextmanager
from enum import Enum, unique
from typing import (
TYPE_CHECKING,
Expand Down Expand Up @@ -659,7 +660,7 @@ class FuncBase(Node):
"is_final", # Uses "@final"
"is_explicit_override", # Uses "@override"
"is_type_check_only", # Uses "@type_check_only"
"can_infer_vars",
"def_or_infer_vars",
"_fullname",
)

Expand All @@ -680,8 +681,8 @@ def __init__(self) -> None:
self.is_final = False
self.is_explicit_override = False
self.is_type_check_only = False
# Can this function/method infer types of variables defined outside? Currently,
# we only set this in cases like:
# Can this function/method define variables or infer variables defined outside?
# In particular, we set this in cases like:
# x = None
# def foo() -> None:
# global x
Expand All @@ -691,7 +692,7 @@ def __init__(self) -> None:
# x = None
# def foo(self) -> None:
# self.x = 1
self.can_infer_vars = False
self.def_or_infer_vars = False
# Name with module prefix
self._fullname = ""

Expand Down Expand Up @@ -1035,7 +1036,6 @@ class FuncDef(FuncItem, SymbolNode, Statement):
"original_def",
"is_trivial_body",
"is_trivial_self",
"has_self_attr_def",
"is_mypy_only",
# Present only when a function is decorated with @typing.dataclass_transform or similar
"dataclass_transform_spec",
Expand Down Expand Up @@ -1074,8 +1074,6 @@ def __init__(
# the majority). In cases where self is not annotated and there are no Self
# in the signature we can simply drop the first argument.
self.is_trivial_self = False
# Keep track of functions where self attributes are defined.
self.has_self_attr_def = False
# This is needed because for positional-only arguments the name is set to None,
# but we sometimes still want to show it in error messages.
if arguments:
Expand Down Expand Up @@ -5089,6 +5087,26 @@ def read(cls, data: ReadBuffer) -> DataclassTransformSpec:
return ret


@trait
class SplittingVisitor:
    """Mixin for visitors that split processing between module top levels and functions.

    NOTE(review): in this diff it is mixed into both the semantic analyzer and the
    type checker so they share the recurse_into_functions bookkeeping.
    """

    # If True, process function definitions. If False, don't. This is used
    # for processing module top levels in fine-grained incremental mode.
    recurse_into_functions: bool

    @contextmanager
    def set_recurse_into_functions(self) -> Iterator[None]:
        """Temporarily set recurse_into_functions to True.

        This is used to process top-level functions/methods as a whole.
        """
        # Save the current value so nested uses restore the previous state.
        old_recurse_into_functions = self.recurse_into_functions
        self.recurse_into_functions = True
        try:
            yield
        finally:
            # Restore the saved value even if the body raises.
            self.recurse_into_functions = old_recurse_into_functions


def get_flags(node: Node, names: list[str]) -> list[str]:
    """Return the subset of *names* whose attribute on *node* is truthy."""
    flags: list[str] = []
    for name in names:
        if getattr(node, name):
            flags.append(name)
    return flags

Expand Down
27 changes: 13 additions & 14 deletions mypy/semanal.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,6 +159,7 @@
SetComprehension,
SetExpr,
SliceExpr,
SplittingVisitor,
StarExpr,
Statement,
StrExpr,
Expand Down Expand Up @@ -372,7 +373,7 @@


class SemanticAnalyzer(
NodeVisitor[None], SemanticAnalyzerInterface, SemanticAnalyzerPluginInterface
NodeVisitor[None], SemanticAnalyzerInterface, SemanticAnalyzerPluginInterface, SplittingVisitor
):
"""Semantically analyze parsed mypy files.

Expand Down Expand Up @@ -497,8 +498,6 @@ def __init__(
self.incomplete_namespaces = incomplete_namespaces
self.all_exports: list[str] = []
self.plugin = plugin
# If True, process function definitions. If False, don't. This is used
# for processing module top levels in fine-grained incremental mode.
self.recurse_into_functions = True
self.scope = Scope()

Expand Down Expand Up @@ -981,10 +980,10 @@ def visit_func_def(self, defn: FuncDef) -> None:
if not defn.is_decorated and not defn.is_overload:
self.add_function_to_symbol_table(defn)

if not self.recurse_into_functions:
if not self.recurse_into_functions and not defn.def_or_infer_vars:
return

with self.scope.function_scope(defn):
with self.scope.function_scope(defn), self.set_recurse_into_functions():
with self.inside_except_star_block_set(value=False):
self.analyze_func_def(defn)

Expand Down Expand Up @@ -1272,14 +1271,14 @@ def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
self.statement = defn
self.add_function_to_symbol_table(defn)

if not self.recurse_into_functions:
if not self.recurse_into_functions and not defn.def_or_infer_vars:
return

# NB: Since _visit_overloaded_func_def will call accept on the
# underlying FuncDefs, the function might get entered twice.
# This is fine, though, because only the outermost function is
# used to compute targets.
with self.scope.function_scope(defn):
with self.scope.function_scope(defn), self.set_recurse_into_functions():
self.analyze_overloaded_func_def(defn)

@contextmanager
Expand Down Expand Up @@ -1809,8 +1808,9 @@ def visit_decorator(self, dec: Decorator) -> None:
dec.var.is_initialized_in_class = True
if no_type_check:
erase_func_annotations(dec.func)
if not no_type_check and self.recurse_into_functions:
dec.func.accept(self)
if not no_type_check and (self.recurse_into_functions or dec.func.def_or_infer_vars):
with self.set_recurse_into_functions():
dec.func.accept(self)
if could_be_decorated_property and dec.decorators and dec.var.is_property:
self.fail(
"Decorators on top of @property are not supported", dec, code=PROPERTY_DECORATOR
Expand Down Expand Up @@ -4597,7 +4597,7 @@ def make_name_lvalue_point_to_existing_def(
and original_def.node.is_inferred
):
for func in self.scope.functions:
func.can_infer_vars = True
func.def_or_infer_vars = True

def analyze_tuple_or_list_lvalue(self, lval: TupleExpr, explicit_type: bool = False) -> None:
"""Analyze an lvalue or assignment target that is a list or tuple."""
Expand Down Expand Up @@ -4682,16 +4682,15 @@ def analyze_member_lvalue(
# TODO: should we also set lval.kind = MDEF?
self.type.names[lval.name] = SymbolTableNode(MDEF, v, implicit=True)
for func in self.scope.functions:
if isinstance(func, FuncDef):
func.has_self_attr_def = True
func.def_or_infer_vars = True
if (
cur_node
and isinstance(cur_node.node, Var)
and cur_node.node.is_inferred
and cur_node.node.is_initialized_in_class
):
for func in self.scope.functions:
func.can_infer_vars = True
func.def_or_infer_vars = True
self.check_lvalue_validity(lval.node, lval)

def is_self_member_ref(self, memberexpr: MemberExpr) -> bool:
Expand Down Expand Up @@ -7561,7 +7560,7 @@ def already_defined(

if isinstance(original_ctx, SymbolTableNode) and isinstance(original_ctx.node, MypyFile):
# Since this is an import, original_ctx.node points to the module definition.
# Therefore its line number is always 1, which is not useful for this
# Therefore, its line number is always 1, which is not useful for this
# error message.
extra_msg = " (by an import)"
elif node and node.line != -1 and self.is_local_name(node.fullname):
Expand Down
39 changes: 6 additions & 33 deletions mypy/semanal_main.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@
import mypy.state
from mypy.checker import FineGrainedDeferredNode
from mypy.errors import Errors
from mypy.nodes import Decorator, FuncDef, MypyFile, OverloadedFuncDef, TypeInfo, Var
from mypy.nodes import Decorator, FuncDef, MypyFile, OverloadedFuncDef, TypeInfo
from mypy.options import Options
from mypy.plugin import ClassDefContext
from mypy.plugins import dataclasses as dataclasses_plugin
Expand All @@ -52,7 +52,6 @@
from mypy.semanal_infer import infer_decorator_signature_if_simple
from mypy.semanal_shared import find_dataclass_transform_spec
from mypy.semanal_typeargs import TypeArgumentAnalyzer
from mypy.server.aststrip import SavedAttributes

if TYPE_CHECKING:
from mypy.build import Graph, State
Expand Down Expand Up @@ -129,23 +128,18 @@ def cleanup_builtin_scc(state: State) -> None:


def semantic_analysis_for_targets(
state: State, nodes: list[FineGrainedDeferredNode], graph: Graph, saved_attrs: SavedAttributes
state: State, nodes: list[FineGrainedDeferredNode], graph: Graph
) -> None:
"""Semantically analyze only selected nodes in a given module.

This essentially mirrors the logic of semantic_analysis_for_scc()
except that we process only some targets. This is used in fine grained
except that we process only some targets. This is used in fine-grained
incremental mode, when propagating an update.

The saved_attrs are implicitly declared instance attributes (attributes
defined on self) removed by AST stripper that may need to be reintroduced
here. They must be added before any methods are analyzed.
"""
patches: Patches = []
if any(isinstance(n.node, MypyFile) for n in nodes):
# Process module top level first (if needed).
process_top_levels(graph, [state.id], patches)
restore_saved_attrs(saved_attrs)
analyzer = state.manager.semantic_analyzer
for n in nodes:
if isinstance(n.node, MypyFile):
Expand All @@ -160,30 +154,6 @@ def semantic_analysis_for_targets(
calculate_class_properties(graph, [state.id], state.manager.errors)


def restore_saved_attrs(saved_attrs: SavedAttributes) -> None:
    """Restore instance variables removed during AST strip that haven't been added yet.

    NOTE(review): saved_attrs maps (class definition, attribute name) pairs to the
    symbol table node that the AST stripper removed — confirm against the
    SavedAttributes declaration in mypy/server/aststrip.py.
    """
    for (cdef, name), sym in saved_attrs.items():
        info = cdef.info
        # Look the name up through the class MRO (may find it in a superclass)...
        existing = info.get(name)
        # ...but also check whether it is defined directly in this class body.
        defined_in_this_class = name in info.names
        assert isinstance(sym.node, Var)
        # This needs to mimic the logic in SemanticAnalyzer.analyze_member_lvalue()
        # regarding the existing variable in class body or in a superclass:
        # If the attribute of self is not defined in superclasses, create a new Var.
        if (
            existing is None
            or
            # (An abstract Var is considered as not defined.)
            (isinstance(existing.node, Var) and existing.node.is_abstract_var)
            or
            # Also an explicit declaration on self creates a new Var unless
            # there is already one defined in the class body.
            sym.node.explicit_self_type
            and not defined_in_this_class
        ):
            info.names[name] = sym


def process_top_levels(graph: Graph, scc: list[str], patches: Patches) -> None:
# Process top levels until everything has been bound.

Expand Down Expand Up @@ -240,6 +210,9 @@ def process_top_levels(graph: Graph, scc: list[str], patches: Patches) -> None:
# processing the same target twice in a row, which is inefficient.
worklist = list(reversed(all_deferred))
final_iteration = not any_progress
# Functions/methods that define/infer attributes are processed as part of top-levels.
# We need to clear the locals for those between fine-grained iterations.
analyzer.saved_locals.clear()


def order_by_subclassing(targets: list[FullTargetInfo]) -> Iterator[FullTargetInfo]:
Expand Down
Loading
Loading