From d126ce4c67d762920911623bce287c106010d941 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 20 Jan 2017 13:30:12 +0000 Subject: [PATCH 01/44] Add foundation for fine-grained incremental type checking * Add AST diff * Add fine-grained dependency generation * Add AST merge for merging old and new ASTs for a module * Update AST dump to show various object identities * Add various tests --- mypy/build.py | 14 +- mypy/checker.py | 81 ++-- mypy/errors.py | 10 +- mypy/nodes.py | 41 +- mypy/semanal.py | 60 ++- mypy/server/__init__.py | 0 mypy/server/astdiff.py | 212 ++++++++++ mypy/server/astmerge.py | 219 ++++++++++ mypy/server/aststrip.py | 49 +++ mypy/server/deps.py | 188 +++++++++ mypy/server/subexpr.py | 136 +++++++ mypy/server/trigger.py | 5 + mypy/server/update.py | 272 +++++++++++++ mypy/strconv.py | 92 ++++- mypy/test/testdeps.py | 64 +++ mypy/test/testdiff.py | 72 ++++ mypy/test/testfinegrained.py | 112 +++++ mypy/test/testmerge.py | 185 +++++++++ mypy/traverser.py | 11 +- mypy/types.py | 11 +- mypy/util.py | 56 +-- runtests.py | 7 +- test-data/unit/deps.test | 71 ++++ test-data/unit/diff.test | 125 ++++++ test-data/unit/fine-grained.test | 174 ++++++++ test-data/unit/fixtures/fine_grained.pyi | 24 ++ test-data/unit/merge.test | 495 +++++++++++++++++++++++ 27 files changed, 2687 insertions(+), 99 deletions(-) create mode 100644 mypy/server/__init__.py create mode 100644 mypy/server/astdiff.py create mode 100644 mypy/server/astmerge.py create mode 100644 mypy/server/aststrip.py create mode 100644 mypy/server/deps.py create mode 100644 mypy/server/subexpr.py create mode 100644 mypy/server/trigger.py create mode 100644 mypy/server/update.py create mode 100644 mypy/test/testdeps.py create mode 100644 mypy/test/testdiff.py create mode 100644 mypy/test/testfinegrained.py create mode 100644 mypy/test/testmerge.py create mode 100644 test-data/unit/deps.test create mode 100644 test-data/unit/diff.test create mode 100644 test-data/unit/fine-grained.test create 
mode 100644 test-data/unit/fixtures/fine_grained.pyi create mode 100644 test-data/unit/merge.test diff --git a/mypy/build.py b/mypy/build.py index 51e82e6a8543..1419a843ba2f 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -65,8 +65,9 @@ class BuildResult: errors: List of error messages. """ - def __init__(self, manager: 'BuildManager') -> None: + def __init__(self, manager: 'BuildManager', graph: Graph) -> None: self.manager = manager + self.graph = graph self.files = manager.modules self.types = manager.all_types self.errors = manager.errors.messages() @@ -184,8 +185,8 @@ def build(sources: List[BuildSource], ) try: - dispatch(sources, manager) - return BuildResult(manager) + graph = dispatch(sources, manager) + return BuildResult(manager, graph) finally: manager.log("Build finished in %.3f seconds with %d modules, %d types, and %d errors" % (time.time() - manager.start_time, @@ -1545,20 +1546,21 @@ def write_cache(self) -> None: self.interface_hash = new_interface_hash -def dispatch(sources: List[BuildSource], manager: BuildManager) -> None: +def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph: manager.log("Mypy version %s" % __version__) graph = load_graph(sources, manager) if not graph: print("Nothing to do?!") - return + return graph manager.log("Loaded graph with %d nodes" % len(graph)) if manager.options.dump_graph: dump_graph(graph) - return + return graph process_graph(graph, manager) if manager.options.warn_unused_ignores: # TODO: This could also be a per-module option. manager.errors.generate_unused_ignore_notes() + return graph class NodeInfo: diff --git a/mypy/checker.py b/mypy/checker.py index 431d2d38aabe..f79b0ba09cc8 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -65,13 +65,15 @@ LAST_PASS = 1 # Pass numbers start at 0 -# A node which is postponed to be type checked during the next pass. +# A node which is postponed to be processed during the next pass. 
+# This is used for both batch mode and fine-grained incremental mode. DeferredNode = NamedTuple( 'DeferredNode', [ - ('node', FuncItem), + ('node', Union[FuncDef, MypyFile]), # In batch mode only FuncDef is supported ('context_type_name', Optional[str]), # Name of the surrounding class (for error messages) - ('active_class', Optional[Type]), # And its type (for selftype handling) + ('active_typeinfo', Optional[TypeInfo]), # And its TypeInfo (for semantic analysis + # self type handling) ]) @@ -187,19 +189,20 @@ def check_first_pass(self) -> None: self.fail(messages.ALL_MUST_BE_SEQ_STR.format(str_seq_s, all_s), all_.node) - def check_second_pass(self) -> bool: + def check_second_pass(self, todo: List[DeferredNode] = None) -> bool: """Run second or following pass of type checking. This goes through deferred nodes, returning True if there were any. """ - if not self.deferred_nodes: + if not todo and not self.deferred_nodes: return False self.errors.set_file(self.path) self.pass_num += 1 - todo = self.deferred_nodes + if not todo: + todo = self.deferred_nodes self.deferred_nodes = [] - done = set() # type: Set[FuncItem] - for node, type_name, active_class in todo: + done = set() # type: Set[Union[FuncDef, MypyFile]] + for node, type_name, active_typeinfo in todo: if node in done: continue # This is useful for debugging: @@ -207,15 +210,28 @@ def check_second_pass(self) -> bool: # (self.pass_num, type_name, node.fullname() or node.name())) done.add(node) with self.errors.enter_type(type_name) if type_name else nothing(): - with self.scope.push_class(active_class) if active_class else nothing(): - if isinstance(node, Statement): - self.accept(node) - elif isinstance(node, Expression): - self.expr_checker.accept(node) - else: - assert False + with self.scope.push_class(active_typeinfo) if active_typeinfo else nothing(): + self.check_partial(node) return True + def check_partial(self, node: Union[FuncDef, MypyFile]) -> None: + if isinstance(node, MypyFile): + 
self.check_top_level(node) + else: + self.accept(node) + + def check_top_level(self, node: MypyFile) -> None: + """Check only the top-level of a module, skipping function definitions.""" + with self.enter_partial_types(): + with self.binder.top_frame_context(): + for d in node.defs: + # TODO: Type check class bodies. + if not isinstance(d, (FuncDef, ClassDef)): + d.accept(self) + + assert not self.current_node_deferred + # TODO: Handle __all__ + def handle_cannot_determine_type(self, name: str, context: Context) -> None: node = self.scope.top_function() if self.pass_num < LAST_PASS and node is not None: @@ -635,7 +651,7 @@ def is_implicit_any(t: Type) -> bool: for i in range(len(typ.arg_types)): arg_type = typ.arg_types[i] - ref_type = self.scope.active_class() + ref_type = self.scope.active_self_type() # type: Type if (isinstance(defn, FuncDef) and ref_type is not None and i == 0 and not defn.is_static and typ.arg_kinds[0] not in [nodes.ARG_STAR, nodes.ARG_STAR2]): @@ -945,7 +961,7 @@ def check_method_override_for_base_with_name( # The name of the method is defined in the base class. # Construct the type of the overriding method. - typ = bind_self(self.function_type(defn), self.scope.active_class()) + typ = bind_self(self.function_type(defn), self.scope.active_self_type()) # Map the overridden method type to subtype context so that # it can be checked for compatibility. original_type = base_attr.type @@ -958,7 +974,7 @@ def check_method_override_for_base_with_name( assert False, str(base_attr.node) if isinstance(original_type, FunctionLike): original = map_type_from_supertype( - bind_self(original_type, self.scope.active_class()), + bind_self(original_type, self.scope.active_self_type()), defn.info, base) # Check that the types are compatible. 
# TODO overloaded signatures @@ -1050,7 +1066,7 @@ def visit_class_def(self, defn: ClassDef) -> None: old_binder = self.binder self.binder = ConditionalTypeBinder() with self.binder.top_frame_context(): - with self.scope.push_class(fill_typevars(defn.info)): + with self.scope.push_class(defn.info): self.accept(defn.defs) self.binder = old_binder if not defn.has_incompatible_baseclass: @@ -1316,8 +1332,8 @@ def check_compatibility_super(self, lvalue: NameExpr, lvalue_type: Type, rvalue: # Class-level function objects and classmethods become bound # methods: the former to the instance, the latter to the # class - base_type = bind_self(base_type, self.scope.active_class()) - compare_type = bind_self(compare_type, self.scope.active_class()) + base_type = bind_self(base_type, self.scope.active_self_type()) + compare_type = bind_self(compare_type, self.scope.active_self_type()) # If we are a static method, ensure to also tell the # lvalue it now contains a static method @@ -1346,7 +1362,8 @@ def lvalue_type_from_base(self, expr_node: Var, if base_type: if not has_no_typevars(base_type): - instance = cast(Instance, self.scope.active_class()) + # TODO: Handle TupleType, don't cast + instance = cast(Instance, self.scope.active_self_type()) itype = map_instance_to_supertype(instance, base) base_type = expand_type_by_instance(base_type, itype) @@ -2995,31 +3012,37 @@ def is_node_static(node: Node) -> Optional[bool]: class Scope: # We keep two stacks combined, to maintain the relative order - stack = None # type: List[Union[Type, FuncItem, MypyFile]] + stack = None # type: List[Union[TypeInfo, FuncDef, MypyFile]] def __init__(self, module: MypyFile) -> None: self.stack = [module] - def top_function(self) -> Optional[FuncItem]: + def top_function(self) -> Optional[FuncDef]: for e in reversed(self.stack): if isinstance(e, FuncItem): return e return None - def active_class(self) -> Optional[Type]: - if isinstance(self.stack[-1], Type): + def active_class(self) -> 
Optional[TypeInfo]: + if isinstance(self.stack[-1], TypeInfo): return self.stack[-1] return None + def active_self_type(self) -> Optional[Union[Instance, TupleType]]: + info = self.active_class() + if info: + return fill_typevars(info) + return None + @contextmanager - def push_function(self, item: FuncItem) -> Iterator[None]: + def push_function(self, item: FuncDef) -> Iterator[None]: self.stack.append(item) yield self.stack.pop() @contextmanager - def push_class(self, t: Type) -> Iterator[None]: - self.stack.append(t) + def push_class(self, info: TypeInfo) -> Iterator[None]: + self.stack.append(info) yield self.stack.pop() diff --git a/mypy/errors.py b/mypy/errors.py index 3e66e29e6f5c..30527c19ab02 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -111,6 +111,11 @@ class Errors: def __init__(self, show_error_context: bool = False, show_column_numbers: bool = False) -> None: + self.show_error_context = show_error_context + self.show_column_numbers = show_column_numbers + self.initialize() + + def initialize(self) -> None: self.error_info = [] self.import_ctx = [] self.error_files = set() @@ -120,8 +125,9 @@ def __init__(self, show_error_context: bool = False, self.used_ignored_lines = defaultdict(set) self.ignored_files = set() self.only_once_messages = set() - self.show_error_context = show_error_context - self.show_column_numbers = show_column_numbers + + def reset(self) -> None: + self.initialize() def copy(self) -> 'Errors': new = Errors(self.show_error_context, self.show_column_numbers) diff --git a/mypy/nodes.py b/mypy/nodes.py index 4584245b9904..697c6fd42bc6 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -9,7 +9,7 @@ import mypy.strconv from mypy.visitor import NodeVisitor, StatementVisitor, ExpressionVisitor -from mypy.util import dump_tagged, short_type +from mypy.util import short_type, IdMapper class Context: @@ -368,6 +368,8 @@ class FuncBase(Node): # Type signature. 
This is usually CallableType or Overloaded, but it can be something else for # decorated functions/ type = None # type: mypy.types.Type + # Original, not semantically analyzed type (used for reprocessing) + unanalyzed_type = None # type: mypy.types.Type # If method, reference to TypeInfo info = None # type: TypeInfo is_property = False @@ -512,6 +514,7 @@ def __init__(self, arguments: List[Argument], body: 'Block', self.max_pos = self.arg_kinds.count(ARG_POS) + self.arg_kinds.count(ARG_OPT) self.body = body self.type = typ + self.unanalyzed_type = typ self.expanded = [] self.min_args = 0 @@ -835,6 +838,8 @@ class AssignmentStmt(Statement): rvalue = None # type: Expression # Declared type in a comment, may be None. type = None # type: mypy.types.Type + # Original, not semantically analyzed type in annotation (used for reprocessing) + unanalyzed_type = None # type: Optional[mypy.types.Type] # This indicates usage of PEP 526 type annotation syntax in assignment. new_syntax = False # type: bool @@ -843,6 +848,7 @@ def __init__(self, lvalues: List[Lvalue], rvalue: Expression, self.lvalues = lvalues self.rvalue = rvalue self.type = type + self.unanalyzed_type = type self.new_syntax = new_syntax def accept(self, visitor: StatementVisitor[T]) -> T: @@ -2121,14 +2127,37 @@ def __str__(self) -> str: This includes the most important information about the type. 
""" + return self.dump() + + def dump(self, + str_conv: 'mypy.strconv.StrConv' = None, + type_str_conv: 'mypy.types.TypeStrVisitor' = None) -> str: + if not str_conv: + str_conv = mypy.strconv.StrConv() base = None # type: str + + def type_str(typ: 'mypy.types.Type') -> str: + if type_str_conv: + return typ.accept(type_str_conv) + return str(typ) + + head = 'TypeInfo' + str_conv.format_id(self) if self.bases: - base = 'Bases({})'.format(', '.join(str(base) + base = 'Bases({})'.format(', '.join(type_str(base) for base in self.bases)) - return dump_tagged(['Name({})'.format(self.fullname()), - base, - ('Names', sorted(self.names.keys()))], - 'TypeInfo') + names = [] + for name in sorted(self.names): + description = name + str_conv.format_id(self.names[name].node) + node = self.names[name].node + if isinstance(node, Var) and node.type: + description += ' ({})'.format(type_str(node.type)) + names.append(description) + return mypy.strconv.dump_tagged( + ['Name({})'.format(self.fullname()), + base, + ('Names', names)], + head, + str_conv=str_conv) def serialize(self) -> JsonDict: # NOTE: This is where all ClassDefs originate, so there shouldn't be duplicates. 
diff --git a/mypy/semanal.py b/mypy/semanal.py index 8f285c135242..c5eb313862c6 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -44,8 +44,9 @@ """ from collections import OrderedDict +from contextlib import contextmanager from typing import ( - List, Dict, Set, Tuple, cast, TypeVar, Union, Optional, Callable + List, Dict, Set, Tuple, cast, TypeVar, Union, Optional, Callable, Iterator ) from mypy.nodes import ( @@ -77,6 +78,7 @@ NoneTyp, CallableType, Overloaded, Instance, Type, TypeVarType, AnyType, FunctionLike, UnboundType, TypeList, TypeVarDef, TypeType, TupleType, UnionType, StarType, EllipsisType, function_type, TypedDictType, + Void, ) from mypy.nodes import implicit_module_attrs from mypy.typeanal import ( @@ -267,6 +269,40 @@ def visit_file(self, file_node: MypyFile, fnam: str, options: Options) -> None: del self.options + def refresh_partial(self, node: Union[MypyFile, FuncItem]) -> None: + """Refresh a stale target in fine-grained incremental mode.""" + if isinstance(node, MypyFile): + self.refresh_top_level(node) + else: + self.accept(node) + + def refresh_top_level(self, file_node: MypyFile) -> None: + """Reanalyze a stale module top-level in fine-grained incremental mode.""" + for d in file_node.defs: + if not isinstance(d, (FuncItem, ClassDef)): + self.accept(d) + + @contextmanager + def file_context(self, file_node: MypyFile, fnam: str, options: Options, + active_type: Optional[TypeInfo]) -> Iterator[None]: + # TODO: Use this above in visit_file + self.options = options + self.errors.set_file(fnam) + self.cur_mod_node = file_node + self.cur_mod_id = file_node.fullname() + self.is_stub_file = fnam.lower().endswith('.pyi') + self.globals = file_node.names + if active_type: + self.enter_class(active_type.defn) + # TODO: Bind class type vars + + yield + + if active_type: + self.leave_class() + self.type = None + del self.options + def visit_func_def(self, defn: FuncDef) -> None: phase_info = self.postpone_nested_functions_stack[-1] if phase_info != 
FUNCTION_SECOND_PHASE: @@ -289,7 +325,8 @@ def visit_func_def(self, defn: FuncDef) -> None: # Method definition defn.info = self.type if not defn.is_decorated and not defn.is_overload: - if defn.name() in self.type.names: + if (defn.name() in self.type.names and + self.type.names[defn.name()].node != defn): # Redefinition. Conditional redefinition is okay. n = self.type.names[defn.name()].node if not self.set_original_def(n, defn): @@ -420,8 +457,12 @@ def find_type_variables_in_type(self, type: Type) -> List[Tuple[str, TypeVarExpr result.extend(self.find_type_variables_in_type(item)) elif isinstance(type, AnyType): pass - elif isinstance(type, EllipsisType) or isinstance(type, TupleType): + elif isinstance(type, (EllipsisType, TupleType, Void)): + # TODO: Need to process tuple items? pass + elif isinstance(type, Instance): + for arg in type.args: + result.extend(self.find_type_variables_in_type(arg)) else: assert False, 'Unsupported type %s' % type return result @@ -3608,6 +3649,19 @@ def visit_file(self, file_node: MypyFile, fnam: str, options: Options) -> None: self.options = options self.accept(file_node) + def refresh_partial(self, node: Union[MypyFile, FuncItem]) -> None: + """Refresh a stale target in fine-grained incremental mode.""" + if isinstance(node, MypyFile): + self.refresh_top_level(node) + else: + self.accept(node) + + def refresh_top_level(self, file_node: MypyFile) -> None: + """Reanalyze a stale module top-level in fine-grained incremental mode.""" + for d in file_node.defs: + if not isinstance(d, (FuncItem, ClassDef)): + self.accept(d) + def accept(self, node: Node) -> None: try: node.accept(self) diff --git a/mypy/server/__init__.py b/mypy/server/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py new file mode 100644 index 000000000000..141ea65b145c --- /dev/null +++ b/mypy/server/astdiff.py @@ -0,0 +1,212 @@ +"""Compare two versions of a module symbol table. 
+ +The goal is to find which AST nodes have externally visible changes, so +that we can fire triggers and re-type-check other parts of the program +that are stale because of the changes. + +Only look at detail at definitions at the current module. +""" + +from typing import Set, List, TypeVar + +from mypy.nodes import SymbolTable, SymbolTableNode, FuncBase, TypeInfo, Var +from mypy.types import ( + Type, TypeVisitor, UnboundType, ErrorType, TypeList, AnyType, Void, NoneTyp, UninhabitedType, + ErasedType, DeletedType, Instance, TypeVarType, CallableType, TupleType, TypedDictType, + UnionType, Overloaded, PartialType, TypeType +) + + +def compare_symbol_tables(name_prefix: str, table1: SymbolTable, table2: SymbolTable) -> Set[str]: + """Return names that are different in two versions of a symbol table. + + Return a set of fully-qualified names (e.g., 'mod.func' or 'mod.Class.method'). + """ + # Find names only defined only in one version. + names1 = {'%s.%s' % (name_prefix, name) for name in table1} + names2 = {'%s.%s' % (name_prefix, name) for name in table2} + triggers = names1 ^ names2 + + # Look for names defined in both versions that are different. + for name in set(table1.keys()) & set(table2.keys()): + if not is_similar_node_shallow(table1[name], table2[name]): + triggers.add('%s.%s' % (name_prefix, name)) + else: + # Nodes are the same when using shallow comparison. Now look into contents of + # classes to find changed items. + node1 = table1[name].node + node2 = table2[name].node + + if node1.fullname() and module_name(node1.fullname()) != name_prefix: + # Only look inside things defined in the current module. + continue + + if isinstance(node1, TypeInfo) and isinstance(node2, TypeInfo): + # TODO: Only do this is the class is defined in this module. 
+ prefix = '%s.%s' % (name_prefix, node1.name()) + triggers |= compare_symbol_tables(prefix, node1.names, node2.names) + + return triggers + + +def is_similar_node_shallow(n: SymbolTableNode, m: SymbolTableNode) -> bool: + # TODO: + # cross_ref + # tvar_def + # type_override + if (n.kind != m.kind + or n.mod_id != m.mod_id + or n.module_public != m.module_public): + return False + if type(n.node) != type(m.node): # noqa + return False + if n.node.fullname() != m.node.fullname(): + return False + if isinstance(n.node, FuncBase) and isinstance(m.node, FuncBase): + # TODO: info + return (n.node.is_property == m.node.is_property and + is_identical_type(n.node.type, m.node.type)) + if isinstance(n.node, TypeInfo) and isinstance(m.node, TypeInfo): + # TODO: + # mro + # type_vars + # bases + # _promote + # tuple_type + # typeddict_type + nn = n.node + mn = m.node + return (nn.is_abstract == mn.is_abstract and + nn.is_enum == mn.is_enum and + nn.fallback_to_any == mn.fallback_to_any and + nn.is_named_tuple == mn.is_named_tuple and + nn.is_newtype == mn.is_newtype and + nn.alt_fullname == mn.alt_fullname) + if isinstance(n.node, Var) and isinstance(m.node, Var): + return is_identical_type(n.node.type, m.node.type) + return True + + +def module_name(id: str) -> str: + return id.rsplit('.', 1)[0] + + +def is_identical_type(t: Type, s: Type) -> bool: + return t.accept(IdenticalTypeVisitor(s)) + + +TT = TypeVar('TT', bound=Type) + + +def is_identical_types(a: List[TT], b: List[TT]) -> bool: + return len(a) == len(b) and all(is_identical_type(t, s) for t, s in zip(a, b)) + + +class IdenticalTypeVisitor(TypeVisitor[bool]): + """Visitor for checking whether two types are identical. + + This may be conservative -- it's okay for two types to be considered + different even if they are actually the same. The results are only + used to improve performance, not relied on for correctness. 
+ + Differences from mypy.sametypes: + + * Types with the same name but different AST nodes are considered + identical. + + * If one of the types is not valid for whatever reason, they are + considered different. + + * Sometimes require types to be structurally identical, even if the + are semantically the same type. + """ + + def __init__(self, right: Type) -> None: + self.right = right + + # visit_x(left) means: is left (which is an instance of X) the same type as + # right? + + def visit_unbound_type(self, left: UnboundType) -> bool: + return False + + def visit_error_type(self, left: ErrorType) -> bool: + return False + + def visit_type_list(self, t: TypeList) -> bool: + assert False, 'Not supported' + + def visit_any(self, left: AnyType) -> bool: + return isinstance(self.right, AnyType) + + def visit_void(self, left: Void) -> bool: + return isinstance(self.right, Void) + + def visit_none_type(self, left: NoneTyp) -> bool: + return isinstance(self.right, NoneTyp) + + def visit_uninhabited_type(self, t: UninhabitedType) -> bool: + return isinstance(self.right, UninhabitedType) + + def visit_erased_type(self, left: ErasedType) -> bool: + return False + + def visit_deleted_type(self, left: DeletedType) -> bool: + return isinstance(self.right, DeletedType) + + def visit_instance(self, left: Instance) -> bool: + return (isinstance(self.right, Instance) and + left.type.fullname() == self.right.type.fullname() and + is_identical_types(left.args, self.right.args)) + + def visit_type_var(self, left: TypeVarType) -> bool: + return (isinstance(self.right, TypeVarType) and + left.id == self.right.id) + + def visit_callable_type(self, left: CallableType) -> bool: + # FIX generics + if isinstance(self.right, CallableType): + cright = self.right + return (is_identical_type(left.ret_type, cright.ret_type) and + is_identical_types(left.arg_types, cright.arg_types) and + left.arg_names == cright.arg_names and + left.arg_kinds == cright.arg_kinds and + left.is_type_obj() == 
cright.is_type_obj() and + left.is_ellipsis_args == cright.is_ellipsis_args) + return False + + def visit_tuple_type(self, left: TupleType) -> bool: + if isinstance(self.right, TupleType): + return is_identical_types(left.items, self.right.items) + return False + + def visit_typeddict_type(self, left: TypedDictType) -> bool: + if isinstance(self.right, TypedDictType): + if left.items.keys() != self.right.items.keys(): + return False + for (_, left_item_type, right_item_type) in left.zip(self.right): + if not is_identical_type(left_item_type, right_item_type): + return False + return True + return False + + def visit_union_type(self, left: UnionType) -> bool: + if isinstance(self.right, UnionType): + # Require structurally identical types. + return is_identical_types(left.items, self.right.items) + return False + + def visit_overloaded(self, left: Overloaded) -> bool: + if isinstance(self.right, Overloaded): + return is_identical_types(left.items(), self.right.items()) + return False + + def visit_partial_type(self, left: PartialType) -> bool: + # A partial type is not fully defined, so the result is indeterminate. We shouldn't + # get here. + raise RuntimeError + + def visit_type_type(self, left: TypeType) -> bool: + if isinstance(self.right, TypeType): + return is_identical_type(left.item, self.right.item) + return False diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py new file mode 100644 index 000000000000..f40035e371cc --- /dev/null +++ b/mypy/server/astmerge.py @@ -0,0 +1,219 @@ +"""Merge a new version of a module AST to an old version. + +See the main entry point merge_asts for details. 
+""" + +from typing import Dict, List, cast, TypeVar + +from mypy.nodes import ( + Node, MypyFile, SymbolTable, Block, AssignmentStmt, NameExpr, MemberExpr, RefExpr, TypeInfo, + FuncDef, ClassDef, SymbolNode, Var, Statement, MDEF +) +from mypy.traverser import TraverserVisitor +from mypy.types import ( + Type, TypeVisitor, Instance, AnyType, NoneTyp, CallableType, DeletedType, PartialType, + TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType, + Void, Overloaded +) + + +def merge_asts(old: MypyFile, old_symbols: SymbolTable, + new: MypyFile, new_symbols: SymbolTable) -> None: + """Merge a new version of a module AST to a previous version. + + The main idea is to preserve the identities of externally visible + nodes in the old AST (that have a corresponding node in the new AST). + All old node state (outside identity) will come from the new AST. + + When this returns, 'old' will refer to the merged AST, but 'new_symbols' + will be the new symbol table. 'new' and 'old_symbols' will no longer be + valid. 
+ """ + assert new.fullname() == old.fullname() + replacement_map = replacement_map_from_symbol_table( + old_symbols, new_symbols, prefix=old.fullname()) + replacement_map[new] = old + node = replace_nodes_in_ast(new, replacement_map) + assert node is old + replace_nodes_in_symbol_table(new_symbols, replacement_map) + + +def replacement_map_from_symbol_table( + old: SymbolTable, new: SymbolTable, prefix: str) -> Dict[SymbolNode, SymbolNode]: + replacements = {} + for name, node in old.items(): + if (name in new and (node.kind == MDEF + or module_prefix(node.node.fullname()) == prefix)): + new_node = new[name] + if (type(new_node.node) == type(node.node) # noqa + and new_node.node.fullname() == node.node.fullname() + and new_node.kind == node.kind): + replacements[new_node.node] = node.node + if isinstance(node.node, TypeInfo) and isinstance(new_node.node, TypeInfo): + type_repl = replacement_map_from_symbol_table( + node.node.names, + new_node.node.names, + prefix) + replacements.update(type_repl) + return replacements + + +def replace_nodes_in_ast(node: SymbolNode, + replacements: Dict[SymbolNode, SymbolNode]) -> SymbolNode: + visitor = NodeReplaceVisitor(replacements) + node.accept(visitor) + return replacements.get(node, node) + + +SN = TypeVar('SN', bound=SymbolNode) + + +class NodeReplaceVisitor(TraverserVisitor): + """Transform some nodes to new identities in an AST. + + Only nodes that live in the symbol table may be + replaced, which simplifies the implementation some. 
+ """ + + def __init__(self, replacements: Dict[SymbolNode, SymbolNode]) -> None: + self.replacements = replacements + + def visit_mypy_file(self, node: MypyFile) -> None: + node = self.fixup(node) + node.defs = self.replace_statements(node.defs) + super().visit_mypy_file(node) + + def visit_block(self, node: Block) -> None: + super().visit_block(node) + node.body = self.replace_statements(node.body) + + def visit_func_def(self, node: FuncDef) -> None: + node = self.fixup(node) + if node.type: + self.fixup_type(node.type) + super().visit_func_def(node) + + def visit_class_def(self, node: ClassDef) -> None: + # TODO additional things like the MRO + node.defs.body = self.replace_statements(node.defs.body) + replace_nodes_in_symbol_table(node.info.names, self.replacements) + super().visit_class_def(node) + + def visit_assignment_stmt(self, node: AssignmentStmt) -> None: + if node.type: + self.fixup_type(node.type) + super().visit_assignment_stmt(node) + + # Expressions + + def visit_name_expr(self, node: NameExpr) -> None: + self.visit_ref_expr(node) + + def visit_member_expr(self, node: MemberExpr) -> None: + self.visit_ref_expr(node) + super().visit_member_expr(node) + + def visit_ref_expr(self, node: RefExpr) -> None: + node.node = self.fixup(node.node) + + # Helpers + + def fixup(self, node: SN) -> SN: + if node in self.replacements: + new = self.replacements[node] + new.__dict__ = node.__dict__ + return cast(SN, new) + return node + + def fixup_type(self, typ: Type) -> None: + typ.accept(TypeReplaceVisitor(self.replacements)) + + def replace_statements(self, nodes: List[Statement]) -> List[Statement]: + result = [] + for node in nodes: + if isinstance(node, SymbolNode): + node = self.fixup(node) + result.append(node) + return result + + +class TypeReplaceVisitor(TypeVisitor[None]): + def __init__(self, replacements: Dict[SymbolNode, SymbolNode]) -> None: + self.replacements = replacements + + def visit_instance(self, typ: Instance) -> None: + typ.type = 
self.fixup(typ.type) + for arg in typ.args: + arg.accept(self) + + def visit_any(self, typ: AnyType) -> None: + pass + + def visit_none_type(self, typ: NoneTyp) -> None: + pass + + def visit_callable_type(self, typ: CallableType) -> None: + for arg in typ.arg_types: + arg.accept(self) + typ.ret_type.accept(self) + # TODO: typ.definition + # TODO: typ.fallback + assert not typ.variables # TODO + + def visit_overloaded(self, t: Overloaded) -> None: + raise NotImplementedError + + def visit_deleted_type(self, typ: DeletedType) -> None: + pass + + def visit_partial_type(self, typ: PartialType) -> None: + raise RuntimeError + + def visit_tuple_type(self, typ: TupleType) -> None: + raise NotImplementedError + + def visit_type_type(self, typ: TypeType) -> None: + raise NotImplementedError + + def visit_type_var(self, typ: TypeVarType) -> None: + raise NotImplementedError + + def visit_typeddict_type(self, typ: TypedDictType) -> None: + raise NotImplementedError + + def visit_unbound_type(self, typ: UnboundType) -> None: + raise RuntimeError + + def visit_uninhabited_type(self, typ: UninhabitedType) -> None: + pass + + def visit_union_type(self, typ: UnionType) -> None: + raise NotImplementedError + + def visit_void(self, typ: Void) -> None: + pass + + # Helpers + + def fixup(self, node: SN) -> SN: + if node in self.replacements: + new = self.replacements[node] + new.__dict__ = node.__dict__ + return cast(SN, new) + return node + + +def replace_nodes_in_symbol_table(symbols: SymbolTable, + replacements: Dict[SymbolNode, SymbolNode]) -> None: + for name, node in symbols.items(): + if node.node in replacements: + new = replacements[node.node] + new.__dict__ = node.node.__dict__ + node.node = new + if isinstance(node.node, Var) and node.node.type: + node.node.type.accept(TypeReplaceVisitor(replacements)) + node.node.info = cast(TypeInfo, replacements.get(node.node.info, node.node.info)) + + +def module_prefix(fullname: str) -> str: + return fullname.rsplit('.', 1)[0] diff 
--git a/mypy/server/aststrip.py b/mypy/server/aststrip.py new file mode 100644 index 000000000000..92e23fa781ef --- /dev/null +++ b/mypy/server/aststrip.py @@ -0,0 +1,49 @@ +"""Strip AST from semantic and type information.""" + +from typing import Union + +from mypy.nodes import ( + Node, FuncDef, NameExpr, MemberExpr, RefExpr, MypyFile, FuncItem, ClassDef, AssignmentStmt +) +from mypy.traverser import TraverserVisitor + + +def strip_target(node: Union[MypyFile, FuncItem]) -> None: + NodeStripVisitor().strip_target(node) + + +class NodeStripVisitor(TraverserVisitor): + def strip_target(self, node: Union[MypyFile, FuncItem]) -> None: + """Strip a fine-grained incremental mode target.""" + if isinstance(node, MypyFile): + self.strip_top_level(node) + else: + node.accept(self) + + def strip_top_level(self, file_node: MypyFile) -> None: + """Strip a module top-level (don't recurse into functions).""" + for node in file_node.defs: + if not isinstance(node, (FuncItem, ClassDef)): + node.accept(self) + + def visit_func_def(self, node: FuncDef) -> None: + node.expanded = [] + node.type = node.unanalyzed_type + super().visit_func_def(node) + + def visit_assignment_stmt(self, node: AssignmentStmt) -> None: + node.type = node.unanalyzed_type + super().visit_assignment_stmt(node) + + def visit_name_expr(self, node: NameExpr) -> None: + self.visit_ref_expr(node) + + def visit_member_expr(self, node: MemberExpr) -> None: + self.visit_ref_expr(node) + + def visit_ref_expr(self, node: RefExpr) -> None: + node.kind = None + node.node = None + node.fullname = None + + # TODO: handle more node types diff --git a/mypy/server/deps.py b/mypy/server/deps.py new file mode 100644 index 000000000000..4a1a7fc6ad97 --- /dev/null +++ b/mypy/server/deps.py @@ -0,0 +1,188 @@ +"""Generate fine-grained dependencies for AST nodes.""" + +from typing import Dict, List, Set + +from mypy.checkmember import bind_self +from mypy.nodes import ( + Node, Expression, MypyFile, FuncDef, ClassDef,
AssignmentStmt, NameExpr, MemberExpr, Import, + ImportFrom, LDEF +) +from mypy.traverser import TraverserVisitor +from mypy.types import ( + Type, Instance, AnyType, NoneTyp, TypeVisitor, CallableType, DeletedType, PartialType, + TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType, + Void, FunctionLike +) +from mypy.server.trigger import make_trigger + + +def get_dependencies(prefix: str, node: Node, + type_map: Dict[Expression, Type]) -> Dict[str, Set[str]]: + visitor = DependencyVisitor(prefix, type_map) + node.accept(visitor) + return visitor.map + + +class DependencyVisitor(TraverserVisitor): + def __init__(self, prefix: str, type_map: Dict[Expression, Type]) -> None: + self.stack = [prefix] + self.type_map = type_map + self.map = {} # type: Dict[str, Set[str]] + self.is_class = False + + # TODO + # decorated functions + # overloads + # from m import * + + def visit_mypy_file(self, o: MypyFile) -> None: + # TODO: Do we need to anything here? + super().visit_mypy_file(o) + + def visit_func_def(self, o: FuncDef) -> None: + target = self.push(o.name()) + if o.type: + if self.is_class and isinstance(o.type, FunctionLike): + signature = bind_self(o.type) # type: Type + else: + signature = o.type + for trigger in get_type_dependencies(signature): + self.add_dependency(trigger) + self.add_dependency(trigger, target=make_trigger(target)) + super().visit_func_def(o) + self.pop() + + def visit_class_def(self, o: ClassDef) -> None: + target = self.push(o.name) + self.add_dependency(make_trigger(target)) + old_is_class = self.is_class + self.is_class = True + # TODO: Add dependencies based on MRO and other attributes. 
+ super().visit_class_def(o) + self.is_class = old_is_class + self.pop() + + def visit_import(self, o: Import) -> None: + for id, as_id in o.ids: + # TODO: as_id + self.add_dependency(make_trigger(id), self.current()) + + def visit_import_from(self, o: ImportFrom) -> None: + raise NotImplementedError + + def visit_assignment_stmt(self, o: AssignmentStmt) -> None: + super().visit_assignment_stmt(o) + if o.type: + for trigger in get_type_dependencies(o.type): + self.add_dependency(trigger) + + # Expressions + + # TODO + # dependency on __init__ (e.g. ClassName()) + # super() + + def visit_name_expr(self, o: NameExpr) -> None: + if o.kind == LDEF: + # We don't track depdendencies to local variables, since they + # aren't externally visible. + return + trigger = make_trigger(o.fullname) + self.add_dependency(trigger) + + def visit_member_expr(self, e: MemberExpr) -> None: + super().visit_member_expr(e) + if e.kind is not None: + # Reference to a module attribute + trigger = make_trigger(e.fullname) + self.add_dependency(trigger) + else: + # Reference to a non-module attribute + typ = self.type_map[e.expr] + if isinstance(typ, Instance): + member = '%s.%s' % (typ.type.fullname(), e.name) + trigger = make_trigger(member) + self.add_dependency(trigger) + elif isinstance(typ, (AnyType, NoneTyp)): + pass # No dependency needed + else: + # TODO: Handle more types + raise NotImplementedError + + # Helpers + + def add_dependency(self, trigger: str, target: str = None) -> None: + if target is None: + target = self.current() + self.map.setdefault(trigger, set()).add(target) + + def push(self, component: str) -> str: + target = '%s.%s' % (self.current(), component) + self.stack.append(target) + return target + + def pop(self) -> None: + self.stack.pop() + + def current(self) -> str: + return self.stack[-1] + + +def get_type_dependencies(typ: Type) -> List[str]: + return typ.accept(TypeDependenciesVisitor()) + + +class TypeDependenciesVisitor(TypeVisitor[List[str]]): + def 
__init__(self) -> None: + self.deps = [] # type: List[str] + + def visit_instance(self, typ: Instance) -> List[str]: + trigger = make_trigger(typ.type.fullname()) + triggers = [trigger] + for arg in typ.args: + triggers.extend(get_type_dependencies(arg)) + return triggers + + def visit_any(self, typ: AnyType) -> List[str]: + return [] + + def visit_none_type(self, typ: NoneTyp) -> List[str]: + return [] + + def visit_callable_type(self, typ: CallableType) -> List[str]: + # TODO: generic callables + triggers = [] + for arg in typ.arg_types: + triggers.extend(get_type_dependencies(arg)) + triggers.extend(get_type_dependencies(typ.ret_type)) + return triggers + + def visit_deleted_type(self, typ: DeletedType) -> List[str]: + return [] + + def visit_partial_type(self, typ: PartialType) -> List[str]: + assert False, "Should not see a partial type here" + + def visit_tuple_type(self, typ: TupleType) -> List[str]: + raise NotImplementedError + + def visit_type_type(self, typ: TypeType) -> List[str]: + raise NotImplementedError + + def visit_type_var(self, typ: TypeVarType) -> List[str]: + raise NotImplementedError + + def visit_typeddict_type(self, typ: TypedDictType) -> List[str]: + raise NotImplementedError + + def visit_unbound_type(self, typ: UnboundType) -> List[str]: + return [] + + def visit_uninhabited_type(self, typ: UninhabitedType) -> List[str]: + return [] + + def visit_union_type(self, typ: UnionType) -> List[str]: + raise NotImplementedError + + def visit_void(self, typ: Void) -> List[str]: + return [] diff --git a/mypy/server/subexpr.py b/mypy/server/subexpr.py new file mode 100644 index 000000000000..7a5433c06682 --- /dev/null +++ b/mypy/server/subexpr.py @@ -0,0 +1,136 @@ +"""Find all subexpressions of an AST node.""" + +from typing import List + +from mypy.nodes import ( + Expression, Node, MemberExpr, YieldFromExpr, YieldExpr, CallExpr, OpExpr, ComparisonExpr, + SliceExpr, CastExpr, RevealTypeExpr, UnaryExpr, ListExpr, TupleExpr, DictExpr, SetExpr, + 
IndexExpr, GeneratorExpr, ListComprehension, ConditionalExpr, TypeApplication, FuncExpr, + StarExpr, BackquoteExpr, AwaitExpr +) +from mypy.traverser import TraverserVisitor + + +def get_subexpressions(node: Node) -> List[Expression]: + visitor = SubexpressionFinder() + node.accept(visitor) + return visitor.expressions + + +class SubexpressionFinder(TraverserVisitor): + def __init__(self) -> None: + self.expressions = [] # type: List[Expression] + + def _visit_leaf(self, o: Expression) -> None: + self.add(o) + + visit_int_expr = _visit_leaf + visit_name_expr = _visit_leaf + visit_float_expr = _visit_leaf + visit_str_expr = _visit_leaf + visit_bytes_expr = _visit_leaf + visit_unicode_expr = _visit_leaf + visit_complex_expr = _visit_leaf + visit_ellipsis = _visit_leaf + visit_super_expr = _visit_leaf + visit_type_var_expr = _visit_leaf + visit_type_alias_expr = _visit_leaf + visit_namedtuple_expr = _visit_leaf + visit_typeddict_expr = _visit_leaf + visit__promote_expr = _visit_leaf + visit_newtype_expr = _visit_leaf + + def visit_member_expr(self, e: MemberExpr) -> None: + self.add(e) + super().visit_member_expr(e) + + def visit_yield_from_expr(self, e: YieldFromExpr) -> None: + self.add(e) + super().visit_yield_from_expr(e) + + def visit_yield_expr(self, e: YieldExpr) -> None: + self.add(e) + super().visit_yield_expr(e) + + def visit_call_expr(self, e: CallExpr) -> None: + self.add(e) + super().visit_call_expr(e) + + def visit_op_expr(self, e: OpExpr) -> None: + self.add(e) + super().visit_op_expr(e) + + def visit_comparison_expr(self, e: ComparisonExpr) -> None: + self.add(e) + super().visit_comparison_expr(e) + + def visit_slice_expr(self, e: SliceExpr) -> None: + self.add(e) + super().visit_slice_expr(e) + + def visit_cast_expr(self, e: CastExpr) -> None: + self.add(e) + super().visit_cast_expr(e) + + def visit_reveal_type_expr(self, e: RevealTypeExpr) -> None: + self.add(e) + super().visit_reveal_type_expr(e) + + def visit_unary_expr(self, e: UnaryExpr) -> None: 
+ self.add(e) + super().visit_unary_expr(e) + + def visit_list_expr(self, e: ListExpr) -> None: + self.add(e) + super().visit_list_expr(e) + + def visit_tuple_expr(self, e: TupleExpr) -> None: + self.add(e) + super().visit_tuple_expr(e) + + def visit_dict_expr(self, e: DictExpr) -> None: + self.add(e) + super().visit_dict_expr(e) + + def visit_set_expr(self, e: SetExpr) -> None: + self.add(e) + super().visit_set_expr(e) + + def visit_index_expr(self, e: IndexExpr) -> None: + self.add(e) + super().visit_index_expr(e) + + def visit_generator_expr(self, e: GeneratorExpr) -> None: + self.add(e) + super().visit_generator_expr(e) + + def visit_list_comprehension(self, e: ListComprehension) -> None: + self.add(e) + super().visit_list_comprehension(e) + + def visit_conditional_expr(self, e: ConditionalExpr) -> None: + self.add(e) + super().visit_conditional_expr(e) + + def visit_type_application(self, e: TypeApplication) -> None: + self.add(e) + super().visit_type_application(e) + + def visit_func_expr(self, e: FuncExpr) -> None: + self.add(e) + super().visit_func_expr(e) + + def visit_star_expr(self, e: StarExpr) -> None: + self.add(e) + super().visit_star_expr(e) + + def visit_backquote_expr(self, e: BackquoteExpr) -> None: + self.add(e) + super().visit_backquote_expr(e) + + def visit_await_expr(self, e: AwaitExpr) -> None: + self.add(e) + super().visit_await_expr(e) + + def add(self, e: Expression) -> None: + self.expressions.append(e) diff --git a/mypy/server/trigger.py b/mypy/server/trigger.py new file mode 100644 index 000000000000..2c161f57c57e --- /dev/null +++ b/mypy/server/trigger.py @@ -0,0 +1,5 @@ +"""AST triggers that are used for fine-grained dependency handling.""" + + +def make_trigger(name: str) -> str: + return '<%s>' % name diff --git a/mypy/server/update.py b/mypy/server/update.py new file mode 100644 index 000000000000..78df007cd0a6 --- /dev/null +++ b/mypy/server/update.py @@ -0,0 +1,272 @@ +"""Update build result by incrementally processing changed 
modules. + +Use fine-grained dependencies to update targets in other modules that +may be affected by externally-visible changes in the changed modules. + +Terms: + +* A 'target' is a function definition or the top level of a module. We + refer to targets using their fully qualified name (e.g. 'mod.Cls.attr'). + Targets are the smallest units of processing during fine-grained + incremental checking. +* A 'trigger' represents the properties of a part of a program, and it + gets triggered/activated when these properties change. For example, + '<m.f>' refers to a module-level function, and it gets triggered + if the signature of the function changes, or if the function is + removed. + +Some program state is maintained across multiple build increments: + +* The full ASTs of all modules in memory all the time (+ type map). +* Maintain a fine-grained dependency map, which is from triggers to + targets/triggers. The latter determine what other parts of a program + need to be processed again due to an externally visible change to a + module. + +We perform a fine-grained incremental program update like this: + +* Determine which modules have changes in their source code since the + previous build. +* Fully process these modules, creating new ASTs and symbol tables + for them. Retain the existing ASTs and symbol tables of modules that + have no changes in their source code. +* Determine which parts of the changed modules have changed. The result + is a set of triggered triggers. +* Using the dependency map, decide which other targets have become + stale and need to be reprocessed. +* Replace old ASTs of the modules that we reprocessed earlier with + the new ones, but try to retain the identities of original externally + visible AST nodes so that we don't (always) need to patch references + in the rest of the program. +* Semantically analyze and type check the stale targets. +* Repeat the previous steps until nothing externally visible has changed.
+ +Major todo items: + +- Support multiple rounds of change propagation +- Support multiple type checking passes +- Always reprocess targets with errors, even if they aren't explicitly + stale +""" + +from typing import Dict, List, Set + +from mypy.build import BuildManager, State +from mypy.checker import DeferredNode +from mypy.errors import Errors +from mypy.nodes import MypyFile, FuncDef, TypeInfo, Expression, SymbolNode +from mypy.types import Type +from mypy.server.astdiff import compare_symbol_tables +from mypy.server.astmerge import merge_asts +from mypy.server.aststrip import strip_target +from mypy.server.deps import get_dependencies +from mypy.server.subexpr import get_subexpressions +from mypy.server.trigger import make_trigger + + +def get_all_dependencies(manager: BuildManager) -> Dict[str, Set[str]]: + """Return the fine-grained dependency map for an entire build.""" + deps = {} # type: Dict[str, Set[str]] + update_dependencies(manager.modules, deps, manager.all_types) + return deps + + +def update_build(manager: BuildManager, + graph: Dict[str, State], + deps: Dict[str, Set[str]], + changed_modules: List[str]) -> List[str]: + """Update previous build result by processing changed modules. + + Also propagate changes to other modules as needed, but only process + those parts of other modules that are affected by the changes. Retain + the existing ASTs and symbol tables of unaffected modules. + + TODO: What about blocking errors? + + Args: + manager: State of the build + graph: Additional state of the build + deps: Fine-grained dependency map for the build (mutated by this function) + changed_modules: Modules changed since the previous update/build (assume + this is correct; not validated here) + + Returns: + A list of errors. + """ + old_modules = dict(manager.modules) + manager.errors.reset() + new_modules = build_incremental_step(manager, changed_modules) + # TODO: What to do with stale dependencies?
+ update_dependencies(new_modules, deps, manager.all_types) + triggered = calculate_active_triggers(manager, old_modules, new_modules) + replace_modules_with_new_variants(manager, old_modules, new_modules) + propagate_changes_using_dependencies(manager, graph, deps, triggered, set(changed_modules)) + return manager.errors.messages() + + +def build_incremental_step(manager: BuildManager, + changed_modules: List[str]) -> Dict[str, MypyFile]: + """Build new versions of changed modules only. + + Return the new ASTs for the changed modules. They will be totally + separate from the existing ASTs and need to be merged afterwards. + """ + assert len(changed_modules) == 1 + id = changed_modules[0] + path = manager.modules[id].path + + # TODO: what if file is missing? + with open(path) as f: + source = f.read() + + state = State(id=id, + path=path, + source=source, + manager=manager) # TODO: more args? + state.parse_file() + # TODO: state.fix_suppressed_dependencies()? + state.semantic_analysis() + state.semantic_analysis_pass_three() + state.type_check_first_pass() + # TODO: state.type_check_second_pass()? + state.finish_passes() + # TODO: state.write_cache()? + # TODO: state.mark_as_rechecked()? + + return {id: state.tree} + + +def update_dependencies(new_modules: Dict[str, MypyFile], + deps: Dict[str, Set[str]], + type_map: Dict[Expression, Type]) -> None: + for id, node in new_modules.items(): + module_deps = get_dependencies(prefix=id, + node=node, + type_map=type_map) + for trigger, targets in module_deps.items(): + deps.setdefault(trigger, set()).update(targets) + + +def calculate_active_triggers(manager: BuildManager, + old_modules: Dict[str, MypyFile], + new_modules: Dict[str, MypyFile]) -> Set[str]: + """Determine activated triggers by comparing old and new symbol tables. + + For example, if only the signature of function m.f is different in the new + symbol table, return {'<m.f>'}.
+ """ + names = set() # type: Set[str] + for id in new_modules: + names |= compare_symbol_tables(id, old_modules[id].names, new_modules[id].names) + return {make_trigger(name) for name in names} + + +def replace_modules_with_new_variants( + manager: BuildManager, + old_modules: Dict[str, MypyFile], + new_modules: Dict[str, MypyFile]) -> None: + """Replace modules with newly builds versions. + + Retain the identities of externally visible AST nodes in the + old ASTs so that references to the affected modules from other + modules will still be valid (unless something was deleted or + replaced with an incompatible definition, in which case there + will be dangling references that will be handled by + propagate_changes_using_dependencies). + """ + for id in new_modules: + if id in old_modules: + # Remove nodes of old modules from the type map. + all_types = manager.all_types + for expr in get_subexpressions(old_modules[id]): + if expr in all_types: + del all_types[expr] + merge_asts(old_modules[id], old_modules[id].names, + new_modules[id], new_modules[id].names) + manager.modules[id] = old_modules[id] + + +def propagate_changes_using_dependencies( + manager: BuildManager, + graph: Dict[str, State], + deps: Dict[str, Set[str]], + triggered: Set[str], + up_to_date_modules: Set[str]) -> None: + # TODO: Multiple propagation passes + # TODO: Multiple type checking passes + + todo = find_targets_recursive(triggered, deps, manager.modules, up_to_date_modules) + + for id, nodes in todo.items(): + assert id not in up_to_date_modules + file_node = manager.modules[id] + for deferred in nodes: + node = deferred.node + # Strip semantic analysis information + strip_target(node) + # We don't redo the first pass, because it only does local things. 
+ semantic_analyzer = manager.semantic_analyzer + with semantic_analyzer.file_context( + file_node=file_node, + fnam=file_node.path, + options=manager.options, + active_type=deferred.active_typeinfo): + # Second pass + manager.semantic_analyzer.refresh_partial(node) + # Third pass + manager.semantic_analyzer_pass3.refresh_partial(node) + # Type check + graph[id].type_checker.check_second_pass(list(nodes)) # TODO: check return value + + +def find_targets_recursive( + triggers: Set[str], + deps: Dict[str, Set[str]], + modules: Dict[str, MypyFile], + up_to_date_modules: Set[str]) -> Dict[str, Set[DeferredNode]]: + """Find names of all targets that need to reprocessed, given some triggers. + + Returns: Dictionary from module id to a set of stale targets. + """ + result = {} # type: Dict[str, Set[DeferredNode]] + worklist = triggers + processed = set() # type: Set[str] + + # Find AST nodes corresponding to each target. + # + # TODO: Don't rely on a set, since the items are in an unpredictable order. + while worklist: + processed |= worklist + current = worklist + worklist = set() + for target in current: + if target.startswith('<'): + worklist |= deps.get(target, set()) - processed + else: + module_id = target.split('.', 1)[0] + if module_id in up_to_date_modules: + # Already processed. + continue + if module_id not in result: + result[module_id] = set() + deferred = lookup_target(modules, target) + result[module_id].add(deferred) + + return result + + +def lookup_target(modules: Dict[str, MypyFile], target: str) -> DeferredNode: + """Look up a target by fully-qualified name.""" + components = target.split('.') + node = modules[components[0]] # type: SymbolNode + active_class = None + active_class_name = None + for c in components[1:]: + if isinstance(node, TypeInfo): + active_class = node + active_class_name = node.name() + # TODO: Is it possible for the assertion to fail? 
+ assert isinstance(node, (MypyFile, TypeInfo)) + node = node.names[c].node + assert isinstance(node, (FuncDef, MypyFile)) + return DeferredNode(node, active_class_name, active_class) diff --git a/mypy/strconv.py b/mypy/strconv.py index b8bda6d0224c..169d44bdf9aa 100644 --- a/mypy/strconv.py +++ b/mypy/strconv.py @@ -3,9 +3,9 @@ import re import os -from typing import Any, List, Tuple, Optional, Union, Sequence +from typing import Any, List, Tuple, Optional, Union, Sequence, Dict -from mypy.util import dump_tagged, short_type +from mypy.util import short_type, IdMapper import mypy.nodes from mypy.visitor import NodeVisitor @@ -21,6 +21,23 @@ class StrConv(NodeVisitor[str]): ExpressionStmt:1( IntExpr(1))) """ + + def __init__(self, show_ids: bool = False) -> None: + self.show_ids = show_ids + if show_ids: + self.id_mapper = IdMapper() + else: + self.id_mapper = None + + def get_id(self, o: object) -> int: + return self.id_mapper.id(o) + + def format_id(self, o: object) -> str: + if self.id_mapper: + return '<{}>'.format(self.get_id(o)) + else: + return '' + def dump(self, nodes: Sequence[object], obj: 'mypy.nodes.Context') -> str: """Convert a list of items to a multiline pretty-printed string. @@ -28,7 +45,10 @@ def dump(self, nodes: Sequence[object], obj: 'mypy.nodes.Context') -> str: number. See mypy.util.dump_tagged for a description of the nodes argument. 
""" - return dump_tagged(nodes, short_type(obj) + ':' + str(obj.get_line())) + tag = short_type(obj) + ':' + str(obj.get_line()) + if self.show_ids: + tag += '<{}>'.format(self.get_id(obj)) + return dump_tagged(nodes, tag, self) def func_helper(self, o: 'mypy.nodes.FuncItem') -> List[object]: """Return a list in a format suitable for dump() that represents the @@ -320,29 +340,35 @@ def visit_star_expr(self, o: 'mypy.nodes.StarExpr') -> str: return self.dump([o.expr], o) def visit_name_expr(self, o: 'mypy.nodes.NameExpr') -> str: - return (short_type(o) + '(' + self.pretty_name(o.name, o.kind, - o.fullname, o.is_def) - + ')') + pretty = self.pretty_name(o.name, o.kind, o.fullname, o.is_def, o.node) + return short_type(o) + '(' + pretty + ')' - def pretty_name(self, name: str, kind: int, fullname: str, is_def: bool) -> str: + def pretty_name(self, name: str, kind: int, fullname: str, is_def: bool, + target_node: 'mypy.nodes.Node' = None) -> str: n = name if is_def: n += '*' + if target_node: + id = self.format_id(target_node) + else: + id = '' if kind == mypy.nodes.GDEF or (fullname != name and fullname is not None): # Append fully qualified name for global references. - n += ' [{}]'.format(fullname) + n += ' [{}{}]'.format(fullname, id) elif kind == mypy.nodes.LDEF: # Add tag to signify a local reference. - n += ' [l]' + n += ' [l{}]'.format(id) elif kind == mypy.nodes.MDEF: # Add tag to signify a member reference. 
- n += ' [m]' + n += ' [m{}]'.format(id) + else: + n += id return n def visit_member_expr(self, o: 'mypy.nodes.MemberExpr') -> str: - return self.dump([o.expr, self.pretty_name(o.name, o.kind, o.fullname, - o.is_def)], o) + pretty = self.pretty_name(o.name, o.kind, o.fullname, o.is_def, o.node) + return self.dump([o.expr, pretty], o) def visit_yield_expr(self, o: 'mypy.nodes.YieldExpr') -> str: return self.dump([o.expr], o) @@ -476,3 +502,45 @@ def visit_slice_expr(self, o: 'mypy.nodes.SliceExpr') -> str: def visit_backquote_expr(self, o: 'mypy.nodes.BackquoteExpr') -> str: return self.dump([o.expr], o) + + +def dump_tagged(nodes: Sequence[object], tag: str, str_conv: 'StrConv') -> str: + """Convert an array into a pretty-printed multiline string representation. + + The format is + tag( + item1.. + itemN) + Individual items are formatted like this: + - arrays are flattened + - pairs (str, array) are converted recursively, so that str is the tag + - other items are converted to strings and indented + """ + from mypy.types import Type, TypeStrVisitor + + a = [] # type: List[str] + if tag: + a.append(tag + '(') + for n in nodes: + if isinstance(n, list): + if n: + a.append(dump_tagged(n, None, str_conv)) + elif isinstance(n, tuple): + s = dump_tagged(n[1], n[0], str_conv) + a.append(indent(s, 2)) + elif isinstance(n, mypy.nodes.Node): + a.append(indent(n.accept(str_conv), 2)) + elif isinstance(n, Type): + a.append(indent(n.accept(TypeStrVisitor(str_conv.id_mapper)), 2)) + elif n: + a.append(indent(str(n), 2)) + if tag: + a[-1] += ')' + return '\n'.join(a) + + +def indent(s: str, n: int) -> str: + """Indent all the lines in s (separated by newlines) by n spaces.""" + s = ' ' * n + s + s = s.replace('\n', '\n' + ' ' * n) + return s diff --git a/mypy/test/testdeps.py b/mypy/test/testdeps.py new file mode 100644 index 000000000000..e1ee2b97e3e5 --- /dev/null +++ b/mypy/test/testdeps.py @@ -0,0 +1,64 @@ +"""Test cases for generating node-level dependencies (for 
fine-grained incremental checking)""" + +import os.path +from typing import List, Tuple, Dict + +from mypy import build +from mypy.build import BuildSource +from mypy.errors import CompileError +from mypy.nodes import MypyFile, Expression +from mypy.options import Options +from mypy.server.deps import get_dependencies +from mypy.test.config import test_temp_dir, test_data_prefix +from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal +from mypy.types import Type + +files = [ + 'deps.test' +] + + +class GetDependenciesSuite(DataSuite): + def __init__(self, *, update_data: bool) -> None: + pass + + @classmethod + def cases(cls) -> List[DataDrivenTestCase]: + c = [] # type: List[DataDrivenTestCase] + for f in files: + c += parse_test_cases(os.path.join(test_data_prefix, f), + None, test_temp_dir, True) + return c + + def run_case(self, testcase: DataDrivenTestCase) -> None: + src = '\n'.join(testcase.input) + messages, files, type_map = self.build(src) + a = messages + deps = get_dependencies('__main__', files['__main__'], type_map) + + for source, targets in sorted(deps.items()): + line = '%s -> %s' % (source, ', '.join(sorted(targets))) + # Clean up output a bit + line = line.replace('__main__', 'm') + a.append(line) + + assert_string_arrays_equal( + testcase.output, a, + 'Invalid output ({}, line {})'.format(testcase.file, + testcase.line)) + + def build(self, source: str) -> Tuple[List[str], + Dict[str, MypyFile], + Dict[Expression, Type]]: + options = Options() + options.use_builtins_fixtures = True + options.show_traceback = True + try: + result = build.build(sources=[BuildSource('main', None, source)], + options=options, + alt_lib_path=test_temp_dir) + except CompileError as e: + # TODO: Should perhaps not return None here. 
+ return e.messages, None, None + return result.errors, result.files, result.types diff --git a/mypy/test/testdiff.py b/mypy/test/testdiff.py new file mode 100644 index 000000000000..f379a3735ce7 --- /dev/null +++ b/mypy/test/testdiff.py @@ -0,0 +1,72 @@ +"""Test cases for AST diff (used for fine-grained incremental checking)""" + +import os.path +from typing import List, Tuple, Dict + +from mypy import build +from mypy.build import BuildSource +from mypy.errors import CompileError +from mypy.nodes import MypyFile +from mypy.options import Options +from mypy.server.astdiff import compare_symbol_tables +from mypy.test.config import test_temp_dir, test_data_prefix +from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal + + +files = [ + 'diff.test' +] + + +class ASTDiffSuite(DataSuite): + def __init__(self, *, update_data: bool) -> None: + pass + + @classmethod + def cases(cls) -> List[DataDrivenTestCase]: + c = [] # type: List[DataDrivenTestCase] + for f in files: + c += parse_test_cases(os.path.join(test_data_prefix, f), + None, test_temp_dir, True) + return c + + def run_case(self, testcase: DataDrivenTestCase) -> None: + first_src = '\n'.join(testcase.input) + files_dict = dict(testcase.files) + second_src = files_dict['tmp/next.py'] + + messages1, files1 = self.build(first_src) + messages2, files2 = self.build(second_src) + + a = [] + if messages1: + a.extend(messages1) + if messages2: + a.append('== next ==') + a.extend(messages2) + + diff = compare_symbol_tables( + '__main__', + files1['__main__'].names, + files2['__main__'].names) + for trigger in sorted(diff): + a.append(trigger) + + assert_string_arrays_equal( + testcase.output, a, + 'Invalid output ({}, line {})'.format(testcase.file, + testcase.line)) + + def build(self, source: str) -> Tuple[List[str], Dict[str, MypyFile]]: + options = Options() + options.use_builtins_fixtures = True + options.show_traceback = True + try: + 
result = build.build(sources=[BuildSource('main', None, source)], + options=options, + alt_lib_path=test_temp_dir) + except CompileError as e: + # TODO: Is it okay to return None? + return e.messages, None + return result.errors, result.files diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py new file mode 100644 index 000000000000..eeabf40b41bb --- /dev/null +++ b/mypy/test/testfinegrained.py @@ -0,0 +1,112 @@ +"""Test cases for fine-grained incremental checking. + +Each test cases runs a batch build followed by one or more fine-grained +incremental steps. We verify that each step produces the expected output. + +See the comment at the top of test-data/unit/fine-grained.test for more +information. +""" + +import os.path +import re +import shutil +from typing import List, Tuple, Dict + +from mypy import build +from mypy.build import BuildManager, BuildSource, Graph +from mypy.errors import Errors, CompileError +from mypy.nodes import Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression +from mypy.options import Options +from mypy.server.astmerge import merge_asts +from mypy.server.subexpr import get_subexpressions +from mypy.server.update import get_all_dependencies, update_build +from mypy.strconv import StrConv, indent +from mypy.test.config import test_temp_dir, test_data_prefix +from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal +from mypy.test.testtypegen import ignore_node +from mypy.types import TypeStrVisitor, Type +from mypy.util import short_type + + +files = [ + 'fine-grained.test' +] + + +class FineGrainedSuite(DataSuite): + def __init__(self, *, update_data: bool) -> None: + pass + + @classmethod + def cases(cls) -> List[DataDrivenTestCase]: + c = [] # type: List[DataDrivenTestCase] + for f in files: + c += parse_test_cases(os.path.join(test_data_prefix, f), + None, test_temp_dir, True) + return c + + def run_case(self, testcase: 
DataDrivenTestCase) -> None: + main_src = '\n'.join(testcase.input) + messages, manager, graph = self.build(main_src) + + a = [] + if messages: + a.extend(messages) + + deps = get_all_dependencies(manager) + + steps = find_steps() + for changed_paths in steps: + modules = [] + for module, path in changed_paths: + new_path = re.sub(r'\.[0-9]+$', '', path) + shutil.copy(path, new_path) + modules.append(module) + + new_messages = update_build(manager, graph, deps, modules) + + a.append('==') + a.extend(new_messages) + + assert_string_arrays_equal( + testcase.output, a, + 'Invalid output ({}, line {})'.format(testcase.file, + testcase.line)) + + def build(self, source: str) -> Tuple[List[str], BuildManager, Graph]: + options = Options() + options.use_builtins_fixtures = True + options.show_traceback = True + try: + result = build.build(sources=[BuildSource('main', None, source)], + options=options, + alt_lib_path=test_temp_dir) + except CompileError as e: + # TODO: We need a manager and a graph in this case as well + return e.messages, None, None + return result.errors, result.manager, result.graph + + +def find_steps() -> List[List[Tuple[str, str]]]: + """Return a list of build step representations. + + Each build step is a list of (module id, path) tuples, and each + path is of form 'dir/mod.py.2' (where 2 is the step number). 
+ """ + steps = {} # type: Dict[int, List[Tuple[str, str]]] + for dn, dirs, files in os.walk(test_temp_dir): + dnparts = dn.split(os.sep) + assert dnparts[0] == test_temp_dir + del dnparts[0] + for filename in files: + m = re.match(r'.*\.([0-9]+)$', filename) + if m: + num = int(m.group(1)) + assert num >= 2 + name = re.sub(r'\.py.*', '', filename) + module = '.'.join(dnparts + [name]) + path = os.path.join(dn, filename) + steps.setdefault(num, []).append((module, path)) + max_step = max(steps) + return [steps[num] for num in range(2, max_step + 1)] diff --git a/mypy/test/testmerge.py b/mypy/test/testmerge.py new file mode 100644 index 000000000000..f3ebd7f494eb --- /dev/null +++ b/mypy/test/testmerge.py @@ -0,0 +1,185 @@ +"""Test cases for AST merge (used for fine-grained incremental checking)""" + +import os.path +import shutil +from typing import List, Tuple, Dict + +from mypy import build +from mypy.build import BuildManager, BuildSource +from mypy.errors import Errors, CompileError +from mypy.nodes import Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression +from mypy.options import Options +from mypy.server.astmerge import merge_asts +from mypy.server.subexpr import get_subexpressions +from mypy.server.update import build_incremental_step, replace_modules_with_new_variants +from mypy.strconv import StrConv, indent +from mypy.test.config import test_temp_dir, test_data_prefix +from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite +from mypy.test.helpers import assert_string_arrays_equal +from mypy.test.testtypegen import ignore_node +from mypy.types import TypeStrVisitor, Type +from mypy.util import short_type + + +files = [ + 'merge.test' +] + + +# Which data structures to dump in a test case? 
+SYMTABLE = 'SYMTABLE' +TYPEINFO = ' TYPEINFO' +TYPES = 'TYPES' +AST = 'AST' + + +class ASTMergeSuite(DataSuite): + def __init__(self, *, update_data: bool) -> None: + self.str_conv = StrConv(show_ids=True) + self.id_mapper = self.str_conv.id_mapper + self.type_str_conv = TypeStrVisitor(self.id_mapper) + + @classmethod + def cases(cls) -> List[DataDrivenTestCase]: + c = [] # type: List[DataDrivenTestCase] + for f in files: + c += parse_test_cases(os.path.join(test_data_prefix, f), + None, test_temp_dir, True) + return c + + def run_case(self, testcase: DataDrivenTestCase) -> None: + name = testcase.name + # We use the test case name to decide which data structures to dump. + # Dumping everything would result in very verbose test cases. + if name.endswith('_symtable'): + kind = SYMTABLE + elif name.endswith('_typeinfo'): + kind = TYPEINFO + elif name.endswith('_types'): + kind = TYPES + else: + kind = AST + + main_src = '\n'.join(testcase.input) + messages, manager = self.build(main_src) + + a = [] + if messages: + a.extend(messages) + + shutil.copy(os.path.join(test_temp_dir, 'target.py.next'), + os.path.join(test_temp_dir, 'target.py')) + + a.extend(self.dump(manager.modules, manager.all_types, kind)) + + old_modules = dict(manager.modules) + old_subexpr = get_subexpressions(old_modules['target']) + + new_file = self.build_increment(manager, 'target') + replace_modules_with_new_variants(manager, + old_modules, + {'target': new_file}) + + a.append('==>') + a.extend(self.dump(manager.modules, manager.all_types, kind)) + + for expr in old_subexpr: + # Verify that old AST nodes are removed from the expression type map. 
+ assert expr not in manager.all_types + + assert_string_arrays_equal( + testcase.output, a, + 'Invalid output ({}, line {})'.format(testcase.file, + testcase.line)) + + def build(self, source: str) -> Tuple[List[str], BuildManager]: + options = Options() + options.use_builtins_fixtures = True + options.show_traceback = True + try: + result = build.build(sources=[BuildSource('main', None, source)], + options=options, + alt_lib_path=test_temp_dir) + except CompileError as e: + # TODO: Is it okay to return None? + return e.messages, None + return result.errors, result.manager + + def build_increment(self, manager: BuildManager, module_id: str) -> MypyFile: + module_dict = build_incremental_step(manager, [module_id]) + return module_dict[module_id] + + def dump(self, modules: Dict[str, MypyFile], type_map: Dict[Expression, Type], + kind: str) -> List[str]: + if kind == AST: + return self.dump_asts(modules) + elif kind == TYPEINFO: + return self.dump_typeinfos(modules) + elif kind == SYMTABLE: + return self.dump_symbol_tables(modules) + elif kind == TYPES: + return self.dump_types(type_map) + assert False, 'Invalid kind %s' % kind + + def dump_asts(self, modules: Dict[str, MypyFile]) -> List[str]: + a = [] + for m in sorted(modules): + if m == 'builtins': + # We don't support incremental checking of changes to builtins. + continue + s = modules[m].accept(self.str_conv) + a.extend(s.splitlines()) + return a + + def dump_symbol_tables(self, modules: Dict[str, MypyFile]) -> List[str]: + a = [] + for id in sorted(modules): + if id == 'builtins': + # We don't support incremental checking of changes to builtins. 
+ continue + a.extend(self.dump_symbol_table(id, modules[id].names)) + return a + + def dump_symbol_table(self, module_id: str, symtable: SymbolTable) -> List[str]: + a = ['{}:'.format(module_id)] + for name in sorted(symtable): + if name.startswith('__'): + continue + a.append(' {}: {}'.format(name, self.format_symbol_table_node(symtable[name]))) + return a + + def format_symbol_table_node(self, node: SymbolTableNode) -> str: + if node is None: + return 'None' + if isinstance(node.node, Node): + return '{}<{}>'.format(str(type(node.node).__name__), + self.id_mapper.id(node.node)) + # TODO: type_override? + return '?' + + def dump_typeinfos(self, modules: Dict[str, MypyFile]) -> List[str]: + a = [] + for id in sorted(modules): + if id == 'builtins': + continue + for name, node in modules[id].names.items(): + if isinstance(node.node, TypeInfo): + a.extend(self.dump_typeinfo(node.node)) + return a + + def dump_typeinfo(self, info: TypeInfo) -> List[str]: + s = info.dump(str_conv=self.str_conv, + type_str_conv=self.type_str_conv) + return s.splitlines() + + def dump_types(self, type_map: Dict[Expression, Type]) -> List[str]: + a = [] + # To make the results repeatable, we try to generate unique and + # deterministic sort keys. 
+ for expr in sorted(type_map, key=lambda n: (n.line, short_type(n), + str(n) + str(type_map[n]))): + typ = type_map[expr] + a.append('{}:{}: {}'.format(short_type(expr), + expr.line, + typ.accept(self.type_str_conv))) + return a diff --git a/mypy/traverser.py b/mypy/traverser.py index 8f3cffbca642..689bbc721a13 100644 --- a/mypy/traverser.py +++ b/mypy/traverser.py @@ -9,7 +9,7 @@ UnaryExpr, ListExpr, TupleExpr, DictExpr, SetExpr, IndexExpr, GeneratorExpr, ListComprehension, ConditionalExpr, TypeApplication, LambdaExpr, ComparisonExpr, OverloadedFuncDef, YieldFromExpr, - YieldExpr + YieldExpr, StarExpr, BackquoteExpr, AwaitExpr ) @@ -224,3 +224,12 @@ def visit_type_application(self, o: TypeApplication) -> None: def visit_lambda_expr(self, o: LambdaExpr) -> None: self.visit_func(o) + + def visit_star_expr(self, o: StarExpr) -> None: + o.expr.accept(self) + + def visit_backquote_expr(self, o: BackquoteExpr) -> None: + o.expr.accept(self) + + def visit_await_expr(self, o: AwaitExpr) -> None: + o.expr.accept(self) diff --git a/mypy/types.py b/mypy/types.py index e182806a4f39..a737aeb5cb1b 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -13,8 +13,8 @@ INVARIANT, SymbolNode, ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2, ARG_NAMED, ARG_NAMED_OPT, ) - from mypy.sharedparse import argument_elide_name +from mypy.util import IdMapper T = TypeVar('T') @@ -1342,6 +1342,9 @@ class TypeStrVisitor(TypeVisitor[str]): - Represent the NoneTyp type as None. """ + def __init__(self, id_mapper: IdMapper = None) -> None: + self.id_mapper = id_mapper + def visit_unbound_type(self, t: UnboundType)-> str: s = t.name + '?' 
if t.args != []: @@ -1379,6 +1382,8 @@ def visit_instance(self, t: Instance) -> str: s += '*' if t.args != []: s += '[{}]'.format(self.list_str(t.args)) + if self.id_mapper: + s += '<{}>'.format(self.id_mapper.id(t.type)) return s def visit_type_var(self, t: TypeVarType) -> str: @@ -1404,14 +1409,14 @@ def visit_callable_type(self, t: CallableType) -> str: s += '**' if t.arg_names[i]: s += t.arg_names[i] + ': ' - s += str(t.arg_types[i]) + s += t.arg_types[i].accept(self) if t.arg_kinds[i] in (ARG_OPT, ARG_NAMED_OPT): s += ' =' s = '({})'.format(s) if not isinstance(t.ret_type, NoneTyp): - s += ' -> {}'.format(t.ret_type) + s += ' -> {}'.format(t.ret_type.accept(self)) if t.variables: s = '{} {}'.format(t.variables, s) diff --git a/mypy/util.py b/mypy/util.py index e5c9e5e0275f..d3f6cf0ac298 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -37,13 +37,6 @@ def short_type(obj: object) -> str: return t.split('.')[-1].rstrip("'>") -def indent(s: str, n: int) -> str: - """Indent all the lines in s (separated by Newlines) by n spaces.""" - s = ' ' * n + s - s = s.replace('\n', '\n' + ' ' * n) - return s - - def array_repr(a: List[T]) -> List[str]: """Return the items of an array converted to strings using Repr.""" aa = [] # type: List[str] @@ -52,35 +45,6 @@ def array_repr(a: List[T]) -> List[str]: return aa -def dump_tagged(nodes: Sequence[object], tag: str) -> str: - """Convert an array into a pretty-printed multiline string representation. - - The format is - tag( - item1.. 
- itemN) - Individual items are formatted like this: - - arrays are flattened - - pairs (str : array) are converted recursively, so that str is the tag - - other items are converted to strings and indented - """ - a = [] # type: List[str] - if tag: - a.append(tag + '(') - for n in nodes: - if isinstance(n, list): - if n: - a.append(dump_tagged(n, None)) - elif isinstance(n, tuple): - s = dump_tagged(n[1], n[0]) - a.append(indent(s, 2)) - elif n: - a.append(indent(str(n), 2)) - if tag: - a[-1] += ')' - return '\n'.join(a) - - def find_python_encoding(text: bytes, pyversion: Tuple[int, int]) -> Tuple[str, int]: """PEP-263 for detecting Python file encoding""" result = ENCODING_RE.match(text) @@ -150,3 +114,23 @@ def write_junit_xml(dt: float, serious: bool, messages: List[str], path: str) -> xml = ERROR_TEMPLATE.format(text=escape('\n'.join(messages)), time=dt) with open(path, 'wb') as f: f.write(xml.encode('utf-8')) + + +class IdMapper: + """Generate integer ids for objects. + + Unlike id(), these start from 0 and increment by 1, and ids won't + get reused across the life-time of IdMapper. + + Assume objects don't redefine __eq__ or __hash__. 
+ """ + + def __init__(self) -> None: + self.id_map = {} # type: Dict[object, int] + self.next_id = 0 + + def id(self, o: object) -> int: + if o not in self.id_map: + self.id_map[o] = self.next_id + self.next_id += 1 + return self.id_map[o] diff --git a/runtests.py b/runtests.py index 4be285b19cf9..83a6ffa0d3da 100755 --- a/runtests.py +++ b/runtests.py @@ -209,7 +209,12 @@ def add_imports(driver: Driver) -> None: PYTEST_FILES = [os.path.join('mypy', 'test', '{}.py'.format(name)) for name in [ - 'testcheck', 'testextensions', + 'testcheck', + 'testextensions', + 'testdeps', + 'testdiff', + 'testfinegrained', + 'testmerge', ]] diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test new file mode 100644 index 000000000000..ca2c8b588494 --- /dev/null +++ b/test-data/unit/deps.test @@ -0,0 +1,71 @@ +-- Test cases for generating dependencies between ASTs nodes. +-- +-- The dependencies are used for fined-grained incremental checking. + + +[case testCallFunction] +def f() -> None: + g() +def g() -> None: + pass +[out] + -> m.f + +[case testCallMethod] +def f(a: A) -> None: + a.g() +class A: + def g(self) -> None: pass +[out] + -> m.f + -> , m.A, m.f + +[case testAccessAttribute] +def f(a: A) -> None: + a.x +class A: + def g(self) -> None: + self.x = 1 +[out] + -> m.A.g, m.f + -> , m.A, m.f + +[case testConstructInstance] +def f() -> None: + A() +class A: pass +[out] + -> m.A, m.f + +[case testAccessModuleAttribute-skip] +x = 1 +def f() -> None: + x +[out] + -> m, m.f + +[case testImport] +import n +[file n.py] +x = 1 +[out] + -> m + +[case testCallImportedFunction] +import n +n.f() +[file n.py] +def f() -> None: pass +[out] + -> m + -> m + +[case testCallImportedFunctionInFunction] +import n +def g() -> None: + n.f() +[file n.py] +def f() -> None: pass +[out] + -> m.g + -> m, m.g diff --git a/test-data/unit/diff.test b/test-data/unit/diff.test new file mode 100644 index 000000000000..cedf3fd58568 --- /dev/null +++ b/test-data/unit/diff.test @@ -0,0 +1,125 @@ +-- 
Test cases for taking a diff of two module ASTs/symbol tables. +-- The diffs are used for fined-grained incremental checking. + +-- +-- Module top-levels +-- + +[case testChangeTypeOfModuleAttribute] +x = 1 +y = 1 +[file next.py] +x = '' +y = 1 +[out] +__main__.x + +[case testChangeSignatureOfModuleFunction] +def f(x: int) -> None: + pass +def g(y: str) -> None: + pass +[file next.py] +def f(x: str) -> None: + x = '' +def g(y: str) -> None: + y = '' +[out] +__main__.f + +[case testAddModuleAttribute] +x = 1 +[file next.py] +x = 1 +y = 1 +[out] +__main__.y + +[case testRemoveModuleAttribute] +x = 1 +y = 1 +[file next.py] +x = 1 +[out] +__main__.y + +-- +-- Classes +-- + +[case testChangeMethodSignature] +class A: + def f(self) -> None: pass + def g(self) -> None: pass +[file next.py] +class A: + def f(self, x: int) -> None: pass + def g(self) -> None: pass +[out] +__main__.A.f + +[case testChangeAttributeType] +class A: + def f(self) -> None: + self.x = 1 + self.y = 1 +[file next.py] +class A: + def f(self) -> None: + self.x = 1 + self.y = '' +[out] +__main__.A.y + +[case testAddMethod] +class A: + def f(self) -> None: pass +[file next.py] +class A: + def f(self) -> None: pass + def g(self) -> None: pass +[out] +__main__.A.g + +[case testRemoveMethod] +class A: + def f(self) -> None: pass + def g(self) -> None: pass +[file next.py] +class A: + def f(self) -> None: pass +[out] +__main__.A.g + +[case testAddImport] +import nn +[file next.py] +import n +import nn +[file n.py] +x = 1 +[file nn.py] +y = 1 +[out] +__main__.n + +[case testRemoveImport] +import n +[file next.py] +[file n.py] +x = 1 +[out] +__main__.n + +[case testChangeClassIntoFunction] +class A: pass +[file next.py] +def A() -> None: pass +[out] +__main__.A + +[case testDeleteClass] +class A: pass +[file next.py] +[out] +__main__.A diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test new file mode 100644 index 000000000000..1edfae58d186 --- /dev/null +++ 
b/test-data/unit/fine-grained.test @@ -0,0 +1,174 @@ +-- Test cases for fine-grained incremental checking +-- +-- Test cases may define multiple versions of a file +-- (e.g. m.py, m.py.2). There is always an initial batch +-- pass that processes all files present initially, followed +-- by one or more fine-grained incremental passes that use +-- alternative versions of files, if available. If a file +-- just has a single .py version, it is used for all passes. + +-- TODO: what if version for some passes but not all + +-- Output is laid out like this: +-- +-- [out] +-- +-- == +-- + +[case testReprocessFunction] +import m +def g() -> int: + return m.f() +[file m.py] +def f() -> int: + pass +[file m.py.2] +def f() -> str: + pass +[out] +== +main:3: error: Incompatible return value type (got "str", expected "int") + +[case testReprocessTopLevel] +import m +m.f(1) +def g() -> None: pass +[file m.py] +def f(x: int) -> None: pass +[file m.py.2] +def f(x: str) -> None: pass +[out] +== +main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" + +[case testReprocessMethod] +import m +class B: + def f(self, a: m.A) -> None: + a.g() # E +[file m.py] +class A: + def g(self) -> None: pass +[file m.py.2] +class A: + def g(self, a: A) -> None: pass +[out] +== +main:4: error: Too few arguments for "g" of "A" + +[case testFunctionMissingModuleAttribute] +import m +def h() -> None: + m.f(1) +[file m.py] +def f(x: int) -> None: pass +[file m.py.2] +def g(x: str) -> None: pass +[builtins fixtures/fine_grained.pyi] +[out] +== +main:3: error: "module" has no attribute "f" + +[case testTopLevelMissingModuleAttribute] +import m +m.f(1) +def g() -> None: pass +[file m.py] +def f(x: int) -> None: pass +[file m.py.2] +def g(x: int) -> None: pass +[builtins fixtures/fine_grained.pyi] +[out] +== +main:2: error: "module" has no attribute "f" + +[case testClassChangedIntoFunction] +import m +def f(a: m.A) -> None: + pass +[file m.py] +class A: pass +[file m.py.2] +def A() -> 
None: pass +[out] +== +main:2: error: Invalid type "m.A" + +[case testClassChangedIntoFunction2] +import m +class B: + def f(self, a: m.A) -> None: pass +[file m.py] +class A: pass +[file m.py.2] +def A() -> None: pass +[out] +== +main:3: error: Invalid type "m.A" + +[case testAttributeTypeChanged] +import m +def f(a: m.A) -> int: + return a.x +[file m.py] +class A: + def f(self) -> None: + self.x = 1 +[file m.py.2] +class A: + def f(self) -> None: + self.x = 'x' +[out] +== +main:3: error: Incompatible return value type (got "str", expected "int") + +[case testAttributeRemoved] +import m +def f(a: m.A) -> int: + return a.x +[file m.py] +class A: + def f(self) -> None: + self.x = 1 +[file m.py.2] +class A: + def f(self) -> None: pass +[out] +== +main:3: error: "A" has no attribute "x" + +[case testVariableTypeBecomesInvalid] +import m +def f() -> None: + a = None # type: m.A +[file m.py] +class A: pass +[file m.py.2] +[out] +== +main:3: error: Name 'm.A' is not defined + +[case testTwoIncrementalSteps] +import m +import n +[file m.py] +def f() -> None: pass +[file n.py] +import m +def g() -> None: + m.f() # E +[file m.py.2] +import n +def f(x: int) -> None: + n.g() # E +[file n.py.3] +import m +def g(a: str) -> None: + m.f('') # E +[out] +== +tmp/n.py:3: error: Too few arguments for "f" +== +tmp/n.py:3: error: Argument 1 to "f" has incompatible type "str"; expected "int" +tmp/m.py:3: error: Too few arguments for "g" diff --git a/test-data/unit/fixtures/fine_grained.pyi b/test-data/unit/fixtures/fine_grained.pyi new file mode 100644 index 000000000000..5959df68835b --- /dev/null +++ b/test-data/unit/fixtures/fine_grained.pyi @@ -0,0 +1,24 @@ +# Small stub for fine-grained incremental checking test cases +# +# TODO: Migrate to regular stubs once fine-grained incremental is robust +# enough to handle them. 
+ +class Any: pass + +class object: + def __init__(self) -> None: pass + +class type: + def __init__(self, x: Any) -> None: pass + +class int: + def __add__(self, other: 'int') -> 'int': pass +class str: + def __add__(self, other: 'str') -> 'str': pass + +class float: pass +class bytes: pass +class tuple: pass +class function: pass +class ellipsis: pass +class module: pass diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test new file mode 100644 index 000000000000..ff855cc8d661 --- /dev/null +++ b/test-data/unit/merge.test @@ -0,0 +1,495 @@ +-- Test cases for AST merge (user for fine-grained incremental checking) + +[case testFunction] +import target +[file target.py] +def f() -> int: + pass +[file target.py.next] +def f() -> int: + pass +[out] +MypyFile:1<0>( + Import:1(target)) +MypyFile:1<1>( + tmp/target.py + FuncDef:1<2>( + f + def () -> builtins.int<3> + Block:1<4>( + PassStmt:2<5>()))) +==> +MypyFile:1<0>( + Import:1(target)) +MypyFile:1<1>( + tmp/target.py + FuncDef:1<2>( + f + def () -> builtins.int<3> + Block:1<6>( + PassStmt:2<7>()))) + +[case testClass] +import target +[file target.py] +class A: + def f(self, x: str) -> int: + pass +[file target.py.next] +class A: + def f(self, x: int) -> str: + pass +[out] +MypyFile:1<0>( + Import:1(target)) +MypyFile:1<1>( + tmp/target.py + ClassDef:1<2>( + A + FuncDef:2<3>( + f + Args( + Var(self) + Var(x)) + def (self: target.A<4>, x: builtins.str<5>) -> builtins.int<6> + Block:2<7>( + PassStmt:3<8>())))) +==> +MypyFile:1<0>( + Import:1(target)) +MypyFile:1<1>( + tmp/target.py + ClassDef:1<9>( + A + FuncDef:2<3>( + f + Args( + Var(self) + Var(x)) + def (self: target.A<4>, x: builtins.int<6>) -> builtins.str<5> + Block:2<10>( + PassStmt:3<11>())))) + +[case testClass_typeinfo] +import target +[file target.py] +class A: + def f(self, x: str) -> int: pass + def g(self, x: str) -> int: pass +[file target.py.next] +class A: + def f(self, x: int) -> str: pass + def h(self, x: int) -> str: pass +[out] 
+TypeInfo<0>( + Name(target.A) + Bases(builtins.object<1>) + Names( + f<2> + g<3>)) +==> +TypeInfo<0>( + Name(target.A) + Bases(builtins.object<1>) + Names( + f<2> + h<4>)) + +[case testConstructInstance] +import target +[file target.py] +class A: + def f(self) -> B: + return B() +class B: pass +[file target.py.next] +class B: pass +class A: + def f(self) -> B: + 1 + return B() +[out] +MypyFile:1<0>( + Import:1(target)) +MypyFile:1<1>( + tmp/target.py + ClassDef:1<2>( + A + FuncDef:2<3>( + f + Args( + Var(self)) + def (self: target.A<4>) -> target.B<5> + Block:2<6>( + ReturnStmt:3<7>( + CallExpr:3<8>( + NameExpr(B [target.B<5>]) + Args()))))) + ClassDef:4<9>( + B + PassStmt:4<10>())) +==> +MypyFile:1<0>( + Import:1(target)) +MypyFile:1<1>( + tmp/target.py + ClassDef:1<11>( + B + PassStmt:1<12>()) + ClassDef:2<13>( + A + FuncDef:3<3>( + f + Args( + Var(self)) + def (self: target.A<4>) -> target.B<5> + Block:3<14>( + ExpressionStmt:4<15>( + IntExpr(1)) + ReturnStmt:5<16>( + CallExpr:5<17>( + NameExpr(B [target.B<5>]) + Args())))))) + +[case testCallMethod] +import target +[file target.py] +class A: + def f(self) -> None: + self.f() +[file target.py.next] +class A: + def f(self) -> None: + self.f() +[out] +MypyFile:1<0>( + Import:1(target)) +MypyFile:1<1>( + tmp/target.py + ClassDef:1<2>( + A + FuncDef:2<3>( + f + Args( + Var(self)) + def (self: target.A<4>) + Block:2<5>( + ExpressionStmt:3<6>( + CallExpr:3<7>( + MemberExpr:3<8>( + NameExpr(self [l<9>]) + f) + Args())))))) +==> +MypyFile:1<0>( + Import:1(target)) +MypyFile:1<1>( + tmp/target.py + ClassDef:1<10>( + A + FuncDef:2<3>( + f + Args( + Var(self)) + def (self: target.A<4>) + Block:2<11>( + ExpressionStmt:3<12>( + CallExpr:3<13>( + MemberExpr:3<14>( + NameExpr(self [l<15>]) + f) + Args())))))) + +[case testClassAttribute] +import target +[file target.py] +class A: + def f(self) -> None: + self.x = 1 + self.x +[file target.py.next] +class A: + def f(self) -> None: + self.x = 1 + self.x +[out] +MypyFile:1<0>( + 
Import:1(target)) +MypyFile:1<1>( + tmp/target.py + ClassDef:1<2>( + A + FuncDef:2<3>( + f + Args( + Var(self)) + def (self: target.A<4>) + Block:2<5>( + AssignmentStmt:3<6>( + MemberExpr:3<8>( + NameExpr(self [l<9>]) + x*<7>) + IntExpr(1)) + ExpressionStmt:4<10>( + MemberExpr:4<11>( + NameExpr(self [l<9>]) + x)))))) +==> +MypyFile:1<0>( + Import:1(target)) +MypyFile:1<1>( + tmp/target.py + ClassDef:1<12>( + A + FuncDef:2<3>( + f + Args( + Var(self)) + def (self: target.A<4>) + Block:2<13>( + AssignmentStmt:3<14>( + MemberExpr:3<15>( + NameExpr(self [l<16>]) + x*<7>) + IntExpr(1)) + ExpressionStmt:4<17>( + MemberExpr:4<18>( + NameExpr(self [l<16>]) + x)))))) + +[case testClassAttribute_typeinfo] +import target +[file target.py] +class A: + def f(self) -> None: + self.x = 1 + self.x + self.y = A() +[file target.py.next] +class A: + def f(self) -> None: + self.x = 1 + self.x + self.y = A() +[out] +TypeInfo<0>( + Name(target.A) + Bases(builtins.object<1>) + Names( + f<2> + x<3> (builtins.int<4>) + y<5> (target.A<0>))) +==> +TypeInfo<0>( + Name(target.A) + Bases(builtins.object<1>) + Names( + f<2> + x<3> (builtins.int<4>) + y<5> (target.A<0>))) + +[case testFunction_symtable] +import target +[file target.py] +def f() -> int: + pass +[file target.py.next] +def f() -> int: + pass +[out] +__main__: + target: MypyFile<0> +target: + f: FuncDef<1> +==> +__main__: + target: MypyFile<0> +target: + f: FuncDef<1> + +[case testClass_symtable] +import target +[file target.py] +class A: pass +class B: pass +[file target.py.next] +class A: pass +class C: pass +[out] +__main__: + target: MypyFile<0> +target: + A: TypeInfo<1> + B: TypeInfo<2> +==> +__main__: + target: MypyFile<0> +target: + A: TypeInfo<1> + C: TypeInfo<3> + +[case testTopLevelExpression] +import target +[file target.py] +class A: pass +A() +[file target.py.next] +class A: pass +class B: pass +A() +B() +[out] +MypyFile:1<0>( + Import:1(target)) +MypyFile:1<1>( + tmp/target.py + ClassDef:1<2>( + A + PassStmt:1<3>()) + 
ExpressionStmt:2<4>( + CallExpr:2<5>( + NameExpr(A [target.A<6>]) + Args()))) +==> +MypyFile:1<0>( + Import:1(target)) +MypyFile:1<1>( + tmp/target.py + ClassDef:1<7>( + A + PassStmt:1<8>()) + ClassDef:2<9>( + B + PassStmt:2<10>()) + ExpressionStmt:3<11>( + CallExpr:3<12>( + NameExpr(A [target.A<6>]) + Args())) + ExpressionStmt:4<13>( + CallExpr:4<14>( + NameExpr(B [target.B<15>]) + Args()))) + +[case testExpression_types] +import target +[file target.py] +class A: pass +def f(a: A) -> None: + 1 + a +[file target.py.next] +class A: pass +def f(a: A) -> None: + a + 1 +[out] +IntExpr:3: builtins.int<0> +NameExpr:4: target.A<1> +==> +NameExpr:3: target.A<1> +IntExpr:4: builtins.int<0> + +[case testClassAttribute_types] +import target +[file target.py] +class A: + def f(self) -> None: + self.x = A() + self.x + self.y = 1 + self.y +[file target.py.next] +class A: + def f(self) -> None: + self.y = 1 + self.y + self.x = A() + self.x +[out] +CallExpr:3: target.A<0> +MemberExpr:3: target.A<0> +NameExpr:3: def () -> target.A<0> +NameExpr:3: target.A<0> +MemberExpr:4: target.A<0> +NameExpr:4: target.A<0> +IntExpr:5: builtins.int<1> +MemberExpr:5: builtins.int<1> +NameExpr:5: target.A<0> +MemberExpr:6: builtins.int<1> +NameExpr:6: target.A<0> +==> +IntExpr:3: builtins.int<1> +MemberExpr:3: builtins.int<1> +NameExpr:3: target.A<0> +MemberExpr:4: builtins.int<1> +NameExpr:4: target.A<0> +CallExpr:5: target.A<0> +MemberExpr:5: target.A<0> +NameExpr:5: def () -> target.A<0> +NameExpr:5: target.A<0> +MemberExpr:6: target.A<0> +NameExpr:6: target.A<0> + +[case testMethod_types] +import target +[file target.py] +class A: + def f(self) -> A: + return self.f() +[file target.py.next] +class A: + # Extra line to change line numbers + def f(self) -> A: + return self.f() +[out] +CallExpr:3: target.A<0> +MemberExpr:3: def () -> target.A<0> +NameExpr:3: target.A<0> +==> +CallExpr:4: target.A<0> +MemberExpr:4: def () -> target.A<0> +NameExpr:4: target.A<0> + +[case testRenameFunction] +import 
target +[file target.py] +def f() -> int: pass +[file target.py.next] +def g() -> int: pass +[out] +MypyFile:1<0>( + Import:1(target)) +MypyFile:1<1>( + tmp/target.py + FuncDef:1<2>( + f + def () -> builtins.int<3> + Block:1<4>( + PassStmt:1<5>()))) +==> +MypyFile:1<0>( + Import:1(target)) +MypyFile:1<1>( + tmp/target.py + FuncDef:1<6>( + g + def () -> builtins.int<3> + Block:1<7>( + PassStmt:1<8>()))) + +[case testRenameFunction_symtable] +import target +[file target.py] +def f() -> int: pass +[file target.py.next] +def g() -> int: pass +[out] +__main__: + target: MypyFile<0> +target: + f: FuncDef<1> +==> +__main__: + target: MypyFile<0> +target: + g: FuncDef<2> From c690fec1735d45e89f661245806a9a6798e9742d Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 9 Feb 2017 16:54:28 +0000 Subject: [PATCH 02/44] Attempt to fix tests on Windows --- mypy/test/testdiff.py | 2 +- mypy/test/testfinegrained.py | 2 ++ test-data/unit/fine-grained.test | 6 +++--- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/mypy/test/testdiff.py b/mypy/test/testdiff.py index f379a3735ce7..aa427dbdb9d8 100644 --- a/mypy/test/testdiff.py +++ b/mypy/test/testdiff.py @@ -34,7 +34,7 @@ def cases(cls) -> List[DataDrivenTestCase]: def run_case(self, testcase: DataDrivenTestCase) -> None: first_src = '\n'.join(testcase.input) files_dict = dict(testcase.files) - second_src = files_dict['tmp/next.py'] + second_src = files_dict[os.path.join('tmp', 'next.py')] messages1, files1 = self.build(first_src) messages2, files2 = self.build(second_src) diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py index eeabf40b41bb..9056c45ba31c 100644 --- a/mypy/test/testfinegrained.py +++ b/mypy/test/testfinegrained.py @@ -65,6 +65,8 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: modules.append(module) new_messages = update_build(manager, graph, deps, modules) + new_messages = [re.sub('^tmp' + re.escape(os.sep), '', message) + for message in new_messages] 
a.append('==') a.extend(new_messages) diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 1edfae58d186..4328c74258de 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -168,7 +168,7 @@ def g(a: str) -> None: m.f('') # E [out] == -tmp/n.py:3: error: Too few arguments for "f" +n.py:3: error: Too few arguments for "f" == -tmp/n.py:3: error: Argument 1 to "f" has incompatible type "str"; expected "int" -tmp/m.py:3: error: Too few arguments for "g" +n.py:3: error: Argument 1 to "f" has incompatible type "str"; expected "int" +m.py:3: error: Too few arguments for "g" From 38d69aaf924576468cb7d15079b2db20d079f698 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 10 Feb 2017 12:13:57 +0000 Subject: [PATCH 03/44] Try to work around travis failure --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 4c25d0e4bdd6..81d28f01e2c0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,5 +16,5 @@ install: - python setup.py install script: - - python runtests.py -x lint + - python runtests.py -x lint -j16 - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8; fi From 3b98a1de161e8f47b9344284c47be2f41929c77b Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 2 Mar 2017 17:40:01 +0000 Subject: [PATCH 04/44] Add missing import --- mypy/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/util.py b/mypy/util.py index d3f6cf0ac298..1e8e31898d23 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -3,7 +3,7 @@ import re import subprocess from xml.sax.saxutils import escape -from typing import TypeVar, List, Tuple, Optional, Sequence +from typing import TypeVar, List, Tuple, Optional, Sequence, Dict T = TypeVar('T') From 4f3b19247bb12e3acd33addf6461bfe0c34d2fb3 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 2 Mar 2017 17:40:40 +0000 Subject: [PATCH 05/44] Another attempt to fix the travis build --- .travis.yml | 2 
+- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 81d28f01e2c0..29ce77b353e7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,5 +16,5 @@ install: - python setup.py install script: - - python runtests.py -x lint -j16 + - python runtests.py -x lint -j4 - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8; fi From da6631c8d1a23002fa79c64fe7b83dc347be472d Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 3 Mar 2017 17:17:24 +0000 Subject: [PATCH 06/44] Fix typos --- mypy/server/astdiff.py | 2 +- mypy/server/aststrip.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py index 141ea65b145c..3be3f00fa57d 100644 --- a/mypy/server/astdiff.py +++ b/mypy/server/astdiff.py @@ -117,7 +117,7 @@ class IdenticalTypeVisitor(TypeVisitor[bool]): * If one of the types is not valid for whatever reason, they are considered different. - * Sometimes require types to be structurally identical, even if the + * Sometimes require types to be structurally identical, even if they are semantically the same type. 
""" diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py index 92e23fa781ef..32f6fb7fe0bc 100644 --- a/mypy/server/aststrip.py +++ b/mypy/server/aststrip.py @@ -1,4 +1,4 @@ -"""Strip AST from from semantic and type information.""" +"""Strip AST from semantic and type information.""" from typing import Union From 27992781e1ba642c66986d967072e9de531072b6 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 9 Feb 2017 17:49:13 +0000 Subject: [PATCH 07/44] Support multiple rounds of event propagation --- mypy/server/update.py | 75 +++++++++++++++++++++----------- mypy/test/testfinegrained.py | 1 + test-data/unit/fine-grained.test | 19 ++++++++ 3 files changed, 69 insertions(+), 26 deletions(-) diff --git a/mypy/server/update.py b/mypy/server/update.py index 78df007cd0a6..cec4203d3bfc 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -43,7 +43,6 @@ Major todo items: -- Support multiple rounds of change propagation - Support multiple type checking passes - Always reprocess targets with errors, even if they aren't explicitly stale @@ -54,9 +53,9 @@ from mypy.build import BuildManager, State from mypy.checker import DeferredNode from mypy.errors import Errors -from mypy.nodes import MypyFile, FuncDef, TypeInfo, Expression, SymbolNode +from mypy.nodes import MypyFile, FuncDef, TypeInfo, Expression, SymbolNode, Var from mypy.types import Type -from mypy.server.astdiff import compare_symbol_tables +from mypy.server.astdiff import compare_symbol_tables, is_identical_type from mypy.server.astmerge import merge_asts from mypy.server.aststrip import strip_target from mypy.server.deps import get_dependencies @@ -194,29 +193,53 @@ def propagate_changes_using_dependencies( up_to_date_modules: Set[str]) -> None: # TODO: Multiple propagation passes # TODO: Multiple type checking passes - - todo = find_targets_recursive(triggered, deps, manager.modules, up_to_date_modules) - - for id, nodes in todo.items(): - assert id not in up_to_date_modules - 
file_node = manager.modules[id] - for deferred in nodes: - node = deferred.node - # Strip semantic analysis information - strip_target(node) - # We don't redo the first pass, because it only does local things. - semantic_analyzer = manager.semantic_analyzer - with semantic_analyzer.file_context( - file_node=file_node, - fnam=file_node.path, - options=manager.options, - active_type=deferred.active_typeinfo): - # Second pass - manager.semantic_analyzer.refresh_partial(node) - # Third pass - manager.semantic_analyzer_pass3.refresh_partial(node) - # Type check - graph[id].type_checker.check_second_pass(list(nodes)) # TODO: check return value + # TODO: Restrict the number of iterations to some maximum to avoid infinite loops + + # Propagate changes until nothing visible has changed during the last + # iteration. + while triggered: + todo = find_targets_recursive(triggered, deps, manager.modules, up_to_date_modules) + + # TODO: Preserve order (set is not optimal) + new_triggered = set() + for id, nodes in todo.items(): + assert id not in up_to_date_modules + file_node = manager.modules[id] + for deferred in nodes: + node = deferred.node + # Strip semantic analysis information + strip_target(node) + # We don't redo the first pass, because it only does local things. + semantic_analyzer = manager.semantic_analyzer + with semantic_analyzer.file_context( + file_node=file_node, + fnam=file_node.path, + options=manager.options, + active_type=deferred.active_typeinfo): + # Second pass + manager.semantic_analyzer.refresh_partial(node) + # Third pass + manager.semantic_analyzer_pass3.refresh_partial(node) + info = deferred.active_typeinfo + if info: + old_types = {name: node.node.type + for name, node in info.names.items() + if isinstance(node.node, Var)} + # Type check + graph[id].type_checker.check_second_pass(list(nodes)) # TODO: check return value + if info: + # Check if we need to propagate any attribute type changes further. 
+ # TODO: Also consider module-level attribute type changes here. + for name, node in info.names.items(): + if (name in old_types and + not is_identical_type(node.node.type, old_types[name])): + # Type checking a method changed an attribute type. + new_triggered.add(make_trigger('{}.{}'.format(info.fullname(), name))) + # Changes elsewhere may require us to reprocess modules that were + # previously considered up to date. For example, there may be a + # dependency loop that loops back to an originally processed module. + up_to_date_modules = set() + triggered = new_triggered def find_targets_recursive( diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py index 9056c45ba31c..3e5eced595a8 100644 --- a/mypy/test/testfinegrained.py +++ b/mypy/test/testfinegrained.py @@ -86,6 +86,7 @@ def build(self, source: str) -> Tuple[List[str], BuildManager, Graph]: alt_lib_path=test_temp_dir) except CompileError as e: # TODO: We need a manager and a graph in this case as well + assert False, str('\n'.join(e.messages)) return e.messages, None, None return result.errors, result.manager, result.graph diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 4328c74258de..59b418feae54 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -172,3 +172,22 @@ n.py:3: error: Too few arguments for "f" == n.py:3: error: Argument 1 to "f" has incompatible type "str"; expected "int" m.py:3: error: Too few arguments for "g" + +[case testTwoRounds] +import m +def h(a: m.A) -> int: + return a.x +[file m.py] +import n +class A: + def g(self, b: n.B) -> None: + self.x = b.f() +[file n.py] +class B: + def f(self) -> int: pass +[file n.py.2] +class B: + def f(self) -> str: pass +[out] +== +main:3: error: Incompatible return value type (got "str", expected "int") From 28d82793a1055ae0b030a14ab9734702fce98b85 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sat, 11 Feb 2017 11:59:38 +0000 Subject: [PATCH 08/44] Keep 
track of targets that generated an error In fine-grained incremental mode keep reporting errors for targets with errors even if the targets weren't explicitly triggered during successive iterations. We currently repeatedly type check these targets. Alternatively, we could perhaps just keep generating the errors without actually re-type-checking the targets, but there's a risk that it will be hard to always do this correctly, so we are starting with this simple-minded approach which is good enough for now. --- mypy/build.py | 10 +- mypy/checker.py | 4 +- mypy/errors.py | 84 +++++++++++++--- mypy/fastparse.py | 2 +- mypy/fastparse2.py | 2 +- mypy/semanal.py | 8 +- mypy/server/target.py | 3 + mypy/server/update.py | 96 ++++++++++-------- mypy/test/testfinegrained.py | 6 +- test-data/unit/fine-grained.test | 161 +++++++++++++++++++++++++++++++ 10 files changed, 309 insertions(+), 67 deletions(-) create mode 100644 mypy/server/target.py diff --git a/mypy/build.py b/mypy/build.py index 1419a843ba2f..682fa1e00398 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -475,7 +475,7 @@ def parse_file(self, id: str, path: str, source: str, ignore_errors: bool) -> My return tree def module_not_found(self, path: str, line: int, id: str) -> None: - self.errors.set_file(path) + self.errors.set_file(path, id) stub_msg = "(Stub files are from https://github.com/python/typeshed)" if ((self.options.python_version[0] == 2 and moduleinfo.is_py2_std_lib_module(id)) or (self.options.python_version[0] >= 3 and moduleinfo.is_py3_std_lib_module(id))): @@ -1231,7 +1231,7 @@ def skipping_ancestor(self, id: str, path: str, ancestor_for: 'State') -> None: # so we'd need to cache the decision. 
manager = self.manager manager.errors.set_import_context([]) - manager.errors.set_file(ancestor_for.xpath) + manager.errors.set_file(ancestor_for.xpath, ancestor_for.id) manager.errors.report(-1, -1, "Ancestor package '%s' ignored" % (id,), severity='note', only_once=True) manager.errors.report(-1, -1, @@ -1243,7 +1243,7 @@ def skipping_module(self, id: str, path: str) -> None: manager = self.manager save_import_context = manager.errors.import_context() manager.errors.set_import_context(self.caller_state.import_context) - manager.errors.set_file(self.caller_state.xpath) + manager.errors.set_file(self.caller_state.xpath, self.caller_state.id) line = self.caller_line manager.errors.report(line, 0, "Import of '%s' ignored" % (id,), @@ -1430,7 +1430,7 @@ def parse_file(self) -> None: continue if id == '': # Must be from a relative import. - manager.errors.set_file(self.xpath) + manager.errors.set_file(self.xpath, self.id) manager.errors.report(line, 0, "No parent module -- cannot perform relative import", blocker=True) @@ -1635,7 +1635,7 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph: except ModuleNotFound: continue if st.id in graph: - manager.errors.set_file(st.xpath) + manager.errors.set_file(st.xpath, st.id) manager.errors.report(-1, -1, "Duplicate module named '%s'" % st.id) manager.errors.raise_error() graph[st.id] = st diff --git a/mypy/checker.py b/mypy/checker.py index f79b0ba09cc8..11f3329ebd90 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -169,7 +169,7 @@ def check_first_pass(self) -> None: Deferred functions will be processed by check_second_pass(). 
""" - self.errors.set_file(self.path) + self.errors.set_file(self.path, self.tree.fullname()) with self.enter_partial_types(): with self.binder.top_frame_context(): for d in self.tree.defs: @@ -196,7 +196,7 @@ def check_second_pass(self, todo: List[DeferredNode] = None) -> bool: """ if not todo and not self.deferred_nodes: return False - self.errors.set_file(self.path) + self.errors.set_file(self.path, self.tree.fullname()) self.pass_num += 1 if not todo: todo = self.deferred_nodes diff --git a/mypy/errors.py b/mypy/errors.py index 30527c19ab02..f0fadaeed035 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -4,7 +4,7 @@ from collections import OrderedDict, defaultdict from contextlib import contextmanager -from typing import Tuple, List, TypeVar, Set, Dict, Iterator +from typing import Tuple, List, TypeVar, Set, Dict, Iterator, Optional from mypy.options import Options @@ -22,6 +22,9 @@ class ErrorInfo: # The source file that was the source of this error. file = '' + # The fully-qualified id of the source module for this error. + module = None # type: Optional[str] + # The name of the type in which this error is located at. type = '' # Unqualified, may be None @@ -46,12 +49,26 @@ class ErrorInfo: # Only report this particular messages once per program. 
only_once = False - def __init__(self, import_ctx: List[Tuple[str, int]], file: str, typ: str, - function_or_member: str, line: int, column: int, severity: str, - message: str, blocker: bool, only_once: bool, - origin: Tuple[str, int] = None) -> None: + # Fine-grained incremental target where this was reported + target = None # type: Optional[str] + + def __init__(self, + import_ctx: List[Tuple[str, int]], + file: str, + module: Optional[str], + typ: str, + function_or_member: str, + line: int, + column: int, + severity: str, + message: str, + blocker: bool, + only_once: bool, + origin: Tuple[str, int] = None, + target: str = None) -> None: self.import_ctx = import_ctx self.file = file + self.module = module self.type = typ self.function_or_member = function_or_member self.line = line @@ -61,6 +78,7 @@ def __init__(self, import_ctx: List[Tuple[str, int]], file: str, typ: str, self.blocker = blocker self.only_once = only_once self.origin = origin or (file, line) + self.target = target class Errors: @@ -106,9 +124,14 @@ class Errors: # Set to True to show "In function "foo":" messages. show_error_context = False # type: bool - # Set to True to show column numbers in error messages + # Set to True to show column numbers in error messages. show_column_numbers = False # type: bool + # Stack of active fine-grained incremental checking targets within + # a module. The first item is always the current module id. + # (See mypy.server.update for more about targets.) 
+ target = None # type: List[str] + def __init__(self, show_error_context: bool = False, show_column_numbers: bool = False) -> None: self.show_error_context = show_error_context @@ -125,6 +148,7 @@ def initialize(self) -> None: self.used_ignored_lines = defaultdict(set) self.ignored_files = set() self.only_once_messages = set() + self.target = [] def reset(self) -> None: self.initialize() @@ -135,6 +159,7 @@ def copy(self) -> 'Errors': new.import_ctx = self.import_ctx[:] new.type_name = self.type_name[:] new.function_or_member = self.function_or_member[:] + new.target = self.target[:] return new def set_ignore_prefix(self, prefix: str) -> None: @@ -149,8 +174,8 @@ def simplify_path(self, file: str) -> str: file = os.path.normpath(file) return remove_path_prefix(file, self.ignore_prefix) - def set_file(self, file: str, ignored_lines: Set[int] = None) -> None: - """Set the path of the current file.""" + def set_file(self, file: str, module: Optional[str], ignored_lines: Set[int] = None) -> None: + """Set the path and module id of the current file.""" # The path will be simplified later, in render_messages. That way # * 'file' is always a key that uniquely identifies a source file # that mypy read (simplified paths might not be unique); and @@ -158,6 +183,8 @@ def set_file(self, file: str, ignored_lines: Set[int] = None) -> None: # reporting errors for files other than the one currently being # processed. 
self.file = file + if module: + self.target = [module] def set_file_ignored_lines(self, file: str, ignored_lines: Set[int] = None, @@ -168,10 +195,12 @@ def set_file_ignored_lines(self, file: str, def push_function(self, name: str) -> None: """Set the current function or member short name (it can be None).""" + self.push_target_component(name) self.function_or_member.append(name) def pop_function(self) -> None: self.function_or_member.pop() + self.pop_target_component() @contextmanager def enter_function(self, name: str) -> Iterator[None]: @@ -181,10 +210,30 @@ def enter_function(self, name: str) -> Iterator[None]: def push_type(self, name: str) -> None: """Set the short name of the current type (it can be None).""" + self.push_target_component(name) self.type_name.append(name) def pop_type(self) -> None: self.type_name.pop() + self.pop_target_component() + + def push_target_component(self, name: str) -> None: + if self.target and not self.function_or_member[-1]: + self.target.append('{}.{}'.format(self.target[-1], name)) + + def pop_target_component(self) -> None: + if self.target and not self.function_or_member[-1]: + self.target.pop() + + def current_target(self) -> Optional[str]: + if self.target: + return self.target[-1] + return None + + def current_module(self) -> Optional[str]: + if self.target: + return self.target[0] + return None @contextmanager def enter_type(self, name: str) -> Iterator[None]: @@ -220,10 +269,11 @@ def report(self, line: int, column: int, message: str, blocker: bool = False, type = None # Omit type context if nested function if file is None: file = self.file - info = ErrorInfo(self.import_context(), file, type, + info = ErrorInfo(self.import_context(), file, self.current_module(), type, self.function_or_member[-1], line, column, severity, message, blocker, only_once, - origin=(self.file, origin_line) if origin_line else None) + origin=(self.file, origin_line) if origin_line else None, + target=self.current_target()) 
self.add_error_info(info) def add_error_info(self, info: ErrorInfo) -> None: @@ -247,9 +297,9 @@ def generate_unused_ignore_notes(self) -> None: if not self.is_typeshed_file(file): for line in ignored_lines - self.used_ignored_lines[file]: # Don't use report since add_error_info will ignore the error! - info = ErrorInfo(self.import_context(), file, None, None, - line, -1, 'note', "unused 'type: ignore' comment", - False, False) + info = ErrorInfo(self.import_context(), file, self.current_module(), None, + None, line, -1, 'note', "unused 'type: ignore' comment", + False, False) self.error_info.append(info) def is_typeshed_file(self, file: str) -> bool: @@ -303,6 +353,14 @@ def messages(self) -> List[str]: a.append(s) return a + def targets(self) -> Set[str]: + """Return a set of all targets that contain errors.""" + # TODO: Make sure that either target is always defined or that not being defined + # is okay for fine-grained incremental checking. + return set(info.target + for info in self.error_info + if info.target) + def render_messages(self, errors: List[ErrorInfo]) -> List[Tuple[str, int, int, str, str]]: """Translate the messages into a sequence of tuples. 
diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 1699f351f4f0..19619cf58c6b 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -71,7 +71,7 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None, if errors is None: errors = Errors() raise_on_error = True - errors.set_file('' if fnam is None else fnam) + errors.set_file('' if fnam is None else fnam, None) is_stub_file = bool(fnam) and fnam.endswith('.pyi') try: assert pyversion[0] >= 3 or is_stub_file diff --git a/mypy/fastparse2.py b/mypy/fastparse2.py index a8039766384d..aca04187e57c 100644 --- a/mypy/fastparse2.py +++ b/mypy/fastparse2.py @@ -87,7 +87,7 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None, if errors is None: errors = Errors() raise_on_error = True - errors.set_file('' if fnam is None else fnam) + errors.set_file('' if fnam is None else fnam, None) is_stub_file = bool(fnam) and fnam.endswith('.pyi') try: assert pyversion[0] < 3 and not is_stub_file diff --git a/mypy/semanal.py b/mypy/semanal.py index c5eb313862c6..7615df6eb460 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -237,7 +237,7 @@ def __init__(self, def visit_file(self, file_node: MypyFile, fnam: str, options: Options) -> None: self.options = options - self.errors.set_file(fnam) + self.errors.set_file(fnam, file_node.fullname()) self.cur_mod_node = file_node self.cur_mod_id = file_node.fullname() self.is_stub_file = fnam.lower().endswith('.pyi') @@ -287,7 +287,7 @@ def file_context(self, file_node: MypyFile, fnam: str, options: Options, active_type: Optional[TypeInfo]) -> Iterator[None]: # TODO: Use this above in visit_file self.options = options - self.errors.set_file(fnam) + self.errors.set_file(fnam, file_node.fullname()) self.cur_mod_node = file_node self.cur_mod_id = file_node.fullname() self.is_stub_file = fnam.lower().endswith('.pyi') @@ -3400,7 +3400,7 @@ def visit_file(self, file: MypyFile, fnam: str, mod_id: str, options: Options) - self.pyversion = 
options.python_version self.platform = options.platform sem.cur_mod_id = mod_id - sem.errors.set_file(fnam) + sem.errors.set_file(fnam, mod_id) sem.globals = SymbolTable() sem.global_decls = [set()] sem.nonlocal_decls = [set()] @@ -3645,7 +3645,7 @@ def __init__(self, modules: Dict[str, MypyFile], errors: Errors) -> None: self.errors = errors def visit_file(self, file_node: MypyFile, fnam: str, options: Options) -> None: - self.errors.set_file(fnam) + self.errors.set_file(fnam, file_node.fullname()) self.options = options self.accept(file_node) diff --git a/mypy/server/target.py b/mypy/server/target.py new file mode 100644 index 000000000000..860ba4a908d2 --- /dev/null +++ b/mypy/server/target.py @@ -0,0 +1,3 @@ +def module_prefix(target: str) -> str: + # TODO: This assumes no nested modules. + return target.split('.', 1)[0] diff --git a/mypy/server/update.py b/mypy/server/update.py index cec4203d3bfc..4b2f5170855c 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -44,8 +44,6 @@ Major todo items: - Support multiple type checking passes -- Always reprocess targets with errors, even if they aren't explicitly - stale """ from typing import Dict, List, Set @@ -60,9 +58,53 @@ from mypy.server.aststrip import strip_target from mypy.server.deps import get_dependencies from mypy.server.subexpr import get_subexpressions +from mypy.server.target import module_prefix from mypy.server.trigger import make_trigger +class FineGrainedBuildManager: + def __init__(self, + manager: BuildManager, + graph: Dict[str, State]) -> None: + self.manager = manager + self.graph = graph + self.deps = get_all_dependencies(manager) + self.previous_targets_with_errors = manager.errors.targets() + + def update(self, changed_modules: List[str]) -> List[str]: + """Update previous build result by processing changed modules. + + Also propagate changes to other modules as needed, but only process + those parts of other modules that are affected by the changes. 
Retain + the existing ASTs and symbol tables of unaffected modules. + + TODO: What about blocking errors? + + Args: + manager: State of the build + graph: Additional state of the build + deps: Fine-grained dependcy map for the build (mutated by this function) + changed_modules: Modules changed since the previous update/build (assume + this is correct; not validated here) + + Returns: + A list of errors. + """ + manager = self.manager + old_modules = dict(manager.modules) + manager.errors.reset() + new_modules = build_incremental_step(manager, changed_modules) + # TODO: What to do with stale dependencies? + update_dependencies(new_modules, self.deps, manager.all_types) + triggered = calculate_active_triggers(manager, old_modules, new_modules) + replace_modules_with_new_variants(manager, old_modules, new_modules) + propagate_changes_using_dependencies(manager, self.graph, self.deps, triggered, + set(changed_modules), + self.previous_targets_with_errors) + self.previous_targets_with_errors = manager.errors.targets() + return manager.errors.messages() + + def get_all_dependencies(manager: BuildManager) -> Dict[str, Set[str]]: """Return the fine-grained dependency map for an entire build.""" deps = {} # type: Dict[str, Set[str]] @@ -70,39 +112,6 @@ def get_all_dependencies(manager: BuildManager) -> Dict[str, Set[str]]: return deps -def update_build(manager: BuildManager, - graph: Dict[str, State], - deps: Dict[str, Set[str]], - changed_modules: List[str]) -> List[str]: - """Update previous build result by processing changed modules. - - Also propagate changes to other modules as needed, but only process - those parts of other modules that are affected by the changes. Retain - the existing ASTs and symbol tables of unaffected modules. - - TODO: What about blocking errors? 
- - Args: - manager: State of the build - graph: Additional state of the build - deps: Fine-grained dependcy map for the build (mutated by this function) - changed_modules: Modules changed since the previous update/build (assume - this is correct; not validated here) - - Returns: - A list of errors. - """ - old_modules = dict(manager.modules) - manager.errors.reset() - new_modules = build_incremental_step(manager, changed_modules) - # TODO: What to do with stale dependencies? - update_dependencies(new_modules, deps, manager.all_types) - triggered = calculate_active_triggers(manager, old_modules, new_modules) - replace_modules_with_new_variants(manager, old_modules, new_modules) - propagate_changes_using_dependencies(manager, graph, deps, triggered, set(changed_modules)) - return manager.errors.messages() - - def build_incremental_step(manager: BuildManager, changed_modules: List[str]) -> Dict[str, MypyFile]: """Build new versions of changed modules only. @@ -190,7 +199,8 @@ def propagate_changes_using_dependencies( graph: Dict[str, State], deps: Dict[str, Set[str]], triggered: Set[str], - up_to_date_modules: Set[str]) -> None: + up_to_date_modules: Set[str], + targets_with_errors: Set[str]) -> None: # TODO: Multiple propagation passes # TODO: Multiple type checking passes # TODO: Restrict the number of iterations to some maximum to avoid infinite loops @@ -199,6 +209,15 @@ def propagate_changes_using_dependencies( # iteration. while triggered: todo = find_targets_recursive(triggered, deps, manager.modules, up_to_date_modules) + # Also process targets that used to have errors, as otherwise some + # errors might be lost. 
+ for target in targets_with_errors: + id = module_prefix(target) + if id not in up_to_date_modules: + if id not in todo: + todo[id] = set() + todo[id].add(lookup_target(manager.modules, target)) + targets_with_errors = set() # TODO: Preserve order (set is not optimal) new_triggered = set() @@ -230,9 +249,10 @@ def propagate_changes_using_dependencies( if info: # Check if we need to propagate any attribute type changes further. # TODO: Also consider module-level attribute type changes here. - for name, node in info.names.items(): + for name, member_node in info.names.items(): if (name in old_types and - not is_identical_type(node.node.type, old_types[name])): + (not isinstance(member_node.node, Var) or + not is_identical_type(member_node.node.type, old_types[name]))): # Type checking a method changed an attribute type. new_triggered.add(make_trigger('{}.{}'.format(info.fullname(), name))) # Changes elsewhere may require us to reprocess modules that were diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py index 3e5eced595a8..047811dea4e5 100644 --- a/mypy/test/testfinegrained.py +++ b/mypy/test/testfinegrained.py @@ -19,7 +19,7 @@ from mypy.options import Options from mypy.server.astmerge import merge_asts from mypy.server.subexpr import get_subexpressions -from mypy.server.update import get_all_dependencies, update_build +from mypy.server.update import FineGrainedBuildManager from mypy.strconv import StrConv, indent from mypy.test.config import test_temp_dir, test_data_prefix from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite @@ -54,7 +54,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: if messages: a.extend(messages) - deps = get_all_dependencies(manager) + fine_grained_manager = FineGrainedBuildManager(manager, graph) steps = find_steps() for changed_paths in steps: @@ -64,7 +64,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: shutil.copy(path, new_path) modules.append(module) - new_messages 
= update_build(manager, graph, deps, modules) + new_messages = fine_grained_manager.update(modules) new_messages = [re.sub('^tmp' + re.escape(os.sep), '', message) for message in new_messages] diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 59b418feae54..56162e93af47 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -191,3 +191,164 @@ class B: [out] == main:3: error: Incompatible return value type (got "str", expected "int") + +[case testFixTypeError] +import m +def f(a: m.A) -> None: + a.f(a) +[file m.py] +class A: + def f(self, a: 'A') -> None: pass +[file m.py.2] +class A: + def f(self) -> None: pass +[file m.py.3] +class A: + def f(self, a: 'A') -> None: pass +[out] +== +main:3: error: Too many arguments for "f" of "A" +== + +[case testFixTypeError2] +import m +def f(a: m.A) -> None: + a.f() +[file m.py] +class A: + def f(self) -> None: pass +[file m.py.2] +class A: + def g(self) -> None: pass +[file m.py.3] +class A: + def f(self) -> None: pass +[out] +== +main:3: error: "A" has no attribute "f" +== + +[case testFixSemanticAnalysisError] +import m +def f() -> None: + m.A() +[file m.py] +class A: pass +[file m.py.2] +class B: pass +[file m.py.3] +class B: pass +[builtins fixtures/fine_grained.pyi] +[out] +== +main:3: error: "module" has no attribute "A" +== + +[case testContinueToReportTypeCheckError] +import m +def f(a: m.A) -> None: + a.f() +def g(a: m.A) -> None: + a.g() +[file m.py] +class A: + def f(self) -> None: pass + def g(self) -> None: pass +[file m.py.2] +class A: pass +[file m.py.3] +class A: + def f(self) -> None: pass +[out] +== +main:3: error: "A" has no attribute "f" +main:5: error: "A" has no attribute "g" +== +main:5: error: "A" has no attribute "g" + +[case testContinueToReportSemanticAnalysisError] +import m +def f() -> None: + m.A() +def g() -> None: + m.B() +[file m.py] +class A: pass +class B: pass +[file m.py.2] +[file m.py.3] +class A: pass +[builtins 
fixtures/fine_grained.pyi] +[out] +== +main:3: error: "module" has no attribute "A" +main:5: error: "module" has no attribute "B" +== +main:5: error: "module" has no attribute "B" + +[case testContinueToReportErrorAtTopLevel] +import n +import m +m.A().f() +[file n.py] +import m +m.A().g() +[file m.py] +class A: + def f(self) -> None: pass + def g(self) -> None: pass +[file m.py.2] +class A: pass +[file m.py.3] +class A: + def f(self) -> None: pass +[out] +== +n.py:2: error: "A" has no attribute "g" +main:3: error: "A" has no attribute "f" +== +n.py:2: error: "A" has no attribute "g" + +[case testContinueToReportErrorInMethod] +import m +class C: + def f(self, a: m.A) -> None: + a.f() + def g(self, a: m.A) -> None: + a.g() +[file m.py] +class A: + def f(self) -> None: pass + def g(self) -> None: pass +[file m.py.2] +class A: pass +[file m.py.3] +class A: + def f(self) -> None: pass +[out] +== +main:4: error: "A" has no attribute "f" +main:6: error: "A" has no attribute "g" +== +main:6: error: "A" has no attribute "g" + +[case testInitialBatchGeneratedError] +import m +def g() -> None: + m.f() +def h() -> None: + m.g() +[file m.py] +def f(x: object) -> None: pass +[file m.py.2] +def f() -> None: pass +[file m.py.3] +def f() -> None: pass +def g() -> None: pass +[builtins fixtures/fine_grained.pyi] +[out] +main:3: error: Too few arguments for "f" +main:5: error: "module" has no attribute "g" +== +main:5: error: "module" has no attribute "g" +== From f38024bff662c30869d5d4121f414f32ee6b17d3 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 3 Mar 2017 13:22:58 +0000 Subject: [PATCH 09/44] Refactor and continue reporting error if no changes --- mypy/server/update.py | 83 +++++++++++++++++--------------- test-data/unit/fine-grained.test | 16 +++++- 2 files changed, 59 insertions(+), 40 deletions(-) diff --git a/mypy/server/update.py b/mypy/server/update.py index 4b2f5170855c..362a862e5379 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -207,7 
+207,7 @@ def propagate_changes_using_dependencies( # Propagate changes until nothing visible has changed during the last # iteration. - while triggered: + while triggered or targets_with_errors: todo = find_targets_recursive(triggered, deps, manager.modules, up_to_date_modules) # Also process targets that used to have errors, as otherwise some # errors might be lost. @@ -217,49 +217,16 @@ def propagate_changes_using_dependencies( if id not in todo: todo[id] = set() todo[id].add(lookup_target(manager.modules, target)) - targets_with_errors = set() - + triggered = set() # TODO: Preserve order (set is not optimal) - new_triggered = set() - for id, nodes in todo.items(): + for id, nodes in sorted(todo.items(), key=lambda x: x[0]): assert id not in up_to_date_modules - file_node = manager.modules[id] - for deferred in nodes: - node = deferred.node - # Strip semantic analysis information - strip_target(node) - # We don't redo the first pass, because it only does local things. - semantic_analyzer = manager.semantic_analyzer - with semantic_analyzer.file_context( - file_node=file_node, - fnam=file_node.path, - options=manager.options, - active_type=deferred.active_typeinfo): - # Second pass - manager.semantic_analyzer.refresh_partial(node) - # Third pass - manager.semantic_analyzer_pass3.refresh_partial(node) - info = deferred.active_typeinfo - if info: - old_types = {name: node.node.type - for name, node in info.names.items() - if isinstance(node.node, Var)} - # Type check - graph[id].type_checker.check_second_pass(list(nodes)) # TODO: check return value - if info: - # Check if we need to propagate any attribute type changes further. - # TODO: Also consider module-level attribute type changes here. - for name, member_node in info.names.items(): - if (name in old_types and - (not isinstance(member_node.node, Var) or - not is_identical_type(member_node.node.type, old_types[name]))): - # Type checking a method changed an attribute type. 
- new_triggered.add(make_trigger('{}.{}'.format(info.fullname(), name))) + triggered |= reprocess_nodes(manager, graph, id, nodes) # Changes elsewhere may require us to reprocess modules that were # previously considered up to date. For example, there may be a # dependency loop that loops back to an originally processed module. up_to_date_modules = set() - triggered = new_triggered + targets_with_errors = set() def find_targets_recursive( @@ -298,6 +265,46 @@ def find_targets_recursive( return result +def reprocess_nodes(manager: BuildManager, + graph: Dict[str, State], + id: str, + nodes: List[DeferredNode]) -> Set[str]: + file_node = manager.modules[id] + for deferred in nodes: + node = deferred.node + # Strip semantic analysis information + strip_target(node) + # We don't redo the first pass, because it only does local things. + semantic_analyzer = manager.semantic_analyzer + with semantic_analyzer.file_context( + file_node=file_node, + fnam=file_node.path, + options=manager.options, + active_type=deferred.active_typeinfo): + # Second pass + manager.semantic_analyzer.refresh_partial(node) + # Third pass + manager.semantic_analyzer_pass3.refresh_partial(node) + info = deferred.active_typeinfo + if info: + old_types = {name: node.node.type + for name, node in info.names.items() + if isinstance(node.node, Var)} + # Type check + graph[id].type_checker.check_second_pass(list(nodes)) # TODO: check return value + new_triggered = set() + if info: + # Check if we need to propagate any attribute type changes further. + # TODO: Also consider module-level attribute type changes here. + for name, member_node in info.names.items(): + if (name in old_types and + (not isinstance(member_node.node, Var) or + not is_identical_type(member_node.node.type, old_types[name]))): + # Type checking a method changed an attribute type. 
+ new_triggered.add(make_trigger('{}.{}'.format(info.fullname(), name))) + return new_triggered + + def lookup_target(modules: Dict[str, MypyFile], target: str) -> DeferredNode: """Look up a target by fully-qualified name.""" components = target.split('.') diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 56162e93af47..11edf0d21718 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -237,7 +237,7 @@ class A: pass [file m.py.2] class B: pass [file m.py.3] -class B: pass +class A: pass [builtins fixtures/fine_grained.pyi] [out] == @@ -304,8 +304,8 @@ class A: def f(self) -> None: pass [out] == -n.py:2: error: "A" has no attribute "g" main:3: error: "A" has no attribute "f" +n.py:2: error: "A" has no attribute "g" == n.py:2: error: "A" has no attribute "g" @@ -352,3 +352,15 @@ main:5: error: "module" has no attribute "g" == main:5: error: "module" has no attribute "g" == + +[case testKeepReportingErrorIfNoChanges] +import m +def h() -> None: + m.g() +[file m.py] +[file m.py.2] +[builtins fixtures/fine_grained.pyi] +[out] +main:3: error: "module" has no attribute "g" +== +main:3: error: "module" has no attribute "g" From 01ed71c932237a1eaac7a8ac05581a268c7de4c5 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 3 Mar 2017 15:32:22 +0000 Subject: [PATCH 10/44] Fix bugs * Use up-to-date type maps instead of the global type map which will get out of sync. * Update dependencies of reprocessed targets. 
--- mypy/server/deps.py | 14 ++++++ mypy/server/update.py | 76 ++++++++++++++++++++------------ mypy/test/testmerge.py | 62 +++++++++++++++----------- test-data/unit/fine-grained.test | 15 +++++++ test-data/unit/merge.test | 6 +++ 5 files changed, 120 insertions(+), 53 deletions(-) diff --git a/mypy/server/deps.py b/mypy/server/deps.py index 4a1a7fc6ad97..3b950a7b1931 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -18,11 +18,25 @@ def get_dependencies(prefix: str, node: Node, type_map: Dict[Expression, Type]) -> Dict[str, Set[str]]: + """Get all dependencies of a node, recursively.""" visitor = DependencyVisitor(prefix, type_map) node.accept(visitor) return visitor.map +def get_dependencies_of_target(prefix: str, node: Node, + type_map: Dict[Expression, Type]) -> Dict[str, Set[str]]: + """Get dependencies of a target -- don't recurse into nested targets.""" + visitor = DependencyVisitor(prefix, type_map) + if isinstance(node, MypyFile): + for defn in node.defs: + if not isinstance(defn, (ClassDef, FuncDef)): + defn.accept(visitor) + else: + node.accept(visitor) + return visitor.map + + class DependencyVisitor(TraverserVisitor): + def __init__(self, prefix: str, type_map: Dict[Expression, Type]) -> None: self.stack = [prefix] diff --git a/mypy/server/update.py b/mypy/server/update.py index 362a862e5379..af321876c3b2 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -46,18 +46,19 @@ - Support multiple type checking passes """ -from typing import Dict, List, Set +from typing import Dict, List, Set, Tuple from mypy.build import BuildManager, State from mypy.checker import DeferredNode from mypy.errors import Errors -from mypy.nodes import MypyFile, FuncDef, TypeInfo, Expression, SymbolNode, Var +from mypy.nodes import ( + MypyFile, FuncDef, TypeInfo, Expression, SymbolNode, Var, FuncBase, +) from mypy.types import Type from mypy.server.astdiff import compare_symbol_tables, is_identical_type from mypy.server.astmerge import merge_asts from
mypy.server.aststrip import strip_target -from mypy.server.deps import get_dependencies -from mypy.server.subexpr import get_subexpressions +from mypy.server.deps import get_dependencies, get_dependencies_of_target from mypy.server.target import module_prefix from mypy.server.trigger import make_trigger @@ -68,7 +69,7 @@ def __init__(self, graph: Dict[str, State]) -> None: self.manager = manager self.graph = graph - self.deps = get_all_dependencies(manager) + self.deps = get_all_dependencies(manager, graph) self.previous_targets_with_errors = manager.errors.targets() def update(self, changed_modules: List[str]) -> List[str]: @@ -91,29 +92,31 @@ def update(self, changed_modules: List[str]) -> List[str]: A list of errors. """ manager = self.manager + graph = self.graph old_modules = dict(manager.modules) manager.errors.reset() - new_modules = build_incremental_step(manager, changed_modules) + new_modules, new_type_maps = build_incremental_step(manager, changed_modules) # TODO: What to do with stale dependencies? 
- update_dependencies(new_modules, self.deps, manager.all_types) triggered = calculate_active_triggers(manager, old_modules, new_modules) - replace_modules_with_new_variants(manager, old_modules, new_modules) - propagate_changes_using_dependencies(manager, self.graph, self.deps, triggered, + replace_modules_with_new_variants(manager, graph, old_modules, new_modules, new_type_maps) + update_dependencies(new_modules, self.deps, graph) + propagate_changes_using_dependencies(manager, graph, self.deps, triggered, set(changed_modules), self.previous_targets_with_errors) self.previous_targets_with_errors = manager.errors.targets() return manager.errors.messages() -def get_all_dependencies(manager: BuildManager) -> Dict[str, Set[str]]: +def get_all_dependencies(manager: BuildManager, graph: Dict[str, State]) -> Dict[str, Set[str]]: """Return the fine-grained dependency map for an entire build.""" deps = {} # type: Dict[str, Set[str]] - update_dependencies(manager.modules, deps, manager.all_types) + update_dependencies(manager.modules, deps, graph) return deps def build_incremental_step(manager: BuildManager, - changed_modules: List[str]) -> Dict[str, MypyFile]: + changed_modules: List[str]) -> Tuple[Dict[str, MypyFile], + Dict[str, Dict[Expression, Type]]]: """Build new versions of changed modules only. Return the new ASTs for the changed modules. They will be totally @@ -141,16 +144,16 @@ def build_incremental_step(manager: BuildManager, # TODO: state.write_cache()? # TODO: state.mark_as_rechecked()? 
- return {id: state.tree} + return {id: state.tree}, {id: state.type_checker.type_map} def update_dependencies(new_modules: Dict[str, MypyFile], deps: Dict[str, Set[str]], - type_map: Dict[Expression, Type]) -> None: + graph: Dict[str, State]) -> None: for id, node in new_modules.items(): module_deps = get_dependencies(prefix=id, node=node, - type_map=type_map) + type_map=graph[id].type_checker.type_map) for trigger, targets in module_deps.items(): deps.setdefault(trigger, set()).update(targets) @@ -171,8 +174,10 @@ def calculate_active_triggers(manager: BuildManager, def replace_modules_with_new_variants( manager: BuildManager, + graph: Dict[str, State], old_modules: Dict[str, MypyFile], - new_modules: Dict[str, MypyFile]) -> None: + new_modules: Dict[str, MypyFile], + new_type_maps: Dict[str, Dict[Expression, Type]]) -> None: """Replace modules with newly builds versions. Retain the identities of externally visible AST nodes in the @@ -183,15 +188,10 @@ def replace_modules_with_new_variants( propagate_changes_using_dependencies). """ for id in new_modules: - if id in old_modules: - # Remove nodes of old modules from the type map. 
- all_types = manager.all_types - for expr in get_subexpressions(old_modules[id]): - if expr in all_types: - del all_types[expr] merge_asts(old_modules[id], old_modules[id].names, new_modules[id], new_modules[id].names) manager.modules[id] = old_modules[id] + graph[id].type_checker.type_map = new_type_maps[id] def propagate_changes_using_dependencies( @@ -201,7 +201,6 @@ def propagate_changes_using_dependencies( triggered: Set[str], up_to_date_modules: Set[str], targets_with_errors: Set[str]) -> None: - # TODO: Multiple propagation passes # TODO: Multiple type checking passes # TODO: Restrict the number of iterations to some maximum to avoid infinite loops @@ -221,7 +220,7 @@ def propagate_changes_using_dependencies( # TODO: Preserve order (set is not optimal) for id, nodes in sorted(todo.items(), key=lambda x: x[0]): assert id not in up_to_date_modules - triggered |= reprocess_nodes(manager, graph, id, nodes) + triggered |= reprocess_nodes(manager, graph, id, nodes, deps) # Changes elsewhere may require us to reprocess modules that were # previously considered up to date. For example, there may be a # dependency loop that loops back to an originally processed module. @@ -267,9 +266,14 @@ def find_targets_recursive( def reprocess_nodes(manager: BuildManager, graph: Dict[str, State], - id: str, - nodes: List[DeferredNode]) -> Set[str]: - file_node = manager.modules[id] + module_id: str, + nodes: Set[DeferredNode], + deps: Dict[str, Set[str]]) -> Set[str]: + """Reprocess a set of nodes within a single module. + + Return fired triggers. 
+ """ + file_node = manager.modules[module_id] for deferred in nodes: node = deferred.node # Strip semantic analysis information @@ -291,7 +295,7 @@ def reprocess_nodes(manager: BuildManager, for name, node in info.names.items() if isinstance(node.node, Var)} # Type check - graph[id].type_checker.check_second_pass(list(nodes)) # TODO: check return value + graph[module_id].type_checker.check_second_pass(list(nodes)) # TODO: check return value new_triggered = set() if info: # Check if we need to propagate any attribute type changes further. @@ -302,9 +306,25 @@ def reprocess_nodes(manager: BuildManager, not is_identical_type(member_node.node.type, old_types[name]))): # Type checking a method changed an attribute type. new_triggered.add(make_trigger('{}.{}'.format(info.fullname(), name))) + update_deps(module_id, nodes, graph, deps) return new_triggered +def update_deps(module_id: str, + nodes: Set[DeferredNode], + graph: Dict[str, State], + deps: Dict[str, Set[str]]) -> None: + for deferred in nodes: + node = deferred.node + prefix = module_id + if isinstance(node, FuncBase) and node.info: + prefix += '.{}'.format(node.info.name()) + type_map = graph[module_id].type_checker.type_map + new_deps = get_dependencies_of_target(prefix, node, type_map) + for trigger, targets in new_deps.items(): + deps.setdefault(trigger, set()).update(targets) + + def lookup_target(modules: Dict[str, MypyFile], target: str) -> DeferredNode: """Look up a target by fully-qualified name.""" components = target.split('.') diff --git a/mypy/test/testmerge.py b/mypy/test/testmerge.py index f3ebd7f494eb..3fe5f60e5da1 100644 --- a/mypy/test/testmerge.py +++ b/mypy/test/testmerge.py @@ -5,9 +5,11 @@ from typing import List, Tuple, Dict from mypy import build -from mypy.build import BuildManager, BuildSource +from mypy.build import BuildManager, BuildSource, State from mypy.errors import Errors, CompileError -from mypy.nodes import Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression 
+from mypy.nodes import ( + Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression +) from mypy.options import Options from mypy.server.astmerge import merge_asts from mypy.server.subexpr import get_subexpressions @@ -61,7 +63,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: kind = AST main_src = '\n'.join(testcase.input) - messages, manager = self.build(main_src) + messages, manager, graph = self.build(main_src) a = [] if messages: @@ -70,29 +72,31 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: shutil.copy(os.path.join(test_temp_dir, 'target.py.next'), os.path.join(test_temp_dir, 'target.py')) - a.extend(self.dump(manager.modules, manager.all_types, kind)) + a.extend(self.dump(manager.modules, graph, kind)) old_modules = dict(manager.modules) old_subexpr = get_subexpressions(old_modules['target']) - new_file = self.build_increment(manager, 'target') + new_file, new_types = self.build_increment(manager, 'target') replace_modules_with_new_variants(manager, + graph, old_modules, - {'target': new_file}) + {'target': new_file}, + {'target': new_types}) a.append('==>') - a.extend(self.dump(manager.modules, manager.all_types, kind)) + a.extend(self.dump(manager.modules, graph, kind)) for expr in old_subexpr: # Verify that old AST nodes are removed from the expression type map. - assert expr not in manager.all_types + assert expr not in new_types assert_string_arrays_equal( testcase.output, a, 'Invalid output ({}, line {})'.format(testcase.file, testcase.line)) - def build(self, source: str) -> Tuple[List[str], BuildManager]: + def build(self, source: str) -> Tuple[List[str], BuildManager, Dict[str, State]]: options = Options() options.use_builtins_fixtures = True options.show_traceback = True @@ -102,14 +106,18 @@ def build(self, source: str) -> Tuple[List[str], BuildManager]: alt_lib_path=test_temp_dir) except CompileError as e: # TODO: Is it okay to return None? 
- return e.messages, None - return result.errors, result.manager - - def build_increment(self, manager: BuildManager, module_id: str) -> MypyFile: - module_dict = build_incremental_step(manager, [module_id]) - return module_dict[module_id] - - def dump(self, modules: Dict[str, MypyFile], type_map: Dict[Expression, Type], + return e.messages, None, {} + return result.errors, result.manager, result.graph + + def build_increment(self, manager: BuildManager, + module_id: str) -> Tuple[MypyFile, + Dict[Expression, Type]]: + module_dict, type_maps = build_incremental_step(manager, [module_id]) + return module_dict[module_id], type_maps[module_id] + + def dump(self, + modules: Dict[str, MypyFile], + graph: Dict[str, State], kind: str) -> List[str]: if kind == AST: return self.dump_asts(modules) @@ -118,7 +126,7 @@ def dump(self, modules: Dict[str, MypyFile], type_map: Dict[Expression, Type], elif kind == SYMTABLE: return self.dump_symbol_tables(modules) elif kind == TYPES: - return self.dump_types(type_map) + return self.dump_types(graph) assert False, 'Invalid kind %s' % kind def dump_asts(self, modules: Dict[str, MypyFile]) -> List[str]: @@ -172,14 +180,18 @@ def dump_typeinfo(self, info: TypeInfo) -> List[str]: type_str_conv=self.type_str_conv) return s.splitlines() - def dump_types(self, type_map: Dict[Expression, Type]) -> List[str]: + def dump_types(self, graph: Dict[str, State]) -> List[str]: a = [] # To make the results repeatable, we try to generate unique and # deterministic sort keys. 
- for expr in sorted(type_map, key=lambda n: (n.line, short_type(n), - str(n) + str(type_map[n]))): - typ = type_map[expr] - a.append('{}:{}: {}'.format(short_type(expr), - expr.line, - typ.accept(self.type_str_conv))) + for module_id in sorted(graph): + type_map = graph[module_id].type_checker.type_map + if type_map: + a.append('## {}'.format(module_id)) + for expr in sorted(type_map, key=lambda n: (n.line, short_type(n), + str(n) + str(type_map[n]))): + typ = type_map[expr] + a.append('{}:{}: {}'.format(short_type(expr), + expr.line, + typ.accept(self.type_str_conv))) return a diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 11edf0d21718..bf660e5368bf 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -364,3 +364,18 @@ def h() -> None: main:3: error: "module" has no attribute "g" == main:3: error: "module" has no attribute "g" + +[case testFixErrorAndReintroduce] +import m +def h() -> None: + m.g() +[file m.py] +[file m.py.2] +def g() -> None: pass +[file m.py.3] +[builtins fixtures/fine_grained.pyi] +[out] +main:3: error: "module" has no attribute "g" +== +== +main:3: error: "module" has no attribute "g" diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test index ff855cc8d661..6d6c724fea98 100644 --- a/test-data/unit/merge.test +++ b/test-data/unit/merge.test @@ -383,9 +383,11 @@ def f(a: A) -> None: a 1 [out] +## target IntExpr:3: builtins.int<0> NameExpr:4: target.A<1> ==> +## target NameExpr:3: target.A<1> IntExpr:4: builtins.int<0> @@ -406,6 +408,7 @@ class A: self.x = A() self.x [out] +## target CallExpr:3: target.A<0> MemberExpr:3: target.A<0> NameExpr:3: def () -> target.A<0> @@ -418,6 +421,7 @@ NameExpr:5: target.A<0> MemberExpr:6: builtins.int<1> NameExpr:6: target.A<0> ==> +## target IntExpr:3: builtins.int<1> MemberExpr:3: builtins.int<1> NameExpr:3: target.A<0> @@ -442,10 +446,12 @@ class A: def f(self) -> A: return self.f() [out] +## target CallExpr:3: 
target.A<0> MemberExpr:3: def () -> target.A<0> NameExpr:3: target.A<0> ==> +## target CallExpr:4: target.A<0> MemberExpr:4: def () -> target.A<0> NameExpr:4: target.A<0> From 709740a9cf48c198a97aaf8f15b123874d0447cb Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 7 Mar 2017 13:42:34 +0000 Subject: [PATCH 11/44] Create dependencies for inheritance --- mypy/server/deps.py | 18 +++++++++++++++++- test-data/unit/deps.test | 40 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+), 1 deletion(-) diff --git a/mypy/server/deps.py b/mypy/server/deps.py index 3b950a7b1931..89abebd272af 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -5,7 +5,7 @@ from mypy.checkmember import bind_self from mypy.nodes import ( Node, Expression, MypyFile, FuncDef, ClassDef, AssignmentStmt, NameExpr, MemberExpr, Import, - ImportFrom, LDEF + ImportFrom, TypeInfo, Var, LDEF ) from mypy.traverser import TraverserVisitor from mypy.types import ( @@ -63,6 +63,9 @@ def visit_func_def(self, o: FuncDef) -> None: for trigger in get_type_dependencies(signature): self.add_dependency(trigger) self.add_dependency(trigger, target=make_trigger(target)) + if o.info: + for base in non_trivial_bases(o.info): + self.add_dependency(make_trigger(base.fullname() + '.' + o.name())) super().visit_func_def(o) self.pop() @@ -74,6 +77,14 @@ def visit_class_def(self, o: ClassDef) -> None: # TODO: Add dependencies based on MRO and other attributes. super().visit_class_def(o) self.is_class = old_is_class + info = o.info + for name, node in info.names.items(): + if isinstance(node.node, Var): + for base in non_trivial_bases(info): + # If the type of an attribute changes in a base class, we make references + # to the attribute in the subclass stale. + self.add_dependency(make_trigger(base.fullname() + '.' + name), + target=make_trigger(info.fullname() + '.' 
+ name)) self.pop() def visit_import(self, o: Import) -> None: @@ -200,3 +211,8 @@ def visit_union_type(self, typ: UnionType) -> List[str]: def visit_void(self, typ: Void) -> List[str]: return [] + + +def non_trivial_bases(info: TypeInfo) -> List[TypeInfo]: + return [base for base in info.mro[1:] + if base.fullname() != 'builtins.object'] diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test index ca2c8b588494..a2a1d78325cc 100644 --- a/test-data/unit/deps.test +++ b/test-data/unit/deps.test @@ -69,3 +69,43 @@ def f() -> None: pass [out] -> m.g -> m, m.g + +[case testInheritanceSimple] +class A: + pass +class B(A): + pass +[out] + -> m.A, m.B + -> m.B + +[case testInheritanceWithMethodAndAttribute] +class A: + pass +class B(A): + def f(self) -> None: + self.x = 1 +[out] + -> m.B.f + -> + -> m.A, m.B + -> m.B.f + -> m.B + +[case testInheritanceWithMethodAndAttributeAndDeepHierarchy] +class A: + pass +class B(A): + pass +class C(B): + def f(self) -> None: + self.x = 1 +[out] + -> m.C.f + -> + -> m.A, m.B + -> m.C.f + -> + -> m.B, m.C + -> m.C.f + -> m.C From 3a67189ea0a8f6a3d05f8753db08591e9ec32e7a Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 7 Mar 2017 14:02:26 +0000 Subject: [PATCH 12/44] Detect differences in MRO --- mypy/server/astdiff.py | 9 +++++++-- test-data/unit/diff.test | 41 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 2 deletions(-) diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py index 3be3f00fa57d..aab3280cbc38 100644 --- a/mypy/server/astdiff.py +++ b/mypy/server/astdiff.py @@ -68,7 +68,6 @@ def is_similar_node_shallow(n: SymbolTableNode, m: SymbolTableNode) -> bool: is_identical_type(n.node.type, m.node.type)) if isinstance(n.node, TypeInfo) and isinstance(m.node, TypeInfo): # TODO: - # mro # type_vars # bases # _promote @@ -81,12 +80,18 @@ def is_similar_node_shallow(n: SymbolTableNode, m: SymbolTableNode) -> bool: nn.fallback_to_any == mn.fallback_to_any and nn.is_named_tuple == 
mn.is_named_tuple and nn.is_newtype == mn.is_newtype and - nn.alt_fullname == mn.alt_fullname) + nn.alt_fullname == mn.alt_fullname and + is_same_mro(nn.mro, mn.mro)) if isinstance(n.node, Var) and isinstance(m.node, Var): return is_identical_type(n.node.type, m.node.type) return True +def is_same_mro(mro1: List[TypeInfo], mro2: List[TypeInfo]) -> bool: + return (len(mro1) == len(mro2) + and all(x.fullname() == y.fullname() for x, y in zip(mro1, mro2))) + + def module_name(id: str) -> str: return id.rsplit('.', 1)[0] diff --git a/test-data/unit/diff.test b/test-data/unit/diff.test index cedf3fd58568..fcb5e656af99 100644 --- a/test-data/unit/diff.test +++ b/test-data/unit/diff.test @@ -123,3 +123,44 @@ class A: pass [file next.py] [out] __main__.A + +[case testAddBaseClass] +class A: pass +[file next.py] +class B: pass +class A(B): pass +[out] +__main__.A +__main__.B + +[case testChangeBaseClass] +class A: pass +class B: pass +class C(A): pass +[file next.py] +class A: pass +class B: pass +class C(B): pass +[out] +__main__.C + +[case testRemoveBaseClass] +class A: pass +class B(A): pass +[file next.py] +class A: pass +class B: pass +[out] +__main__.B + +[case testRemoveClassFromMiddleOfMro] +class A: pass +class B(A): pass +class C(B): pass +[file next.py] +class A: pass +class B: pass +class C(B): pass +[out] +__main__.B +__main__.C From 42a77b6a9d96a02b6dfe871323b2ffc2a3e74ed3 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 7 Mar 2017 16:00:56 +0000 Subject: [PATCH 13/44] Merge base classes and MRO --- mypy/nodes.py | 4 ++++ mypy/server/astmerge.py | 5 +++++ test-data/unit/merge.test | 45 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 54 insertions(+) diff --git a/mypy/nodes.py b/mypy/nodes.py index 697c6fd42bc6..85af20df869e 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -2132,6 +2132,7 @@ def __str__(self) -> str: def dump(self, str_conv: 'mypy.strconv.StrConv' = None, type_str_conv: 'mypy.types.TypeStrVisitor' = None) -> str: + """Return 
a string dump of the contents of the TypeInfo.""" if not str_conv: str_conv = mypy.strconv.StrConv() base = None # type: str @@ -2145,6 +2146,8 @@ def type_str(typ: 'mypy.types.Type') -> str: if self.bases: base = 'Bases({})'.format(', '.join(type_str(base) for base in self.bases)) + mro = 'Mro({})'.format(', '.join(item.fullname() + str_conv.format_id(item) + for item in self.mro)) names = [] for name in sorted(self.names): description = name + str_conv.format_id(self.names[name].node) @@ -2155,6 +2158,7 @@ def type_str(typ: 'mypy.types.Type') -> str: return mypy.strconv.dump_tagged( ['Name({})'.format(self.fullname()), base, + mro, ('Names', names)], head, str_conv=str_conv) diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py index f40035e371cc..dde80dcf1f3d 100644 --- a/mypy/server/astmerge.py +++ b/mypy/server/astmerge.py @@ -97,6 +97,11 @@ def visit_class_def(self, node: ClassDef) -> None: # TODO additional things like the MRO node.defs.body = self.replace_statements(node.defs.body) replace_nodes_in_symbol_table(node.info.names, self.replacements) + info = node.info + for i, item in enumerate(info.mro): + info.mro[i] = self.fixup(info.mro[i]) + for i, base in enumerate(info.bases): + self.fixup_type(info.bases[i]) super().visit_class_def(node) def visit_assignment_stmt(self, node: AssignmentStmt) -> None: diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test index 6d6c724fea98..f2f2f528160d 100644 --- a/test-data/unit/merge.test +++ b/test-data/unit/merge.test @@ -84,6 +84,7 @@ class A: TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) + Mro(target.A<0>, builtins.object<1>) Names( f<2> g<3>)) @@ -91,6 +92,7 @@ TypeInfo<0>( TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) + Mro(target.A<0>, builtins.object<1>) Names( f<2> h<4>)) @@ -275,6 +277,7 @@ class A: TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) + Mro(target.A<0>, builtins.object<1>) Names( f<2> x<3> (builtins.int<4>) @@ -283,6 +286,7 @@ TypeInfo<0>( TypeInfo<0>( 
Name(target.A) Bases(builtins.object<1>) + Mro(target.A<0>, builtins.object<1>) Names( f<2> x<3> (builtins.int<4>) @@ -499,3 +503,44 @@ __main__: target: MypyFile<0> target: g: FuncDef<2> + +[case testMergeWithBaseClass_typeinfo] +import target +[file target.py] +class A: pass +class B(A): + def f(self) -> None: pass +[file target.py.next] +class C: pass +class A: pass +class B(A): + def f(self) -> None: pass +[out] +TypeInfo<0>( + Name(target.A) + Bases(builtins.object<1>) + Mro(target.A<0>, builtins.object<1>) + Names()) +TypeInfo<2>( + Name(target.B) + Bases(target.A<0>) + Mro(target.B<2>, target.A<0>, builtins.object<1>) + Names( + f<3>)) +==> +TypeInfo<4>( + Name(target.C) + Bases(builtins.object<1>) + Mro(target.C<4>, builtins.object<1>) + Names()) +TypeInfo<0>( + Name(target.A) + Bases(builtins.object<1>) + Mro(target.A<0>, builtins.object<1>) + Names()) +TypeInfo<2>( + Name(target.B) + Bases(target.A<0>) + Mro(target.B<2>, target.A<0>, builtins.object<1>) + Names( + f<3>)) From 093d17df7cb977a65872ad23a71074bb87a82786 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 7 Mar 2017 16:44:51 +0000 Subject: [PATCH 14/44] Add support for minimal debug output --- mypy/server/update.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/mypy/server/update.py b/mypy/server/update.py index af321876c3b2..83fc71294710 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -63,6 +63,10 @@ from mypy.server.trigger import make_trigger +# If True, print out debug logging output. +DEBUG = False + + class FineGrainedBuildManager: def __init__(self, manager: BuildManager, @@ -91,6 +95,8 @@ def update(self, changed_modules: List[str]) -> List[str]: Returns: A list of errors. 
""" + if DEBUG: + print('==== update ====') manager = self.manager graph = self.graph old_modules = dict(manager.modules) @@ -215,6 +221,8 @@ def propagate_changes_using_dependencies( if id not in up_to_date_modules: if id not in todo: todo[id] = set() + if DEBUG: + print('process', target) todo[id].add(lookup_target(manager.modules, target)) triggered = set() # TODO: Preserve order (set is not optimal) @@ -258,6 +266,8 @@ def find_targets_recursive( continue if module_id not in result: result[module_id] = set() + if DEBUG: + print('process', target) deferred = lookup_target(modules, target) result[module_id].add(deferred) From 5a6eb053d9d536dfb76ab600065002edf6c3bfb4 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 7 Mar 2017 16:45:09 +0000 Subject: [PATCH 15/44] Add test cases --- test-data/unit/diff.test | 30 +++++++++++++++++++++++ test-data/unit/fine-grained.test | 41 ++++++++++++++++++++++++++++++++ 2 files changed, 71 insertions(+) diff --git a/test-data/unit/diff.test b/test-data/unit/diff.test index fcb5e656af99..296b947db823 100644 --- a/test-data/unit/diff.test +++ b/test-data/unit/diff.test @@ -71,6 +71,36 @@ class A: [out] __main__.A.y +[case testAddAttribute] +class A: pass +[file next.py] +class A: + def f(self) -> None: + self.x = 1 +[out] +__main__.A.f +__main__.A.x + +[case testAddAttribute2] +class A: + def f(self) -> None: pass +[file next.py] +class A: + def f(self) -> None: + self.x = 1 +[out] +__main__.A.x + +[case testRemoveAttribute] +class A: + def f(self) -> None: + self.x = 1 +[file next.py] +class A: pass +[out] +__main__.A.f +__main__.A.x + [case testAddMethod] class A: def f(self) -> None: pass diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index bf660e5368bf..5f65f9e9fc09 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -379,3 +379,44 @@ main:3: error: "module" has no attribute "g" == == main:3: error: "module" has no attribute "g" + +[case 
testAddBaseClassMethodCausingInvalidOverride] +import m +class B(m.A): + def f(self) -> None: pass +[file m.py] +class A: pass +[file m.py.2] +class A: + def f(self) -> int: pass +[out] +== +main:3: error: Return type of "f" incompatible with supertype "A" + +[case testModifyBaseClassMethodCausingInvalidOverride] +import m +class B(m.A): + def f(self) -> None: pass +[file m.py] +class A: + def f(self) -> None: pass +[file m.py.2] +class A: + def f(self) -> int: pass +[out] +== +main:3: error: Return type of "f" incompatible with supertype "A" + +[case testAddBaseClassAttributeCausingErrorInSubclass-skip] +import m +class B(m.A): + def f(self) -> None: + self.x = 1 +[file m.py] +class A: pass +[file m.py.2] +class A: + def g(self) -> None: + self.x = 'a' +[out] +TODO From 2763794341cd7c0ab42c24588408a5381fb9eebf Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 7 Mar 2017 18:29:38 +0000 Subject: [PATCH 16/44] Fix to attributes, inheritance and fine-grained incremental Also change order of processing stale nodes in fine-grained incremental to be more similar to normal mode. 
--- mypy/server/aststrip.py | 55 ++++++++++++++++++++++++++++---- mypy/server/update.py | 26 +++++++++------ test-data/unit/fine-grained.test | 15 +++++++-- 3 files changed, 77 insertions(+), 19 deletions(-) diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py index 32f6fb7fe0bc..2709cdf4ae8b 100644 --- a/mypy/server/aststrip.py +++ b/mypy/server/aststrip.py @@ -1,9 +1,11 @@ -"""Strip AST from semantic and type information.""" +"""Strip AST from semantic information.""" -from typing import Union +import contextlib +from typing import Union, Iterator from mypy.nodes import ( - Node, FuncDef, NameExpr, MemberExpr, RefExpr, MypyFile, FuncItem, ClassDef, AssignmentStmt + Node, FuncDef, NameExpr, MemberExpr, RefExpr, MypyFile, FuncItem, ClassDef, AssignmentStmt, + TypeInfo ) from mypy.traverser import TraverserVisitor @@ -13,6 +15,9 @@ def strip_target(node: Union[MypyFile, FuncItem]) -> None: class NodeStripVisitor(TraverserVisitor): + def __init__(self) -> None: + self.type = None # type: TypeInfo + def strip_target(self, node: Union[MypyFile, FuncItem]) -> None: """Strip a fine-grained incremental mode target.""" if isinstance(node, MypyFile): @@ -29,21 +34,57 @@ def strip_top_level(self, file_node: MypyFile) -> None: def visit_func_def(self, node: FuncDef) -> None: node.expanded = [] node.type = node.unanalyzed_type - super().visit_func_def(node) + with self.enter_class(node.info) if node.info else nothing(): + super().visit_func_def(node) + + @contextlib.contextmanager + def enter_class(self, info: TypeInfo) -> Iterator[None]: + old = self.type + self.type = info + yield + self.type = old def visit_assignment_stmt(self, node: AssignmentStmt) -> None: node.type = node.unanalyzed_type super().visit_assignment_stmt(node) def visit_name_expr(self, node: NameExpr) -> None: - self.visit_ref_expr(node) + self.strip_ref_expr(node) def visit_member_expr(self, node: MemberExpr) -> None: - self.visit_ref_expr(node) + self.strip_ref_expr(node) + if 
self.is_duplicate_attribute_def(node): + # This is marked as an instance variable definition but a base class + # defines an attribute with the same name, and we can't have + # multiple definitions for an attribute. Defer to the base class + # definition. + del self.type.names[node.name] + node.is_def = False + node.def_var = None - def visit_ref_expr(self, node: RefExpr) -> None: + def is_duplicate_attribute_def(self, node: MemberExpr) -> bool: + if not node.is_def or node.name not in self.type.names: + return False + return any(info.get(node.name) is not None for info in self.type.mro[1:]) + + def strip_ref_expr(self, node: RefExpr) -> None: node.kind = None node.node = None node.fullname = None # TODO: handle more node types + + + +def is_self_member_ref(memberexpr: MemberExpr) -> bool: + """Does memberexpr refer to an attribute of self?""" + # TODO: Merge with is_self_member_ref in semanal.py. + if not isinstance(memberexpr.expr, NameExpr): + return False + node = memberexpr.expr.node + return isinstance(node, Var) and node.is_self + + +@contextlib.contextmanager +def nothing() -> Iterator[None]: + yield diff --git a/mypy/server/update.py b/mypy/server/update.py index 83fc71294710..69072f8214a9 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -284,27 +284,33 @@ def reprocess_nodes(manager: BuildManager, Return fired triggers. """ file_node = manager.modules[module_id] + # Strip semantic analysis information. + for deferred in nodes: + strip_target(deferred.node) + semantic_analyzer = manager.semantic_analyzer + # Second pass of semantic analysis. We don't redo the first pass, because it only + # does local things that won't go stale. + for deferred in nodes: + with semantic_analyzer.file_context( + file_node=file_node, + fnam=file_node.path, + options=manager.options, + active_type=deferred.active_typeinfo): + manager.semantic_analyzer.refresh_partial(deferred.node) + # Third pass of semantic analysis. 
for deferred in nodes: - node = deferred.node - # Strip semantic analysis information - strip_target(node) - # We don't redo the first pass, because it only does local things. - semantic_analyzer = manager.semantic_analyzer with semantic_analyzer.file_context( file_node=file_node, fnam=file_node.path, options=manager.options, active_type=deferred.active_typeinfo): - # Second pass - manager.semantic_analyzer.refresh_partial(node) - # Third pass - manager.semantic_analyzer_pass3.refresh_partial(node) + manager.semantic_analyzer_pass3.refresh_partial(deferred.node) info = deferred.active_typeinfo if info: old_types = {name: node.node.type for name, node in info.names.items() if isinstance(node.node, Var)} - # Type check + # Type check. graph[module_id].type_checker.check_second_pass(list(nodes)) # TODO: check return value new_triggered = set() if info: diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 5f65f9e9fc09..2323e70a3bfb 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -407,11 +407,19 @@ class A: == main:3: error: Return type of "f" incompatible with supertype "A" -[case testAddBaseClassAttributeCausingErrorInSubclass-skip] +[case testAddBaseClassAttributeCausingErrorInSubclass] import m class B(m.A): + def a(self) -> None: + x = 1 + x = self.x + def f(self) -> None: self.x = 1 + + def z(self) -> None: + x = 1 + x = self.x [file m.py] class A: pass [file m.py.2] @@ -419,4 +427,7 @@ class A: def g(self) -> None: self.x = 'a' [out] -TODO +== +main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") +main:8: error: Incompatible types in assignment (expression has type "int", variable has type "str") +main:12: error: Incompatible types in assignment (expression has type "str", variable has type "int") From d9cfc3f0cedaee1f74f73246bd1e0012adead470 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 8 Mar 2017 13:51:33 +0000 Subject: [PATCH 
17/44] Fix handling changes to attributes in base classes --- mypy/server/deps.py | 5 +++++ mypy/server/update.py | 2 ++ test-data/unit/deps.test | 18 +++++++++++++++++ test-data/unit/fine-grained.test | 33 ++++++++++++++++++++++++++++++++ 4 files changed, 58 insertions(+) diff --git a/mypy/server/deps.py b/mypy/server/deps.py index 89abebd272af..6a0dcb00f77a 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -85,6 +85,11 @@ def visit_class_def(self, o: ClassDef) -> None: # to the attribute in the subclass stale. self.add_dependency(make_trigger(base.fullname() + '.' + name), target=make_trigger(info.fullname() + '.' + name)) + for base in non_trivial_bases(info): + for name, node in base.names.items(): + if isinstance(node.node, Var): + self.add_dependency(make_trigger(base.fullname() + '.' + name), + target=make_trigger(info.fullname() + '.' + name)) self.pop() def visit_import(self, o: Import) -> None: diff --git a/mypy/server/update.py b/mypy/server/update.py index 69072f8214a9..b0e0790ca982 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -104,6 +104,8 @@ def update(self, changed_modules: List[str]) -> List[str]: new_modules, new_type_maps = build_incremental_step(manager, changed_modules) # TODO: What to do with stale dependencies? 
triggered = calculate_active_triggers(manager, old_modules, new_modules) + if DEBUG: + print('triggered:', sorted(triggered)) replace_modules_with_new_variants(manager, graph, old_modules, new_modules, new_type_maps) update_dependencies(new_modules, self.deps, graph) propagate_changes_using_dependencies(manager, graph, self.deps, triggered, diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test index a2a1d78325cc..e479e038fab2 100644 --- a/test-data/unit/deps.test +++ b/test-data/unit/deps.test @@ -109,3 +109,21 @@ class C(B): -> m.B, m.C -> m.C.f -> m.C + +[case testInheritAttributeFromAnotherModule] +import n +class B(n.A): + def f(sel) -> None: + a = 1 + a = sel.x +[file n.py] +class A: + def g(self) -> None: + self.x = 1 +[out] + -> m.B.f + -> m.B + -> m.B.f + -> + -> m.B + -> m, m.B diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 2323e70a3bfb..4bc508fe7b95 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -431,3 +431,36 @@ class A: main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:8: error: Incompatible types in assignment (expression has type "int", variable has type "str") main:12: error: Incompatible types in assignment (expression has type "str", variable has type "int") + +[case testChangeBaseClassAttributeType] +import m +class B(m.A): + def f(sel) -> None: + sel.x = 1 +[file m.py] +class A: + def g(self) -> None: + self.x = 1 +[file m.py.2] +class A: + def g(self) -> None: + self.x = 'a' +[out] +== +main:4: error: Incompatible types in assignment (expression has type "int", variable has type "str") + +[case testRemoveAttributeInBaseClass] +import m +class B(m.A): + def f(self) -> None: + a = 1 + a = self.x +[file m.py] +class A: + def g(self) -> None: + self.x = 1 +[file m.py.2] +class A: pass +[out] +== +main:5: error: "B" has no attribute "x" From 07bc54b1457075d2d842f32d3dfbe78d14bc9085 Mon Sep 17 00:00:00 
2001 From: Jukka Lehtosalo Date: Wed, 8 Mar 2017 14:21:51 +0000 Subject: [PATCH 18/44] Fixes to dependency generation --- mypy/server/deps.py | 5 ++--- test-data/unit/deps.test | 19 ++++++++++++++++--- test-data/unit/fine-grained.test | 15 +++++++++++++++ 3 files changed, 33 insertions(+), 6 deletions(-) diff --git a/mypy/server/deps.py b/mypy/server/deps.py index 6a0dcb00f77a..aa79c2aace37 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -87,9 +87,8 @@ def visit_class_def(self, o: ClassDef) -> None: target=make_trigger(info.fullname() + '.' + name)) for base in non_trivial_bases(info): for name, node in base.names.items(): - if isinstance(node.node, Var): - self.add_dependency(make_trigger(base.fullname() + '.' + name), - target=make_trigger(info.fullname() + '.' + name)) + self.add_dependency(make_trigger(base.fullname() + '.' + name), + target=make_trigger(info.fullname() + '.' + name)) self.pop() def visit_import(self, o: Import) -> None: diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test index e479e038fab2..a430c815f947 100644 --- a/test-data/unit/deps.test +++ b/test-data/unit/deps.test @@ -110,12 +110,12 @@ class C(B): -> m.C.f -> m.C -[case testInheritAttributeFromAnotherModule] +[case testInheritAttribute] import n class B(n.A): - def f(sel) -> None: + def f(self) -> None: a = 1 - a = sel.x + a = self.x [file n.py] class A: def g(self) -> None: @@ -127,3 +127,16 @@ class A: -> -> m.B -> m, m.B + +[case testInheritMethod] +class A: + def g(self) -> None: pass +class B(A): + def f(self) -> None: + self.g() +[out] + -> m.B.f + -> + -> m.A, m.B + -> m.B.f + -> m.B diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 4bc508fe7b95..2dc30f131663 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -464,3 +464,18 @@ class A: pass [out] == main:5: error: "B" has no attribute "x" + +[case testTestSignatureOfInheritedMethod] +import m +class B(m.A): + def f(self) -> None: 
+ self.g() +[file m.py] +class A: + def g(self) -> None: pass +[file m.py.2] +class A: + def g(self, a: 'A') -> None: pass +[out] +== +main:4: error: Too few arguments for "g" of "A" From d95864b60cd8b26e2a486f09f8d84cfd4e04a357 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 8 Mar 2017 14:36:28 +0000 Subject: [PATCH 19/44] Support classes as fine-grained incremental targets A class target covers the entire class body and all definitions within it. --- mypy/server/update.py | 22 +++++++++++++++++----- test-data/unit/fine-grained.test | 20 ++++++++++++++++++++ 2 files changed, 37 insertions(+), 5 deletions(-) diff --git a/mypy/server/update.py b/mypy/server/update.py index b0e0790ca982..fcef3a765fba 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -52,7 +52,7 @@ from mypy.checker import DeferredNode from mypy.errors import Errors from mypy.nodes import ( - MypyFile, FuncDef, TypeInfo, Expression, SymbolNode, Var, FuncBase, + MypyFile, FuncDef, TypeInfo, Expression, SymbolNode, Var, FuncBase, ClassDef ) from mypy.types import Type from mypy.server.astdiff import compare_symbol_tables, is_identical_type @@ -225,7 +225,7 @@ def propagate_changes_using_dependencies( todo[id] = set() if DEBUG: print('process', target) - todo[id].add(lookup_target(manager.modules, target)) + todo[id].update(lookup_target(manager.modules, target)) triggered = set() # TODO: Preserve order (set is not optimal) for id, nodes in sorted(todo.items(), key=lambda x: x[0]): @@ -271,7 +271,7 @@ def find_targets_recursive( if DEBUG: print('process', target) deferred = lookup_target(modules, target) - result[module_id].add(deferred) + result[module_id].update(deferred) return result @@ -343,10 +343,11 @@ def update_deps(module_id: str, deps.setdefault(trigger, set()).update(targets) -def lookup_target(modules: Dict[str, MypyFile], target: str) -> DeferredNode: +def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNode]: """Look up a target by 
fully-qualified name.""" components = target.split('.') node = modules[components[0]] # type: SymbolNode + prev = None # type: SymbolNode active_class = None active_class_name = None for c in components[1:]: @@ -355,6 +356,17 @@ def lookup_target(modules: Dict[str, MypyFile], target: str) -> DeferredNode: active_class_name = node.name() # TODO: Is it possible for the assertion to fail? assert isinstance(node, (MypyFile, TypeInfo)) + prev = node node = node.names[c].node + if isinstance(node, TypeInfo): + # A ClassDef target covers the body of the class and everything defined + # within it. To get the body we include the entire surrounding target, + # typically a module top-level, since we don't support processing class + # bodies as separate entitites for simplicity. + result = [DeferredNode(prev, None, None)] # TODO: Nested classes + for name, node in node.names.items(): + if isinstance(node, FuncDef): + result.extend(lookup_target(modules, target + '.' + name)) + return result assert isinstance(node, (FuncDef, MypyFile)) - return DeferredNode(node, active_class_name, active_class) + return [DeferredNode(node, active_class_name, active_class)] diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 2dc30f131663..8356ccd14b68 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -479,3 +479,23 @@ class A: [out] == main:4: error: Too few arguments for "g" of "A" + +[case testRemoveBaseClass] +import m +class A(m.B): + def f(self) -> None: + self.g() + self.x + self.y = 1 +[file m.py] +class C: + def g(self) -> None: + self.x = 1 +class B(C): pass +[file m.py.2] +class C: pass +class B: pass +[out] +== +main:4: error: "A" has no attribute "g" +main:5: error: "A" has no attribute "x" From baeb421a87676c741a54ca0474458b413e97995e Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 8 Mar 2017 17:28:10 +0000 Subject: [PATCH 20/44] Fix inheritance test case --- test-data/unit/deps.test | 1 + 1 file 
changed, 1 insertion(+) diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test index a430c815f947..0c534428af35 100644 --- a/test-data/unit/deps.test +++ b/test-data/unit/deps.test @@ -124,6 +124,7 @@ class A: -> m.B.f -> m.B -> m.B.f + -> -> -> m.B -> m, m.B From 41c81aad741bec32917b082099df84f9458eb7fc Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 8 Mar 2017 17:30:02 +0000 Subject: [PATCH 21/44] Add minimal package support --- mypy/server/astdiff.py | 6 ++-- mypy/server/astmerge.py | 5 +-- mypy/server/target.py | 20 ++++++++++-- mypy/server/update.py | 24 +++++++++------ test-data/unit/deps.test | 53 ++++++++++++++++++++++++++++++++ test-data/unit/fine-grained.test | 28 +++++++++++++++++ 6 files changed, 120 insertions(+), 16 deletions(-) diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py index aab3280cbc38..be5d86305334 100644 --- a/mypy/server/astdiff.py +++ b/mypy/server/astdiff.py @@ -37,8 +37,9 @@ def compare_symbol_tables(name_prefix: str, table1: SymbolTable, table2: SymbolT node1 = table1[name].node node2 = table2[name].node - if node1.fullname() and module_name(node1.fullname()) != name_prefix: + if node1.fullname() and get_prefix(node1.fullname()) != name_prefix: # Only look inside things defined in the current module. + # TODO: This probably doesn't work generally... continue if isinstance(node1, TypeInfo) and isinstance(node2, TypeInfo): @@ -92,7 +93,8 @@ def is_same_mro(mro1: List[TypeInfo], mro2: List[TypeInfo]) -> bool: and all(x.fullname() == y.fullname() for x, y in zip(mro1, mro2))) -def module_name(id: str) -> str: +def get_prefix(id: str) -> str: + """Drop the final component of a qualified name (e.g. 
('x.y' -> 'x').""" return id.rsplit('.', 1)[0] diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py index dde80dcf1f3d..6ef251934a79 100644 --- a/mypy/server/astmerge.py +++ b/mypy/server/astmerge.py @@ -43,7 +43,7 @@ def replacement_map_from_symbol_table( replacements = {} for name, node in old.items(): if (name in new and (node.kind == MDEF - or module_prefix(node.node.fullname()) == prefix)): + or get_prefix(node.node.fullname()) == prefix)): new_node = new[name] if (type(new_node.node) == type(node.node) # noqa and new_node.node.fullname() == node.node.fullname() @@ -220,5 +220,6 @@ def replace_nodes_in_symbol_table(symbols: SymbolTable, node.node.info = cast(TypeInfo, replacements.get(node.node.info, node.node.info)) -def module_prefix(fullname: str) -> str: +def get_prefix(fullname: str) -> str: + """Drop the final component of a qualified name (e.g. ('x.y' -> 'x').""" return fullname.rsplit('.', 1)[0] diff --git a/mypy/server/target.py b/mypy/server/target.py index 860ba4a908d2..cbd7675e1b50 100644 --- a/mypy/server/target.py +++ b/mypy/server/target.py @@ -1,3 +1,17 @@ -def module_prefix(target: str) -> str: - # TODO: This assumes no nested modules. 
- return target.split('.', 1)[0] +from typing import Iterable, Tuple + + +def module_prefix(modules: Iterable[str], target: str) -> str: + return split_target(modules, target)[0] + + +def split_target(modules: Iterable[str], target: str) -> Tuple[str, str]: + remaining = [] + while True: + if target in modules: + return target, '.'.join(remaining) + components = target.rsplit('.', 1) + if len(components) == 1: + assert False, 'Cannot find module prefix for {}'.format(target) + target = components[0] + remaining.insert(0, components[1]) diff --git a/mypy/server/update.py b/mypy/server/update.py index fcef3a765fba..ebde0500803a 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -46,7 +46,7 @@ - Support multiple type checking passes """ -from typing import Dict, List, Set, Tuple +from typing import Dict, List, Set, Tuple, Iterable from mypy.build import BuildManager, State from mypy.checker import DeferredNode @@ -59,7 +59,7 @@ from mypy.server.astmerge import merge_asts from mypy.server.aststrip import strip_target from mypy.server.deps import get_dependencies, get_dependencies_of_target -from mypy.server.target import module_prefix +from mypy.server.target import module_prefix, split_target from mypy.server.trigger import make_trigger @@ -110,7 +110,8 @@ def update(self, changed_modules: List[str]) -> List[str]: update_dependencies(new_modules, self.deps, graph) propagate_changes_using_dependencies(manager, graph, self.deps, triggered, set(changed_modules), - self.previous_targets_with_errors) + self.previous_targets_with_errors, + graph) self.previous_targets_with_errors = manager.errors.targets() return manager.errors.messages() @@ -208,7 +209,8 @@ def propagate_changes_using_dependencies( deps: Dict[str, Set[str]], triggered: Set[str], up_to_date_modules: Set[str], - targets_with_errors: Set[str]) -> None: + targets_with_errors: Set[str], + modules: Iterable[str]) -> None: # TODO: Multiple type checking passes # TODO: Restrict the number of 
iterations to some maximum to avoid infinite loops @@ -219,7 +221,7 @@ def propagate_changes_using_dependencies( # Also process targets that used to have errors, as otherwise some # errors might be lost. for target in targets_with_errors: - id = module_prefix(target) + id = module_prefix(modules, target) if id not in up_to_date_modules: if id not in todo: todo[id] = set() @@ -262,7 +264,7 @@ def find_targets_recursive( if target.startswith('<'): worklist |= deps.get(target, set()) - processed else: - module_id = target.split('.', 1)[0] + module_id = module_prefix(modules, target) if module_id in up_to_date_modules: # Already processed. continue @@ -345,12 +347,16 @@ def update_deps(module_id: str, def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNode]: """Look up a target by fully-qualified name.""" - components = target.split('.') - node = modules[components[0]] # type: SymbolNode + module, rest = split_target(modules, target) + if rest: + components = rest.split('.') + else: + components = [] + node = modules[module] # type: SymbolNode prev = None # type: SymbolNode active_class = None active_class_name = None - for c in components[1:]: + for c in components: if isinstance(node, TypeInfo): active_class = node active_class_name = node.name() diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test index 0c534428af35..e2a16eeb329e 100644 --- a/test-data/unit/deps.test +++ b/test-data/unit/deps.test @@ -141,3 +141,56 @@ class B(A): -> m.A, m.B -> m.B.f -> m.B + +[case testPackage] +import a.b +def f() -> None: + a.b.g() +[file a/__init__.py] +[file a/b.py] +def g() -> None: pass +[out] + -> m.f + -> m, m.f + -> m.f + +[case testClassInPackage] +import a.b +def f(x: a.b.A) -> None: + x.g() + x.y +[file a/__init__.py] +[file a/b.py] +class A: + def g(self) -> None: + self.y = 1 +[out] + -> m.f + -> m.f + -> , m.f + -> m + +[case testPackage__init__] +import a +def f() -> None: + a.g() +[file a/__init__.py] +def g() -> None: pass 
+[out] + -> m.f + -> m, m.f + +[case testClassInPackage__init__] +import a +def f(x: a.A) -> None: + x.g() + x.y +[file a/__init__.py] +class A: + def g(self) -> None: + self.y = 1 +[out] + -> m.f + -> m.f + -> , m.f + -> m diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 8356ccd14b68..29b7ef9abeed 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -499,3 +499,31 @@ class B: pass == main:4: error: "A" has no attribute "g" main:5: error: "A" has no attribute "x" + +[case testChangeInPackage] +import m.n +def f() -> None: + m.n.g() +[file m/__init__.py] +[file m/n.py] +def g() -> None: pass +[file m/n.py.2] +def g(x: int) -> None: pass +[out] +== +main:3: error: Too few arguments for "g" + +[case testTriggerTargetInPackage] +import m.n +[file m/__init__.py] +[file m/n.py] +import a +def f() -> None: + a.g() +[file a.py] +def g() -> None: pass +[file a.py.2] +def g(x: int) -> None: pass +[out] +== +m/n.py:3: error: Too few arguments for "g" From 82afe2267e4127098ff62832c16f5f3f5f7f30a4 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 8 Mar 2017 19:54:22 +0000 Subject: [PATCH 22/44] Add tests for __init__ modules --- mypy/test/testfinegrained.py | 1 + test-data/unit/fine-grained.test | 30 ++++++++++++++++++++++++++++++ 2 files changed, 31 insertions(+) diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py index 047811dea4e5..fd19840967bf 100644 --- a/mypy/test/testfinegrained.py +++ b/mypy/test/testfinegrained.py @@ -109,6 +109,7 @@ def find_steps() -> List[List[Tuple[str, str]]]: assert num >= 2 name = re.sub(r'\.py.*', '', filename) module = '.'.join(dnparts + [name]) + module = re.sub(r'\.__init__$', '', module) path = os.path.join(dn, filename) steps.setdefault(num, []).append((module, path)) max_step = max(steps) diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 29b7ef9abeed..0fc3b4754482 100644 --- 
a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -527,3 +527,33 @@ def g(x: int) -> None: pass [out] == m/n.py:3: error: Too few arguments for "g" + +[case testChangeInPackage__init__] +import m +import m.n +def f() -> None: + m.g() +[file m/__init__.py] +def g() -> None: pass +[file m/__init__.py.2] +def g(x: int) -> None: pass +[file m/n.py] +[out] +== +main:3: error: Too few arguments for "g" + +[case testTriggerTargetInPackage__init__] +import m +import m.n +[file m/__init__.py] +import a +def f() -> None: + a.g() +[file a.py] +def g() -> None: pass +[file a.py.2] +def g(x: int) -> None: pass +[file m/n.py] +[out] +== +m/__init__.py:3: error: Too few arguments for "g" From e5199da909589b05a32e64d88c1c7c5fba6b832a Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 8 Mar 2017 19:54:57 +0000 Subject: [PATCH 23/44] Add test cases for module attributes --- test-data/unit/deps.test | 13 ++++++++++++- test-data/unit/fine-grained.test | 13 +++++++++++++ test-data/unit/merge.test | 25 +++++++++++++++++++++++++ 3 files changed, 50 insertions(+), 1 deletion(-) diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test index e2a16eeb329e..7bc924c2ba86 100644 --- a/test-data/unit/deps.test +++ b/test-data/unit/deps.test @@ -37,12 +37,23 @@ class A: pass [out] -> m.A, m.f -[case testAccessModuleAttribute-skip] +[case testAccessModuleAttribute] x = 1 def f() -> None: x [out] -> m, m.f + -> m + +[case testAccessModuleAttribute2] +import n +def f() -> None: + n.x +[file n.py] +x = 1 +[out] + -> m.f + -> m, m.f [case testImport] import n diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 0fc3b4754482..153a757cefa2 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -557,3 +557,16 @@ def g(x: int) -> None: pass [out] == m/__init__.py:3: error: Too few arguments for "g" + +[case testModuleAttributeTypeChanges] +import m +def f() -> None: + x = 1 + x = m.x +[file m.py] 
+x = 1 +[file m.py.2] +x = '' +[out] +== +main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int") diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test index f2f2f528160d..c2d837a1a6c7 100644 --- a/test-data/unit/merge.test +++ b/test-data/unit/merge.test @@ -544,3 +544,28 @@ TypeInfo<2>( Mro(target.B<2>, target.A<0>, builtins.object<1>) Names( f<3>)) + +[case testModuleAttribute] +import target +[file target.py] +x = 1 +[file target.py.next] +x = 2 +[out] +MypyFile:1<0>( + Import:1(target)) +MypyFile:1<1>( + tmp/target.py + AssignmentStmt:1<2>( + NameExpr(x [target.x<3>]) + IntExpr(1) + builtins.int<4>)) +==> +MypyFile:1<0>( + Import:1(target)) +MypyFile:1<1>( + tmp/target.py + AssignmentStmt:1<5>( + NameExpr(x [target.x<3>]) + IntExpr(2) + builtins.int<4>)) From 84b7a62ed1de43cb1faa59ee6862ac2847ebc818 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 9 Mar 2017 12:24:12 +0000 Subject: [PATCH 24/44] Fix test case --- test-data/unit/fine-grained.test | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 153a757cefa2..524c238df212 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -540,7 +540,7 @@ def g(x: int) -> None: pass [file m/n.py] [out] == -main:3: error: Too few arguments for "g" +main:4: error: Too few arguments for "g" [case testTriggerTargetInPackage__init__] import m From 67a54d7500c4fe7ce7457f7c7c529e658debd80f Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 9 Mar 2017 12:24:24 +0000 Subject: [PATCH 25/44] Implement multiple propagation steps for module attributes Also fix cases where multiple namespaces cause additional steps. 
--- mypy/server/update.py | 60 ++++++++++++++++++++++++-------- test-data/unit/fine-grained.test | 47 +++++++++++++++++++++++++ 2 files changed, 93 insertions(+), 14 deletions(-) diff --git a/mypy/server/update.py b/mypy/server/update.py index ebde0500803a..c7bb346ba5cc 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -46,7 +46,7 @@ - Support multiple type checking passes """ -from typing import Dict, List, Set, Tuple, Iterable +from typing import Dict, List, Set, Tuple, Iterable, Union from mypy.build import BuildManager, State from mypy.checker import DeferredNode @@ -288,10 +288,12 @@ def reprocess_nodes(manager: BuildManager, Return fired triggers. """ file_node = manager.modules[module_id] + # Strip semantic analysis information. for deferred in nodes: strip_target(deferred.node) semantic_analyzer = manager.semantic_analyzer + # Second pass of semantic analysis. We don't redo the first pass, because it only # does local things that won't go stale. for deferred in nodes: @@ -301,6 +303,7 @@ def reprocess_nodes(manager: BuildManager, options=manager.options, active_type=deferred.active_typeinfo): manager.semantic_analyzer.refresh_partial(deferred.node) + # Third pass of semantic analysis. for deferred in nodes: with semantic_analyzer.file_context( @@ -309,24 +312,53 @@ def reprocess_nodes(manager: BuildManager, options=manager.options, active_type=deferred.active_typeinfo): manager.semantic_analyzer_pass3.refresh_partial(deferred.node) - info = deferred.active_typeinfo - if info: - old_types = {name: node.node.type - for name, node in info.names.items() - if isinstance(node.node, Var)} + + # Keep track of potentially affected attribute types before type checking. + old_types_map = get_enclosing_namespace_types(nodes) + # Type check. graph[module_id].type_checker.check_second_pass(list(nodes)) # TODO: check return value + + # Check if any attribute types were changed and need to be propagated further. 
+ new_triggered = get_triggered_namespace_items(old_types_map) + + # Dependencies may have changed. + update_deps(module_id, nodes, graph, deps) + + return new_triggered + + +NamespaceNode = Union[TypeInfo, MypyFile] + + +def get_enclosing_namespace_types(nodes: Set[DeferredNode]) -> Dict[NamespaceNode, + Dict[str, Type]]: + types = {} # type: Dict[NamespaceNode, Dict[str, Type]] + for deferred in nodes: + info = deferred.active_typeinfo + if info: + target = info # type: NamespaceNode + elif isinstance(deferred.node, MypyFile): + target = deferred.node + else: + target = None + if target and target not in types: + local_types = {name: node.node.type + for name, node in target.names.items() + if isinstance(node.node, Var)} + types[target] = local_types + return types + + +def get_triggered_namespace_items(old_types_map: Dict[NamespaceNode, Dict[str, Type]]) -> Set[str]: new_triggered = set() - if info: - # Check if we need to propagate any attribute type changes further. - # TODO: Also consider module-level attribute type changes here. - for name, member_node in info.names.items(): + for namespace_node, old_types in old_types_map.items(): + for name, node in namespace_node.names.items(): if (name in old_types and - (not isinstance(member_node.node, Var) or - not is_identical_type(member_node.node.type, old_types[name]))): + (not isinstance(node.node, Var) or + not is_identical_type(node.node.type, old_types[name]))): # Type checking a method changed an attribute type. 
- new_triggered.add(make_trigger('{}.{}'.format(info.fullname(), name))) - update_deps(module_id, nodes, graph, deps) + new_triggered.add(make_trigger('{}.{}'.format(namespace_node.fullname(), name))) return new_triggered diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 524c238df212..c2f61f49bbfe 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -570,3 +570,50 @@ x = '' [out] == main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int") + +[case testTwoStepsDueToModuleAttribute] +import m +x = m.f() + +def g() -> None: + y = 1 + y = x # E +[file m.py] +def f() -> int: pass +[file m.py.2] +def f() -> str: pass +[out] +== +main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") + +[case testTwoStepsDueToMultipleNamespaces] +import m + +x = m.f() + +def g() -> None: + xx = 1 + xx = x + +class A: + def a(self) -> None: + self.y = m.f() + def b(self) -> None: + yy = 1 + yy = self.y + +class B: + def c(self) -> None: + self.z = m.f() + def b(self) -> None: + zz = 1 + zz = self.z +[file m.py] +def f() -> int: pass +[file m.py.2] +def f() -> str: pass +[out] +== +main:7: error: Incompatible types in assignment (expression has type "str", variable has type "int") +main:14: error: Incompatible types in assignment (expression has type "str", variable has type "int") +main:21: error: Incompatible types in assignment (expression has type "str", variable has type "int") From 54c800d4ba7b5dc48b111c3c187b786c35d8d904 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 9 Mar 2017 13:50:06 +0000 Subject: [PATCH 26/44] Support constructors for fine-grained incremental --- mypy/server/deps.py | 14 +++++++- test-data/unit/deps.test | 17 +++++++++ test-data/unit/diff.test | 9 +++++ test-data/unit/fine-grained.test | 60 ++++++++++++++++++++++++++++++++ 4 files changed, 99 insertions(+), 1 deletion(-) diff --git 
a/mypy/server/deps.py b/mypy/server/deps.py index aa79c2aace37..bb2985ef3763 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -5,7 +5,7 @@ from mypy.checkmember import bind_self from mypy.nodes import ( Node, Expression, MypyFile, FuncDef, ClassDef, AssignmentStmt, NameExpr, MemberExpr, Import, - ImportFrom, TypeInfo, Var, LDEF + ImportFrom, CallExpr, TypeInfo, Var, LDEF ) from mypy.traverser import TraverserVisitor from mypy.types import ( @@ -89,6 +89,8 @@ def visit_class_def(self, o: ClassDef) -> None: for name, node in base.names.items(): self.add_dependency(make_trigger(base.fullname() + '.' + name), target=make_trigger(info.fullname() + '.' + name)) + self.add_dependency(make_trigger(base.fullname() + '.__init__'), + target=make_trigger(info.fullname() + '.__init__')) self.pop() def visit_import(self, o: Import) -> None: @@ -138,6 +140,16 @@ def visit_member_expr(self, e: MemberExpr) -> None: # TODO: Handle more types raise NotImplementedError + def visit_call_expr(self, e: CallExpr) -> None: + super().visit_call_expr(e) + callee_type = self.type_map.get(e.callee) + print(callee_type) + if isinstance(callee_type, FunctionLike) and callee_type.is_type_obj(): + print('here') + class_name = callee_type.type_object().fullname() + print(class_name) + self.add_dependency(make_trigger(class_name + '.__init__')) + # Helpers def add_dependency(self, trigger: str, target: str = None) -> None: diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test index 7bc924c2ba86..fc45bfb89d55 100644 --- a/test-data/unit/deps.test +++ b/test-data/unit/deps.test @@ -35,6 +35,7 @@ def f() -> None: A() class A: pass [out] + -> m.f -> m.A, m.f [case testAccessModuleAttribute] @@ -87,6 +88,7 @@ class A: class B(A): pass [out] + -> -> m.A, m.B -> m.B @@ -97,6 +99,7 @@ class B(A): def f(self) -> None: self.x = 1 [out] + -> -> m.B.f -> -> m.A, m.B @@ -112,9 +115,11 @@ class C(B): def f(self) -> None: self.x = 1 [out] + -> , -> m.C.f -> -> m.A, m.B + -> -> m.C.f -> -> 
m.B, m.C @@ -134,6 +139,7 @@ class A: [out] -> m.B.f -> m.B + -> -> m.B.f -> -> @@ -147,6 +153,7 @@ class B(A): def f(self) -> None: self.g() [out] + -> -> m.B.f -> -> m.A, m.B @@ -205,3 +212,13 @@ class A: -> m.f -> , m.f -> m + +[case testConstructor] +class A: + def __init__(self, x: int) -> None: pass +def f() -> None: + A(1) +[out] + -> m.f + -> m.A, m.f + -> , m.A.__init__ diff --git a/test-data/unit/diff.test b/test-data/unit/diff.test index 296b947db823..c16ac6978f7f 100644 --- a/test-data/unit/diff.test +++ b/test-data/unit/diff.test @@ -194,3 +194,12 @@ class C(B): pass [out] __main__.B __main__.C + +[case testDifferenceInConstructor] +class A: + def __init__(self) -> None: pass +[file next.py] +class A: + def __init__(self, x: int) -> None: pass +[out] +__main__.A.__init__ diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index c2f61f49bbfe..910cc546f45c 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -617,3 +617,63 @@ def f() -> str: pass main:7: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:14: error: Incompatible types in assignment (expression has type "str", variable has type "int") main:21: error: Incompatible types in assignment (expression has type "str", variable has type "int") + +[case testConstructorSignatureChanged] +import m + +def f() -> None: + m.A() +[file m.py] +class A: + def __init__(self) -> None: pass +[file m.py.2] +class A: + def __init__(self, x: int) -> None: pass +[out] +== +main:4: error: Too few arguments for "A" + +[case testConstructorAdded] +import m + +def f() -> None: + m.A() +[file m.py] +class A: pass +[file m.py.2] +class A: + def __init__(self, x: int) -> None: pass +[out] +== +main:4: error: Too few arguments for "A" + +[case testConstructorDeleted] +import m + +def f() -> None: + m.A(1) +[file m.py] +class A: + def __init__(self, x: int) -> None: pass +[file m.py.2] +class A: pass +[out] +== 
+main:4: error: Too many arguments for "A" + +[case testBaseClassConstructorChanged] +import m + +def f() -> None: + m.B() +[file m.py] +class A: + def __init__(self) -> None: pass +class B(A): pass +[file m.py.2] +class A: + def __init__(self, x: int) -> None: pass +class B(A): pass +[out] +== +main:4: error: Too few arguments for "B" From 770cc614a8285f87e1be98702041f31874a606ea Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 9 Mar 2017 15:23:38 +0000 Subject: [PATCH 27/44] Support from m import with fine-grained incremental Relative imports are not yet supported. --- mypy/server/deps.py | 5 +- test-data/unit/deps.test | 10 ++++ test-data/unit/fine-grained.test | 83 ++++++++++++++++++++++++++++++++ 3 files changed, 97 insertions(+), 1 deletion(-) diff --git a/mypy/server/deps.py b/mypy/server/deps.py index bb2985ef3763..d013c31c05af 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -99,7 +99,10 @@ def visit_import(self, o: Import) -> None: self.add_dependency(make_trigger(id), self.current()) def visit_import_from(self, o: ImportFrom) -> None: - raise NotImplementedError + assert o.relative == 0 # Relative imports not supported + for name, as_name in o.names: + assert as_name is None or as_name == name + self.add_dependency(make_trigger(o.id + '.' 
+ name)) def visit_assignment_stmt(self, o: AssignmentStmt) -> None: super().visit_assignment_stmt(o) diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test index fc45bfb89d55..0e59d1d1a3bb 100644 --- a/test-data/unit/deps.test +++ b/test-data/unit/deps.test @@ -222,3 +222,13 @@ def f() -> None: -> m.f -> m.A, m.f -> , m.A.__init__ + +[case testImportFrom] +from n import f + +def g() -> None: + f() +[file n.py] +def f() -> None: pass +[out] + -> m, m.g diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 910cc546f45c..b234be8e60d5 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -677,3 +677,86 @@ class B(A): pass [out] == main:4: error: Too few arguments for "B" + +[case testImportFrom] +from m import f + +def g() -> None: + f() +[file m.py] +def f() -> None: pass +[file m.py.2] +def f(x: int) -> None: pass +[builtins fixtures/fine_grained.pyi] +[out] +== +main:4: error: Too few arguments for "f" + +[case testImportFrom2] +from m import f +f() +[file m.py] +def f() -> None: pass +[file m.py.2] +def f(x: int) -> None: pass +[out] +== +main:2: error: Too few arguments for "f" + +[case testImportFromTargetsClass] +from m import C + +def f(c: C) -> None: + c.g() +[file m.py] +class C: + def g(self) -> None: pass +[file m.py.2] +class C: + def g(self, x: int) -> None: pass +[out] +== +main:4: error: Too few arguments for "g" of "C" + +[case testImportFromTargetsVariable] +from m import x + +def f() -> None: + y = 1 + y = x +[file m.py] +x = 1 +[file m.py.2] +x = '' +[out] +== +main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") + +[case testImportFromSubmoduleOfPackage] +from m import n + +def f() -> None: + n.g() +[file m/__init__.py] +[file m/n.py] +def g() -> None: pass +[file m/n.py.2] +def g(x: int) -> None: pass +[out] +== +main:4: error: Too few arguments for "g" + +[case testImportedFunctionGetsImported] +from m import f + +def g() 
-> None: + f() +[file m.py] +from n import f +[file n.py] +def f() -> None: pass +[file n.py.2] +def f(x: int) -> None: pass +[out] +== +main:4: error: Too few arguments for "f" From 2f0c0a7b3c331a93f4d9219ff4f3cff1b0f7d9cf Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 10 Mar 2017 17:29:05 +0000 Subject: [PATCH 28/44] Support nested classes with fine-grained incremental --- mypy/server/deps.py | 9 +++--- test-data/unit/deps.test | 47 ++++++++++++++++++++++++++++++ test-data/unit/diff.test | 62 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 113 insertions(+), 5 deletions(-) diff --git a/mypy/server/deps.py b/mypy/server/deps.py index d013c31c05af..8b4eb0290c1d 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -135,13 +135,12 @@ def visit_member_expr(self, e: MemberExpr) -> None: typ = self.type_map[e.expr] if isinstance(typ, Instance): member = '%s.%s' % (typ.type.fullname(), e.name) - trigger = make_trigger(member) - self.add_dependency(trigger) + self.add_dependency(make_trigger(member)) elif isinstance(typ, (AnyType, NoneTyp)): pass # No dependency needed - else: - # TODO: Handle more types - raise NotImplementedError + elif isinstance(typ, FunctionLike) and typ.is_type_obj(): + member = '%s.%s' % (typ.type_object().fullname(), e.name) + self.add_dependency(make_trigger(member)) def visit_call_expr(self, e: CallExpr) -> None: super().visit_call_expr(e) diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test index 0e59d1d1a3bb..a3d65330330b 100644 --- a/test-data/unit/deps.test +++ b/test-data/unit/deps.test @@ -232,3 +232,50 @@ def g() -> None: def f() -> None: pass [out] -> m, m.g + +[case testNestedClass] +def f() -> None: + b = A.B() + b.f() +class A: + class B: + def f(self) -> None: pass +[out] + -> m.f + -> m.f + -> m.A.B, m.f + -> m.A, m.f + +[case testNestedClassAttribute] +def f() -> None: + b = A.B() + b.x +class A: + class B: + def f(self) -> None: + self.x = 1 +[out] + -> m.f + -> m.A.B.f, m.f + -> m.A.B, 
m.f + -> m.A, m.f + +[case testNestedClassInAnnotation] +def f(x: A.B) -> None: + pass +class A: + class B: pass +[out] + -> , m.A.B, m.f + -> m.A + +[case testNestedClassInAnnotation2] +def f(x: A.B) -> None: + x.f() +class A: + class B: + def f(self) -> None: pass +[out] + -> m.f + -> , m.A.B, m.f + -> m.A diff --git a/test-data/unit/diff.test b/test-data/unit/diff.test index c16ac6978f7f..638948c40312 100644 --- a/test-data/unit/diff.test +++ b/test-data/unit/diff.test @@ -203,3 +203,65 @@ class A: def __init__(self, x: int) -> None: pass [out] __main__.A.__init__ + +[case testChangeSignatureOfMethodInNestedClass] +class A: + class B: + def f(self) -> int: pass +[file next.py] +class A: + class B: + def f(self) -> str: pass +[out] +__main__.A.B.f + +[case testChangeTypeOfAttributeInNestedClass] +class A: + class B: + def f(self) -> None: + self.x = 1 +[file next.py] +class A: + class B: + def f(self) -> None: + self.x = '' +[out] +__main__.A.B.x + +[case testAddMethodToNestedClass] +class A: + class B: pass +[file next.py] +class A: + class B: + def f(self) -> str: pass +[out] +__main__.A.B.f + +[case testAddNestedClass] +class A: pass +[file next.py] +class A: + class B: + def f(self) -> None: pass +[out] +__main__.A.B + +[case testRemoveNestedClass] +class A: + class B: + def f(self) -> None: pass +[file next.py] +class A: pass +[out] +__main__.A.B + +[case testChangeNestedClassToMethod] +class A: + class B: pass +[file next.py] +class A: + def B(self) -> None: pass + +[out] +__main__.A.B From b480807ce040589d5311eaa70e53b8cb4b14880f Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 10 Mar 2017 18:17:13 +0000 Subject: [PATCH 29/44] Fix merge test case --- mypy/test/testmerge.py | 12 +++++++++--- test-data/unit/merge.test | 37 +++++++++++++++++++++++++++++++++++++ 2 files changed, 46 insertions(+), 3 deletions(-) diff --git a/mypy/test/testmerge.py b/mypy/test/testmerge.py index 3fe5f60e5da1..e2eed4e00d1c 100644 --- a/mypy/test/testmerge.py +++ 
b/mypy/test/testmerge.py @@ -170,9 +170,15 @@ def dump_typeinfos(self, modules: Dict[str, MypyFile]) -> List[str]: for id in sorted(modules): if id == 'builtins': continue - for name, node in modules[id].names.items(): - if isinstance(node.node, TypeInfo): - a.extend(self.dump_typeinfo(node.node)) + a.extend(self.dump_typeinfos_recursive(modules[id].names)) + return a + + def dump_typeinfos_recursive(self, names: SymbolTable) -> List[str]: + a = [] + for name, node in names.items(): + if isinstance(node.node, TypeInfo): + a.extend(self.dump_typeinfo(node.node)) + a.extend(self.dump_typeinfos_recursive(node.node.names)) return a def dump_typeinfo(self, info: TypeInfo) -> List[str]: diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test index c2d837a1a6c7..c269376b02d8 100644 --- a/test-data/unit/merge.test +++ b/test-data/unit/merge.test @@ -569,3 +569,40 @@ MypyFile:1<1>( NameExpr(x [target.x<3>]) IntExpr(2) builtins.int<4>)) + +[case testNestedClassMethod_typeinfo] +import target +[file target.py] +class A: + class B: + def f(self) -> None: pass +[file target.py.next] +class A: + class B: + def f(self) -> None: pass +[out] +TypeInfo<0>( + Name(target.A) + Bases(builtins.object<1>) + Mro(target.A<0>, builtins.object<1>) + Names( + B<2>)) +TypeInfo<2>( + Name(target.A.B) + Bases(builtins.object<1>) + Mro(target.A.B<2>, builtins.object<1>) + Names( + f<3>)) +==> +TypeInfo<0>( + Name(target.A) + Bases(builtins.object<1>) + Mro(target.A<0>, builtins.object<1>) + Names( + B<2>)) +TypeInfo<2>( + Name(target.A.B) + Bases(builtins.object<1>) + Mro(target.A.B<2>, builtins.object<1>) + Names( + f<3>)) From 0636545bb848e7eebc5e184f81491d87f4150ceb Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 10 Mar 2017 18:36:23 +0000 Subject: [PATCH 30/44] Remove debug print --- mypy/server/deps.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/mypy/server/deps.py b/mypy/server/deps.py index 8b4eb0290c1d..95bed5e88dbe 100644 --- a/mypy/server/deps.py +++ 
b/mypy/server/deps.py @@ -145,11 +145,8 @@ def visit_member_expr(self, e: MemberExpr) -> None: def visit_call_expr(self, e: CallExpr) -> None: super().visit_call_expr(e) callee_type = self.type_map.get(e.callee) - print(callee_type) if isinstance(callee_type, FunctionLike) and callee_type.is_type_obj(): - print('here') class_name = callee_type.type_object().fullname() - print(class_name) self.add_dependency(make_trigger(class_name + '.__init__')) # Helpers From e27184447080696e7347cd101073b3f5d02bf69c Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 10 Mar 2017 19:40:50 +0000 Subject: [PATCH 31/44] More nested class test cases --- test-data/unit/fine-grained.test | 53 ++++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index b234be8e60d5..28dbaecb1632 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -760,3 +760,56 @@ def f(x: int) -> None: pass [out] == main:4: error: Too few arguments for "f" + +[case testNestedClassMethodSignatureChanges] +from m import A + +def f(x: A.B) -> None: + x.g() +[file m.py] +class A: + class B: + def g(self) -> None: pass +[file m.py.2] +class A: + class B: + def g(self, x: int) -> None: pass +[out] +== +main:4: error: Too few arguments for "g" of "B" + +[case testNestedClassAttributeTypeChanges] +from m import A + +def f(x: A.B) -> None: + z = 1 + z = x.y +[file m.py] +class A: + class B: + def g(self) -> None: + self.y = 1 +[file m.py.2] +class A: + class B: + def g(self) -> None: + self.y = '' +[out] +== +main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int") + +[case testReprocessMethodInNestedClass] +from m import f + +class A: + class B: + def g(self) -> None: + x = 1 + x = f() +[file m.py] +def f() -> int: pass +[file m.py.2] +def f() -> str: pass +[out] +== +main:7: error: Incompatible types in assignment (expression has type "str", variable 
has type "int") From 291286f4bfa278f1165e2284dd1830e734c862e9 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 10 Mar 2017 20:22:41 +0000 Subject: [PATCH 32/44] Fixes to classes with fine-grained incremental --- mypy/nodes.py | 5 ++++- mypy/semanal.py | 21 ++++++++++++++++--- mypy/server/aststrip.py | 9 ++++++++ mypy/server/update.py | 7 ++++--- test-data/unit/fine-grained.test | 35 ++++++++++++++++++++++++++++++++ 5 files changed, 70 insertions(+), 7 deletions(-) diff --git a/mypy/nodes.py b/mypy/nodes.py index 85af20df869e..51923299151f 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -2009,7 +2009,10 @@ def __init__(self, names: 'SymbolTable', defn: ClassDef, module_name: str) -> No self._fullname = defn.fullname self.is_abstract = False self.abstract_attributes = [] - if defn.type_vars: + self.add_type_vars() + + def add_type_vars(self) -> None: + if self.defn.type_vars: for vd in defn.type_vars: self.type_vars.append(vd.name) diff --git a/mypy/semanal.py b/mypy/semanal.py index 7615df6eb460..745b0fe6ed20 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -279,9 +279,19 @@ def refresh_partial(self, node: Union[MypyFile, FuncItem]) -> None: def refresh_top_level(self, file_node: MypyFile) -> None: """Reanalyze a stale module top-level in fine-grained incremental mode.""" for d in file_node.defs: - if not isinstance(d, (FuncItem, ClassDef)): + if isinstance(d, ClassDef): + self.refresh_class_def(d) + elif not isinstance(d, FuncItem): self.accept(d) + def refresh_class_def(self, defn: ClassDef) -> None: + with self.analyze_class_body(defn): + for d in defn.defs.body: + if isinstance(d, ClassDef): + self.refresh_class_def(d) + elif not isinstance(d, FuncItem): + self.accept(d) + @contextmanager def file_context(self, file_node: MypyFile, fnam: str, options: Options, active_type: Optional[TypeInfo]) -> Iterator[None]: @@ -693,6 +703,12 @@ def check_function_signature(self, fdef: FuncItem) -> None: self.fail('Type signature has too many arguments', fdef, 
blocker=True) def visit_class_def(self, defn: ClassDef) -> None: + with self.analyze_class_body(defn): + # Analyze class body. + defn.defs.accept(self) + + @contextmanager + def analyze_class_body(self, defn: ClassDef) -> Iterator[None]: self.clean_up_bases_and_infer_type_variables(defn) if self.analyze_typeddict_classdef(defn): return @@ -714,8 +730,7 @@ def visit_class_def(self, defn: ClassDef) -> None: self.enter_class(defn) - # Analyze class body. - defn.defs.accept(self) + yield self.calculate_abstract_status(defn.info) self.setup_type_promotion(defn) diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py index 2709cdf4ae8b..aec2676e2e4e 100644 --- a/mypy/server/aststrip.py +++ b/mypy/server/aststrip.py @@ -30,6 +30,15 @@ def strip_top_level(self, file_node: MypyFile) -> None: for node in file_node.defs: if not isinstance(node, (FuncItem, ClassDef)): node.accept(self) + elif isinstance(node, ClassDef): + self.strip_class_body(node) + + def strip_class_body(self, node: ClassDef) -> None: + """Strip class body and type info, but don't strip methods.""" + node.info.type_vars = [] + node.info.bases = [] + node.info.abstract_attributes = [] + node.info.add_type_vars() def visit_func_def(self, node: FuncDef) -> None: node.expanded = [] diff --git a/mypy/server/update.py b/mypy/server/update.py index c7bb346ba5cc..1f1aad8e43cd 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -385,7 +385,7 @@ def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNod else: components = [] node = modules[module] # type: SymbolNode - prev = None # type: SymbolNode + file = None # type: MypyFile active_class = None active_class_name = None for c in components: @@ -394,14 +394,15 @@ def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNod active_class_name = node.name() # TODO: Is it possible for the assertion to fail? 
assert isinstance(node, (MypyFile, TypeInfo)) - prev = node + if isinstance(node, MypyFile): + file = node node = node.names[c].node if isinstance(node, TypeInfo): # A ClassDef target covers the body of the class and everything defined # within it. To get the body we include the entire surrounding target, # typically a module top-level, since we don't support processing class # bodies as separate entitites for simplicity. - result = [DeferredNode(prev, None, None)] # TODO: Nested classes + result = [DeferredNode(file, None, None)] for name, node in node.names.items(): if isinstance(node, FuncDef): result.extend(lookup_target(modules, target + '.' + name)) diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 28dbaecb1632..a204d153b282 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -813,3 +813,38 @@ def f() -> str: pass [out] == main:7: error: Incompatible types in assignment (expression has type "str", variable has type "int") + +[case testBaseClassDeleted] +import m + +class A(m.C): + def f(self) -> None: + self.g() # No error here because m.C becomes an Any base class + def g(self) -> None: + self.x +[file m.py] +class C: + def g(self) -> None: pass +[file m.py.2] +[out] +main:7: error: "A" has no attribute "x" +== +main:3: error: Name 'm.C' is not defined + +[case testBaseClassOfNestedClassDeleted] +import m + +class A: + class B(m.C): + def f(self) -> None: + self.g() # No error here because m.C becomes an Any base class + def g(self) -> None: + self.x +[file m.py] +class C: + def g(self) -> None: pass +[file m.py.2] +[out] +main:8: error: "B" has no attribute "x" +== +main:4: error: Name 'm.C' is not defined From 25a2846826dedc0b785e9067ad7b1166cba5c65a Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 21 Mar 2017 16:15:13 -0700 Subject: [PATCH 33/44] Minor fixes --- mypy/nodes.py | 2 +- mypy/semanal.py | 28 ++++++++++++++++------------ mypy/server/aststrip.py | 3 +-- 
mypy/server/target.py | 4 ++-- mypy/server/update.py | 5 +++-- test-data/unit/semanal-typeinfo.test | 8 ++++++++ 6 files changed, 31 insertions(+), 19 deletions(-) diff --git a/mypy/nodes.py b/mypy/nodes.py index 51923299151f..c1e3fc8e5f1c 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -2013,7 +2013,7 @@ def __init__(self, names: 'SymbolTable', defn: ClassDef, module_name: str) -> No def add_type_vars(self) -> None: if self.defn.type_vars: - for vd in defn.type_vars: + for vd in self.defn.type_vars: self.type_vars.append(vd.name) def name(self) -> str: diff --git a/mypy/semanal.py b/mypy/semanal.py index 745b0fe6ed20..8b878c66db57 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -285,12 +285,13 @@ def refresh_top_level(self, file_node: MypyFile) -> None: self.accept(d) def refresh_class_def(self, defn: ClassDef) -> None: - with self.analyze_class_body(defn): - for d in defn.defs.body: - if isinstance(d, ClassDef): - self.refresh_class_def(d) - elif not isinstance(d, FuncItem): - self.accept(d) + with self.analyze_class_body(defn) as should_continue: + if should_continue: + for d in defn.defs.body: + if isinstance(d, ClassDef): + self.refresh_class_def(d) + elif not isinstance(d, FuncItem): + self.accept(d) @contextmanager def file_context(self, file_node: MypyFile, fnam: str, options: Options, @@ -703,20 +704,23 @@ def check_function_signature(self, fdef: FuncItem) -> None: self.fail('Type signature has too many arguments', fdef, blocker=True) def visit_class_def(self, defn: ClassDef) -> None: - with self.analyze_class_body(defn): - # Analyze class body. - defn.defs.accept(self) + with self.analyze_class_body(defn) as should_continue: + if should_continue: + # Analyze class body. 
+ defn.defs.accept(self) @contextmanager - def analyze_class_body(self, defn: ClassDef) -> Iterator[None]: + def analyze_class_body(self, defn: ClassDef) -> Iterator[bool]: self.clean_up_bases_and_infer_type_variables(defn) if self.analyze_typeddict_classdef(defn): + yield False return if self.analyze_namedtuple_classdef(defn): # just analyze the class body so we catch type errors in default values self.enter_class(defn) - defn.defs.accept(self) + yield False self.leave_class() + return else: self.setup_class_def_analysis(defn) @@ -730,7 +734,7 @@ def analyze_class_body(self, defn: ClassDef) -> Iterator[None]: self.enter_class(defn) - yield + yield True self.calculate_abstract_status(defn.info) self.setup_type_promotion(defn) diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py index aec2676e2e4e..a34d34c3ed63 100644 --- a/mypy/server/aststrip.py +++ b/mypy/server/aststrip.py @@ -5,7 +5,7 @@ from mypy.nodes import ( Node, FuncDef, NameExpr, MemberExpr, RefExpr, MypyFile, FuncItem, ClassDef, AssignmentStmt, - TypeInfo + TypeInfo, Var ) from mypy.traverser import TraverserVisitor @@ -84,7 +84,6 @@ def strip_ref_expr(self, node: RefExpr) -> None: # TODO: handle more node types - def is_self_member_ref(memberexpr: MemberExpr) -> bool: """Does memberexpr refer to an attribute of self?""" # TODO: Merge with is_self_member_ref in semanal.py. 
diff --git a/mypy/server/target.py b/mypy/server/target.py index cbd7675e1b50..0b4636b0542f 100644 --- a/mypy/server/target.py +++ b/mypy/server/target.py @@ -1,4 +1,4 @@ -from typing import Iterable, Tuple +from typing import Iterable, Tuple, List def module_prefix(modules: Iterable[str], target: str) -> str: @@ -6,7 +6,7 @@ def module_prefix(modules: Iterable[str], target: str) -> str: def split_target(modules: Iterable[str], target: str) -> Tuple[str, str]: - remaining = [] + remaining = [] # type: List[str] while True: if target in modules: return target, '.'.join(remaining) diff --git a/mypy/server/update.py b/mypy/server/update.py index 1f1aad8e43cd..58caa86ae7c7 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -393,9 +393,9 @@ def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNod active_class = node active_class_name = node.name() # TODO: Is it possible for the assertion to fail? - assert isinstance(node, (MypyFile, TypeInfo)) if isinstance(node, MypyFile): file = node + assert isinstance(node, (MypyFile, TypeInfo)) node = node.names[c].node if isinstance(node, TypeInfo): # A ClassDef target covers the body of the class and everything defined @@ -403,7 +403,8 @@ def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNod # typically a module top-level, since we don't support processing class # bodies as separate entitites for simplicity. result = [DeferredNode(file, None, None)] - for name, node in node.names.items(): + for name, symnode in node.names.items(): + node = symnode.node if isinstance(node, FuncDef): result.extend(lookup_target(modules, target + '.' 
+ name)) return result diff --git a/test-data/unit/semanal-typeinfo.test b/test-data/unit/semanal-typeinfo.test index 6bb62e1c57ce..098ce0b114ad 100644 --- a/test-data/unit/semanal-typeinfo.test +++ b/test-data/unit/semanal-typeinfo.test @@ -9,6 +9,7 @@ TypeInfoMap( __main__.c : TypeInfo( Name(__main__.c) Bases(builtins.object) + Mro(__main__.c, builtins.object) Names())) [case testClassWithMethod] @@ -19,6 +20,7 @@ TypeInfoMap( __main__.c : TypeInfo( Name(__main__.c) Bases(builtins.object) + Mro(__main__.c, builtins.object) Names( f))) @@ -32,6 +34,7 @@ TypeInfoMap( __main__.c : TypeInfo( Name(__main__.c) Bases(builtins.object) + Mro(__main__.c, builtins.object) Names( __init__ y @@ -45,10 +48,12 @@ TypeInfoMap( __main__.base : TypeInfo( Name(__main__.base) Bases(builtins.object) + Mro(__main__.base, builtins.object) Names()) __main__.c : TypeInfo( Name(__main__.c) Bases(__main__.base) + Mro(__main__.c, __main__.base, builtins.object) Names())) [case testClassAndAbstractClass] @@ -62,10 +67,12 @@ TypeInfoMap( __main__.c : TypeInfo( Name(__main__.c) Bases(__main__.i) + Mro(__main__.c, __main__.i, builtins.object) Names()) __main__.i : TypeInfo( Name(__main__.i) Bases(builtins.object) + Mro(__main__.i, builtins.object) Names())) [case testAttributeWithoutType] @@ -76,5 +83,6 @@ TypeInfoMap( __main__.A : TypeInfo( Name(__main__.A) Bases(builtins.object) + Mro(__main__.A, builtins.object) Names( a))) From 2b9104c52a029425eb084562386387950f0d33ca Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 28 Mar 2017 16:55:54 +0100 Subject: [PATCH 34/44] Add review feedback --- mypy/checker.py | 12 +++++++----- mypy/semanal.py | 1 + 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 11f3329ebd90..30a86085022f 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -200,6 +200,8 @@ def check_second_pass(self, todo: List[DeferredNode] = None) -> bool: self.pass_num += 1 if not todo: todo = self.deferred_nodes + else: + 
assert not self.deferred_nodes self.deferred_nodes = [] done = set() # type: Set[Union[FuncDef, MypyFile]] for node, type_name, active_typeinfo in todo: @@ -234,7 +236,7 @@ def check_top_level(self, node: MypyFile) -> None: def handle_cannot_determine_type(self, name: str, context: Context) -> None: node = self.scope.top_function() - if self.pass_num < LAST_PASS and node is not None: + if self.pass_num < LAST_PASS and node is not None and isinstance(node, FuncDef): # Don't report an error yet. Just defer. if self.errors.type_name: type_name = self.errors.type_name[-1] @@ -651,7 +653,7 @@ def is_implicit_any(t: Type) -> bool: for i in range(len(typ.arg_types)): arg_type = typ.arg_types[i] - ref_type = self.scope.active_self_type() # type: Type + ref_type = self.scope.active_self_type() # type: Optional[Type] if (isinstance(defn, FuncDef) and ref_type is not None and i == 0 and not defn.is_static and typ.arg_kinds[0] not in [nodes.ARG_STAR, nodes.ARG_STAR2]): @@ -3012,12 +3014,12 @@ def is_node_static(node: Node) -> Optional[bool]: class Scope: # We keep two stacks combined, to maintain the relative order - stack = None # type: List[Union[TypeInfo, FuncDef, MypyFile]] + stack = None # type: List[Union[TypeInfo, FuncItem, MypyFile]] def __init__(self, module: MypyFile) -> None: self.stack = [module] - def top_function(self) -> Optional[FuncDef]: + def top_function(self) -> Optional[FuncItem]: for e in reversed(self.stack): if isinstance(e, FuncItem): return e @@ -3035,7 +3037,7 @@ def active_self_type(self) -> Optional[Union[Instance, TupleType]]: return None @contextmanager - def push_function(self, item: FuncDef) -> Iterator[None]: + def push_function(self, item: FuncItem) -> Iterator[None]: self.stack.append(item) yield self.stack.pop() diff --git a/mypy/semanal.py b/mypy/semanal.py index 8b878c66db57..ca577c0fbe75 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -288,6 +288,7 @@ def refresh_class_def(self, defn: ClassDef) -> None: with 
self.analyze_class_body(defn) as should_continue: if should_continue: for d in defn.defs.body: + # TODO: Make sure refreshing class bodies works. if isinstance(d, ClassDef): self.refresh_class_def(d) elif not isinstance(d, FuncItem): From ef97a75195490971d97cd19ffa24fa267c399fe5 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 29 Mar 2017 18:14:03 +0100 Subject: [PATCH 35/44] Address more feedback and fix a bug --- mypy/server/aststrip.py | 1 + test-data/unit/fine-grained.test | 22 ++++++++++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py index a34d34c3ed63..dd7ae96f0f92 100644 --- a/mypy/server/aststrip.py +++ b/mypy/server/aststrip.py @@ -38,6 +38,7 @@ def strip_class_body(self, node: ClassDef) -> None: node.info.type_vars = [] node.info.bases = [] node.info.abstract_attributes = [] + node.info.mro = [] node.info.add_type_vars() def visit_func_def(self, node: FuncDef) -> None: diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index a204d153b282..96b8bcb94e41 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -500,6 +500,28 @@ class B: pass main:4: error: "A" has no attribute "g" main:5: error: "A" has no attribute "x" +[case testRemoveBaseClass2] +import m +class A(m.B): + def f(self) -> None: + self.g() + self.x + self.y = 1 +[file m.py] +class C: + def g(self) -> None: + self.x = 1 +class B(C): pass +[file m.py.2] +class C: + def g(self) -> None: + self.x = 1 +class B: pass +[out] +== +main:4: error: "A" has no attribute "g" +main:5: error: "A" has no attribute "x" + [case testChangeInPackage] import m.n def f() -> None: From 23ca75be24dbeaf05cd74ad0609f92f410485fa7 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 30 Mar 2017 17:09:50 +0100 Subject: [PATCH 36/44] Add additional debug output --- mypy/server/update.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mypy/server/update.py b/mypy/server/update.py 
index 58caa86ae7c7..28ef5205e483 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -238,6 +238,8 @@ def propagate_changes_using_dependencies( # dependency loop that loops back to an originally processed module. up_to_date_modules = set() targets_with_errors = set() + if DEBUG: + print('triggered:', list(triggered)) def find_targets_recursive( From 027f79bbdfab6059abc7f91ab7cb83157838d1f3 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 3 Apr 2017 15:59:07 +0100 Subject: [PATCH 37/44] Fix issues caused by rebase --- mypy/semanal.py | 6 ++---- mypy/server/astdiff.py | 8 +------- mypy/server/astmerge.py | 5 +---- mypy/server/deps.py | 5 +---- mypy/server/subexpr.py | 4 ++-- mypy/test/testmerge.py | 2 ++ test-data/unit/fine-grained.test | 6 +++--- 7 files changed, 12 insertions(+), 24 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index ca577c0fbe75..e08c806f0656 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -78,7 +78,6 @@ NoneTyp, CallableType, Overloaded, Instance, Type, TypeVarType, AnyType, FunctionLike, UnboundType, TypeList, TypeVarDef, TypeType, TupleType, UnionType, StarType, EllipsisType, function_type, TypedDictType, - Void, ) from mypy.nodes import implicit_module_attrs from mypy.typeanal import ( @@ -469,7 +468,7 @@ def find_type_variables_in_type(self, type: Type) -> List[Tuple[str, TypeVarExpr result.extend(self.find_type_variables_in_type(item)) elif isinstance(type, AnyType): pass - elif isinstance(type, (EllipsisType, TupleType, Void)): + elif isinstance(type, (EllipsisType, TupleType)): # TODO: Need to process tuple items? 
pass elif isinstance(type, Instance): @@ -719,9 +718,8 @@ def analyze_class_body(self, defn: ClassDef) -> Iterator[bool]: if self.analyze_namedtuple_classdef(defn): # just analyze the class body so we catch type errors in default values self.enter_class(defn) - yield False + yield True self.leave_class() - return else: self.setup_class_def_analysis(defn) diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py index be5d86305334..9b9659d2f4ea 100644 --- a/mypy/server/astdiff.py +++ b/mypy/server/astdiff.py @@ -11,7 +11,7 @@ from mypy.nodes import SymbolTable, SymbolTableNode, FuncBase, TypeInfo, Var from mypy.types import ( - Type, TypeVisitor, UnboundType, ErrorType, TypeList, AnyType, Void, NoneTyp, UninhabitedType, + Type, TypeVisitor, UnboundType, TypeList, AnyType, NoneTyp, UninhabitedType, ErasedType, DeletedType, Instance, TypeVarType, CallableType, TupleType, TypedDictType, UnionType, Overloaded, PartialType, TypeType ) @@ -137,18 +137,12 @@ def __init__(self, right: Type) -> None: def visit_unbound_type(self, left: UnboundType) -> bool: return False - def visit_error_type(self, left: ErrorType) -> bool: - return False - def visit_type_list(self, t: TypeList) -> bool: assert False, 'Not supported' def visit_any(self, left: AnyType) -> bool: return isinstance(self.right, AnyType) - def visit_void(self, left: Void) -> bool: - return isinstance(self.right, Void) - def visit_none_type(self, left: NoneTyp) -> bool: return isinstance(self.right, NoneTyp) diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py index 6ef251934a79..5591c9ac7862 100644 --- a/mypy/server/astmerge.py +++ b/mypy/server/astmerge.py @@ -13,7 +13,7 @@ from mypy.types import ( Type, TypeVisitor, Instance, AnyType, NoneTyp, CallableType, DeletedType, PartialType, TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType, - Void, Overloaded + Overloaded ) @@ -195,9 +195,6 @@ def visit_uninhabited_type(self, typ: UninhabitedType) -> None: def 
visit_union_type(self, typ: UnionType) -> None: raise NotImplementedError - def visit_void(self, typ: Void) -> None: - pass - # Helpers def fixup(self, node: SN) -> SN: diff --git a/mypy/server/deps.py b/mypy/server/deps.py index 95bed5e88dbe..6710cfa49bf0 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -11,7 +11,7 @@ from mypy.types import ( Type, Instance, AnyType, NoneTyp, TypeVisitor, CallableType, DeletedType, PartialType, TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType, - Void, FunctionLike + FunctionLike ) from mypy.server.trigger import make_trigger @@ -224,9 +224,6 @@ def visit_uninhabited_type(self, typ: UninhabitedType) -> List[str]: def visit_union_type(self, typ: UnionType) -> List[str]: raise NotImplementedError - def visit_void(self, typ: Void) -> List[str]: - return [] - def non_trivial_bases(info: TypeInfo) -> List[TypeInfo]: return [base for base in info.mro[1:] diff --git a/mypy/server/subexpr.py b/mypy/server/subexpr.py index 7a5433c06682..6659a4c51b03 100644 --- a/mypy/server/subexpr.py +++ b/mypy/server/subexpr.py @@ -5,7 +5,7 @@ from mypy.nodes import ( Expression, Node, MemberExpr, YieldFromExpr, YieldExpr, CallExpr, OpExpr, ComparisonExpr, SliceExpr, CastExpr, RevealTypeExpr, UnaryExpr, ListExpr, TupleExpr, DictExpr, SetExpr, - IndexExpr, GeneratorExpr, ListComprehension, ConditionalExpr, TypeApplication, FuncExpr, + IndexExpr, GeneratorExpr, ListComprehension, ConditionalExpr, TypeApplication, LambdaExpr, StarExpr, BackquoteExpr, AwaitExpr ) from mypy.traverser import TraverserVisitor @@ -116,7 +116,7 @@ def visit_type_application(self, e: TypeApplication) -> None: self.add(e) super().visit_type_application(e) - def visit_func_expr(self, e: FuncExpr) -> None: + def visit_lambda_expr(self, e: LambdaExpr) -> None: self.add(e) super().visit_func_expr(e) diff --git a/mypy/test/testmerge.py b/mypy/test/testmerge.py index e2eed4e00d1c..9dafc39e8576 100644 --- a/mypy/test/testmerge.py +++ 
b/mypy/test/testmerge.py @@ -191,6 +191,8 @@ def dump_types(self, graph: Dict[str, State]) -> List[str]: # To make the results repeatable, we try to generate unique and # deterministic sort keys. for module_id in sorted(graph): + if module_id == 'builtins': + continue type_map = graph[module_id].type_checker.type_map if type_map: a.append('## {}'.format(module_id)) diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 96b8bcb94e41..881315998c4c 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -383,7 +383,7 @@ main:3: error: "module" has no attribute "g" [case testAddBaseClassMethodCausingInvalidOverride] import m class B(m.A): - def f(self) -> None: pass + def f(self) -> str: pass [file m.py] class A: pass [file m.py.2] @@ -396,10 +396,10 @@ main:3: error: Return type of "f" incompatible with supertype "A" [case testModifyBaseClassMethodCausingInvalidOverride] import m class B(m.A): - def f(self) -> None: pass + def f(self) -> str: pass [file m.py] class A: - def f(self) -> None: pass + def f(self) -> str: pass [file m.py.2] class A: def f(self) -> int: pass From dbd5d6782751f2f2a39374668ba574ff8286b7bd Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 3 Apr 2017 16:00:01 +0100 Subject: [PATCH 38/44] Remove travis CI workaround This is no longer necessary since runtests was recently fixed to not spawn multiple parallel pytest jobs. 
--- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 29ce77b353e7..4c25d0e4bdd6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,5 +16,5 @@ install: - python setup.py install script: - - python runtests.py -x lint -j4 + - python runtests.py -x lint - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8; fi From a47ecb85e472595d80c52cf9e9532bf53638042a Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 3 Apr 2017 16:10:31 +0100 Subject: [PATCH 39/44] Fix another issue caused by the rebase --- mypy/server/subexpr.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/server/subexpr.py b/mypy/server/subexpr.py index 6659a4c51b03..b20fa4de10b4 100644 --- a/mypy/server/subexpr.py +++ b/mypy/server/subexpr.py @@ -118,7 +118,7 @@ def visit_type_application(self, e: TypeApplication) -> None: def visit_lambda_expr(self, e: LambdaExpr) -> None: self.add(e) - super().visit_func_expr(e) + super().visit_lambda_expr(e) def visit_star_expr(self, e: StarExpr) -> None: self.add(e) From ba17fe4c0c46c7ebd86ba0ec1f75a8a753f7c594 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 3 Apr 2017 18:19:47 +0100 Subject: [PATCH 40/44] Fix flaky test case --- mypy/test/testmerge.py | 2 +- test-data/unit/merge.test | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/mypy/test/testmerge.py b/mypy/test/testmerge.py index 9dafc39e8576..6802e3431091 100644 --- a/mypy/test/testmerge.py +++ b/mypy/test/testmerge.py @@ -175,7 +175,7 @@ def dump_typeinfos(self, modules: Dict[str, MypyFile]) -> List[str]: def dump_typeinfos_recursive(self, names: SymbolTable) -> List[str]: a = [] - for name, node in names.items(): + for name, node in sorted(names.items(), key=lambda x: x[0]): if isinstance(node.node, TypeInfo): a.extend(self.dump_typeinfo(node.node)) a.extend(self.dump_typeinfos_recursive(node.node.names)) diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test index 
c269376b02d8..a6d2a424f975 100644 --- a/test-data/unit/merge.test +++ b/test-data/unit/merge.test @@ -528,11 +528,6 @@ TypeInfo<2>( Names( f<3>)) ==> -TypeInfo<4>( - Name(target.C) - Bases(builtins.object<1>) - Mro(target.C<4>, builtins.object<1>) - Names()) TypeInfo<0>( Name(target.A) Bases(builtins.object<1>) @@ -544,6 +539,11 @@ TypeInfo<2>( Mro(target.B<2>, target.A<0>, builtins.object<1>) Names( f<3>)) +TypeInfo<4>( + Name(target.C) + Bases(builtins.object<1>) + Mro(target.C<4>, builtins.object<1>) + Names()) [case testModuleAttribute] import target From d3bf92331c8993dacecb24d6d25aa3326b91a8cd Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 3 Apr 2017 18:33:28 +0100 Subject: [PATCH 41/44] Fix flaky test by making processing order deterministic --- mypy/server/update.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/mypy/server/update.py b/mypy/server/update.py index 28ef5205e483..f303a99d0efa 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -283,7 +283,7 @@ def find_targets_recursive( def reprocess_nodes(manager: BuildManager, graph: Dict[str, State], module_id: str, - nodes: Set[DeferredNode], + nodeset: Set[DeferredNode], deps: Dict[str, Set[str]]) -> Set[str]: """Reprocess a set of nodes within a single module. @@ -291,6 +291,16 @@ def reprocess_nodes(manager: BuildManager, """ file_node = manager.modules[module_id] + def key(node: DeferredNode) -> str: + fullname = node.node.fullname() + if isinstance(node.node, FuncDef) and fullname is None: + assert node.node.info is not None + fullname = '%s.%s' % (node.node.info.fullname(), node.node.name()) + return fullname + + # Some nodes by full name so that the order of processing is deterministic. + nodes = sorted(nodeset, key=key) + # Strip semantic analysis information. 
for deferred in nodes: strip_target(deferred.node) @@ -319,7 +329,7 @@ def reprocess_nodes(manager: BuildManager, old_types_map = get_enclosing_namespace_types(nodes) # Type check. - graph[module_id].type_checker.check_second_pass(list(nodes)) # TODO: check return value + graph[module_id].type_checker.check_second_pass(nodes) # TODO: check return value # Check if any attribute types were changed and need to be propagated further. new_triggered = get_triggered_namespace_items(old_types_map) @@ -333,8 +343,8 @@ def reprocess_nodes(manager: BuildManager, NamespaceNode = Union[TypeInfo, MypyFile] -def get_enclosing_namespace_types(nodes: Set[DeferredNode]) -> Dict[NamespaceNode, - Dict[str, Type]]: +def get_enclosing_namespace_types(nodes: List[DeferredNode]) -> Dict[NamespaceNode, + Dict[str, Type]]: types = {} # type: Dict[NamespaceNode, Dict[str, Type]] for deferred in nodes: info = deferred.active_typeinfo @@ -365,7 +375,7 @@ def get_triggered_namespace_items(old_types_map: Dict[NamespaceNode, Dict[str, T def update_deps(module_id: str, - nodes: Set[DeferredNode], + nodes: List[DeferredNode], graph: Dict[str, State], deps: Dict[str, Set[str]]) -> None: for deferred in nodes: From 5c03d7ed32739069910dc7016d953b07b2d353cc Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 4 Apr 2017 16:29:58 +0100 Subject: [PATCH 42/44] Attempt to fix tests on Windows --- mypy/test/testdiff.py | 2 +- mypy/test/testfinegrained.py | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/mypy/test/testdiff.py b/mypy/test/testdiff.py index aa427dbdb9d8..f379a3735ce7 100644 --- a/mypy/test/testdiff.py +++ b/mypy/test/testdiff.py @@ -34,7 +34,7 @@ def cases(cls) -> List[DataDrivenTestCase]: def run_case(self, testcase: DataDrivenTestCase) -> None: first_src = '\n'.join(testcase.input) files_dict = dict(testcase.files) - second_src = files_dict[os.path.join('tmp', 'next.py')] + second_src = files_dict['tmp/next.py'] messages1, files1 = self.build(first_src) messages2, 
files2 = self.build(second_src) diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py index fd19840967bf..21afc8572b44 100644 --- a/mypy/test/testfinegrained.py +++ b/mypy/test/testfinegrained.py @@ -71,6 +71,9 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: a.append('==') a.extend(new_messages) + # Normalize paths in test output (for Windows). + a = [line.replace('\\', '/') for line in a] + assert_string_arrays_equal( testcase.output, a, 'Invalid output ({}, line {})'.format(testcase.file, From 89adec4c714eee6f1db0af85c1217798782248fb Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 5 Apr 2017 13:21:24 +0100 Subject: [PATCH 43/44] Fix deferred lambdas --- mypy/checker.py | 3 ++- test-data/unit/check-inference.test | 14 ++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/mypy/checker.py b/mypy/checker.py index 30a86085022f..de9ea5a7b8af 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -236,7 +236,8 @@ def check_top_level(self, node: MypyFile) -> None: def handle_cannot_determine_type(self, name: str, context: Context) -> None: node = self.scope.top_function() - if self.pass_num < LAST_PASS and node is not None and isinstance(node, FuncDef): + if (self.pass_num < LAST_PASS and node is not None + and isinstance(node, (FuncDef, LambdaExpr))): # Don't report an error yet. Just defer. 
if self.errors.type_name: type_name = self.errors.type_name[-1] diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 0457e49127d8..6eb38093f61a 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -1130,6 +1130,20 @@ def f(a: Callable[[int, int, int], int] = lambda *a, **k: 1): pass [builtins fixtures/dict.pyi] +[case testLambdaDeferredSpecialCase] +from typing import Callable + +class A: + def f(self) -> None: + h(lambda: self.x) + + def g(self) -> None: + self.x = 1 + +def h(x: Callable[[], int]) -> None: + pass + + -- Boolean operators -- ----------------- From 287f831d74e8f35140f382be8b158445b208c618 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 5 Apr 2017 13:31:28 +0100 Subject: [PATCH 44/44] Fix self check failure --- mypy/checker.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index de9ea5a7b8af..ae6ce80c6676 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -70,7 +70,8 @@ DeferredNode = NamedTuple( 'DeferredNode', [ - ('node', Union[FuncDef, MypyFile]), # In batch mode only FuncDef is supported + # In batch mode only FuncDef and LambdaExpr are supported + ('node', Union[FuncDef, LambdaExpr, MypyFile]), ('context_type_name', Optional[str]), # Name of the surrounding class (for error messages) ('active_typeinfo', Optional[TypeInfo]), # And its TypeInfo (for semantic analysis # self type handling) @@ -203,7 +204,7 @@ def check_second_pass(self, todo: List[DeferredNode] = None) -> bool: else: assert not self.deferred_nodes self.deferred_nodes = [] - done = set() # type: Set[Union[FuncDef, MypyFile]] + done = set() # type: Set[Union[FuncDef, LambdaExpr, MypyFile]] for node, type_name, active_typeinfo in todo: if node in done: continue @@ -216,9 +217,11 @@ def check_second_pass(self, todo: List[DeferredNode] = None) -> bool: self.check_partial(node) return True - def check_partial(self, node: 
Union[FuncDef, MypyFile]) -> None: + def check_partial(self, node: Union[FuncDef, LambdaExpr, MypyFile]) -> None: if isinstance(node, MypyFile): self.check_top_level(node) + elif isinstance(node, LambdaExpr): + self.expr_checker.accept(node) else: self.accept(node)