Commit 7b8c497

Support loading from cache in fine-grained incremental mode
1 parent e2224a6 commit 7b8c497

3 files changed (+27 lines, -7 lines)

mypy/build.py

Lines changed: 7 additions & 0 deletions

@@ -2374,6 +2374,13 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
             manager.log("Processing SCC of size %d (%s) as %s" % (size, scc_str, fresh_msg))
             process_stale_scc(graph, scc, manager)
 
+        # If we are running in fine-grained incremental mode with caching,
+        # we need to always process fresh SCCs.
+        if manager.options.fine_grained_incremental and manager.options.cache_fine_grained:
+            for prev_scc in fresh_scc_queue:
+                process_fresh_scc(graph, prev_scc, manager)
+            fresh_scc_queue = []
+
     sccs_left = len(fresh_scc_queue)
     nodes_left = sum(len(scc) for scc in fresh_scc_queue)
     manager.add_stats(sccs_left=sccs_left, nodes_left=nodes_left)
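
For context on this hunk: process_graph normally leaves fresh (already up-to-date) SCCs on fresh_scc_queue, since their cached results may never be needed, but fine-grained caching appears to require every module to be fully processed so its symbol table and fine-grained dependencies are available. Below is a minimal, self-contained sketch of that drain-the-queue pattern; drain_fresh_sccs and the sample SCCs are hypothetical stand-ins, not mypy's API.

from typing import Callable, List

SCC = List[str]  # a strongly connected component, as a list of module ids

def drain_fresh_sccs(fresh_scc_queue: List[SCC],
                     process_fresh: Callable[[SCC], None]) -> List[SCC]:
    # Mirrors the new build.py logic: every queued fresh SCC is processed
    # up front instead of lazily, and the queue is reset to empty.
    for scc in fresh_scc_queue:
        process_fresh(scc)
    return []

# Hypothetical usage; in mypy, process_fresh would be process_fresh_scc.
queue = [['pkg.a', 'pkg.b'], ['pkg.c']]
queue = drain_fresh_sccs(queue, lambda scc: print('processing fresh SCC:', scc))
assert queue == []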

mypy/dmypy_server.py

Lines changed: 2 additions & 1 deletion

@@ -105,7 +105,8 @@ def __init__(self, flags: List[str]) -> None:
         if self.fine_grained:
             options.incremental = True
             options.show_traceback = True
-            options.cache_dir = os.devnull
+            if not options.cache_fine_grained:
+                options.cache_dir = os.devnull
 
     def serve(self) -> None:
         """Serve requests, synchronously (no thread or fork)."""

mypy/server/update.py

Lines changed: 18 additions & 6 deletions

@@ -281,9 +281,10 @@ def update_single(self, module: str, path: str) -> Tuple[List[str],
             print('triggered:', sorted(filtered))
         self.triggered.extend(triggered | self.previous_targets_with_errors)
         collect_dependencies({module: tree}, self.deps, graph)
-        propagate_changes_using_dependencies(manager, graph, self.deps, triggered,
-                                             {module},
-                                             self.previous_targets_with_errors)
+        remaining += propagate_changes_using_dependencies(
+            manager, graph, self.deps, triggered,
+            {module},
+            self.previous_targets_with_errors)
 
         # Preserve state needed for the next update.
         self.previous_targets_with_errors = manager.errors.targets()
@@ -707,9 +708,10 @@ def propagate_changes_using_dependencies(
         deps: Dict[str, Set[str]],
         triggered: Set[str],
         up_to_date_modules: Set[str],
-        targets_with_errors: Set[str]) -> None:
+        targets_with_errors: Set[str]) -> List[Tuple[str, str]]:
     # TODO: Multiple type checking passes
     num_iter = 0
+    remaining_modules = []
 
     # Propagate changes until nothing visible has changed during the last
     # iteration.
@@ -733,7 +735,13 @@ def propagate_changes_using_dependencies(
         # TODO: Preserve order (set is not optimal)
         for id, nodes in sorted(todo.items(), key=lambda x: x[0]):
             assert id not in up_to_date_modules
-            triggered |= reprocess_nodes(manager, graph, id, nodes, deps)
+            # TODO: Is there a better way to detect that the file isn't loaded?
+            if not manager.modules[id].defs:
+                # We haven't actually loaded this file! Add it to the
+                # queue of files that need to be processed fully.
+                remaining_modules.append((id, manager.modules[id].path))
+            else:
+                triggered |= reprocess_nodes(manager, graph, id, nodes, deps)
         # Changes elsewhere may require us to reprocess modules that were
         # previously considered up to date. For example, there may be a
         # dependency loop that loops back to an originally processed module.
@@ -742,6 +750,8 @@ def propagate_changes_using_dependencies(
         if DEBUG:
             print('triggered:', list(triggered))
 
+    return remaining_modules
+
 
 def find_targets_recursive(
         triggers: Set[str],
@@ -989,4 +999,6 @@ def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNod
 
 
 def extract_type_maps(graph: Graph) -> Dict[str, Dict[Expression, Type]]:
-    return {id: state.type_map() for id, state in graph.items()}
+    # This is used to export information used only by the testmerge harness.
+    return {id: state.type_map() for id, state in graph.items()
+            if state.tree}
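
These hunks change the contract of propagate_changes_using_dependencies: instead of returning None, it now hands back a list of (module id, path) pairs for modules whose trees have no definitions, i.e. modules known only from cache metadata that were never actually loaded, and update_single accumulates them in remaining so they can be processed in full later. Below is a toy sketch of that partitioning, using a hypothetical ModuleStub in place of mypy's MypyFile.

from typing import Dict, List, Set, Tuple

class ModuleStub:
    # Hypothetical stand-in for a MypyFile tree: defs is empty when the
    # file's contents were never actually loaded (cache-only shell).
    def __init__(self, path: str, defs: List[str]) -> None:
        self.path = path
        self.defs = defs

def propagate(modules: Dict[str, ModuleStub],
              todo: Dict[str, Set[str]]) -> List[Tuple[str, str]]:
    remaining_modules = []  # type: List[Tuple[str, str]]
    for id, nodes in sorted(todo.items()):
        if not modules[id].defs:
            # Not actually loaded: hand the whole file back to the caller
            # for full processing, as the diff does with remaining_modules.
            remaining_modules.append((id, modules[id].path))
        else:
            print('reprocessing targets in', id, ':', sorted(nodes))
    return remaining_modules

modules = {'a': ModuleStub('a.py', ['f']), 'b': ModuleStub('b.py', [])}
remaining = []  # type: List[Tuple[str, str]]
remaining += propagate(modules, {'a': {'<a.f>'}, 'b': {'<b.g>'}})
print('needs full processing:', remaining)  # [('b', 'b.py')]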
