 from mypy.errors import Errors, CompileError, ErrorInfo, report_internal_error
 from mypy.util import (
     DecodeError, decode_python_encoding, is_sub_path, get_mypy_comments, module_prefix,
-    read_py_file, hash_digest, is_typeshed_file, is_stub_package_file, get_top_two_prefixes
+    read_py_file, hash_digest, is_typeshed_file, is_stub_package_file, get_top_two_prefixes,
+    time_ref, time_spent_us
 )
 if TYPE_CHECKING:
     from mypy.report import Reports  # Avoid unconditional slow import
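
The time_ref and time_spent_us helpers imported here from mypy.util are not shown in this diff. A minimal sketch of the kind of wrappers they are assumed to be, built on time.perf_counter_ns (the actual implementations in mypy.util may differ):

    import time

    def time_ref() -> int:
        # Opaque reference timestamp; only meaningful when passed back to time_spent_us().
        return time.perf_counter_ns()

    def time_spent_us(t0: int) -> int:
        # Whole microseconds elapsed since t0 was taken with time_ref().
        return (time.perf_counter_ns() - t0) // 1000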
@@ -256,6 +257,8 @@ def _build(sources: List[BuildSource],
         graph = dispatch(sources, manager, stdout)
         if not options.fine_grained_incremental:
             TypeState.reset_all_subtype_caches()
+        if options.timing_stats is not None:
+            dump_timing_stats(options.timing_stats, graph)
         return BuildResult(manager, graph)
     finally:
         t0 = time.time()
@@ -1808,6 +1811,9 @@ class State:
 
     fine_grained_deps_loaded = False
 
+    # Cumulative time spent on this file, in microseconds (for profiling stats)
+    time_spent_us: int = 0
+
     def __init__(self,
                  id: Optional[str],
                  path: Optional[str],
@@ -2034,6 +2040,8 @@ def parse_file(self) -> None:
         else:
             manager.log("Using cached AST for %s (%s)" % (self.xpath, self.id))
 
+        t0 = time_ref()
+
         with self.wrap_context():
             source = self.source
             self.source = None  # We won't need it again.
@@ -2079,6 +2087,8 @@ def parse_file(self) -> None:
                 self.tree.ignored_lines,
                 self.ignore_all or self.options.ignore_errors)
 
+        self.time_spent_us += time_spent_us(t0)
+
         if not cached:
             # Make a copy of any errors produced during parse time so that
             # fine-grained mode can repeat them when the module is
@@ -2113,6 +2123,9 @@ def semantic_analysis_pass1(self) -> None:
         """
         options = self.options
         assert self.tree is not None
+
+        t0 = time_ref()
+
         # Do the first pass of semantic analysis: analyze the reachability
         # of blocks and import statements. We must do this before
         # processing imports, since this may mark some import statements as
@@ -2131,6 +2144,7 @@ def semantic_analysis_pass1(self) -> None:
         if options.allow_redefinition:
             # Perform more renaming across the AST to allow variable redefinitions
             self.tree.accept(VariableRenameVisitor())
+        self.time_spent_us += time_spent_us(t0)
 
     def add_dependency(self, dep: str) -> None:
         if dep not in self.dependencies_set:
@@ -2188,8 +2202,10 @@ def compute_dependencies(self) -> None:
     def type_check_first_pass(self) -> None:
         if self.options.semantic_analysis_only:
             return
+        t0 = time_ref()
         with self.wrap_context():
             self.type_checker().check_first_pass()
+        self.time_spent_us += time_spent_us(t0)
 
     def type_checker(self) -> TypeChecker:
         if not self._type_checker:
@@ -2207,14 +2223,18 @@ def type_map(self) -> Dict[Expression, Type]:
     def type_check_second_pass(self) -> bool:
         if self.options.semantic_analysis_only:
             return False
+        t0 = time_ref()
         with self.wrap_context():
-            return self.type_checker().check_second_pass()
+            result = self.type_checker().check_second_pass()
+        self.time_spent_us += time_spent_us(t0)
+        return result
 
     def finish_passes(self) -> None:
         assert self.tree is not None, "Internal error: method must be called on parsed file only"
         manager = self.manager
         if self.options.semantic_analysis_only:
             return
+        t0 = time_ref()
         with self.wrap_context():
             # Some tests (and tools) want to look at the set of all types.
             options = manager.options
@@ -2237,6 +2256,7 @@ def finish_passes(self) -> None:
             self.free_state()
             if not manager.options.fine_grained_incremental and not manager.options.preserve_asts:
                 free_tree(self.tree)
+        self.time_spent_us += time_spent_us(t0)
 
     def free_state(self) -> None:
         if self._type_checker:
@@ -2771,6 +2791,16 @@ def dumps(self) -> str:
                                                      json.dumps(self.deps))
 
 
+def dump_timing_stats(path: str, graph: Graph) -> None:
+    """
+    Dump timing stats for each file in the given graph
+    """
+    with open(path, 'w') as f:
+        for k in sorted(graph.keys()):
+            v = graph[k]
+            f.write('{} {}\n'.format(v.id, v.time_spent_us))
+
+
 def dump_graph(graph: Graph, stdout: Optional[TextIO] = None) -> None:
     """Dump the graph as a JSON string to stdout.
 
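
The stats file written by dump_timing_stats has one line per module: the module id followed by the cumulative time spent on it in microseconds. A hypothetical consumer script (not part of this change) that prints the slowest modules from such a file, assuming it was produced via the options.timing_stats path wired up in _build above:

    import sys

    def report_slowest(path: str, n: int = 10) -> None:
        # Each line is "<module id> <cumulative time in microseconds>".
        entries = []
        with open(path) as f:
            for line in f:
                module, us = line.split()
                entries.append((int(us), module))
        for us, module in sorted(entries, reverse=True)[:n]:
            print('{:10.1f} ms  {}'.format(us / 1000, module))

    if __name__ == '__main__':
        report_slowest(sys.argv[1])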