@@ -335,6 +335,7 @@ class CacheMeta(NamedTuple):
335
335
# dep_prios and dep_lines are in parallel with dependencies + suppressed
336
336
dep_prios : list [int ]
337
337
dep_lines : list [int ]
338
+ dep_hashes : dict [str , str ]
338
339
interface_hash : str # hash representing the public interface
339
340
version_id : str # mypy version for cache invalidation
340
341
ignore_all : bool # if errors were ignored
@@ -373,6 +374,7 @@ def cache_meta_from_dict(meta: dict[str, Any], data_json: str) -> CacheMeta:
373
374
meta .get ("options" ),
374
375
meta .get ("dep_prios" , []),
375
376
meta .get ("dep_lines" , []),
377
+ meta .get ("dep_hashes" , {}),
376
378
meta .get ("interface_hash" , "" ),
377
379
meta .get ("version_id" , sentinel ),
378
380
meta .get ("ignore_all" , True ),
@@ -890,8 +892,6 @@ def log(self, *message: str) -> None:
890
892
self .stderr .flush ()
891
893
892
894
def log_fine_grained (self , * message : str ) -> None :
893
- import mypy .build
894
-
895
895
if self .verbosity () >= 1 :
896
896
self .log ("fine-grained:" , * message )
897
897
elif mypy .build .DEBUG_FINE_GRAINED :
@@ -1500,6 +1500,7 @@ def validate_meta(
1500
1500
"options" : (manager .options .clone_for_module (id ).select_options_affecting_cache ()),
1501
1501
"dep_prios" : meta .dep_prios ,
1502
1502
"dep_lines" : meta .dep_lines ,
1503
+ "dep_hashes" : meta .dep_hashes ,
1503
1504
"interface_hash" : meta .interface_hash ,
1504
1505
"version_id" : manager .version_id ,
1505
1506
"ignore_all" : meta .ignore_all ,
@@ -1543,7 +1544,7 @@ def write_cache(
1543
1544
source_hash : str ,
1544
1545
ignore_all : bool ,
1545
1546
manager : BuildManager ,
1546
- ) -> tuple [str , CacheMeta | None ]:
1547
+ ) -> tuple [str , tuple [ dict [ str , Any ], str , str ] | None ]:
1547
1548
"""Write cache files for a module.
1548
1549
1549
1550
Note that this mypy's behavior is still correct when any given
@@ -1564,9 +1565,9 @@ def write_cache(
1564
1565
manager: the build manager (for pyversion, log/trace)
1565
1566
1566
1567
Returns:
1567
- A tuple containing the interface hash and CacheMeta
1568
- corresponding to the metadata that was written (the latter may
1569
- be None if the cache could not be written).
1568
+ A tuple containing the interface hash and an inner tuple with the cache meta JSON
1569
+ that should be written and the paths to the cache files (the inner tuple may be
1570
+ None if the cache data could not be written).
1570
1571
"""
1571
1572
metastore = manager .metastore
1572
1573
# For Bazel we use relative paths and zero mtimes.
@@ -1581,6 +1582,8 @@ def write_cache(
1581
1582
if bazel :
1582
1583
tree .path = path
1583
1584
1585
+ plugin_data = manager .plugin .report_config_data (ReportConfigContext (id , path , is_check = False ))
1586
+
1584
1587
# Serialize data and analyze interface
1585
1588
if manager .options .fixed_format_cache :
1586
1589
data_io = Buffer ()
@@ -1589,9 +1592,7 @@ def write_cache(
1589
1592
else :
1590
1593
data = tree .serialize ()
1591
1594
data_bytes = json_dumps (data , manager .options .debug_cache )
1592
- interface_hash = hash_digest (data_bytes )
1593
-
1594
- plugin_data = manager .plugin .report_config_data (ReportConfigContext (id , path , is_check = False ))
1595
+ interface_hash = hash_digest (data_bytes + json_dumps (plugin_data ))
1595
1596
1596
1597
# Obtain and set up metadata
1597
1598
st = manager .get_stat (path )
@@ -1659,16 +1660,22 @@ def write_cache(
1659
1660
"ignore_all" : ignore_all ,
1660
1661
"plugin_data" : plugin_data ,
1661
1662
}
1663
+ return interface_hash , (meta , meta_json , data_json )
1662
1664
1665
+
1666
+ def write_cache_meta (
1667
+ meta : dict [str , Any ], manager : BuildManager , meta_json : str , data_json : str
1668
+ ) -> CacheMeta :
1663
1669
# Write meta cache file
1670
+ metastore = manager .metastore
1664
1671
meta_str = json_dumps (meta , manager .options .debug_cache )
1665
1672
if not metastore .write (meta_json , meta_str ):
1666
1673
# Most likely the error is the replace() call
1667
1674
# (see https://github.com/python/mypy/issues/3215).
1668
1675
# The next run will simply find the cache entry out of date.
1669
1676
manager .log (f"Error writing meta JSON file { meta_json } " )
1670
1677
1671
- return interface_hash , cache_meta_from_dict (meta , data_json )
1678
+ return cache_meta_from_dict (meta , data_json )
1672
1679
1673
1680
1674
1681
def delete_cache (id : str , path : str , manager : BuildManager ) -> None :
@@ -1867,6 +1874,9 @@ class State:
1867
1874
# Map each dependency to the line number where it is first imported
1868
1875
dep_line_map : dict [str , int ]
1869
1876
1877
+ # Map from dependency id to its last observed interface hash
1878
+ dep_hashes : dict [str , str ] = {}
1879
+
1870
1880
# Parent package, its parent, etc.
1871
1881
ancestors : list [str ] | None = None
1872
1882
@@ -1879,9 +1889,6 @@ class State:
1879
1889
# If caller_state is set, the line number in the caller where the import occurred
1880
1890
caller_line = 0
1881
1891
1882
- # If True, indicate that the public interface of this module is unchanged
1883
- externally_same = True
1884
-
1885
1892
# Contains a hash of the public interface in incremental mode
1886
1893
interface_hash : str = ""
1887
1894
@@ -1994,6 +2001,7 @@ def __init__(
1994
2001
self .priorities = {id : pri for id , pri in zip (all_deps , self .meta .dep_prios )}
1995
2002
assert len (all_deps ) == len (self .meta .dep_lines )
1996
2003
self .dep_line_map = {id : line for id , line in zip (all_deps , self .meta .dep_lines )}
2004
+ self .dep_hashes = self .meta .dep_hashes
1997
2005
if temporary :
1998
2006
self .load_tree (temporary = True )
1999
2007
if not manager .use_fine_grained_cache ():
@@ -2046,26 +2054,17 @@ def is_fresh(self) -> bool:
2046
2054
"""Return whether the cache data for this file is fresh."""
2047
2055
# NOTE: self.dependencies may differ from
2048
2056
# self.meta.dependencies when a dependency is dropped due to
2049
- # suppression by silent mode. However when a suppressed
2057
+ # suppression by silent mode. However, when a suppressed
2050
2058
# dependency is added back we find out later in the process.
2051
- return (
2052
- self .meta is not None
2053
- and self .is_interface_fresh ()
2054
- and self .dependencies == self .meta .dependencies
2055
- )
2056
-
2057
- def is_interface_fresh (self ) -> bool :
2058
- return self .externally_same
2059
+ return self .meta is not None and self .dependencies == self .meta .dependencies
2059
2060
2060
2061
def mark_as_rechecked (self ) -> None :
2061
2062
"""Marks this module as having been fully re-analyzed by the type-checker."""
2062
2063
self .manager .rechecked_modules .add (self .id )
2063
2064
2064
- def mark_interface_stale (self , * , on_errors : bool = False ) -> None :
2065
+ def mark_interface_stale (self ) -> None :
2065
2066
"""Marks this module as having a stale public interface, and discards the cache data."""
2066
- self .externally_same = False
2067
- if not on_errors :
2068
- self .manager .stale_modules .add (self .id )
2067
+ self .manager .stale_modules .add (self .id )
2069
2068
2070
2069
def check_blockers (self ) -> None :
2071
2070
"""Raise CompileError if a blocking error is detected."""
@@ -2507,7 +2506,7 @@ def valid_references(self) -> set[str]:
2507
2506
2508
2507
return valid_refs
2509
2508
2510
- def write_cache (self ) -> None :
2509
+ def write_cache (self ) -> tuple [ dict [ str , Any ], str , str ] | None :
2511
2510
assert self .tree is not None , "Internal error: method must be called on parsed file only"
2512
2511
# We don't support writing cache files in fine-grained incremental mode.
2513
2512
if (
@@ -2525,20 +2524,19 @@ def write_cache(self) -> None:
2525
2524
except Exception :
2526
2525
print (f"Error serializing { self .id } " , file = self .manager .stdout )
2527
2526
raise # Propagate to display traceback
2528
- return
2527
+ return None
2529
2528
is_errors = self .transitive_error
2530
2529
if is_errors :
2531
2530
delete_cache (self .id , self .path , self .manager )
2532
2531
self .meta = None
2533
- self .mark_interface_stale (on_errors = True )
2534
- return
2532
+ return None
2535
2533
dep_prios = self .dependency_priorities ()
2536
2534
dep_lines = self .dependency_lines ()
2537
2535
assert self .source_hash is not None
2538
2536
assert len (set (self .dependencies )) == len (
2539
2537
self .dependencies
2540
2538
), f"Duplicates in dependencies list for { self .id } ({ self .dependencies } )"
2541
- new_interface_hash , self . meta = write_cache (
2539
+ new_interface_hash , meta_tuple = write_cache (
2542
2540
self .id ,
2543
2541
self .path ,
2544
2542
self .tree ,
@@ -2557,6 +2555,7 @@ def write_cache(self) -> None:
2557
2555
self .manager .log (f"Cached module { self .id } has changed interface" )
2558
2556
self .mark_interface_stale ()
2559
2557
self .interface_hash = new_interface_hash
2558
+ return meta_tuple
2560
2559
2561
2560
def verify_dependencies (self , suppressed_only : bool = False ) -> None :
2562
2561
"""Report errors for import targets in modules that don't exist.
@@ -3287,7 +3286,19 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
3287
3286
for id in scc :
3288
3287
deps .update (graph [id ].dependencies )
3289
3288
deps -= ascc
3290
- stale_deps = {id for id in deps if id in graph and not graph [id ].is_interface_fresh ()}
3289
+
3290
+ # Verify that the interfaces of dependencies still present in the graph are up-to-date (fresh).
3291
+ # Note: if a dependency is no longer in the graph, it should be considered interface-stale.
3292
+ # This is important to trigger any relevant updates from indirect dependencies that were
3293
+ # removed in load_graph().
3294
+ stale_deps = set ()
3295
+ for id in ascc :
3296
+ for dep in graph [id ].dep_hashes :
3297
+ if dep not in graph :
3298
+ stale_deps .add (dep )
3299
+ continue
3300
+ if graph [dep ].interface_hash != graph [id ].dep_hashes [dep ]:
3301
+ stale_deps .add (dep )
3291
3302
fresh = fresh and not stale_deps
3292
3303
undeps = set ()
3293
3304
if fresh :
@@ -3518,14 +3529,25 @@ def process_stale_scc(graph: Graph, scc: list[str], manager: BuildManager) -> No
3518
3529
if any (manager .errors .is_errors_for_file (graph [id ].xpath ) for id in stale ):
3519
3530
for id in stale :
3520
3531
graph [id ].transitive_error = True
3532
+ meta_tuples = {}
3521
3533
for id in stale :
3522
3534
if graph [id ].xpath not in manager .errors .ignored_files :
3523
3535
errors = manager .errors .file_messages (
3524
3536
graph [id ].xpath , formatter = manager .error_formatter
3525
3537
)
3526
3538
manager .flush_errors (manager .errors .simplify_path (graph [id ].xpath ), errors , False )
3527
- graph [id ].write_cache ()
3539
+ meta_tuples [ id ] = graph [id ].write_cache ()
3528
3540
graph [id ].mark_as_rechecked ()
3541
+ for id in stale :
3542
+ meta_tuple = meta_tuples [id ]
3543
+ if meta_tuple is None :
3544
+ graph [id ].meta = None
3545
+ continue
3546
+ meta , meta_json , data_json = meta_tuple
3547
+ meta ["dep_hashes" ] = {
3548
+ dep : graph [dep ].interface_hash for dep in graph [id ].dependencies if dep in graph
3549
+ }
3550
+ graph [id ].meta = write_cache_meta (meta , manager , meta_json , data_json )
3529
3551
3530
3552
3531
3553
def sorted_components (
0 commit comments