57 changes: 47 additions & 10 deletions mypy/build.py
@@ -43,7 +43,14 @@
from librt.internal import cache_version

import mypy.semanal_main
from mypy.cache import CACHE_VERSION, CacheMeta, ReadBuffer, WriteBuffer, write_json
from mypy.cache import (
CACHE_VERSION,
CacheMeta,
ReadBuffer,
SerializedError,
WriteBuffer,
write_json,
)
from mypy.checker import TypeChecker
from mypy.defaults import (
WORKER_CONNECTION_TIMEOUT,
@@ -52,7 +59,7 @@
WORKER_START_TIMEOUT,
)
from mypy.error_formatter import OUTPUT_CHOICES, ErrorFormatter
from mypy.errors import CompileError, ErrorInfo, Errors, report_internal_error
from mypy.errors import CompileError, ErrorInfo, Errors, ErrorTuple, report_internal_error
from mypy.graph_utils import prepare_sccs, strongly_connected_components, topsort
from mypy.indirection import TypeIndirectionVisitor
from mypy.ipc import BadStatus, IPCClient, read_status, ready_to_read, receive, send
@@ -2046,7 +2053,7 @@ class State:
dep_hashes: dict[str, bytes] = {}

# List of errors reported for this file last time.
error_lines: list[str] = []
error_lines: list[SerializedError] = []

# Parent package, its parent, etc.
ancestors: list[str] | None = None
@@ -3511,9 +3518,13 @@ def find_stale_sccs(
scc = order_ascc_ex(graph, ascc)
for id in scc:
if graph[id].error_lines:
manager.flush_errors(
manager.errors.simplify_path(graph[id].xpath), graph[id].error_lines, False
path = manager.errors.simplify_path(graph[id].xpath)
formatted = manager.errors.format_messages(
path,
deserialize_codes(graph[id].error_lines),
formatter=manager.error_formatter,
)
manager.flush_errors(path, formatted, False)
fresh_sccs.append(ascc)
else:
size = len(ascc.mod_ids)
@@ -3759,21 +3770,24 @@ def process_stale_scc(
# Flush errors, and write cache in two phases: first data files, then meta files.
meta_tuples = {}
errors_by_id = {}
formatted_by_id = {}
for id in stale:
if graph[id].xpath not in manager.errors.ignored_files:
errors = manager.errors.file_messages(
graph[id].xpath, formatter=manager.error_formatter
errors = manager.errors.file_messages(graph[id].xpath)
formatted = manager.errors.format_messages(
graph[id].xpath, errors, formatter=manager.error_formatter
)
manager.flush_errors(manager.errors.simplify_path(graph[id].xpath), errors, False)
manager.flush_errors(manager.errors.simplify_path(graph[id].xpath), formatted, False)
errors_by_id[id] = errors
formatted_by_id[id] = formatted
meta_tuples[id] = graph[id].write_cache()
for id in stale:
meta_tuple = meta_tuples[id]
if meta_tuple is None:
continue
meta, meta_file = meta_tuple
meta.dep_hashes = [graph[dep].interface_hash for dep in graph[id].dependencies]
meta.error_lines = errors_by_id.get(id, [])
meta.error_lines = serialize_codes(errors_by_id.get(id, []))
write_cache_meta(meta, manager, meta_file)
manager.done_sccs.add(ascc.id)
manager.add_stats(
@@ -3785,7 +3799,7 @@
)
scc_result = {}
for id in scc:
scc_result[id] = graph[id].interface_hash.hex(), errors_by_id.get(id, [])
scc_result[id] = graph[id].interface_hash.hex(), formatted_by_id.get(id, [])
return scc_result


@@ -3932,3 +3946,26 @@ def sccs_to_bytes(sccs: list[SCC]) -> bytes:
buf = WriteBuffer()
write_json(buf, {"sccs": scc_tuples})
return buf.getvalue()


def serialize_codes(errs: list[ErrorTuple]) -> list[SerializedError]:
return [
(path, line, column, end_line, end_column, severity, message, code.code if code else None)
for path, line, column, end_line, end_column, severity, message, code in errs
]


def deserialize_codes(errs: list[SerializedError]) -> list[ErrorTuple]:
return [
(
path,
line,
column,
end_line,
end_column,
severity,
message,
codes.error_codes.get(code) if code else None,
)
for path, line, column, end_line, end_column, severity, message, code in errs
]
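For reference, a minimal sketch (not part of the diff) of how the new helpers round-trip an error: serialize_codes stores the ErrorCode by its string name so the tuple can live in the cache meta, and deserialize_codes resolves the name back through mypy.errorcodes.error_codes. The file name and message below are purely illustrative.

    from mypy import errorcodes as codes
    from mypy.build import deserialize_codes, serialize_codes

    # One ErrorTuple: (path, line, column, end_line, end_column, severity, message, code).
    err = ("main.py", 2, 0, 2, 5, "error", "Missing return statement", codes.RETURN)
    [serialized] = serialize_codes([err])
    assert serialized[-1] == "return"  # the ErrorCode is stored by name
    [restored] = deserialize_codes([serialized])
    assert restored[-1] is codes.error_codes["return"]  # and resolved back on load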
69 changes: 57 additions & 12 deletions mypy/cache.py
@@ -69,7 +69,9 @@
from mypy_extensions import u8

# High-level cache layout format
CACHE_VERSION: Final = 0
CACHE_VERSION: Final = 1

SerializedError: _TypeAlias = tuple[str | None, int, int, int, int, str, str, str | None]


class CacheMeta:
@@ -92,7 +94,7 @@ def __init__(
dep_lines: list[int],
dep_hashes: list[bytes],
interface_hash: bytes,
error_lines: list[str],
error_lines: list[SerializedError],
version_id: str,
ignore_all: bool,
plugin_data: Any,
@@ -157,7 +159,7 @@ def deserialize(cls, meta: dict[str, Any], data_file: str) -> CacheMeta | None:
dep_lines=meta["dep_lines"],
dep_hashes=[bytes.fromhex(dep) for dep in meta["dep_hashes"]],
interface_hash=bytes.fromhex(meta["interface_hash"]),
error_lines=meta["error_lines"],
error_lines=[tuple(err) for err in meta["error_lines"]],
version_id=meta["version_id"],
ignore_all=meta["ignore_all"],
plugin_data=meta["plugin_data"],
@@ -179,7 +181,7 @@ def write(self, data: WriteBuffer) -> None:
write_int_list(data, self.dep_lines)
write_bytes_list(data, self.dep_hashes)
write_bytes(data, self.interface_hash)
write_str_list(data, self.error_lines)
write_errors(data, self.error_lines)
write_str(data, self.version_id)
write_bool(data, self.ignore_all)
# Plugin data may be not a dictionary, so we use
@@ -204,7 +206,7 @@ def read(cls, data: ReadBuffer, data_file: str) -> CacheMeta | None:
dep_lines=read_int_list(data),
dep_hashes=read_bytes_list(data),
interface_hash=read_bytes(data),
error_lines=read_str_list(data),
error_lines=read_errors(data),
version_id=read_str(data),
ignore_all=read_bool(data),
plugin_data=read_json_value(data),
@@ -231,6 +233,7 @@ def read(cls, data: ReadBuffer, data_file: str) -> CacheMeta | None:
LIST_INT: Final[Tag] = 21
LIST_STR: Final[Tag] = 22
LIST_BYTES: Final[Tag] = 23
TUPLE_GEN: Final[Tag] = 24
DICT_STR_GEN: Final[Tag] = 30

# Misc classes.
@@ -391,12 +394,11 @@ def write_str_opt_list(data: WriteBuffer, value: list[str | None]) -> None:


Value: _TypeAlias = None | int | str | bool
JsonValue: _TypeAlias = Value | list["JsonValue"] | dict[str, "JsonValue"]

# Currently tuples are used by mypyc plugin. They will be normalized to
# JSON lists after a roundtrip.
JsonValueEx: _TypeAlias = (
Value | list["JsonValueEx"] | dict[str, "JsonValueEx"] | tuple["JsonValueEx", ...]
# Our JSON format is somewhat non-standard, as we distinguish lists from tuples.
# This is convenient for some internal uses, such as the mypyc plugin and error serialization.
JsonValue: _TypeAlias = (
Value | list["JsonValue"] | dict[str, "JsonValue"] | tuple["JsonValue", ...]
)


@@ -415,13 +417,16 @@ def read_json_value(data: ReadBuffer) -> JsonValue:
if tag == LIST_GEN:
size = read_int_bare(data)
return [read_json_value(data) for _ in range(size)]
if tag == TUPLE_GEN:
size = read_int_bare(data)
return tuple(read_json_value(data) for _ in range(size))
if tag == DICT_STR_GEN:
size = read_int_bare(data)
return {read_str_bare(data): read_json_value(data) for _ in range(size)}
assert False, f"Invalid JSON tag: {tag}"


def write_json_value(data: WriteBuffer, value: JsonValueEx) -> None:
def write_json_value(data: WriteBuffer, value: JsonValue) -> None:
if value is None:
write_tag(data, LITERAL_NONE)
elif isinstance(value, bool):
@@ -432,11 +437,16 @@ def write_json_value(data: WriteBuffer, value: JsonValueEx) -> None:
elif isinstance(value, str):
write_tag(data, LITERAL_STR)
write_str_bare(data, value)
elif isinstance(value, (list, tuple)):
elif isinstance(value, list):
write_tag(data, LIST_GEN)
write_int_bare(data, len(value))
for val in value:
write_json_value(data, val)
elif isinstance(value, tuple):
write_tag(data, TUPLE_GEN)
write_int_bare(data, len(value))
for val in value:
write_json_value(data, val)
elif isinstance(value, dict):
write_tag(data, DICT_STR_GEN)
write_int_bare(data, len(value))
@@ -461,3 +471,38 @@ def write_json(data: WriteBuffer, value: dict[str, Any]) -> None:
for key in sorted(value):
write_str_bare(data, key)
write_json_value(data, value[key])


def write_errors(data: WriteBuffer, errs: list[SerializedError]) -> None:
write_tag(data, LIST_GEN)
write_int_bare(data, len(errs))
for path, line, column, end_line, end_column, severity, message, code in errs:
write_tag(data, TUPLE_GEN)
write_str_opt(data, path)
write_int(data, line)
write_int(data, column)
write_int(data, end_line)
write_int(data, end_column)
write_str(data, severity)
write_str(data, message)
write_str_opt(data, code)


def read_errors(data: ReadBuffer) -> list[SerializedError]:
assert read_tag(data) == LIST_GEN
result = []
for _ in range(read_int_bare(data)):
assert read_tag(data) == TUPLE_GEN
result.append(
(
read_str_opt(data),
read_int(data),
read_int(data),
read_int(data),
read_int(data),
read_str(data),
read_str(data),
read_str_opt(data),
)
)
return result
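As an aside, a minimal sketch (not part of the diff) of what the new TUPLE_GEN tag buys: tuples now survive a write/read round-trip instead of being normalized to lists. Constructing ReadBuffer directly from WriteBuffer.getvalue() is an assumption made for illustration; that constructor is not shown in this diff.

    from mypy.cache import ReadBuffer, WriteBuffer, read_json_value, write_json_value

    buf = WriteBuffer()
    write_json_value(buf, {"as_list": [1, 2], "as_tuple": (1, 2)})
    # ReadBuffer(...) is assumed here to wrap the serialized bytes.
    value = read_json_value(ReadBuffer(buf.getvalue()))
    assert value == {"as_list": [1, 2], "as_tuple": (1, 2)}  # tuple kept as a tuple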
25 changes: 15 additions & 10 deletions mypy/errors.py
@@ -951,7 +951,7 @@ def raise_error(self, use_stdout: bool = True) -> NoReturn:
self.new_messages(), use_stdout=use_stdout, module_with_blocker=self.blocker_module()
)

def format_messages(
def format_messages_default(
self, error_tuples: list[ErrorTuple], source_lines: list[str] | None
) -> list[str]:
"""Return a string list that represents the error messages.
@@ -1009,24 +1009,28 @@ def format_messages(
a.append(" " * (DEFAULT_SOURCE_OFFSET + column) + marker)
return a

def file_messages(self, path: str, formatter: ErrorFormatter | None = None) -> list[str]:
"""Return a string list of new error messages from a given file.

Use a form suitable for displaying to the user.
"""
def file_messages(self, path: str) -> list[ErrorTuple]:
"""Return an error tuple list of new error messages from a given file."""
if path not in self.error_info_map:
return []

error_info = self.error_info_map[path]
error_info = [info for info in error_info if not info.hidden]
error_info = self.remove_duplicates(self.sort_messages(error_info))
error_tuples = self.render_messages(error_info)
return self.render_messages(error_info)

def format_messages(
self, path: str, error_tuples: list[ErrorTuple], formatter: ErrorFormatter | None = None
) -> list[str]:
"""Return a string list of new error messages from a given file.

Use a form suitable for displaying to the user.
"""
self.flushed_files.add(path)
if formatter is not None:
errors = create_errors(error_tuples)
return [formatter.report_error(err) for err in errors]

self.flushed_files.add(path)
source_lines = None
if self.options.pretty and self.read_source:
# Find shadow file mapping and read source lines if a shadow file exists for the given path.
@@ -1036,7 +1040,7 @@ def file_messages(self, path: str, formatter: ErrorFormatter | None = None) -> l
source_lines = self.read_source(mapped_path)
else:
source_lines = self.read_source(path)
return self.format_messages(error_tuples, source_lines)
return self.format_messages_default(error_tuples, source_lines)

def find_shadow_file_mapping(self, path: str) -> str | None:
"""Return the shadow file path for a given source file path or None."""
@@ -1058,7 +1062,8 @@ def new_messages(self) -> list[str]:
msgs = []
for path in self.error_info_map.keys():
if path not in self.flushed_files:
msgs.extend(self.file_messages(path))
error_tuples = self.file_messages(path)
msgs.extend(self.format_messages(path, error_tuples))
return msgs

def targets(self) -> set[str]:
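A brief sketch (illustrative names, not part of the diff) of the call pattern after this split: file_messages() now returns raw ErrorTuples, which is the form the cache stores, and format_messages() turns them into user-facing strings at flush time.

    # `errors`, `path`, and `error_formatter` stand in for real objects.
    error_tuples = errors.file_messages(path)  # raw ErrorTuples, suitable for caching
    messages = errors.format_messages(path, error_tuples, formatter=error_formatter)
    for message in messages:  # human-readable lines for the user
        print(message)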
10 changes: 10 additions & 0 deletions test-data/unit/check-incremental.test
@@ -7626,3 +7626,13 @@ y = 1
class C: ...
[out2]
tmp/m.py:2: note: Revealed type is "def () -> other.C"

[case testOutputFormatterIncremental]
# flags2: --output json
def wrong() -> int:
if wrong():
return 0
[out]
main:2: error: Missing return statement
[out2]
{"file": "main", "line": 2, "column": 0, "message": "Missing return statement", "hint": null, "code": "return", "severity": "error"}