|
83 | 83 | WORKER_START_TIMEOUT, |
84 | 84 | ) |
85 | 85 | from mypy.error_formatter import OUTPUT_CHOICES, ErrorFormatter |
86 | | -from mypy.errors import CompileError, ErrorInfo, Errors, ErrorTuple, report_internal_error |
| 86 | +from mypy.errors import ( |
| 87 | + CompileError, |
| 88 | + ErrorInfo, |
| 89 | + Errors, |
| 90 | + ErrorTuple, |
| 91 | + ErrorTupleRaw, |
| 92 | + report_internal_error, |
| 93 | +) |
87 | 94 | from mypy.graph_utils import prepare_sccs, strongly_connected_components, topsort |
88 | 95 | from mypy.indirection import TypeIndirectionVisitor |
89 | 96 | from mypy.ipc import BadStatus, IPCClient, IPCMessage, read_status, ready_to_read, receive, send |
90 | 97 | from mypy.messages import MessageBuilder |
91 | | -from mypy.nodes import Import, ImportAll, ImportBase, ImportFrom, MypyFile, SymbolTable |
| 98 | +from mypy.nodes import ( |
| 99 | + ClassDef, |
| 100 | + Context, |
| 101 | + Import, |
| 102 | + ImportAll, |
| 103 | + ImportBase, |
| 104 | + ImportFrom, |
| 105 | + MypyFile, |
| 106 | + SymbolTable, |
| 107 | +) |
92 | 108 | from mypy.partially_defined import PossiblyUndefinedVariableVisitor |
93 | 109 | from mypy.semanal import SemanticAnalyzer |
94 | 110 | from mypy.semanal_pass1 import SemanticAnalyzerPreAnalysis |
| 111 | +from mypy.traverser import find_definitions |
95 | 112 | from mypy.util import ( |
96 | 113 | DecodeError, |
97 | 114 | decode_python_encoding, |
@@ -866,6 +883,10 @@ def __init__( |
866 | 883 | self.queue_order: int = 0 |
867 | 884 | # Is this an instance used by a parallel worker? |
868 | 885 | self.parallel_worker = parallel_worker |
| 886 | + # Cache for ASTs created during error message generation. Note these are |
| 887 | + # raw parsed trees, not analyzed by mypy. We use these to find the absolute |
| 888 | + # location of a symbol used as a location for an error message. |
| 889 | + self.extra_trees: dict[str, MypyFile] = {} |
869 | 890 |
|
870 | 891 | def dump_stats(self) -> None: |
871 | 892 | if self.options.dump_build_stats: |
@@ -1028,6 +1049,60 @@ def report_file( |
1028 | 1049 | if self.reports is not None and self.source_set.is_source(file): |
1029 | 1050 | self.reports.file(file, self.modules, type_map, options) |
1030 | 1051 |
|
| 1052 | + def resolve_location(self, graph: dict[str, State], fullname: str) -> Context | None: |
| 1053 | + """Resolve an absolute location of a symbol with given fullname.""" |
| 1054 | + rest = [] |
| 1055 | + head = fullname |
| 1056 | + while True: |
| 1057 | + # TODO: this mimics the logic in lookup.py but it is actually wrong. |
| 1058 | + # This is because we don't distinguish between a submodule and a local symbol |
| 1059 | + # with the same name. |
| 1060 | + head, tail = head.rsplit(".", maxsplit=1) |
| 1061 | + rest.append(tail) |
| 1062 | + if head in graph: |
| 1063 | + state = graph[head] |
| 1064 | + break |
| 1065 | + if "." not in head: |
| 1066 | + return None |
| 1067 | + *prefix, name = reversed(rest) |
| 1068 | + # If this happens, something is wrong, but it is better to give a slightly |
| 1069 | + # less helpful error message than to crash. |
| 1070 | + if state.path is None: |
| 1071 | + return None |
| 1072 | + if state.tree is not None and state.tree.defs: |
| 1073 | + # We usually free ASTs after processing, but reuse an existing AST if |
| 1074 | + # it is still available. |
| 1075 | + tree = state.tree |
| 1076 | + elif state.id in self.extra_trees: |
| 1077 | + tree = self.extra_trees[state.id] |
| 1078 | + else: |
| 1079 | + if state.source is not None: |
| 1080 | + # Sources are usually discarded after processing as well, check |
| 1081 | + # if we still have one just in case. |
| 1082 | + source = state.source |
| 1083 | + else: |
| 1084 | + path = state.manager.maybe_swap_for_shadow_path(state.path) |
| 1085 | + source = decode_python_encoding(state.manager.fscache.read(path)) |
| 1086 | + tree = parse(source, state.path, state.id, state.manager.errors, state.options) |
| 1087 | + # TODO: run the first pass of semantic analysis on freshly parsed trees; |
| 1088 | + # we need this to get correct reachability information. |
| 1089 | + self.extra_trees[state.id] = tree |
| 1090 | + statements = tree.defs |
| 1091 | + while prefix: |
| 1092 | + part = prefix.pop(0) |
| 1093 | + for statement in statements: |
| 1094 | + defs = find_definitions(statement, part) |
| 1095 | + if not defs or not isinstance((defn := defs[0]), ClassDef): |
| 1096 | + continue |
| 1097 | + statements = defn.defs.body |
| 1098 | + break |
| 1099 | + else: |
| 1100 | + return None |
| 1101 | + for statement in statements: |
| 1102 | + if defs := find_definitions(statement, name): |
| 1103 | + return defs[0] |
| 1104 | + return None |
| 1105 | + |
1031 | 1106 | def verbosity(self) -> int: |
1032 | 1107 | return self.options.verbosity |
1033 | 1108 |
|
@@ -2359,7 +2434,7 @@ def load_tree(self, temporary: bool = False) -> None: |
2359 | 2434 |
|
2360 | 2435 | def fix_cross_refs(self) -> None: |
2361 | 2436 | assert self.tree is not None, "Internal error: method must be called on parsed file only" |
2362 | | - # We need to set allow_missing when doing a fine grained cache |
| 2437 | + # We need to set allow_missing when doing a fine-grained cache |
2363 | 2438 | # load because we need to gracefully handle missing modules. |
2364 | 2439 | fixup_module(self.tree, self.manager.modules, self.options.use_fine_grained_cache) |
2365 | 2440 |
|
@@ -3558,7 +3633,9 @@ def find_stale_sccs( |
3558 | 3633 | path = manager.errors.simplify_path(graph[id].xpath) |
3559 | 3634 | formatted = manager.errors.format_messages( |
3560 | 3635 | path, |
3561 | | - deserialize_codes(graph[id].error_lines), |
| 3636 | + transform_error_tuples( |
| 3637 | + manager, graph, deserialize_codes(graph[id].error_lines) |
| 3638 | + ), |
3562 | 3639 | formatter=manager.error_formatter, |
3563 | 3640 | ) |
3564 | 3641 | manager.flush_errors(path, formatted, False) |
@@ -3813,7 +3890,9 @@ def process_stale_scc( |
3813 | 3890 | if graph[id].xpath not in manager.errors.ignored_files: |
3814 | 3891 | errors = manager.errors.file_messages(graph[id].xpath) |
3815 | 3892 | formatted = manager.errors.format_messages( |
3816 | | - graph[id].xpath, errors, formatter=manager.error_formatter |
| 3893 | + graph[id].xpath, |
| 3894 | + transform_error_tuples(manager, graph, errors), |
| 3895 | + formatter=manager.error_formatter, |
3817 | 3896 | ) |
3818 | 3897 | manager.flush_errors(manager.errors.simplify_path(graph[id].xpath), formatted, False) |
3819 | 3898 | errors_by_id[id] = errors |
@@ -3972,14 +4051,37 @@ def write_undocumented_ref_info( |
3972 | 4051 | metastore.write(ref_info_file, json_dumps(deps_json)) |
3973 | 4052 |
|
3974 | 4053 |
|
3975 | | -def serialize_codes(errs: list[ErrorTuple]) -> list[SerializedError]: |
| 4054 | +def transform_error_tuples( |
| 4055 | + manager: BuildManager, graph: dict[str, State], error_tuples_rel: list[ErrorTupleRaw] |
| 4056 | +) -> list[ErrorTuple]: |
| 4057 | + """Transform raw error tuples by resolving relative error locations.""" |
| 4058 | + error_tuples = [] |
| 4059 | + for e in error_tuples_rel: |
| 4060 | + file, line_rel, column, end_line, end_column, severity, message, code = e |
| 4061 | + if isinstance(line_rel, int): |
| 4062 | + line = line_rel |
| 4063 | + else: |
| 4064 | + assert file is not None |
| 4065 | + loc = manager.resolve_location(graph, line_rel) |
| 4066 | + if loc is not None: |
| 4067 | + line = loc.line |
| 4068 | + column = loc.column |
| 4069 | + end_line = loc.end_line or -1 |
| 4070 | + end_column = loc.end_column or -1 |
| 4071 | + else: |
| 4072 | + line = -1 |
| 4073 | + error_tuples.append((file, line, column, end_line, end_column, severity, message, code)) |
| 4074 | + return error_tuples |
| 4075 | + |
| 4076 | + |
| 4077 | +def serialize_codes(errs: list[ErrorTupleRaw]) -> list[SerializedError]: |
3976 | 4078 | return [ |
3977 | 4079 | (path, line, column, end_line, end_column, severity, message, code.code if code else None) |
3978 | 4080 | for path, line, column, end_line, end_column, severity, message, code in errs |
3979 | 4081 | ] |
3980 | 4082 |
|
3981 | 4083 |
|
3982 | | -def deserialize_codes(errs: list[SerializedError]) -> list[ErrorTuple]: |
| 4084 | +def deserialize_codes(errs: list[SerializedError]) -> list[ErrorTupleRaw]: |
3983 | 4085 | return [ |
3984 | 4086 | ( |
3985 | 4087 | path, |
|
0 commit comments