| author | Veikka Tuominen <git@vexu.eu> | 2022-07-14 21:26:01 +0300 |
|---|---|---|
| committer | Veikka Tuominen <git@vexu.eu> | 2022-07-14 22:18:58 +0300 |
| commit | b5a838247bd7d66037ba48378c34ba4460747deb (patch) | |
| tree | 353926c424c6d6d8153e23b50862cfb9c3a7088b /src | |
| parent | a4559271501670b39305fc3575ae8d87fa03d35e (diff) | |
| download | zig-b5a838247bd7d66037ba48378c34ba4460747deb.tar.gz zig-b5a838247bd7d66037ba48378c34ba4460747deb.zip | |
stage2: point to error location using spans
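This commit replaces the single `byte_offset` that compiler error messages carried with a `Module.SrcLoc.Span` holding `start` and `end` byte offsets, so the error printer can underline the whole offending expression instead of pointing at a single column. As a minimal, self-contained sketch of the rendering idea (illustrative only, not code from the commit; the source line and span values are made up):

```zig
const std = @import("std");

/// Stand-in for the `Module.SrcLoc.Span` added by this commit:
/// a half-open byte range [start, end) into the source.
const Span = struct {
    start: u32,
    end: u32,
};

pub fn main() !void {
    const stderr = std.io.getStdErr().writer();

    // Pretend an error points at the expression `a + b` on this line.
    const source_line = "const x: u32 = a + b;";
    const span = Span{ .start = 15, .end = 20 };
    const column = span.start; // column within this single line

    try stderr.print("{s}\n", .{source_line});
    try stderr.writeByteNTimes(' ', column);
    // Same shape as the new code in Compilation.zig: one caret, then one
    // tilde for every remaining byte covered by the span.
    try stderr.writeByte('^');
    try stderr.writeByteNTimes('~', span.end - span.start - 1);
    try stderr.writeByte('\n');
}
```

This prints `^~~~~` under `a + b`, which is the effect the Compilation.zig hunk below produces for real diagnostics.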
Diffstat (limited to 'src')
| -rw-r--r-- | src/Compilation.zig | 59 |
| -rw-r--r-- | src/Module.zig | 201 |
| -rw-r--r-- | src/main.zig | 8 |
| -rw-r--r-- | src/print_zir.zig | 10 |
4 files changed, 128 insertions, 150 deletions
diff --git a/src/Compilation.zig b/src/Compilation.zig
index b3ae4e787d..edca150988 100644
--- a/src/Compilation.zig
+++ b/src/Compilation.zig
@@ -338,7 +338,7 @@ pub const AllErrors = struct {
             src_path: []const u8,
             line: u32,
             column: u32,
-            byte_offset: u32,
+            span: Module.SrcLoc.Span,
             /// Usually one, but incremented for redundant messages.
             count: u32 = 1,
             /// Does not include the trailing newline.
@@ -429,7 +429,10 @@ pub const AllErrors = struct {
                     try stderr.writeByte('\n');
                     try stderr.writeByteNTimes(' ', src.column);
                     ttyconf.setColor(stderr, .Green);
-                    try stderr.writeAll("^\n");
+                    try stderr.writeByte('^');
+                    // TODO basic unicode code point monospace width
+                    try stderr.writeByteNTimes('~', src.span.end - src.span.start - 1);
+                    try stderr.writeByte('\n');
                     ttyconf.setColor(stderr, .Reset);
                 }
             }
@@ -469,7 +472,8 @@ pub const AllErrors = struct {
                 hasher.update(src.src_path);
                 std.hash.autoHash(&hasher, src.line);
                 std.hash.autoHash(&hasher, src.column);
-                std.hash.autoHash(&hasher, src.byte_offset);
+                std.hash.autoHash(&hasher, src.span.start);
+                std.hash.autoHash(&hasher, src.span.end);
             },
             .plain => |plain| {
                 hasher.update(plain.msg);
@@ -488,7 +492,8 @@ pub const AllErrors = struct {
                     mem.eql(u8, a_src.src_path, b_src.src_path) and
                     a_src.line == b_src.line and
                     a_src.column == b_src.column and
-                    a_src.byte_offset == b_src.byte_offset;
+                    a_src.span.start == b_src.span.start and
+                    a_src.span.end == b_src.span.end;
             },
             .plain => return false,
         },
@@ -527,20 +532,20 @@ pub const AllErrors = struct {
             std.hash_map.default_max_load_percentage,
         ).init(allocator);
 
         const err_source = try module_err_msg.src_loc.file_scope.getSource(module.gpa);
-        const err_byte_offset = try module_err_msg.src_loc.byteOffset(module.gpa);
-        const err_loc = std.zig.findLineColumn(err_source.bytes, err_byte_offset);
+        const err_span = try module_err_msg.src_loc.span(module.gpa);
+        const err_loc = std.zig.findLineColumn(err_source.bytes, err_span.start);
         for (module_err_msg.notes) |module_note| {
             const source = try module_note.src_loc.file_scope.getSource(module.gpa);
-            const byte_offset = try module_note.src_loc.byteOffset(module.gpa);
-            const loc = std.zig.findLineColumn(source.bytes, byte_offset);
+            const span = try module_note.src_loc.span(module.gpa);
+            const loc = std.zig.findLineColumn(source.bytes, span.start);
             const file_path = try module_note.src_loc.file_scope.fullPath(allocator);
             const note = &notes_buf[note_i];
             note.* = .{
                 .src = .{
                     .src_path = file_path,
                     .msg = try allocator.dupe(u8, module_note.msg),
-                    .byte_offset = byte_offset,
+                    .span = span,
                     .line = @intCast(u32, loc.line),
                     .column = @intCast(u32, loc.column),
                     .source_line = if (err_loc.eql(loc)) null else try allocator.dupe(u8, loc.source_line),
@@ -566,7 +571,7 @@ pub const AllErrors = struct {
             .src = .{
                 .src_path = file_path,
                 .msg = try allocator.dupe(u8, module_err_msg.msg),
-                .byte_offset = err_byte_offset,
+                .span = err_span,
                 .line = @intCast(u32, err_loc.line),
                 .column = @intCast(u32, err_loc.column),
                 .notes = notes_buf[0..note_i],
@@ -593,16 +598,15 @@ pub const AllErrors = struct {
         while (item_i < items_len) : (item_i += 1) {
             const item = file.zir.extraData(Zir.Inst.CompileErrors.Item, extra_index);
             extra_index = item.end;
-            const err_byte_offset = blk: {
-                const token_starts = file.tree.tokens.items(.start);
+            const err_span = blk: {
                 if (item.data.node != 0) {
-                    const main_tokens = file.tree.nodes.items(.main_token);
-                    const main_token = main_tokens[item.data.node];
-                    break :blk token_starts[main_token];
+                    break :blk Module.SrcLoc.nodeToSpan(&file.tree, item.data.node);
                 }
-                break :blk token_starts[item.data.token] + item.data.byte_offset;
+                const token_starts = file.tree.tokens.items(.start);
+                const start = token_starts[item.data.token] + item.data.byte_offset;
+                break :blk Module.SrcLoc.Span{ .start = start, .end = start + 1 };
             };
-            const err_loc = std.zig.findLineColumn(file.source, err_byte_offset);
+            const err_loc = std.zig.findLineColumn(file.source, err_span.start);
 
             var notes: []Message = &[0]Message{};
             if (item.data.notes != 0) {
@@ -612,22 +616,21 @@ pub const AllErrors = struct {
                 for (notes) |*note, i| {
                     const note_item = file.zir.extraData(Zir.Inst.CompileErrors.Item, body[i]);
                     const msg = file.zir.nullTerminatedString(note_item.data.msg);
-                    const byte_offset = blk: {
-                        const token_starts = file.tree.tokens.items(.start);
+                    const span = blk: {
                         if (note_item.data.node != 0) {
-                            const main_tokens = file.tree.nodes.items(.main_token);
-                            const main_token = main_tokens[note_item.data.node];
-                            break :blk token_starts[main_token];
+                            break :blk Module.SrcLoc.nodeToSpan(&file.tree, note_item.data.node);
                         }
-                        break :blk token_starts[note_item.data.token] + note_item.data.byte_offset;
+                        const token_starts = file.tree.tokens.items(.start);
+                        const start = token_starts[note_item.data.token] + note_item.data.byte_offset;
+                        break :blk Module.SrcLoc.Span{ .start = start, .end = start + 1 };
                     };
-                    const loc = std.zig.findLineColumn(file.source, byte_offset);
+                    const loc = std.zig.findLineColumn(file.source, span.start);
                     note.* = .{
                         .src = .{
                             .src_path = try file.fullPath(arena),
                             .msg = try arena.dupe(u8, msg),
-                            .byte_offset = byte_offset,
+                            .span = span,
                             .line = @intCast(u32, loc.line),
                             .column = @intCast(u32, loc.column),
                             .notes = &.{}, // TODO rework this function to be recursive
@@ -642,7 +645,7 @@ pub const AllErrors = struct {
             .src = .{
                 .src_path = try file.fullPath(arena),
                 .msg = try arena.dupe(u8, msg),
-                .byte_offset = err_byte_offset,
+                .span = err_span,
                 .line = @intCast(u32, err_loc.line),
                 .column = @intCast(u32, err_loc.column),
                 .notes = notes,
@@ -688,7 +691,7 @@ pub const AllErrors = struct {
                 .src_path = try arena.dupe(u8, src.src_path),
                 .line = src.line,
                 .column = src.column,
-                .byte_offset = src.byte_offset,
+                .span = src.span,
                 .source_line = if (src.source_line) |s| try arena.dupe(u8, s) else null,
                 .notes = try dupeList(src.notes, arena),
             } },
@@ -2662,7 +2665,7 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
                 .msg = try std.fmt.allocPrint(arena_allocator, "unable to build C object: {s}", .{
                     err_msg.msg,
                 }),
-                .byte_offset = 0,
+                .span = .{ .start = 0, .end = 1 },
                 .line = err_msg.line,
                 .column = err_msg.column,
                 .source_line = null, // TODO
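In short, `AllErrors.Message.src` now stores a `span` instead of a lone `byte_offset`, the hash/equality helpers fold in both endpoints, and the renderer emits `^` followed by `~` for the rest of the span, so a bare `^` under one character can become, say, `^~~~~` under a whole expression. The `// TODO basic unicode code point monospace width` note flags that the tilde count is still measured in bytes rather than display columns.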
diff --git a/src/Module.zig b/src/Module.zig
index 67fc0ca619..08625406ab 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -2082,60 +2082,62 @@ pub const SrcLoc = struct {
         return @bitCast(Ast.Node.Index, offset + @bitCast(i32, src_loc.parent_decl_node));
     }
 
-    pub fn byteOffset(src_loc: SrcLoc, gpa: Allocator) !u32 {
+    pub const Span = struct {
+        start: u32,
+        end: u32,
+    };
+
+    pub fn span(src_loc: SrcLoc, gpa: Allocator) !Span {
         switch (src_loc.lazy) {
             .unneeded => unreachable,
-            .entire_file => return 0,
+            .entire_file => return Span{ .start = 0, .end = 1 },
 
-            .byte_abs => |byte_index| return byte_index,
+            .byte_abs => |byte_index| return Span{ .start = byte_index, .end = byte_index + 1 },
 
             .token_abs => |tok_index| {
                 const tree = try src_loc.file_scope.getTree(gpa);
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                const start = tree.tokens.items(.start)[tok_index];
+                const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
+                return Span{ .start = start, .end = end };
             },
             .node_abs => |node| {
                 const tree = try src_loc.file_scope.getTree(gpa);
-                const token_starts = tree.tokens.items(.start);
-                const tok_index = tree.firstToken(node);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node);
             },
             .byte_offset => |byte_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[src_loc.declSrcToken()] + byte_off;
+                const tok_index = src_loc.declSrcToken();
+                const start = tree.tokens.items(.start)[tok_index] + byte_off;
+                const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
+                return Span{ .start = start, .end = end };
             },
             .token_offset => |tok_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const tok_index = src_loc.declSrcToken() + tok_off;
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                const start = tree.tokens.items(.start)[tok_index];
+                const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
+                return Span{ .start = start, .end = end };
             },
             .node_offset => |traced_off| {
                 const node_off = traced_off.x;
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
                 assert(src_loc.file_scope.tree_loaded);
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node);
             },
             .node_offset_bin_op => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
                 assert(src_loc.file_scope.tree_loaded);
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node);
             },
             .node_offset_back2tok => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
                 const tok_index = tree.firstToken(node) - 2;
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                const start = tree.tokens.items(.start)[tok_index];
+                const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
+                return Span{ .start = start, .end = end };
             },
             .node_offset_var_decl_ty => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
@@ -2154,8 +2156,9 @@ pub const SrcLoc = struct {
                 } else blk: {
                     break :blk full.ast.mut_token + 1; // the name token
                 };
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                const start = tree.tokens.items(.start)[tok_index];
+                const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
+                return Span{ .start = start, .end = end };
             },
             .node_offset_builtin_call_arg0 => |n| return src_loc.byteOffsetBuiltinCallArg(gpa, n, 0),
             .node_offset_builtin_call_arg1 => |n| return src_loc.byteOffsetBuiltinCallArg(gpa, n, 1),
@@ -2167,10 +2170,7 @@ pub const SrcLoc = struct {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node_datas = tree.nodes.items(.data);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node_datas[node].rhs];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node_datas[node].rhs);
             },
             .node_offset_slice_ptr,
             .node_offset_slice_start,
@@ -2187,7 +2187,7 @@ pub const SrcLoc = struct {
                     else => unreachable,
                 };
                 const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[
+                const part_node = main_tokens[
                     switch (src_loc.lazy) {
                         .node_offset_slice_ptr => full.ast.sliced,
                         .node_offset_slice_start => full.ast.start,
@@ -2196,8 +2196,7 @@ pub const SrcLoc = struct {
                         else => unreachable,
                     }
                 ];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, part_node);
             },
             .node_offset_call_func => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
@@ -2219,10 +2218,7 @@ pub const SrcLoc = struct {
                     else => unreachable,
                 };
 
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[full.ast.fn_expr];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, full.ast.fn_expr);
             },
             .node_offset_field_name => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
@@ -2233,16 +2229,14 @@ pub const SrcLoc = struct {
                     .field_access => node_datas[node].rhs,
                     else => tree.firstToken(node) - 2,
                 };
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                const start = tree.tokens.items(.start)[tok_index];
+                const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
+                return Span{ .start = start, .end = end };
             },
             .node_offset_deref_ptr => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
-                const node_datas = tree.nodes.items(.data);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
-                const tok_index = node_datas[node].lhs;
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node);
             },
             .node_offset_asm_source => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
@@ -2253,10 +2247,7 @@ pub const SrcLoc = struct {
                     .@"asm" => tree.asmFull(node),
                     else => unreachable,
                 };
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[full.ast.template];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, full.ast.template);
             },
             .node_offset_asm_ret_ty => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
@@ -2269,11 +2260,7 @@ pub const SrcLoc = struct {
                 };
                 const asm_output = full.outputs[0];
                 const node_datas = tree.nodes.items(.data);
-                const ret_ty_node = node_datas[asm_output].lhs;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[ret_ty_node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node_datas[asm_output].lhs);
             },
 
             .node_offset_for_cond, .node_offset_if_cond => |node_off| {
@@ -2290,41 +2277,26 @@ pub const SrcLoc = struct {
                     .@"for" => tree.forFull(node).ast.cond_expr,
                     else => unreachable,
                 };
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[src_node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, src_node);
             },
             .node_offset_bin_lhs => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
                 const node_datas = tree.nodes.items(.data);
-                const src_node = node_datas[node].lhs;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[src_node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node_datas[node].lhs);
             },
             .node_offset_bin_rhs => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
                 const node_datas = tree.nodes.items(.data);
-                const src_node = node_datas[node].rhs;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[src_node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node_datas[node].rhs);
             },
 
             .node_offset_switch_operand => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
                 const node_datas = tree.nodes.items(.data);
-                const src_node = node_datas[node].lhs;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[src_node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node_datas[node].lhs);
             },
 
             .node_offset_switch_special_prong => |node_off| {
@@ -2347,9 +2319,7 @@ pub const SrcLoc = struct {
                         mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_"));
                     if (!is_special) continue;
 
-                    const tok_index = main_tokens[case_node];
-                    const token_starts = tree.tokens.items(.start);
-                    return token_starts[tok_index];
+                    return nodeToSpan(tree, case_node);
                 } else unreachable;
             },
 
@@ -2375,9 +2345,7 @@ pub const SrcLoc = struct {
 
                     for (case.ast.values) |item_node| {
                         if (node_tags[item_node] == .switch_range) {
-                            const tok_index = main_tokens[item_node];
-                            const token_starts = tree.tokens.items(.start);
-                            return token_starts[tok_index];
+                            return nodeToSpan(tree, item_node);
                         }
                     }
                 } else unreachable;
@@ -2403,10 +2371,7 @@ pub const SrcLoc = struct {
                     },
                     else => unreachable,
                 };
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[full.ast.callconv_expr];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, full.ast.callconv_expr);
             },
 
             .node_offset_fn_type_ret_ty => |node_off| {
@@ -2421,21 +2386,14 @@ pub const SrcLoc = struct {
                     .fn_proto => tree.fnProto(node),
                     else => unreachable,
                 };
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[full.ast.return_type];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, full.ast.return_type);
             },
             .node_offset_anyframe_type => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node_datas = tree.nodes.items(.data);
                 const parent_node = src_loc.declRelativeToNodeIndex(node_off);
-                const node = node_datas[parent_node].rhs;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node_datas[parent_node].rhs);
             },
 
             .node_offset_lib_name => |node_off| {
@@ -2462,8 +2420,9 @@ pub const SrcLoc = struct {
                     else => unreachable,
                 };
                 const tok_index = full.lib_name.?;
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                const start = tree.tokens.items(.start)[tok_index];
+                const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
+                return Span{ .start = start, .end = end };
             },
 
             .node_offset_array_type_len => |node_off| {
@@ -2476,11 +2435,7 @@ pub const SrcLoc = struct {
                     .array_type_sentinel => tree.arrayTypeSentinel(parent_node),
                     else => unreachable,
                 };
-                const node = full.ast.elem_count;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, full.ast.elem_count);
             },
             .node_offset_array_type_sentinel => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
@@ -2492,11 +2447,7 @@ pub const SrcLoc = struct {
                     .array_type_sentinel => tree.arrayTypeSentinel(parent_node),
                     else => unreachable,
                 };
-                const node = full.ast.sentinel;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, full.ast.sentinel);
             },
             .node_offset_array_type_elem => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
@@ -2508,21 +2459,14 @@ pub const SrcLoc = struct {
                     .array_type_sentinel => tree.arrayTypeSentinel(parent_node),
                     else => unreachable,
                 };
-                const node = full.ast.elem_type;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, full.ast.elem_type);
             },
             .node_offset_un_op => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node_datas = tree.nodes.items(.data);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node_datas[node].lhs];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node_datas[node].lhs);
             },
         }
     }
@@ -2532,7 +2476,7 @@ pub const SrcLoc = struct {
         gpa: Allocator,
         node_off: i32,
         arg_index: u32,
-    ) !u32 {
+    ) !Span {
         const tree = try src_loc.file_scope.getTree(gpa);
         const node_datas = tree.nodes.items(.data);
         const node_tags = tree.nodes.items(.tag);
@@ -2546,10 +2490,33 @@ pub const SrcLoc = struct {
             .builtin_call, .builtin_call_comma => tree.extra_data[node_datas[node].lhs + arg_index],
             else => unreachable,
         };
-        const main_tokens = tree.nodes.items(.main_token);
-        const tok_index = main_tokens[param];
+        return nodeToSpan(tree, param);
+    }
+
+    pub fn nodeToSpan(tree: *const Ast, node: u32) Span {
         const token_starts = tree.tokens.items(.start);
-        return token_starts[tok_index];
+        const start = tree.firstToken(node);
+        const end = tree.lastToken(node);
+        if (tree.tokensOnSameLine(start, end)) {
+            const start_off = token_starts[start];
+            const end_off = token_starts[end] + @intCast(u32, tree.tokenSlice(end).len);
+            return Span{ .start = start_off, .end = end_off };
+        }
+
+        const main_token = tree.nodes.items(.main_token)[node];
+        if (tree.tokensOnSameLine(start, main_token)) {
+            const start_off = token_starts[start];
+            const end_off = token_starts[main_token] + @intCast(u32, tree.tokenSlice(main_token).len);
+            return Span{ .start = start_off, .end = end_off };
+        }
+        if (tree.tokensOnSameLine(main_token, end)) {
+            const start_off = token_starts[main_token];
+            const end_off = token_starts[end] + @intCast(u32, tree.tokenSlice(end).len);
+            return Span{ .start = start_off, .end = end_off };
+        }
+        const start_off = token_starts[main_token];
+        const end_off = token_starts[main_token] + @intCast(u32, tree.tokenSlice(main_token).len);
+        return Span{ .start = start_off, .end = end_off };
     }
 };
@@ -3313,7 +3280,11 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
             .src_loc = .{
                 .file_scope = file,
                 .parent_decl_node = 0,
-                .lazy = .{ .byte_abs = token_starts[parse_err.token] + extra_offset },
+                .lazy = if (extra_offset == 0) .{
+                    .token_abs = parse_err.token,
+                } else .{
+                    .byte_abs = token_starts[parse_err.token],
+                },
             },
             .msg = msg.toOwnedSlice(),
         };
@@ -3336,7 +3307,7 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
             .src_loc = .{
                 .file_scope = file,
                 .parent_decl_node = 0,
-                .lazy = .{ .byte_abs = token_starts[note.token] },
+                .lazy = .{ .token_abs = note.token },
             },
             .msg = msg.toOwnedSlice(),
         };
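The core of the Module.zig change is the new `nodeToSpan` helper: underline from the node's first token to its last token, but when the node spans multiple lines, fall back to a shorter range anchored on the node's main token so the underline stays on one line. A toy model of that fallback with made-up token data (`spanFromTokens` is not part of the compiler) could look like:

```zig
const std = @import("std");

const Span = struct { start: u32, end: u32 };

/// Toy mirror of the fallback order used by nodeToSpan: prefer
/// first..last token when they share a line, then first..main,
/// then main..last, and finally just the main token itself.
fn spanFromTokens(
    starts: []const u32, // byte offset of each token
    lens: []const u32, // byte length of each token
    lines: []const u32, // line number of each token
    first: usize,
    main: usize,
    last: usize,
) Span {
    if (lines[first] == lines[last])
        return .{ .start = starts[first], .end = starts[last] + lens[last] };
    if (lines[first] == lines[main])
        return .{ .start = starts[first], .end = starts[main] + lens[main] };
    if (lines[main] == lines[last])
        return .{ .start = starts[main], .end = starts[last] + lens[last] };
    return .{ .start = starts[main], .end = starts[main] + lens[main] };
}

test "multi-line node falls back toward the main token" {
    // Three tokens `foo`, `(`, `)` with `(` as the main token and the
    // closing `)` pretend-placed on the next line.
    const starts = [_]u32{ 0, 3, 10 };
    const lens = [_]u32{ 3, 1, 1 };
    const lines = [_]u32{ 0, 0, 1 };
    const s = spanFromTokens(&starts, &lens, &lines, 0, 1, 2);
    try std.testing.expectEqual(@as(u32, 0), s.start);
    try std.testing.expectEqual(@as(u32, 4), s.end);
}
```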
diff --git a/src/main.zig b/src/main.zig
index 3d77fc242a..7187dac3e7 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -4381,7 +4381,7 @@ fn printErrsMsgToStdErr(
                     .msg = try std.fmt.allocPrint(arena, "invalid byte: '{'}'", .{
                         std.zig.fmtEscapes(tree.source[byte_offset..][0..1]),
                     }),
-                    .byte_offset = byte_offset,
+                    .span = .{ .start = byte_offset, .end = byte_offset + 1 },
                     .line = @intCast(u32, start_loc.line),
                     .column = @intCast(u32, start_loc.column) + bad_off,
                     .source_line = source_line,
@@ -4396,11 +4396,12 @@ fn printErrsMsgToStdErr(
                 text_buf.items.len = 0;
                 try tree.renderError(note, writer);
                 const note_loc = tree.tokenLocation(0, note.token);
+                const byte_offset = @intCast(u32, note_loc.line_start);
                 notes_buffer[notes_len] = .{
                     .src = .{
                         .src_path = path,
                         .msg = try arena.dupe(u8, text_buf.items),
-                        .byte_offset = @intCast(u32, note_loc.line_start),
+                        .span = .{ .start = byte_offset, .end = byte_offset + @intCast(u32, tree.tokenSlice(note.token).len) },
                         .line = @intCast(u32, note_loc.line),
                         .column = @intCast(u32, note_loc.column),
                         .source_line = tree.source[note_loc.line_start..note_loc.line_end],
@@ -4411,11 +4412,12 @@ fn printErrsMsgToStdErr(
         }
 
         const extra_offset = tree.errorOffset(parse_error);
+        const byte_offset = @intCast(u32, start_loc.line_start) + extra_offset;
         const message: Compilation.AllErrors.Message = .{
             .src = .{
                 .src_path = path,
                 .msg = text,
-                .byte_offset = @intCast(u32, start_loc.line_start) + extra_offset,
+                .span = .{ .start = byte_offset, .end = byte_offset + @intCast(u32, tree.tokenSlice(lok_token).len) },
                 .line = @intCast(u32, start_loc.line),
                 .column = @intCast(u32, start_loc.column) + extra_offset,
                 .source_line = source_line,
diff --git a/src/print_zir.zig b/src/print_zir.zig
index 8df8eaae07..3b622a70fd 100644
--- a/src/print_zir.zig
+++ b/src/print_zir.zig
@@ -2381,10 +2381,12 @@ const Writer = struct {
                 .parent_decl_node = self.parent_decl_node,
                 .lazy = src,
             };
-            const abs_byte_off = src_loc.byteOffset(self.gpa) catch unreachable;
-            const delta_line = std.zig.findLineColumn(tree.source, abs_byte_off);
-            try stream.print("{s}:{d}:{d}", .{
-                @tagName(src), delta_line.line + 1, delta_line.column + 1,
+            const src_span = src_loc.span(self.gpa) catch unreachable;
+            const start = std.zig.findLineColumn(tree.source, src_span.start);
+            const end = std.zig.findLineColumn(tree.source, src_span.end);
+            try stream.print("{s}:{d}:{d} to :{d}:{d}", .{
+                @tagName(src), start.line + 1, start.column + 1,
+                end.line + 1, end.column + 1,
             });
         }
     }
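With spans available, print_zir.zig can annotate ZIR dumps with both endpoints: a hypothetical annotation (not captured from a real dump) would change from something like `node_offset_bin_op:12:9` to `node_offset_bin_op:12:9 to :12:18`. Likewise, main.zig now reports parse errors and their notes as one-token-wide (or one-byte-wide) spans instead of bare byte offsets.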
