From 9c36cf92f009eda14cb773e89148547dadb137c6 Mon Sep 17 00:00:00 2001
From: Veikka Tuominen
Date: Wed, 16 Feb 2022 13:06:11 +0200
Subject: parser: make some errors point to end of previous token

For some errors, if the found token is not on the same line as the
previous token, point to the end of the previous token. This usually
results in more helpful errors.
---
 src/Module.zig | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

(limited to 'src/Module.zig')

diff --git a/src/Module.zig b/src/Module.zig
index 4ffd6925b6..280c58ade8 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -2995,7 +2995,7 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
         const token_starts = file.tree.tokens.items(.start);
         const token_tags = file.tree.tokens.items(.tag);
 
-        const extra_offset = file.tree.errorOffset(parse_err.tag, parse_err.token);
+        const extra_offset = file.tree.errorOffset(parse_err);
         try file.tree.renderError(parse_err, msg.writer());
         const err_msg = try gpa.create(ErrorMsg);
         err_msg.* = .{
@@ -3006,9 +3006,9 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
             },
             .msg = msg.toOwnedSlice(),
         };
-        if (token_tags[parse_err.token] == .invalid) {
-            const bad_off = @intCast(u32, file.tree.tokenSlice(parse_err.token).len);
-            const byte_abs = token_starts[parse_err.token] + bad_off;
+        if (token_tags[parse_err.token + @boolToInt(parse_err.token_is_prev)] == .invalid) {
+            const bad_off = @intCast(u32, file.tree.tokenSlice(parse_err.token + @boolToInt(parse_err.token_is_prev)).len);
+            const byte_abs = token_starts[parse_err.token + @boolToInt(parse_err.token_is_prev)] + bad_off;
             try mod.errNoteNonLazy(.{
                 .file_scope = file,
                 .parent_decl_node = 0,
-- 
cgit v1.2.3


From 6b65590715d0871c11635fc49cb1fc471a60ea59 Mon Sep 17 00:00:00 2001
From: Veikka Tuominen
Date: Thu, 17 Feb 2022 22:14:45 +0200
Subject: parser: add notes to decl_between_fields error

---
 lib/std/zig/Ast.zig         |  11 ++++
 lib/std/zig/parse.zig       |  25 ++++++++
 lib/std/zig/parser_test.zig |   2 +
 src/Module.zig              |  11 ++++
 src/main.zig                | 141 +++++++++++++++++++++++++-------------------
 test/compile_errors.zig     |   2 +
 6 files changed, 131 insertions(+), 61 deletions(-)

(limited to 'src/Module.zig')

diff --git a/lib/std/zig/Ast.zig b/lib/std/zig/Ast.zig
index 5501352084..a68959837a 100644
--- a/lib/std/zig/Ast.zig
+++ b/lib/std/zig/Ast.zig
@@ -329,6 +329,13 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
             return stream.writeAll("expected field initializer");
         },
 
+        .previous_field => {
+            return stream.writeAll("field before declarations here");
+        },
+        .next_field => {
+            return stream.writeAll("field after declarations here");
+        },
+
         .expected_token => {
             const found_tag = token_tags[parse_error.token + @boolToInt(parse_error.token_is_prev)];
             const expected_symbol = parse_error.extra.expected_tag.symbol();
@@ -2470,6 +2477,7 @@ pub const full = struct {
 
 pub const Error = struct {
     tag: Tag,
+    is_note: bool = false,
     /// True if `token` points to the token before the token causing an issue.
     token_is_prev: bool = false,
     token: TokenIndex,
@@ -2527,6 +2535,9 @@ pub const Error = struct {
         expected_comma_after_switch_prong,
         expected_initializer,
 
+        previous_field,
+        next_field,
+
         /// `expected_tag` is populated.
         expected_token,
     };
diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig
index 46d2b0f49e..2578629af5 100644
--- a/lib/std/zig/parse.zig
+++ b/lib/std/zig/parse.zig
@@ -91,6 +91,9 @@ const Parser = struct {
     extra_data: std.ArrayListUnmanaged(Node.Index),
     scratch: std.ArrayListUnmanaged(Node.Index),
 
+    /// Used for the error note of decl_between_fields error.
+    last_field: TokenIndex = undefined,
+
     const SmallSpan = union(enum) {
         zero_or_one: Node.Index,
         multi: Node.SubRange,
@@ -270,6 +273,8 @@ const Parser = struct {
                 .keyword_comptime => switch (p.token_tags[p.tok_i + 1]) {
                     .identifier => {
                         p.tok_i += 1;
+                        const identifier = p.tok_i;
+                        defer p.last_field = identifier;
                         const container_field = try p.expectContainerFieldRecoverable();
                         if (container_field != 0) {
                             switch (field_state) {
@@ -280,6 +285,16 @@ const Parser = struct {
                                         .tag = .decl_between_fields,
                                         .token = p.nodes.items(.main_token)[node],
                                     });
+                                    try p.warnMsg(.{
+                                        .tag = .previous_field,
+                                        .is_note = true,
+                                        .token = p.last_field,
+                                    });
+                                    try p.warnMsg(.{
+                                        .tag = .next_field,
+                                        .is_note = true,
+                                        .token = identifier,
+                                    });
                                     // Continue parsing; error will be reported later.
                                     field_state = .err;
                                 },
@@ -373,6 +388,8 @@ const Parser = struct {
                     trailing = p.token_tags[p.tok_i - 1] == .semicolon;
                 },
                 .identifier => {
+                    const identifier = p.tok_i;
+                    defer p.last_field = identifier;
                     const container_field = try p.expectContainerFieldRecoverable();
                     if (container_field != 0) {
                         switch (field_state) {
@@ -383,6 +400,14 @@ const Parser = struct {
                                     .tag = .decl_between_fields,
                                     .token = p.nodes.items(.main_token)[node],
                                 });
+                                try p.warnMsg(.{
+                                    .tag = .previous_field,
+                                    .token = p.last_field,
+                                });
+                                try p.warnMsg(.{
+                                    .tag = .next_field,
+                                    .token = identifier,
+                                });
                                 // Continue parsing; error will be reported later.
                                field_state = .err;
                            },
diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig
index 9bf7ae8da9..0c79b3b187 100644
--- a/lib/std/zig/parser_test.zig
+++ b/lib/std/zig/parser_test.zig
@@ -226,6 +226,8 @@ test "zig fmt: decl between fields" {
         \\};
     , &[_]Error{
         .decl_between_fields,
+        .previous_field,
+        .next_field,
     });
 }
 
diff --git a/src/Module.zig b/src/Module.zig
index 280c58ade8..62b992b546 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -3014,6 +3014,17 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
                 .parent_decl_node = 0,
                 .lazy = .{ .byte_abs = byte_abs },
             }, err_msg, "invalid byte: '{'}'", .{std.zig.fmtEscapes(source[byte_abs..][0..1])});
+        } else if (parse_err.tag == .decl_between_fields) {
+            try mod.errNoteNonLazy(.{
+                .file_scope = file,
+                .parent_decl_node = 0,
+                .lazy = .{ .byte_abs = token_starts[file.tree.errors[1].token] },
+            }, err_msg, "field before declarations here", .{});
+            try mod.errNoteNonLazy(.{
+                .file_scope = file,
+                .parent_decl_node = 0,
+                .lazy = .{ .byte_abs = token_starts[file.tree.errors[2].token] },
+            }, err_msg, "field after declarations here", .{});
         }
 
         {
diff --git a/src/main.zig b/src/main.zig
index 3d33268718..4a934bdff5 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -3769,9 +3769,7 @@ pub fn cmdFmt(gpa: Allocator, arena: Allocator, args: []const []const u8) !void
         };
         defer tree.deinit(gpa);
 
-        for (tree.errors) |parse_error| {
-            try printErrMsgToStdErr(gpa, arena, parse_error, tree, "", color);
-        }
+        try printErrsMsgToStdErr(gpa, arena, tree.errors, tree, "", color);
         var has_ast_error = false;
         if (check_ast_flag) {
             const Module = @import("Module.zig");
@@ -3959,9 +3957,7 @@ fn fmtPathFile(
     var tree = try std.zig.parse(fmt.gpa, source_code);
     defer tree.deinit(fmt.gpa);
 
-    for (tree.errors) |parse_error| {
-        try printErrMsgToStdErr(fmt.gpa, fmt.arena, parse_error, tree, file_path, fmt.color);
-    }
+    try printErrsMsgToStdErr(fmt.gpa, fmt.arena, tree.errors, tree, file_path, fmt.color);
     if (tree.errors.len != 0) {
         fmt.any_error = true;
         return;
@@ -4041,66 +4037,95 @@ fn fmtPathFile(
     }
 }
 
-fn printErrMsgToStdErr(
+fn printErrsMsgToStdErr(
     gpa: mem.Allocator,
     arena: mem.Allocator,
-    parse_error: Ast.Error,
+    parse_errors: []const Ast.Error,
     tree: Ast,
     path: []const u8,
     color: Color,
 ) !void {
-    const lok_token = parse_error.token;
-    const token_tags = tree.tokens.items(.tag);
-    const start_loc = tree.tokenLocation(0, lok_token);
-    const source_line = tree.source[start_loc.line_start..start_loc.line_end];
-
-    var text_buf = std.ArrayList(u8).init(gpa);
-    defer text_buf.deinit();
-    const writer = text_buf.writer();
-    try tree.renderError(parse_error, writer);
-    const text = text_buf.items;
-
-    var notes_buffer: [1]Compilation.AllErrors.Message = undefined;
-    var notes_len: usize = 0;
-
-    if (token_tags[parse_error.token + @boolToInt(parse_error.token_is_prev)] == .invalid) {
-        const bad_off = @intCast(u32, tree.tokenSlice(parse_error.token + @boolToInt(parse_error.token_is_prev)).len);
-        const byte_offset = @intCast(u32, start_loc.line_start) + @intCast(u32, start_loc.column) + bad_off;
-        notes_buffer[notes_len] = .{
+    var i: usize = 0;
+    while (i < parse_errors.len) : (i += 1) {
+        const parse_error = parse_errors[i];
+        const lok_token = parse_error.token;
+        const token_tags = tree.tokens.items(.tag);
+        const start_loc = tree.tokenLocation(0, lok_token);
+        const source_line = tree.source[start_loc.line_start..start_loc.line_end];
+
+        var text_buf = std.ArrayList(u8).init(gpa);
+        defer text_buf.deinit();
+        const writer = text_buf.writer();
+        try tree.renderError(parse_error, writer);
+        const text = text_buf.items;
+
+        var notes_buffer: [2]Compilation.AllErrors.Message = undefined;
+        var notes_len: usize = 0;
+
+        if (token_tags[parse_error.token + @boolToInt(parse_error.token_is_prev)] == .invalid) {
+            const bad_off = @intCast(u32, tree.tokenSlice(parse_error.token + @boolToInt(parse_error.token_is_prev)).len);
+            const byte_offset = @intCast(u32, start_loc.line_start) + @intCast(u32, start_loc.column) + bad_off;
+            notes_buffer[notes_len] = .{
+                .src = .{
+                    .src_path = path,
+                    .msg = try std.fmt.allocPrint(arena, "invalid byte: '{'}'", .{
+                        std.zig.fmtEscapes(tree.source[byte_offset..][0..1]),
+                    }),
+                    .byte_offset = byte_offset,
+                    .line = @intCast(u32, start_loc.line),
+                    .column = @intCast(u32, start_loc.column) + bad_off,
+                    .source_line = source_line,
+                },
+            };
+            notes_len += 1;
+        } else if (parse_error.tag == .decl_between_fields) {
+            const prev_loc = tree.tokenLocation(0, parse_errors[i + 1].token);
+            notes_buffer[0] = .{
+                .src = .{
+                    .src_path = path,
+                    .msg = "field before declarations here",
+                    .byte_offset = @intCast(u32, prev_loc.line_start),
+                    .line = @intCast(u32, prev_loc.line),
+                    .column = @intCast(u32, prev_loc.column),
+                    .source_line = tree.source[prev_loc.line_start..prev_loc.line_end],
+                },
+            };
+            const next_loc = tree.tokenLocation(0, parse_errors[i + 2].token);
+            notes_buffer[1] = .{
+                .src = .{
+                    .src_path = path,
+                    .msg = "field after declarations here",
+                    .byte_offset = @intCast(u32, next_loc.line_start),
+                    .line = @intCast(u32, next_loc.line),
+                    .column = @intCast(u32, next_loc.column),
+                    .source_line = tree.source[next_loc.line_start..next_loc.line_end],
+                },
+            };
+            notes_len = 2;
+            i += 2;
+        }
+
+        const extra_offset = tree.errorOffset(parse_error);
+        const message: Compilation.AllErrors.Message = .{
             .src = .{
                 .src_path = path,
-                .msg = try std.fmt.allocPrint(arena, "invalid byte: '{'}'", .{
-                    std.zig.fmtEscapes(tree.source[byte_offset..][0..1]),
-                }),
-                .byte_offset = byte_offset,
+                .msg = text,
+                .byte_offset = @intCast(u32, start_loc.line_start) + extra_offset,
                 .line = @intCast(u32, start_loc.line),
-                .column = @intCast(u32, start_loc.column) + bad_off,
+                .column = @intCast(u32, start_loc.column) + extra_offset,
                 .source_line = source_line,
+                .notes = notes_buffer[0..notes_len],
             },
         };
-        notes_len += 1;
-    }
 
-    const extra_offset = tree.errorOffset(parse_error);
-    const message: Compilation.AllErrors.Message = .{
-        .src = .{
-            .src_path = path,
-            .msg = text,
-            .byte_offset = @intCast(u32, start_loc.line_start) + extra_offset,
-            .line = @intCast(u32, start_loc.line),
-            .column = @intCast(u32, start_loc.column) + extra_offset,
-            .source_line = source_line,
-            .notes = notes_buffer[0..notes_len],
-        },
-    };
-
-    const ttyconf: std.debug.TTY.Config = switch (color) {
-        .auto => std.debug.detectTTYConfig(),
-        .on => .escape_codes,
-        .off => .no_color,
-    };
+        const ttyconf: std.debug.TTY.Config = switch (color) {
+            .auto => std.debug.detectTTYConfig(),
+            .on => .escape_codes,
+            .off => .no_color,
+        };
 
-    message.renderToStdErr(ttyconf);
+        message.renderToStdErr(ttyconf);
+    }
 }
 
 pub const info_zen =
@@ -4658,9 +4683,7 @@ pub fn cmdAstCheck(
     file.tree_loaded = true;
     defer file.tree.deinit(gpa);
 
-    for (file.tree.errors) |parse_error| {
-        try printErrMsgToStdErr(gpa, arena, parse_error, file.tree, file.sub_file_path, color);
-    }
+    try printErrsMsgToStdErr(gpa, arena, file.tree.errors, file.tree, file.sub_file_path, color);
     if (file.tree.errors.len != 0) {
         process.exit(1);
     }
@@ -4786,9 +4809,7 @@ pub fn cmdChangelist(
     file.tree_loaded = true;
     defer file.tree.deinit(gpa);
 
-    for (file.tree.errors) |parse_error| {
-        try printErrMsgToStdErr(gpa, arena, parse_error, file.tree, old_source_file, .auto);
-    }
+    try printErrsMsgToStdErr(gpa, arena, file.tree.errors, file.tree, old_source_file, .auto);
     if (file.tree.errors.len != 0) {
         process.exit(1);
     }
@@ -4825,9 +4846,7 @@ pub fn cmdChangelist(
     var new_tree = try std.zig.parse(gpa, new_source);
     defer new_tree.deinit(gpa);
 
-    for (new_tree.errors) |parse_error| {
-        try printErrMsgToStdErr(gpa, arena, parse_error, new_tree, new_source_file, .auto);
-    }
+    try printErrsMsgToStdErr(gpa, arena, new_tree.errors, new_tree, new_source_file, .auto);
     if (new_tree.errors.len != 0) {
         process.exit(1);
     }
diff --git a/test/compile_errors.zig b/test/compile_errors.zig
index 650f54ae3f..f982d8298a 100644
--- a/test/compile_errors.zig
+++ b/test/compile_errors.zig
@@ -877,6 +877,8 @@ pub fn addCases(ctx: *TestContext) !void {
         \\}
     , &[_][]const u8{
         "tmp.zig:6:5: error: declarations are not allowed between container fields",
+        "tmp.zig:5:5: note: field before declarations here",
+        "tmp.zig:9:5: note: field after declarations here",
     });
 
     ctx.objErrStage1("non-extern function with var args",

-- 
cgit v1.2.3
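
For context, a minimal illustration of the case the second commit targets (this sketch is not part of the patches above; it loosely mirrors the new expectations in parser_test.zig and test/compile_errors.zig, and the file layout and exact source locations are hypothetical). The snippet is intentionally invalid Zig:

    // Intentionally invalid: a declaration placed between container fields.
    const S = struct {
        a: u32,

        // The parser reports `decl_between_fields` ("declarations are not
        // allowed between container fields") on this declaration...
        fn f() void {}

        // ...and, with these patches, attaches a `previous_field` note on `a`
        // ("field before declarations here") and a `next_field` note on `b`
        // ("field after declarations here").
        b: u32,
    };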