| | | |
|---|---|---|
| author | Veikka Tuominen <git@vexu.eu> | 2022-02-18 13:53:47 +0200 |
| committer | GitHub <noreply@github.com> | 2022-02-18 13:53:47 +0200 |
| commit | 53241f288e40a9e97a5425cb0e1ac7dbbc9de852 (patch) | |
| tree | d63dda15c2615440408100e213e8c97dfd7c4d52 /src | |
| parent | 56e9575e827208b3df5c90472826f52bfc8342c0 (diff) | |
| parent | 6b65590715d0871c11635fc49cb1fc471a60ea59 (diff) | |
| download | zig-53241f288e40a9e97a5425cb0e1ac7dbbc9de852.tar.gz, zig-53241f288e40a9e97a5425cb0e1ac7dbbc9de852.zip | |
Merge pull request #10913 from Vexu/err
further parser error improvements
Diffstat (limited to 'src')

| | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/Module.zig | 19 |
| -rw-r--r-- | src/main.zig | 141 |
| -rw-r--r-- | src/stage1/ir.cpp | 31 |
3 files changed, 122 insertions, 69 deletions
```diff
diff --git a/src/Module.zig b/src/Module.zig
index e973c42a7d..524e8402cd 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -2995,7 +2995,7 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
         const token_starts = file.tree.tokens.items(.start);
         const token_tags = file.tree.tokens.items(.tag);
 
-        const extra_offset = file.tree.errorOffset(parse_err.tag, parse_err.token);
+        const extra_offset = file.tree.errorOffset(parse_err);
         try file.tree.renderError(parse_err, msg.writer());
         const err_msg = try gpa.create(ErrorMsg);
         err_msg.* = .{
@@ -3006,14 +3006,25 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
             },
             .msg = msg.toOwnedSlice(),
         };
-        if (token_tags[parse_err.token] == .invalid) {
-            const bad_off = @intCast(u32, file.tree.tokenSlice(parse_err.token).len);
-            const byte_abs = token_starts[parse_err.token] + bad_off;
+        if (token_tags[parse_err.token + @boolToInt(parse_err.token_is_prev)] == .invalid) {
+            const bad_off = @intCast(u32, file.tree.tokenSlice(parse_err.token + @boolToInt(parse_err.token_is_prev)).len);
+            const byte_abs = token_starts[parse_err.token + @boolToInt(parse_err.token_is_prev)] + bad_off;
             try mod.errNoteNonLazy(.{
                 .file_scope = file,
                 .parent_decl_node = 0,
                 .lazy = .{ .byte_abs = byte_abs },
             }, err_msg, "invalid byte: '{'}'", .{std.zig.fmtEscapes(source[byte_abs..][0..1])});
+        } else if (parse_err.tag == .decl_between_fields) {
+            try mod.errNoteNonLazy(.{
+                .file_scope = file,
+                .parent_decl_node = 0,
+                .lazy = .{ .byte_abs = token_starts[file.tree.errors[1].token] },
+            }, err_msg, "field before declarations here", .{});
+            try mod.errNoteNonLazy(.{
+                .file_scope = file,
+                .parent_decl_node = 0,
+                .lazy = .{ .byte_abs = token_starts[file.tree.errors[2].token] },
+            }, err_msg, "field after declarations here", .{});
         }
 
         {
diff --git a/src/main.zig b/src/main.zig
index 248ae2dd76..2b120f8d9a 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -3799,9 +3799,7 @@ pub fn cmdFmt(gpa: Allocator, arena: Allocator, args: []const []const u8) !void
         };
         defer tree.deinit(gpa);
 
-        for (tree.errors) |parse_error| {
-            try printErrMsgToStdErr(gpa, arena, parse_error, tree, "<stdin>", color);
-        }
+        try printErrsMsgToStdErr(gpa, arena, tree.errors, tree, "<stdin>", color);
         var has_ast_error = false;
         if (check_ast_flag) {
             const Module = @import("Module.zig");
@@ -3989,9 +3987,7 @@ fn fmtPathFile(
     var tree = try std.zig.parse(fmt.gpa, source_code);
     defer tree.deinit(fmt.gpa);
 
-    for (tree.errors) |parse_error| {
-        try printErrMsgToStdErr(fmt.gpa, fmt.arena, parse_error, tree, file_path, fmt.color);
-    }
+    try printErrsMsgToStdErr(fmt.gpa, fmt.arena, tree.errors, tree, file_path, fmt.color);
     if (tree.errors.len != 0) {
         fmt.any_error = true;
         return;
@@ -4071,66 +4067,95 @@ fn fmtPathFile(
     }
 }
 
-fn printErrMsgToStdErr(
+fn printErrsMsgToStdErr(
     gpa: mem.Allocator,
     arena: mem.Allocator,
-    parse_error: Ast.Error,
+    parse_errors: []const Ast.Error,
     tree: Ast,
     path: []const u8,
     color: Color,
 ) !void {
-    const lok_token = parse_error.token;
-    const token_tags = tree.tokens.items(.tag);
-    const start_loc = tree.tokenLocation(0, lok_token);
-    const source_line = tree.source[start_loc.line_start..start_loc.line_end];
-
-    var text_buf = std.ArrayList(u8).init(gpa);
-    defer text_buf.deinit();
-    const writer = text_buf.writer();
-    try tree.renderError(parse_error, writer);
-    const text = text_buf.items;
-
-    var notes_buffer: [1]Compilation.AllErrors.Message = undefined;
-    var notes_len: usize = 0;
-
-    if (token_tags[parse_error.token] == .invalid) {
-        const bad_off = @intCast(u32, tree.tokenSlice(parse_error.token).len);
-        const byte_offset = @intCast(u32, start_loc.line_start) + bad_off;
-        notes_buffer[notes_len] = .{
+    var i: usize = 0;
+    while (i < parse_errors.len) : (i += 1) {
+        const parse_error = parse_errors[i];
+        const lok_token = parse_error.token;
+        const token_tags = tree.tokens.items(.tag);
+        const start_loc = tree.tokenLocation(0, lok_token);
+        const source_line = tree.source[start_loc.line_start..start_loc.line_end];
+
+        var text_buf = std.ArrayList(u8).init(gpa);
+        defer text_buf.deinit();
+        const writer = text_buf.writer();
+        try tree.renderError(parse_error, writer);
+        const text = text_buf.items;
+
+        var notes_buffer: [2]Compilation.AllErrors.Message = undefined;
+        var notes_len: usize = 0;
+
+        if (token_tags[parse_error.token + @boolToInt(parse_error.token_is_prev)] == .invalid) {
+            const bad_off = @intCast(u32, tree.tokenSlice(parse_error.token + @boolToInt(parse_error.token_is_prev)).len);
+            const byte_offset = @intCast(u32, start_loc.line_start) + @intCast(u32, start_loc.column) + bad_off;
+            notes_buffer[notes_len] = .{
+                .src = .{
+                    .src_path = path,
+                    .msg = try std.fmt.allocPrint(arena, "invalid byte: '{'}'", .{
+                        std.zig.fmtEscapes(tree.source[byte_offset..][0..1]),
+                    }),
+                    .byte_offset = byte_offset,
+                    .line = @intCast(u32, start_loc.line),
+                    .column = @intCast(u32, start_loc.column) + bad_off,
+                    .source_line = source_line,
+                },
+            };
+            notes_len += 1;
+        } else if (parse_error.tag == .decl_between_fields) {
+            const prev_loc = tree.tokenLocation(0, parse_errors[i + 1].token);
+            notes_buffer[0] = .{
+                .src = .{
+                    .src_path = path,
+                    .msg = "field before declarations here",
+                    .byte_offset = @intCast(u32, prev_loc.line_start),
+                    .line = @intCast(u32, prev_loc.line),
+                    .column = @intCast(u32, prev_loc.column),
+                    .source_line = tree.source[prev_loc.line_start..prev_loc.line_end],
+                },
+            };
+            const next_loc = tree.tokenLocation(0, parse_errors[i + 2].token);
+            notes_buffer[1] = .{
+                .src = .{
+                    .src_path = path,
+                    .msg = "field after declarations here",
+                    .byte_offset = @intCast(u32, next_loc.line_start),
+                    .line = @intCast(u32, next_loc.line),
+                    .column = @intCast(u32, next_loc.column),
+                    .source_line = tree.source[next_loc.line_start..next_loc.line_end],
+                },
+            };
+            notes_len = 2;
+            i += 2;
+        }
+
+        const extra_offset = tree.errorOffset(parse_error);
+        const message: Compilation.AllErrors.Message = .{
             .src = .{
                 .src_path = path,
-                .msg = try std.fmt.allocPrint(arena, "invalid byte: '{'}'", .{
-                    std.zig.fmtEscapes(tree.source[byte_offset..][0..1]),
-                }),
-                .byte_offset = byte_offset,
+                .msg = text,
+                .byte_offset = @intCast(u32, start_loc.line_start) + extra_offset,
                 .line = @intCast(u32, start_loc.line),
-                .column = @intCast(u32, start_loc.column) + bad_off,
+                .column = @intCast(u32, start_loc.column) + extra_offset,
                 .source_line = source_line,
+                .notes = notes_buffer[0..notes_len],
             },
         };
-        notes_len += 1;
-    }
-    const extra_offset = tree.errorOffset(parse_error.tag, parse_error.token);
-    const message: Compilation.AllErrors.Message = .{
-        .src = .{
-            .src_path = path,
-            .msg = text,
-            .byte_offset = @intCast(u32, start_loc.line_start) + extra_offset,
-            .line = @intCast(u32, start_loc.line),
-            .column = @intCast(u32, start_loc.column) + extra_offset,
-            .source_line = source_line,
-            .notes = notes_buffer[0..notes_len],
-        },
-    };
-
-    const ttyconf: std.debug.TTY.Config = switch (color) {
-        .auto => std.debug.detectTTYConfig(),
-        .on => .escape_codes,
-        .off => .no_color,
-    };
+        const ttyconf: std.debug.TTY.Config = switch (color) {
+            .auto => std.debug.detectTTYConfig(),
+            .on => .escape_codes,
+            .off => .no_color,
+        };
 
-    message.renderToStdErr(ttyconf);
+        message.renderToStdErr(ttyconf);
+    }
 }
 
 pub const info_zen =
@@ -4688,9 +4713,7 @@ pub fn cmdAstCheck(
     file.tree_loaded = true;
     defer file.tree.deinit(gpa);
 
-    for (file.tree.errors) |parse_error| {
-        try printErrMsgToStdErr(gpa, arena, parse_error, file.tree, file.sub_file_path, color);
-    }
+    try printErrsMsgToStdErr(gpa, arena, file.tree.errors, file.tree, file.sub_file_path, color);
     if (file.tree.errors.len != 0) {
         process.exit(1);
     }
@@ -4816,9 +4839,7 @@ pub fn cmdChangelist(
     file.tree_loaded = true;
     defer file.tree.deinit(gpa);
 
-    for (file.tree.errors) |parse_error| {
-        try printErrMsgToStdErr(gpa, arena, parse_error, file.tree, old_source_file, .auto);
-    }
+    try printErrsMsgToStdErr(gpa, arena, file.tree.errors, file.tree, old_source_file, .auto);
     if (file.tree.errors.len != 0) {
         process.exit(1);
     }
@@ -4855,9 +4876,7 @@ pub fn cmdChangelist(
     var new_tree = try std.zig.parse(gpa, new_source);
     defer new_tree.deinit(gpa);
 
-    for (new_tree.errors) |parse_error| {
-        try printErrMsgToStdErr(gpa, arena, parse_error, new_tree, new_source_file, .auto);
-    }
+    try printErrsMsgToStdErr(gpa, arena, new_tree.errors, new_tree, new_source_file, .auto);
     if (new_tree.errors.len != 0) {
         process.exit(1);
     }
diff --git a/src/stage1/ir.cpp b/src/stage1/ir.cpp
index 63466849a4..3ac1ddb51c 100644
--- a/src/stage1/ir.cpp
+++ b/src/stage1/ir.cpp
@@ -7843,7 +7843,7 @@ static Stage1AirInst *ir_analyze_cast(IrAnalyze *ira, Scope *scope, AstNode *sou
         bool const_ok = (slice_ptr_type->data.pointer.is_const ||
                 array_type->data.array.len == 0 ||
                 !actual_type->data.pointer.is_const);
-        if (const_ok && types_match_const_cast_only(ira, slice_ptr_type->data.pointer.child_type,
+        if (types_match_const_cast_only(ira, slice_ptr_type->data.pointer.child_type,
             array_type->data.array.child_type, source_node,
             !slice_ptr_type->data.pointer.is_const).id == ConstCastResultIdOk &&
             (slice_ptr_type->data.pointer.sentinel == nullptr ||
@@ -7851,6 +7851,14 @@ static Stage1AirInst *ir_analyze_cast(IrAnalyze *ira, Scope *scope, AstNode *sou
                 const_values_equal(ira->codegen, array_type->data.array.sentinel,
                 slice_ptr_type->data.pointer.sentinel))))
         {
+            if (!const_ok) {
+                ErrorMsg *msg = ir_add_error_node(ira, source_node,
+                    buf_sprintf("cannot cast pointer to array literal to slice type '%s'",
+                        buf_ptr(&wanted_type->name)));
+                add_error_note(ira->codegen, msg, source_node,
+                    buf_sprintf("cast discards const qualifier"));
+                return ira->codegen->invalid_inst_gen;
+            }
             // If the pointers both have ABI align, it works.
             // Or if the array length is 0, alignment doesn't matter.
             bool ok_align = array_type->data.array.len == 0 ||
@@ -8208,8 +8216,16 @@ static Stage1AirInst *ir_analyze_cast(IrAnalyze *ira, Scope *scope, AstNode *sou
         ZigType *wanted_child = wanted_type->data.pointer.child_type;
         bool const_ok = (!actual_type->data.pointer.is_const || wanted_type->data.pointer.is_const);
         if (wanted_child->id == ZigTypeIdArray && (is_array_init || field_count == 0) &&
-            wanted_child->data.array.len == field_count && (const_ok || field_count == 0))
+            wanted_child->data.array.len == field_count)
         {
+            if (!const_ok && field_count != 0) {
+                ErrorMsg *msg = ir_add_error_node(ira, source_node,
+                    buf_sprintf("cannot cast pointer to array literal to '%s'",
+                        buf_ptr(&wanted_type->name)));
+                add_error_note(ira->codegen, msg, source_node,
+                    buf_sprintf("cast discards const qualifier"));
+                return ira->codegen->invalid_inst_gen;
+            }
             Stage1AirInst *res = ir_analyze_struct_literal_to_array(ira, scope, source_node, value, anon_type, wanted_child);
             if (res->value->type->id == ZigTypeIdPointer)
                 return res;
@@ -8241,6 +8257,13 @@ static Stage1AirInst *ir_analyze_cast(IrAnalyze *ira, Scope *scope, AstNode *sou
                 res = ir_get_ref(ira, scope, source_node, res, actual_type->data.pointer.is_const,
                         actual_type->data.pointer.is_volatile);
                 return ir_resolve_ptr_of_array_to_slice(ira, scope, source_node, res, wanted_type, nullptr);
+            } else if (!slice_type->data.pointer.is_const && actual_type->data.pointer.is_const && field_count != 0) {
+                ErrorMsg *msg = ir_add_error_node(ira, source_node,
+                    buf_sprintf("cannot cast pointer to array literal to slice type '%s'",
+                        buf_ptr(&wanted_type->name)));
+                add_error_note(ira->codegen, msg, source_node,
+                    buf_sprintf("cast discards const qualifier"));
+                return ira->codegen->invalid_inst_gen;
             }
         }
     }
@@ -15068,7 +15091,7 @@ static Stage1AirInst *ir_analyze_instruction_elem_ptr(IrAnalyze *ira, Stage1ZirI
         return ira->codegen->invalid_inst_gen;
     if (actual_array_type->id != ZigTypeIdArray) {
         ir_add_error_node(ira, elem_ptr_instruction->init_array_type_source_node,
-            buf_sprintf("array literal requires address-of operator to coerce to slice type '%s'",
+            buf_sprintf("array literal requires address-of operator (&) to coerce to slice type '%s'",
                 buf_ptr(&actual_array_type->name)));
         return ira->codegen->invalid_inst_gen;
     }
@@ -17473,7 +17496,7 @@ static Stage1AirInst *ir_analyze_instruction_container_init_list(IrAnalyze *ira,
 
     if (is_slice(container_type)) {
         ir_add_error_node(ira, instruction->init_array_type_source_node,
-            buf_sprintf("array literal requires address-of operator to coerce to slice type '%s'",
+            buf_sprintf("array literal requires address-of operator (&) to coerce to slice type '%s'",
                 buf_ptr(&container_type->name)));
         return ira->codegen->invalid_inst_gen;
    }
```
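For context, the new `decl_between_fields` branches attach two notes that point back at the container fields surrounding the stray declaration, taken from the next two entries in the error list (`parse_errors[i + 1]`/`[i + 2]` in main.zig, `file.tree.errors[1]`/`[2]` in Module.zig). A minimal sketch of input that would trigger it; the file name and the comment annotations are illustrative, not taken from the commit:

```zig
// repro.zig (hypothetical): a container-level declaration sandwiched between fields.
const S = struct {
    a: u32, // note: field before declarations here
    const in_between = 1; // error (roughly): declarations are not allowed between container fields
    b: u32, // note: field after declarations here
};
```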

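The stage1 `ir.cpp` hunks turn a previously silent cast failure into an explicit error plus note whenever coercing a pointer to an array literal would drop `const`. A rough sketch of code that should now hit the new diagnostic, assuming the usual coercion path for anonymous list literals; the exact type name in the message depends on the target slice type:

```zig
export fn demo() void {
    // The address of an anonymous array literal is a const pointer, so coercing it
    // to a mutable slice would have to discard the const qualifier.
    const slice: []u8 = &.{ 1, 2, 3 };
    // expected stage1 diagnostic, per the messages added in the diff:
    //   error: cannot cast pointer to array literal to slice type '[]u8'
    //   note: cast discards const qualifier
    _ = slice;
}
```

The related tweak in `ir_analyze_instruction_elem_ptr` and `ir_analyze_instruction_container_init_list` covers the no-address-of case: writing `.{ 1, 2, 3 }` directly where a slice is expected keeps the existing "requires address-of operator" error but now spells the operator out as `(&)`.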