Diffstat (limited to 'lib/std')
-rw-r--r--   lib/std/zig/Ast.zig           41
-rw-r--r--   lib/std/zig/parse.zig        220
-rw-r--r--   lib/std/zig/parser_test.zig   28
-rw-r--r--   lib/std/zig/render.zig        96
4 files changed, 233 insertions, 152 deletions
diff --git a/lib/std/zig/Ast.zig b/lib/std/zig/Ast.zig
index 5dd0cdd5af..0966978ab8 100644
--- a/lib/std/zig/Ast.zig
+++ b/lib/std/zig/Ast.zig
@@ -559,6 +559,7 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
         .container_field,
         => {
             const name_token = main_tokens[n];
+            if (token_tags[name_token + 1] != .colon) return name_token - end_offset;
             if (name_token > 0 and token_tags[name_token - 1] == .keyword_comptime) {
                 end_offset += 1;
             }
@@ -1320,33 +1321,39 @@
 pub fn containerField(tree: Ast, node: Node.Index) full.ContainerField {
     assert(tree.nodes.items(.tag)[node] == .container_field);
     const data = tree.nodes.items(.data)[node];
     const extra = tree.extraData(data.rhs, Node.ContainerField);
+    const main_token = tree.nodes.items(.main_token)[node];
     return tree.fullContainerField(.{
-        .name_token = tree.nodes.items(.main_token)[node],
+        .main_token = main_token,
         .type_expr = data.lhs,
         .value_expr = extra.value_expr,
         .align_expr = extra.align_expr,
+        .tuple_like = tree.tokens.items(.tag)[main_token + 1] != .colon,
     });
 }
 
 pub fn containerFieldInit(tree: Ast, node: Node.Index) full.ContainerField {
     assert(tree.nodes.items(.tag)[node] == .container_field_init);
     const data = tree.nodes.items(.data)[node];
+    const main_token = tree.nodes.items(.main_token)[node];
     return tree.fullContainerField(.{
-        .name_token = tree.nodes.items(.main_token)[node],
+        .main_token = main_token,
         .type_expr = data.lhs,
         .value_expr = data.rhs,
         .align_expr = 0,
+        .tuple_like = tree.tokens.items(.tag)[main_token + 1] != .colon,
     });
 }
 
 pub fn containerFieldAlign(tree: Ast, node: Node.Index) full.ContainerField {
     assert(tree.nodes.items(.tag)[node] == .container_field_align);
     const data = tree.nodes.items(.data)[node];
+    const main_token = tree.nodes.items(.main_token)[node];
     return tree.fullContainerField(.{
-        .name_token = tree.nodes.items(.main_token)[node],
+        .main_token = main_token,
         .type_expr = data.lhs,
         .value_expr = 0,
         .align_expr = data.rhs,
+        .tuple_like = tree.tokens.items(.tag)[main_token + 1] != .colon,
     });
 }
@@ -1944,10 +1951,14 @@ fn fullContainerField(tree: Ast, info: full.ContainerField.Components) full.Cont
         .ast = info,
         .comptime_token = null,
     };
-    // comptime name: type = init,
-    // ^
-    if (info.name_token > 0 and token_tags[info.name_token - 1] == .keyword_comptime) {
-        result.comptime_token = info.name_token - 1;
+    if (token_tags[info.main_token] == .keyword_comptime) {
+        // comptime type = init,
+        // ^
+        result.comptime_token = info.main_token;
+    } else if (info.main_token > 0 and token_tags[info.main_token - 1] == .keyword_comptime) {
+        // comptime name: type = init,
+        // ^
+        result.comptime_token = info.main_token - 1;
     }
     return result;
 }
@@ -2256,14 +2267,26 @@ pub const full = struct {
         ast: Components,
 
         pub const Components = struct {
-            name_token: TokenIndex,
+            main_token: TokenIndex,
             type_expr: Node.Index,
             value_expr: Node.Index,
             align_expr: Node.Index,
+            tuple_like: bool,
         };
 
         pub fn firstToken(cf: ContainerField) TokenIndex {
-            return cf.comptime_token orelse cf.ast.name_token;
+            return cf.comptime_token orelse cf.ast.main_token;
+        }
+
+        pub fn convertToNonTupleLike(cf: *ContainerField, nodes: NodeList.Slice) void {
+            if (!cf.ast.tuple_like) return;
+            if (cf.ast.type_expr == 0) return;
+            if (nodes.items(.tag)[cf.ast.type_expr] != .identifier) return;
+
+            const ident = nodes.items(.main_token)[cf.ast.type_expr];
+            cf.ast.tuple_like = false;
+            cf.ast.main_token = ident;
+            cf.ast.type_expr = 0;
         }
     };
diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig
index 1be074fc27..0226ec2e1d 100644
--- a/lib/std/zig/parse.zig
+++ b/lib/std/zig/parse.zig
@@ -272,53 +272,6 @@ const Parser = struct {
                     trailing = false;
                 },
                 .keyword_comptime => switch (p.token_tags[p.tok_i + 1]) {
-                    .identifier => {
-                        p.tok_i += 1;
-                        const identifier = p.tok_i;
-                        defer last_field = identifier;
-                        const container_field = try p.expectContainerFieldRecoverable();
-                        if (container_field != 0) {
-                            switch (field_state) {
-                                .none => field_state = .seen,
-                                .err, .seen => {},
-                                .end => |node| {
-                                    try p.warnMsg(.{
-                                        .tag = .decl_between_fields,
-                                        .token = p.nodes.items(.main_token)[node],
-                                    });
-                                    try p.warnMsg(.{
-                                        .tag = .previous_field,
-                                        .is_note = true,
-                                        .token = last_field,
-                                    });
-                                    try p.warnMsg(.{
-                                        .tag = .next_field,
-                                        .is_note = true,
-                                        .token = identifier,
-                                    });
-                                    // Continue parsing; error will be reported later.
-                                    field_state = .err;
-                                },
-                            }
-                            try p.scratch.append(p.gpa, container_field);
-                            switch (p.token_tags[p.tok_i]) {
-                                .comma => {
-                                    p.tok_i += 1;
-                                    trailing = true;
-                                    continue;
-                                },
-                                .r_brace, .eof => {
-                                    trailing = false;
-                                    break;
-                                },
-                                else => {},
-                            }
-                            // There is not allowed to be a decl after a field with no comma.
-                            // Report error but recover parser.
-                            try p.warn(.expected_comma_after_field);
-                            p.findNextContainerMember();
-                        }
-                    },
                     .l_brace => {
                         if (doc_comment) |some| {
                             try p.warnMsg(.{ .tag = .test_doc_comment, .token = some });
                         }
@@ -349,53 +302,15 @@
                     },
                     else => {
                         p.tok_i += 1;
-                        try p.warn(.expected_block_or_field);
-                    },
-                },
-                .keyword_pub => {
-                    p.tok_i += 1;
-                    const top_level_decl = try p.expectTopLevelDeclRecoverable();
-                    if (top_level_decl != 0) {
-                        if (field_state == .seen) {
-                            field_state = .{ .end = top_level_decl };
-                        }
-                        try p.scratch.append(p.gpa, top_level_decl);
-                    }
-                    trailing = p.token_tags[p.tok_i - 1] == .semicolon;
-                },
-                .keyword_usingnamespace => {
-                    const node = try p.expectUsingNamespaceRecoverable();
-                    if (node != 0) {
-                        if (field_state == .seen) {
-                            field_state = .{ .end = node };
-                        }
-                        try p.scratch.append(p.gpa, node);
-                    }
-                    trailing = p.token_tags[p.tok_i - 1] == .semicolon;
-                },
-                .keyword_const,
-                .keyword_var,
-                .keyword_threadlocal,
-                .keyword_export,
-                .keyword_extern,
-                .keyword_inline,
-                .keyword_noinline,
-                .keyword_fn,
-                => {
-                    const top_level_decl = try p.expectTopLevelDeclRecoverable();
-                    if (top_level_decl != 0) {
-                        if (field_state == .seen) {
-                            field_state = .{ .end = top_level_decl };
-                        }
-                        try p.scratch.append(p.gpa, top_level_decl);
-                    }
-                    trailing = p.token_tags[p.tok_i - 1] == .semicolon;
-                },
-                .identifier => {
-                    const identifier = p.tok_i;
-                    defer last_field = identifier;
-                    const container_field = try p.expectContainerFieldRecoverable();
-                    if (container_field != 0) {
+                        const identifier = p.tok_i;
+                        defer last_field = identifier;
+                        const container_field = p.expectContainerField() catch |err| switch (err) {
+                            error.OutOfMemory => return error.OutOfMemory,
+                            error.ParseError => {
+                                p.findNextContainerMember();
+                                continue;
+                            },
+                        };
                         switch (field_state) {
                             .none => field_state = .seen,
                             .err, .seen => {},
                             .end => |node| {
                                 try p.warnMsg(.{
                                     .tag = .decl_between_fields,
                                     .token = p.nodes.items(.main_token)[node],
                                 });
                                 try p.warnMsg(.{
                                     .tag = .previous_field,
                                     .is_note = true,
                                     .token = last_field,
                                 });
                                 try p.warnMsg(.{
                                     .tag = .next_field,
                                     .is_note = true,
                                     .token = identifier,
                                 });
                                 // Continue parsing; error will be reported later.
                                 field_state = .err;
                             },
                         }
                         try p.scratch.append(p.gpa, container_field);
                         switch (p.token_tags[p.tok_i]) {
                             .comma => {
                                 p.tok_i += 1;
                                 trailing = true;
                                 continue;
                             },
                             .r_brace, .eof => {
                                 trailing = false;
                                 break;
                             },
                             else => {},
                         }
                         // There is not allowed to be a decl after a field with no comma.
@@ -435,7 +350,46 @@
                         // Report error but recover parser.
                         try p.warn(.expected_comma_after_field);
                         p.findNextContainerMember();
+                    },
+                },
+                .keyword_pub => {
+                    p.tok_i += 1;
+                    const top_level_decl = try p.expectTopLevelDeclRecoverable();
+                    if (top_level_decl != 0) {
+                        if (field_state == .seen) {
+                            field_state = .{ .end = top_level_decl };
+                        }
+                        try p.scratch.append(p.gpa, top_level_decl);
+                    }
+                    trailing = p.token_tags[p.tok_i - 1] == .semicolon;
+                },
+                .keyword_usingnamespace => {
+                    const node = try p.expectUsingNamespaceRecoverable();
+                    if (node != 0) {
+                        if (field_state == .seen) {
+                            field_state = .{ .end = node };
+                        }
+                        try p.scratch.append(p.gpa, node);
+                    }
+                    trailing = p.token_tags[p.tok_i - 1] == .semicolon;
+                },
+                .keyword_const,
+                .keyword_var,
+                .keyword_threadlocal,
+                .keyword_export,
+                .keyword_extern,
+                .keyword_inline,
+                .keyword_noinline,
+                .keyword_fn,
+                => {
+                    const top_level_decl = try p.expectTopLevelDeclRecoverable();
+                    if (top_level_decl != 0) {
+                        if (field_state == .seen) {
+                            field_state = .{ .end = top_level_decl };
+                        }
+                        try p.scratch.append(p.gpa, top_level_decl);
                     }
+                    trailing = p.token_tags[p.tok_i - 1] == .semicolon;
                 },
                 .eof, .r_brace => {
                     if (doc_comment) |tok| {
@@ -451,11 +405,57 @@
                         error.OutOfMemory => return error.OutOfMemory,
                         error.ParseError => false,
                     };
-                    if (!c_container) {
-                        try p.warn(.expected_container_members);
-                        // This was likely not supposed to end yet; try to find the next declaration.
-                        p.findNextContainerMember();
+                    if (c_container) continue;
+
+                    const identifier = p.tok_i;
+                    defer last_field = identifier;
+                    const container_field = p.expectContainerField() catch |err| switch (err) {
+                        error.OutOfMemory => return error.OutOfMemory,
+                        error.ParseError => {
+                            p.findNextContainerMember();
+                            continue;
+                        },
+                    };
+                    switch (field_state) {
+                        .none => field_state = .seen,
+                        .err, .seen => {},
+                        .end => |node| {
+                            try p.warnMsg(.{
+                                .tag = .decl_between_fields,
+                                .token = p.nodes.items(.main_token)[node],
+                            });
+                            try p.warnMsg(.{
+                                .tag = .previous_field,
+                                .is_note = true,
+                                .token = last_field,
+                            });
+                            try p.warnMsg(.{
+                                .tag = .next_field,
+                                .is_note = true,
+                                .token = identifier,
+                            });
+                            // Continue parsing; error will be reported later.
+                            field_state = .err;
+                        },
                     }
+                    try p.scratch.append(p.gpa, container_field);
+                    switch (p.token_tags[p.tok_i]) {
+                        .comma => {
+                            p.tok_i += 1;
+                            trailing = true;
+                            continue;
+                        },
+                        .r_brace, .eof => {
+                            trailing = false;
+                            break;
+                        },
+                        else => {},
+                    }
+                    // There is not allowed to be a decl after a field with no comma.
+                    // Report error but recover parser.
+                    try p.warn(.expected_comma_after_field);
+                    p.findNextContainerMember();
+                    continue;
                 },
             }
         }
@@ -875,12 +875,16 @@ const Parser = struct {
     /// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)?
     fn expectContainerField(p: *Parser) !Node.Index {
+        var main_token = p.tok_i;
         _ = p.eatToken(.keyword_comptime);
-        const name_token = p.assertToken(.identifier);
+        const tuple_like = p.token_tags[p.tok_i] != .identifier or p.token_tags[p.tok_i + 1] != .colon;
+        if (!tuple_like) {
+            main_token = p.assertToken(.identifier);
+        }
 
         var align_expr: Node.Index = 0;
         var type_expr: Node.Index = 0;
-        if (p.eatToken(.colon)) |_| {
+        if (p.eatToken(.colon) != null or tuple_like) {
            type_expr = try p.expectTypeExpr();
            align_expr = try p.parseByteAlign();
         }
@@ -890,7 +894,7 @@ const Parser = struct {
         if (align_expr == 0) {
             return p.addNode(.{
                 .tag = .container_field_init,
-                .main_token = name_token,
+                .main_token = main_token,
                 .data = .{
                     .lhs = type_expr,
                     .rhs = value_expr,
@@ -899,7 +903,7 @@ const Parser = struct {
         } else if (value_expr == 0) {
             return p.addNode(.{
                 .tag = .container_field_align,
-                .main_token = name_token,
+                .main_token = main_token,
                 .data = .{
                     .lhs = type_expr,
                     .rhs = align_expr,
@@ -908,7 +912,7 @@ const Parser = struct {
         } else {
             return p.addNode(.{
                 .tag = .container_field,
-                .main_token = name_token,
+                .main_token = main_token,
                 .data = .{
                     .lhs = type_expr,
                     .rhs = try p.addExtra(Node.ContainerField{
@@ -920,16 +924,6 @@ const Parser = struct {
         }
     }
 
-    fn expectContainerFieldRecoverable(p: *Parser) error{OutOfMemory}!Node.Index {
-        return p.expectContainerField() catch |err| switch (err) {
-            error.OutOfMemory => return error.OutOfMemory,
-            error.ParseError => {
-                p.findNextContainerMember();
-                return null_node;
-            },
-        };
-    }
-
     /// Statement
     ///     <- KEYWORD_comptime? VarDecl
     ///      / KEYWORD_comptime BlockExprStatement
diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig
index bf1da35d37..e554c51f70 100644
--- a/lib/std/zig/parser_test.zig
+++ b/lib/std/zig/parser_test.zig
@@ -1,3 +1,15 @@
+test "zig fmt: tuple struct" {
+    try testCanonical(
+        \\const T = struct {
+        \\    comptime u32,
+        \\    *u32 = 1,
+        \\    // needs to be wrapped in parentheses to not be parsed as a function decl
+        \\    (fn () void) align(1),
+        \\};
+        \\
+    );
+}
+
 test "zig fmt: preserves clobbers in inline asm with stray comma" {
     try testCanonical(
         \\fn foo() void {
@@ -265,14 +277,6 @@ test "zig fmt: decl between fields" {
     });
 }
 
-test "zig fmt: eof after missing comma" {
-    try testError(
-        \\foo()
-    , &[_]Error{
-        .expected_comma_after_field,
-    });
-}
-
 test "zig fmt: errdefer with payload" {
     try testCanonical(
         \\pub fn main() anyerror!void {
@@ -5732,8 +5736,8 @@ test "recovery: missing semicolon" {
 test "recovery: invalid container members" {
     try testError(
         \\usingnamespace;
-        \\foo+
-        \\bar@,
+        \\@foo()+
+        \\@bar()@,
         \\while (a == 2) { test "" {}}
         \\test "" {
         \\    a & b
@@ -5741,7 +5745,7 @@ test "recovery: invalid container members" {
     , &[_]Error{
         .expected_expr,
         .expected_comma_after_field,
-        .expected_container_members,
+        .expected_type_expr,
         .expected_semi_after_stmt,
     });
 }
@@ -5820,7 +5824,7 @@ test "recovery: invalid comptime" {
     try testError(
         \\comptime
     , &[_]Error{
-        .expected_block_or_field,
+        .expected_type_expr,
     });
 }
diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig
index 665c31a0af..160ec13f0c 100644
--- a/lib/std/zig/render.zig
+++ b/lib/std/zig/render.zig
@@ -40,14 +40,34 @@ pub fn renderTree(buffer: *std.ArrayList(u8), tree: Ast) Error!void {
 /// Render all members in the given slice, keeping empty lines where appropriate
 fn renderMembers(gpa: Allocator, ais: *Ais, tree: Ast, members: []const Ast.Node.Index) Error!void {
     if (members.len == 0) return;
-    try renderMember(gpa, ais, tree, members[0], .newline);
+    var is_tuple = true;
+    for (members) |member| {
+        const tuple_like = switch (tree.nodes.items(.tag)[member]) {
+            .container_field_init => tree.containerFieldInit(member).ast.tuple_like,
+            .container_field_align => tree.containerFieldAlign(member).ast.tuple_like,
+            .container_field => tree.containerField(member).ast.tuple_like,
+            else => continue,
+        };
+        if (!tuple_like) {
+            is_tuple = false;
+            break;
+        }
+    }
+    try renderMember(gpa, ais, tree, members[0], is_tuple, .newline);
     for (members[1..]) |member| {
         try renderExtraNewline(ais, tree, member);
-        try renderMember(gpa, ais, tree, member, .newline);
+        try renderMember(gpa, ais, tree, member, is_tuple, .newline);
     }
 }
 
-fn renderMember(gpa: Allocator, ais: *Ais, tree: Ast, decl: Ast.Node.Index, space: Space) Error!void {
+fn renderMember(
+    gpa: Allocator,
+    ais: *Ais,
+    tree: Ast,
+    decl: Ast.Node.Index,
+    is_tuple: bool,
+    space: Space,
+) Error!void {
     const token_tags = tree.tokens.items(.tag);
     const main_tokens = tree.nodes.items(.main_token);
     const datas = tree.nodes.items(.data);
@@ -161,9 +181,9 @@ fn renderMember(gpa: Allocator, ais: *Ais, tree: Ast, decl: Ast.Node.Index, spac
             try renderExpression(gpa, ais, tree, datas[decl].rhs, space);
         },
 
-        .container_field_init => return renderContainerField(gpa, ais, tree, tree.containerFieldInit(decl), space),
-        .container_field_align => return renderContainerField(gpa, ais, tree, tree.containerFieldAlign(decl), space),
-        .container_field => return renderContainerField(gpa, ais, tree, tree.containerField(decl), space),
+        .container_field_init => return renderContainerField(gpa, ais, tree, tree.containerFieldInit(decl), is_tuple, space),
+        .container_field_align => return renderContainerField(gpa, ais, tree, tree.containerFieldAlign(decl), is_tuple, space),
+        .container_field => return renderContainerField(gpa, ais, tree, tree.containerField(decl), is_tuple, space),
         .@"comptime" => return renderExpression(gpa, ais, tree, decl, space),
 
         .root => unreachable,
@@ -1158,18 +1178,34 @@ fn renderContainerField(
     gpa: Allocator,
     ais: *Ais,
     tree: Ast,
-    field: Ast.full.ContainerField,
+    field_param: Ast.full.ContainerField,
+    is_tuple: bool,
     space: Space,
 ) Error!void {
+    var field = field_param;
+    if (!is_tuple) field.convertToNonTupleLike(tree.nodes);
+
     if (field.comptime_token) |t| {
         try renderToken(ais, tree, t, .space); // comptime
     }
     if (field.ast.type_expr == 0 and field.ast.value_expr == 0) {
-        return renderIdentifierComma(ais, tree, field.ast.name_token, space, .eagerly_unquote); // name
+        if (field.ast.align_expr != 0) {
+            try renderIdentifier(ais, tree, field.ast.main_token, .space, .eagerly_unquote); // name
+            const lparen_token = tree.firstToken(field.ast.align_expr) - 1;
+            const align_kw = lparen_token - 1;
+            const rparen_token = tree.lastToken(field.ast.align_expr) + 1;
+            try renderToken(ais, tree, align_kw, .none); // align
+            try renderToken(ais, tree, lparen_token, .none); // (
+            try renderExpression(gpa, ais, tree, field.ast.align_expr, .none); // alignment
+            return renderToken(ais, tree, rparen_token, .space); // )
+        }
+        return renderIdentifierComma(ais, tree, field.ast.main_token, space, .eagerly_unquote); // name
     }
     if (field.ast.type_expr != 0 and field.ast.value_expr == 0) {
-        try renderIdentifier(ais, tree, field.ast.name_token, .none, .eagerly_unquote); // name
-        try renderToken(ais, tree, field.ast.name_token + 1, .space); // :
+        if (!field.ast.tuple_like) {
+            try renderIdentifier(ais, tree, field.ast.main_token, .none, .eagerly_unquote); // name
+            try renderToken(ais, tree, field.ast.main_token + 1, .space); // :
+        }
 
         if (field.ast.align_expr != 0) {
             try renderExpression(gpa, ais, tree, field.ast.type_expr, .space); // type
@@ -1184,13 +1220,23 @@ fn renderContainerField(
         }
     }
     if (field.ast.type_expr == 0 and field.ast.value_expr != 0) {
-        try renderIdentifier(ais, tree, field.ast.name_token, .space, .eagerly_unquote); // name
-        try renderToken(ais, tree, field.ast.name_token + 1, .space); // =
+        try renderIdentifier(ais, tree, field.ast.main_token, .space, .eagerly_unquote); // name
+        if (field.ast.align_expr != 0) {
+            const lparen_token = tree.firstToken(field.ast.align_expr) - 1;
+            const align_kw = lparen_token - 1;
+            const rparen_token = tree.lastToken(field.ast.align_expr) + 1;
+            try renderToken(ais, tree, align_kw, .none); // align
+            try renderToken(ais, tree, lparen_token, .none); // (
+            try renderExpression(gpa, ais, tree, field.ast.align_expr, .none); // alignment
+            try renderToken(ais, tree, rparen_token, .space); // )
+        }
+        try renderToken(ais, tree, field.ast.main_token + 1, .space); // =
         return renderExpressionComma(gpa, ais, tree, field.ast.value_expr, space); // value
     }
-
-    try renderIdentifier(ais, tree, field.ast.name_token, .none, .eagerly_unquote); // name
-    try renderToken(ais, tree, field.ast.name_token + 1, .space); // :
+    if (!field.ast.tuple_like) {
+        try renderIdentifier(ais, tree, field.ast.main_token, .none, .eagerly_unquote); // name
+        try renderToken(ais, tree, field.ast.main_token + 1, .space); // :
+    }
     try renderExpression(gpa, ais, tree, field.ast.type_expr, .space); // type
 
     if (field.ast.align_expr != 0) {
@@ -1901,6 +1947,20 @@ fn renderContainerDecl(
         try renderToken(ais, tree, layout_token, .space);
     }
 
+    var is_tuple = token_tags[container_decl.ast.main_token] == .keyword_struct;
+    if (is_tuple) for (container_decl.ast.members) |member| {
+        const tuple_like = switch (tree.nodes.items(.tag)[member]) {
+            .container_field_init => tree.containerFieldInit(member).ast.tuple_like,
+            .container_field_align => tree.containerFieldAlign(member).ast.tuple_like,
+            .container_field => tree.containerField(member).ast.tuple_like,
+            else => continue,
+        };
+        if (!tuple_like) {
+            is_tuple = false;
+            break;
+        }
+    };
+
     var lbrace: Ast.TokenIndex = undefined;
     if (container_decl.ast.enum_token) |enum_token| {
         try renderToken(ais, tree, container_decl.ast.main_token, .none); // union
@@ -1967,7 +2027,7 @@ fn renderContainerDecl(
         // Print all the declarations on the same line.
         try renderToken(ais, tree, lbrace, .space); // lbrace
         for (container_decl.ast.members) |member| {
-            try renderMember(gpa, ais, tree, member, .space);
+            try renderMember(gpa, ais, tree, member, is_tuple, .space);
        }
         return renderToken(ais, tree, rbrace, space); // rbrace
     }
@@ -1985,9 +2045,9 @@ fn renderContainerDecl(
             .container_field_init,
             .container_field_align,
             .container_field,
-            => try renderMember(gpa, ais, tree, member, .comma),
+            => try renderMember(gpa, ais, tree, member, is_tuple, .comma),
 
-            else => try renderMember(gpa, ais, tree, member, .newline),
+            else => try renderMember(gpa, ais, tree, member, is_tuple, .newline),
         }
     }
     ais.popIndent();
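For context (not part of the patch): a minimal sketch of the tuple-like field syntax that this change teaches the parser and zig fmt to handle, mirroring the "zig fmt: tuple struct" test added in parser_test.zig above. The type name T is illustrative only.

    // Fields of a tuple-like struct have no names: each member is just a type,
    // optionally with `comptime`, a default value, or an align annotation.
    const T = struct {
        comptime u32,
        *u32 = 1,
        // needs to be wrapped in parentheses to not be parsed as a function decl
        (fn () void) align(1),
    };

If any field in the same container is written with a name and a colon (for example `x: u32`), the is_tuple scan added in render.zig treats the whole container as non-tuple-like and renders every field through convertToNonTupleLike.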
