diff options
| author | mlugg <mlugg@mlugg.co.uk> | 2023-08-31 14:30:58 +0100 |
|---|---|---|
| committer | Andrew Kelley <andrew@ziglang.org> | 2023-09-15 11:33:53 -0700 |
| commit | 88f5315ddfc6eaf3e28433504ec046fb3252db7c (patch) | |
| tree | 5cd6e8e16b285d136a1fbaa98d12739aeab34feb /lib/std | |
| parent | 50ef10eb4963167225f7153dc5165292dbac0046 (diff) | |
| download | zig-88f5315ddfc6eaf3e28433504ec046fb3252db7c.tar.gz zig-88f5315ddfc6eaf3e28433504ec046fb3252db7c.zip | |
compiler: implement destructuring syntax
This change implements the following syntax into the compiler:
```zig
const x: u32, var y, foo.bar = .{ 1, 2, 3 };
```
A destructure expression may only appear within a block (i.e. not at
container scope). The LHS consists of a sequence of comma-separated var
decls and/or lvalue expressions. The RHS is a normal expression.
A new result location type, `destructure`, is used, which contains
result pointers for each component of the destructure. This means that
when the RHS is a more complicated expression, peer type resolution is
not used: each result value is individually destructured and written to
the result pointers. RLS is always used for destructure expressions,
meaning every `const` on the LHS of such an expression creates a true
stack allocation.
Aside from anonymous array literals, Sema is capable of destructuring
the following types:
* Tuples
* Arrays
* Vectors
A destructure may be prefixed with the `comptime` keyword, in which case
the entire destructure is evaluated at comptime: this means all `var`s
in the LHS are `comptime var`s, every lvalue expression is evaluated at
comptime, and the RHS is evaluated at comptime. If every LHS is a
`const`, this is not allowed: as with single declarations, the user
should instead mark the RHS as `comptime`.
There are a few subtleties in the grammar changes here. For one thing,
if every LHS is an lvalue expression (rather than a var decl), a
destructure is considered an expression. This makes, for instance,
`if (cond) x, y = .{ 1, 2 };` valid Zig code. A destructure is allowed
in almost every context where a standard assignment expression is
permitted. The exception is `switch` prongs, which cannot be
destructures, as the comma would be ambiguous with the end of the prong.
A follow-up commit will begin utilizing this syntax in the Zig compiler.
Resolves: #498
Diffstat (limited to 'lib/std')
| -rw-r--r-- | lib/std/zig/Ast.zig | 28 | ||||
| -rw-r--r-- | lib/std/zig/Parse.zig | 350 | ||||
| -rw-r--r-- | lib/std/zig/parser_test.zig | 14 | ||||
| -rw-r--r-- | lib/std/zig/render.zig | 114 |
4 files changed, 395 insertions, 111 deletions
diff --git a/lib/std/zig/Ast.zig b/lib/std/zig/Ast.zig index 7987d19778..04ea0b0b84 100644 --- a/lib/std/zig/Ast.zig +++ b/lib/std/zig/Ast.zig @@ -241,6 +241,11 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void { token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(), }); }, + .expected_expr_or_var_decl => { + return stream.print("expected expression or var decl, found '{s}'", .{ + token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(), + }); + }, .expected_fn => { return stream.print("expected function, found '{s}'", .{ token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(), @@ -584,6 +589,13 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex { .error_union, => n = datas[n].lhs, + .assign_destructure => { + const extra_idx = datas[n].lhs; + const lhs_len = tree.extra_data[extra_idx]; + assert(lhs_len > 0); + n = tree.extra_data[extra_idx + 1]; + }, + .fn_decl, .fn_proto_simple, .fn_proto_multi, @@ -816,6 +828,7 @@ pub fn lastToken(tree: Ast, node: Node.Index) TokenIndex { .assign_add_sat, .assign_sub_sat, .assign, + .assign_destructure, .merge_error_sets, .mul, .div, @@ -2846,6 +2859,7 @@ pub const Error = struct { expected_container_members, expected_expr, expected_expr_or_assignment, + expected_expr_or_var_decl, expected_fn, expected_inlinable, expected_labelable, @@ -3006,6 +3020,20 @@ pub const Node = struct { assign_sub_sat, /// `lhs = rhs`. main_token is op. assign, + /// `a, b, ... = rhs`. main_token is op. lhs is index into `extra_data` + /// of an lhs elem count followed by an array of that many `Node.Index`, + /// with each node having one of the following types: + /// * `global_var_decl` + /// * `local_var_decl` + /// * `simple_var_decl` + /// * `aligned_var_decl` + /// * Any expression node + /// The first 3 types correspond to a `var` or `const` lhs node (note + /// that their `rhs` is always 0). 
An expression node corresponds to a + /// standard assignment LHS (which must be evaluated as an lvalue). + /// There may be a preceding `comptime` token, which does not create a + /// corresponding `comptime` node so must be manually detected. + assign_destructure, /// `lhs || rhs`. main_token is the `||`. merge_error_sets, /// `lhs * rhs`. main_token is the `*`. diff --git a/lib/std/zig/Parse.zig b/lib/std/zig/Parse.zig index 14968d9923..bd99c4d90f 100644 --- a/lib/std/zig/Parse.zig +++ b/lib/std/zig/Parse.zig @@ -658,9 +658,8 @@ fn expectTopLevelDecl(p: *Parse) !Node.Index { } const thread_local_token = p.eatToken(.keyword_threadlocal); - const var_decl = try p.parseVarDecl(); + const var_decl = try p.parseGlobalVarDecl(); if (var_decl != 0) { - try p.expectSemicolon(.expected_semi_after_decl, false); return var_decl; } if (thread_local_token != null) { @@ -792,8 +791,9 @@ fn parseFnProto(p: *Parse) !Node.Index { } } -/// VarDecl <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? AddrSpace? LinkSection? (EQUAL Expr)? SEMICOLON -fn parseVarDecl(p: *Parse) !Node.Index { +/// VarDeclProto <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? AddrSpace? LinkSection? +/// Returns a `*_var_decl` node with its rhs (init expression) initialized to 0. 
+fn parseVarDeclProto(p: *Parse) !Node.Index { const mut_token = p.eatToken(.keyword_const) orelse p.eatToken(.keyword_var) orelse return null_node; @@ -803,18 +803,7 @@ fn parseVarDecl(p: *Parse) !Node.Index { const align_node = try p.parseByteAlign(); const addrspace_node = try p.parseAddrSpace(); const section_node = try p.parseLinkSection(); - const init_node: Node.Index = switch (p.token_tags[p.tok_i]) { - .equal_equal => blk: { - try p.warn(.wrong_equal_var_decl); - p.tok_i += 1; - break :blk try p.expectExpr(); - }, - .equal => blk: { - p.tok_i += 1; - break :blk try p.expectExpr(); - }, - else => 0, - }; + if (section_node == 0 and addrspace_node == 0) { if (align_node == 0) { return p.addNode(.{ @@ -822,31 +811,33 @@ fn parseVarDecl(p: *Parse) !Node.Index { .main_token = mut_token, .data = .{ .lhs = type_node, - .rhs = init_node, + .rhs = 0, }, }); - } else if (type_node == 0) { + } + + if (type_node == 0) { return p.addNode(.{ .tag = .aligned_var_decl, .main_token = mut_token, .data = .{ .lhs = align_node, - .rhs = init_node, - }, - }); - } else { - return p.addNode(.{ - .tag = .local_var_decl, - .main_token = mut_token, - .data = .{ - .lhs = try p.addExtra(Node.LocalVarDecl{ - .type_node = type_node, - .align_node = align_node, - }), - .rhs = init_node, + .rhs = 0, }, }); } + + return p.addNode(.{ + .tag = .local_var_decl, + .main_token = mut_token, + .data = .{ + .lhs = try p.addExtra(Node.LocalVarDecl{ + .type_node = type_node, + .align_node = align_node, + }), + .rhs = 0, + }, + }); } else { return p.addNode(.{ .tag = .global_var_decl, @@ -858,12 +849,38 @@ fn parseVarDecl(p: *Parse) !Node.Index { .addrspace_node = addrspace_node, .section_node = section_node, }), - .rhs = init_node, + .rhs = 0, }, }); } } +/// GlobalVarDecl <- VarDeclProto (EQUAL Expr?) 
SEMICOLON +fn parseGlobalVarDecl(p: *Parse) !Node.Index { + const var_decl = try p.parseVarDeclProto(); + if (var_decl == 0) { + return null_node; + } + + const init_node: Node.Index = switch (p.token_tags[p.tok_i]) { + .equal_equal => blk: { + try p.warn(.wrong_equal_var_decl); + p.tok_i += 1; + break :blk try p.expectExpr(); + }, + .equal => blk: { + p.tok_i += 1; + break :blk try p.expectExpr(); + }, + else => 0, + }; + + p.nodes.items(.data)[var_decl].rhs = init_node; + + try p.expectSemicolon(.expected_semi_after_decl, false); + return var_decl; +} + /// ContainerField /// <- doc_comment? KEYWORD_comptime? IDENTIFIER (COLON TypeExpr)? ByteAlign? (EQUAL Expr)? /// / doc_comment? KEYWORD_comptime? (IDENTIFIER COLON)? !KEYWORD_fn TypeExpr ByteAlign? (EQUAL Expr)? @@ -918,8 +935,7 @@ fn expectContainerField(p: *Parse) !Node.Index { } /// Statement -/// <- KEYWORD_comptime? VarDecl -/// / KEYWORD_comptime BlockExprStatement +/// <- KEYWORD_comptime ComptimeStatement /// / KEYWORD_nosuspend BlockExprStatement /// / KEYWORD_suspend BlockExprStatement /// / KEYWORD_defer BlockExprStatement @@ -927,27 +943,28 @@ fn expectContainerField(p: *Parse) !Node.Index { /// / IfStatement /// / LabeledStatement /// / SwitchExpr -/// / AssignExpr SEMICOLON -fn parseStatement(p: *Parse, allow_defer_var: bool) Error!Node.Index { - const comptime_token = p.eatToken(.keyword_comptime); - - if (allow_defer_var) { - const var_decl = try p.parseVarDecl(); - if (var_decl != 0) { - try p.expectSemicolon(.expected_semi_after_decl, true); - return var_decl; +/// / VarDeclExprStatement +fn expectStatement(p: *Parse, allow_defer_var: bool) Error!Node.Index { + if (p.eatToken(.keyword_comptime)) |comptime_token| { + const block_expr = try p.parseBlockExpr(); + if (block_expr != 0) { + return p.addNode(.{ + .tag = .@"comptime", + .main_token = comptime_token, + .data = .{ + .lhs = block_expr, + .rhs = undefined, + }, + }); } - } - if (comptime_token) |token| { - return p.addNode(.{ - .tag = 
.@"comptime", - .main_token = token, - .data = .{ - .lhs = try p.expectBlockExprStatement(), - .rhs = undefined, - }, - }); + if (allow_defer_var) { + return p.expectVarDeclExprStatement(comptime_token); + } else { + const assign = try p.expectAssignExpr(); + try p.expectSemicolon(.expected_semi_after_stmt, true); + return assign; + } } switch (p.token_tags[p.tok_i]) { @@ -1011,21 +1028,145 @@ fn parseStatement(p: *Parse, allow_defer_var: bool) Error!Node.Index { const labeled_statement = try p.parseLabeledStatement(); if (labeled_statement != 0) return labeled_statement; - const assign_expr = try p.parseAssignExpr(); - if (assign_expr != 0) { + if (allow_defer_var) { + return p.expectVarDeclExprStatement(null); + } else { + const assign = try p.expectAssignExpr(); try p.expectSemicolon(.expected_semi_after_stmt, true); - return assign_expr; + return assign; } +} - return null_node; +/// ComptimeStatement +/// <- BlockExpr +/// / VarDeclExprStatement +fn expectComptimeStatement(p: *Parse, comptime_token: TokenIndex) !Node.Index { + const block_expr = try p.parseBlockExpr(); + if (block_expr != 0) { + return p.addNode(.{ + .tag = .@"comptime", + .main_token = comptime_token, + .data = .{ .lhs = block_expr, .rhs = undefined }, + }); + } + return p.expectVarDeclExprStatement(comptime_token); } -fn expectStatement(p: *Parse, allow_defer_var: bool) !Node.Index { - const statement = try p.parseStatement(allow_defer_var); - if (statement == 0) { - return p.fail(.expected_statement); +/// VarDeclExprStatement +/// <- VarDeclProto (COMMA (VarDeclProto / Expr))* EQUAL Expr SEMICOLON +/// / Expr (AssignOp Expr / (COMMA (VarDeclProto / Expr))+ EQUAL Expr)? 
SEMICOLON +fn expectVarDeclExprStatement(p: *Parse, comptime_token: ?TokenIndex) !Node.Index { + const scratch_top = p.scratch.items.len; + defer p.scratch.shrinkRetainingCapacity(scratch_top); + + while (true) { + const var_decl_proto = try p.parseVarDeclProto(); + if (var_decl_proto != 0) { + try p.scratch.append(p.gpa, var_decl_proto); + } else { + const expr = try p.parseExpr(); + if (expr == 0) { + if (p.scratch.items.len == scratch_top) { + // We parsed nothing + return p.fail(.expected_statement); + } else { + // We've had at least one LHS, but had a bad comma + return p.fail(.expected_expr_or_var_decl); + } + } + try p.scratch.append(p.gpa, expr); + } + _ = p.eatToken(.comma) orelse break; + } + + const lhs_count = p.scratch.items.len - scratch_top; + assert(lhs_count > 0); + + const equal_token = p.eatToken(.equal) orelse eql: { + if (lhs_count > 1) { + // Definitely a destructure, so allow recovering from == + if (p.eatToken(.equal_equal)) |tok| { + try p.warnMsg(.{ .tag = .wrong_equal_var_decl, .token = tok }); + break :eql tok; + } + return p.failExpected(.equal); + } + const lhs = p.scratch.items[scratch_top]; + switch (p.nodes.items(.tag)[lhs]) { + .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => { + // Definitely a var decl, so allow recovering from == + if (p.eatToken(.equal_equal)) |tok| { + try p.warnMsg(.{ .tag = .wrong_equal_var_decl, .token = tok }); + break :eql tok; + } + return p.failExpected(.equal); + }, + else => {}, + } + + const expr = try p.finishAssignExpr(lhs); + try p.expectSemicolon(.expected_semi_after_stmt, true); + if (comptime_token) |t| { + return p.addNode(.{ + .tag = .@"comptime", + .main_token = t, + .data = .{ + .lhs = expr, + .rhs = undefined, + }, + }); + } else { + return expr; + } + }; + + const rhs = try p.expectExpr(); + try p.expectSemicolon(.expected_semi_after_stmt, true); + + if (lhs_count == 1) { + const lhs = p.scratch.items[scratch_top]; + switch (p.nodes.items(.tag)[lhs]) { + 
.global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => { + p.nodes.items(.data)[lhs].rhs = rhs; + // Don't need to wrap in comptime + return lhs; + }, + else => {}, + } + const expr = try p.addNode(.{ + .tag = .assign, + .main_token = equal_token, + .data = .{ .lhs = lhs, .rhs = rhs }, + }); + if (comptime_token) |t| { + return p.addNode(.{ + .tag = .@"comptime", + .main_token = t, + .data = .{ + .lhs = expr, + .rhs = undefined, + }, + }); + } else { + return expr; + } } - return statement; + + // An actual destructure! No need for any `comptime` wrapper here. + + const extra_start = p.extra_data.items.len; + try p.extra_data.ensureUnusedCapacity(p.gpa, lhs_count + 1); + p.extra_data.appendAssumeCapacity(@intCast(lhs_count)); + p.extra_data.appendSliceAssumeCapacity(p.scratch.items[scratch_top..]); + + return p.addNode(.{ + .tag = .assign_destructure, + .main_token = equal_token, + .data = .{ + .lhs = @intCast(extra_start), + .rhs = rhs, + }, + }); } /// If a parse error occurs, reports an error, but then finds the next statement @@ -1345,7 +1486,7 @@ fn parseBlockExpr(p: *Parse) Error!Node.Index { } } -/// AssignExpr <- Expr (AssignOp Expr)? +/// AssignExpr <- Expr (AssignOp Expr / (COMMA Expr)+ EQUAL Expr)? /// /// AssignOp /// <- ASTERISKEQUAL @@ -1369,8 +1510,40 @@ fn parseBlockExpr(p: *Parse) Error!Node.Index { fn parseAssignExpr(p: *Parse) !Node.Index { const expr = try p.parseExpr(); if (expr == 0) return null_node; + return p.finishAssignExpr(expr); +} - const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { +/// SingleAssignExpr <- Expr (AssignOp Expr)? 
+fn parseSingleAssignExpr(p: *Parse) !Node.Index { + const lhs = try p.parseExpr(); + if (lhs == 0) return null_node; + const tag = assignOpNode(p.token_tags[p.tok_i]) orelse return lhs; + return p.addNode(.{ + .tag = tag, + .main_token = p.nextToken(), + .data = .{ + .lhs = lhs, + .rhs = try p.expectExpr(), + }, + }); +} + +fn finishAssignExpr(p: *Parse, lhs: Node.Index) !Node.Index { + const tok = p.token_tags[p.tok_i]; + if (tok == .comma) return p.finishAssignDestructureExpr(lhs); + const tag = assignOpNode(tok) orelse return lhs; + return p.addNode(.{ + .tag = tag, + .main_token = p.nextToken(), + .data = .{ + .lhs = lhs, + .rhs = try p.expectExpr(), + }, + }); +} + +fn assignOpNode(tok: Token.Tag) ?Node.Tag { + return switch (tok) { .asterisk_equal => .assign_mul, .slash_equal => .assign_div, .percent_equal => .assign_mod, @@ -1389,18 +1562,51 @@ fn parseAssignExpr(p: *Parse) !Node.Index { .plus_pipe_equal => .assign_add_sat, .minus_pipe_equal => .assign_sub_sat, .equal => .assign, - else => return expr, + else => null, }; +} + +fn finishAssignDestructureExpr(p: *Parse, first_lhs: Node.Index) !Node.Index { + const scratch_top = p.scratch.items.len; + defer p.scratch.shrinkRetainingCapacity(scratch_top); + + try p.scratch.append(p.gpa, first_lhs); + + while (p.eatToken(.comma)) |_| { + const expr = try p.expectExpr(); + try p.scratch.append(p.gpa, expr); + } + + const equal_token = try p.expectToken(.equal); + + const rhs = try p.expectExpr(); + + const lhs_count = p.scratch.items.len - scratch_top; + assert(lhs_count > 1); // we already had first_lhs, and must have at least one more lvalue + + const extra_start = p.extra_data.items.len; + try p.extra_data.ensureUnusedCapacity(p.gpa, lhs_count + 1); + p.extra_data.appendAssumeCapacity(@intCast(lhs_count)); + p.extra_data.appendSliceAssumeCapacity(p.scratch.items[scratch_top..]); + return p.addNode(.{ - .tag = tag, - .main_token = p.nextToken(), + .tag = .assign_destructure, + .main_token = equal_token, .data = 
.{ - .lhs = expr, - .rhs = try p.expectExpr(), + .lhs = @intCast(extra_start), + .rhs = rhs, }, }); } +fn expectSingleAssignExpr(p: *Parse) !Node.Index { + const expr = try p.parseSingleAssignExpr(); + if (expr == 0) { + return p.fail(.expected_expr_or_assignment); + } + return expr; +} + fn expectAssignExpr(p: *Parse) !Node.Index { const expr = try p.parseAssignExpr(); if (expr == 0) { @@ -3260,7 +3466,7 @@ fn parseSwitchProng(p: *Parse) !Node.Index { .main_token = arrow_token, .data = .{ .lhs = 0, - .rhs = try p.expectAssignExpr(), + .rhs = try p.expectSingleAssignExpr(), }, }), 1 => return p.addNode(.{ @@ -3268,7 +3474,7 @@ fn parseSwitchProng(p: *Parse) !Node.Index { .main_token = arrow_token, .data = .{ .lhs = items[0], - .rhs = try p.expectAssignExpr(), + .rhs = try p.expectSingleAssignExpr(), }, }), else => return p.addNode(.{ @@ -3276,7 +3482,7 @@ fn parseSwitchProng(p: *Parse) !Node.Index { .main_token = arrow_token, .data = .{ .lhs = try p.addExtra(try p.listToSpan(items)), - .rhs = try p.expectAssignExpr(), + .rhs = try p.expectSingleAssignExpr(), }, }), } diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 00c50525a3..68adf6aecd 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4348,12 +4348,12 @@ test "zig fmt: invalid else branch statement" { \\ for ("") |_| {} else defer {} \\} , &[_]Error{ - .expected_statement, - .expected_statement, - .expected_statement, - .expected_statement, - .expected_statement, - .expected_statement, + .expected_expr_or_assignment, + .expected_expr_or_assignment, + .expected_expr_or_assignment, + .expected_expr_or_assignment, + .expected_expr_or_assignment, + .expected_expr_or_assignment, }); } @@ -6078,7 +6078,7 @@ test "recovery: missing for payload" { try testError( \\comptime { \\ const a = for(a) {}; - \\ const a: for(a) blk: {}; + \\ const a: for(a) blk: {} = {}; \\ for(a) {} \\} , &[_]Error{ diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 
2cf7bc9716..45a89a0c3b 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -164,7 +164,7 @@ fn renderMember( .local_var_decl, .simple_var_decl, .aligned_var_decl, - => return renderVarDecl(gpa, ais, tree, tree.fullVarDecl(decl).?), + => return renderVarDecl(gpa, ais, tree, tree.fullVarDecl(decl).?, false, .semicolon), .test_decl => { const test_token = main_tokens[decl]; @@ -427,6 +427,42 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index, return renderExpression(gpa, ais, tree, infix.rhs, space); }, + .assign_destructure => { + const lhs_count = tree.extra_data[datas[node].lhs]; + assert(lhs_count > 1); + const lhs_exprs = tree.extra_data[datas[node].lhs + 1 ..][0..lhs_count]; + const rhs = datas[node].rhs; + + const maybe_comptime_token = tree.firstToken(node) - 1; + if (token_tags[maybe_comptime_token] == .keyword_comptime) { + try renderToken(ais, tree, maybe_comptime_token, .space); + } + + for (lhs_exprs, 0..) |lhs_node, i| { + const lhs_space: Space = if (i == lhs_exprs.len - 1) .space else .comma_space; + switch (node_tags[lhs_node]) { + .global_var_decl, + .local_var_decl, + .simple_var_decl, + .aligned_var_decl, + => { + try renderVarDecl(gpa, ais, tree, tree.fullVarDecl(lhs_node).?, true, lhs_space); + }, + else => try renderExpression(gpa, ais, tree, lhs_node, lhs_space), + } + } + const equal_token = main_tokens[node]; + if (tree.tokensOnSameLine(equal_token, equal_token + 1)) { + try renderToken(ais, tree, equal_token, .space); + } else { + ais.pushIndent(); + try renderToken(ais, tree, equal_token, .newline); + ais.popIndent(); + } + ais.pushIndentOneShot(); + return renderExpression(gpa, ais, tree, rhs, space); + }, + .bit_not, .bool_not, .negation, @@ -943,7 +979,16 @@ fn renderAsmInput( return renderToken(ais, tree, datas[asm_input].rhs, space); // rparen } -fn renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDecl) Error!void { +fn renderVarDecl( + gpa: Allocator, + ais: *Ais, 
+ tree: Ast, + var_decl: Ast.full.VarDecl, + /// Destructures intentionally ignore leading `comptime` tokens. + ignore_comptime_token: bool, + /// `comma_space` and `space` are used for destructure LHS decls. + space: Space, +) Error!void { if (var_decl.visib_token) |visib_token| { try renderToken(ais, tree, visib_token, Space.space); // pub } @@ -960,21 +1005,31 @@ fn renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDec try renderToken(ais, tree, thread_local_token, Space.space); // threadlocal } - if (var_decl.comptime_token) |comptime_token| { - try renderToken(ais, tree, comptime_token, Space.space); // comptime + if (!ignore_comptime_token) { + if (var_decl.comptime_token) |comptime_token| { + try renderToken(ais, tree, comptime_token, Space.space); // comptime + } } try renderToken(ais, tree, var_decl.ast.mut_token, .space); // var - const name_space = if (var_decl.ast.type_node == 0 and - (var_decl.ast.align_node != 0 or - var_decl.ast.addrspace_node != 0 or - var_decl.ast.section_node != 0 or - var_decl.ast.init_node != 0)) - Space.space - else - Space.none; - try renderIdentifier(ais, tree, var_decl.ast.mut_token + 1, name_space, .preserve_when_shadowing); // name + if (var_decl.ast.type_node != 0 or var_decl.ast.align_node != 0 or + var_decl.ast.addrspace_node != 0 or var_decl.ast.section_node != 0 or + var_decl.ast.init_node != 0) + { + const name_space = if (var_decl.ast.type_node == 0 and + (var_decl.ast.align_node != 0 or + var_decl.ast.addrspace_node != 0 or + var_decl.ast.section_node != 0 or + var_decl.ast.init_node != 0)) + Space.space + else + Space.none; + + try renderIdentifier(ais, tree, var_decl.ast.mut_token + 1, name_space, .preserve_when_shadowing); // name + } else { + return renderIdentifier(ais, tree, var_decl.ast.mut_token + 1, space, .preserve_when_shadowing); // name + } if (var_decl.ast.type_node != 0) { try renderToken(ais, tree, var_decl.ast.mut_token + 2, Space.space); // : @@ -983,9 +1038,7 @@ fn 
renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDec { try renderExpression(gpa, ais, tree, var_decl.ast.type_node, .space); } else { - try renderExpression(gpa, ais, tree, var_decl.ast.type_node, .none); - const semicolon = tree.lastToken(var_decl.ast.type_node) + 1; - return renderToken(ais, tree, semicolon, Space.newline); // ; + return renderExpression(gpa, ais, tree, var_decl.ast.type_node, space); } } @@ -1001,8 +1054,7 @@ fn renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDec { try renderToken(ais, tree, rparen, .space); // ) } else { - try renderToken(ais, tree, rparen, .none); // ) - return renderToken(ais, tree, rparen + 1, Space.newline); // ; + return renderToken(ais, tree, rparen, space); // ) } } @@ -1031,23 +1083,21 @@ fn renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDec if (var_decl.ast.init_node != 0) { try renderToken(ais, tree, rparen, .space); // ) } else { - try renderToken(ais, tree, rparen, .none); // ) - return renderToken(ais, tree, rparen + 1, Space.newline); // ; + return renderToken(ais, tree, rparen, space); // ) } } - if (var_decl.ast.init_node != 0) { - const eq_token = tree.firstToken(var_decl.ast.init_node) - 1; - const eq_space: Space = if (tree.tokensOnSameLine(eq_token, eq_token + 1)) .space else .newline; - { - ais.pushIndent(); - try renderToken(ais, tree, eq_token, eq_space); // = - ais.popIndent(); - } - ais.pushIndentOneShot(); - return renderExpression(gpa, ais, tree, var_decl.ast.init_node, .semicolon); // ; + assert(var_decl.ast.init_node != 0); + + const eq_token = tree.firstToken(var_decl.ast.init_node) - 1; + const eq_space: Space = if (tree.tokensOnSameLine(eq_token, eq_token + 1)) .space else .newline; + { + ais.pushIndent(); + try renderToken(ais, tree, eq_token, eq_space); // = + ais.popIndent(); } - return renderToken(ais, tree, var_decl.ast.mut_token + 2, .newline); // ; + ais.pushIndentOneShot(); + return renderExpression(gpa, ais, 
tree, var_decl.ast.init_node, space); // ; } fn renderIf(gpa: Allocator, ais: *Ais, tree: Ast, if_node: Ast.full.If, space: Space) Error!void { @@ -1825,7 +1875,7 @@ fn renderBlock( .local_var_decl, .simple_var_decl, .aligned_var_decl, - => try renderVarDecl(gpa, ais, tree, tree.fullVarDecl(stmt).?), + => try renderVarDecl(gpa, ais, tree, tree.fullVarDecl(stmt).?, false, .semicolon), else => try renderExpression(gpa, ais, tree, stmt, .semicolon), } } |
