From db0812d4b7f856425e0bd26cd6f579468f3ac8ab Mon Sep 17 00:00:00 2001
From: Jimmi Holst Christensen
Date: Tue, 10 Apr 2018 14:22:01 +0200
Subject: std.zig.parser: changed block exprs from primary expr to expr

---
 std/zig/parser.zig    | 232 ++++++++++++++++++++++++++++----------------------
 std/zig/tokenizer.zig |   1 -
 2 files changed, 129 insertions(+), 104 deletions(-)

(limited to 'std')

diff --git a/std/zig/parser.zig b/std/zig/parser.zig
index 5f7412b5c2..65e8056ad2 100644
--- a/std/zig/parser.zig
+++ b/std/zig/parser.zig
@@ -766,6 +766,101 @@ pub const Parser = struct {
                             dest_ptr.store(&resume_node.base);
                             stack.append(State { .Expression = DestPtr { .Field = &resume_node.rhs } }) catch unreachable;
                         },
+                        Token.Id.Keyword_suspend => {
+                            const node = try arena.create(ast.NodeSuspend);
+                            *node = ast.NodeSuspend {
+                                .base = self.initNode(ast.Node.Id.Suspend),
+                                .suspend_token = token,
+                                .payload = null,
+                                .body = null,
+                            };
+                            dest_ptr.store(&node.base);
+                            stack.append(State { .SuspendBody = node }) catch unreachable;
+                            try stack.append(State { .Payload = &node.payload });
+                            continue;
+                        },
+                        Token.Id.Keyword_if => {
+                            const node = try arena.create(ast.NodeIf);
+                            *node = ast.NodeIf {
+                                .base = self.initNode(ast.Node.Id.If),
+                                .if_token = token,
+                                .condition = undefined,
+                                .payload = null,
+                                .body = undefined,
+                                .@"else" = null,
+                            };
+                            dest_ptr.store(&node.base);
+
+                            stack.append(State { .Else = &node.@"else" }) catch unreachable;
+                            try stack.append(State { .Expression = DestPtr { .Field = &node.body } });
+                            try stack.append(State { .PointerPayload = &node.payload });
+                            try stack.append(State { .ExpectToken = Token.Id.RParen });
+                            try stack.append(State { .Expression = DestPtr { .Field = &node.condition } });
+                            try stack.append(State { .ExpectToken = Token.Id.LParen });
+                            continue;
+                        },
+                        Token.Id.Keyword_while => {
+                            stack.append(State {
+                                .While = LoopCtx {
+                                    .label = null,
+                                    .inline_token = null,
+                                    .loop_token = token,
+                                    .dest_ptr = dest_ptr,
+                                }
+                            }) catch unreachable;
+                            continue;
+                        },
+                        Token.Id.Keyword_for => {
+                            stack.append(State {
+                                .For = LoopCtx {
+                                    .label = null,
+                                    .inline_token = null,
+                                    .loop_token = token,
+                                    .dest_ptr = dest_ptr,
+                                }
+                            }) catch unreachable;
+                            continue;
+                        },
+                        Token.Id.Keyword_switch => {
+                            const node = try arena.create(ast.NodeSwitch);
+                            *node = ast.NodeSwitch {
+                                .base = self.initNode(ast.Node.Id.Switch),
+                                .switch_token = token,
+                                .expr = undefined,
+                                .cases = ArrayList(&ast.NodeSwitchCase).init(arena),
+                                .rbrace = undefined,
+                            };
+                            dest_ptr.store(&node.base);
+
+                            stack.append(State {
+                                .SwitchCaseOrEnd = ListSave(&ast.NodeSwitchCase) {
+                                    .list = &node.cases,
+                                    .ptr = &node.rbrace,
+                                },
+                            }) catch unreachable;
+                            try stack.append(State { .ExpectToken = Token.Id.LBrace });
+                            try stack.append(State { .ExpectToken = Token.Id.RParen });
+                            try stack.append(State { .Expression = DestPtr { .Field = &node.expr } });
+                            try stack.append(State { .ExpectToken = Token.Id.LParen });
+                        },
+                        Token.Id.Keyword_comptime => {
+                            const node = try arena.create(ast.NodeComptime);
+                            *node = ast.NodeComptime {
+                                .base = self.initNode(ast.Node.Id.Comptime),
+                                .comptime_token = token,
+                                .expr = undefined,
+                            };
+                            dest_ptr.store(&node.base);
+                            try stack.append(State { .Expression = DestPtr { .Field = &node.expr } });
+                            continue;
+                        },
+                        Token.Id.LBrace => {
+                            const block = try self.createBlock(arena, (?Token)(null), token);
+                            dest_ptr.store(&block.base);
+
+                            stack.append(State { .Block = block }) catch unreachable;
+                            continue;
+                        },
                         else => {
                             self.putBackToken(token);
                             stack.append(State { .UnwrapExpressionBegin = dest_ptr }) catch unreachable;
@@ -1328,19 +1423,6 @@ pub const Parser = struct {
                             dest_ptr.store(&node.base);
                             continue;
                         },
-                        Token.Id.Keyword_suspend => {
-                            const node = try arena.create(ast.NodeSuspend);
-                            *node = ast.NodeSuspend {
-                                .base = self.initNode(ast.Node.Id.Suspend),
-                                .suspend_token = token,
-                                .payload = null,
-                                .body = null,
-                            };
-                            dest_ptr.store(&node.base);
-                            stack.append(State { .SuspendBody = node }) catch unreachable;
-                            try stack.append(State { .Payload = &node.payload });
-                            continue;
-                        },
                         Token.Id.MultilineStringLiteralLine => {
                             const node = try arena.create(ast.NodeMultilineStringLiteral);
                             *node = ast.NodeMultilineStringLiteral {
@@ -1544,13 +1626,6 @@
                             }) catch unreachable;
                             continue;
                         },
-                        Token.Id.LBrace => {
-                            const block = try self.createBlock(arena, (?Token)(null), token);
-                            dest_ptr.store(&block.base);
-
-                            stack.append(State { .Block = block }) catch unreachable;
-                            continue;
-                        },
                         Token.Id.Keyword_fn => {
                             // TODO shouldn't need these casts
                             const fn_proto = try self.createFnProto(arena, token,
@@ -1608,26 +1683,6 @@
                             try stack.append(State { .AsmOutputItems = &node.outputs });
                             try stack.append(State { .IfToken = Token.Id.Colon });
                         },
-                        Token.Id.Keyword_if => {
-                            const node = try arena.create(ast.NodeIf);
-                            *node = ast.NodeIf {
-                                .base = self.initNode(ast.Node.Id.If),
-                                .if_token = token,
-                                .condition = undefined,
-                                .payload = null,
-                                .body = undefined,
-                                .@"else" = null,
-                            };
-                            dest_ptr.store(&node.base);
-
-                            stack.append(State { .Else = &node.@"else" }) catch unreachable;
-                            try stack.append(State { .Expression = DestPtr { .Field = &node.body } });
-                            try stack.append(State { .PointerPayload = &node.payload });
-                            try stack.append(State { .ExpectToken = Token.Id.RParen });
-                            try stack.append(State { .Expression = DestPtr { .Field = &node.condition } });
-                            try stack.append(State { .ExpectToken = Token.Id.LParen });
-                            continue;
-                        },
                         Token.Id.Keyword_inline => {
                             stack.append(State {
                                 .Inline = InlineCtx {
@@ -1638,61 +1693,6 @@
                             }) catch unreachable;
                             continue;
                         },
-                        Token.Id.Keyword_while => {
-                            stack.append(State {
-                                .While = LoopCtx {
-                                    .label = null,
-                                    .inline_token = null,
-                                    .loop_token = token,
-                                    .dest_ptr = dest_ptr,
-                                }
-                            }) catch unreachable;
-                            continue;
-                        },
-                        Token.Id.Keyword_for => {
-                            stack.append(State {
-                                .For = LoopCtx {
-                                    .label = null,
-                                    .inline_token = null,
-                                    .loop_token = token,
-                                    .dest_ptr = dest_ptr,
-                                }
-                            }) catch unreachable;
-                            continue;
-                        },
-                        Token.Id.Keyword_switch => {
-                            const node = try arena.create(ast.NodeSwitch);
-                            *node = ast.NodeSwitch {
-                                .base = self.initNode(ast.Node.Id.Switch),
-                                .switch_token = token,
-                                .expr = undefined,
-                                .cases = ArrayList(&ast.NodeSwitchCase).init(arena),
-                                .rbrace = undefined,
-                            };
-                            dest_ptr.store(&node.base);
-
-                            stack.append(State {
-                                .SwitchCaseOrEnd = ListSave(&ast.NodeSwitchCase) {
-                                    .list = &node.cases,
-                                    .ptr = &node.rbrace,
-                                },
-                            }) catch unreachable;
-                            try stack.append(State { .ExpectToken = Token.Id.LBrace });
-                            try stack.append(State { .ExpectToken = Token.Id.RParen });
-                            try stack.append(State { .Expression = DestPtr { .Field = &node.expr } });
-                            try stack.append(State { .ExpectToken = Token.Id.LParen });
-                        },
-                        Token.Id.Keyword_comptime => {
-                            const node = try arena.create(ast.NodeComptime);
-                            *node = ast.NodeComptime {
-                                .base = self.initNode(ast.Node.Id.Comptime),
-                                .comptime_token = token,
-                                .expr = undefined,
-                            };
-                            dest_ptr.store(&node.base);
-                            try stack.append(State { .Expression = DestPtr { .Field = &node.expr } });
-                            continue;
-                        },
                         else => {
                             try self.parseError(&stack, token, "expected primary expression, found {}", @tagName(token.id));
                             continue;
                         },
@@ -4966,11 +4966,37 @@ test "zig fmt: coroutines" {
     );
 }
 
-//{
-//    var it = self.link_libs.iterator();
-//    while (true) {
-//        const entry = it.next() ?? break;
-//        zig_args.append("--library") catch unreachable;
-//        zig_args.append(entry.key) catch unreachable;
-//    }
-//}
+test "zig fmt: coroutines" {
+    try testCanonical(
+        \\async fn simpleAsyncFn() void {
+        \\    x += 1;
+        \\    suspend;
+        \\    x += 1;
+        \\    suspend |p| {}
+        \\    const p = async simpleAsyncFn() catch unreachable;
+        \\    await p;
+        \\}
+        \\
+        \\test "coroutine suspend, resume, cancel" {
+        \\    const p = try async testAsyncSeq();
+        \\    resume p;
+        \\    cancel p;
+        \\}
+        \\
+    );
+}
+
+test "zig fmt: Block after if" {
+    try testCanonical(
+        \\test "Block after if" {
+        \\    if (true) {
+        \\        const a = 0;
+        \\    }
+        \\
+        \\    {
+        \\        const a = 0;
+        \\    }
+        \\}
+        \\
+    );
+}
diff --git a/std/zig/tokenizer.zig b/std/zig/tokenizer.zig
index 7b1f86712a..91fb20974f 100644
--- a/std/zig/tokenizer.zig
+++ b/std/zig/tokenizer.zig
@@ -1158,7 +1158,6 @@ fn testTokenize(source: []const u8, expected_tokens: []const Token.Id) void {
     var tokenizer = Tokenizer.init(source);
     for (expected_tokens) |expected_token_id| {
         const token = tokenizer.next();
-        std.debug.warn("{} {}\n", @tagName(expected_token_id), @tagName(token.id));
         std.debug.assert(@TagType(Token.Id)(token.id) == @TagType(Token.Id)(expected_token_id));
         switch (expected_token_id) {
             Token.Id.StringLiteral => |expected_kind| {
--
cgit v1.2.3
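
For reference, the constructs this commit reclassifies (suspend, if, while, for, switch, comptime, and bare {} blocks) now enter the parser's state machine from the expression token switch rather than the primary-expression switch. Below is a minimal sketch of Zig source with the shapes this affects; it is illustrative only, not taken from the commit, and the test name and body are invented for this note.

    test "block constructs in statement and expression position" {
        // A bare block following an `if` statement -- the case the new
        // "zig fmt: Block after if" canonical test covers.
        if (true) {
            const a = 0;
        }

        {
            const a = 0;
        }

        // An `if` used where an expression is expected.
        const x = if (true) 1 else 2;
    }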