| author | Vexu <git@vexu.eu> | 2020-03-09 11:02:16 +0200 |
|---|---|---|
| committer | Vexu <git@vexu.eu> | 2020-03-09 12:23:30 +0200 |
| commit | 6f8d732599461aa816f545b658a068eceb6ac9bc (patch) | |
| tree | 5fda80a4507cf0676c1eb9a2d4badfd17d5ee599 /lib | |
| parent | e2fd289a33bb35cf4b86daa4d80adb7cc0c2c2b0 (diff) | |
| download | zig-6f8d732599461aa816f545b658a068eceb6ac9bc.tar.gz zig-6f8d732599461aa816f545b658a068eceb6ac9bc.zip | |
update parsers to new noasync syntax
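For context, a minimal sketch of the surface syntax the updated std.zig parser accepts, based on the grammar comments changed below (`KEYWORD_noasync Expr` in PrimaryExpr and `KEYWORD_noasync TypeExpr` in PrimaryTypeExpr). `compute` is a made-up function for illustration; this shows what parses, not a claim about the stage1 compiler:

```zig
const std = @import("std");

fn compute() i32 {
    return 40 + 2;
}

pub fn main() void {
    // `noasync` now prefixes a whole expression, mirroring `comptime`,
    // rather than being limited to the old `noasync call()` and
    // `noasync await x` forms.
    const result = noasync compute();
    std.debug.warn("result = {}\n", .{result});
}
```

The prefixed expression is parsed into the new `Node.Noasync` introduced in ast.zig below.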
Diffstat (limited to 'lib')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | lib/std/zig/ast.zig | 29 |
| -rw-r--r-- | lib/std/zig/parse.zig | 62 |
| -rw-r--r-- | lib/std/zig/render.zig | 9 |
3 files changed, 69 insertions, 31 deletions
diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig
index bc4f6350d6..711806b810 100644
--- a/lib/std/zig/ast.zig
+++ b/lib/std/zig/ast.zig
@@ -431,6 +431,7 @@ pub const Node = struct {
         ContainerDecl,
         Asm,
         Comptime,
+        Noasync,
         Block,
 
         // Misc
@@ -1078,6 +1079,30 @@ pub const Node = struct {
         }
     };
 
+    pub const Noasync = struct {
+        base: Node = Node{ .id = .Noasync },
+        doc_comments: ?*DocComment,
+        noasync_token: TokenIndex,
+        expr: *Node,
+
+        pub fn iterate(self: *Noasync, index: usize) ?*Node {
+            var i = index;
+
+            if (i < 1) return self.expr;
+            i -= 1;
+
+            return null;
+        }
+
+        pub fn firstToken(self: *const Noasync) TokenIndex {
+            return self.noasync_token;
+        }
+
+        pub fn lastToken(self: *const Noasync) TokenIndex {
+            return self.expr.lastToken();
+        }
+    };
+
     pub const Payload = struct {
         base: Node = Node{ .id = .Payload },
         lpipe: TokenIndex,
@@ -1560,9 +1585,7 @@ pub const Node = struct {
         pub const Op = union(enum) {
             AddressOf,
             ArrayType: ArrayInfo,
-            Await: struct {
-                noasync_token: ?TokenIndex = null,
-            },
+            Await,
             BitNot,
             BoolNot,
             Cancel,
diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig
index cadf25eef6..5392c18dfe 100644
--- a/lib/std/zig/parse.zig
+++ b/lib/std/zig/parse.zig
@@ -856,6 +856,7 @@ fn parsePrefixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 ///      / IfExpr
 ///      / KEYWORD_break BreakLabel? Expr?
 ///      / KEYWORD_comptime Expr
+///      / KEYWORD_noasync Expr
 ///      / KEYWORD_continue BreakLabel?
 ///      / KEYWORD_resume Expr
 ///      / KEYWORD_return Expr?
@@ -870,7 +871,7 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
         const label = try parseBreakLabel(arena, it, tree);
         const expr_node = try parseExpr(arena, it, tree);
         const node = try arena.create(Node.ControlFlowExpression);
-        node.* = Node.ControlFlowExpression{
+        node.* = .{
             .ltoken = token,
             .kind = Node.ControlFlowExpression.Kind{ .Break = label },
             .rhs = expr_node,
@@ -883,7 +884,7 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
             .ExpectedExpr = AstError.ExpectedExpr{ .token = it.index },
         });
         const node = try arena.create(Node.Comptime);
-        node.* = Node.Comptime{
+        node.* = .{
             .doc_comments = null,
             .comptime_token = token,
             .expr = expr_node,
@@ -891,10 +892,23 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
         return &node.base;
     }
 
+    if (eatToken(it, .Keyword_noasync)) |token| {
+        const expr_node = try expectNode(arena, it, tree, parseExpr, AstError{
+            .ExpectedExpr = AstError.ExpectedExpr{ .token = it.index },
+        });
+        const node = try arena.create(Node.Noasync);
+        node.* = .{
+            .doc_comments = null,
+            .noasync_token = token,
+            .expr = expr_node,
+        };
+        return &node.base;
+    }
+
     if (eatToken(it, .Keyword_continue)) |token| {
         const label = try parseBreakLabel(arena, it, tree);
         const node = try arena.create(Node.ControlFlowExpression);
-        node.* = Node.ControlFlowExpression{
+        node.* = .{
             .ltoken = token,
             .kind = Node.ControlFlowExpression.Kind{ .Continue = label },
             .rhs = null,
@@ -907,7 +921,7 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
             .ExpectedExpr = AstError.ExpectedExpr{ .token = it.index },
         });
         const node = try arena.create(Node.PrefixOp);
-        node.* = Node.PrefixOp{
+        node.* = .{
             .op_token = token,
             .op = Node.PrefixOp.Op.Resume,
             .rhs = expr_node,
@@ -918,7 +932,7 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
     if (eatToken(it, .Keyword_return)) |token| {
         const expr_node = try parseExpr(arena, it, tree);
         const node = try arena.create(Node.ControlFlowExpression);
-        node.* = Node.ControlFlowExpression{
+        node.* = .{
             .ltoken = token,
             .kind = Node.ControlFlowExpression.Kind.Return,
             .rhs = expr_node,
@@ -1126,19 +1140,18 @@ fn parseErrorUnionExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*No
 
 /// SuffixExpr
 ///     <- KEYWORD_async PrimaryTypeExpr SuffixOp* FnCallArguments
-///      / KEYWORD_noasync PrimaryTypeExpr SuffixOp* FnCallArguments
 ///      / PrimaryTypeExpr (SuffixOp / FnCallArguments)*
 fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
-    const maybe_async = eatAnnotatedToken(it, .Keyword_async) orelse eatAnnotatedToken(it, .Keyword_noasync);
+    const maybe_async = eatToken(it, .Keyword_async);
     if (maybe_async) |async_token| {
         const token_fn = eatToken(it, .Keyword_fn);
-        if (async_token.ptr.id == .Keyword_async and token_fn != null) {
+        if (token_fn != null) {
             // HACK: If we see the keyword `fn`, then we assume that
             // we are parsing an async fn proto, and not a call.
             // We therefore put back all tokens consumed by the async
             // prefix...
             putBackToken(it, token_fn.?);
-            putBackToken(it, async_token.index);
+            putBackToken(it, async_token);
             return parsePrimaryTypeExpr(arena, it, tree);
         }
         // TODO: Implement hack for parsing `async fn ...` in ast_parse_suffix_expr
@@ -1167,7 +1180,7 @@ fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
             .op = Node.SuffixOp.Op{
                 .Call = Node.SuffixOp.Op.Call{
                     .params = params.list,
-                    .async_token = async_token.index,
+                    .async_token = async_token,
                 },
             },
             .rtoken = params.rparen,
@@ -1224,6 +1237,7 @@ fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 ///      / IfTypeExpr
 ///      / INTEGER
 ///      / KEYWORD_comptime TypeExpr
+///      / KEYWORD_noasync TypeExpr
 ///      / KEYWORD_error DOT IDENTIFIER
 ///      / KEYWORD_false
 ///      / KEYWORD_null
@@ -1255,13 +1269,23 @@ fn parsePrimaryTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*N
     if (eatToken(it, .Keyword_comptime)) |token| {
         const expr = (try parseTypeExpr(arena, it, tree)) orelse return null;
         const node = try arena.create(Node.Comptime);
-        node.* = Node.Comptime{
+        node.* = .{
             .doc_comments = null,
             .comptime_token = token,
             .expr = expr,
         };
         return &node.base;
     }
+    if (eatToken(it, .Keyword_noasync)) |token| {
+        const expr = (try parseTypeExpr(arena, it, tree)) orelse return null;
+        const node = try arena.create(Node.Noasync);
+        node.* = .{
+            .doc_comments = null,
+            .noasync_token = token,
+            .expr = expr,
+        };
+        return &node.base;
+    }
     if (eatToken(it, .Keyword_error)) |token| {
         const period = try expectToken(it, tree, .Period);
         const identifier = try expectNode(arena, it, tree, parseIdentifier, AstError{
@@ -1269,7 +1293,7 @@ fn parsePrimaryTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*N
         });
         const global_error_set = try createLiteral(arena, Node.ErrorType, token);
         const node = try arena.create(Node.InfixOp);
-        node.* = Node.InfixOp{
+        node.* = .{
             .op_token = period,
             .lhs = global_error_set,
             .op = Node.InfixOp.Op.Period,
@@ -1281,7 +1305,7 @@ fn parsePrimaryTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*N
     if (eatToken(it, .Keyword_null)) |token| return createLiteral(arena, Node.NullLiteral, token);
     if (eatToken(it, .Keyword_anyframe)) |token| {
         const node = try arena.create(Node.AnyFrameType);
-        node.* = Node.AnyFrameType{
+        node.* = .{
             .anyframe_token = token,
             .result = null,
         };
@@ -2180,18 +2204,6 @@ fn parsePrefixOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
         .Ampersand => ops{ .AddressOf = {} },
         .Keyword_try => ops{ .Try = {} },
         .Keyword_await => ops{ .Await = .{} },
-        .Keyword_noasync => if (eatToken(it, .Keyword_await)) |await_tok| {
-            const node = try arena.create(Node.PrefixOp);
-            node.* = Node.PrefixOp{
-                .op_token = await_tok,
-                .op = .{ .Await = .{ .noasync_token = token.index } },
-                .rhs = undefined, // set by caller
-            };
-            return &node.base;
-        } else {
-            putBackToken(it, token.index);
-            return null;
-        },
         else => {
             putBackToken(it, token.index);
             return null;
diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig
index 625aef3131..ce9049e35b 100644
--- a/lib/std/zig/render.zig
+++ b/lib/std/zig/render.zig
@@ -390,6 +390,12 @@ fn renderExpression(
             try renderToken(tree, stream, comptime_node.comptime_token, indent, start_col, Space.Space);
             return renderExpression(allocator, stream, tree, indent, start_col, comptime_node.expr, space);
         },
+        .Noasync => {
+            const noasync_node = @fieldParentPtr(ast.Node.Noasync, "base", base);
+
+            try renderToken(tree, stream, noasync_node.noasync_token, indent, start_col, Space.Space);
+            return renderExpression(allocator, stream, tree, indent, start_col, noasync_node.expr, space);
+        },
         .Suspend => {
             const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base);
 
@@ -590,9 +596,6 @@ fn renderExpression(
                 },
 
                 .Await => |await_info| {
-                    if (await_info.noasync_token) |tok| {
-                        try renderToken(tree, stream, tok, indent, start_col, Space.Space);
-                    }
                     try renderToken(tree, stream, prefix_op_node.op_token, indent, start_col, Space.Space);
                 },
             }
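One consequence of dropping `noasync_token` from the `Await` prefix op is that `noasync await` is no longer a single annotated node; going by the parser changes above, it should now come out as a `Node.Noasync` wrapping an ordinary `Await` prefix op. A rough sketch of the two tree shapes, with hypothetical `worker`/`caller` functions (this illustrates the parse tree, not runtime behavior):

```zig
fn worker() i32 {
    return 123;
}

fn caller() i32 {
    var frame = async worker();
    // Old AST:  PrefixOp{ .op = .{ .Await = .{ .noasync_token = <tok> } }, .rhs = frame }
    // New AST:  Noasync{ .expr = PrefixOp{ .op = .Await, .rhs = frame } }
    return noasync await frame;
}
```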
