author      Veikka Tuominen <git@vexu.eu>    2021-02-12 01:14:04 +0200
committer   Veikka Tuominen <git@vexu.eu>    2021-02-12 02:12:00 +0200
commit      e2289961c6b53451e5bdcdfb6eec27e27f6553ca (patch)
tree        4752b2cf7df22896fb7289649f5b44b0a22b454e
parent      faa3fa65ac89b774b26bdd0ea4ac70861b29d0b4 (diff)
snake_case Token.Tag
-rw-r--r--   lib/std/zig/ast.zig        112
-rw-r--r--   lib/std/zig/parse.zig      992
-rw-r--r--   lib/std/zig/render.zig     154
-rw-r--r--   lib/std/zig/tokenizer.zig  1572
4 files changed, 1415 insertions, 1415 deletions
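
The change is a mechanical rename of every Token.Tag member from TitleCase to snake_case: .Keyword_comptime becomes .keyword_comptime, .LBracket becomes .l_bracket, .Eof becomes .eof, and so on across the tokenizer, parser, renderer, and AST. A minimal before/after sketch of what the rename looks like at a call site follows; the Tag enum below is a hypothetical stand-in with only a few members, not the real std.zig.Token.Tag declaration.

const std = @import("std");

// Hypothetical subset of Token.Tag, showing the naming change only.
const Tag = enum {
    keyword_const, // was: Keyword_const
    keyword_var,   // was: Keyword_var
    l_brace,       // was: LBrace
    eof,           // was: Eof
};

pub fn main() void {
    const tag: Tag = .keyword_const;
    // Before this commit the prongs read .Keyword_const, .Keyword_var, .LBrace, .Eof.
    switch (tag) {
        .keyword_const, .keyword_var => std.debug.print("var decl\n", .{}),
        .l_brace => std.debug.print("block\n", .{}),
        .eof => std.debug.print("eof\n", .{}),
    }
}

Since enum literal syntax (.keyword_const) is resolved by name, every switch prong, eatToken/expectToken argument, and assert in the four files changes in lockstep, which is why insertions and deletions match exactly at 1415 each.
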
diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig
index ad96baf3f3..6a56c0e242 100644
--- a/lib/std/zig/ast.zig
+++ b/lib/std/zig/ast.zig
@@ -343,7 +343,7 @@ pub const Tree = struct {
.ContainerField,
=> {
const name_token = main_tokens[n];
- if (name_token > 0 and token_tags[name_token - 1] == .Keyword_comptime) {
+ if (name_token > 0 and token_tags[name_token - 1] == .keyword_comptime) {
end_offset += 1;
}
return name_token - end_offset;
@@ -358,12 +358,12 @@ pub const Tree = struct {
while (i > 0) {
i -= 1;
switch (token_tags[i]) {
- .Keyword_extern,
- .Keyword_export,
- .Keyword_comptime,
- .Keyword_pub,
- .Keyword_threadlocal,
- .StringLiteral,
+ .keyword_extern,
+ .keyword_export,
+ .keyword_comptime,
+ .keyword_pub,
+ .keyword_threadlocal,
+ .string_literal,
=> continue,
else => return i + 1 - end_offset,
@@ -379,7 +379,7 @@ pub const Tree = struct {
=> {
// Look for a label.
const lbrace = main_tokens[n];
- if (token_tags[lbrace - 1] == .Colon) {
+ if (token_tags[lbrace - 1] == .colon) {
end_offset += 2;
}
return lbrace - end_offset;
@@ -400,7 +400,7 @@ pub const Tree = struct {
=> {
const main_token = main_tokens[n];
switch (token_tags[main_token - 1]) {
- .Keyword_packed, .Keyword_extern => end_offset += 1,
+ .keyword_packed, .keyword_extern => end_offset += 1,
else => {},
}
return main_token - end_offset;
@@ -413,13 +413,13 @@ pub const Tree = struct {
=> {
const main_token = main_tokens[n];
return switch (token_tags[main_token]) {
- .Asterisk,
- .AsteriskAsterisk,
+ .asterisk,
+ .asterisk_asterisk,
=> switch (token_tags[main_token - 1]) {
- .LBracket => main_token - 1,
+ .l_bracket => main_token - 1,
else => main_token,
},
- .LBracket => main_token,
+ .l_bracket => main_token,
else => unreachable,
} - end_offset;
},
@@ -438,7 +438,7 @@ pub const Tree = struct {
},
.AsmOutput, .AsmInput => {
- assert(token_tags[main_tokens[n] - 1] == .LBracket);
+ assert(token_tags[main_tokens[n] - 1] == .l_bracket);
return main_tokens[n] - 1 - end_offset;
},
@@ -450,7 +450,7 @@ pub const Tree = struct {
=> {
const main_token = main_tokens[n];
return switch (token_tags[main_token - 1]) {
- .Keyword_inline => main_token - 1,
+ .keyword_inline => main_token - 1,
else => main_token,
} - end_offset;
},
@@ -1599,11 +1599,11 @@ pub const Tree = struct {
while (i > 0) {
i -= 1;
switch (token_tags[i]) {
- .Keyword_extern, .Keyword_export => result.extern_export_token = i,
- .Keyword_comptime => result.comptime_token = i,
- .Keyword_pub => result.visib_token = i,
- .Keyword_threadlocal => result.threadlocal_token = i,
- .StringLiteral => result.lib_name = i,
+ .keyword_extern, .keyword_export => result.extern_export_token = i,
+ .keyword_comptime => result.comptime_token = i,
+ .keyword_pub => result.visib_token = i,
+ .keyword_threadlocal => result.threadlocal_token = i,
+ .string_literal => result.lib_name = i,
else => break,
}
}
@@ -1621,14 +1621,14 @@ pub const Tree = struct {
// if (cond_expr) |x|
// ^ ^
const payload_pipe = tree.lastToken(info.cond_expr) + 2;
- if (token_tags[payload_pipe] == .Pipe) {
+ if (token_tags[payload_pipe] == .pipe) {
result.payload_token = payload_pipe + 1;
}
if (info.else_expr != 0) {
// then_expr else |x|
// ^ ^
result.else_token = tree.lastToken(info.then_expr) + 1;
- if (token_tags[result.else_token + 1] == .Pipe) {
+ if (token_tags[result.else_token + 1] == .pipe) {
result.error_token = result.else_token + 2;
}
}
@@ -1643,7 +1643,7 @@ pub const Tree = struct {
};
// comptime name: type = init,
// ^
- if (info.name_token > 0 and token_tags[info.name_token - 1] == .Keyword_comptime) {
+ if (info.name_token > 0 and token_tags[info.name_token - 1] == .keyword_comptime) {
result.comptime_token = info.name_token - 1;
}
return result;
@@ -1671,17 +1671,17 @@ pub const Tree = struct {
// literals in some places here
const Kind = full.PtrType.Kind;
const kind: Kind = switch (token_tags[info.main_token]) {
- .Asterisk,
- .AsteriskAsterisk,
+ .asterisk,
+ .asterisk_asterisk,
=> switch (token_tags[info.main_token + 1]) {
- .RBracket => .many,
- .Colon => .sentinel,
- .Identifier => if (token_tags[info.main_token - 1] == .LBracket) Kind.c else .one,
+ .r_bracket => .many,
+ .colon => .sentinel,
+ .identifier => if (token_tags[info.main_token - 1] == .l_bracket) Kind.c else .one,
else => .one,
},
- .LBracket => switch (token_tags[info.main_token + 1]) {
- .RBracket => Kind.slice,
- .Colon => .slice_sentinel,
+ .l_bracket => switch (token_tags[info.main_token + 1]) {
+ .r_bracket => Kind.slice,
+ .colon => .slice_sentinel,
else => unreachable,
},
else => unreachable,
@@ -1707,10 +1707,10 @@ pub const Tree = struct {
const end = tree.firstToken(info.child_type);
while (i < end) : (i += 1) {
switch (token_tags[i]) {
- .Keyword_allowzero => result.allowzero_token = i,
- .Keyword_const => result.const_token = i,
- .Keyword_volatile => result.volatile_token = i,
- .Keyword_align => {
+ .keyword_allowzero => result.allowzero_token = i,
+ .keyword_const => result.const_token = i,
+ .keyword_volatile => result.volatile_token = i,
+ .keyword_align => {
assert(info.align_node != 0);
if (info.bit_range_end != 0) {
assert(info.bit_range_start != 0);
@@ -1732,7 +1732,7 @@ pub const Tree = struct {
.layout_token = null,
};
switch (token_tags[info.main_token - 1]) {
- .Keyword_extern, .Keyword_packed => result.layout_token = info.main_token - 1,
+ .keyword_extern, .keyword_packed => result.layout_token = info.main_token - 1,
else => {},
}
return result;
@@ -1744,7 +1744,7 @@ pub const Tree = struct {
.ast = info,
.payload_token = null,
};
- if (token_tags[info.arrow_token + 1] == .Pipe) {
+ if (token_tags[info.arrow_token + 1] == .pipe) {
result.payload_token = info.arrow_token + 2;
}
return result;
@@ -1760,7 +1760,7 @@ pub const Tree = struct {
.outputs = &.{},
.first_clobber = null,
};
- if (token_tags[info.asm_token + 1] == .Keyword_volatile) {
+ if (token_tags[info.asm_token + 1] == .keyword_volatile) {
result.volatile_token = info.asm_token + 1;
}
const outputs_end: usize = for (info.items) |item, i| {
@@ -1776,10 +1776,10 @@ pub const Tree = struct {
if (info.items.len == 0) {
// asm ("foo" ::: "a", "b");
const template_token = tree.lastToken(info.template);
- if (token_tags[template_token + 1] == .Colon and
- token_tags[template_token + 2] == .Colon and
- token_tags[template_token + 3] == .Colon and
- token_tags[template_token + 4] == .StringLiteral)
+ if (token_tags[template_token + 1] == .colon and
+ token_tags[template_token + 2] == .colon and
+ token_tags[template_token + 3] == .colon and
+ token_tags[template_token + 4] == .string_literal)
{
result.first_clobber = template_token + 4;
}
@@ -1787,8 +1787,8 @@ pub const Tree = struct {
// asm ("foo" :: [_] "" (y) : "a", "b");
const last_input = result.inputs[result.inputs.len - 1];
const rparen = tree.lastToken(last_input);
- if (token_tags[rparen + 1] == .Colon and
- token_tags[rparen + 2] == .StringLiteral)
+ if (token_tags[rparen + 1] == .colon and
+ token_tags[rparen + 2] == .string_literal)
{
result.first_clobber = rparen + 2;
}
@@ -1796,9 +1796,9 @@ pub const Tree = struct {
// asm ("foo" : [_] "" (x) :: "a", "b");
const last_output = result.outputs[result.outputs.len - 1];
const rparen = tree.lastToken(last_output);
- if (token_tags[rparen + 1] == .Colon and
- token_tags[rparen + 2] == .Colon and
- token_tags[rparen + 3] == .StringLiteral)
+ if (token_tags[rparen + 1] == .colon and
+ token_tags[rparen + 2] == .colon and
+ token_tags[rparen + 3] == .string_literal)
{
result.first_clobber = rparen + 3;
}
@@ -1818,24 +1818,24 @@ pub const Tree = struct {
.error_token = null,
};
var tok_i = info.while_token - 1;
- if (token_tags[tok_i] == .Keyword_inline) {
+ if (token_tags[tok_i] == .keyword_inline) {
result.inline_token = tok_i;
tok_i -= 1;
}
- if (token_tags[tok_i] == .Colon and
- token_tags[tok_i - 1] == .Identifier)
+ if (token_tags[tok_i] == .colon and
+ token_tags[tok_i - 1] == .identifier)
{
result.label_token = tok_i - 1;
}
const last_cond_token = tree.lastToken(info.cond_expr);
- if (token_tags[last_cond_token + 2] == .Pipe) {
+ if (token_tags[last_cond_token + 2] == .pipe) {
result.payload_token = last_cond_token + 3;
}
if (info.else_expr != 0) {
// then_expr else |x|
// ^ ^
result.else_token = tree.lastToken(info.then_expr) + 1;
- if (token_tags[result.else_token + 1] == .Pipe) {
+ if (token_tags[result.else_token + 1] == .pipe) {
result.error_token = result.else_token + 2;
}
}
@@ -1849,7 +1849,7 @@ pub const Tree = struct {
.async_token = null,
};
const maybe_async_token = tree.firstToken(info.fn_expr) - 1;
- if (token_tags[maybe_async_token] == .Keyword_async) {
+ if (token_tags[maybe_async_token] == .keyword_async) {
result.async_token = maybe_async_token;
}
return result;
@@ -2120,7 +2120,7 @@ pub const Error = union(enum) {
pub const ExpectedVarDecl = SingleTokenError("Expected variable declaration, found '{s}'");
pub const ExpectedFn = SingleTokenError("Expected function, found '{s}'");
pub const ExpectedReturnType = SingleTokenError("Expected return type expression, found '{s}'");
- pub const ExpectedAggregateKw = SingleTokenError("Expected '" ++ Token.Tag.Keyword_struct.symbol() ++ "', '" ++ Token.Tag.Keyword_union.symbol() ++ "', '" ++ Token.Tag.Keyword_enum.symbol() ++ "', or '" ++ Token.Tag.Keyword_opaque.symbol() ++ "', found '{s}'");
+ pub const ExpectedAggregateKw = SingleTokenError("Expected '" ++ Token.Tag.keyword_struct.symbol() ++ "', '" ++ Token.Tag.keyword_union.symbol() ++ "', '" ++ Token.Tag.keyword_enum.symbol() ++ "', or '" ++ Token.Tag.keyword_opaque.symbol() ++ "', found '{s}'");
pub const ExpectedEqOrSemi = SingleTokenError("Expected '=' or ';', found '{s}'");
pub const ExpectedSemiOrLBrace = SingleTokenError("Expected ';' or '{{', found '{s}'");
pub const ExpectedSemiOrElse = SingleTokenError("Expected ';' or 'else', found '{s}'");
@@ -2129,7 +2129,7 @@ pub const Error = union(enum) {
pub const ExpectedColonOrRParen = SingleTokenError("Expected ':' or ')', found '{s}'");
pub const ExpectedLabelable = SingleTokenError("Expected 'while', 'for', 'inline', 'suspend', or '{{', found '{s}'");
pub const ExpectedInlinable = SingleTokenError("Expected 'while' or 'for', found '{s}'");
- pub const ExpectedAsmOutputReturnOrType = SingleTokenError("Expected '->' or '" ++ Token.Tag.Identifier.symbol() ++ "', found '{s}'");
+ pub const ExpectedAsmOutputReturnOrType = SingleTokenError("Expected '->' or '" ++ Token.Tag.identifier.symbol() ++ "', found '{s}'");
pub const ExpectedSliceOrRBracket = SingleTokenError("Expected ']' or '..', found '{s}'");
pub const ExpectedTypeExpr = SingleTokenError("Expected type expression, found '{s}'");
pub const ExpectedPrimaryTypeExpr = SingleTokenError("Expected primary type expression, found '{s}'");
@@ -2185,7 +2185,7 @@ pub const Error = union(enum) {
pub fn render(self: *const ExpectedToken, tokens: []const Token.Tag, stream: anytype) !void {
const found_token = tokens[self.token];
switch (found_token) {
- .Invalid => {
+ .invalid => {
return stream.print("expected '{s}', found invalid bytes", .{self.expected_id.symbol()});
},
else => {
diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig
index 21ebe43582..855f889794 100644
--- a/lib/std/zig/parse.zig
+++ b/lib/std/zig/parse.zig
@@ -32,7 +32,7 @@ pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!Tree {
.tag = token.tag,
.start = @intCast(u32, token.loc.start),
});
- if (token.tag == .Eof) break;
+ if (token.tag == .eof) break;
}
var parser: Parser = .{
@@ -68,7 +68,7 @@ pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!Tree {
const root_decls = try root_members.toSpan(&parser);
// parseContainerMembers will try to skip as much invalid tokens as
// it can, so we are now at EOF.
- assert(parser.token_tags[parser.tok_i] == .Eof);
+ assert(parser.token_tags[parser.tok_i] == .eof);
parser.nodes.items(.data)[0] = .{
.lhs = root_decls.start,
.rhs = root_decls.end,
@@ -186,14 +186,14 @@ const Parser = struct {
} = .none;
// Skip container doc comments.
- while (p.eatToken(.ContainerDocComment)) |_| {}
+ while (p.eatToken(.container_doc_comment)) |_| {}
var trailing_comma = false;
while (true) {
const doc_comment = p.eatDocComments();
switch (p.token_tags[p.tok_i]) {
- .Keyword_test => {
+ .keyword_test => {
const test_decl_node = try p.expectTestDeclRecoverable();
if (test_decl_node != 0) {
if (field_state == .seen) {
@@ -203,8 +203,8 @@ const Parser = struct {
}
trailing_comma = false;
},
- .Keyword_comptime => switch (p.token_tags[p.tok_i + 1]) {
- .Identifier => {
+ .keyword_comptime => switch (p.token_tags[p.tok_i + 1]) {
+ .identifier => {
p.tok_i += 1;
const container_field = try p.expectContainerFieldRecoverable();
if (container_field != 0) {
@@ -221,12 +221,12 @@ const Parser = struct {
}
try list.append(container_field);
switch (p.token_tags[p.tok_i]) {
- .Comma => {
+ .comma => {
p.tok_i += 1;
trailing_comma = true;
continue;
},
- .RBrace, .Eof => {
+ .r_brace, .eof => {
trailing_comma = false;
break;
},
@@ -235,12 +235,12 @@ const Parser = struct {
// There is not allowed to be a decl after a field with no comma.
// Report error but recover parser.
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
p.findNextContainerMember();
}
},
- .LBrace => {
+ .l_brace => {
const comptime_token = p.nextToken();
const block = p.parseBlock() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
@@ -270,7 +270,7 @@ const Parser = struct {
try p.warn(.{ .ExpectedBlockOrField = .{ .token = p.tok_i } });
},
},
- .Keyword_pub => {
+ .keyword_pub => {
p.tok_i += 1;
const top_level_decl = try p.expectTopLevelDeclRecoverable();
if (top_level_decl != 0) {
@@ -281,7 +281,7 @@ const Parser = struct {
}
trailing_comma = false;
},
- .Keyword_usingnamespace => {
+ .keyword_usingnamespace => {
const node = try p.expectUsingNamespaceRecoverable();
if (node != 0) {
if (field_state == .seen) {
@@ -291,14 +291,14 @@ const Parser = struct {
}
trailing_comma = false;
},
- .Keyword_const,
- .Keyword_var,
- .Keyword_threadlocal,
- .Keyword_export,
- .Keyword_extern,
- .Keyword_inline,
- .Keyword_noinline,
- .Keyword_fn,
+ .keyword_const,
+ .keyword_var,
+ .keyword_threadlocal,
+ .keyword_export,
+ .keyword_extern,
+ .keyword_inline,
+ .keyword_noinline,
+ .keyword_fn,
=> {
const top_level_decl = try p.expectTopLevelDeclRecoverable();
if (top_level_decl != 0) {
@@ -309,7 +309,7 @@ const Parser = struct {
}
trailing_comma = false;
},
- .Identifier => {
+ .identifier => {
const container_field = try p.expectContainerFieldRecoverable();
if (container_field != 0) {
switch (field_state) {
@@ -325,12 +325,12 @@ const Parser = struct {
}
try list.append(container_field);
switch (p.token_tags[p.tok_i]) {
- .Comma => {
+ .comma => {
p.tok_i += 1;
trailing_comma = true;
continue;
},
- .RBrace, .Eof => {
+ .r_brace, .eof => {
trailing_comma = false;
break;
},
@@ -339,12 +339,12 @@ const Parser = struct {
// There is not allowed to be a decl after a field with no comma.
// Report error but recover parser.
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
p.findNextContainerMember();
}
},
- .Eof, .RBrace => {
+ .eof, .r_brace => {
if (doc_comment) |tok| {
try p.warn(.{ .UnattachedDocComment = .{ .token = tok } });
}
@@ -396,36 +396,36 @@ const Parser = struct {
const tok = p.nextToken();
switch (p.token_tags[tok]) {
// any of these can start a new top level declaration
- .Keyword_test,
- .Keyword_comptime,
- .Keyword_pub,
- .Keyword_export,
- .Keyword_extern,
- .Keyword_inline,
- .Keyword_noinline,
- .Keyword_usingnamespace,
- .Keyword_threadlocal,
- .Keyword_const,
- .Keyword_var,
- .Keyword_fn,
- .Identifier,
+ .keyword_test,
+ .keyword_comptime,
+ .keyword_pub,
+ .keyword_export,
+ .keyword_extern,
+ .keyword_inline,
+ .keyword_noinline,
+ .keyword_usingnamespace,
+ .keyword_threadlocal,
+ .keyword_const,
+ .keyword_var,
+ .keyword_fn,
+ .identifier,
=> {
if (level == 0) {
p.tok_i -= 1;
return;
}
},
- .Comma, .Semicolon => {
+ .comma, .semicolon => {
// this decl was likely meant to end here
if (level == 0) {
return;
}
},
- .LParen, .LBracket, .LBrace => level += 1,
- .RParen, .RBracket => {
+ .l_paren, .l_bracket, .l_brace => level += 1,
+ .r_paren, .r_bracket => {
if (level != 0) level -= 1;
},
- .RBrace => {
+ .r_brace => {
if (level == 0) {
// end of container, exit
p.tok_i -= 1;
@@ -433,7 +433,7 @@ const Parser = struct {
}
level -= 1;
},
- .Eof => {
+ .eof => {
p.tok_i -= 1;
return;
},
@@ -448,20 +448,20 @@ const Parser = struct {
while (true) {
const tok = p.nextToken();
switch (p.token_tags[tok]) {
- .LBrace => level += 1,
- .RBrace => {
+ .l_brace => level += 1,
+ .r_brace => {
if (level == 0) {
p.tok_i -= 1;
return;
}
level -= 1;
},
- .Semicolon => {
+ .semicolon => {
if (level == 0) {
return;
}
},
- .Eof => {
+ .eof => {
p.tok_i -= 1;
return;
},
@@ -472,8 +472,8 @@ const Parser = struct {
/// TestDecl <- KEYWORD_test STRINGLITERALSINGLE? Block
fn expectTestDecl(p: *Parser) !Node.Index {
- const test_token = p.assertToken(.Keyword_test);
- const name_token = p.eatToken(.StringLiteral);
+ const test_token = p.assertToken(.keyword_test);
+ const name_token = p.eatToken(.string_literal);
const block_node = try p.parseBlock();
if (block_node == 0) return p.fail(.{ .ExpectedLBrace = .{ .token = p.tok_i } });
return p.addNode(.{
@@ -505,15 +505,15 @@ const Parser = struct {
var expect_fn: bool = false;
var exported: bool = false;
switch (p.token_tags[extern_export_inline_token]) {
- .Keyword_extern => _ = p.eatToken(.StringLiteral),
- .Keyword_export => exported = true,
- .Keyword_inline, .Keyword_noinline => expect_fn = true,
+ .keyword_extern => _ = p.eatToken(.string_literal),
+ .keyword_export => exported = true,
+ .keyword_inline, .keyword_noinline => expect_fn = true,
else => p.tok_i -= 1,
}
const fn_proto = try p.parseFnProto();
if (fn_proto != 0) {
switch (p.token_tags[p.tok_i]) {
- .Semicolon => {
+ .semicolon => {
const semicolon_token = p.nextToken();
try p.parseAppendedDocComment(semicolon_token);
return p.addNode(.{
@@ -525,7 +525,7 @@ const Parser = struct {
},
});
},
- .LBrace => {
+ .l_brace => {
const body_block = try p.parseBlock();
assert(body_block != 0);
return p.addNode(.{
@@ -553,10 +553,10 @@ const Parser = struct {
return error.ParseError;
}
- const thread_local_token = p.eatToken(.Keyword_threadlocal);
+ const thread_local_token = p.eatToken(.keyword_threadlocal);
const var_decl = try p.parseVarDecl();
if (var_decl != 0) {
- const semicolon_token = try p.expectToken(.Semicolon);
+ const semicolon_token = try p.expectToken(.semicolon);
try p.parseAppendedDocComment(semicolon_token);
return var_decl;
}
@@ -582,9 +582,9 @@ const Parser = struct {
}
fn expectUsingNamespace(p: *Parser) !Node.Index {
- const usingnamespace_token = try p.expectToken(.Keyword_usingnamespace);
+ const usingnamespace_token = try p.expectToken(.keyword_usingnamespace);
const expr = try p.expectExpr();
- const semicolon_token = try p.expectToken(.Semicolon);
+ const semicolon_token = try p.expectToken(.semicolon);
try p.parseAppendedDocComment(semicolon_token);
return p.addNode(.{
.tag = .UsingNamespace,
@@ -608,14 +608,14 @@ const Parser = struct {
/// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? CallConv? EXCLAMATIONMARK? (Keyword_anytype / TypeExpr)
fn parseFnProto(p: *Parser) !Node.Index {
- const fn_token = p.eatToken(.Keyword_fn) orelse return null_node;
- _ = p.eatToken(.Identifier);
+ const fn_token = p.eatToken(.keyword_fn) orelse return null_node;
+ _ = p.eatToken(.identifier);
const params = try p.parseParamDeclList();
defer params.deinit(p.gpa);
const align_expr = try p.parseByteAlign();
const section_expr = try p.parseLinkSection();
const callconv_expr = try p.parseCallconv();
- const bang_token = p.eatToken(.Bang);
+ const bang_token = p.eatToken(.bang);
const return_type_expr = try p.parseTypeExpr();
if (return_type_expr == 0) {
@@ -686,15 +686,15 @@ const Parser = struct {
/// VarDecl <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? LinkSection? (EQUAL Expr)? SEMICOLON
fn parseVarDecl(p: *Parser) !Node.Index {
- const mut_token = p.eatToken(.Keyword_const) orelse
- p.eatToken(.Keyword_var) orelse
+ const mut_token = p.eatToken(.keyword_const) orelse
+ p.eatToken(.keyword_var) orelse
return null_node;
- _ = try p.expectToken(.Identifier);
- const type_node: Node.Index = if (p.eatToken(.Colon) == null) 0 else try p.expectTypeExpr();
+ _ = try p.expectToken(.identifier);
+ const type_node: Node.Index = if (p.eatToken(.colon) == null) 0 else try p.expectTypeExpr();
const align_node = try p.parseByteAlign();
const section_node = try p.parseLinkSection();
- const init_node: Node.Index = if (p.eatToken(.Equal) == null) 0 else try p.expectExpr();
+ const init_node: Node.Index = if (p.eatToken(.equal) == null) 0 else try p.expectExpr();
if (section_node == 0) {
if (align_node == 0) {
return p.addNode(.{
@@ -745,13 +745,13 @@ const Parser = struct {
/// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)?
fn expectContainerField(p: *Parser) !Node.Index {
- const comptime_token = p.eatToken(.Keyword_comptime);
- const name_token = p.assertToken(.Identifier);
+ const comptime_token = p.eatToken(.keyword_comptime);
+ const name_token = p.assertToken(.identifier);
var align_expr: Node.Index = 0;
var type_expr: Node.Index = 0;
- if (p.eatToken(.Colon)) |_| {
- if (p.eatToken(.Keyword_anytype)) |anytype_tok| {
+ if (p.eatToken(.colon)) |_| {
+ if (p.eatToken(.keyword_anytype)) |anytype_tok| {
type_expr = try p.addNode(.{
.tag = .AnyType,
.main_token = anytype_tok,
@@ -766,7 +766,7 @@ const Parser = struct {
}
}
- const value_expr: Node.Index = if (p.eatToken(.Equal) == null) 0 else try p.expectExpr();
+ const value_expr: Node.Index = if (p.eatToken(.equal) == null) 0 else try p.expectExpr();
if (align_expr == 0) {
return p.addNode(.{
@@ -823,11 +823,11 @@ const Parser = struct {
/// / SwitchExpr
/// / AssignExpr SEMICOLON
fn parseStatement(p: *Parser) Error!Node.Index {
- const comptime_token = p.eatToken(.Keyword_comptime);
+ const comptime_token = p.eatToken(.keyword_comptime);
const var_decl = try p.parseVarDecl();
if (var_decl != 0) {
- _ = try p.expectTokenRecoverable(.Semicolon);
+ _ = try p.expectTokenRecoverable(.semicolon);
return var_decl;
}
@@ -843,7 +843,7 @@ const Parser = struct {
}
switch (p.token_tags[p.tok_i]) {
- .Keyword_nosuspend => {
+ .keyword_nosuspend => {
return p.addNode(.{
.tag = .Nosuspend,
.main_token = p.nextToken(),
@@ -853,9 +853,9 @@ const Parser = struct {
},
});
},
- .Keyword_suspend => {
+ .keyword_suspend => {
const token = p.nextToken();
- const block_expr: Node.Index = if (p.eatToken(.Semicolon) != null)
+ const block_expr: Node.Index = if (p.eatToken(.semicolon) != null)
0
else
try p.expectBlockExprStatement();
@@ -868,7 +868,7 @@ const Parser = struct {
},
});
},
- .Keyword_defer => return p.addNode(.{
+ .keyword_defer => return p.addNode(.{
.tag = .Defer,
.main_token = p.nextToken(),
.data = .{
@@ -876,7 +876,7 @@ const Parser = struct {
.rhs = try p.expectBlockExprStatement(),
},
}),
- .Keyword_errdefer => return p.addNode(.{
+ .keyword_errdefer => return p.addNode(.{
.tag = .ErrDefer,
.main_token = p.nextToken(),
.data = .{
@@ -884,8 +884,8 @@ const Parser = struct {
.rhs = try p.expectBlockExprStatement(),
},
}),
- .Keyword_switch => return p.expectSwitchExpr(),
- .Keyword_if => return p.expectIfStatement(),
+ .keyword_switch => return p.expectSwitchExpr(),
+ .keyword_if => return p.expectIfStatement(),
else => {},
}
@@ -894,7 +894,7 @@ const Parser = struct {
const assign_expr = try p.parseAssignExpr();
if (assign_expr != 0) {
- _ = try p.expectTokenRecoverable(.Semicolon);
+ _ = try p.expectTokenRecoverable(.semicolon);
return assign_expr;
}
@@ -918,7 +918,7 @@ const Parser = struct {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextStmt(); // Try to skip to the next statement.
- if (p.token_tags[p.tok_i] == .RBrace) return null_node;
+ if (p.token_tags[p.tok_i] == .r_brace) return null_node;
continue;
},
};
@@ -929,10 +929,10 @@ const Parser = struct {
/// <- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )?
/// / IfPrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
fn expectIfStatement(p: *Parser) !Node.Index {
- const if_token = p.assertToken(.Keyword_if);
- _ = try p.expectToken(.LParen);
+ const if_token = p.assertToken(.keyword_if);
+ _ = try p.expectToken(.l_paren);
const condition = try p.expectExpr();
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
const then_payload = try p.parsePtrPayload();
// TODO propose to change the syntax so that semicolons are always required
@@ -945,7 +945,7 @@ const Parser = struct {
if (assign_expr == 0) {
return p.fail(.{ .ExpectedBlockOrAssignment = .{ .token = p.tok_i } });
}
- if (p.eatToken(.Semicolon)) |_| {
+ if (p.eatToken(.semicolon)) |_| {
return p.addNode(.{
.tag = .IfSimple,
.main_token = if_token,
@@ -958,7 +958,7 @@ const Parser = struct {
else_required = true;
break :blk assign_expr;
};
- const else_token = p.eatToken(.Keyword_else) orelse {
+ const else_token = p.eatToken(.keyword_else) orelse {
if (else_required) {
return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } });
}
@@ -1004,7 +1004,7 @@ const Parser = struct {
/// LoopStatement <- KEYWORD_inline? (ForStatement / WhileStatement)
fn parseLoopStatement(p: *Parser) !Node.Index {
- const inline_token = p.eatToken(.Keyword_inline);
+ const inline_token = p.eatToken(.keyword_inline);
const for_statement = try p.parseForStatement();
if (for_statement != 0) return for_statement;
@@ -1023,10 +1023,10 @@ const Parser = struct {
/// <- ForPrefix BlockExpr ( KEYWORD_else Statement )?
/// / ForPrefix AssignExpr ( SEMICOLON / KEYWORD_else Statement )
fn parseForStatement(p: *Parser) !Node.Index {
- const for_token = p.eatToken(.Keyword_for) orelse return null_node;
- _ = try p.expectToken(.LParen);
+ const for_token = p.eatToken(.keyword_for) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
const array_expr = try p.expectExpr();
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
_ = try p.parsePtrIndexPayload();
// TODO propose to change the syntax so that semicolons are always required
@@ -1039,7 +1039,7 @@ const Parser = struct {
if (assign_expr == 0) {
return p.fail(.{ .ExpectedBlockOrAssignment = .{ .token = p.tok_i } });
}
- if (p.eatToken(.Semicolon)) |_| {
+ if (p.eatToken(.semicolon)) |_| {
return p.addNode(.{
.tag = .ForSimple,
.main_token = for_token,
@@ -1052,7 +1052,7 @@ const Parser = struct {
else_required = true;
break :blk assign_expr;
};
- const else_token = p.eatToken(.Keyword_else) orelse {
+ const else_token = p.eatToken(.keyword_else) orelse {
if (else_required) {
return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } });
}
@@ -1083,10 +1083,10 @@ const Parser = struct {
/// <- WhilePrefix BlockExpr ( KEYWORD_else Payload? Statement )?
/// / WhilePrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
fn parseWhileStatement(p: *Parser) !Node.Index {
- const while_token = p.eatToken(.Keyword_while) orelse return null_node;
- _ = try p.expectToken(.LParen);
+ const while_token = p.eatToken(.keyword_while) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
const condition = try p.expectExpr();
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
const then_payload = try p.parsePtrPayload();
const cont_expr = try p.parseWhileContinueExpr();
@@ -1100,7 +1100,7 @@ const Parser = struct {
if (assign_expr == 0) {
return p.fail(.{ .ExpectedBlockOrAssignment = .{ .token = p.tok_i } });
}
- if (p.eatToken(.Semicolon)) |_| {
+ if (p.eatToken(.semicolon)) |_| {
if (cont_expr == 0) {
return p.addNode(.{
.tag = .WhileSimple,
@@ -1127,7 +1127,7 @@ const Parser = struct {
else_required = true;
break :blk assign_expr;
};
- const else_token = p.eatToken(.Keyword_else) orelse {
+ const else_token = p.eatToken(.keyword_else) orelse {
if (else_required) {
return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } });
}
@@ -1180,7 +1180,7 @@ const Parser = struct {
}
const assign_expr = try p.parseAssignExpr();
if (assign_expr != 0) {
- _ = try p.expectTokenRecoverable(.Semicolon);
+ _ = try p.expectTokenRecoverable(.semicolon);
return assign_expr;
}
return null_node;
@@ -1197,9 +1197,9 @@ const Parser = struct {
/// BlockExpr <- BlockLabel? Block
fn parseBlockExpr(p: *Parser) Error!Node.Index {
switch (p.token_tags[p.tok_i]) {
- .Identifier => {
- if (p.token_tags[p.tok_i + 1] == .Colon and
- p.token_tags[p.tok_i + 2] == .LBrace)
+ .identifier => {
+ if (p.token_tags[p.tok_i + 1] == .colon and
+ p.token_tags[p.tok_i + 2] == .l_brace)
{
p.tok_i += 2;
return p.parseBlock();
@@ -1207,7 +1207,7 @@ const Parser = struct {
return null_node;
}
},
- .LBrace => return p.parseBlock(),
+ .l_brace => return p.parseBlock(),
else => return null_node,
}
}
@@ -1233,20 +1233,20 @@ const Parser = struct {
if (expr == 0) return null_node;
const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
- .AsteriskEqual => .AssignMul,
- .SlashEqual => .AssignDiv,
- .PercentEqual => .AssignMod,
- .PlusEqual => .AssignAdd,
- .MinusEqual => .AssignSub,
- .AngleBracketAngleBracketLeftEqual => .AssignBitShiftLeft,
- .AngleBracketAngleBracketRightEqual => .AssignBitShiftRight,
- .AmpersandEqual => .AssignBitAnd,
- .CaretEqual => .AssignBitXor,
- .PipeEqual => .AssignBitOr,
- .AsteriskPercentEqual => .AssignMulWrap,
- .PlusPercentEqual => .AssignAddWrap,
- .MinusPercentEqual => .AssignSubWrap,
- .Equal => .Assign,
+ .asterisk_equal => .AssignMul,
+ .slash_equal => .AssignDiv,
+ .percent_equal => .AssignMod,
+ .plus_equal => .AssignAdd,
+ .minus_equal => .AssignSub,
+ .angle_bracket_angle_bracket_left_equal => .AssignBitShiftLeft,
+ .angle_bracket_angle_bracket_right_equal => .AssignBitShiftRight,
+ .ampersand_equal => .AssignBitAnd,
+ .caret_equal => .AssignBitXor,
+ .pipe_equal => .AssignBitOr,
+ .asterisk_percent_equal => .AssignMulWrap,
+ .plus_percent_equal => .AssignAddWrap,
+ .minus_percent_equal => .AssignSubWrap,
+ .equal => .Assign,
else => return expr,
};
return p.addNode(.{
@@ -1288,7 +1288,7 @@ const Parser = struct {
while (true) {
switch (p.token_tags[p.tok_i]) {
- .Keyword_or => {
+ .keyword_or => {
const or_token = p.nextToken();
const rhs = try p.parseBoolAndExpr();
if (rhs == 0) {
@@ -1315,7 +1315,7 @@ const Parser = struct {
while (true) {
switch (p.token_tags[p.tok_i]) {
- .Keyword_and => {
+ .keyword_and => {
const and_token = p.nextToken();
const rhs = try p.parseCompareExpr();
if (rhs == 0) {
@@ -1348,12 +1348,12 @@ const Parser = struct {
if (expr == 0) return null_node;
const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
- .EqualEqual => .EqualEqual,
- .BangEqual => .BangEqual,
- .AngleBracketLeft => .LessThan,
- .AngleBracketRight => .GreaterThan,
- .AngleBracketLeftEqual => .LessOrEqual,
- .AngleBracketRightEqual => .GreaterOrEqual,
+ .equal_equal => .EqualEqual,
+ .bang_equal => .BangEqual,
+ .angle_bracket_left => .LessThan,
+ .angle_bracket_right => .GreaterThan,
+ .angle_bracket_left_equal => .LessOrEqual,
+ .angle_bracket_right_equal => .GreaterOrEqual,
else => return expr,
};
return p.addNode(.{
@@ -1379,11 +1379,11 @@ const Parser = struct {
while (true) {
const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
- .Ampersand => .BitAnd,
- .Caret => .BitXor,
- .Pipe => .BitOr,
- .Keyword_orelse => .OrElse,
- .Keyword_catch => {
+ .ampersand => .BitAnd,
+ .caret => .BitXor,
+ .pipe => .BitOr,
+ .keyword_orelse => .OrElse,
+ .keyword_catch => {
const catch_token = p.nextToken();
_ = try p.parsePayload();
const rhs = try p.parseBitShiftExpr();
@@ -1432,8 +1432,8 @@ const Parser = struct {
while (true) {
const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
- .AngleBracketAngleBracketLeft => .BitShiftLeft,
- .AngleBracketAngleBracketRight => .BitShiftRight,
+ .angle_bracket_angle_bracket_left => .BitShiftLeft,
+ .angle_bracket_angle_bracket_right => .BitShiftRight,
else => return res,
};
res = try p.addNode(.{
@@ -1469,11 +1469,11 @@ const Parser = struct {
while (true) {
const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
- .Plus => .Add,
- .Minus => .Sub,
- .PlusPlus => .ArrayCat,
- .PlusPercent => .AddWrap,
- .MinusPercent => .SubWrap,
+ .plus => .Add,
+ .minus => .Sub,
+ .plus_plus => .ArrayCat,
+ .plus_percent => .AddWrap,
+ .minus_percent => .SubWrap,
else => return res,
};
res = try p.addNode(.{
@@ -1509,12 +1509,12 @@ const Parser = struct {
while (true) {
const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
- .PipePipe => .MergeErrorSets,
- .Asterisk => .Mul,
- .Slash => .Div,
- .Percent => .Mod,
- .AsteriskAsterisk => .ArrayMult,
- .AsteriskPercent => .MulWrap,
+ .pipe_pipe => .MergeErrorSets,
+ .asterisk => .Mul,
+ .slash => .Div,
+ .percent => .Mod,
+ .asterisk_asterisk => .ArrayMult,
+ .asterisk_percent => .MulWrap,
else => return res,
};
res = try p.addNode(.{
@@ -1547,13 +1547,13 @@ const Parser = struct {
/// / KEYWORD_await
fn parsePrefixExpr(p: *Parser) Error!Node.Index {
const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
- .Bang => .BoolNot,
- .Minus => .Negation,
- .Tilde => .BitNot,
- .MinusPercent => .NegationWrap,
- .Ampersand => .AddressOf,
- .Keyword_try => .Try,
- .Keyword_await => .Await,
+ .bang => .BoolNot,
+ .minus => .Negation,
+ .tilde => .BitNot,
+ .minus_percent => .NegationWrap,
+ .ampersand => .AddressOf,
+ .keyword_try => .Try,
+ .keyword_await => .Await,
else => return p.parsePrimaryExpr(),
};
return p.addNode(.{
@@ -1587,7 +1587,7 @@ const Parser = struct {
/// ArrayTypeStart <- LBRACKET Expr? (COLON Expr)? RBRACKET
fn parseTypeExpr(p: *Parser) Error!Node.Index {
switch (p.token_tags[p.tok_i]) {
- .QuestionMark => return p.addNode(.{
+ .question_mark => return p.addNode(.{
.tag = .OptionalType,
.main_token = p.nextToken(),
.data = .{
@@ -1595,8 +1595,8 @@ const Parser = struct {
.rhs = undefined,
},
}),
- .Keyword_anyframe => switch (p.token_tags[p.tok_i + 1]) {
- .Arrow => return p.addNode(.{
+ .keyword_anyframe => switch (p.token_tags[p.tok_i + 1]) {
+ .arrow => return p.addNode(.{
.tag = .AnyFrameType,
.main_token = p.nextToken(),
.data = .{
@@ -1606,7 +1606,7 @@ const Parser = struct {
}),
else => return p.parseErrorUnionExpr(),
},
- .Asterisk => {
+ .asterisk => {
const asterisk = p.nextToken();
const mods = try p.parsePtrModifiers();
const elem_type = try p.expectTypeExpr();
@@ -1635,7 +1635,7 @@ const Parser = struct {
});
}
},
- .AsteriskAsterisk => {
+ .asterisk_asterisk => {
const asterisk = p.nextToken();
const mods = try p.parsePtrModifiers();
const elem_type = try p.expectTypeExpr();
@@ -1674,13 +1674,13 @@ const Parser = struct {
},
});
},
- .LBracket => switch (p.token_tags[p.tok_i + 1]) {
- .Asterisk => {
+ .l_bracket => switch (p.token_tags[p.tok_i + 1]) {
+ .asterisk => {
const lbracket = p.nextToken();
const asterisk = p.nextToken();
var sentinel: Node.Index = 0;
prefix: {
- if (p.eatToken(.Identifier)) |ident| {
+ if (p.eatToken(.identifier)) |ident| {
const token_slice = p.source[p.token_starts[ident]..][0..2];
if (!std.mem.eql(u8, token_slice, "c]")) {
p.tok_i -= 1;
@@ -1688,11 +1688,11 @@ const Parser = struct {
break :prefix;
}
}
- if (p.eatToken(.Colon)) |_| {
+ if (p.eatToken(.colon)) |_| {
sentinel = try p.expectExpr();
}
}
- _ = try p.expectToken(.RBracket);
+ _ = try p.expectToken(.r_bracket);
const mods = try p.parsePtrModifiers();
const elem_type = try p.expectTypeExpr();
if (mods.bit_range_start == 0) {
@@ -1746,11 +1746,11 @@ const Parser = struct {
else => {
const lbracket = p.nextToken();
const len_expr = try p.parseExpr();
- const sentinel: Node.Index = if (p.eatToken(.Colon)) |_|
+ const sentinel: Node.Index = if (p.eatToken(.colon)) |_|
try p.expectExpr()
else
0;
- _ = try p.expectToken(.RBracket);
+ _ = try p.expectToken(.r_bracket);
const mods = try p.parsePtrModifiers();
const elem_type = try p.expectTypeExpr();
if (mods.bit_range_start != 0) {
@@ -1849,9 +1849,9 @@ const Parser = struct {
/// / CurlySuffixExpr
fn parsePrimaryExpr(p: *Parser) !Node.Index {
switch (p.token_tags[p.tok_i]) {
- .Keyword_asm => return p.expectAsmExpr(),
- .Keyword_if => return p.parseIfExpr(),
- .Keyword_break => {
+ .keyword_asm => return p.expectAsmExpr(),
+ .keyword_if => return p.parseIfExpr(),
+ .keyword_break => {
p.tok_i += 1;
return p.addNode(.{
.tag = .Break,
@@ -1862,7 +1862,7 @@ const Parser = struct {
},
});
},
- .Keyword_continue => {
+ .keyword_continue => {
p.tok_i += 1;
return p.addNode(.{
.tag = .Continue,
@@ -1873,7 +1873,7 @@ const Parser = struct {
},
});
},
- .Keyword_comptime => {
+ .keyword_comptime => {
p.tok_i += 1;
return p.addNode(.{
.tag = .Comptime,
@@ -1884,7 +1884,7 @@ const Parser = struct {
},
});
},
- .Keyword_nosuspend => {
+ .keyword_nosuspend => {
p.tok_i += 1;
return p.addNode(.{
.tag = .Nosuspend,
@@ -1895,7 +1895,7 @@ const Parser = struct {
},
});
},
- .Keyword_resume => {
+ .keyword_resume => {
p.tok_i += 1;
return p.addNode(.{
.tag = .Resume,
@@ -1906,7 +1906,7 @@ const Parser = struct {
},
});
},
- .Keyword_return => {
+ .keyword_return => {
p.tok_i += 1;
return p.addNode(.{
.tag = .Return,
@@ -1917,28 +1917,28 @@ const Parser = struct {
},
});
},
- .Identifier => {
- if (p.token_tags[p.tok_i + 1] == .Colon) {
+ .identifier => {
+ if (p.token_tags[p.tok_i + 1] == .colon) {
switch (p.token_tags[p.tok_i + 2]) {
- .Keyword_inline => {
+ .keyword_inline => {
p.tok_i += 3;
switch (p.token_tags[p.tok_i]) {
- .Keyword_for => return p.parseForExpr(),
- .Keyword_while => return p.parseWhileExpr(),
+ .keyword_for => return p.parseForExpr(),
+ .keyword_while => return p.parseWhileExpr(),
else => return p.fail(.{
.ExpectedInlinable = .{ .token = p.tok_i },
}),
}
},
- .Keyword_for => {
+ .keyword_for => {
p.tok_i += 2;
return p.parseForExpr();
},
- .Keyword_while => {
+ .keyword_while => {
p.tok_i += 2;
return p.parseWhileExpr();
},
- .LBrace => {
+ .l_brace => {
p.tok_i += 2;
return p.parseBlock();
},
@@ -1948,19 +1948,19 @@ const Parser = struct {
return p.parseCurlySuffixExpr();
}
},
- .Keyword_inline => {
+ .keyword_inline => {
p.tok_i += 2;
switch (p.token_tags[p.tok_i]) {
- .Keyword_for => return p.parseForExpr(),
- .Keyword_while => return p.parseWhileExpr(),
+ .keyword_for => return p.parseForExpr(),
+ .keyword_while => return p.parseWhileExpr(),
else => return p.fail(.{
.ExpectedInlinable = .{ .token = p.tok_i },
}),
}
},
- .Keyword_for => return p.parseForExpr(),
- .Keyword_while => return p.parseWhileExpr(),
- .LBrace => return p.parseBlock(),
+ .keyword_for => return p.parseForExpr(),
+ .keyword_while => return p.parseWhileExpr(),
+ .l_brace => return p.parseBlock(),
else => return p.parseCurlySuffixExpr(),
}
}
@@ -1972,9 +1972,9 @@ const Parser = struct {
/// Block <- LBRACE Statement* RBRACE
fn parseBlock(p: *Parser) !Node.Index {
- const lbrace = p.eatToken(.LBrace) orelse return null_node;
+ const lbrace = p.eatToken(.l_brace) orelse return null_node;
- if (p.eatToken(.RBrace)) |_| {
+ if (p.eatToken(.r_brace)) |_| {
return p.addNode(.{
.tag = .BlockTwo,
.main_token = lbrace,
@@ -1986,8 +1986,8 @@ const Parser = struct {
}
const stmt_one = try p.expectStatementRecoverable();
- if (p.eatToken(.RBrace)) |_| {
- const semicolon = p.token_tags[p.tok_i - 2] == .Semicolon;
+ if (p.eatToken(.r_brace)) |_| {
+ const semicolon = p.token_tags[p.tok_i - 2] == .semicolon;
return p.addNode(.{
.tag = if (semicolon) .BlockTwoSemicolon else .BlockTwo,
.main_token = lbrace,
@@ -1998,8 +1998,8 @@ const Parser = struct {
});
}
const stmt_two = try p.expectStatementRecoverable();
- if (p.eatToken(.RBrace)) |_| {
- const semicolon = p.token_tags[p.tok_i - 2] == .Semicolon;
+ if (p.eatToken(.r_brace)) |_| {
+ const semicolon = p.token_tags[p.tok_i - 2] == .semicolon;
return p.addNode(.{
.tag = if (semicolon) .BlockTwoSemicolon else .BlockTwo,
.main_token = lbrace,
@@ -2013,16 +2013,16 @@ const Parser = struct {
var statements = std.ArrayList(Node.Index).init(p.gpa);
defer statements.deinit();
- try statements.appendSlice(&[_]Node.Index{ stmt_one, stmt_two });
+ try statements.appendSlice(&.{ stmt_one, stmt_two });
while (true) {
const statement = try p.expectStatementRecoverable();
if (statement == 0) break;
try statements.append(statement);
- if (p.token_tags[p.tok_i] == .RBrace) break;
+ if (p.token_tags[p.tok_i] == .r_brace) break;
}
- _ = try p.expectToken(.RBrace);
- const semicolon = p.token_tags[p.tok_i - 2] == .Semicolon;
+ _ = try p.expectToken(.r_brace);
+ const semicolon = p.token_tags[p.tok_i - 2] == .semicolon;
const statements_span = try p.listToSpan(statements.items);
return p.addNode(.{
.tag = if (semicolon) .BlockSemicolon else .Block,
@@ -2037,14 +2037,14 @@ const Parser = struct {
/// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
/// ForExpr <- ForPrefix Expr (KEYWORD_else Expr)?
fn parseForExpr(p: *Parser) !Node.Index {
- const for_token = p.eatToken(.Keyword_for) orelse return null_node;
- _ = try p.expectToken(.LParen);
+ const for_token = p.eatToken(.keyword_for) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
const array_expr = try p.expectExpr();
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
_ = try p.parsePtrIndexPayload();
const then_expr = try p.expectExpr();
- const else_token = p.eatToken(.Keyword_else) orelse {
+ const else_token = p.eatToken(.keyword_else) orelse {
return p.addNode(.{
.tag = .ForSimple,
.main_token = for_token,
@@ -2071,15 +2071,15 @@ const Parser = struct {
/// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
/// WhileExpr <- WhilePrefix Expr (KEYWORD_else Payload? Expr)?
fn parseWhileExpr(p: *Parser) !Node.Index {
- const while_token = p.eatToken(.Keyword_while) orelse return null_node;
- _ = try p.expectToken(.LParen);
+ const while_token = p.eatToken(.keyword_while) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
const condition = try p.expectExpr();
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
const then_payload = try p.parsePtrPayload();
const cont_expr = try p.parseWhileContinueExpr();
const then_expr = try p.expectExpr();
- const else_token = p.eatToken(.Keyword_else) orelse {
+ const else_token = p.eatToken(.keyword_else) orelse {
if (cont_expr == 0) {
return p.addNode(.{
.tag = .WhileSimple,
@@ -2127,12 +2127,12 @@ const Parser = struct {
fn parseCurlySuffixExpr(p: *Parser) !Node.Index {
const lhs = try p.parseTypeExpr();
if (lhs == 0) return null_node;
- const lbrace = p.eatToken(.LBrace) orelse return lhs;
+ const lbrace = p.eatToken(.l_brace) orelse return lhs;
// If there are 0 or 1 items, we can use ArrayInitOne/StructInitOne;
// otherwise we use the full ArrayInit/StructInit.
- if (p.eatToken(.RBrace)) |_| {
+ if (p.eatToken(.r_brace)) |_| {
return p.addNode(.{
.tag = .StructInitOne,
.main_token = lbrace,
@@ -2144,8 +2144,8 @@ const Parser = struct {
}
const field_init = try p.parseFieldInit();
if (field_init != 0) {
- const comma_one = p.eatToken(.Comma);
- if (p.eatToken(.RBrace)) |_| {
+ const comma_one = p.eatToken(.comma);
+ if (p.eatToken(.r_brace)) |_| {
return p.addNode(.{
.tag = if (comma_one != null) .StructInitOneComma else .StructInitOne,
.main_token = lbrace,
@@ -2166,17 +2166,17 @@ const Parser = struct {
try init_list.append(next);
switch (p.token_tags[p.nextToken()]) {
- .Comma => {
- if (p.eatToken(.RBrace)) |_| break;
+ .comma => {
+ if (p.eatToken(.r_brace)) |_| break;
continue;
},
- .RBrace => break,
- .Colon, .RParen, .RBracket => {
+ .r_brace => break,
+ .colon, .r_paren, .r_bracket => {
p.tok_i -= 1;
return p.fail(.{
.ExpectedToken = .{
.token = p.tok_i,
- .expected_id = .RBrace,
+ .expected_id = .r_brace,
},
});
},
@@ -2185,14 +2185,14 @@ const Parser = struct {
// give an error but continue parsing this list.
p.tok_i -= 1;
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
},
}
}
const span = try p.listToSpan(init_list.items);
return p.addNode(.{
- .tag = if (p.token_tags[p.tok_i - 2] == .Comma) .StructInitComma else .StructInit,
+ .tag = if (p.token_tags[p.tok_i - 2] == .comma) .StructInitComma else .StructInit,
.main_token = lbrace,
.data = .{
.lhs = lhs,
@@ -2205,8 +2205,8 @@ const Parser = struct {
}
const elem_init = try p.expectExpr();
- const comma_one = p.eatToken(.Comma);
- if (p.eatToken(.RBrace)) |_| {
+ const comma_one = p.eatToken(.comma);
+ if (p.eatToken(.r_brace)) |_| {
return p.addNode(.{
.tag = if (comma_one != null) .ArrayInitOneComma else .ArrayInitOne,
.main_token = lbrace,
@@ -2218,7 +2218,7 @@ const Parser = struct {
}
if (comma_one == null) {
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
}
@@ -2231,12 +2231,12 @@ const Parser = struct {
var next = try p.parseExpr();
while (next != 0) : (next = try p.parseExpr()) {
try init_list.append(next);
- if (p.eatToken(.Comma) == null) {
+ if (p.eatToken(.comma) == null) {
trailing_comma = false;
break;
}
}
- _ = try p.expectToken(.RBrace);
+ _ = try p.expectToken(.r_brace);
const span = try p.listToSpan(init_list.items);
return p.addNode(.{
.tag = if (trailing_comma) .ArrayInitComma else .ArrayInit,
@@ -2255,7 +2255,7 @@ const Parser = struct {
fn parseErrorUnionExpr(p: *Parser) !Node.Index {
const suffix_expr = try p.parseSuffixExpr();
if (suffix_expr == 0) return null_node;
- const bang = p.eatToken(.Bang) orelse return suffix_expr;
+ const bang = p.eatToken(.bang) orelse return suffix_expr;
return p.addNode(.{
.tag = .ErrorUnion,
.main_token = bang,
@@ -2272,7 +2272,7 @@ const Parser = struct {
/// FnCallArguments <- LPAREN ExprList RPAREN
/// ExprList <- (Expr COMMA)* Expr?
fn parseSuffixExpr(p: *Parser) !Node.Index {
- if (p.eatToken(.Keyword_async)) |async_token| {
+ if (p.eatToken(.keyword_async)) |async_token| {
var res = try p.expectPrimaryTypeExpr();
while (true) {
@@ -2280,11 +2280,11 @@ const Parser = struct {
if (node == 0) break;
res = node;
}
- const lparen = (try p.expectTokenRecoverable(.LParen)) orelse {
+ const lparen = (try p.expectTokenRecoverable(.l_paren)) orelse {
try p.warn(.{ .ExpectedParamList = .{ .token = p.tok_i } });
return res;
};
- if (p.eatToken(.RParen)) |_| {
+ if (p.eatToken(.r_paren)) |_| {
return p.addNode(.{
.tag = .AsyncCallOne,
.main_token = lparen,
@@ -2295,8 +2295,8 @@ const Parser = struct {
});
}
const param_one = try p.expectExpr();
- const comma_one = p.eatToken(.Comma);
- if (p.eatToken(.RParen)) |_| {
+ const comma_one = p.eatToken(.comma);
+ if (p.eatToken(.r_paren)) |_| {
return p.addNode(.{
.tag = if (comma_one == null) .AsyncCallOne else .AsyncCallOneComma,
.main_token = lparen,
@@ -2308,7 +2308,7 @@ const Parser = struct {
}
if (comma_one == null) {
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
}
@@ -2321,8 +2321,8 @@ const Parser = struct {
const next = try p.expectExpr();
try param_list.append(next);
switch (p.token_tags[p.nextToken()]) {
- .Comma => {
- if (p.eatToken(.RParen)) |_| {
+ .comma => {
+ if (p.eatToken(.r_paren)) |_| {
const span = try p.listToSpan(param_list.items);
return p.addNode(.{
.tag = .AsyncCallComma,
@@ -2339,7 +2339,7 @@ const Parser = struct {
continue;
}
},
- .RParen => {
+ .r_paren => {
const span = try p.listToSpan(param_list.items);
return p.addNode(.{
.tag = .AsyncCall,
@@ -2353,12 +2353,12 @@ const Parser = struct {
},
});
},
- .Colon, .RBrace, .RBracket => {
+ .colon, .r_brace, .r_bracket => {
p.tok_i -= 1;
return p.fail(.{
.ExpectedToken = .{
.token = p.tok_i,
- .expected_id = .RParen,
+ .expected_id = .r_paren,
},
});
},
@@ -2367,7 +2367,7 @@ const Parser = struct {
try p.warn(.{
.ExpectedToken = .{
.token = p.tok_i,
- .expected_id = .Comma,
+ .expected_id = .comma,
},
});
},
@@ -2384,8 +2384,8 @@ const Parser = struct {
continue;
}
res = res: {
- const lparen = p.eatToken(.LParen) orelse return res;
- if (p.eatToken(.RParen)) |_| {
+ const lparen = p.eatToken(.l_paren) orelse return res;
+ if (p.eatToken(.r_paren)) |_| {
break :res try p.addNode(.{
.tag = .CallOne,
.main_token = lparen,
@@ -2396,8 +2396,8 @@ const Parser = struct {
});
}
const param_one = try p.expectExpr();
- const comma_one = p.eatToken(.Comma);
- if (p.eatToken(.RParen)) |_| {
+ const comma_one = p.eatToken(.comma);
+ if (p.eatToken(.r_paren)) |_| {
break :res try p.addNode(.{
.tag = if (comma_one == null) .CallOne else .CallOneComma,
.main_token = lparen,
@@ -2409,7 +2409,7 @@ const Parser = struct {
}
if (comma_one == null) {
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
}
@@ -2422,8 +2422,8 @@ const Parser = struct {
const next = try p.expectExpr();
try param_list.append(next);
switch (p.token_tags[p.nextToken()]) {
- .Comma => {
- if (p.eatToken(.RParen)) |_| {
+ .comma => {
+ if (p.eatToken(.r_paren)) |_| {
const span = try p.listToSpan(param_list.items);
break :res try p.addNode(.{
.tag = .CallComma,
@@ -2440,7 +2440,7 @@ const Parser = struct {
continue;
}
},
- .RParen => {
+ .r_paren => {
const span = try p.listToSpan(param_list.items);
break :res try p.addNode(.{
.tag = .Call,
@@ -2454,12 +2454,12 @@ const Parser = struct {
},
});
},
- .Colon, .RBrace, .RBracket => {
+ .colon, .r_brace, .r_bracket => {
p.tok_i -= 1;
return p.fail(.{
.ExpectedToken = .{
.token = p.tok_i,
- .expected_id = .RParen,
+ .expected_id = .r_paren,
},
});
},
@@ -2468,7 +2468,7 @@ const Parser = struct {
try p.warn(.{
.ExpectedToken = .{
.token = p.tok_i,
- .expected_id = .Comma,
+ .expected_id = .comma,
},
});
},
@@ -2517,7 +2517,7 @@ const Parser = struct {
/// LoopTypeExpr <- KEYWORD_inline? (ForTypeExpr / WhileTypeExpr)
fn parsePrimaryTypeExpr(p: *Parser) !Node.Index {
switch (p.token_tags[p.tok_i]) {
- .CharLiteral => return p.addNode(.{
+ .char_literal => return p.addNode(.{
.tag = .CharLiteral,
.main_token = p.nextToken(),
.data = .{
@@ -2525,7 +2525,7 @@ const Parser = struct {
.rhs = undefined,
},
}),
- .IntegerLiteral => return p.addNode(.{
+ .integer_literal => return p.addNode(.{
.tag = .IntegerLiteral,
.main_token = p.nextToken(),
.data = .{
@@ -2533,7 +2533,7 @@ const Parser = struct {
.rhs = undefined,
},
}),
- .FloatLiteral => return p.addNode(.{
+ .float_literal => return p.addNode(.{
.tag = .FloatLiteral,
.main_token = p.nextToken(),
.data = .{
@@ -2541,7 +2541,7 @@ const Parser = struct {
.rhs = undefined,
},
}),
- .Keyword_false => return p.addNode(.{
+ .keyword_false => return p.addNode(.{
.tag = .FalseLiteral,
.main_token = p.nextToken(),
.data = .{
@@ -2549,7 +2549,7 @@ const Parser = struct {
.rhs = undefined,
},
}),
- .Keyword_true => return p.addNode(.{
+ .keyword_true => return p.addNode(.{
.tag = .TrueLiteral,
.main_token = p.nextToken(),
.data = .{
@@ -2557,7 +2557,7 @@ const Parser = struct {
.rhs = undefined,
},
}),
- .Keyword_null => return p.addNode(.{
+ .keyword_null => return p.addNode(.{
.tag = .NullLiteral,
.main_token = p.nextToken(),
.data = .{
@@ -2565,7 +2565,7 @@ const Parser = struct {
.rhs = undefined,
},
}),
- .Keyword_undefined => return p.addNode(.{
+ .keyword_undefined => return p.addNode(.{
.tag = .UndefinedLiteral,
.main_token = p.nextToken(),
.data = .{
@@ -2573,7 +2573,7 @@ const Parser = struct {
.rhs = undefined,
},
}),
- .Keyword_unreachable => return p.addNode(.{
+ .keyword_unreachable => return p.addNode(.{
.tag = .UnreachableLiteral,
.main_token = p.nextToken(),
.data = .{
@@ -2581,7 +2581,7 @@ const Parser = struct {
.rhs = undefined,
},
}),
- .Keyword_anyframe => return p.addNode(.{
+ .keyword_anyframe => return p.addNode(.{
.tag = .AnyFrameLiteral,
.main_token = p.nextToken(),
.data = .{
@@ -2589,7 +2589,7 @@ const Parser = struct {
.rhs = undefined,
},
}),
- .StringLiteral => {
+ .string_literal => {
const main_token = p.nextToken();
return p.addNode(.{
.tag = .StringLiteral,
@@ -2601,25 +2601,25 @@ const Parser = struct {
});
},
- .Builtin => return p.parseBuiltinCall(),
- .Keyword_fn => return p.parseFnProto(),
- .Keyword_if => return p.parseIf(parseTypeExpr),
- .Keyword_switch => return p.expectSwitchExpr(),
+ .builtin => return p.parseBuiltinCall(),
+ .keyword_fn => return p.parseFnProto(),
+ .keyword_if => return p.parseIf(parseTypeExpr),
+ .keyword_switch => return p.expectSwitchExpr(),
- .Keyword_extern,
- .Keyword_packed,
+ .keyword_extern,
+ .keyword_packed,
=> {
p.tok_i += 1;
return p.parseContainerDeclAuto();
},
- .Keyword_struct,
- .Keyword_opaque,
- .Keyword_enum,
- .Keyword_union,
+ .keyword_struct,
+ .keyword_opaque,
+ .keyword_enum,
+ .keyword_union,
=> return p.parseContainerDeclAuto(),
- .Keyword_comptime => return p.addNode(.{
+ .keyword_comptime => return p.addNode(.{
.tag = .Comptime,
.main_token = p.nextToken(),
.data = .{
@@ -2627,9 +2627,9 @@ const Parser = struct {
.rhs = undefined,
},
}),
- .MultilineStringLiteralLine => {
+ .multiline_string_literal_line => {
const first_line = p.nextToken();
- while (p.token_tags[p.tok_i] == .MultilineStringLiteralLine) {
+ while (p.token_tags[p.tok_i] == .multiline_string_literal_line) {
p.tok_i += 1;
}
return p.addNode(.{
@@ -2641,23 +2641,23 @@ const Parser = struct {
},
});
},
- .Identifier => switch (p.token_tags[p.tok_i + 1]) {
- .Colon => switch (p.token_tags[p.tok_i + 2]) {
- .Keyword_inline => {
+ .identifier => switch (p.token_tags[p.tok_i + 1]) {
+ .colon => switch (p.token_tags[p.tok_i + 2]) {
+ .keyword_inline => {
p.tok_i += 3;
switch (p.token_tags[p.tok_i]) {
- .Keyword_for => return p.parseForTypeExpr(),
- .Keyword_while => return p.parseWhileTypeExpr(),
+ .keyword_for => return p.parseForTypeExpr(),
+ .keyword_while => return p.parseWhileTypeExpr(),
else => return p.fail(.{
.ExpectedInlinable = .{ .token = p.tok_i },
}),
}
},
- .Keyword_for => {
+ .keyword_for => {
p.tok_i += 2;
return p.parseForTypeExpr();
},
- .Keyword_while => {
+ .keyword_while => {
p.tok_i += 2;
return p.parseWhileTypeExpr();
},
@@ -2679,8 +2679,8 @@ const Parser = struct {
},
}),
},
- .Period => switch (p.token_tags[p.tok_i + 1]) {
- .Identifier => return p.addNode(.{
+ .period => switch (p.token_tags[p.tok_i + 1]) {
+ .identifier => return p.addNode(.{
.tag = .EnumLiteral,
.data = .{
.lhs = p.nextToken(), // dot
@@ -2688,14 +2688,14 @@ const Parser = struct {
},
.main_token = p.nextToken(), // identifier
}),
- .LBrace => {
+ .l_brace => {
const lbrace = p.tok_i + 1;
p.tok_i = lbrace + 1;
// If there are 0, 1, or 2 items, we can use ArrayInitDotTwo/StructInitDotTwo;
// otherwise we use the full ArrayInitDot/StructInitDot.
- if (p.eatToken(.RBrace)) |_| {
+ if (p.eatToken(.r_brace)) |_| {
return p.addNode(.{
.tag = .StructInitDotTwo,
.main_token = lbrace,
@@ -2707,8 +2707,8 @@ const Parser = struct {
}
const field_init_one = try p.parseFieldInit();
if (field_init_one != 0) {
- const comma_one = p.eatToken(.Comma);
- if (p.eatToken(.RBrace)) |_| {
+ const comma_one = p.eatToken(.comma);
+ if (p.eatToken(.r_brace)) |_| {
return p.addNode(.{
.tag = if (comma_one != null) .StructInitDotTwoComma else .StructInitDotTwo,
.main_token = lbrace,
@@ -2720,12 +2720,12 @@ const Parser = struct {
}
if (comma_one == null) {
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
}
const field_init_two = try p.expectFieldInit();
- const comma_two = p.eatToken(.Comma);
- if (p.eatToken(.RBrace)) |_| {
+ const comma_two = p.eatToken(.comma);
+ if (p.eatToken(.r_brace)) |_| {
return p.addNode(.{
.tag = if (comma_two != null) .StructInitDotTwoComma else .StructInitDotTwo,
.main_token = lbrace,
@@ -2737,30 +2737,30 @@ const Parser = struct {
}
if (comma_two == null) {
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
}
var init_list = std.ArrayList(Node.Index).init(p.gpa);
defer init_list.deinit();
- try init_list.appendSlice(&[_]Node.Index{ field_init_one, field_init_two });
+ try init_list.appendSlice(&.{ field_init_one, field_init_two });
while (true) {
const next = try p.expectFieldInit();
assert(next != 0);
try init_list.append(next);
switch (p.token_tags[p.nextToken()]) {
- .Comma => {
- if (p.eatToken(.RBrace)) |_| break;
+ .comma => {
+ if (p.eatToken(.r_brace)) |_| break;
continue;
},
- .RBrace => break,
- .Colon, .RParen, .RBracket => {
+ .r_brace => break,
+ .colon, .r_paren, .r_bracket => {
p.tok_i -= 1;
return p.fail(.{
.ExpectedToken = .{
.token = p.tok_i,
- .expected_id = .RBrace,
+ .expected_id = .r_brace,
},
});
},
@@ -2769,14 +2769,14 @@ const Parser = struct {
try p.warn(.{
.ExpectedToken = .{
.token = p.tok_i,
- .expected_id = .Comma,
+ .expected_id = .comma,
},
});
},
}
}
const span = try p.listToSpan(init_list.items);
- const trailing_comma = p.token_tags[p.tok_i - 2] == .Comma;
+ const trailing_comma = p.token_tags[p.tok_i - 2] == .comma;
return p.addNode(.{
.tag = if (trailing_comma) .StructInitDotComma else .StructInitDot,
.main_token = lbrace,
@@ -2788,8 +2788,8 @@ const Parser = struct {
}
const elem_init_one = try p.expectExpr();
- const comma_one = p.eatToken(.Comma);
- if (p.eatToken(.RBrace)) |_| {
+ const comma_one = p.eatToken(.comma);
+ if (p.eatToken(.r_brace)) |_| {
return p.addNode(.{
.tag = if (comma_one != null) .ArrayInitDotTwoComma else .ArrayInitDotTwo,
.main_token = lbrace,
@@ -2801,12 +2801,12 @@ const Parser = struct {
}
if (comma_one == null) {
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
}
const elem_init_two = try p.expectExpr();
- const comma_two = p.eatToken(.Comma);
- if (p.eatToken(.RBrace)) |_| {
+ const comma_two = p.eatToken(.comma);
+ if (p.eatToken(.r_brace)) |_| {
return p.addNode(.{
.tag = if (comma_two != null) .ArrayInitDotTwoComma else .ArrayInitDotTwo,
.main_token = lbrace,
@@ -2818,30 +2818,30 @@ const Parser = struct {
}
if (comma_two == null) {
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
}
var init_list = std.ArrayList(Node.Index).init(p.gpa);
defer init_list.deinit();
- try init_list.appendSlice(&[_]Node.Index{ elem_init_one, elem_init_two });
+ try init_list.appendSlice(&.{ elem_init_one, elem_init_two });
while (true) {
const next = try p.expectExpr();
if (next == 0) break;
try init_list.append(next);
switch (p.token_tags[p.nextToken()]) {
- .Comma => {
- if (p.eatToken(.RBrace)) |_| break;
+ .comma => {
+ if (p.eatToken(.r_brace)) |_| break;
continue;
},
- .RBrace => break,
- .Colon, .RParen, .RBracket => {
+ .r_brace => break,
+ .colon, .r_paren, .r_bracket => {
p.tok_i -= 1;
return p.fail(.{
.ExpectedToken = .{
.token = p.tok_i,
- .expected_id = .RBrace,
+ .expected_id = .r_brace,
},
});
},
@@ -2850,7 +2850,7 @@ const Parser = struct {
try p.warn(.{
.ExpectedToken = .{
.token = p.tok_i,
- .expected_id = .Comma,
+ .expected_id = .comma,
},
});
},
@@ -2858,7 +2858,7 @@ const Parser = struct {
}
const span = try p.listToSpan(init_list.items);
return p.addNode(.{
- .tag = if (p.token_tags[p.tok_i - 2] == .Comma) .ArrayInitDotComma else .ArrayInitDot,
+ .tag = if (p.token_tags[p.tok_i - 2] == .comma) .ArrayInitDotComma else .ArrayInitDot,
.main_token = lbrace,
.data = .{
.lhs = span.start,
@@ -2868,12 +2868,12 @@ const Parser = struct {
},
else => return null_node,
},
- .Keyword_error => switch (p.token_tags[p.tok_i + 1]) {
- .LBrace => {
+ .keyword_error => switch (p.token_tags[p.tok_i + 1]) {
+ .l_brace => {
const error_token = p.tok_i;
p.tok_i += 2;
- if (p.eatToken(.RBrace)) |rbrace| {
+ if (p.eatToken(.r_brace)) |rbrace| {
return p.addNode(.{
.tag = .ErrorSetDecl,
.main_token = error_token,
@@ -2886,19 +2886,19 @@ const Parser = struct {
while (true) {
const doc_comment = p.eatDocComments();
- const identifier = try p.expectToken(.Identifier);
+ const identifier = try p.expectToken(.identifier);
switch (p.token_tags[p.nextToken()]) {
- .Comma => {
- if (p.eatToken(.RBrace)) |_| break;
+ .comma => {
+ if (p.eatToken(.r_brace)) |_| break;
continue;
},
- .RBrace => break,
- .Colon, .RParen, .RBracket => {
+ .r_brace => break,
+ .colon, .r_paren, .r_bracket => {
p.tok_i -= 1;
return p.fail(.{
.ExpectedToken = .{
.token = p.tok_i,
- .expected_id = .RBrace,
+ .expected_id = .r_brace,
},
});
},
@@ -2907,7 +2907,7 @@ const Parser = struct {
// give an error but continue parsing this list.
p.tok_i -= 1;
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
},
}
@@ -2925,17 +2925,17 @@ const Parser = struct {
.tag = .ErrorValue,
.main_token = p.nextToken(),
.data = .{
- .lhs = try p.expectToken(.Period),
- .rhs = try p.expectToken(.Identifier),
+ .lhs = try p.expectToken(.period),
+ .rhs = try p.expectToken(.identifier),
},
}),
},
- .LParen => return p.addNode(.{
+ .l_paren => return p.addNode(.{
.tag = .GroupedExpression,
.main_token = p.nextToken(),
.data = .{
.lhs = try p.expectExpr(),
- .rhs = try p.expectToken(.RParen),
+ .rhs = try p.expectToken(.r_paren),
},
}),
else => return null_node,
@@ -2953,14 +2953,14 @@ const Parser = struct {
/// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
/// ForTypeExpr <- ForPrefix TypeExpr (KEYWORD_else TypeExpr)?
fn parseForTypeExpr(p: *Parser) !Node.Index {
- const for_token = p.eatToken(.Keyword_for) orelse return null_node;
- _ = try p.expectToken(.LParen);
+ const for_token = p.eatToken(.keyword_for) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
const array_expr = try p.expectTypeExpr();
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
_ = try p.parsePtrIndexPayload();
const then_expr = try p.expectExpr();
- const else_token = p.eatToken(.Keyword_else) orelse {
+ const else_token = p.eatToken(.keyword_else) orelse {
return p.addNode(.{
.tag = .ForSimple,
.main_token = for_token,
@@ -2987,15 +2987,15 @@ const Parser = struct {
/// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
/// WhileTypeExpr <- WhilePrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
fn parseWhileTypeExpr(p: *Parser) !Node.Index {
- const while_token = p.eatToken(.Keyword_while) orelse return null_node;
- _ = try p.expectToken(.LParen);
+ const while_token = p.eatToken(.keyword_while) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
const condition = try p.expectExpr();
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
const then_payload = try p.parsePtrPayload();
const cont_expr = try p.parseWhileContinueExpr();
const then_expr = try p.expectTypeExpr();
- const else_token = p.eatToken(.Keyword_else) orelse {
+ const else_token = p.eatToken(.keyword_else) orelse {
if (cont_expr == 0) {
return p.addNode(.{
.tag = .WhileSimple,
@@ -3037,14 +3037,14 @@ const Parser = struct {
/// SwitchExpr <- KEYWORD_switch LPAREN Expr RPAREN LBRACE SwitchProngList RBRACE
fn expectSwitchExpr(p: *Parser) !Node.Index {
- const switch_token = p.assertToken(.Keyword_switch);
- _ = try p.expectToken(.LParen);
+ const switch_token = p.assertToken(.keyword_switch);
+ _ = try p.expectToken(.l_paren);
const expr_node = try p.expectExpr();
- _ = try p.expectToken(.RParen);
- _ = try p.expectToken(.LBrace);
+ _ = try p.expectToken(.r_paren);
+ _ = try p.expectToken(.l_brace);
const cases = try p.parseSwitchProngList();
- const trailing_comma = p.token_tags[p.tok_i - 1] == .Comma;
- _ = try p.expectToken(.RBrace);
+ const trailing_comma = p.token_tags[p.tok_i - 1] == .comma;
+ _ = try p.expectToken(.r_brace);
return p.addNode(.{
.tag = if (trailing_comma) .SwitchComma else .Switch,
@@ -3067,12 +3067,12 @@ const Parser = struct {
/// AsmOutputList <- (AsmOutputItem COMMA)* AsmOutputItem?
/// AsmInputList <- (AsmInputItem COMMA)* AsmInputItem?
fn expectAsmExpr(p: *Parser) !Node.Index {
- const asm_token = p.assertToken(.Keyword_asm);
- _ = p.eatToken(.Keyword_volatile);
- _ = try p.expectToken(.LParen);
+ const asm_token = p.assertToken(.keyword_asm);
+ _ = p.eatToken(.keyword_volatile);
+ _ = try p.expectToken(.l_paren);
const template = try p.expectExpr();
- if (p.eatToken(.RParen)) |rparen| {
+ if (p.eatToken(.r_paren)) |rparen| {
return p.addNode(.{
.tag = .AsmSimple,
.main_token = asm_token,
@@ -3083,7 +3083,7 @@ const Parser = struct {
});
}
- _ = try p.expectToken(.Colon);
+ _ = try p.expectToken(.colon);
var list = std.ArrayList(Node.Index).init(p.gpa);
defer list.deinit();
@@ -3093,51 +3093,51 @@ const Parser = struct {
if (output_item == 0) break;
try list.append(output_item);
switch (p.token_tags[p.tok_i]) {
- .Comma => p.tok_i += 1,
- .Colon, .RParen, .RBrace, .RBracket => break, // All possible delimiters.
+ .comma => p.tok_i += 1,
+ .colon, .r_paren, .r_brace, .r_bracket => break, // All possible delimiters.
else => {
// This is likely just a missing comma;
// give an error but continue parsing this list.
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
},
}
}
- if (p.eatToken(.Colon)) |_| {
+ if (p.eatToken(.colon)) |_| {
while (true) {
const input_item = try p.parseAsmInputItem();
if (input_item == 0) break;
try list.append(input_item);
switch (p.token_tags[p.tok_i]) {
- .Comma => p.tok_i += 1,
- .Colon, .RParen, .RBrace, .RBracket => break, // All possible delimiters.
+ .comma => p.tok_i += 1,
+ .colon, .r_paren, .r_brace, .r_bracket => break, // All possible delimiters.
else => {
// This is likely just a missing comma;
// give an error but continue parsing this list.
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
},
}
}
- if (p.eatToken(.Colon)) |_| {
- while (p.eatToken(.StringLiteral)) |_| {
+ if (p.eatToken(.colon)) |_| {
+ while (p.eatToken(.string_literal)) |_| {
switch (p.token_tags[p.tok_i]) {
- .Comma => p.tok_i += 1,
- .Colon, .RParen, .RBrace, .RBracket => break,
+ .comma => p.tok_i += 1,
+ .colon, .r_paren, .r_brace, .r_bracket => break,
else => {
// This is likely just a missing comma;
// give an error but continue parsing this list.
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
},
}
}
}
}
- const rparen = try p.expectToken(.RParen);
+ const rparen = try p.expectToken(.r_paren);
const span = try p.listToSpan(list.items);
return p.addNode(.{
.tag = .Asm,
@@ -3155,20 +3155,20 @@ const Parser = struct {
/// AsmOutputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN (MINUSRARROW TypeExpr / IDENTIFIER) RPAREN
fn parseAsmOutputItem(p: *Parser) !Node.Index {
- _ = p.eatToken(.LBracket) orelse return null_node;
- const identifier = try p.expectToken(.Identifier);
- _ = try p.expectToken(.RBracket);
- _ = try p.expectToken(.StringLiteral);
- _ = try p.expectToken(.LParen);
+ _ = p.eatToken(.l_bracket) orelse return null_node;
+ const identifier = try p.expectToken(.identifier);
+ _ = try p.expectToken(.r_bracket);
+ _ = try p.expectToken(.string_literal);
+ _ = try p.expectToken(.l_paren);
const type_expr: Node.Index = blk: {
- if (p.eatToken(.Arrow)) |_| {
+ if (p.eatToken(.arrow)) |_| {
break :blk try p.expectTypeExpr();
} else {
- _ = try p.expectToken(.Identifier);
+ _ = try p.expectToken(.identifier);
break :blk null_node;
}
};
- const rparen = try p.expectToken(.RParen);
+ const rparen = try p.expectToken(.r_paren);
return p.addNode(.{
.tag = .AsmOutput,
.main_token = identifier,
@@ -3181,13 +3181,13 @@ const Parser = struct {
/// AsmInputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN Expr RPAREN
fn parseAsmInputItem(p: *Parser) !Node.Index {
- _ = p.eatToken(.LBracket) orelse return null_node;
- const identifier = try p.expectToken(.Identifier);
- _ = try p.expectToken(.RBracket);
- _ = try p.expectToken(.StringLiteral);
- _ = try p.expectToken(.LParen);
+ _ = p.eatToken(.l_bracket) orelse return null_node;
+ const identifier = try p.expectToken(.identifier);
+ _ = try p.expectToken(.r_bracket);
+ _ = try p.expectToken(.string_literal);
+ _ = try p.expectToken(.l_paren);
const expr = try p.expectExpr();
- const rparen = try p.expectToken(.RParen);
+ const rparen = try p.expectToken(.r_paren);
return p.addNode(.{
.tag = .AsmInput,
.main_token = identifier,
@@ -3200,14 +3200,14 @@ const Parser = struct {
/// BreakLabel <- COLON IDENTIFIER
fn parseBreakLabel(p: *Parser) !TokenIndex {
- _ = p.eatToken(.Colon) orelse return @as(TokenIndex, 0);
- return p.expectToken(.Identifier);
+ _ = p.eatToken(.colon) orelse return @as(TokenIndex, 0);
+ return p.expectToken(.identifier);
}
/// BlockLabel <- IDENTIFIER COLON
fn parseBlockLabel(p: *Parser) TokenIndex {
- if (p.token_tags[p.tok_i] == .Identifier and
- p.token_tags[p.tok_i + 1] == .Colon)
+ if (p.token_tags[p.tok_i] == .identifier and
+ p.token_tags[p.tok_i + 1] == .colon)
{
const identifier = p.tok_i;
p.tok_i += 2;
@@ -3218,9 +3218,9 @@ const Parser = struct {
/// FieldInit <- DOT IDENTIFIER EQUAL Expr
fn parseFieldInit(p: *Parser) !Node.Index {
- if (p.token_tags[p.tok_i + 0] == .Period and
- p.token_tags[p.tok_i + 1] == .Identifier and
- p.token_tags[p.tok_i + 2] == .Equal)
+ if (p.token_tags[p.tok_i + 0] == .period and
+ p.token_tags[p.tok_i + 1] == .identifier and
+ p.token_tags[p.tok_i + 2] == .equal)
{
p.tok_i += 3;
return p.expectExpr();
@@ -3230,37 +3230,37 @@ const Parser = struct {
}
fn expectFieldInit(p: *Parser) !Node.Index {
- _ = try p.expectToken(.Period);
- _ = try p.expectToken(.Identifier);
- _ = try p.expectToken(.Equal);
+ _ = try p.expectToken(.period);
+ _ = try p.expectToken(.identifier);
+ _ = try p.expectToken(.equal);
return p.expectExpr();
}
/// WhileContinueExpr <- COLON LPAREN AssignExpr RPAREN
fn parseWhileContinueExpr(p: *Parser) !Node.Index {
- _ = p.eatToken(.Colon) orelse return null_node;
- _ = try p.expectToken(.LParen);
+ _ = p.eatToken(.colon) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
const node = try p.parseAssignExpr();
if (node == 0) return p.fail(.{ .ExpectedExprOrAssignment = .{ .token = p.tok_i } });
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
return node;
}
/// LinkSection <- KEYWORD_linksection LPAREN Expr RPAREN
fn parseLinkSection(p: *Parser) !Node.Index {
- _ = p.eatToken(.Keyword_linksection) orelse return null_node;
- _ = try p.expectToken(.LParen);
+ _ = p.eatToken(.keyword_linksection) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
const expr_node = try p.expectExpr();
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
return expr_node;
}
/// CallConv <- KEYWORD_callconv LPAREN Expr RPAREN
fn parseCallconv(p: *Parser) !Node.Index {
- _ = p.eatToken(.Keyword_callconv) orelse return null_node;
- _ = try p.expectToken(.LParen);
+ _ = p.eatToken(.keyword_callconv) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
const expr_node = try p.expectExpr();
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
return expr_node;
}
@@ -3276,20 +3276,20 @@ const Parser = struct {
fn expectParamDecl(p: *Parser) !Node.Index {
_ = p.eatDocComments();
switch (p.token_tags[p.tok_i]) {
- .Keyword_noalias, .Keyword_comptime => p.tok_i += 1,
- .Ellipsis3 => {
+ .keyword_noalias, .keyword_comptime => p.tok_i += 1,
+ .ellipsis3 => {
p.tok_i += 1;
return null_node;
},
else => {},
}
- if (p.token_tags[p.tok_i] == .Identifier and
- p.token_tags[p.tok_i + 1] == .Colon)
+ if (p.token_tags[p.tok_i] == .identifier and
+ p.token_tags[p.tok_i + 1] == .colon)
{
p.tok_i += 2;
}
switch (p.token_tags[p.tok_i]) {
- .Keyword_anytype => {
+ .keyword_anytype => {
p.tok_i += 1;
return null_node;
},
@@ -3299,31 +3299,31 @@ const Parser = struct {
/// Payload <- PIPE IDENTIFIER PIPE
fn parsePayload(p: *Parser) !TokenIndex {
- _ = p.eatToken(.Pipe) orelse return @as(TokenIndex, 0);
- const identifier = try p.expectToken(.Identifier);
- _ = try p.expectToken(.Pipe);
+ _ = p.eatToken(.pipe) orelse return @as(TokenIndex, 0);
+ const identifier = try p.expectToken(.identifier);
+ _ = try p.expectToken(.pipe);
return identifier;
}
/// PtrPayload <- PIPE ASTERISK? IDENTIFIER PIPE
fn parsePtrPayload(p: *Parser) !TokenIndex {
- _ = p.eatToken(.Pipe) orelse return @as(TokenIndex, 0);
- _ = p.eatToken(.Asterisk);
- const identifier = try p.expectToken(.Identifier);
- _ = try p.expectToken(.Pipe);
+ _ = p.eatToken(.pipe) orelse return @as(TokenIndex, 0);
+ _ = p.eatToken(.asterisk);
+ const identifier = try p.expectToken(.identifier);
+ _ = try p.expectToken(.pipe);
return identifier;
}
/// PtrIndexPayload <- PIPE ASTERISK? IDENTIFIER (COMMA IDENTIFIER)? PIPE
/// Returns the first identifier token, if any.
fn parsePtrIndexPayload(p: *Parser) !TokenIndex {
- _ = p.eatToken(.Pipe) orelse return @as(TokenIndex, 0);
- _ = p.eatToken(.Asterisk);
- const identifier = try p.expectToken(.Identifier);
- if (p.eatToken(.Comma) != null) {
- _ = try p.expectToken(.Identifier);
+ _ = p.eatToken(.pipe) orelse return @as(TokenIndex, 0);
+ _ = p.eatToken(.asterisk);
+ const identifier = try p.expectToken(.identifier);
+ if (p.eatToken(.comma) != null) {
+ _ = try p.expectToken(.identifier);
}
- _ = try p.expectToken(.Pipe);
+ _ = try p.expectToken(.pipe);
return identifier;
}
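Note: the payload grammars above (Payload, PtrPayload, PtrIndexPayload) only rename the token tags they match on; the surface syntax they accept is unchanged. A minimal, runnable sketch of the source forms involved, written against the 2021-era capture syntax (illustrative, not code from this commit):

// Illustrative: source forms matched by the payload grammars above.
fn demo(items: []u8) usize {
    var sum: usize = 0;
    for (items) |*item, i| { // PtrIndexPayload: PIPE ASTERISK? IDENTIFIER (COMMA IDENTIFIER)? PIPE
        item.* +%= 1;
        sum += i;
    }
    var maybe: ?u8 = 5;
    while (maybe) |value| { // PtrPayload without the asterisk
        sum += value;
        maybe = null;
    }
    return sum;
}

test "payload forms" {
    var buf = [_]u8{ 1, 2, 3 };
    _ = demo(&buf);
}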
@@ -3332,8 +3332,8 @@ const Parser = struct {
/// <- SwitchItem (COMMA SwitchItem)* COMMA?
/// / KEYWORD_else
fn parseSwitchProng(p: *Parser) !Node.Index {
- if (p.eatToken(.Keyword_else)) |_| {
- const arrow_token = try p.expectToken(.EqualAngleBracketRight);
+ if (p.eatToken(.keyword_else)) |_| {
+ const arrow_token = try p.expectToken(.equal_angle_bracket_right);
_ = try p.parsePtrPayload();
return p.addNode(.{
.tag = .SwitchCaseOne,
@@ -3347,7 +3347,7 @@ const Parser = struct {
const first_item = try p.parseSwitchItem();
if (first_item == 0) return null_node;
- if (p.eatToken(.EqualAngleBracketRight)) |arrow_token| {
+ if (p.eatToken(.equal_angle_bracket_right)) |arrow_token| {
_ = try p.parsePtrPayload();
return p.addNode(.{
.tag = .SwitchCaseOne,
@@ -3363,13 +3363,13 @@ const Parser = struct {
defer list.deinit();
try list.append(first_item);
- while (p.eatToken(.Comma)) |_| {
+ while (p.eatToken(.comma)) |_| {
const next_item = try p.parseSwitchItem();
if (next_item == 0) break;
try list.append(next_item);
}
const span = try p.listToSpan(list.items);
- const arrow_token = try p.expectToken(.EqualAngleBracketRight);
+ const arrow_token = try p.expectToken(.equal_angle_bracket_right);
_ = try p.parsePtrPayload();
return p.addNode(.{
.tag = .SwitchCase,
@@ -3389,7 +3389,7 @@ const Parser = struct {
const expr = try p.parseExpr();
if (expr == 0) return null_node;
- if (p.eatToken(.Ellipsis3)) |token| {
+ if (p.eatToken(.ellipsis3)) |token| {
return p.addNode(.{
.tag = .SwitchRange,
.main_token = token,
@@ -3419,25 +3419,25 @@ const Parser = struct {
var saw_allowzero = false;
while (true) {
switch (p.token_tags[p.tok_i]) {
- .Keyword_align => {
+ .keyword_align => {
if (result.align_node != 0) {
try p.warn(.{
.ExtraAlignQualifier = .{ .token = p.tok_i },
});
}
p.tok_i += 1;
- _ = try p.expectToken(.LParen);
+ _ = try p.expectToken(.l_paren);
result.align_node = try p.expectExpr();
- if (p.eatToken(.Colon)) |_| {
+ if (p.eatToken(.colon)) |_| {
result.bit_range_start = try p.expectExpr();
- _ = try p.expectToken(.Colon);
+ _ = try p.expectToken(.colon);
result.bit_range_end = try p.expectExpr();
}
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
},
- .Keyword_const => {
+ .keyword_const => {
if (saw_const) {
try p.warn(.{
.ExtraConstQualifier = .{ .token = p.tok_i },
@@ -3446,7 +3446,7 @@ const Parser = struct {
p.tok_i += 1;
saw_const = true;
},
- .Keyword_volatile => {
+ .keyword_volatile => {
if (saw_volatile) {
try p.warn(.{
.ExtraVolatileQualifier = .{ .token = p.tok_i },
@@ -3455,7 +3455,7 @@ const Parser = struct {
p.tok_i += 1;
saw_volatile = true;
},
- .Keyword_allowzero => {
+ .keyword_allowzero => {
if (saw_allowzero) {
try p.warn(.{
.ExtraAllowZeroQualifier = .{ .token = p.tok_i },
@@ -3476,14 +3476,14 @@ const Parser = struct {
/// / DOTQUESTIONMARK
fn parseSuffixOp(p: *Parser, lhs: Node.Index) !Node.Index {
switch (p.token_tags[p.tok_i]) {
- .LBracket => {
+ .l_bracket => {
const lbracket = p.nextToken();
const index_expr = try p.expectExpr();
- if (p.eatToken(.Ellipsis2)) |_| {
+ if (p.eatToken(.ellipsis2)) |_| {
const end_expr = try p.parseExpr();
if (end_expr == 0) {
- _ = try p.expectToken(.RBracket);
+ _ = try p.expectToken(.r_bracket);
return p.addNode(.{
.tag = .SliceOpen,
.main_token = lbracket,
@@ -3493,9 +3493,9 @@ const Parser = struct {
},
});
}
- if (p.eatToken(.Colon)) |_| {
+ if (p.eatToken(.colon)) |_| {
const sentinel = try p.parseExpr();
- _ = try p.expectToken(.RBracket);
+ _ = try p.expectToken(.r_bracket);
return p.addNode(.{
.tag = .SliceSentinel,
.main_token = lbracket,
@@ -3509,7 +3509,7 @@ const Parser = struct {
},
});
} else {
- _ = try p.expectToken(.RBracket);
+ _ = try p.expectToken(.r_bracket);
return p.addNode(.{
.tag = .Slice,
.main_token = lbracket,
@@ -3523,7 +3523,7 @@ const Parser = struct {
});
}
}
- _ = try p.expectToken(.RBracket);
+ _ = try p.expectToken(.r_bracket);
return p.addNode(.{
.tag = .ArrayAccess,
.main_token = lbracket,
@@ -3533,7 +3533,7 @@ const Parser = struct {
},
});
},
- .PeriodAsterisk => return p.addNode(.{
+ .period_asterisk => return p.addNode(.{
.tag = .Deref,
.main_token = p.nextToken(),
.data = .{
@@ -3541,7 +3541,7 @@ const Parser = struct {
.rhs = undefined,
},
}),
- .Invalid_periodasterisks => {
+ .invalid_periodasterisks => {
const period_asterisk = p.nextToken();
try p.warn(.{ .AsteriskAfterPointerDereference = .{ .token = period_asterisk } });
return p.addNode(.{
@@ -3553,8 +3553,8 @@ const Parser = struct {
},
});
},
- .Period => switch (p.token_tags[p.tok_i + 1]) {
- .Identifier => return p.addNode(.{
+ .period => switch (p.token_tags[p.tok_i + 1]) {
+ .identifier => return p.addNode(.{
.tag = .FieldAccess,
.main_token = p.nextToken(),
.data = .{
@@ -3562,7 +3562,7 @@ const Parser = struct {
.rhs = p.nextToken(),
},
}),
- .QuestionMark => return p.addNode(.{
+ .question_mark => return p.addNode(.{
.tag = .UnwrapOptional,
.main_token = p.nextToken(),
.data = .{
@@ -3589,28 +3589,28 @@ const Parser = struct {
fn parseContainerDeclAuto(p: *Parser) !Node.Index {
const main_token = p.nextToken();
const arg_expr = switch (p.token_tags[main_token]) {
- .Keyword_struct, .Keyword_opaque => null_node,
- .Keyword_enum => blk: {
- if (p.eatToken(.LParen)) |_| {
+ .keyword_struct, .keyword_opaque => null_node,
+ .keyword_enum => blk: {
+ if (p.eatToken(.l_paren)) |_| {
const expr = try p.expectExpr();
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
break :blk expr;
} else {
break :blk null_node;
}
},
- .Keyword_union => blk: {
- if (p.eatToken(.LParen)) |_| {
- if (p.eatToken(.Keyword_enum)) |_| {
- if (p.eatToken(.LParen)) |_| {
+ .keyword_union => blk: {
+ if (p.eatToken(.l_paren)) |_| {
+ if (p.eatToken(.keyword_enum)) |_| {
+ if (p.eatToken(.l_paren)) |_| {
const enum_tag_expr = try p.expectExpr();
- _ = try p.expectToken(.RParen);
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
+ _ = try p.expectToken(.r_paren);
- _ = try p.expectToken(.LBrace);
+ _ = try p.expectToken(.l_brace);
const members = try p.parseContainerMembers();
const members_span = try members.toSpan(p);
- _ = try p.expectToken(.RBrace);
+ _ = try p.expectToken(.r_brace);
return p.addNode(.{
.tag = switch (members.trailing_comma) {
true => .TaggedUnionEnumTagComma,
@@ -3623,11 +3623,11 @@ const Parser = struct {
},
});
} else {
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
- _ = try p.expectToken(.LBrace);
+ _ = try p.expectToken(.l_brace);
const members = try p.parseContainerMembers();
- _ = try p.expectToken(.RBrace);
+ _ = try p.expectToken(.r_brace);
if (members.len <= 2) {
return p.addNode(.{
.tag = switch (members.trailing_comma) {
@@ -3657,7 +3657,7 @@ const Parser = struct {
}
} else {
const expr = try p.expectExpr();
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
break :blk expr;
}
} else {
@@ -3666,9 +3666,9 @@ const Parser = struct {
},
else => unreachable,
};
- _ = try p.expectToken(.LBrace);
+ _ = try p.expectToken(.l_brace);
const members = try p.parseContainerMembers();
- _ = try p.expectToken(.RBrace);
+ _ = try p.expectToken(.r_brace);
if (arg_expr == 0) {
if (members.len <= 2) {
return p.addNode(.{
@@ -3718,10 +3718,10 @@ const Parser = struct {
/// Holds temporary data until we are ready to construct the full ContainerDecl AST node.
/// ByteAlign <- KEYWORD_align LPAREN Expr RPAREN
fn parseByteAlign(p: *Parser) !Node.Index {
- _ = p.eatToken(.Keyword_align) orelse return null_node;
- _ = try p.expectToken(.LParen);
+ _ = p.eatToken(.keyword_align) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
const expr = try p.expectExpr();
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
return expr;
}
@@ -3732,22 +3732,22 @@ const Parser = struct {
/// ParamDeclList <- (ParamDecl COMMA)* ParamDecl?
fn parseParamDeclList(p: *Parser) !SmallSpan {
- _ = try p.expectToken(.LParen);
- if (p.eatToken(.RParen)) |_| {
+ _ = try p.expectToken(.l_paren);
+ if (p.eatToken(.r_paren)) |_| {
return SmallSpan{ .zero_or_one = 0 };
}
const param_one = while (true) {
const param = try p.expectParamDecl();
if (param != 0) break param;
switch (p.token_tags[p.nextToken()]) {
- .Comma => continue,
- .RParen => return SmallSpan{ .zero_or_one = 0 },
+ .comma => continue,
+ .r_paren => return SmallSpan{ .zero_or_one = 0 },
else => {
// This is likely just a missing comma;
// give an error but continue parsing this list.
p.tok_i -= 1;
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
},
}
@@ -3755,19 +3755,19 @@ const Parser = struct {
const param_two = while (true) {
switch (p.token_tags[p.nextToken()]) {
- .Comma => {
- if (p.eatToken(.RParen)) |_| {
+ .comma => {
+ if (p.eatToken(.r_paren)) |_| {
return SmallSpan{ .zero_or_one = param_one };
}
const param = try p.expectParamDecl();
if (param != 0) break param;
continue;
},
- .RParen => return SmallSpan{ .zero_or_one = param_one },
- .Colon, .RBrace, .RBracket => {
+ .r_paren => return SmallSpan{ .zero_or_one = param_one },
+ .colon, .r_brace, .r_bracket => {
p.tok_i -= 1;
return p.fail(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .RParen },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .r_paren },
});
},
else => {
@@ -3775,7 +3775,7 @@ const Parser = struct {
// give an error but continue parsing this list.
p.tok_i -= 1;
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
},
}
@@ -3784,12 +3784,12 @@ const Parser = struct {
var list = std.ArrayList(Node.Index).init(p.gpa);
defer list.deinit();
- try list.appendSlice(&[_]Node.Index{ param_one, param_two });
+ try list.appendSlice(&.{ param_one, param_two });
while (true) {
switch (p.token_tags[p.nextToken()]) {
- .Comma => {
- if (p.token_tags[p.tok_i] == .RParen) {
+ .comma => {
+ if (p.token_tags[p.tok_i] == .r_paren) {
p.tok_i += 1;
return SmallSpan{ .multi = list.toOwnedSlice() };
}
@@ -3799,11 +3799,11 @@ const Parser = struct {
}
continue;
},
- .RParen => return SmallSpan{ .multi = list.toOwnedSlice() },
- .Colon, .RBrace, .RBracket => {
+ .r_paren => return SmallSpan{ .multi = list.toOwnedSlice() },
+ .colon, .r_brace, .r_bracket => {
p.tok_i -= 1;
return p.fail(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .RParen },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .r_paren },
});
},
else => {
@@ -3811,7 +3811,7 @@ const Parser = struct {
// give an error but continue parsing this list.
p.tok_i -= 1;
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
},
}
@@ -3833,14 +3833,14 @@ const Parser = struct {
try list.append(item);
switch (p.token_tags[p.tok_i]) {
- .Comma => p.tok_i += 1,
+ .comma => p.tok_i += 1,
// all possible delimiters
- .Colon, .RParen, .RBrace, .RBracket => break,
+ .colon, .r_paren, .r_brace, .r_bracket => break,
else => {
// This is likely just a missing comma;
// give an error but continue parsing this list.
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
},
}
@@ -3853,8 +3853,8 @@ const Parser = struct {
/// FnCallArguments <- LPAREN ExprList RPAREN
/// ExprList <- (Expr COMMA)* Expr?
fn parseBuiltinCall(p: *Parser) !Node.Index {
- const builtin_token = p.assertToken(.Builtin);
- _ = (try p.expectTokenRecoverable(.LParen)) orelse {
+ const builtin_token = p.assertToken(.builtin);
+ _ = (try p.expectTokenRecoverable(.l_paren)) orelse {
try p.warn(.{
.ExpectedParamList = .{ .token = p.tok_i },
});
@@ -3868,7 +3868,7 @@ const Parser = struct {
},
});
};
- if (p.eatToken(.RParen)) |_| {
+ if (p.eatToken(.r_paren)) |_| {
return p.addNode(.{
.tag = .BuiltinCallTwo,
.main_token = builtin_token,
@@ -3880,8 +3880,8 @@ const Parser = struct {
}
const param_one = try p.expectExpr();
switch (p.token_tags[p.nextToken()]) {
- .Comma => {
- if (p.eatToken(.RParen)) |_| {
+ .comma => {
+ if (p.eatToken(.r_paren)) |_| {
return p.addNode(.{
.tag = .BuiltinCallTwoComma,
.main_token = builtin_token,
@@ -3892,7 +3892,7 @@ const Parser = struct {
});
}
},
- .RParen => return p.addNode(.{
+ .r_paren => return p.addNode(.{
.tag = .BuiltinCallTwo,
.main_token = builtin_token,
.data = .{
@@ -3905,14 +3905,14 @@ const Parser = struct {
// give an error but continue parsing this list.
p.tok_i -= 1;
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
},
}
const param_two = try p.expectExpr();
switch (p.token_tags[p.nextToken()]) {
- .Comma => {
- if (p.eatToken(.RParen)) |_| {
+ .comma => {
+ if (p.eatToken(.r_paren)) |_| {
return p.addNode(.{
.tag = .BuiltinCallTwoComma,
.main_token = builtin_token,
@@ -3923,7 +3923,7 @@ const Parser = struct {
});
}
},
- .RParen => return p.addNode(.{
+ .r_paren => return p.addNode(.{
.tag = .BuiltinCallTwo,
.main_token = builtin_token,
.data = .{
@@ -3936,7 +3936,7 @@ const Parser = struct {
// give an error but continue parsing this list.
p.tok_i -= 1;
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
},
}
@@ -3944,14 +3944,14 @@ const Parser = struct {
var list = std.ArrayList(Node.Index).init(p.gpa);
defer list.deinit();
- try list.appendSlice(&[_]Node.Index{ param_one, param_two });
+ try list.appendSlice(&.{ param_one, param_two });
while (true) {
const param = try p.expectExpr();
try list.append(param);
switch (p.token_tags[p.nextToken()]) {
- .Comma => {
- if (p.eatToken(.RParen)) |_| {
+ .comma => {
+ if (p.eatToken(.r_paren)) |_| {
const params = try p.listToSpan(list.items);
return p.addNode(.{
.tag = .BuiltinCallComma,
@@ -3964,7 +3964,7 @@ const Parser = struct {
}
continue;
},
- .RParen => {
+ .r_paren => {
const params = try p.listToSpan(list.items);
return p.addNode(.{
.tag = .BuiltinCall,
@@ -3980,7 +3980,7 @@ const Parser = struct {
// give an error but continue parsing this list.
p.tok_i -= 1;
try p.warn(.{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma },
});
},
}
@@ -3990,7 +3990,7 @@ const Parser = struct {
// string literal or multiline string literal
fn parseStringLiteral(p: *Parser) !Node.Index {
switch (p.token_tags[p.tok_i]) {
- .StringLiteral => {
+ .string_literal => {
const main_token = p.nextToken();
return p.addNode(.{
.tag = .StringLiteral,
@@ -4001,9 +4001,9 @@ const Parser = struct {
},
});
},
- .MultilineStringLiteralLine => {
+ .multiline_string_literal_line => {
const first_line = p.nextToken();
- while (p.token_tags[p.tok_i] == .MultilineStringLiteralLine) {
+ while (p.token_tags[p.tok_i] == .multiline_string_literal_line) {
p.tok_i += 1;
}
return p.addNode(.{
@@ -4030,7 +4030,7 @@ const Parser = struct {
fn expectIntegerLiteral(p: *Parser) !Node.Index {
return p.addNode(.{
.tag = .IntegerLiteral,
- .main_token = try p.expectToken(.IntegerLiteral),
+ .main_token = try p.expectToken(.integer_literal),
.data = .{
.lhs = undefined,
.rhs = undefined,
@@ -4040,16 +4040,16 @@ const Parser = struct {
/// KEYWORD_if LPAREN Expr RPAREN PtrPayload? Body (KEYWORD_else Payload? Body)?
fn parseIf(p: *Parser, bodyParseFn: NodeParseFn) !Node.Index {
- const if_token = p.eatToken(.Keyword_if) orelse return null_node;
- _ = try p.expectToken(.LParen);
+ const if_token = p.eatToken(.keyword_if) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
const condition = try p.expectExpr();
- _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.r_paren);
const then_payload = try p.parsePtrPayload();
const then_expr = try bodyParseFn(p);
if (then_expr == 0) return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } });
- const else_token = p.eatToken(.Keyword_else) orelse return p.addNode(.{
+ const else_token = p.eatToken(.keyword_else) orelse return p.addNode(.{
.tag = .IfSimple,
.main_token = if_token,
.data = .{
@@ -4076,8 +4076,8 @@ const Parser = struct {
/// Skips over doc comment tokens. Returns the first one, if any.
fn eatDocComments(p: *Parser) ?TokenIndex {
- if (p.eatToken(.DocComment)) |first_line| {
- while (p.eatToken(.DocComment)) |_| {}
+ if (p.eatToken(.doc_comment)) |first_line| {
+ while (p.eatToken(.doc_comment)) |_| {}
return first_line;
}
return null;
@@ -4089,7 +4089,7 @@ const Parser = struct {
/// Eat a single-line doc comment on the same line as another node
fn parseAppendedDocComment(p: *Parser, after_token: TokenIndex) !void {
- const comment_token = p.eatToken(.DocComment) orelse return;
+ const comment_token = p.eatToken(.doc_comment) orelse return;
if (!p.tokensOnSameLine(after_token, comment_token)) {
p.tok_i -= 1;
}
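Everything in parse.zig above is the same mechanical substitution: Token.Tag literals at call sites such as eatToken(.LParen) become eatToken(.l_paren), while AST node tags (.StructInitDotTwo, .BuiltinCallTwo) and error variants (.ExpectedToken) keep their old casing because they belong to different enums. A minimal sketch of the eatToken/expectToken idiom the rename flows through, using a hypothetical MiniParser and a trimmed Tag enum rather than the real parser:

const Tag = enum { l_paren, r_paren, identifier, keyword_if, eof };

const MiniParser = struct {
    token_tags: []const Tag,
    tok_i: usize = 0,

    // Consume the next token only if it has the given tag.
    fn eatToken(p: *MiniParser, tag: Tag) ?usize {
        if (p.token_tags[p.tok_i] != tag) return null;
        const i = p.tok_i;
        p.tok_i += 1;
        return i;
    }

    // Like eatToken, but a mismatch is a parse error.
    fn expectToken(p: *MiniParser, tag: Tag) !usize {
        return p.eatToken(tag) orelse error.ParseError;
    }
};

test "snake_case tags at call sites" {
    var p = MiniParser{ .token_tags = &.{ .keyword_if, .l_paren, .identifier, .r_paren, .eof } };
    _ = p.eatToken(.keyword_if).?;
    _ = try p.expectToken(.l_paren);
    _ = try p.expectToken(.identifier);
    _ = try p.expectToken(.r_paren);
}

Because enum literals are resolved against the expected type, the rename is confined to the Tag declaration and its literal uses; no behavior changes.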
diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig
index 4ffbaeff19..0f14856fdf 100644
--- a/lib/std/zig/render.zig
+++ b/lib/std/zig/render.zig
@@ -107,12 +107,12 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E
while (i > 0) {
i -= 1;
switch (token_tags[i]) {
- .Keyword_extern,
- .Keyword_export,
- .Keyword_pub,
- .StringLiteral,
- .Keyword_inline,
- .Keyword_noinline,
+ .keyword_extern,
+ .keyword_export,
+ .keyword_pub,
+ .string_literal,
+ .keyword_inline,
+ .keyword_noinline,
=> continue,
else => {
@@ -144,7 +144,7 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E
.UsingNamespace => {
const main_token = main_tokens[decl];
const expr = datas[decl].lhs;
- if (main_token > 0 and token_tags[main_token - 1] == .Keyword_pub) {
+ if (main_token > 0 and token_tags[main_token - 1] == .keyword_pub) {
try renderToken(ais, tree, main_token - 1, .Space); // pub
}
try renderToken(ais, tree, main_token, .Space); // usingnamespace
@@ -160,7 +160,7 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E
.TestDecl => {
const test_token = main_tokens[decl];
try renderToken(ais, tree, test_token, .Space);
- if (token_tags[test_token + 1] == .StringLiteral) {
+ if (token_tags[test_token + 1] == .string_literal) {
try renderToken(ais, tree, test_token + 1, .Space);
}
try renderExpression(ais, tree, datas[decl].rhs, space);
@@ -269,13 +269,13 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac
try renderExpression(ais, tree, datas[node].lhs, .Space); // target
- if (token_tags[fallback_first - 1] == .Pipe) {
+ if (token_tags[fallback_first - 1] == .pipe) {
try renderToken(ais, tree, main_token, .Space); // catch keyword
try renderToken(ais, tree, main_token + 1, .None); // pipe
try renderToken(ais, tree, main_token + 2, .None); // payload identifier
try renderToken(ais, tree, main_token + 3, after_op_space); // pipe
} else {
- assert(token_tags[fallback_first - 1] == .Keyword_catch);
+ assert(token_tags[fallback_first - 1] == .keyword_catch);
try renderToken(ais, tree, main_token, after_op_space); // catch keyword
}
@@ -532,22 +532,22 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac
// There is nothing between the braces so render condensed: `error{}`
try renderToken(ais, tree, lbrace, .None);
return renderToken(ais, tree, rbrace, space);
- } else if (lbrace + 2 == rbrace and token_tags[lbrace + 1] == .Identifier) {
+ } else if (lbrace + 2 == rbrace and token_tags[lbrace + 1] == .identifier) {
// There is exactly one member and no trailing comma or
// comments, so render without surrounding spaces: `error{Foo}`
try renderToken(ais, tree, lbrace, .None);
try renderToken(ais, tree, lbrace + 1, .None); // identifier
return renderToken(ais, tree, rbrace, space);
- } else if (token_tags[rbrace - 1] == .Comma) {
+ } else if (token_tags[rbrace - 1] == .comma) {
// There is a trailing comma so render each member on a new line.
try renderToken(ais, tree, lbrace, .Newline);
ais.pushIndent();
var i = lbrace + 1;
while (i < rbrace) : (i += 1) {
switch (token_tags[i]) {
- .DocComment => try renderToken(ais, tree, i, .Newline),
- .Identifier => try renderToken(ais, tree, i, .Comma),
- .Comma => {},
+ .doc_comment => try renderToken(ais, tree, i, .Newline),
+ .identifier => try renderToken(ais, tree, i, .Comma),
+ .comma => {},
else => unreachable,
}
}
@@ -559,9 +559,9 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac
var i = lbrace + 1;
while (i < rbrace) : (i += 1) {
switch (token_tags[i]) {
- .DocComment => unreachable, // TODO
- .Identifier => try renderToken(ais, tree, i, .CommaSpace),
- .Comma => {},
+ .doc_comment => unreachable, // TODO
+ .identifier => try renderToken(ais, tree, i, .CommaSpace),
+ .comma => {},
else => unreachable,
}
}
@@ -701,7 +701,7 @@ fn renderPtrType(
// in such a relationship. If so, skip rendering anything for
// this pointer type and rely on the child to render our asterisk
// as well when it renders the ** token.
- if (tree.tokens.items(.tag)[ptr_type.ast.main_token] == .AsteriskAsterisk and
+ if (tree.tokens.items(.tag)[ptr_type.ast.main_token] == .asterisk_asterisk and
ptr_type.ast.main_token == tree.nodes.items(.main_token)[ptr_type.ast.child_type])
{
return renderExpression(ais, tree, ptr_type.ast.child_type, space);
@@ -823,7 +823,7 @@ fn renderAsmOutput(
try renderToken(ais, tree, symbolic_name + 2, .Space); // "constraint"
try renderToken(ais, tree, symbolic_name + 3, .None); // lparen
- if (token_tags[symbolic_name + 4] == .Arrow) {
+ if (token_tags[symbolic_name + 4] == .arrow) {
try renderToken(ais, tree, symbolic_name + 4, .Space); // ->
try renderExpression(ais, tree, datas[asm_output].lhs, Space.None);
return renderToken(ais, tree, datas[asm_output].rhs, space); // rparen
@@ -982,7 +982,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa
try renderToken(ais, tree, payload_token - 2, .Space); // )
try renderToken(ais, tree, payload_token - 1, .None); // |
const ident = blk: {
- if (token_tags[payload_token] == .Asterisk) {
+ if (token_tags[payload_token] == .asterisk) {
try renderToken(ais, tree, payload_token, .None); // *
break :blk payload_token + 1;
} else {
@@ -991,7 +991,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa
};
try renderToken(ais, tree, ident, .None); // identifier
const pipe = blk: {
- if (token_tags[ident + 1] == .Comma) {
+ if (token_tags[ident + 1] == .comma) {
try renderToken(ais, tree, ident + 1, .Space); // ,
try renderToken(ais, tree, ident + 2, .None); // index
break :blk payload_token + 3;
@@ -1035,7 +1035,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa
try renderToken(ais, tree, payload_token - 2, .Space); // )
try renderToken(ais, tree, payload_token - 1, .None); // |
const ident = blk: {
- if (token_tags[payload_token] == .Asterisk) {
+ if (token_tags[payload_token] == .asterisk) {
try renderToken(ais, tree, payload_token, .None); // *
break :blk payload_token + 1;
} else {
@@ -1044,7 +1044,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa
};
try renderToken(ais, tree, ident, .None); // identifier
const pipe = blk: {
- if (token_tags[ident + 1] == .Comma) {
+ if (token_tags[ident + 1] == .comma) {
try renderToken(ais, tree, ident + 1, .Space); // ,
try renderToken(ais, tree, ident + 2, .None); // index
break :blk payload_token + 3;
@@ -1108,7 +1108,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa
try renderToken(ais, tree, payload_token - 2, .Space); // )
try renderToken(ais, tree, payload_token - 1, .None); // |
const ident = blk: {
- if (token_tags[payload_token] == .Asterisk) {
+ if (token_tags[payload_token] == .asterisk) {
try renderToken(ais, tree, payload_token, .None); // *
break :blk payload_token + 1;
} else {
@@ -1117,7 +1117,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa
};
try renderToken(ais, tree, ident, .None); // identifier
const pipe = blk: {
- if (token_tags[ident + 1] == .Comma) {
+ if (token_tags[ident + 1] == .comma) {
try renderToken(ais, tree, ident + 1, .Space); // ,
try renderToken(ais, tree, ident + 2, .None); // index
break :blk payload_token + 3;
@@ -1227,7 +1227,7 @@ fn renderBuiltinCall(
const last_param = params[params.len - 1];
const after_last_param_token = tree.lastToken(last_param) + 1;
- if (token_tags[after_last_param_token] != .Comma) {
+ if (token_tags[after_last_param_token] != .comma) {
// Render all on one line, no trailing comma.
try renderToken(ais, tree, builtin_token + 1, .None); // (
@@ -1259,7 +1259,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S
const token_starts = tree.tokens.items(.start);
const after_fn_token = fn_proto.ast.fn_token + 1;
- const lparen = if (token_tags[after_fn_token] == .Identifier) blk: {
+ const lparen = if (token_tags[after_fn_token] == .identifier) blk: {
try renderToken(ais, tree, fn_proto.ast.fn_token, .Space); // fn
try renderToken(ais, tree, after_fn_token, .None); // name
break :blk after_fn_token + 1;
@@ -1267,7 +1267,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S
try renderToken(ais, tree, fn_proto.ast.fn_token, .Space); // fn
break :blk fn_proto.ast.fn_token + 1;
};
- assert(token_tags[lparen] == .LParen);
+ assert(token_tags[lparen] == .l_paren);
const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1;
const rparen = blk: {
@@ -1301,11 +1301,11 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S
}
break :blk rparen;
};
- assert(token_tags[rparen] == .RParen);
+ assert(token_tags[rparen] == .r_paren);
// The params list is a sparse set that does *not* include anytype or ... parameters.
- if (token_tags[rparen - 1] != .Comma) {
+ if (token_tags[rparen - 1] != .comma) {
// Render all on one line, no trailing comma.
try renderToken(ais, tree, lparen, .None); // (
@@ -1314,39 +1314,39 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S
while (true) {
last_param_token += 1;
switch (token_tags[last_param_token]) {
- .DocComment => {
+ .doc_comment => {
try renderToken(ais, tree, last_param_token, .Newline);
continue;
},
- .Ellipsis3 => {
+ .ellipsis3 => {
try renderToken(ais, tree, last_param_token, .None); // ...
break;
},
- .Keyword_noalias, .Keyword_comptime => {
+ .keyword_noalias, .keyword_comptime => {
try renderToken(ais, tree, last_param_token, .Space);
last_param_token += 1;
},
- .Identifier => {},
- .Keyword_anytype => {
+ .identifier => {},
+ .keyword_anytype => {
try renderToken(ais, tree, last_param_token, .None); // anytype
continue;
},
- .RParen => break,
- .Comma => {
+ .r_paren => break,
+ .comma => {
try renderToken(ais, tree, last_param_token, .Space); // ,
last_param_token += 1;
},
else => {}, // Parameter type without a name.
}
- if (token_tags[last_param_token] == .Identifier and
- token_tags[last_param_token + 1] == .Colon)
+ if (token_tags[last_param_token] == .identifier and
+ token_tags[last_param_token + 1] == .colon)
{
try renderToken(ais, tree, last_param_token, .None); // name
last_param_token += 1;
try renderToken(ais, tree, last_param_token, .Space); // :
last_param_token += 1;
}
- if (token_tags[last_param_token] == .Keyword_anytype) {
+ if (token_tags[last_param_token] == .keyword_anytype) {
try renderToken(ais, tree, last_param_token, .None); // anytype
continue;
}
@@ -1365,33 +1365,33 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S
while (true) {
last_param_token += 1;
switch (token_tags[last_param_token]) {
- .DocComment => {
+ .doc_comment => {
try renderToken(ais, tree, last_param_token, .Newline);
continue;
},
- .Ellipsis3 => {
+ .ellipsis3 => {
try renderToken(ais, tree, last_param_token, .Comma); // ...
break;
},
- .Keyword_noalias, .Keyword_comptime => {
+ .keyword_noalias, .keyword_comptime => {
try renderToken(ais, tree, last_param_token, .Space);
last_param_token += 1;
},
- .Identifier => {},
- .Keyword_anytype => {
+ .identifier => {},
+ .keyword_anytype => {
try renderToken(ais, tree, last_param_token, .Comma); // anytype
continue;
},
- .RParen => break,
+ .r_paren => break,
else => unreachable,
}
- if (token_tags[last_param_token] == .Identifier) {
+ if (token_tags[last_param_token] == .identifier) {
try renderToken(ais, tree, last_param_token, .None); // name
last_param_token += 1;
try renderToken(ais, tree, last_param_token, .Space); // :
last_param_token += 1;
}
- if (token_tags[last_param_token] == .Keyword_anytype) {
+ if (token_tags[last_param_token] == .keyword_anytype) {
try renderToken(ais, tree, last_param_token, .Comma); // anytype
continue;
}
@@ -1435,7 +1435,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S
try renderToken(ais, tree, callconv_rparen, .Space); // )
}
- if (token_tags[maybe_bang] == .Bang) {
+ if (token_tags[maybe_bang] == .bang) {
try renderToken(ais, tree, maybe_bang, .None); // !
}
return renderExpression(ais, tree, fn_proto.ast.return_type, space);
@@ -1448,7 +1448,7 @@ fn renderSwitchCase(
space: Space,
) Error!void {
const token_tags = tree.tokens.items(.tag);
- const trailing_comma = token_tags[switch_case.ast.arrow_token - 1] == .Comma;
+ const trailing_comma = token_tags[switch_case.ast.arrow_token - 1] == .comma;
// Render everything before the arrow
if (switch_case.ast.values.len == 0) {
@@ -1473,7 +1473,7 @@ fn renderSwitchCase(
if (switch_case.payload_token) |payload_token| {
try renderToken(ais, tree, payload_token - 1, .None); // pipe
- if (token_tags[payload_token] == .Asterisk) {
+ if (token_tags[payload_token] == .asterisk) {
try renderToken(ais, tree, payload_token, .None); // asterisk
try renderToken(ais, tree, payload_token + 1, .None); // identifier
try renderToken(ais, tree, payload_token + 2, .Space); // pipe
@@ -1498,8 +1498,8 @@ fn renderBlock(
const nodes_data = tree.nodes.items(.data);
const lbrace = tree.nodes.items(.main_token)[block_node];
- if (token_tags[lbrace - 1] == .Colon and
- token_tags[lbrace - 2] == .Identifier)
+ if (token_tags[lbrace - 1] == .colon and
+ token_tags[lbrace - 2] == .identifier)
{
try renderToken(ais, tree, lbrace - 2, .None);
try renderToken(ais, tree, lbrace - 1, .Space);
@@ -1547,7 +1547,7 @@ fn renderStructInit(
}
const last_field = struct_init.ast.fields[struct_init.ast.fields.len - 1];
const last_field_token = tree.lastToken(last_field);
- if (token_tags[last_field_token + 1] == .Comma) {
+ if (token_tags[last_field_token + 1] == .comma) {
// Render one field init per line.
ais.pushIndent();
try renderToken(ais, tree, struct_init.ast.lbrace, .Newline);
@@ -1597,7 +1597,7 @@ fn renderArrayInit(
}
const last_elem = array_init.ast.elements[array_init.ast.elements.len - 1];
const last_elem_token = tree.lastToken(last_elem);
- if (token_tags[last_elem_token + 1] == .Comma) {
+ if (token_tags[last_elem_token + 1] == .comma) {
// Render one element per line.
ais.pushIndent();
try renderToken(ais, tree, array_init.ast.lbrace, .Newline);
@@ -1673,16 +1673,16 @@ fn renderContainerDecl(
const last_member = container_decl.ast.members[container_decl.ast.members.len - 1];
const last_member_token = tree.lastToken(last_member);
const rbrace = switch (token_tags[last_member_token + 1]) {
- .DocComment => last_member_token + 2,
- .Comma, .Semicolon => switch (token_tags[last_member_token + 2]) {
- .DocComment => last_member_token + 3,
- .RBrace => last_member_token + 2,
+ .doc_comment => last_member_token + 2,
+ .comma, .semicolon => switch (token_tags[last_member_token + 2]) {
+ .doc_comment => last_member_token + 3,
+ .r_brace => last_member_token + 2,
else => unreachable,
},
- .RBrace => last_member_token + 1,
+ .r_brace => last_member_token + 1,
else => unreachable,
};
- const src_has_trailing_comma = token_tags[last_member_token + 1] == .Comma;
+ const src_has_trailing_comma = token_tags[last_member_token + 1] == .comma;
if (!src_has_trailing_comma) one_line: {
// We can only print all the members in-line if all the members are fields.
@@ -1734,8 +1734,8 @@ fn renderAsm(
try renderToken(ais, tree, tok_i, .None);
tok_i += 1;
switch (token_tags[tok_i]) {
- .RParen => return renderToken(ais, tree, tok_i, space),
- .Comma => try renderToken(ais, tree, tok_i, .Space),
+ .r_paren => return renderToken(ais, tree, tok_i, space),
+ .comma => try renderToken(ais, tree, tok_i, .Space),
else => unreachable,
}
}
@@ -1775,7 +1775,7 @@ fn renderAsm(
const comma_or_colon = tree.lastToken(asm_output) + 1;
ais.popIndent();
break :colon2 switch (token_tags[comma_or_colon]) {
- .Comma => comma_or_colon + 1,
+ .comma => comma_or_colon + 1,
else => comma_or_colon,
};
}
@@ -1806,7 +1806,7 @@ fn renderAsm(
const comma_or_colon = tree.lastToken(asm_input) + 1;
ais.popIndent();
break :colon3 switch (token_tags[comma_or_colon]) {
- .Comma => comma_or_colon + 1,
+ .comma => comma_or_colon + 1,
else => comma_or_colon,
};
}
@@ -1819,13 +1819,13 @@ fn renderAsm(
var tok_i = first_clobber;
while (true) {
switch (token_tags[tok_i + 1]) {
- .RParen => {
+ .r_paren => {
ais.setIndentDelta(indent_delta);
ais.popIndent();
try renderToken(ais, tree, tok_i, .Newline);
return renderToken(ais, tree, tok_i + 1, space);
},
- .Comma => {
+ .comma => {
try renderToken(ais, tree, tok_i, .None);
try renderToken(ais, tree, tok_i + 1, .Space);
tok_i += 2;
@@ -1859,7 +1859,7 @@ fn renderCall(
const last_param = params[params.len - 1];
const after_last_param_tok = tree.lastToken(last_param) + 1;
- if (token_tags[after_last_param_tok] == .Comma) {
+ if (token_tags[after_last_param_tok] == .comma) {
ais.pushIndent();
try renderToken(ais, tree, lparen, Space.Newline); // (
for (params) |param_node, i| {
@@ -1868,7 +1868,7 @@ fn renderCall(
// Unindent the comma for multiline string literals
const is_multiline_string = node_tags[param_node] == .StringLiteral and
- token_tags[main_tokens[param_node]] == .MultilineStringLiteralLine;
+ token_tags[main_tokens[param_node]] == .multiline_string_literal_line;
if (is_multiline_string) ais.popIndent();
const comma = tree.lastToken(param_node) + 1;
@@ -1900,7 +1900,7 @@ fn renderCall(
fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void {
const token_tags = tree.tokens.items(.tag);
const maybe_comma = tree.lastToken(node) + 1;
- if (token_tags[maybe_comma] == .Comma) {
+ if (token_tags[maybe_comma] == .comma) {
try renderExpression(ais, tree, node, .None);
return renderToken(ais, tree, maybe_comma, space);
} else {
@@ -1911,7 +1911,7 @@ fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space:
fn renderTokenComma(ais: *Ais, tree: ast.Tree, token: ast.TokenIndex, space: Space) Error!void {
const token_tags = tree.tokens.items(.tag);
const maybe_comma = token + 1;
- if (token_tags[maybe_comma] == .Comma) {
+ if (token_tags[maybe_comma] == .comma) {
try renderToken(ais, tree, token, .None);
return renderToken(ais, tree, maybe_comma, space);
} else {
@@ -1962,7 +1962,7 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp
.None => _ = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]),
.Comma => {
const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]);
- if (token_tags[token_index + 1] == .Comma) {
+ if (token_tags[token_index + 1] == .comma) {
return renderToken(ais, tree, token_index + 1, .Newline);
} else if (!comment) {
return ais.insertNewline();
@@ -1970,7 +1970,7 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp
},
.CommaSpace => {
const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]);
- if (token_tags[token_index + 1] == .Comma) {
+ if (token_tags[token_index + 1] == .comma) {
return renderToken(ais, tree, token_index + 1, .Space);
} else if (!comment) {
return ais.writer().writeByte(' ');
@@ -1978,7 +1978,7 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp
},
.Semicolon => {
const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]);
- if (token_tags[token_index + 1] == .Semicolon) {
+ if (token_tags[token_index + 1] == .semicolon) {
return renderToken(ais, tree, token_index + 1, .Newline);
} else if (!comment) {
return ais.insertNewline();
@@ -2005,7 +2005,7 @@ fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error
const token_tags = tree.tokens.items(.tag);
if (end_token == 0) return;
var tok = end_token - 1;
- while (token_tags[tok] == .DocComment) {
+ while (token_tags[tok] == .doc_comment) {
if (tok == 0) break;
tok -= 1;
} else {
@@ -2016,7 +2016,7 @@ fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error
while (true) : (tok += 1) {
switch (token_tags[tok]) {
- .DocComment => {
+ .doc_comment => {
if (first_tok < end_token) {
try renderToken(ais, tree, tok, .Newline);
} else {
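A reading note on render.zig above: the .Comma, .Space, and .Newline arguments passed to renderToken are values of the renderer's own Space enum, not Token.Tag, which is why they keep their CamelCase while the token-tag comparisons beside them switch to snake_case. A small sketch of why the two never collide (simplified, assumed enum declarations, not the real ones):

const std = @import("std");

const TokenTag = enum { comma, r_paren };
const Space = enum { None, Comma, Newline };

// Enum literals are typed by context, so `.comma` here is always a TokenTag
// and the returned `.Newline`/`.None` are always Space values.
fn spaceAfter(tag: TokenTag) Space {
    return switch (tag) {
        .comma => .Newline,
        .r_paren => .None,
    };
}

test "token tags and render hints stay distinct" {
    std.debug.assert(spaceAfter(.comma) == Space.Newline);
    std.debug.assert(spaceAfter(.r_paren) == .None);
}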
diff --git a/lib/std/zig/tokenizer.zig b/lib/std/zig/tokenizer.zig
index 8e5ecc7010..dd18025efb 100644
--- a/lib/std/zig/tokenizer.zig
+++ b/lib/std/zig/tokenizer.zig
@@ -16,58 +16,58 @@ pub const Token = struct {
};
pub const keywords = std.ComptimeStringMap(Tag, .{
- .{ "align", .Keyword_align },
- .{ "allowzero", .Keyword_allowzero },
- .{ "and", .Keyword_and },
- .{ "anyframe", .Keyword_anyframe },
- .{ "anytype", .Keyword_anytype },
- .{ "asm", .Keyword_asm },
- .{ "async", .Keyword_async },
- .{ "await", .Keyword_await },
- .{ "break", .Keyword_break },
- .{ "callconv", .Keyword_callconv },
- .{ "catch", .Keyword_catch },
- .{ "comptime", .Keyword_comptime },
- .{ "const", .Keyword_const },
- .{ "continue", .Keyword_continue },
- .{ "defer", .Keyword_defer },
- .{ "else", .Keyword_else },
- .{ "enum", .Keyword_enum },
- .{ "errdefer", .Keyword_errdefer },
- .{ "error", .Keyword_error },
- .{ "export", .Keyword_export },
- .{ "extern", .Keyword_extern },
- .{ "false", .Keyword_false },
- .{ "fn", .Keyword_fn },
- .{ "for", .Keyword_for },
- .{ "if", .Keyword_if },
- .{ "inline", .Keyword_inline },
- .{ "noalias", .Keyword_noalias },
- .{ "noinline", .Keyword_noinline },
- .{ "nosuspend", .Keyword_nosuspend },
- .{ "null", .Keyword_null },
- .{ "opaque", .Keyword_opaque },
- .{ "or", .Keyword_or },
- .{ "orelse", .Keyword_orelse },
- .{ "packed", .Keyword_packed },
- .{ "pub", .Keyword_pub },
- .{ "resume", .Keyword_resume },
- .{ "return", .Keyword_return },
- .{ "linksection", .Keyword_linksection },
- .{ "struct", .Keyword_struct },
- .{ "suspend", .Keyword_suspend },
- .{ "switch", .Keyword_switch },
- .{ "test", .Keyword_test },
- .{ "threadlocal", .Keyword_threadlocal },
- .{ "true", .Keyword_true },
- .{ "try", .Keyword_try },
- .{ "undefined", .Keyword_undefined },
- .{ "union", .Keyword_union },
- .{ "unreachable", .Keyword_unreachable },
- .{ "usingnamespace", .Keyword_usingnamespace },
- .{ "var", .Keyword_var },
- .{ "volatile", .Keyword_volatile },
- .{ "while", .Keyword_while },
+ .{ "align", .keyword_align },
+ .{ "allowzero", .keyword_allowzero },
+ .{ "and", .keyword_and },
+ .{ "anyframe", .keyword_anyframe },
+ .{ "anytype", .keyword_anytype },
+ .{ "asm", .keyword_asm },
+ .{ "async", .keyword_async },
+ .{ "await", .keyword_await },
+ .{ "break", .keyword_break },
+ .{ "callconv", .keyword_callconv },
+ .{ "catch", .keyword_catch },
+ .{ "comptime", .keyword_comptime },
+ .{ "const", .keyword_const },
+ .{ "continue", .keyword_continue },
+ .{ "defer", .keyword_defer },
+ .{ "else", .keyword_else },
+ .{ "enum", .keyword_enum },
+ .{ "errdefer", .keyword_errdefer },
+ .{ "error", .keyword_error },
+ .{ "export", .keyword_export },
+ .{ "extern", .keyword_extern },
+ .{ "false", .keyword_false },
+ .{ "fn", .keyword_fn },
+ .{ "for", .keyword_for },
+ .{ "if", .keyword_if },
+ .{ "inline", .keyword_inline },
+ .{ "noalias", .keyword_noalias },
+ .{ "noinline", .keyword_noinline },
+ .{ "nosuspend", .keyword_nosuspend },
+ .{ "null", .keyword_null },
+ .{ "opaque", .keyword_opaque },
+ .{ "or", .keyword_or },
+ .{ "orelse", .keyword_orelse },
+ .{ "packed", .keyword_packed },
+ .{ "pub", .keyword_pub },
+ .{ "resume", .keyword_resume },
+ .{ "return", .keyword_return },
+ .{ "linksection", .keyword_linksection },
+ .{ "struct", .keyword_struct },
+ .{ "suspend", .keyword_suspend },
+ .{ "switch", .keyword_switch },
+ .{ "test", .keyword_test },
+ .{ "threadlocal", .keyword_threadlocal },
+ .{ "true", .keyword_true },
+ .{ "try", .keyword_try },
+ .{ "undefined", .keyword_undefined },
+ .{ "union", .keyword_union },
+ .{ "unreachable", .keyword_unreachable },
+ .{ "usingnamespace", .keyword_usingnamespace },
+ .{ "var", .keyword_var },
+ .{ "volatile", .keyword_volatile },
+ .{ "while", .keyword_while },
});
pub fn getKeyword(bytes: []const u8) ?Tag {
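
The body of getKeyword is elided by the diff context; presumably it just delegates to the map (keywords.get(bytes)). A self-contained sketch of the lookup pattern with an illustrative keyword subset:

    const std = @import("std");

    const Tag = enum { keyword_and, keyword_while, identifier };

    // Illustrative subset; the real map covers every Zig keyword.
    const keywords = std.ComptimeStringMap(Tag, .{
        .{ "and", .keyword_and },
        .{ "while", .keyword_while },
    });

    fn getKeyword(bytes: []const u8) ?Tag {
        return keywords.get(bytes); // assumption: mirrors the elided std body
    }

    test "keyword lookup" {
        std.debug.assert(getKeyword("while").? == .keyword_while);
        std.debug.assert(getKeyword("foo") == null);
    }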
@@ -75,249 +75,249 @@ pub const Token = struct {
}
pub const Tag = enum {
- Invalid,
- Invalid_ampersands,
- Invalid_periodasterisks,
- Identifier,
- StringLiteral,
- MultilineStringLiteralLine,
- CharLiteral,
- Eof,
- Builtin,
- Bang,
- Pipe,
- PipePipe,
- PipeEqual,
- Equal,
- EqualEqual,
- EqualAngleBracketRight,
- BangEqual,
- LParen,
- RParen,
- Semicolon,
- Percent,
- PercentEqual,
- LBrace,
- RBrace,
- LBracket,
- RBracket,
- Period,
- PeriodAsterisk,
- Ellipsis2,
- Ellipsis3,
- Caret,
- CaretEqual,
- Plus,
- PlusPlus,
- PlusEqual,
- PlusPercent,
- PlusPercentEqual,
- Minus,
- MinusEqual,
- MinusPercent,
- MinusPercentEqual,
- Asterisk,
- AsteriskEqual,
- AsteriskAsterisk,
- AsteriskPercent,
- AsteriskPercentEqual,
- Arrow,
- Colon,
- Slash,
- SlashEqual,
- Comma,
- Ampersand,
- AmpersandEqual,
- QuestionMark,
- AngleBracketLeft,
- AngleBracketLeftEqual,
- AngleBracketAngleBracketLeft,
- AngleBracketAngleBracketLeftEqual,
- AngleBracketRight,
- AngleBracketRightEqual,
- AngleBracketAngleBracketRight,
- AngleBracketAngleBracketRightEqual,
- Tilde,
- IntegerLiteral,
- FloatLiteral,
- DocComment,
- ContainerDocComment,
- Keyword_align,
- Keyword_allowzero,
- Keyword_and,
- Keyword_anyframe,
- Keyword_anytype,
- Keyword_asm,
- Keyword_async,
- Keyword_await,
- Keyword_break,
- Keyword_callconv,
- Keyword_catch,
- Keyword_comptime,
- Keyword_const,
- Keyword_continue,
- Keyword_defer,
- Keyword_else,
- Keyword_enum,
- Keyword_errdefer,
- Keyword_error,
- Keyword_export,
- Keyword_extern,
- Keyword_false,
- Keyword_fn,
- Keyword_for,
- Keyword_if,
- Keyword_inline,
- Keyword_noalias,
- Keyword_noinline,
- Keyword_nosuspend,
- Keyword_null,
- Keyword_opaque,
- Keyword_or,
- Keyword_orelse,
- Keyword_packed,
- Keyword_pub,
- Keyword_resume,
- Keyword_return,
- Keyword_linksection,
- Keyword_struct,
- Keyword_suspend,
- Keyword_switch,
- Keyword_test,
- Keyword_threadlocal,
- Keyword_true,
- Keyword_try,
- Keyword_undefined,
- Keyword_union,
- Keyword_unreachable,
- Keyword_usingnamespace,
- Keyword_var,
- Keyword_volatile,
- Keyword_while,
+ invalid,
+ invalid_ampersands,
+ invalid_periodasterisks,
+ identifier,
+ string_literal,
+ multiline_string_literal_line,
+ char_literal,
+ eof,
+ builtin,
+ bang,
+ pipe,
+ pipe_pipe,
+ pipe_equal,
+ equal,
+ equal_equal,
+ equal_angle_bracket_right,
+ bang_equal,
+ l_paren,
+ r_paren,
+ semicolon,
+ percent,
+ percent_equal,
+ l_brace,
+ r_brace,
+ l_bracket,
+ r_bracket,
+ period,
+ period_asterisk,
+ ellipsis2,
+ ellipsis3,
+ caret,
+ caret_equal,
+ plus,
+ plus_plus,
+ plus_equal,
+ plus_percent,
+ plus_percent_equal,
+ minus,
+ minus_equal,
+ minus_percent,
+ minus_percent_equal,
+ asterisk,
+ asterisk_equal,
+ asterisk_asterisk,
+ asterisk_percent,
+ asterisk_percent_equal,
+ arrow,
+ colon,
+ slash,
+ slash_equal,
+ comma,
+ ampersand,
+ ampersand_equal,
+ question_mark,
+ angle_bracket_left,
+ angle_bracket_left_equal,
+ angle_bracket_angle_bracket_left,
+ angle_bracket_angle_bracket_left_equal,
+ angle_bracket_right,
+ angle_bracket_right_equal,
+ angle_bracket_angle_bracket_right,
+ angle_bracket_angle_bracket_right_equal,
+ tilde,
+ integer_literal,
+ float_literal,
+ doc_comment,
+ container_doc_comment,
+ keyword_align,
+ keyword_allowzero,
+ keyword_and,
+ keyword_anyframe,
+ keyword_anytype,
+ keyword_asm,
+ keyword_async,
+ keyword_await,
+ keyword_break,
+ keyword_callconv,
+ keyword_catch,
+ keyword_comptime,
+ keyword_const,
+ keyword_continue,
+ keyword_defer,
+ keyword_else,
+ keyword_enum,
+ keyword_errdefer,
+ keyword_error,
+ keyword_export,
+ keyword_extern,
+ keyword_false,
+ keyword_fn,
+ keyword_for,
+ keyword_if,
+ keyword_inline,
+ keyword_noalias,
+ keyword_noinline,
+ keyword_nosuspend,
+ keyword_null,
+ keyword_opaque,
+ keyword_or,
+ keyword_orelse,
+ keyword_packed,
+ keyword_pub,
+ keyword_resume,
+ keyword_return,
+ keyword_linksection,
+ keyword_struct,
+ keyword_suspend,
+ keyword_switch,
+ keyword_test,
+ keyword_threadlocal,
+ keyword_true,
+ keyword_try,
+ keyword_undefined,
+ keyword_union,
+ keyword_unreachable,
+ keyword_usingnamespace,
+ keyword_var,
+ keyword_volatile,
+ keyword_while,
pub fn lexeme(tag: Tag) ?[]const u8 {
return switch (tag) {
- .Invalid,
- .Identifier,
- .StringLiteral,
- .MultilineStringLiteralLine,
- .CharLiteral,
- .Eof,
- .Builtin,
- .IntegerLiteral,
- .FloatLiteral,
- .DocComment,
- .ContainerDocComment,
+ .invalid,
+ .identifier,
+ .string_literal,
+ .multiline_string_literal_line,
+ .char_literal,
+ .eof,
+ .builtin,
+ .integer_literal,
+ .float_literal,
+ .doc_comment,
+ .container_doc_comment,
=> null,
- .Invalid_ampersands => "&&",
- .Invalid_periodasterisks => ".**",
- .Bang => "!",
- .Pipe => "|",
- .PipePipe => "||",
- .PipeEqual => "|=",
- .Equal => "=",
- .EqualEqual => "==",
- .EqualAngleBracketRight => "=>",
- .BangEqual => "!=",
- .LParen => "(",
- .RParen => ")",
- .Semicolon => ";",
- .Percent => "%",
- .PercentEqual => "%=",
- .LBrace => "{",
- .RBrace => "}",
- .LBracket => "[",
- .RBracket => "]",
- .Period => ".",
- .PeriodAsterisk => ".*",
- .Ellipsis2 => "..",
- .Ellipsis3 => "...",
- .Caret => "^",
- .CaretEqual => "^=",
- .Plus => "+",
- .PlusPlus => "++",
- .PlusEqual => "+=",
- .PlusPercent => "+%",
- .PlusPercentEqual => "+%=",
- .Minus => "-",
- .MinusEqual => "-=",
- .MinusPercent => "-%",
- .MinusPercentEqual => "-%=",
- .Asterisk => "*",
- .AsteriskEqual => "*=",
- .AsteriskAsterisk => "**",
- .AsteriskPercent => "*%",
- .AsteriskPercentEqual => "*%=",
- .Arrow => "->",
- .Colon => ":",
- .Slash => "/",
- .SlashEqual => "/=",
- .Comma => ",",
- .Ampersand => "&",
- .AmpersandEqual => "&=",
- .QuestionMark => "?",
- .AngleBracketLeft => "<",
- .AngleBracketLeftEqual => "<=",
- .AngleBracketAngleBracketLeft => "<<",
- .AngleBracketAngleBracketLeftEqual => "<<=",
- .AngleBracketRight => ">",
- .AngleBracketRightEqual => ">=",
- .AngleBracketAngleBracketRight => ">>",
- .AngleBracketAngleBracketRightEqual => ">>=",
- .Tilde => "~",
- .Keyword_align => "align",
- .Keyword_allowzero => "allowzero",
- .Keyword_and => "and",
- .Keyword_anyframe => "anyframe",
- .Keyword_anytype => "anytype",
- .Keyword_asm => "asm",
- .Keyword_async => "async",
- .Keyword_await => "await",
- .Keyword_break => "break",
- .Keyword_callconv => "callconv",
- .Keyword_catch => "catch",
- .Keyword_comptime => "comptime",
- .Keyword_const => "const",
- .Keyword_continue => "continue",
- .Keyword_defer => "defer",
- .Keyword_else => "else",
- .Keyword_enum => "enum",
- .Keyword_errdefer => "errdefer",
- .Keyword_error => "error",
- .Keyword_export => "export",
- .Keyword_extern => "extern",
- .Keyword_false => "false",
- .Keyword_fn => "fn",
- .Keyword_for => "for",
- .Keyword_if => "if",
- .Keyword_inline => "inline",
- .Keyword_noalias => "noalias",
- .Keyword_noinline => "noinline",
- .Keyword_nosuspend => "nosuspend",
- .Keyword_null => "null",
- .Keyword_opaque => "opaque",
- .Keyword_or => "or",
- .Keyword_orelse => "orelse",
- .Keyword_packed => "packed",
- .Keyword_pub => "pub",
- .Keyword_resume => "resume",
- .Keyword_return => "return",
- .Keyword_linksection => "linksection",
- .Keyword_struct => "struct",
- .Keyword_suspend => "suspend",
- .Keyword_switch => "switch",
- .Keyword_test => "test",
- .Keyword_threadlocal => "threadlocal",
- .Keyword_true => "true",
- .Keyword_try => "try",
- .Keyword_undefined => "undefined",
- .Keyword_union => "union",
- .Keyword_unreachable => "unreachable",
- .Keyword_usingnamespace => "usingnamespace",
- .Keyword_var => "var",
- .Keyword_volatile => "volatile",
- .Keyword_while => "while",
+ .invalid_ampersands => "&&",
+ .invalid_periodasterisks => ".**",
+ .bang => "!",
+ .pipe => "|",
+ .pipe_pipe => "||",
+ .pipe_equal => "|=",
+ .equal => "=",
+ .equal_equal => "==",
+ .equal_angle_bracket_right => "=>",
+ .bang_equal => "!=",
+ .l_paren => "(",
+ .r_paren => ")",
+ .semicolon => ";",
+ .percent => "%",
+ .percent_equal => "%=",
+ .l_brace => "{",
+ .r_brace => "}",
+ .l_bracket => "[",
+ .r_bracket => "]",
+ .period => ".",
+ .period_asterisk => ".*",
+ .ellipsis2 => "..",
+ .ellipsis3 => "...",
+ .caret => "^",
+ .caret_equal => "^=",
+ .plus => "+",
+ .plus_plus => "++",
+ .plus_equal => "+=",
+ .plus_percent => "+%",
+ .plus_percent_equal => "+%=",
+ .minus => "-",
+ .minus_equal => "-=",
+ .minus_percent => "-%",
+ .minus_percent_equal => "-%=",
+ .asterisk => "*",
+ .asterisk_equal => "*=",
+ .asterisk_asterisk => "**",
+ .asterisk_percent => "*%",
+ .asterisk_percent_equal => "*%=",
+ .arrow => "->",
+ .colon => ":",
+ .slash => "/",
+ .slash_equal => "/=",
+ .comma => ",",
+ .ampersand => "&",
+ .ampersand_equal => "&=",
+ .question_mark => "?",
+ .angle_bracket_left => "<",
+ .angle_bracket_left_equal => "<=",
+ .angle_bracket_angle_bracket_left => "<<",
+ .angle_bracket_angle_bracket_left_equal => "<<=",
+ .angle_bracket_right => ">",
+ .angle_bracket_right_equal => ">=",
+ .angle_bracket_angle_bracket_right => ">>",
+ .angle_bracket_angle_bracket_right_equal => ">>=",
+ .tilde => "~",
+ .keyword_align => "align",
+ .keyword_allowzero => "allowzero",
+ .keyword_and => "and",
+ .keyword_anyframe => "anyframe",
+ .keyword_anytype => "anytype",
+ .keyword_asm => "asm",
+ .keyword_async => "async",
+ .keyword_await => "await",
+ .keyword_break => "break",
+ .keyword_callconv => "callconv",
+ .keyword_catch => "catch",
+ .keyword_comptime => "comptime",
+ .keyword_const => "const",
+ .keyword_continue => "continue",
+ .keyword_defer => "defer",
+ .keyword_else => "else",
+ .keyword_enum => "enum",
+ .keyword_errdefer => "errdefer",
+ .keyword_error => "error",
+ .keyword_export => "export",
+ .keyword_extern => "extern",
+ .keyword_false => "false",
+ .keyword_fn => "fn",
+ .keyword_for => "for",
+ .keyword_if => "if",
+ .keyword_inline => "inline",
+ .keyword_noalias => "noalias",
+ .keyword_noinline => "noinline",
+ .keyword_nosuspend => "nosuspend",
+ .keyword_null => "null",
+ .keyword_opaque => "opaque",
+ .keyword_or => "or",
+ .keyword_orelse => "orelse",
+ .keyword_packed => "packed",
+ .keyword_pub => "pub",
+ .keyword_resume => "resume",
+ .keyword_return => "return",
+ .keyword_linksection => "linksection",
+ .keyword_struct => "struct",
+ .keyword_suspend => "suspend",
+ .keyword_switch => "switch",
+ .keyword_test => "test",
+ .keyword_threadlocal => "threadlocal",
+ .keyword_true => "true",
+ .keyword_try => "try",
+ .keyword_undefined => "undefined",
+ .keyword_union => "union",
+ .keyword_unreachable => "unreachable",
+ .keyword_usingnamespace => "usingnamespace",
+ .keyword_var => "var",
+ .keyword_volatile => "volatile",
+ .keyword_while => "while",
};
}
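
The lexeme table splits the tags into two groups: tags whose source text is fixed return that text, while tags whose text varies per token (identifiers, literals, comments, eof) return null, and callers must slice the source via token.loc.start..token.loc.end instead. A condensed sketch of the same shape:

    const Tag = enum {
        identifier,
        semicolon,
        keyword_while,

        // fixed-source tags return their text, variable ones return null
        fn lexeme(tag: Tag) ?[]const u8 {
            return switch (tag) {
                .identifier => null,
                .semicolon => ";",
                .keyword_while => "while",
            };
        }
    };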
@@ -421,7 +421,7 @@ pub const Tokenizer = struct {
const start_index = self.index;
var state: State = .start;
var result = Token{
- .tag = .Eof,
+ .tag = .eof,
.loc = .{
.start = self.index,
.end = undefined,
@@ -438,14 +438,14 @@ pub const Tokenizer = struct {
},
'"' => {
state = .string_literal;
- result.tag = .StringLiteral;
+ result.tag = .string_literal;
},
'\'' => {
state = .char_literal;
},
'a'...'z', 'A'...'Z', '_' => {
state = .identifier;
- result.tag = .Identifier;
+ result.tag = .identifier;
},
'@' => {
state = .saw_at_sign;
@@ -460,42 +460,42 @@ pub const Tokenizer = struct {
state = .pipe;
},
'(' => {
- result.tag = .LParen;
+ result.tag = .l_paren;
self.index += 1;
break;
},
')' => {
- result.tag = .RParen;
+ result.tag = .r_paren;
self.index += 1;
break;
},
'[' => {
- result.tag = .LBracket;
+ result.tag = .l_bracket;
self.index += 1;
break;
},
']' => {
- result.tag = .RBracket;
+ result.tag = .r_bracket;
self.index += 1;
break;
},
';' => {
- result.tag = .Semicolon;
+ result.tag = .semicolon;
self.index += 1;
break;
},
',' => {
- result.tag = .Comma;
+ result.tag = .comma;
self.index += 1;
break;
},
'?' => {
- result.tag = .QuestionMark;
+ result.tag = .question_mark;
self.index += 1;
break;
},
':' => {
- result.tag = .Colon;
+ result.tag = .colon;
self.index += 1;
break;
},
@@ -519,20 +519,20 @@ pub const Tokenizer = struct {
},
'\\' => {
state = .backslash;
- result.tag = .MultilineStringLiteralLine;
+ result.tag = .multiline_string_literal_line;
},
'{' => {
- result.tag = .LBrace;
+ result.tag = .l_brace;
self.index += 1;
break;
},
'}' => {
- result.tag = .RBrace;
+ result.tag = .r_brace;
self.index += 1;
break;
},
'~' => {
- result.tag = .Tilde;
+ result.tag = .tilde;
self.index += 1;
break;
},
@@ -550,14 +550,14 @@ pub const Tokenizer = struct {
},
'0' => {
state = .zero;
- result.tag = .IntegerLiteral;
+ result.tag = .integer_literal;
},
'1'...'9' => {
state = .int_literal_dec;
- result.tag = .IntegerLiteral;
+ result.tag = .integer_literal;
},
else => {
- result.tag = .Invalid;
+ result.tag = .invalid;
self.index += 1;
break;
},
@@ -565,42 +565,42 @@ pub const Tokenizer = struct {
.saw_at_sign => switch (c) {
'"' => {
- result.tag = .Identifier;
+ result.tag = .identifier;
state = .string_literal;
},
else => {
// reinterpret as a builtin
self.index -= 1;
state = .builtin;
- result.tag = .Builtin;
+ result.tag = .builtin;
},
},
.ampersand => switch (c) {
'&' => {
- result.tag = .Invalid_ampersands;
+ result.tag = .invalid_ampersands;
self.index += 1;
break;
},
'=' => {
- result.tag = .AmpersandEqual;
+ result.tag = .ampersand_equal;
self.index += 1;
break;
},
else => {
- result.tag = .Ampersand;
+ result.tag = .ampersand;
break;
},
},
.asterisk => switch (c) {
'=' => {
- result.tag = .AsteriskEqual;
+ result.tag = .asterisk_equal;
self.index += 1;
break;
},
'*' => {
- result.tag = .AsteriskAsterisk;
+ result.tag = .asterisk_asterisk;
self.index += 1;
break;
},
@@ -608,43 +608,43 @@ pub const Tokenizer = struct {
state = .asterisk_percent;
},
else => {
- result.tag = .Asterisk;
+ result.tag = .asterisk;
break;
},
},
.asterisk_percent => switch (c) {
'=' => {
- result.tag = .AsteriskPercentEqual;
+ result.tag = .asterisk_percent_equal;
self.index += 1;
break;
},
else => {
- result.tag = .AsteriskPercent;
+ result.tag = .asterisk_percent;
break;
},
},
.percent => switch (c) {
'=' => {
- result.tag = .PercentEqual;
+ result.tag = .percent_equal;
self.index += 1;
break;
},
else => {
- result.tag = .Percent;
+ result.tag = .percent;
break;
},
},
.plus => switch (c) {
'=' => {
- result.tag = .PlusEqual;
+ result.tag = .plus_equal;
self.index += 1;
break;
},
'+' => {
- result.tag = .PlusPlus;
+ result.tag = .plus_plus;
self.index += 1;
break;
},
@@ -652,31 +652,31 @@ pub const Tokenizer = struct {
state = .plus_percent;
},
else => {
- result.tag = .Plus;
+ result.tag = .plus;
break;
},
},
.plus_percent => switch (c) {
'=' => {
- result.tag = .PlusPercentEqual;
+ result.tag = .plus_percent_equal;
self.index += 1;
break;
},
else => {
- result.tag = .PlusPercent;
+ result.tag = .plus_percent;
break;
},
},
.caret => switch (c) {
'=' => {
- result.tag = .CaretEqual;
+ result.tag = .caret_equal;
self.index += 1;
break;
},
else => {
- result.tag = .Caret;
+ result.tag = .caret;
break;
},
},
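
These cases all follow the same one-byte-lookahead pattern: seeing '+' enters a tentative state, and the next byte decides between .plus, .plus_equal, .plus_plus, and .plus_percent. Note that self.index advances past the second byte only when it belongs to the token; otherwise it is left to start the next token. A minimal sketch of the pattern, assuming a hypothetical helper rather than the real state machine:

    const std = @import("std");

    const Tag = enum { plus, plus_equal, plus_plus, eof };

    fn nextPlus(src: []const u8, i: *usize) Tag {
        if (i.* >= src.len) return .eof;
        std.debug.assert(src[i.*] == '+');
        i.* += 1;
        if (i.* < src.len) {
            switch (src[i.*]) {
                '=' => {
                    i.* += 1; // '=' belongs to the token: consume it
                    return .plus_equal;
                },
                '+' => {
                    i.* += 1;
                    return .plus_plus;
                },
                else => {},
            }
        }
        return .plus; // next byte not consumed; it starts the next token
    }

    test "one-byte lookahead" {
        var i: usize = 0;
        std.debug.assert(nextPlus("+= x", &i) == .plus_equal);
        std.debug.assert(i == 2);
    }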
@@ -724,7 +724,7 @@ pub const Tokenizer = struct {
state = .char_literal_backslash;
},
'\'', 0x80...0xbf, 0xf8...0xff => {
- result.tag = .Invalid;
+ result.tag = .invalid;
break;
},
0xc0...0xdf => { // 110xxxxx
@@ -746,7 +746,7 @@ pub const Tokenizer = struct {
.char_literal_backslash => switch (c) {
'\n' => {
- result.tag = .Invalid;
+ result.tag = .invalid;
break;
},
'x' => {
@@ -769,7 +769,7 @@ pub const Tokenizer = struct {
}
},
else => {
- result.tag = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -780,7 +780,7 @@ pub const Tokenizer = struct {
seen_escape_digits = 0;
},
else => {
- result.tag = .Invalid;
+ result.tag = .invalid;
state = .char_literal_unicode_invalid;
},
},
@@ -791,14 +791,14 @@ pub const Tokenizer = struct {
},
'}' => {
if (seen_escape_digits == 0) {
- result.tag = .Invalid;
+ result.tag = .invalid;
state = .char_literal_unicode_invalid;
} else {
state = .char_literal_end;
}
},
else => {
- result.tag = .Invalid;
+ result.tag = .invalid;
state = .char_literal_unicode_invalid;
},
},
@@ -813,12 +813,12 @@ pub const Tokenizer = struct {
.char_literal_end => switch (c) {
'\'' => {
- result.tag = .CharLiteral;
+ result.tag = .char_literal;
self.index += 1;
break;
},
else => {
- result.tag = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -831,7 +831,7 @@ pub const Tokenizer = struct {
}
},
else => {
- result.tag = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -847,58 +847,58 @@ pub const Tokenizer = struct {
.bang => switch (c) {
'=' => {
- result.tag = .BangEqual;
+ result.tag = .bang_equal;
self.index += 1;
break;
},
else => {
- result.tag = .Bang;
+ result.tag = .bang;
break;
},
},
.pipe => switch (c) {
'=' => {
- result.tag = .PipeEqual;
+ result.tag = .pipe_equal;
self.index += 1;
break;
},
'|' => {
- result.tag = .PipePipe;
+ result.tag = .pipe_pipe;
self.index += 1;
break;
},
else => {
- result.tag = .Pipe;
+ result.tag = .pipe;
break;
},
},
.equal => switch (c) {
'=' => {
- result.tag = .EqualEqual;
+ result.tag = .equal_equal;
self.index += 1;
break;
},
'>' => {
- result.tag = .EqualAngleBracketRight;
+ result.tag = .equal_angle_bracket_right;
self.index += 1;
break;
},
else => {
- result.tag = .Equal;
+ result.tag = .equal;
break;
},
},
.minus => switch (c) {
'>' => {
- result.tag = .Arrow;
+ result.tag = .arrow;
self.index += 1;
break;
},
'=' => {
- result.tag = .MinusEqual;
+ result.tag = .minus_equal;
self.index += 1;
break;
},
@@ -906,19 +906,19 @@ pub const Tokenizer = struct {
state = .minus_percent;
},
else => {
- result.tag = .Minus;
+ result.tag = .minus;
break;
},
},
.minus_percent => switch (c) {
'=' => {
- result.tag = .MinusPercentEqual;
+ result.tag = .minus_percent_equal;
self.index += 1;
break;
},
else => {
- result.tag = .MinusPercent;
+ result.tag = .minus_percent;
break;
},
},
@@ -928,24 +928,24 @@ pub const Tokenizer = struct {
state = .angle_bracket_angle_bracket_left;
},
'=' => {
- result.tag = .AngleBracketLeftEqual;
+ result.tag = .angle_bracket_left_equal;
self.index += 1;
break;
},
else => {
- result.tag = .AngleBracketLeft;
+ result.tag = .angle_bracket_left;
break;
},
},
.angle_bracket_angle_bracket_left => switch (c) {
'=' => {
- result.tag = .AngleBracketAngleBracketLeftEqual;
+ result.tag = .angle_bracket_angle_bracket_left_equal;
self.index += 1;
break;
},
else => {
- result.tag = .AngleBracketAngleBracketLeft;
+ result.tag = .angle_bracket_angle_bracket_left;
break;
},
},
@@ -955,24 +955,24 @@ pub const Tokenizer = struct {
state = .angle_bracket_angle_bracket_right;
},
'=' => {
- result.tag = .AngleBracketRightEqual;
+ result.tag = .angle_bracket_right_equal;
self.index += 1;
break;
},
else => {
- result.tag = .AngleBracketRight;
+ result.tag = .angle_bracket_right;
break;
},
},
.angle_bracket_angle_bracket_right => switch (c) {
'=' => {
- result.tag = .AngleBracketAngleBracketRightEqual;
+ result.tag = .angle_bracket_angle_bracket_right_equal;
self.index += 1;
break;
},
else => {
- result.tag = .AngleBracketAngleBracketRight;
+ result.tag = .angle_bracket_angle_bracket_right;
break;
},
},
@@ -985,30 +985,30 @@ pub const Tokenizer = struct {
state = .period_asterisk;
},
else => {
- result.tag = .Period;
+ result.tag = .period;
break;
},
},
.period_2 => switch (c) {
'.' => {
- result.tag = .Ellipsis3;
+ result.tag = .ellipsis3;
self.index += 1;
break;
},
else => {
- result.tag = .Ellipsis2;
+ result.tag = .ellipsis2;
break;
},
},
.period_asterisk => switch (c) {
'*' => {
- result.tag = .Invalid_periodasterisks;
+ result.tag = .invalid_periodasterisks;
break;
},
else => {
- result.tag = .PeriodAsterisk;
+ result.tag = .period_asterisk;
break;
},
},
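
The '.'-prefixed states resolve by maximal munch, with one deliberate exception: ".**" becomes .invalid_periodasterisks rather than .period_asterisk followed by '*', so ambiguous dereference-then-repeat spellings are rejected (see the "pointer dereference followed by asterisk" tests below). A flat sketch of the same disambiguation, outside the state machine:

    const std = @import("std");

    const Tag = enum { period, ellipsis2, ellipsis3, period_asterisk, invalid_periodasterisks };

    fn classifyDots(src: []const u8) Tag {
        std.debug.assert(src.len > 0 and src[0] == '.');
        if (src.len >= 3 and src[1] == '.' and src[2] == '.') return .ellipsis3;
        if (src.len >= 2 and src[1] == '.') return .ellipsis2;
        if (src.len >= 3 and src[1] == '*' and src[2] == '*') return .invalid_periodasterisks;
        if (src.len >= 2 and src[1] == '*') return .period_asterisk;
        return .period;
    }

    test "dot disambiguation" {
        std.debug.assert(classifyDots(".**") == .invalid_periodasterisks);
        std.debug.assert(classifyDots("...") == .ellipsis3);
    }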
@@ -1018,12 +1018,12 @@ pub const Tokenizer = struct {
state = .line_comment_start;
},
'=' => {
- result.tag = .SlashEqual;
+ result.tag = .slash_equal;
self.index += 1;
break;
},
else => {
- result.tag = .Slash;
+ result.tag = .slash;
break;
},
},
@@ -1032,7 +1032,7 @@ pub const Tokenizer = struct {
state = .doc_comment_start;
},
'!' => {
- result.tag = .ContainerDocComment;
+ result.tag = .container_doc_comment;
state = .container_doc_comment;
},
'\n' => {
@@ -1050,16 +1050,16 @@ pub const Tokenizer = struct {
state = .line_comment;
},
'\n' => {
- result.tag = .DocComment;
+ result.tag = .doc_comment;
break;
},
'\t', '\r' => {
state = .doc_comment;
- result.tag = .DocComment;
+ result.tag = .doc_comment;
},
else => {
state = .doc_comment;
- result.tag = .DocComment;
+ result.tag = .doc_comment;
self.checkLiteralCharacter();
},
},
@@ -1093,7 +1093,7 @@ pub const Tokenizer = struct {
},
else => {
if (isIdentifierChar(c)) {
- result.tag = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1103,7 +1103,7 @@ pub const Tokenizer = struct {
state = .int_literal_bin;
},
else => {
- result.tag = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -1114,7 +1114,7 @@ pub const Tokenizer = struct {
'0'...'1' => {},
else => {
if (isIdentifierChar(c)) {
- result.tag = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1124,7 +1124,7 @@ pub const Tokenizer = struct {
state = .int_literal_oct;
},
else => {
- result.tag = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -1135,7 +1135,7 @@ pub const Tokenizer = struct {
'0'...'7' => {},
else => {
if (isIdentifierChar(c)) {
- result.tag = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1145,7 +1145,7 @@ pub const Tokenizer = struct {
state = .int_literal_dec;
},
else => {
- result.tag = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -1155,16 +1155,16 @@ pub const Tokenizer = struct {
},
'.' => {
state = .num_dot_dec;
- result.tag = .FloatLiteral;
+ result.tag = .float_literal;
},
'e', 'E' => {
state = .float_exponent_unsigned;
- result.tag = .FloatLiteral;
+ result.tag = .float_literal;
},
'0'...'9' => {},
else => {
if (isIdentifierChar(c)) {
- result.tag = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1174,7 +1174,7 @@ pub const Tokenizer = struct {
state = .int_literal_hex;
},
else => {
- result.tag = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -1184,23 +1184,23 @@ pub const Tokenizer = struct {
},
'.' => {
state = .num_dot_hex;
- result.tag = .FloatLiteral;
+ result.tag = .float_literal;
},
'p', 'P' => {
state = .float_exponent_unsigned;
- result.tag = .FloatLiteral;
+ result.tag = .float_literal;
},
'0'...'9', 'a'...'f', 'A'...'F' => {},
else => {
if (isIdentifierChar(c)) {
- result.tag = .Invalid;
+ result.tag = .invalid;
}
break;
},
},
.num_dot_dec => switch (c) {
'.' => {
- result.tag = .IntegerLiteral;
+ result.tag = .integer_literal;
self.index -= 1;
state = .start;
break;
@@ -1213,14 +1213,14 @@ pub const Tokenizer = struct {
},
else => {
if (isIdentifierChar(c)) {
- result.tag = .Invalid;
+ result.tag = .invalid;
}
break;
},
},
.num_dot_hex => switch (c) {
'.' => {
- result.tag = .IntegerLiteral;
+ result.tag = .integer_literal;
self.index -= 1;
state = .start;
break;
@@ -1229,12 +1229,12 @@ pub const Tokenizer = struct {
state = .float_exponent_unsigned;
},
'0'...'9', 'a'...'f', 'A'...'F' => {
- result.tag = .FloatLiteral;
+ result.tag = .float_literal;
state = .float_fraction_hex;
},
else => {
if (isIdentifierChar(c)) {
- result.tag = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1244,7 +1244,7 @@ pub const Tokenizer = struct {
state = .float_fraction_dec;
},
else => {
- result.tag = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -1258,7 +1258,7 @@ pub const Tokenizer = struct {
'0'...'9' => {},
else => {
if (isIdentifierChar(c)) {
- result.tag = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1268,7 +1268,7 @@ pub const Tokenizer = struct {
state = .float_fraction_hex;
},
else => {
- result.tag = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -1282,7 +1282,7 @@ pub const Tokenizer = struct {
'0'...'9', 'a'...'f', 'A'...'F' => {},
else => {
if (isIdentifierChar(c)) {
- result.tag = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1302,7 +1302,7 @@ pub const Tokenizer = struct {
state = .float_exponent_num;
},
else => {
- result.tag = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -1313,7 +1313,7 @@ pub const Tokenizer = struct {
'0'...'9' => {},
else => {
if (isIdentifierChar(c)) {
- result.tag = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1344,10 +1344,10 @@ pub const Tokenizer = struct {
}
},
.doc_comment, .doc_comment_start => {
- result.tag = .DocComment;
+ result.tag = .doc_comment;
},
.container_doc_comment => {
- result.tag = .ContainerDocComment;
+ result.tag = .container_doc_comment;
},
.int_literal_dec_no_underscore,
@@ -1370,76 +1370,76 @@ pub const Tokenizer = struct {
.char_literal_unicode,
.string_literal_backslash,
=> {
- result.tag = .Invalid;
+ result.tag = .invalid;
},
.equal => {
- result.tag = .Equal;
+ result.tag = .equal;
},
.bang => {
- result.tag = .Bang;
+ result.tag = .bang;
},
.minus => {
- result.tag = .Minus;
+ result.tag = .minus;
},
.slash => {
- result.tag = .Slash;
+ result.tag = .slash;
},
.zero => {
- result.tag = .IntegerLiteral;
+ result.tag = .integer_literal;
},
.ampersand => {
- result.tag = .Ampersand;
+ result.tag = .ampersand;
},
.period => {
- result.tag = .Period;
+ result.tag = .period;
},
.period_2 => {
- result.tag = .Ellipsis2;
+ result.tag = .ellipsis2;
},
.period_asterisk => {
- result.tag = .PeriodAsterisk;
+ result.tag = .period_asterisk;
},
.pipe => {
- result.tag = .Pipe;
+ result.tag = .pipe;
},
.angle_bracket_angle_bracket_right => {
- result.tag = .AngleBracketAngleBracketRight;
+ result.tag = .angle_bracket_angle_bracket_right;
},
.angle_bracket_right => {
- result.tag = .AngleBracketRight;
+ result.tag = .angle_bracket_right;
},
.angle_bracket_angle_bracket_left => {
- result.tag = .AngleBracketAngleBracketLeft;
+ result.tag = .angle_bracket_angle_bracket_left;
},
.angle_bracket_left => {
- result.tag = .AngleBracketLeft;
+ result.tag = .angle_bracket_left;
},
.plus_percent => {
- result.tag = .PlusPercent;
+ result.tag = .plus_percent;
},
.plus => {
- result.tag = .Plus;
+ result.tag = .plus;
},
.percent => {
- result.tag = .Percent;
+ result.tag = .percent;
},
.caret => {
- result.tag = .Caret;
+ result.tag = .caret;
},
.asterisk_percent => {
- result.tag = .AsteriskPercent;
+ result.tag = .asterisk_percent;
},
.asterisk => {
- result.tag = .Asterisk;
+ result.tag = .asterisk;
},
.minus_percent => {
- result.tag = .MinusPercent;
+ result.tag = .minus_percent;
},
}
}
- if (result.tag == .Eof) {
+ if (result.tag == .eof) {
if (self.pending_invalid_token) |token| {
self.pending_invalid_token = null;
return token;
@@ -1455,7 +1455,7 @@ pub const Tokenizer = struct {
const invalid_length = self.getInvalidCharacterLength();
if (invalid_length == 0) return;
self.pending_invalid_token = .{
- .tag = .Invalid,
+ .tag = .invalid,
.loc = .{
.start = self.index,
.end = self.index + invalid_length,
@@ -1502,7 +1502,7 @@ pub const Tokenizer = struct {
};
test "tokenizer" {
- testTokenize("test", &[_]Token.Tag{.Keyword_test});
+ testTokenize("test", &.{.keyword_test});
}
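
The test-side change from &[_]Token.Tag{...} to &.{...} leans on result-location inference: the parameter type of testTokenize supplies the element type, so the literal can be anonymous. The body of testTokenize itself is not shown in this diff; presumably it runs the tokenizer over the source, compares each produced tag, and then requires .eof, along these lines:

    const std = @import("std");

    // hedged sketch of the elided helper; the real assertions and the
    // exact Tokenizer.init signature depend on the std version
    fn testTokenize(source: []const u8, expected: []const std.zig.Token.Tag) void {
        var tokenizer = std.zig.Tokenizer.init(source);
        for (expected) |tag| {
            std.debug.assert(tokenizer.next().tag == tag);
        }
        std.debug.assert(tokenizer.next().tag == .eof);
    }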
test "line comment followed by top-level comptime" {
@@ -1510,10 +1510,10 @@ test "line comment followed by top-level comptime" {
\\// line comment
\\comptime {}
\\
- , &[_]Token.Tag{
- .Keyword_comptime,
- .LBrace,
- .RBrace,
+ , &.{
+ .keyword_comptime,
+ .l_brace,
+ .r_brace,
});
}
@@ -1521,199 +1521,199 @@ test "tokenizer - unknown length pointer and then c pointer" {
testTokenize(
\\[*]u8
\\[*c]u8
- , &[_]Token.Tag{
- .LBracket,
- .Asterisk,
- .RBracket,
- .Identifier,
- .LBracket,
- .Asterisk,
- .Identifier,
- .RBracket,
- .Identifier,
+ , &.{
+ .l_bracket,
+ .asterisk,
+ .r_bracket,
+ .identifier,
+ .l_bracket,
+ .asterisk,
+ .identifier,
+ .r_bracket,
+ .identifier,
});
}
test "tokenizer - char literal with hex escape" {
testTokenize(
\\'\x1b'
- , &[_]Token.Tag{.CharLiteral});
+ , &.{.char_literal});
testTokenize(
\\'\x1'
- , &[_]Token.Tag{ .Invalid, .Invalid });
+ , &.{ .invalid, .invalid });
}
test "tokenizer - char literal with unicode escapes" {
// Valid unicode escapes
testTokenize(
\\'\u{3}'
- , &[_]Token.Tag{.CharLiteral});
+ , &.{.char_literal});
testTokenize(
\\'\u{01}'
- , &[_]Token.Tag{.CharLiteral});
+ , &.{.char_literal});
testTokenize(
\\'\u{2a}'
- , &[_]Token.Tag{.CharLiteral});
+ , &.{.char_literal});
testTokenize(
\\'\u{3f9}'
- , &[_]Token.Tag{.CharLiteral});
+ , &.{.char_literal});
testTokenize(
\\'\u{6E09aBc1523}'
- , &[_]Token.Tag{.CharLiteral});
+ , &.{.char_literal});
testTokenize(
\\"\u{440}"
- , &[_]Token.Tag{.StringLiteral});
+ , &.{.string_literal});
// Invalid unicode escapes
testTokenize(
\\'\u'
- , &[_]Token.Tag{.Invalid});
+ , &.{.invalid});
testTokenize(
\\'\u{{'
- , &[_]Token.Tag{ .Invalid, .Invalid });
+ , &.{ .invalid, .invalid });
testTokenize(
\\'\u{}'
- , &[_]Token.Tag{ .Invalid, .Invalid });
+ , &.{ .invalid, .invalid });
testTokenize(
\\'\u{s}'
- , &[_]Token.Tag{ .Invalid, .Invalid });
+ , &.{ .invalid, .invalid });
testTokenize(
\\'\u{2z}'
- , &[_]Token.Tag{ .Invalid, .Invalid });
+ , &.{ .invalid, .invalid });
testTokenize(
\\'\u{4a'
- , &[_]Token.Tag{.Invalid});
+ , &.{.invalid});
// Test old-style unicode literals
testTokenize(
\\'\u0333'
- , &[_]Token.Tag{ .Invalid, .Invalid });
+ , &.{ .invalid, .invalid });
testTokenize(
\\'\U0333'
- , &[_]Token.Tag{ .Invalid, .IntegerLiteral, .Invalid });
+ , &.{ .invalid, .integer_literal, .invalid });
}
test "tokenizer - char literal with unicode code point" {
testTokenize(
\\'💩'
- , &[_]Token.Tag{.CharLiteral});
+ , &.{.char_literal});
}
test "tokenizer - float literal e exponent" {
- testTokenize("a = 4.94065645841246544177e-324;\n", &[_]Token.Tag{
- .Identifier,
- .Equal,
- .FloatLiteral,
- .Semicolon,
+ testTokenize("a = 4.94065645841246544177e-324;\n", &.{
+ .identifier,
+ .equal,
+ .float_literal,
+ .semicolon,
});
}
test "tokenizer - float literal p exponent" {
- testTokenize("a = 0x1.a827999fcef32p+1022;\n", &[_]Token.Tag{
- .Identifier,
- .Equal,
- .FloatLiteral,
- .Semicolon,
+ testTokenize("a = 0x1.a827999fcef32p+1022;\n", &.{
+ .identifier,
+ .equal,
+ .float_literal,
+ .semicolon,
});
}
test "tokenizer - chars" {
- testTokenize("'c'", &[_]Token.Tag{.CharLiteral});
+ testTokenize("'c'", &.{.char_literal});
}
test "tokenizer - invalid token characters" {
- testTokenize("#", &[_]Token.Tag{.Invalid});
- testTokenize("`", &[_]Token.Tag{.Invalid});
- testTokenize("'c", &[_]Token.Tag{.Invalid});
- testTokenize("'", &[_]Token.Tag{.Invalid});
- testTokenize("''", &[_]Token.Tag{ .Invalid, .Invalid });
+ testTokenize("#", &.{.invalid});
+ testTokenize("`", &.{.invalid});
+ testTokenize("'c", &.{.invalid});
+ testTokenize("'", &.{.invalid});
+ testTokenize("''", &.{ .invalid, .invalid });
}
test "tokenizer - invalid literal/comment characters" {
- testTokenize("\"\x00\"", &[_]Token.Tag{
- .StringLiteral,
- .Invalid,
+ testTokenize("\"\x00\"", &.{
+ .string_literal,
+ .invalid,
});
- testTokenize("//\x00", &[_]Token.Tag{
- .Invalid,
+ testTokenize("//\x00", &.{
+ .invalid,
});
- testTokenize("//\x1f", &[_]Token.Tag{
- .Invalid,
+ testTokenize("//\x1f", &.{
+ .invalid,
});
- testTokenize("//\x7f", &[_]Token.Tag{
- .Invalid,
+ testTokenize("//\x7f", &.{
+ .invalid,
});
}
test "tokenizer - utf8" {
- testTokenize("//\xc2\x80", &[_]Token.Tag{});
- testTokenize("//\xf4\x8f\xbf\xbf", &[_]Token.Tag{});
+ testTokenize("//\xc2\x80", &.{});
+ testTokenize("//\xf4\x8f\xbf\xbf", &.{});
}
test "tokenizer - invalid utf8" {
- testTokenize("//\x80", &[_]Token.Tag{
- .Invalid,
+ testTokenize("//\x80", &.{
+ .invalid,
});
- testTokenize("//\xbf", &[_]Token.Tag{
- .Invalid,
+ testTokenize("//\xbf", &.{
+ .invalid,
});
- testTokenize("//\xf8", &[_]Token.Tag{
- .Invalid,
+ testTokenize("//\xf8", &.{
+ .invalid,
});
- testTokenize("//\xff", &[_]Token.Tag{
- .Invalid,
+ testTokenize("//\xff", &.{
+ .invalid,
});
- testTokenize("//\xc2\xc0", &[_]Token.Tag{
- .Invalid,
+ testTokenize("//\xc2\xc0", &.{
+ .invalid,
});
- testTokenize("//\xe0", &[_]Token.Tag{
- .Invalid,
+ testTokenize("//\xe0", &.{
+ .invalid,
});
- testTokenize("//\xf0", &[_]Token.Tag{
- .Invalid,
+ testTokenize("//\xf0", &.{
+ .invalid,
});
- testTokenize("//\xf0\x90\x80\xc0", &[_]Token.Tag{
- .Invalid,
+ testTokenize("//\xf0\x90\x80\xc0", &.{
+ .invalid,
});
}
test "tokenizer - illegal unicode codepoints" {
    // unicode newline characters: U+0085, U+2028, U+2029
- testTokenize("//\xc2\x84", &[_]Token.Tag{});
- testTokenize("//\xc2\x85", &[_]Token.Tag{
- .Invalid,
+ testTokenize("//\xc2\x84", &.{});
+ testTokenize("//\xc2\x85", &.{
+ .invalid,
});
- testTokenize("//\xc2\x86", &[_]Token.Tag{});
- testTokenize("//\xe2\x80\xa7", &[_]Token.Tag{});
- testTokenize("//\xe2\x80\xa8", &[_]Token.Tag{
- .Invalid,
+ testTokenize("//\xc2\x86", &.{});
+ testTokenize("//\xe2\x80\xa7", &.{});
+ testTokenize("//\xe2\x80\xa8", &.{
+ .invalid,
});
- testTokenize("//\xe2\x80\xa9", &[_]Token.Tag{
- .Invalid,
+ testTokenize("//\xe2\x80\xa9", &.{
+ .invalid,
});
- testTokenize("//\xe2\x80\xaa", &[_]Token.Tag{});
+ testTokenize("//\xe2\x80\xaa", &.{});
}
test "tokenizer - string identifier and builtin fns" {
testTokenize(
\\const @"if" = @import("std");
- , &[_]Token.Tag{
- .Keyword_const,
- .Identifier,
- .Equal,
- .Builtin,
- .LParen,
- .StringLiteral,
- .RParen,
- .Semicolon,
+ , &.{
+ .keyword_const,
+ .identifier,
+ .equal,
+ .builtin,
+ .l_paren,
+ .string_literal,
+ .r_paren,
+ .semicolon,
});
}
test "tokenizer - multiline string literal with literal tab" {
testTokenize(
\\\\foo bar
- , &[_]Token.Tag{
- .MultilineStringLiteralLine,
+ , &.{
+ .multiline_string_literal_line,
});
}
@@ -1725,30 +1725,30 @@ test "tokenizer - comments with literal tab" {
\\// foo
\\/// foo
\\/// /foo
- , &[_]Token.Tag{
- .ContainerDocComment,
- .DocComment,
- .DocComment,
- .DocComment,
+ , &.{
+ .container_doc_comment,
+ .doc_comment,
+ .doc_comment,
+ .doc_comment,
});
}
test "tokenizer - pipe and then invalid" {
- testTokenize("||=", &[_]Token.Tag{
- .PipePipe,
- .Equal,
+ testTokenize("||=", &.{
+ .pipe_pipe,
+ .equal,
});
}
test "tokenizer - line comment and doc comment" {
- testTokenize("//", &[_]Token.Tag{});
- testTokenize("// a / b", &[_]Token.Tag{});
- testTokenize("// /", &[_]Token.Tag{});
- testTokenize("/// a", &[_]Token.Tag{.DocComment});
- testTokenize("///", &[_]Token.Tag{.DocComment});
- testTokenize("////", &[_]Token.Tag{});
- testTokenize("//!", &[_]Token.Tag{.ContainerDocComment});
- testTokenize("//!!", &[_]Token.Tag{.ContainerDocComment});
+ testTokenize("//", &.{});
+ testTokenize("// a / b", &.{});
+ testTokenize("// /", &.{});
+ testTokenize("/// a", &.{.doc_comment});
+ testTokenize("///", &.{.doc_comment});
+ testTokenize("////", &.{});
+ testTokenize("//!", &.{.container_doc_comment});
+ testTokenize("//!!", &.{.container_doc_comment});
}
test "tokenizer - line comment followed by identifier" {
@@ -1756,293 +1756,293 @@ test "tokenizer - line comment followed by identifier" {
\\ Unexpected,
\\ // another
\\ Another,
- , &[_]Token.Tag{
- .Identifier,
- .Comma,
- .Identifier,
- .Comma,
+ , &.{
+ .identifier,
+ .comma,
+ .identifier,
+ .comma,
});
}
test "tokenizer - UTF-8 BOM is recognized and skipped" {
- testTokenize("\xEF\xBB\xBFa;\n", &[_]Token.Tag{
- .Identifier,
- .Semicolon,
+ testTokenize("\xEF\xBB\xBFa;\n", &.{
+ .identifier,
+ .semicolon,
});
}
test "correctly parse pointer assignment" {
- testTokenize("b.*=3;\n", &[_]Token.Tag{
- .Identifier,
- .PeriodAsterisk,
- .Equal,
- .IntegerLiteral,
- .Semicolon,
+ testTokenize("b.*=3;\n", &.{
+ .identifier,
+ .period_asterisk,
+ .equal,
+ .integer_literal,
+ .semicolon,
});
}
test "correctly parse pointer dereference followed by asterisk" {
- testTokenize("\"b\".* ** 10", &[_]Token.Tag{
- .StringLiteral,
- .PeriodAsterisk,
- .AsteriskAsterisk,
- .IntegerLiteral,
+ testTokenize("\"b\".* ** 10", &.{
+ .string_literal,
+ .period_asterisk,
+ .asterisk_asterisk,
+ .integer_literal,
});
- testTokenize("(\"b\".*)** 10", &[_]Token.Tag{
- .LParen,
- .StringLiteral,
- .PeriodAsterisk,
- .RParen,
- .AsteriskAsterisk,
- .IntegerLiteral,
+ testTokenize("(\"b\".*)** 10", &.{
+ .l_paren,
+ .string_literal,
+ .period_asterisk,
+ .r_paren,
+ .asterisk_asterisk,
+ .integer_literal,
});
- testTokenize("\"b\".*** 10", &[_]Token.Tag{
- .StringLiteral,
- .Invalid_periodasterisks,
- .AsteriskAsterisk,
- .IntegerLiteral,
+ testTokenize("\"b\".*** 10", &.{
+ .string_literal,
+ .invalid_periodasterisks,
+ .asterisk_asterisk,
+ .integer_literal,
});
}
test "tokenizer - range literals" {
- testTokenize("0...9", &[_]Token.Tag{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral });
- testTokenize("'0'...'9'", &[_]Token.Tag{ .CharLiteral, .Ellipsis3, .CharLiteral });
- testTokenize("0x00...0x09", &[_]Token.Tag{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral });
- testTokenize("0b00...0b11", &[_]Token.Tag{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral });
- testTokenize("0o00...0o11", &[_]Token.Tag{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral });
+ testTokenize("0...9", &.{ .integer_literal, .ellipsis3, .integer_literal });
+ testTokenize("'0'...'9'", &.{ .char_literal, .ellipsis3, .char_literal });
+ testTokenize("0x00...0x09", &.{ .integer_literal, .ellipsis3, .integer_literal });
+ testTokenize("0b00...0b11", &.{ .integer_literal, .ellipsis3, .integer_literal });
+ testTokenize("0o00...0o11", &.{ .integer_literal, .ellipsis3, .integer_literal });
}
test "tokenizer - number literals decimal" {
- testTokenize("0", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("1", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("2", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("3", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("4", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("5", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("6", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("7", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("8", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("9", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("1..", &[_]Token.Tag{ .IntegerLiteral, .Ellipsis2 });
- testTokenize("0a", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("9b", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("1z", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("1z_1", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("9z3", &[_]Token.Tag{ .Invalid, .Identifier });
-
- testTokenize("0_0", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0001", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("01234567890", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("012_345_6789_0", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0_1_2_3_4_5_6_7_8_9_0", &[_]Token.Tag{.IntegerLiteral});
-
- testTokenize("00_", &[_]Token.Tag{.Invalid});
- testTokenize("0_0_", &[_]Token.Tag{.Invalid});
- testTokenize("0__0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0_0f", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0_0_f", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0_0_f_00", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("1_,", &[_]Token.Tag{ .Invalid, .Comma });
-
- testTokenize("1.", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0.0", &[_]Token.Tag{.FloatLiteral});
- testTokenize("1.0", &[_]Token.Tag{.FloatLiteral});
- testTokenize("10.0", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0e0", &[_]Token.Tag{.FloatLiteral});
- testTokenize("1e0", &[_]Token.Tag{.FloatLiteral});
- testTokenize("1e100", &[_]Token.Tag{.FloatLiteral});
- testTokenize("1.e100", &[_]Token.Tag{.FloatLiteral});
- testTokenize("1.0e100", &[_]Token.Tag{.FloatLiteral});
- testTokenize("1.0e+100", &[_]Token.Tag{.FloatLiteral});
- testTokenize("1.0e-100", &[_]Token.Tag{.FloatLiteral});
- testTokenize("1_0_0_0.0_0_0_0_0_1e1_0_0_0", &[_]Token.Tag{.FloatLiteral});
- testTokenize("1.+", &[_]Token.Tag{ .FloatLiteral, .Plus });
-
- testTokenize("1e", &[_]Token.Tag{.Invalid});
- testTokenize("1.0e1f0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("1.0p100", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("1.0p-100", &[_]Token.Tag{ .Invalid, .Identifier, .Minus, .IntegerLiteral });
- testTokenize("1.0p1f0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("1.0_,", &[_]Token.Tag{ .Invalid, .Comma });
- testTokenize("1_.0", &[_]Token.Tag{ .Invalid, .Period, .IntegerLiteral });
- testTokenize("1._", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("1.a", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("1.z", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("1._0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("1._+", &[_]Token.Tag{ .Invalid, .Identifier, .Plus });
- testTokenize("1._e", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("1.0e", &[_]Token.Tag{.Invalid});
- testTokenize("1.0e,", &[_]Token.Tag{ .Invalid, .Comma });
- testTokenize("1.0e_", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("1.0e+_", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("1.0e-_", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("1.0e0_+", &[_]Token.Tag{ .Invalid, .Plus });
+ testTokenize("0", &.{.integer_literal});
+ testTokenize("1", &.{.integer_literal});
+ testTokenize("2", &.{.integer_literal});
+ testTokenize("3", &.{.integer_literal});
+ testTokenize("4", &.{.integer_literal});
+ testTokenize("5", &.{.integer_literal});
+ testTokenize("6", &.{.integer_literal});
+ testTokenize("7", &.{.integer_literal});
+ testTokenize("8", &.{.integer_literal});
+ testTokenize("9", &.{.integer_literal});
+ testTokenize("1..", &.{ .integer_literal, .ellipsis2 });
+ testTokenize("0a", &.{ .invalid, .identifier });
+ testTokenize("9b", &.{ .invalid, .identifier });
+ testTokenize("1z", &.{ .invalid, .identifier });
+ testTokenize("1z_1", &.{ .invalid, .identifier });
+ testTokenize("9z3", &.{ .invalid, .identifier });
+
+ testTokenize("0_0", &.{.integer_literal});
+ testTokenize("0001", &.{.integer_literal});
+ testTokenize("01234567890", &.{.integer_literal});
+ testTokenize("012_345_6789_0", &.{.integer_literal});
+ testTokenize("0_1_2_3_4_5_6_7_8_9_0", &.{.integer_literal});
+
+ testTokenize("00_", &.{.invalid});
+ testTokenize("0_0_", &.{.invalid});
+ testTokenize("0__0", &.{ .invalid, .identifier });
+ testTokenize("0_0f", &.{ .invalid, .identifier });
+ testTokenize("0_0_f", &.{ .invalid, .identifier });
+ testTokenize("0_0_f_00", &.{ .invalid, .identifier });
+ testTokenize("1_,", &.{ .invalid, .comma });
+
+ testTokenize("1.", &.{.float_literal});
+ testTokenize("0.0", &.{.float_literal});
+ testTokenize("1.0", &.{.float_literal});
+ testTokenize("10.0", &.{.float_literal});
+ testTokenize("0e0", &.{.float_literal});
+ testTokenize("1e0", &.{.float_literal});
+ testTokenize("1e100", &.{.float_literal});
+ testTokenize("1.e100", &.{.float_literal});
+ testTokenize("1.0e100", &.{.float_literal});
+ testTokenize("1.0e+100", &.{.float_literal});
+ testTokenize("1.0e-100", &.{.float_literal});
+ testTokenize("1_0_0_0.0_0_0_0_0_1e1_0_0_0", &.{.float_literal});
+ testTokenize("1.+", &.{ .float_literal, .plus });
+
+ testTokenize("1e", &.{.invalid});
+ testTokenize("1.0e1f0", &.{ .invalid, .identifier });
+ testTokenize("1.0p100", &.{ .invalid, .identifier });
+ testTokenize("1.0p-100", &.{ .invalid, .identifier, .minus, .integer_literal });
+ testTokenize("1.0p1f0", &.{ .invalid, .identifier });
+ testTokenize("1.0_,", &.{ .invalid, .comma });
+ testTokenize("1_.0", &.{ .invalid, .period, .integer_literal });
+ testTokenize("1._", &.{ .invalid, .identifier });
+ testTokenize("1.a", &.{ .invalid, .identifier });
+ testTokenize("1.z", &.{ .invalid, .identifier });
+ testTokenize("1._0", &.{ .invalid, .identifier });
+ testTokenize("1._+", &.{ .invalid, .identifier, .plus });
+ testTokenize("1._e", &.{ .invalid, .identifier });
+ testTokenize("1.0e", &.{.invalid});
+ testTokenize("1.0e,", &.{ .invalid, .comma });
+ testTokenize("1.0e_", &.{ .invalid, .identifier });
+ testTokenize("1.0e+_", &.{ .invalid, .identifier });
+ testTokenize("1.0e-_", &.{ .invalid, .identifier });
+ testTokenize("1.0e0_+", &.{ .invalid, .plus });
}
test "tokenizer - number literals binary" {
- testTokenize("0b0", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0b1", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0b2", &[_]Token.Tag{ .Invalid, .IntegerLiteral });
- testTokenize("0b3", &[_]Token.Tag{ .Invalid, .IntegerLiteral });
- testTokenize("0b4", &[_]Token.Tag{ .Invalid, .IntegerLiteral });
- testTokenize("0b5", &[_]Token.Tag{ .Invalid, .IntegerLiteral });
- testTokenize("0b6", &[_]Token.Tag{ .Invalid, .IntegerLiteral });
- testTokenize("0b7", &[_]Token.Tag{ .Invalid, .IntegerLiteral });
- testTokenize("0b8", &[_]Token.Tag{ .Invalid, .IntegerLiteral });
- testTokenize("0b9", &[_]Token.Tag{ .Invalid, .IntegerLiteral });
- testTokenize("0ba", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0bb", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0bc", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0bd", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0be", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0bf", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0bz", &[_]Token.Tag{ .Invalid, .Identifier });
-
- testTokenize("0b0000_0000", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0b1111_1111", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0b10_10_10_10", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0b0_1_0_1_0_1_0_1", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0b1.", &[_]Token.Tag{ .IntegerLiteral, .Period });
- testTokenize("0b1.0", &[_]Token.Tag{ .IntegerLiteral, .Period, .IntegerLiteral });
-
- testTokenize("0B0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0b_", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0b_0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0b1_", &[_]Token.Tag{.Invalid});
- testTokenize("0b0__1", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0b0_1_", &[_]Token.Tag{.Invalid});
- testTokenize("0b1e", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0b1p", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0b1e0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0b1p0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0b1_,", &[_]Token.Tag{ .Invalid, .Comma });
+ testTokenize("0b0", &.{.integer_literal});
+ testTokenize("0b1", &.{.integer_literal});
+ testTokenize("0b2", &.{ .invalid, .integer_literal });
+ testTokenize("0b3", &.{ .invalid, .integer_literal });
+ testTokenize("0b4", &.{ .invalid, .integer_literal });
+ testTokenize("0b5", &.{ .invalid, .integer_literal });
+ testTokenize("0b6", &.{ .invalid, .integer_literal });
+ testTokenize("0b7", &.{ .invalid, .integer_literal });
+ testTokenize("0b8", &.{ .invalid, .integer_literal });
+ testTokenize("0b9", &.{ .invalid, .integer_literal });
+ testTokenize("0ba", &.{ .invalid, .identifier });
+ testTokenize("0bb", &.{ .invalid, .identifier });
+ testTokenize("0bc", &.{ .invalid, .identifier });
+ testTokenize("0bd", &.{ .invalid, .identifier });
+ testTokenize("0be", &.{ .invalid, .identifier });
+ testTokenize("0bf", &.{ .invalid, .identifier });
+ testTokenize("0bz", &.{ .invalid, .identifier });
+
+ testTokenize("0b0000_0000", &.{.integer_literal});
+ testTokenize("0b1111_1111", &.{.integer_literal});
+ testTokenize("0b10_10_10_10", &.{.integer_literal});
+ testTokenize("0b0_1_0_1_0_1_0_1", &.{.integer_literal});
+ testTokenize("0b1.", &.{ .integer_literal, .period });
+ testTokenize("0b1.0", &.{ .integer_literal, .period, .integer_literal });
+
+ testTokenize("0B0", &.{ .invalid, .identifier });
+ testTokenize("0b_", &.{ .invalid, .identifier });
+ testTokenize("0b_0", &.{ .invalid, .identifier });
+ testTokenize("0b1_", &.{.invalid});
+ testTokenize("0b0__1", &.{ .invalid, .identifier });
+ testTokenize("0b0_1_", &.{.invalid});
+ testTokenize("0b1e", &.{ .invalid, .identifier });
+ testTokenize("0b1p", &.{ .invalid, .identifier });
+ testTokenize("0b1e0", &.{ .invalid, .identifier });
+ testTokenize("0b1p0", &.{ .invalid, .identifier });
+ testTokenize("0b1_,", &.{ .invalid, .comma });
}
test "tokenizer - number literals octal" {
- testTokenize("0o0", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0o1", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0o2", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0o3", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0o4", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0o5", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0o6", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0o7", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0o8", &[_]Token.Tag{ .Invalid, .IntegerLiteral });
- testTokenize("0o9", &[_]Token.Tag{ .Invalid, .IntegerLiteral });
- testTokenize("0oa", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0ob", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0oc", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0od", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0oe", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0of", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0oz", &[_]Token.Tag{ .Invalid, .Identifier });
-
- testTokenize("0o01234567", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0o0123_4567", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0o01_23_45_67", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0o0_1_2_3_4_5_6_7", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0o7.", &[_]Token.Tag{ .IntegerLiteral, .Period });
- testTokenize("0o7.0", &[_]Token.Tag{ .IntegerLiteral, .Period, .IntegerLiteral });
-
- testTokenize("0O0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0o_", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0o_0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0o1_", &[_]Token.Tag{.Invalid});
- testTokenize("0o0__1", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0o0_1_", &[_]Token.Tag{.Invalid});
- testTokenize("0o1e", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0o1p", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0o1e0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0o1p0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0o_,", &[_]Token.Tag{ .Invalid, .Identifier, .Comma });
+ testTokenize("0o0", &.{.integer_literal});
+ testTokenize("0o1", &.{.integer_literal});
+ testTokenize("0o2", &.{.integer_literal});
+ testTokenize("0o3", &.{.integer_literal});
+ testTokenize("0o4", &.{.integer_literal});
+ testTokenize("0o5", &.{.integer_literal});
+ testTokenize("0o6", &.{.integer_literal});
+ testTokenize("0o7", &.{.integer_literal});
+ testTokenize("0o8", &.{ .invalid, .integer_literal });
+ testTokenize("0o9", &.{ .invalid, .integer_literal });
+ testTokenize("0oa", &.{ .invalid, .identifier });
+ testTokenize("0ob", &.{ .invalid, .identifier });
+ testTokenize("0oc", &.{ .invalid, .identifier });
+ testTokenize("0od", &.{ .invalid, .identifier });
+ testTokenize("0oe", &.{ .invalid, .identifier });
+ testTokenize("0of", &.{ .invalid, .identifier });
+ testTokenize("0oz", &.{ .invalid, .identifier });
+
+ testTokenize("0o01234567", &.{.integer_literal});
+ testTokenize("0o0123_4567", &.{.integer_literal});
+ testTokenize("0o01_23_45_67", &.{.integer_literal});
+ testTokenize("0o0_1_2_3_4_5_6_7", &.{.integer_literal});
+ testTokenize("0o7.", &.{ .integer_literal, .period });
+ testTokenize("0o7.0", &.{ .integer_literal, .period, .integer_literal });
+
+ testTokenize("0O0", &.{ .invalid, .identifier });
+ testTokenize("0o_", &.{ .invalid, .identifier });
+ testTokenize("0o_0", &.{ .invalid, .identifier });
+ testTokenize("0o1_", &.{.invalid});
+ testTokenize("0o0__1", &.{ .invalid, .identifier });
+ testTokenize("0o0_1_", &.{.invalid});
+ testTokenize("0o1e", &.{ .invalid, .identifier });
+ testTokenize("0o1p", &.{ .invalid, .identifier });
+ testTokenize("0o1e0", &.{ .invalid, .identifier });
+ testTokenize("0o1p0", &.{ .invalid, .identifier });
+ testTokenize("0o_,", &.{ .invalid, .identifier, .comma });
}
test "tokenizer - number literals hexadeciaml" {
- testTokenize("0x0", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0x1", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0x2", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0x3", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0x4", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0x5", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0x6", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0x7", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0x8", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0x9", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0xa", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0xb", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0xc", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0xd", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0xe", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0xf", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0xA", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0xB", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0xC", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0xD", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0xE", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0xF", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0x0z", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0xz", &[_]Token.Tag{ .Invalid, .Identifier });
-
- testTokenize("0x0123456789ABCDEF", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0x0123_4567_89AB_CDEF", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0x01_23_45_67_89AB_CDE_F", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0x0_1_2_3_4_5_6_7_8_9_A_B_C_D_E_F", &[_]Token.Tag{.IntegerLiteral});
-
- testTokenize("0X0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0x_", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0x_1", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0x1_", &[_]Token.Tag{.Invalid});
- testTokenize("0x0__1", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0x0_1_", &[_]Token.Tag{.Invalid});
- testTokenize("0x_,", &[_]Token.Tag{ .Invalid, .Identifier, .Comma });
-
- testTokenize("0x1.", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0x1.0", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0xF.", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0xF.0", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0xF.F", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0xF.Fp0", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0xF.FP0", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0x1p0", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0xfp0", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0x1.+0xF.", &[_]Token.Tag{ .FloatLiteral, .Plus, .FloatLiteral });
-
- testTokenize("0x0123456.789ABCDEF", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0x0_123_456.789_ABC_DEF", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0x0_1_2_3_4_5_6.7_8_9_A_B_C_D_E_F", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0x0p0", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0x0.0p0", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0xff.ffp10", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0xff.ffP10", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0xff.p10", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0xffp10", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0xff_ff.ff_ffp1_0_0_0", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0xf_f_f_f.f_f_f_fp+1_000", &[_]Token.Tag{.FloatLiteral});
- testTokenize("0xf_f_f_f.f_f_f_fp-1_00_0", &[_]Token.Tag{.FloatLiteral});
-
- testTokenize("0x1e", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0x1e0", &[_]Token.Tag{.IntegerLiteral});
- testTokenize("0x1p", &[_]Token.Tag{.Invalid});
- testTokenize("0xfp0z1", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0xff.ffpff", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0x0.p", &[_]Token.Tag{.Invalid});
- testTokenize("0x0.z", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0x0._", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0x0_.0", &[_]Token.Tag{ .Invalid, .Period, .IntegerLiteral });
- testTokenize("0x0_.0.0", &[_]Token.Tag{ .Invalid, .Period, .FloatLiteral });
- testTokenize("0x0._0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0x0.0_", &[_]Token.Tag{.Invalid});
- testTokenize("0x0_p0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0x0_.p0", &[_]Token.Tag{ .Invalid, .Period, .Identifier });
- testTokenize("0x0._p0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0x0.0_p0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0x0._0p0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0x0.0p_0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0x0.0p+_0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0x0.0p-_0", &[_]Token.Tag{ .Invalid, .Identifier });
- testTokenize("0x0.0p0_", &[_]Token.Tag{ .Invalid, .Eof });
+ testTokenize("0x0", &.{.integer_literal});
+ testTokenize("0x1", &.{.integer_literal});
+ testTokenize("0x2", &.{.integer_literal});
+ testTokenize("0x3", &.{.integer_literal});
+ testTokenize("0x4", &.{.integer_literal});
+ testTokenize("0x5", &.{.integer_literal});
+ testTokenize("0x6", &.{.integer_literal});
+ testTokenize("0x7", &.{.integer_literal});
+ testTokenize("0x8", &.{.integer_literal});
+ testTokenize("0x9", &.{.integer_literal});
+ testTokenize("0xa", &.{.integer_literal});
+ testTokenize("0xb", &.{.integer_literal});
+ testTokenize("0xc", &.{.integer_literal});
+ testTokenize("0xd", &.{.integer_literal});
+ testTokenize("0xe", &.{.integer_literal});
+ testTokenize("0xf", &.{.integer_literal});
+ testTokenize("0xA", &.{.integer_literal});
+ testTokenize("0xB", &.{.integer_literal});
+ testTokenize("0xC", &.{.integer_literal});
+ testTokenize("0xD", &.{.integer_literal});
+ testTokenize("0xE", &.{.integer_literal});
+ testTokenize("0xF", &.{.integer_literal});
+ testTokenize("0x0z", &.{ .invalid, .identifier });
+ testTokenize("0xz", &.{ .invalid, .identifier });
+
+ testTokenize("0x0123456789ABCDEF", &.{.integer_literal});
+ testTokenize("0x0123_4567_89AB_CDEF", &.{.integer_literal});
+ testTokenize("0x01_23_45_67_89AB_CDE_F", &.{.integer_literal});
+ testTokenize("0x0_1_2_3_4_5_6_7_8_9_A_B_C_D_E_F", &.{.integer_literal});
+
+ testTokenize("0X0", &.{ .invalid, .identifier });
+ testTokenize("0x_", &.{ .invalid, .identifier });
+ testTokenize("0x_1", &.{ .invalid, .identifier });
+ testTokenize("0x1_", &.{.invalid});
+ testTokenize("0x0__1", &.{ .invalid, .identifier });
+ testTokenize("0x0_1_", &.{.invalid});
+ testTokenize("0x_,", &.{ .invalid, .identifier, .comma });
+
+ testTokenize("0x1.", &.{.float_literal});
+ testTokenize("0x1.0", &.{.float_literal});
+ testTokenize("0xF.", &.{.float_literal});
+ testTokenize("0xF.0", &.{.float_literal});
+ testTokenize("0xF.F", &.{.float_literal});
+ testTokenize("0xF.Fp0", &.{.float_literal});
+ testTokenize("0xF.FP0", &.{.float_literal});
+ testTokenize("0x1p0", &.{.float_literal});
+ testTokenize("0xfp0", &.{.float_literal});
+ testTokenize("0x1.+0xF.", &.{ .float_literal, .plus, .float_literal });
+
+ testTokenize("0x0123456.789ABCDEF", &.{.float_literal});
+ testTokenize("0x0_123_456.789_ABC_DEF", &.{.float_literal});
+ testTokenize("0x0_1_2_3_4_5_6.7_8_9_A_B_C_D_E_F", &.{.float_literal});
+ testTokenize("0x0p0", &.{.float_literal});
+ testTokenize("0x0.0p0", &.{.float_literal});
+ testTokenize("0xff.ffp10", &.{.float_literal});
+ testTokenize("0xff.ffP10", &.{.float_literal});
+ testTokenize("0xff.p10", &.{.float_literal});
+ testTokenize("0xffp10", &.{.float_literal});
+ testTokenize("0xff_ff.ff_ffp1_0_0_0", &.{.float_literal});
+ testTokenize("0xf_f_f_f.f_f_f_fp+1_000", &.{.float_literal});
+ testTokenize("0xf_f_f_f.f_f_f_fp-1_00_0", &.{.float_literal});
+
+ testTokenize("0x1e", &.{.integer_literal});
+ testTokenize("0x1e0", &.{.integer_literal});
+ testTokenize("0x1p", &.{.invalid});
+ testTokenize("0xfp0z1", &.{ .invalid, .identifier });
+ testTokenize("0xff.ffpff", &.{ .invalid, .identifier });
+ testTokenize("0x0.p", &.{.invalid});
+ testTokenize("0x0.z", &.{ .invalid, .identifier });
+ testTokenize("0x0._", &.{ .invalid, .identifier });
+ testTokenize("0x0_.0", &.{ .invalid, .period, .integer_literal });
+ testTokenize("0x0_.0.0", &.{ .invalid, .period, .float_literal });
+ testTokenize("0x0._0", &.{ .invalid, .identifier });
+ testTokenize("0x0.0_", &.{.invalid});
+ testTokenize("0x0_p0", &.{ .invalid, .identifier });
+ testTokenize("0x0_.p0", &.{ .invalid, .period, .identifier });
+ testTokenize("0x0._p0", &.{ .invalid, .identifier });
+ testTokenize("0x0.0_p0", &.{ .invalid, .identifier });
+ testTokenize("0x0._0p0", &.{ .invalid, .identifier });
+ testTokenize("0x0.0p_0", &.{ .invalid, .identifier });
+ testTokenize("0x0.0p+_0", &.{ .invalid, .identifier });
+ testTokenize("0x0.0p-_0", &.{ .invalid, .identifier });
+ testTokenize("0x0.0p0_", &.{ .invalid, .eof });
}

fn testTokenize(source: []const u8, expected_tokens: []const Token.Tag) void {
@@ -2054,5 +2054,5 @@ fn testTokenize(source: []const u8, expected_tokens: []const Token.Tag) void {
}
}
const last_token = tokenizer.next();
- std.testing.expect(last_token.tag == .Eof);
+ std.testing.expect(last_token.tag == .eof);
}
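
The final hunk shows only the tail of the testTokenize helper. For context, a minimal sketch of how the whole helper plausibly reads after this rename, reconstructed around the visible context lines; the Tokenizer.init call, the loop body, and the panic message are illustrative assumptions, not part of this commit:

const std = @import("std");

fn testTokenize(source: []const u8, expected_tokens: []const Token.Tag) void {
    // Token and Tokenizer are declared earlier in tokenizer.zig, so only std
    // is imported here.
    var tokenizer = Tokenizer.init(source);
    for (expected_tokens) |expected_token_tag| {
        const token = tokenizer.next();
        if (token.tag != expected_token_tag) {
            // Hypothetical message: panicking with both tag names makes the
            // failing literal easy to spot among the one-line cases above.
            std.debug.panic("expected {}, found {}\n", .{ expected_token_tag, token.tag });
        }
    }
    // Matches the visible tail of the hunk: the tokenizer must report exactly
    // one trailing .eof token after the expected tags are consumed.
    const last_token = tokenizer.next();
    std.testing.expect(last_token.tag == .eof);
}

Comparing tag by tag inside the loop, rather than collecting and comparing whole slices, keeps each of the many single-line literal cases independently diagnosable when one regresses.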