diff options
| author | Wink Saville <wink@saville.com> | 2018-09-23 15:53:52 -0700 |
|---|---|---|
| committer | Andrew Kelley <superjoe30@gmail.com> | 2018-09-24 19:28:46 -0400 |
| commit | 0e6c18c8207cab5d3946f119a51d62f91c1b9028 (patch) | |
| tree | b1e18987d751ac82beaaecb544753cb260c40595 /std | |
| parent | 4241cd666dbf5117a6c32357c091ca3bc7a0fcd0 (diff) | |
| download | zig-0e6c18c8207cab5d3946f119a51d62f91c1b9028.tar.gz zig-0e6c18c8207cab5d3946f119a51d62f91c1b9028.zip | |
Remove StrLitKind enum
I was looking at the tokenizer, specifically fn testTokenize, and
this statement looked odd:
if (@TagType(Token.Id)(token.id) != @TagType(Token.Id)(expected_token_id)) {
I then saw the TODO and thought I'd remove StrLitKind figuring that
would make testTokenize simpler. It did, so I thought I'd prepare this PR.
The tests are still working and stage2 zig seems to work, it compiles and
I was able to use the fmt command.
Diffstat (limited to 'std')
| -rw-r--r-- | std/zig/ast.zig | 4 | ||||
| -rw-r--r-- | std/zig/parse.zig | 24 | ||||
| -rw-r--r-- | std/zig/tokenizer.zig | 37 |
3 files changed, 25 insertions, 40 deletions
diff --git a/std/zig/ast.zig b/std/zig/ast.zig index b5b3ce9add..2102da456c 100644 --- a/std/zig/ast.zig +++ b/std/zig/ast.zig @@ -211,7 +211,7 @@ pub const Error = union(enum) { pub const ExpectedToken = struct { token: TokenIndex, - expected_id: @TagType(Token.Id), + expected_id: Token.Id, pub fn render(self: *const ExpectedToken, tokens: *Tree.TokenList, stream: var) !void { const token_name = @tagName(tokens.at(self.token).id); @@ -221,7 +221,7 @@ pub const Error = union(enum) { pub const ExpectedCommaOrEnd = struct { token: TokenIndex, - end_id: @TagType(Token.Id), + end_id: Token.Id, pub fn render(self: *const ExpectedCommaOrEnd, tokens: *Tree.TokenList, stream: var) !void { const token_name = @tagName(tokens.at(self.token).id); diff --git a/std/zig/parse.zig b/std/zig/parse.zig index 136525e0ad..5941ab6905 100644 --- a/std/zig/parse.zig +++ b/std/zig/parse.zig @@ -2846,12 +2846,12 @@ const ContainerKindCtx = struct { }; const ExpectTokenSave = struct { - id: @TagType(Token.Id), + id: Token.Id, ptr: *TokenIndex, }; const OptionalTokenSave = struct { - id: @TagType(Token.Id), + id: Token.Id, ptr: *?TokenIndex, }; @@ -3066,9 +3066,9 @@ const State = union(enum) { Identifier: OptionalCtx, ErrorTag: **ast.Node, - IfToken: @TagType(Token.Id), + IfToken: Token.Id, IfTokenSave: ExpectTokenSave, - ExpectToken: @TagType(Token.Id), + ExpectToken: Token.Id, ExpectTokenSave: ExpectTokenSave, OptionalTokenSave: OptionalTokenSave, }; @@ -3243,7 +3243,7 @@ const ExpectCommaOrEndResult = union(enum) { parse_error: Error, }; -fn expectCommaOrEnd(tok_it: *ast.Tree.TokenList.Iterator, tree: *ast.Tree, end: @TagType(Token.Id)) ExpectCommaOrEndResult { +fn expectCommaOrEnd(tok_it: *ast.Tree.TokenList.Iterator, tree: *ast.Tree, end: Token.Id) ExpectCommaOrEndResult { const token = nextToken(tok_it, tree); const token_index = token.index; const token_ptr = token.ptr; @@ -3288,7 +3288,7 @@ fn tokenIdToAssignment(id: *const Token.Id) ?ast.Node.InfixOp.Op { }; } -fn 
tokenIdToUnwrapExpr(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op { +fn tokenIdToUnwrapExpr(id: Token.Id) ?ast.Node.InfixOp.Op { return switch (id) { Token.Id.Keyword_catch => ast.Node.InfixOp.Op{ .Catch = null }, Token.Id.Keyword_orelse => ast.Node.InfixOp.Op{ .UnwrapOptional = void{} }, @@ -3296,7 +3296,7 @@ fn tokenIdToUnwrapExpr(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op { }; } -fn tokenIdToComparison(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op { +fn tokenIdToComparison(id: Token.Id) ?ast.Node.InfixOp.Op { return switch (id) { Token.Id.BangEqual => ast.Node.InfixOp.Op{ .BangEqual = void{} }, Token.Id.EqualEqual => ast.Node.InfixOp.Op{ .EqualEqual = void{} }, @@ -3308,7 +3308,7 @@ fn tokenIdToComparison(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op { }; } -fn tokenIdToBitShift(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op { +fn tokenIdToBitShift(id: Token.Id) ?ast.Node.InfixOp.Op { return switch (id) { Token.Id.AngleBracketAngleBracketLeft => ast.Node.InfixOp.Op{ .BitShiftLeft = void{} }, Token.Id.AngleBracketAngleBracketRight => ast.Node.InfixOp.Op{ .BitShiftRight = void{} }, @@ -3316,7 +3316,7 @@ fn tokenIdToBitShift(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op { }; } -fn tokenIdToAddition(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op { +fn tokenIdToAddition(id: Token.Id) ?ast.Node.InfixOp.Op { return switch (id) { Token.Id.Minus => ast.Node.InfixOp.Op{ .Sub = void{} }, Token.Id.MinusPercent => ast.Node.InfixOp.Op{ .SubWrap = void{} }, @@ -3327,7 +3327,7 @@ fn tokenIdToAddition(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op { }; } -fn tokenIdToMultiply(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op { +fn tokenIdToMultiply(id: Token.Id) ?ast.Node.InfixOp.Op { return switch (id) { Token.Id.Slash => ast.Node.InfixOp.Op{ .Div = void{} }, Token.Id.Asterisk => ast.Node.InfixOp.Op{ .Mult = void{} }, @@ -3339,7 +3339,7 @@ fn tokenIdToMultiply(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op { }; } -fn tokenIdToPrefixOp(id: @TagType(Token.Id)) ?ast.Node.PrefixOp.Op { +fn 
tokenIdToPrefixOp(id: Token.Id) ?ast.Node.PrefixOp.Op { return switch (id) { Token.Id.Bang => ast.Node.PrefixOp.Op{ .BoolNot = void{} }, Token.Id.Tilde => ast.Node.PrefixOp.Op{ .BitNot = void{} }, @@ -3374,7 +3374,7 @@ fn createToCtxLiteral(arena: *mem.Allocator, opt_ctx: *const OptionalCtx, compti return node; } -fn eatToken(tok_it: *ast.Tree.TokenList.Iterator, tree: *ast.Tree, id: @TagType(Token.Id)) ?TokenIndex { +fn eatToken(tok_it: *ast.Tree.TokenList.Iterator, tree: *ast.Tree, id: Token.Id) ?TokenIndex { const token = tok_it.peek().?; if (token.id == id) { diff --git a/std/zig/tokenizer.zig b/std/zig/tokenizer.zig index 1bb3de4935..b996876200 100644 --- a/std/zig/tokenizer.zig +++ b/std/zig/tokenizer.zig @@ -73,17 +73,11 @@ pub const Token = struct { return null; } - /// TODO remove this enum - const StrLitKind = enum { - Normal, - C, - }; - - pub const Id = union(enum) { + pub const Id = enum { Invalid, Identifier, - StringLiteral: StrLitKind, - MultilineStringLiteralLine: StrLitKind, + StringLiteral, + MultilineStringLiteralLine, CharLiteral, Eof, Builtin, @@ -311,7 +305,7 @@ pub const Tokenizer = struct { }, '"' => { state = State.StringLiteral; - result.id = Token.Id{ .StringLiteral = Token.StrLitKind.Normal }; + result.id = Token.Id.StringLiteral; }, '\'' => { state = State.CharLiteral; @@ -390,7 +384,7 @@ pub const Tokenizer = struct { }, '\\' => { state = State.Backslash; - result.id = Token.Id{ .MultilineStringLiteralLine = Token.StrLitKind.Normal }; + result.id = Token.Id.MultilineStringLiteralLine; }, '{' => { result.id = Token.Id.LBrace; @@ -591,11 +585,11 @@ pub const Tokenizer = struct { State.C => switch (c) { '\\' => { state = State.Backslash; - result.id = Token.Id{ .MultilineStringLiteralLine = Token.StrLitKind.C }; + result.id = Token.Id.MultilineStringLiteralLine; }, '"' => { state = State.StringLiteral; - result.id = Token.Id{ .StringLiteral = Token.StrLitKind.C }; + result.id = Token.Id.StringLiteral; }, 'a'...'z', 'A'...'Z', '_', 
'0'...'9' => { state = State.Identifier; @@ -1218,7 +1212,7 @@ test "tokenizer - invalid token characters" { test "tokenizer - invalid literal/comment characters" { testTokenize("\"\x00\"", []Token.Id{ - Token.Id{ .StringLiteral = Token.StrLitKind.Normal }, + Token.Id.StringLiteral, Token.Id.Invalid, }); testTokenize("//\x00", []Token.Id{ @@ -1304,7 +1298,7 @@ test "tokenizer - string identifier and builtin fns" { Token.Id.Equal, Token.Id.Builtin, Token.Id.LParen, - Token.Id{ .StringLiteral = Token.StrLitKind.Normal }, + Token.Id.StringLiteral, Token.Id.RParen, Token.Id.Semicolon, }); @@ -1344,17 +1338,8 @@ fn testTokenize(source: []const u8, expected_tokens: []const Token.Id) void { var tokenizer = Tokenizer.init(source); for (expected_tokens) |expected_token_id| { const token = tokenizer.next(); - if (@TagType(Token.Id)(token.id) != @TagType(Token.Id)(expected_token_id)) { - std.debug.panic("expected {}, found {}\n", @tagName(@TagType(Token.Id)(expected_token_id)), @tagName(@TagType(Token.Id)(token.id))); - } - switch (expected_token_id) { - Token.Id.StringLiteral => |expected_kind| { - std.debug.assert(expected_kind == switch (token.id) { - Token.Id.StringLiteral => |kind| kind, - else => unreachable, - }); - }, - else => {}, + if (token.id != expected_token_id) { + std.debug.panic("expected {}, found {}\n", @tagName(expected_token_id), @tagName(token.id)); } } const last_token = tokenizer.next(); |
