| field | value |
|---|---|
| author | Andrew Kelley <superjoe30@gmail.com> (2018-02-13 10:28:55 -0500) |
| committer | Andrew Kelley <superjoe30@gmail.com> (2018-02-13 10:28:55 -0500) |
| commit | 2dcff95bd2fb8f377491ac48b0ecf961183abcd7 |
| tree | 8fc8547d6f4bb443d01ff8a5923b6e4332a344e2 /std |
| parent | dfbb8254ca97154b5314bde03655417c1dca86ae |
| download | zig-2dcff95bd2fb8f377491ac48b0ecf961183abcd7.tar.gz, zig-2dcff95bd2fb8f377491ac48b0ecf961183abcd7.zip |
self hosted: add tokenizer test fix eof handling
Diffstat (limited to 'std')
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | std/zig/tokenizer.zig | 33 |

1 file changed, 30 insertions, 3 deletions
```diff
diff --git a/std/zig/tokenizer.zig b/std/zig/tokenizer.zig
index 694a036f97..de1263ac55 100644
--- a/std/zig/tokenizer.zig
+++ b/std/zig/tokenizer.zig
@@ -72,6 +72,8 @@ pub const Token = struct {
         Eof,
         Builtin,
         Bang,
+        Pipe,
+        PipeEqual,
         Equal,
         EqualEqual,
         BangEqual,
@@ -193,6 +195,7 @@ pub const Tokenizer = struct {
         StringLiteralBackslash,
         Equal,
         Bang,
+        Pipe,
         Minus,
         Slash,
         LineComment,
@@ -248,6 +251,9 @@ pub const Tokenizer = struct {
                     '!' => {
                         state = State.Bang;
                     },
+                    '|' => {
+                        state = State.Pipe;
+                    },
                     '(' => {
                         result.id = Token.Id.LParen;
                         self.index += 1;
@@ -394,6 +400,18 @@ pub const Tokenizer = struct {
                     },
                 },
 
+                State.Pipe => switch (c) {
+                    '=' => {
+                        result.id = Token.Id.PipeEqual;
+                        self.index += 1;
+                        break;
+                    },
+                    else => {
+                        result.id = Token.Id.Pipe;
+                        break;
+                    },
+                },
+
                 State.Equal => switch (c) {
                     '=' => {
                         result.id = Token.Id.EqualEqual;
@@ -525,9 +543,7 @@ pub const Tokenizer = struct {
                     else => break,
                 },
             }
-        }
-        result.end = self.index;
-        if (self.index == self.buffer.len) {
+        } else if (self.index == self.buffer.len) {
             switch (state) {
                 State.Start,
                 State.C,
@@ -578,6 +594,9 @@ pub const Tokenizer = struct {
                 State.Period2 => {
                     result.id = Token.Id.Ellipsis2;
                 },
+                State.Pipe => {
+                    result.id = Token.Id.Pipe;
+                },
             }
         }
         if (result.id == Token.Id.Eof) {
@@ -587,6 +606,7 @@ pub const Tokenizer = struct {
             }
         }
 
+        result.end = self.index;
         return result;
     }
 
@@ -716,6 +736,13 @@ test "tokenizer - string identifier and builtin fns" {
     );
 }
 
+test "tokenizer - pipe and then invalid" {
+    testTokenize("||=", []Token.Id{
+        Token.Id.Pipe,
+        Token.Id.PipeEqual,
+    });
+}
+
 fn testTokenize(source: []const u8, expected_tokens: []const Token.Id) void {
     var tokenizer = Tokenizer.init(source);
     for (expected_tokens) |expected_token_id| {
```
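The EOF part of this change hinges on Zig's `while ... else` construct: the `else` branch of a `while` loop runs only when the loop condition becomes false, and is skipped entirely when the loop exits through `break`. The hunk at `@@ -525,9 +543,7 @@` attaches the end-of-buffer `switch (state)` as an `else` on the tokenizer's main loop, so it only runs when the whole buffer was scanned without a `break`, and `result.end = self.index;` is deferred until just before `return`. Below is a minimal, self-contained sketch of that control-flow pattern in present-day Zig syntax; the function `scanForPipe` and the `main` driver are illustrative only and not part of the commit.

```zig
const std = @import("std");

// Hypothetical helper, not part of the commit: it only illustrates the
// `while ... else` control flow that the diff introduces in Tokenizer.next().
// The `else` branch of a Zig while loop runs only when the loop condition
// becomes false; a `break` skips it entirely.
fn scanForPipe(buffer: []const u8) usize {
    var index: usize = 0;
    while (index < buffer.len) : (index += 1) {
        if (buffer[index] == '|') break; // early exit: the else branch is skipped
    } else {
        // Reached only when the whole buffer was consumed, mirroring the
        // tokenizer's end-of-buffer handling after this change.
        return buffer.len;
    }
    return index;
}

pub fn main() void {
    std.debug.assert(scanForPipe("a|b") == 1); // loop broke at the '|'
    std.debug.assert(scanForPipe("abc") == 3); // else branch ran at end of input
}
```

The new `State.Pipe` prong follows the same two-character lookahead pattern as the existing `State.Equal` and `State.Bang` states: `|=` becomes `Token.Id.PipeEqual`, anything else terminates a plain `Token.Id.Pipe`, which is exactly what the added `testTokenize("||=", ...)` test exercises.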
