diff options
| author | Andrew Kelley <superjoe30@gmail.com> | 2018-04-30 00:56:59 -0400 |
|---|---|---|
| committer | Andrew Kelley <superjoe30@gmail.com> | 2018-04-30 00:56:59 -0400 |
| commit | 54987c3d8f358e4ded9113a9ee5fa04dc1372a56 (patch) | |
| tree | 455cfafce4790a7b209e9703337b5c95b1b5150f /std | |
| parent | 0bf7ebcfea7934cb972aef84b25494c92f0dcf6f (diff) | |
| download | zig-54987c3d8f358e4ded9113a9ee5fa04dc1372a56.tar.gz zig-54987c3d8f358e4ded9113a9ee5fa04dc1372a56.zip | |
std.zig.tokenizer: 3 slashes is doc comment, 4 is line comment
Diffstat (limited to 'std')
| -rw-r--r-- | std/zig/tokenizer.zig | 21 |
1 file changed, 18 insertions, 3 deletions
diff --git a/std/zig/tokenizer.zig b/std/zig/tokenizer.zig
index 9f5712fa99..92a0fbc5d5 100644
--- a/std/zig/tokenizer.zig
+++ b/std/zig/tokenizer.zig
@@ -260,6 +260,7 @@ pub const Tokenizer = struct {
         Slash,
         LineCommentStart,
         LineComment,
+        DocCommentStart,
         DocComment,
         Zero,
         IntegerLiteral,
@@ -840,8 +841,7 @@ pub const Tokenizer = struct {
                 },
                 State.LineCommentStart => switch (c) {
                     '/' => {
-                        result.id = Token.Id.DocComment;
-                        state = State.DocComment;
+                        state = State.DocCommentStart;
                     },
                     '\n' => break,
                     else => {
@@ -849,6 +849,20 @@ pub const Tokenizer = struct {
                         self.checkLiteralCharacter();
                     },
                 },
+                State.DocCommentStart => switch (c) {
+                    '/' => {
+                        state = State.LineComment;
+                    },
+                    '\n' => {
+                        result.id = Token.Id.DocComment;
+                        break;
+                    },
+                    else => {
+                        state = State.DocComment;
+                        result.id = Token.Id.DocComment;
+                        self.checkLiteralCharacter();
+                    },
+                },
                 State.LineComment, State.DocComment => switch (c) {
                     '\n' => break,
                     else => self.checkLiteralCharacter(),
@@ -938,7 +952,7 @@ pub const Tokenizer = struct {
             State.LineComment => {
                 result.id = Token.Id.LineComment;
             },
-            State.DocComment => {
+            State.DocComment, State.DocCommentStart => {
                 result.id = Token.Id.DocComment;
             },
@@ -1213,6 +1227,7 @@ test "tokenizer - line comment and doc comment" {
     testTokenize("// /", []Token.Id{Token.Id.LineComment});
     testTokenize("/// a", []Token.Id{Token.Id.DocComment});
     testTokenize("///", []Token.Id{Token.Id.DocComment});
+    testTokenize("////", []Token.Id{Token.Id.LineComment});
 }
 
 test "tokenizer - line comment followed by identifier" {
