From 54987c3d8f358e4ded9113a9ee5fa04dc1372a56 Mon Sep 17 00:00:00 2001
From: Andrew Kelley
Date: Mon, 30 Apr 2018 00:56:59 -0400
Subject: std.zig.tokenizer: 3 slashes is doc comment, 4 is line comment

---
 std/zig/tokenizer.zig | 21 ++++++++++++++++++---
 1 file changed, 18 insertions(+), 3 deletions(-)

(limited to 'std')

diff --git a/std/zig/tokenizer.zig b/std/zig/tokenizer.zig
index 9f5712fa99..92a0fbc5d5 100644
--- a/std/zig/tokenizer.zig
+++ b/std/zig/tokenizer.zig
@@ -260,6 +260,7 @@ pub const Tokenizer = struct {
         Slash,
         LineCommentStart,
         LineComment,
+        DocCommentStart,
         DocComment,
         Zero,
         IntegerLiteral,
@@ -840,8 +841,7 @@ pub const Tokenizer = struct {
                 },
                 State.LineCommentStart => switch (c) {
                     '/' => {
-                        result.id = Token.Id.DocComment;
-                        state = State.DocComment;
+                        state = State.DocCommentStart;
                     },
                     '\n' => break,
                     else => {
@@ -849,6 +849,20 @@ pub const Tokenizer = struct {
                         self.checkLiteralCharacter();
                     },
                 },
+                State.DocCommentStart => switch (c) {
+                    '/' => {
+                        state = State.LineComment;
+                    },
+                    '\n' => {
+                        result.id = Token.Id.DocComment;
+                        break;
+                    },
+                    else => {
+                        state = State.DocComment;
+                        result.id = Token.Id.DocComment;
+                        self.checkLiteralCharacter();
+                    },
+                },
                 State.LineComment, State.DocComment => switch (c) {
                     '\n' => break,
                     else => self.checkLiteralCharacter(),
@@ -938,7 +952,7 @@ pub const Tokenizer = struct {
             State.LineComment => {
                 result.id = Token.Id.LineComment;
             },
-            State.DocComment => {
+            State.DocComment, State.DocCommentStart => {
                 result.id = Token.Id.DocComment;
             },
@@ -1213,6 +1227,7 @@ test "tokenizer - line comment and doc comment" {
     testTokenize("// /", []Token.Id{Token.Id.LineComment});
     testTokenize("/// a", []Token.Id{Token.Id.DocComment});
     testTokenize("///", []Token.Id{Token.Id.DocComment});
+    testTokenize("////", []Token.Id{Token.Id.LineComment});
 }
 
 test "tokenizer - line comment followed by identifier" {
--
cgit v1.2.3
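
With the new DocCommentStart state, the slash count alone decides the token kind: two slashes produce Token.Id.LineComment, exactly three produce Token.Id.DocComment (with or without trailing text), and four or more fall back to Token.Id.LineComment. A minimal sketch of extra cases in the style of the patch's existing tests, reusing the file's testTokenize helper; these cases are illustrative only and not part of the patch:

test "tokenizer - slash count decides comment kind (illustrative)" {
    // Two slashes: an ordinary line comment.
    testTokenize("// hello", []Token.Id{Token.Id.LineComment});
    // Exactly three slashes: a doc comment.
    testTokenize("/// hello", []Token.Id{Token.Id.DocComment});
    // Four or more slashes: back to an ordinary line comment.
    testTokenize("//// hello", []Token.Id{Token.Id.LineComment});
}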