Diffstat (limited to 'src/tokenizer.cpp')
-rw-r--r--   src/tokenizer.cpp   53
1 file changed, 51 insertions(+), 2 deletions(-)
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index 71a24fe726..399597b7bc 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -196,6 +196,8 @@ enum TokenizeState {
     TokenizeStateSawStar,
     TokenizeStateSawStarPercent,
     TokenizeStateSawSlash,
+    TokenizeStateSawSlash2,
+    TokenizeStateSawSlash3,
     TokenizeStateSawBackslash,
     TokenizeStateSawPercent,
     TokenizeStateSawPlus,
@@ -206,6 +208,7 @@ enum TokenizeState {
     TokenizeStateSawCaret,
     TokenizeStateSawBar,
     TokenizeStateSawBarBar,
+    TokenizeStateDocComment,
     TokenizeStateLineComment,
     TokenizeStateLineString,
     TokenizeStateLineStringEnd,
@@ -910,8 +913,7 @@ void tokenize(Buf *buf, Tokenization *out) {
             case TokenizeStateSawSlash:
                 switch (c) {
                     case '/':
-                        cancel_token(&t);
-                        t.state = TokenizeStateLineComment;
+                        t.state = TokenizeStateSawSlash2;
                         break;
                     case '=':
                         set_token_id(&t, t.cur_tok, TokenIdDivEq);
@@ -925,6 +927,38 @@ void tokenize(Buf *buf, Tokenization *out) {
                         continue;
                 }
                 break;
+            case TokenizeStateSawSlash2:
+                switch (c) {
+                    case '/':
+                        t.state = TokenizeStateSawSlash3;
+                        break;
+                    case '\n':
+                        cancel_token(&t);
+                        t.state = TokenizeStateStart;
+                        break;
+                    default:
+                        cancel_token(&t);
+                        t.state = TokenizeStateLineComment;
+                        break;
+                }
+                break;
+            case TokenizeStateSawSlash3:
+                switch (c) {
+                    case '/':
+                        cancel_token(&t);
+                        t.state = TokenizeStateLineComment;
+                        break;
+                    case '\n':
+                        set_token_id(&t, t.cur_tok, TokenIdDocComment);
+                        end_token(&t);
+                        t.state = TokenizeStateStart;
+                        break;
+                    default:
+                        set_token_id(&t, t.cur_tok, TokenIdDocComment);
+                        t.state = TokenizeStateDocComment;
+                        break;
+                }
+                break;
             case TokenizeStateSawBackslash:
                 switch (c) {
                     case '\\':
@@ -1004,6 +1038,17 @@ void tokenize(Buf *buf, Tokenization *out) {
                         break;
                 }
                 break;
+            case TokenizeStateDocComment:
+                switch (c) {
+                    case '\n':
+                        end_token(&t);
+                        t.state = TokenizeStateStart;
+                        break;
+                    default:
+                        // do nothing
+                        break;
+                }
+                break;
             case TokenizeStateSymbolFirstC:
                 switch (c) {
                     case '"':
@@ -1466,6 +1511,7 @@ void tokenize(Buf *buf, Tokenization *out) {
         case TokenizeStateLineStringEnd:
         case TokenizeStateSawBarBar:
         case TokenizeStateLBracket:
+        case TokenizeStateDocComment:
             end_token(&t);
             break;
         case TokenizeStateSawDotDot:
@@ -1478,6 +1524,8 @@ void tokenize(Buf *buf, Tokenization *out) {
             tokenize_error(&t, "unexpected EOF");
             break;
         case TokenizeStateLineComment:
+        case TokenizeStateSawSlash2:
+        case TokenizeStateSawSlash3:
             break;
     }
     if (t.state != TokenizeStateError) {
@@ -1524,6 +1572,7 @@ const char * token_name(TokenId id) {
        case TokenIdComma: return ",";
        case TokenIdDash: return "-";
        case TokenIdDivEq: return "/=";
+       case TokenIdDocComment: return "DocComment";
        case TokenIdDot: return ".";
        case TokenIdEllipsis2: return "..";
        case TokenIdEllipsis3: return "...";
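
Note (not part of the commit): the tokenizer previously cancelled the token and switched to TokenizeStateLineComment as soon as it saw "//". The two new states count the leading slashes, so exactly three ("///") start a TokenIdDocComment token, while "//" and "////" (or more) remain ordinary line comments that emit no token; the doc comment token ends at the newline, either in TokenizeStateSawSlash3 (an empty doc comment) or in TokenizeStateDocComment. A minimal standalone sketch of that slash-counting rule, assuming the behavior described above (classify_comment is an illustrative helper, not a function from src/tokenizer.cpp):

    #include <cassert>
    #include <cstddef>
    #include <string>

    // Hypothetical helper mirroring the rule the new states implement:
    //   "//..."   -> plain line comment, no token
    //   "///..."  -> doc comment token (TokenIdDocComment)
    //   "////..." -> plain line comment again
    enum class CommentKind { Line, Doc };

    static CommentKind classify_comment(const std::string &line) {
        std::size_t slashes = 0;
        while (slashes < line.size() && line[slashes] == '/')
            slashes += 1;
        // Exactly three leading slashes mark a doc comment; two, or four
        // and more, fall back to an ordinary line comment.
        return slashes == 3 ? CommentKind::Doc : CommentKind::Line;
    }

    int main() {
        assert(classify_comment("// plain comment") == CommentKind::Line);
        assert(classify_comment("/// documents the next decl") == CommentKind::Doc);
        assert(classify_comment("//// still a line comment") == CommentKind::Line);
        return 0;
    }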
