author     Andrew Kelley <andrew@ziglang.org>  2019-11-17 22:31:12 +0000
committer  GitHub <noreply@github.com>  2019-11-17 22:31:12 +0000
commit     57b8614a5a287d0a312b1cade463ec5485f0518f (patch)
tree       fda14f96b79a6878208ab483027f9fad97cf2460 /src/tokenizer.cpp
parent     4e28d7a5f7d6346acc42a7524dd77fa5f9322029 (diff)
parent     314cb707fce553e51d2ffd5c1ea506fbd1acdf76 (diff)
download   zig-57b8614a5a287d0a312b1cade463ec5485f0518f.tar.gz
           zig-57b8614a5a287d0a312b1cade463ec5485f0518f.zip
Merge pull request #3697 from Vexu/container-docs
Implement container level doc comments
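
Background: Zig's tokenizer already distinguishes plain // line comments from /// doc comments, which document the declaration that follows them. This change adds a third form, //!, lexed as a container-level doc comment that documents the enclosing container (for example, the file itself) rather than the next declaration. A small standalone sketch of the resulting classification follows the diff below.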
Diffstat (limited to 'src/tokenizer.cpp')
-rw-r--r--  src/tokenizer.cpp  32
1 file changed, 32 insertions, 0 deletions
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index 8b301f85ac..7ece5ff3fe 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -198,6 +198,7 @@ enum TokenizeState {
     TokenizeStateSawSlash,
     TokenizeStateSawSlash2,
     TokenizeStateSawSlash3,
+    TokenizeStateSawSlashBang,
     TokenizeStateSawBackslash,
     TokenizeStateSawPercent,
     TokenizeStateSawPlus,
@@ -209,6 +210,7 @@ enum TokenizeState {
     TokenizeStateSawBar,
     TokenizeStateSawBarBar,
     TokenizeStateDocComment,
+    TokenizeStateContainerDocComment,
     TokenizeStateLineComment,
     TokenizeStateLineString,
     TokenizeStateLineStringEnd,
@@ -938,6 +940,9 @@ void tokenize(Buf *buf, Tokenization *out) {
                     case '/':
                         t.state = TokenizeStateSawSlash3;
                         break;
+                    case '!':
+                        t.state = TokenizeStateSawSlashBang;
+                        break;
                     case '\n':
                         cancel_token(&t);
                         t.state = TokenizeStateStart;
@@ -965,6 +970,19 @@ void tokenize(Buf *buf, Tokenization *out) {
                         break;
                 }
                 break;
+            case TokenizeStateSawSlashBang:
+                switch (c) {
+                    case '\n':
+                        set_token_id(&t, t.cur_tok, TokenIdContainerDocComment);
+                        end_token(&t);
+                        t.state = TokenizeStateStart;
+                        break;
+                    default:
+                        set_token_id(&t, t.cur_tok, TokenIdContainerDocComment);
+                        t.state = TokenizeStateContainerDocComment;
+                        break;
+                }
+                break;
             case TokenizeStateSawBackslash:
                 switch (c) {
                     case '\\':
@@ -1055,6 +1073,17 @@ void tokenize(Buf *buf, Tokenization *out) {
                         break;
                 }
                 break;
+            case TokenizeStateContainerDocComment:
+                switch (c) {
+                    case '\n':
+                        end_token(&t);
+                        t.state = TokenizeStateStart;
+                        break;
+                    default:
+                        // do nothing
+                        break;
+                }
+                break;
             case TokenizeStateSymbolFirstC:
                 switch (c) {
                     case '"':
@@ -1545,6 +1574,7 @@ void tokenize(Buf *buf, Tokenization *out) {
         case TokenizeStateSawBarBar:
         case TokenizeStateLBracket:
         case TokenizeStateDocComment:
+        case TokenizeStateContainerDocComment:
             end_token(&t);
             break;
         case TokenizeStateSawDotDot:
@@ -1559,6 +1589,7 @@ void tokenize(Buf *buf, Tokenization *out) {
         case TokenizeStateLineComment:
         case TokenizeStateSawSlash2:
         case TokenizeStateSawSlash3:
+        case TokenizeStateSawSlashBang:
             break;
     }
     if (t.state != TokenizeStateError) {
@@ -1606,6 +1637,7 @@ const char * token_name(TokenId id) {
         case TokenIdDash: return "-";
         case TokenIdDivEq: return "/=";
         case TokenIdDocComment: return "DocComment";
+        case TokenIdContainerDocComment: return "ContainerDocComment";
         case TokenIdDot: return ".";
         case TokenIdDotStar: return ".*";
         case TokenIdEllipsis2: return "..";
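
Taken together, the hunks above add one new token id (TokenIdContainerDocComment) and two new states: after "//" (TokenizeStateSawSlash2), a '!' enters TokenizeStateSawSlashBang, which commits the current token as a container doc comment and then consumes the rest of the line in TokenizeStateContainerDocComment, ending at a newline or at end of file. The following standalone C++ sketch mirrors those transitions; it is illustrative only, not the compiler's code, classify_comment is a hypothetical helper, and the input is assumed to begin with "//".

#include <cstdio>

// Comment kinds the tokenizer distinguishes after this change.
enum CommentKind { LineComment, DocComment, ContainerDocComment };

// Mirrors the state transitions in the diff: after "//" a '!' selects the
// container-doc-comment path, a third '/' selects the doc-comment path,
// and a fourth '/' demotes "////" back to a plain line comment (handled
// by TokenizeStateSawSlash3 in the real tokenizer, outside this diff).
static CommentKind classify_comment(const char *line) {
    if (line[2] == '!') return ContainerDocComment;
    if (line[2] == '/' && line[3] != '/') return DocComment;
    return LineComment;
}

int main(void) {
    const char *samples[] = { "// note", "/// doc", "//! container doc", "//// plain again" };
    const char *names[] = { "LineComment", "DocComment", "ContainerDocComment" };
    for (const char *s : samples) {
        printf("%-20s -> %s\n", s, names[classify_comment(s)]);
    }
    return 0;
}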