author     Vexu <15308111+Vexu@users.noreply.github.com>   2019-11-15 14:12:14 +0200
committer  Vexu <15308111+Vexu@users.noreply.github.com>   2019-11-15 15:02:51 +0200
commit     e509d21f39af726da3c6001b0c20f2c765be07a5 (patch)
tree       eff4c0f8081c81065a9bbf9d1ab0f4e3d7aec0ab /src/tokenizer.cpp
parent     b92f42d1f4b91bb343558f72af84ea052da4ac98 (diff)
download   zig-e509d21f39af726da3c6001b0c20f2c765be07a5.tar.gz
           zig-e509d21f39af726da3c6001b0c20f2c765be07a5.zip
implemented container doc comments in stage 1
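
For context: after this change, once the stage 1 tokenizer has seen "//" and the next character is '!', it moves through the new TokenizeStateSawSlashBang and TokenizeStateContainerDocComment states and emits TokenIdContainerDocComment for the rest of the line, while "///" still produces TokenIdDocComment. A minimal Zig snippet showing the two comment forms this distinguishes (the declaration name and contents are illustrative only):

    //! Container doc comment: documents the enclosing file/container,
    //! now tokenized by stage 1 as TokenIdContainerDocComment.

    /// Regular doc comment: documents the following declaration,
    /// tokenized as TokenIdDocComment.
    pub fn add(a: i32, b: i32) i32 {
        return a + b;
    }
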
Diffstat (limited to 'src/tokenizer.cpp')
-rw-r--r--  src/tokenizer.cpp  32
1 file changed, 32 insertions(+), 0 deletions(-)
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index 8b301f85ac..7ece5ff3fe 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -198,6 +198,7 @@ enum TokenizeState {
TokenizeStateSawSlash,
TokenizeStateSawSlash2,
TokenizeStateSawSlash3,
+ TokenizeStateSawSlashBang,
TokenizeStateSawBackslash,
TokenizeStateSawPercent,
TokenizeStateSawPlus,
@@ -209,6 +210,7 @@ enum TokenizeState {
TokenizeStateSawBar,
TokenizeStateSawBarBar,
TokenizeStateDocComment,
+ TokenizeStateContainerDocComment,
TokenizeStateLineComment,
TokenizeStateLineString,
TokenizeStateLineStringEnd,
@@ -938,6 +940,9 @@ void tokenize(Buf *buf, Tokenization *out) {
case '/':
t.state = TokenizeStateSawSlash3;
break;
+ case '!':
+ t.state = TokenizeStateSawSlashBang;
+ break;
case '\n':
cancel_token(&t);
t.state = TokenizeStateStart;
@@ -965,6 +970,19 @@ void tokenize(Buf *buf, Tokenization *out) {
break;
}
break;
+ case TokenizeStateSawSlashBang:
+ switch (c) {
+ case '\n':
+ set_token_id(&t, t.cur_tok, TokenIdContainerDocComment);
+ end_token(&t);
+ t.state = TokenizeStateStart;
+ break;
+ default:
+ set_token_id(&t, t.cur_tok, TokenIdContainerDocComment);
+ t.state = TokenizeStateContainerDocComment;
+ break;
+ }
+ break;
case TokenizeStateSawBackslash:
switch (c) {
case '\\':
@@ -1055,6 +1073,17 @@ void tokenize(Buf *buf, Tokenization *out) {
break;
}
break;
+ case TokenizeStateContainerDocComment:
+ switch (c) {
+ case '\n':
+ end_token(&t);
+ t.state = TokenizeStateStart;
+ break;
+ default:
+ // do nothing
+ break;
+ }
+ break;
case TokenizeStateSymbolFirstC:
switch (c) {
case '"':
@@ -1545,6 +1574,7 @@ void tokenize(Buf *buf, Tokenization *out) {
case TokenizeStateSawBarBar:
case TokenizeStateLBracket:
case TokenizeStateDocComment:
+ case TokenizeStateContainerDocComment:
end_token(&t);
break;
case TokenizeStateSawDotDot:
@@ -1559,6 +1589,7 @@ void tokenize(Buf *buf, Tokenization *out) {
case TokenizeStateLineComment:
case TokenizeStateSawSlash2:
case TokenizeStateSawSlash3:
+ case TokenizeStateSawSlashBang:
break;
}
if (t.state != TokenizeStateError) {
@@ -1606,6 +1637,7 @@ const char * token_name(TokenId id) {
case TokenIdDash: return "-";
case TokenIdDivEq: return "/=";
case TokenIdDocComment: return "DocComment";
+ case TokenIdContainerDocComment: return "ContainerDocComment";
case TokenIdDot: return ".";
case TokenIdDotStar: return ".*";
case TokenIdEllipsis2: return "..";