| field | value | date |
|---|---|---|
| author | BenoitJGirard <BenoitJGirard@users.noreply.github.com> | 2019-02-17 14:38:55 -0500 |
| committer | GitHub <noreply@github.com> | 2019-02-17 14:38:55 -0500 |
| commit | 6daa041932ae5ab03eed953dacf3ca506078390c (patch) | |
| tree | 0f51f6c2ff84dde51b61bba6799e5c5abccf91b4 /src/tokenizer.cpp | |
| parent | f0ec308e26ff957c7fbb50ccc69d3d549c42c4da (diff) | |
| parent | 8d2a902945ef97f28152c3d5a68bb974809c8539 (diff) | |
| download | zig-6daa041932ae5ab03eed953dacf3ca506078390c.tar.gz zig-6daa041932ae5ab03eed953dacf3ca506078390c.zip | |
Merge pull request #2 from ziglang/master
Refreshing fork.
Diffstat (limited to 'src/tokenizer.cpp')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/tokenizer.cpp | 33 |
1 file changed, 25 insertions, 8 deletions
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index 921ee4de09..9ff6ed3bbe 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -146,6 +146,7 @@ static const struct ZigKeyword zig_keywords[] = {
     {"suspend", TokenIdKeywordSuspend},
     {"switch", TokenIdKeywordSwitch},
     {"test", TokenIdKeywordTest},
+    {"threadlocal", TokenIdKeywordThreadLocal},
     {"true", TokenIdKeywordTrue},
     {"try", TokenIdKeywordTry},
     {"undefined", TokenIdKeywordUndefined},
@@ -220,6 +221,7 @@ enum TokenizeState {
     TokenizeStateError,
     TokenizeStateLBracket,
     TokenizeStateLBracketStar,
+    TokenizeStateLBracketStarC,
 };
@@ -248,13 +250,8 @@ ATTRIBUTE_PRINTF(2, 3)
 static void tokenize_error(Tokenize *t, const char *format, ...) {
     t->state = TokenizeStateError;
 
-    if (t->cur_tok) {
-        t->out->err_line = t->cur_tok->start_line;
-        t->out->err_column = t->cur_tok->start_column;
-    } else {
-        t->out->err_line = t->line;
-        t->out->err_column = t->column;
-    }
+    t->out->err_line = t->line;
+    t->out->err_column = t->column;
 
     va_list ap;
     va_start(ap, format);
@@ -850,7 +847,6 @@ void tokenize(Buf *buf, Tokenization *out) {
                 switch (c) {
                     case '*':
                         t.state = TokenizeStateLBracketStar;
-                        set_token_id(&t, t.cur_tok, TokenIdBracketStarBracket);
                         break;
                     default:
                         // reinterpret as just an lbracket
@@ -862,6 +858,21 @@ void tokenize(Buf *buf, Tokenization *out) {
                 break;
             case TokenizeStateLBracketStar:
                 switch (c) {
+                    case 'c':
+                        t.state = TokenizeStateLBracketStarC;
+                        set_token_id(&t, t.cur_tok, TokenIdBracketStarCBracket);
+                        break;
+                    case ']':
+                        set_token_id(&t, t.cur_tok, TokenIdBracketStarBracket);
+                        end_token(&t);
+                        t.state = TokenizeStateStart;
+                        break;
+                    default:
+                        invalid_char_error(&t, c);
+                }
+                break;
+            case TokenizeStateLBracketStarC:
+                switch (c) {
                     case ']':
                         end_token(&t);
                         t.state = TokenizeStateStart;
@@ -886,6 +897,9 @@ void tokenize(Buf *buf, Tokenization *out) {
                 break;
             case TokenizeStateSawAmpersand:
                 switch (c) {
+                    case '&':
+                        tokenize_error(&t, "`&&` is invalid. Note that `and` is boolean AND.");
+                        break;
                     case '=':
                         set_token_id(&t, t.cur_tok, TokenIdBitAndEq);
                         end_token(&t);
@@ -1492,6 +1506,7 @@ void tokenize(Buf *buf, Tokenization *out) {
         case TokenizeStateLineStringContinue:
         case TokenizeStateLineStringContinueC:
         case TokenizeStateLBracketStar:
+        case TokenizeStateLBracketStarC:
             tokenize_error(&t, "unexpected EOF");
             break;
         case TokenizeStateLineComment:
@@ -1529,6 +1544,7 @@ const char * token_name(TokenId id) {
         case TokenIdBitShiftRightEq: return ">>=";
         case TokenIdBitXorEq: return "^=";
         case TokenIdBracketStarBracket: return "[*]";
+        case TokenIdBracketStarCBracket: return "[*c]";
         case TokenIdCharLiteral: return "CharLiteral";
         case TokenIdCmpEq: return "==";
         case TokenIdCmpGreaterOrEq: return ">=";
@@ -1588,6 +1604,7 @@ const char * token_name(TokenId id) {
         case TokenIdKeywordStruct: return "struct";
         case TokenIdKeywordSwitch: return "switch";
         case TokenIdKeywordTest: return "test";
+        case TokenIdKeywordThreadLocal: return "threadlocal";
         case TokenIdKeywordTrue: return "true";
         case TokenIdKeywordTry: return "try";
         case TokenIdKeywordUndefined: return "undefined";
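For readers skimming the hunks above: the `[*c]` support is a straight extension of the tokenizer's character-at-a-time state machine. Seeing `*` after `[` no longer commits to the `[*]` token id up front; instead a following `c` moves to the new `TokenizeStateLBracketStarC` state, and only the closing `]` finishes either token. The sketch below shows that disambiguation in isolation; it is not the compiler's actual code, and the `State` enum and `scan_bracket_token` helper are illustrative names only.

```cpp
// Minimal sketch (assumed names, not Zig compiler source) of how the
// LBracketStar / LBracketStarC states distinguish "[*]" from "[*c]".
#include <cstdio>
#include <string>

enum class State { Start, LBracket, LBracketStar, LBracketStarC };

// Returns the recognized token spelling, "[" when the input turns out to be a
// plain left bracket, or "error" for the invalid-character / EOF paths.
static std::string scan_bracket_token(const std::string &src) {
    State state = State::Start;
    for (char c : src) {
        switch (state) {
            case State::Start:
                if (c == '[') state = State::LBracket;
                else return "error";
                break;
            case State::LBracket:
                if (c == '*') state = State::LBracketStar;
                else return "[";              // reinterpret as just an lbracket
                break;
            case State::LBracketStar:
                if (c == 'c') state = State::LBracketStarC;  // tentatively "[*c]"
                else if (c == ']') return "[*]";
                else return "error";          // invalid_char_error in the real code
                break;
            case State::LBracketStarC:
                if (c == ']') return "[*c]";
                return "error";               // invalid_char_error in the real code
        }
    }
    return "error";  // input ended mid-token: the "unexpected EOF" case
}

int main() {
    std::printf("%s\n", scan_bracket_token("[*]").c_str());   // prints: [*]
    std::printf("%s\n", scan_bracket_token("[*c]").c_str());  // prints: [*c]
    std::printf("%s\n", scan_bracket_token("[x").c_str());    // prints: [
}
```

The final `return "error"` stands in for the EOF handling in the diff: if the source ends while the machine is still in either bracket state, the tokenizer reports "unexpected EOF", which is why `TokenizeStateLBracketStarC` is added to that switch as well.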
