Diffstat (limited to 'lib/std')
-rw-r--r--  lib/std/zig/tokenizer.zig | 18
1 file changed, 17 insertions(+), 1 deletion(-)
diff --git a/lib/std/zig/tokenizer.zig b/lib/std/zig/tokenizer.zig
index d322b98f71..93f0339c41 100644
--- a/lib/std/zig/tokenizer.zig
+++ b/lib/std/zig/tokenizer.zig
@@ -425,7 +425,16 @@ pub const Tokenizer = struct {
const c = self.buffer[self.index];
switch (state) {
.start => switch (c) {
- 0 => break,
+ 0 => {
+ if (self.index != self.buffer.len) {
+ result.tag = .invalid;
+ result.loc.start = self.index;
+ self.index += 1;
+ result.loc.end = self.index;
+ return result;
+ }
+ break;
+ },
' ', '\n', '\t', '\r' => {
result.loc.start = self.index + 1;
},
@@ -1851,6 +1860,13 @@ test "saturating operators" {
try testTokenize("-|=", &.{.minus_pipe_equal});
}

+test "null byte before eof" {
+ try testTokenize("123 \x00 456", &.{ .number_literal, .invalid, .number_literal });
+ try testTokenize("//\x00", &.{.invalid});
+ try testTokenize("\\\\\x00", &.{ .multiline_string_literal_line, .invalid });
+ try testTokenize("\x00", &.{.invalid});
+}
+
fn testTokenize(source: [:0]const u8, expected_token_tags: []const Token.Tag) !void {
var tokenizer = Tokenizer.init(source);
for (expected_token_tags) |expected_token_tag| {