| field | value | date |
|---|---|---|
| author | Ben Fiedler <git@bfiedler.ch> | 2022-06-24 17:24:44 +0200 |
| committer | Isaac Freund <mail@isaacfreund.com> | 2022-06-28 21:45:02 +0200 |
| commit | 76546b3f8e19b11f0c88259b1b3e5fd14cfbde31 (patch) | |
| tree | bf4b76051728ea72a9a28617668efe9200a8d663 /lib/std | |
| parent | bb2929ba083d3c5d86cae1d83cba0abd43ffa3d5 (diff) | |
| download | zig-76546b3f8e19b11f0c88259b1b3e5fd14cfbde31.tar.gz zig-76546b3f8e19b11f0c88259b1b3e5fd14cfbde31.zip | |
std.mem: add peek() to TokenIterator(T)
Diffstat (limited to 'lib/std')
| mode | path | changes |
|---|---|---|
| -rw-r--r-- | lib/std/mem.zig | 20 |
1 file changed, 17 insertions, 3 deletions
```diff
diff --git a/lib/std/mem.zig b/lib/std/mem.zig
index 61d6b84874..046339b6fc 100644
--- a/lib/std/mem.zig
+++ b/lib/std/mem.zig
@@ -1608,6 +1608,7 @@ pub fn tokenize(comptime T: type, buffer: []const T, delimiter_bytes: []const T)
 test "tokenize" {
     var it = tokenize(u8, " abc def ghi ", " ");
     try testing.expect(eql(u8, it.next().?, "abc"));
+    try testing.expect(eql(u8, it.peek().?, "def"));
     try testing.expect(eql(u8, it.next().?, "def"));
     try testing.expect(eql(u8, it.next().?, "ghi"));
     try testing.expect(it.next() == null);
@@ -1626,9 +1627,11 @@ test "tokenize" {
 
     it = tokenize(u8, "|", "|");
     try testing.expect(it.next() == null);
+    try testing.expect(it.peek() == null);
 
     it = tokenize(u8, "", "|");
     try testing.expect(it.next() == null);
+    try testing.expect(it.peek() == null);
 
     it = tokenize(u8, "hello", "");
     try testing.expect(eql(u8, it.next().?, "hello"));
@@ -1650,11 +1653,13 @@ test "tokenize" {
 test "tokenize (multibyte)" {
     var it = tokenize(u8, "a|b,c/d e", " /,|");
     try testing.expect(eql(u8, it.next().?, "a"));
+    try testing.expect(eql(u8, it.peek().?, "b"));
     try testing.expect(eql(u8, it.next().?, "b"));
     try testing.expect(eql(u8, it.next().?, "c"));
     try testing.expect(eql(u8, it.next().?, "d"));
     try testing.expect(eql(u8, it.next().?, "e"));
     try testing.expect(it.next() == null);
+    try testing.expect(it.peek() == null);
 
     var it16 = tokenize(
         u16,
@@ -1778,8 +1783,17 @@ pub fn TokenIterator(comptime T: type) type {
 
         const Self = @This();
 
-        /// Returns a slice of the next token, or null if tokenization is complete.
+        /// Returns a slice of the current token, or null if tokenization is
+        /// complete, and advances to the next token.
         pub fn next(self: *Self) ?[]const T {
+            const result = self.peek() orelse return null;
+            self.index += result.len;
+            return result;
+        }
+
+        /// Returns a slice of the current token, or null if tokenization is
+        /// complete. Does not advance to the next token.
+        pub fn peek(self: *Self) ?[]const T {
             // move to beginning of token
             while (self.index < self.buffer.len and self.isSplitByte(self.buffer[self.index])) : (self.index += 1) {}
             const start = self.index;
@@ -1788,8 +1802,8 @@ pub fn TokenIterator(comptime T: type) type {
             }
 
             // move to end of token
-            while (self.index < self.buffer.len and !self.isSplitByte(self.buffer[self.index])) : (self.index += 1) {}
-            const end = self.index;
+            var end = start;
+            while (end < self.buffer.len and !self.isSplitByte(self.buffer[end])) : (end += 1) {}
 
             return self.buffer[start..end];
         }
```
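For readers skimming the diff, here is a minimal sketch of how the new `peek()` pairs with `next()`. The input string and the `std.debug.print` reporting are illustrative only, not part of the commit:

```zig
const std = @import("std");

pub fn main() void {
    // Illustrative input; any delimiter set accepted by tokenize() works here.
    var it = std.mem.tokenize(u8, "GET /index.html HTTP/1.1", " ");

    // peek() reports the upcoming token without consuming it,
    // so the next() that follows returns the same slice.
    while (it.peek()) |upcoming| {
        std.debug.print("about to consume: {s}\n", .{upcoming});
        _ = it.next();
    }
}
```

As the diff shows, `peek()` may still advance the iterator's index past leading delimiter bytes, but it tracks the token's end in a local `end`, so the token itself is left unconsumed; `next()` then consumes it by adding the token's length to the index.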
