Diffstat (limited to 'lib/std/Io/Reader.zig')
-rw-r--r--  lib/std/Io/Reader.zig  140
1 file changed, 104 insertions(+), 36 deletions(-)
diff --git a/lib/std/Io/Reader.zig b/lib/std/Io/Reader.zig
index 6ed0230317..e2344e959e 100644
--- a/lib/std/Io/Reader.zig
+++ b/lib/std/Io/Reader.zig
@@ -481,7 +481,6 @@ pub fn readVecAll(r: *Reader, data: [][]u8) Error!void {
/// is returned instead.
///
/// See also:
-/// * `peek`
/// * `toss`
pub fn peek(r: *Reader, n: usize) Error![]u8 {
try r.fill(n);
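
(A minimal standalone sketch of the peek/toss pairing this doc comment describes, using the `.fixed` initializer that the tests below rely on; not part of the patch itself.)

    const std = @import("std");

    test "sketch: peek does not advance the seek position" {
        var r: std.Io.Reader = .fixed("abc");
        try std.testing.expectEqualStrings("ab", try r.peek(2));
        try std.testing.expectEqualStrings("ab", try r.peek(2)); // still unchanged
        r.toss(2); // explicitly advance past the peeked bytes
        try std.testing.expectEqualStrings("c", try r.peek(1));
    }
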
@@ -732,7 +731,7 @@ pub const DelimiterError = error{
};
/// Returns a slice of the next bytes of buffered data from the stream until
-/// `sentinel` is found, advancing the seek position.
+/// `sentinel` is found, advancing the seek position past the sentinel.
///
/// Returned slice has a sentinel.
///
@@ -765,7 +764,7 @@ pub fn peekSentinel(r: *Reader, comptime sentinel: u8) DelimiterError![:sentinel
}
/// Returns a slice of the next bytes of buffered data from the stream until
-/// `delimiter` is found, advancing the seek position.
+/// `delimiter` is found, advancing the seek position past the delimiter.
///
/// Returned slice includes the delimiter as the last byte.
///
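
(A standalone sketch of the clarified seek semantics, assuming only the behavior these doc comments state: both inclusive take variants leave the seek position past the consumed sentinel or delimiter.)

    const std = @import("std");

    test "sketch: inclusive takes consume the delimiter" {
        var r: std.Io.Reader = .fixed("ab\ncd\ne");
        try std.testing.expectEqualStrings("ab", try r.takeSentinel('\n'));
        try std.testing.expectEqualStrings("cd\n", try r.takeDelimiterInclusive('\n'));
        // Both newlines were consumed; "e" is the next unread byte.
        try std.testing.expectEqualStrings("e", try r.peek(1));
    }
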
@@ -793,31 +792,42 @@ pub fn takeDelimiterInclusive(r: *Reader, delimiter: u8) DelimiterError![]u8 {
/// * `peekDelimiterExclusive`
/// * `takeDelimiterInclusive`
pub fn peekDelimiterInclusive(r: *Reader, delimiter: u8) DelimiterError![]u8 {
- const buffer = r.buffer[0..r.end];
- const seek = r.seek;
- if (std.mem.indexOfScalarPos(u8, buffer, seek, delimiter)) |end| {
- @branchHint(.likely);
- return buffer[seek .. end + 1];
+ {
+ const contents = r.buffer[0..r.end];
+ const seek = r.seek;
+ if (std.mem.findScalarPos(u8, contents, seek, delimiter)) |end| {
+ @branchHint(.likely);
+ return contents[seek .. end + 1];
+ }
}
- // TODO take a parameter for max search length rather than relying on buffer capacity
- try rebase(r, r.buffer.len);
- while (r.buffer.len - r.end != 0) {
- const end_cap = r.buffer[r.end..];
- var writer: Writer = .fixed(end_cap);
- const n = r.vtable.stream(r, &writer, .limited(end_cap.len)) catch |err| switch (err) {
- error.WriteFailed => unreachable,
- else => |e| return e,
- };
- r.end += n;
- if (std.mem.indexOfScalarPos(u8, end_cap[0..n], 0, delimiter)) |end| {
- return r.buffer[0 .. r.end - n + end + 1];
+ while (true) {
+ const content_len = r.end - r.seek;
+ if (r.buffer.len - content_len == 0) break;
+ try fillMore(r);
+ const seek = r.seek;
+ const contents = r.buffer[0..r.end];
+ if (std.mem.findScalarPos(u8, contents, seek + content_len, delimiter)) |end| {
+ return contents[seek .. end + 1];
}
}
- return error.StreamTooLong;
+ // It might or might not be end of stream. There is no more buffer space
+ // left to disambiguate. If `StreamTooLong` was added to `RebaseError` then
+ // this logic could be replaced by removing the exit condition from the
+ // above while loop. That error code would represent when `buffer` capacity
+ // is too small for an operation, replacing the current use of asserts.
+ var failing_writer = Writer.failing;
+ while (r.vtable.stream(r, &failing_writer, .limited(1))) |n| {
+ assert(n == 0);
+ } else |err| switch (err) {
+ error.WriteFailed => return error.StreamTooLong,
+ error.ReadFailed => |e| return e,
+ error.EndOfStream => |e| return e,
+ }
}
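
(The rewritten tail distinguishes two failure modes: a stream that ends before any delimiter is found, which yields `error.EndOfStream` with the partial data left buffered, and a full buffer while the stream still has data, which yields `error.StreamTooLong`. A standalone sketch of the first case, mirroring the updated tests further down.)

    const std = @import("std");

    test "sketch: missing delimiter at end of stream" {
        var r: std.Io.Reader = .fixed("ab\nc");
        try std.testing.expectEqualStrings("ab\n", try r.peekDelimiterInclusive('\n'));
        r.toss(3);
        // No '\n' remains, so this is EndOfStream rather than StreamTooLong,
        // and the unread "c" stays buffered for a later peek or take.
        try std.testing.expectError(error.EndOfStream, r.peekDelimiterInclusive('\n'));
        try std.testing.expectEqualStrings("c", try r.peek(1));
    }
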
/// Returns a slice of the next bytes of buffered data from the stream until
-/// `delimiter` is found, advancing the seek position up to the delimiter.
+/// `delimiter` is found, advancing the seek position up to (but not past)
+/// the delimiter.
///
/// Returned slice excludes the delimiter. End-of-stream is treated as equivalent
/// to a delimiter, unless it would result in a length 0 return value, in which
@@ -831,20 +841,13 @@ pub fn peekDelimiterInclusive(r: *Reader, delimiter: u8) DelimiterError![]u8 {
/// Invalidates previously returned values from `peek`.
///
/// See also:
+/// * `takeDelimiter`
/// * `takeDelimiterInclusive`
/// * `peekDelimiterExclusive`
pub fn takeDelimiterExclusive(r: *Reader, delimiter: u8) DelimiterError![]u8 {
- const result = r.peekDelimiterInclusive(delimiter) catch |err| switch (err) {
- error.EndOfStream => {
- const remaining = r.buffer[r.seek..r.end];
- if (remaining.len == 0) return error.EndOfStream;
- r.toss(remaining.len);
- return remaining;
- },
- else => |e| return e,
- };
+ const result = try r.peekDelimiterExclusive(delimiter);
r.toss(result.len);
- return result[0 .. result.len - 1];
+ return result;
}
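
(The refactor makes takeDelimiterExclusive delegate to peekDelimiterExclusive and toss only the returned length, so the delimiter itself stays unread and consecutive calls return empty slices until it is consumed explicitly. A standalone sketch, mirroring the updated test below.)

    const std = @import("std");

    test "sketch: exclusive take leaves the delimiter unread" {
        var r: std.Io.Reader = .fixed("ab\nc");
        try std.testing.expectEqualStrings("ab", try r.takeDelimiterExclusive('\n'));
        // The '\n' was not consumed, so the next call returns "".
        try std.testing.expectEqualStrings("", try r.takeDelimiterExclusive('\n'));
        try std.testing.expectEqualStrings("\n", try r.take(1)); // consume it by hand
        try std.testing.expectEqualStrings("c", try r.takeDelimiterExclusive('\n'));
    }
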
/// Returns a slice of the next bytes of buffered data from the stream until
@@ -865,7 +868,7 @@ pub fn takeDelimiterExclusive(r: *Reader, delimiter: u8) DelimiterError![]u8 {
/// * `takeDelimiterInclusive`
/// * `takeDelimiterExclusive`
pub fn takeDelimiter(r: *Reader, delimiter: u8) error{ ReadFailed, StreamTooLong }!?[]u8 {
- const result = r.peekDelimiterInclusive(delimiter) catch |err| switch (err) {
+ const inclusive = r.peekDelimiterInclusive(delimiter) catch |err| switch (err) {
error.EndOfStream => {
const remaining = r.buffer[r.seek..r.end];
if (remaining.len == 0) return null;
@@ -874,8 +877,8 @@ pub fn takeDelimiter(r: *Reader, delimiter: u8) error{ ReadFailed, StreamTooLong
},
else => |e| return e,
};
- r.toss(result.len + 1);
- return result[0 .. result.len - 1];
+ r.toss(inclusive.len);
+ return inclusive[0 .. inclusive.len - 1];
}
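
(takeDelimiter, by contrast, consumes the delimiter as well, since the toss of `inclusive.len` now covers it, and returns null at end of stream, making it the natural primitive for iterating delimited records. A standalone sketch of that loop shape; the yields match the new takeDelimiter test below.)

    const std = @import("std");

    test "sketch: iterating lines with takeDelimiter" {
        var r: std.Io.Reader = .fixed("ab\nc\n\nd");
        var count: usize = 0;
        while (try r.takeDelimiter('\n')) |line| {
            // Yields "ab", "c", "", then "d"; null ends the loop.
            _ = line;
            count += 1;
        }
        try std.testing.expectEqual(4, count);
    }
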
/// Returns a slice of the next bytes of buffered data from the stream until
@@ -1402,6 +1405,9 @@ test peekSentinel {
var r: Reader = .fixed("ab\nc");
try testing.expectEqualStrings("ab", try r.peekSentinel('\n'));
try testing.expectEqualStrings("ab", try r.peekSentinel('\n'));
+ r.toss(3);
+ try testing.expectError(error.EndOfStream, r.peekSentinel('\n'));
+ try testing.expectEqualStrings("c", try r.peek(1));
}
test takeDelimiterInclusive {
@@ -1416,22 +1422,52 @@ test peekDelimiterInclusive {
try testing.expectEqualStrings("ab\n", try r.peekDelimiterInclusive('\n'));
r.toss(3);
try testing.expectError(error.EndOfStream, r.peekDelimiterInclusive('\n'));
+ try testing.expectEqualStrings("c", try r.peek(1));
}
test takeDelimiterExclusive {
var r: Reader = .fixed("ab\nc");
+
try testing.expectEqualStrings("ab", try r.takeDelimiterExclusive('\n'));
+ try testing.expectEqualStrings("", try r.takeDelimiterExclusive('\n'));
+ try testing.expectEqualStrings("", try r.takeDelimiterExclusive('\n'));
+ try testing.expectEqualStrings("\n", try r.take(1));
+
try testing.expectEqualStrings("c", try r.takeDelimiterExclusive('\n'));
try testing.expectError(error.EndOfStream, r.takeDelimiterExclusive('\n'));
}
test peekDelimiterExclusive {
var r: Reader = .fixed("ab\nc");
+
try testing.expectEqualStrings("ab", try r.peekDelimiterExclusive('\n'));
try testing.expectEqualStrings("ab", try r.peekDelimiterExclusive('\n'));
- r.toss(3);
+ r.toss(2);
+ try testing.expectEqualStrings("", try r.peekDelimiterExclusive('\n'));
+ try testing.expectEqualStrings("\n", try r.take(1));
+
try testing.expectEqualStrings("c", try r.peekDelimiterExclusive('\n'));
try testing.expectEqualStrings("c", try r.peekDelimiterExclusive('\n'));
+ r.toss(1);
+ try testing.expectError(error.EndOfStream, r.peekDelimiterExclusive('\n'));
+}
+
+test takeDelimiter {
+ var r: Reader = .fixed("ab\nc\n\nd");
+ try testing.expectEqualStrings("ab", (try r.takeDelimiter('\n')).?);
+ try testing.expectEqualStrings("c", (try r.takeDelimiter('\n')).?);
+ try testing.expectEqualStrings("", (try r.takeDelimiter('\n')).?);
+ try testing.expectEqualStrings("d", (try r.takeDelimiter('\n')).?);
+ try testing.expectEqual(null, try r.takeDelimiter('\n'));
+ try testing.expectEqual(null, try r.takeDelimiter('\n'));
+
+ r = .fixed("ab\nc\n\nd\n"); // one trailing newline does not affect behavior
+ try testing.expectEqualStrings("ab", (try r.takeDelimiter('\n')).?);
+ try testing.expectEqualStrings("c", (try r.takeDelimiter('\n')).?);
+ try testing.expectEqualStrings("", (try r.takeDelimiter('\n')).?);
+ try testing.expectEqualStrings("d", (try r.takeDelimiter('\n')).?);
+ try testing.expectEqual(null, try r.takeDelimiter('\n'));
+ try testing.expectEqual(null, try r.takeDelimiter('\n'));
}
test streamDelimiter {
@@ -1601,6 +1637,18 @@ test "readSliceShort with smaller buffer than Reader" {
try testing.expectEqualStrings(str, &buf);
}
+test "readSliceShort with indirect reader" {
+ var r: Reader = .fixed("HelloFren");
+ var ri_buf: [3]u8 = undefined;
+ var ri: std.testing.ReaderIndirect = .init(&r, &ri_buf);
+ var buf: [5]u8 = undefined;
+ try testing.expectEqual(5, try ri.interface.readSliceShort(&buf));
+ try testing.expectEqualStrings("Hello", buf[0..5]);
+ try testing.expectEqual(4, try ri.interface.readSliceShort(&buf));
+ try testing.expectEqualStrings("Fren", buf[0..4]);
+ try testing.expectEqual(0, try ri.interface.readSliceShort(&buf));
+}
+
test readVec {
var r: Reader = .fixed(std.ascii.letters);
var flat_buffer: [52]u8 = undefined;
@@ -1701,6 +1749,26 @@ test "takeDelimiterInclusive when it rebases" {
}
}
+test "takeDelimiterInclusive on an indirect reader when it rebases" {
+ const written_line = "ABCDEFGHIJKLMNOPQRSTUVWXYZ\n";
+ var buffer: [128]u8 = undefined;
+ var tr: std.testing.Reader = .init(&buffer, &.{
+ .{ .buffer = written_line[0..4] },
+ .{ .buffer = written_line[4..] },
+ .{ .buffer = written_line },
+ .{ .buffer = written_line },
+ .{ .buffer = written_line },
+ .{ .buffer = written_line },
+ .{ .buffer = written_line },
+ });
+ var indirect_buffer: [128]u8 = undefined;
+ var tri: std.testing.ReaderIndirect = .init(&tr.interface, &indirect_buffer);
+ const r = &tri.interface;
+ for (0..6) |_| {
+ try std.testing.expectEqualStrings(written_line, try r.takeDelimiterInclusive('\n'));
+ }
+}
+
test "takeStruct and peekStruct packed" {
var r: Reader = .fixed(&.{ 0b11110000, 0b00110011 });
const S = packed struct(u16) { a: u2, b: u6, c: u7, d: u1 };