
Commit 529aa9f

Merge pull request #25512 from ziglang/sendfile-fixes
std.Io: Writer and Reader bug fixes related to sendFile, delimiters, Limited, and seeking
2 parents 0bdd1b5 + d83d79c commit 529aa9f

10 files changed: 503 additions & 307 deletions


build.zig

Lines changed: 7 additions & 2 deletions
@@ -81,12 +81,13 @@ pub fn build(b: *std.Build) !void {
     docs_step.dependOn(langref_step);
     docs_step.dependOn(std_docs_step);
 
+    const test_default_only = b.option(bool, "test-default-only", "Limit test matrix to exactly one target configuration") orelse false;
     const skip_debug = b.option(bool, "skip-debug", "Main test suite skips debug builds") orelse false;
-    const skip_release = b.option(bool, "skip-release", "Main test suite skips release builds") orelse false;
+    const skip_release = b.option(bool, "skip-release", "Main test suite skips release builds") orelse test_default_only;
     const skip_release_small = b.option(bool, "skip-release-small", "Main test suite skips release-small builds") orelse skip_release;
     const skip_release_fast = b.option(bool, "skip-release-fast", "Main test suite skips release-fast builds") orelse skip_release;
     const skip_release_safe = b.option(bool, "skip-release-safe", "Main test suite skips release-safe builds") orelse skip_release;
-    const skip_non_native = b.option(bool, "skip-non-native", "Main test suite skips non-native builds") orelse false;
+    const skip_non_native = b.option(bool, "skip-non-native", "Main test suite skips non-native builds") orelse test_default_only;
     const skip_libc = b.option(bool, "skip-libc", "Main test suite skips tests that link libc") orelse false;
     const skip_single_threaded = b.option(bool, "skip-single-threaded", "Main test suite skips tests that are single-threaded") orelse false;
     const skip_compile_errors = b.option(bool, "skip-compile-errors", "Main test suite skips compile error tests") orelse false;
@@ -449,6 +450,7 @@ pub fn build(b: *std.Build) !void {
         .include_paths = &.{},
         .skip_single_threaded = skip_single_threaded,
         .skip_non_native = skip_non_native,
+        .test_default_only = test_default_only,
         .skip_freebsd = skip_freebsd,
         .skip_netbsd = skip_netbsd,
         .skip_windows = skip_windows,
@@ -471,6 +473,7 @@ pub fn build(b: *std.Build) !void {
         .include_paths = &.{},
         .skip_single_threaded = true,
         .skip_non_native = skip_non_native,
+        .test_default_only = test_default_only,
         .skip_freebsd = skip_freebsd,
         .skip_netbsd = skip_netbsd,
         .skip_windows = skip_windows,
@@ -492,6 +495,7 @@ pub fn build(b: *std.Build) !void {
         .include_paths = &.{},
         .skip_single_threaded = true,
         .skip_non_native = skip_non_native,
+        .test_default_only = test_default_only,
         .skip_freebsd = skip_freebsd,
         .skip_netbsd = skip_netbsd,
         .skip_windows = skip_windows,
@@ -513,6 +517,7 @@ pub fn build(b: *std.Build) !void {
         .include_paths = &.{},
         .skip_single_threaded = skip_single_threaded,
         .skip_non_native = skip_non_native,
+        .test_default_only = test_default_only,
         .skip_freebsd = skip_freebsd,
         .skip_netbsd = skip_netbsd,
         .skip_windows = skip_windows,
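The new option composes with the existing skip flags: when test-default-only is set and skip-release / skip-non-native are not given explicitly, both default to true, leaving only the native debug configuration in the matrix. Assuming the Zig build system's usual -D option syntax (not shown in this diff), such a single-configuration run would be invoked as `zig build test -Dtest-default-only`.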

lib/std/Io/Reader.zig

Lines changed: 72 additions & 37 deletions
@@ -481,7 +481,6 @@ pub fn readVecAll(r: *Reader, data: [][]u8) Error!void {
 /// is returned instead.
 ///
 /// See also:
-/// * `peek`
 /// * `toss`
 pub fn peek(r: *Reader, n: usize) Error![]u8 {
     try r.fill(n);
@@ -732,7 +731,7 @@ pub const DelimiterError = error{
 };
 
 /// Returns a slice of the next bytes of buffered data from the stream until
-/// `sentinel` is found, advancing the seek position.
+/// `sentinel` is found, advancing the seek position past the sentinel.
 ///
 /// Returned slice has a sentinel.
 ///
@@ -765,7 +764,7 @@ pub fn peekSentinel(r: *Reader, comptime sentinel: u8) DelimiterError![:sentinel
 }
 
 /// Returns a slice of the next bytes of buffered data from the stream until
-/// `delimiter` is found, advancing the seek position.
+/// `delimiter` is found, advancing the seek position past the delimiter.
 ///
 /// Returned slice includes the delimiter as the last byte.
 ///
@@ -793,32 +792,42 @@ pub fn takeDelimiterInclusive(r: *Reader, delimiter: u8) DelimiterError![]u8 {
 /// * `peekDelimiterExclusive`
 /// * `takeDelimiterInclusive`
 pub fn peekDelimiterInclusive(r: *Reader, delimiter: u8) DelimiterError![]u8 {
-    const buffer = r.buffer[0..r.end];
-    const seek = r.seek;
-    if (std.mem.indexOfScalarPos(u8, buffer, seek, delimiter)) |delimiter_index| {
-        @branchHint(.likely);
-        return buffer[seek .. delimiter_index + 1];
+    {
+        const contents = r.buffer[0..r.end];
+        const seek = r.seek;
+        if (std.mem.findScalarPos(u8, contents, seek, delimiter)) |end| {
+            @branchHint(.likely);
+            return contents[seek .. end + 1];
+        }
     }
-    // TODO take a parameter for max search length rather than relying on buffer capacity
-    try rebase(r, r.buffer.len);
-    while (r.buffer.len - r.end != 0) {
-        const existing_buffered_len = r.end - r.seek;
-        const end_cap = r.buffer[r.end..];
-        var writer: Writer = .fixed(end_cap);
-        const n = r.vtable.stream(r, &writer, .limited(end_cap.len)) catch |err| switch (err) {
-            error.WriteFailed => unreachable,
-            else => |e| return e,
-        };
-        r.end += n;
-        if (std.mem.indexOfScalarPos(u8, r.buffer[0..r.end], r.seek + existing_buffered_len, delimiter)) |delimiter_index| {
-            return r.buffer[r.seek .. delimiter_index + 1];
+    while (true) {
+        const content_len = r.end - r.seek;
+        if (r.buffer.len - content_len == 0) break;
+        try fillMore(r);
+        const seek = r.seek;
+        const contents = r.buffer[0..r.end];
+        if (std.mem.findScalarPos(u8, contents, seek + content_len, delimiter)) |end| {
+            return contents[seek .. end + 1];
         }
     }
-    return error.StreamTooLong;
+    // It might or might not be end of stream. There is no more buffer space
+    // left to disambiguate. If `StreamTooLong` was added to `RebaseError` then
+    // this logic could be replaced by removing the exit condition from the
+    // above while loop. That error code would represent when `buffer` capacity
+    // is too small for an operation, replacing the current use of asserts.
+    var failing_writer = Writer.failing;
+    while (r.vtable.stream(r, &failing_writer, .limited(1))) |n| {
+        assert(n == 0);
+    } else |err| switch (err) {
+        error.WriteFailed => return error.StreamTooLong,
+        error.ReadFailed => |e| return e,
+        error.EndOfStream => |e| return e,
+    }
 }
 
 /// Returns a slice of the next bytes of buffered data from the stream until
-/// `delimiter` is found, advancing the seek position up to the delimiter.
+/// `delimiter` is found, advancing the seek position up to (but not past)
+/// the delimiter.
 ///
 /// Returned slice excludes the delimiter. End-of-stream is treated equivalent
 /// to a delimiter, unless it would result in a length 0 return value, in which
@@ -832,20 +841,13 @@ pub fn peekDelimiterInclusive(r: *Reader, delimiter: u8) DelimiterError![]u8 {
 /// Invalidates previously returned values from `peek`.
 ///
 /// See also:
+/// * `takeDelimiter`
 /// * `takeDelimiterInclusive`
 /// * `peekDelimiterExclusive`
 pub fn takeDelimiterExclusive(r: *Reader, delimiter: u8) DelimiterError![]u8 {
-    const result = r.peekDelimiterInclusive(delimiter) catch |err| switch (err) {
-        error.EndOfStream => {
-            const remaining = r.buffer[r.seek..r.end];
-            if (remaining.len == 0) return error.EndOfStream;
-            r.toss(remaining.len);
-            return remaining;
-        },
-        else => |e| return e,
-    };
+    const result = try r.peekDelimiterExclusive(delimiter);
     r.toss(result.len);
-    return result[0 .. result.len - 1];
+    return result;
 }
 
 /// Returns a slice of the next bytes of buffered data from the stream until
@@ -866,7 +868,7 @@ pub fn takeDelimiterExclusive(r: *Reader, delimiter: u8) DelimiterError![]u8 {
 /// * `takeDelimiterInclusive`
 /// * `takeDelimiterExclusive`
 pub fn takeDelimiter(r: *Reader, delimiter: u8) error{ ReadFailed, StreamTooLong }!?[]u8 {
-    const result = r.peekDelimiterInclusive(delimiter) catch |err| switch (err) {
+    const inclusive = r.peekDelimiterInclusive(delimiter) catch |err| switch (err) {
         error.EndOfStream => {
             const remaining = r.buffer[r.seek..r.end];
             if (remaining.len == 0) return null;
@@ -875,8 +877,8 @@ pub fn takeDelimiter(r: *Reader, delimiter: u8) error{ ReadFailed, StreamTooLong
         },
         else => |e| return e,
     };
-    r.toss(result.len + 1);
-    return result[0 .. result.len - 1];
+    r.toss(inclusive.len);
+    return inclusive[0 .. inclusive.len - 1];
 }
 
 /// Returns a slice of the next bytes of buffered data from the stream until
@@ -1403,6 +1405,9 @@ test peekSentinel {
     var r: Reader = .fixed("ab\nc");
     try testing.expectEqualStrings("ab", try r.peekSentinel('\n'));
     try testing.expectEqualStrings("ab", try r.peekSentinel('\n'));
+    r.toss(3);
+    try testing.expectError(error.EndOfStream, r.peekSentinel('\n'));
+    try testing.expectEqualStrings("c", try r.peek(1));
 }
 
 test takeDelimiterInclusive {
@@ -1417,22 +1422,52 @@ test peekDelimiterInclusive {
     try testing.expectEqualStrings("ab\n", try r.peekDelimiterInclusive('\n'));
     r.toss(3);
     try testing.expectError(error.EndOfStream, r.peekDelimiterInclusive('\n'));
+    try testing.expectEqualStrings("c", try r.peek(1));
 }
 
 test takeDelimiterExclusive {
     var r: Reader = .fixed("ab\nc");
+
     try testing.expectEqualStrings("ab", try r.takeDelimiterExclusive('\n'));
+    try testing.expectEqualStrings("", try r.takeDelimiterExclusive('\n'));
+    try testing.expectEqualStrings("", try r.takeDelimiterExclusive('\n'));
+    try testing.expectEqualStrings("\n", try r.take(1));
+
     try testing.expectEqualStrings("c", try r.takeDelimiterExclusive('\n'));
    try testing.expectError(error.EndOfStream, r.takeDelimiterExclusive('\n'));
 }
 
 test peekDelimiterExclusive {
     var r: Reader = .fixed("ab\nc");
+
     try testing.expectEqualStrings("ab", try r.peekDelimiterExclusive('\n'));
     try testing.expectEqualStrings("ab", try r.peekDelimiterExclusive('\n'));
-    r.toss(3);
+    r.toss(2);
+    try testing.expectEqualStrings("", try r.peekDelimiterExclusive('\n'));
+    try testing.expectEqualStrings("\n", try r.take(1));
+
     try testing.expectEqualStrings("c", try r.peekDelimiterExclusive('\n'));
     try testing.expectEqualStrings("c", try r.peekDelimiterExclusive('\n'));
+    r.toss(1);
+    try testing.expectError(error.EndOfStream, r.peekDelimiterExclusive('\n'));
+}
+
+test takeDelimiter {
+    var r: Reader = .fixed("ab\nc\n\nd");
+    try testing.expectEqualStrings("ab", (try r.takeDelimiter('\n')).?);
+    try testing.expectEqualStrings("c", (try r.takeDelimiter('\n')).?);
+    try testing.expectEqualStrings("", (try r.takeDelimiter('\n')).?);
+    try testing.expectEqualStrings("d", (try r.takeDelimiter('\n')).?);
+    try testing.expectEqual(null, try r.takeDelimiter('\n'));
+    try testing.expectEqual(null, try r.takeDelimiter('\n'));
+
+    r = .fixed("ab\nc\n\nd\n"); // one trailing newline does not affect behavior
+    try testing.expectEqualStrings("ab", (try r.takeDelimiter('\n')).?);
+    try testing.expectEqualStrings("c", (try r.takeDelimiter('\n')).?);
+    try testing.expectEqualStrings("", (try r.takeDelimiter('\n')).?);
+    try testing.expectEqualStrings("d", (try r.takeDelimiter('\n')).?);
+    try testing.expectEqual(null, try r.takeDelimiter('\n'));
+    try testing.expectEqual(null, try r.takeDelimiter('\n'));
 }
 
 test streamDelimiter {
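Taken together, the delimiter fixes make the take/peek family consistent: takeDelimiterExclusive now delegates to peekDelimiterExclusive and leaves the delimiter in the stream (matching its updated doc comment), while takeDelimiter consumes the delimiter and returns null at end of stream. A minimal line-iteration sketch built on the new takeDelimiter contract, not part of this commit; it assumes the reader's buffer can hold the longest line, otherwise error.StreamTooLong is returned:

const std = @import("std");

// Counts lines the way the takeDelimiter test above exercises them: each
// returned slice excludes '\n', the delimiter itself is consumed, empty lines
// arrive as "", and null signals end of stream.
fn countLines(reader: *std.Io.Reader) error{ ReadFailed, StreamTooLong }!usize {
    var n: usize = 0;
    while (try reader.takeDelimiter('\n')) |line| {
        _ = line;
        n += 1;
    }
    return n;
}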

lib/std/Io/Reader/Limited.zig

Lines changed: 44 additions & 0 deletions
@@ -27,6 +27,7 @@ pub fn init(reader: *Reader, limit: Limit, buffer: []u8) Limited {
 
 fn stream(r: *Reader, w: *Writer, limit: Limit) Reader.StreamError!usize {
     const l: *Limited = @fieldParentPtr("interface", r);
+    if (l.remaining == .nothing) return error.EndOfStream;
     const combined_limit = limit.min(l.remaining);
     const n = try l.unlimited.stream(w, combined_limit);
     l.remaining = l.remaining.subtract(n).?;
@@ -51,8 +52,51 @@ test stream {
 
 fn discard(r: *Reader, limit: Limit) Reader.Error!usize {
     const l: *Limited = @fieldParentPtr("interface", r);
+    if (l.remaining == .nothing) return error.EndOfStream;
     const combined_limit = limit.min(l.remaining);
     const n = try l.unlimited.discard(combined_limit);
     l.remaining = l.remaining.subtract(n).?;
     return n;
 }
+
+test "end of stream, read, hit limit exactly" {
+    var f: Reader = .fixed("i'm dying");
+    var l = f.limited(.limited(4), &.{});
+    const r = &l.interface;
+
+    var buf: [2]u8 = undefined;
+    try r.readSliceAll(&buf);
+    try r.readSliceAll(&buf);
+    try std.testing.expectError(error.EndOfStream, l.interface.readSliceAll(&buf));
+}
+
+test "end of stream, read, hit limit after partial read" {
+    var f: Reader = .fixed("i'm dying");
+    var l = f.limited(.limited(5), &.{});
+    const r = &l.interface;
+
+    var buf: [2]u8 = undefined;
+    try r.readSliceAll(&buf);
+    try r.readSliceAll(&buf);
+    try std.testing.expectError(error.EndOfStream, l.interface.readSliceAll(&buf));
+}
+
+test "end of stream, discard, hit limit exactly" {
+    var f: Reader = .fixed("i'm dying");
+    var l = f.limited(.limited(4), &.{});
+    const r = &l.interface;
+
+    try r.discardAll(2);
+    try r.discardAll(2);
+    try std.testing.expectError(error.EndOfStream, l.interface.discardAll(2));
+}
+
+test "end of stream, discard, hit limit after partial read" {
+    var f: Reader = .fixed("i'm dying");
+    var l = f.limited(.limited(5), &.{});
+    const r = &l.interface;
+
+    try r.discardAll(2);
+    try r.discardAll(2);
+    try std.testing.expectError(error.EndOfStream, l.interface.discardAll(2));
+}
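The guard added to stream and discard changes what callers observe once the limit is used up: instead of zero-length reads, the Limited reader now reports error.EndOfStream, which is what looping helpers such as readSliceAll and discardAll need in order to terminate. A small usage sketch mirroring the tests above, not part of this commit:

const std = @import("std");

test "limit exhaustion surfaces as EndOfStream" {
    // A 4-byte budget over a longer fixed stream, as in the tests above.
    var fixed: std.Io.Reader = .fixed("some longer input");
    var limited = fixed.limited(.limited(4), &.{});
    const r = &limited.interface;

    var buf: [4]u8 = undefined;
    try r.readSliceAll(&buf); // consumes exactly the budget
    try std.testing.expectEqualStrings("some", &buf);
    // With the new guard, the exhausted limit is reported as EndOfStream.
    try std.testing.expectError(error.EndOfStream, r.readSliceAll(&buf));
}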

lib/std/Io/Writer.zig

Lines changed: 31 additions & 6 deletions
@@ -923,10 +923,12 @@ pub fn sendFileHeader(
     return n;
 }
 
-/// Asserts nonzero buffer capacity.
+/// Asserts nonzero buffer capacity and nonzero `limit`.
 pub fn sendFileReading(w: *Writer, file_reader: *File.Reader, limit: Limit) FileReadingError!usize {
+    assert(limit != .nothing);
     const dest = limit.slice(try w.writableSliceGreedy(1));
-    const n = try file_reader.read(dest);
+    const n = try file_reader.interface.readSliceShort(dest);
+    if (n == 0) return error.EndOfStream;
     w.advance(n);
     return n;
 }
@@ -2778,7 +2780,8 @@ pub const Allocating = struct {
         if (additional == 0) return error.EndOfStream;
         a.ensureUnusedCapacity(limit.minInt64(additional)) catch return error.WriteFailed;
         const dest = limit.slice(a.writer.buffer[a.writer.end..]);
-        const n = try file_reader.read(dest);
+        const n = try file_reader.interface.readSliceShort(dest);
+        if (n == 0) return error.EndOfStream;
         a.writer.end += n;
         return n;
     }
@@ -2849,18 +2852,40 @@ test "allocating sendFile" {
 
     const file = try tmp_dir.dir.createFile("input.txt", .{ .read = true });
     defer file.close();
-    var r_buffer: [256]u8 = undefined;
+    var r_buffer: [2]u8 = undefined;
     var file_writer: std.fs.File.Writer = .init(file, &r_buffer);
-    try file_writer.interface.writeByte('h');
+    try file_writer.interface.writeAll("abcd");
     try file_writer.interface.flush();
 
     var file_reader = file_writer.moveToReader();
     try file_reader.seekTo(0);
+    try file_reader.interface.fill(2);
 
     var allocating: Writer.Allocating = .init(testing.allocator);
     defer allocating.deinit();
+    try allocating.ensureUnusedCapacity(1);
+    try testing.expectEqual(4, allocating.writer.sendFileAll(&file_reader, .unlimited));
+    try testing.expectEqualStrings("abcd", allocating.writer.buffered());
+}
+
+test sendFileReading {
+    var tmp_dir = testing.tmpDir(.{});
+    defer tmp_dir.cleanup();
+
+    const file = try tmp_dir.dir.createFile("input.txt", .{ .read = true });
+    defer file.close();
+    var r_buffer: [2]u8 = undefined;
+    var file_writer: std.fs.File.Writer = .init(file, &r_buffer);
+    try file_writer.interface.writeAll("abcd");
+    try file_writer.interface.flush();
 
-    _ = try file_reader.interface.streamRemaining(&allocating.writer);
+    var file_reader = file_writer.moveToReader();
+    try file_reader.seekTo(0);
+    try file_reader.interface.fill(2);
+
+    var w_buffer: [1]u8 = undefined;
+    var discarding: Writer.Discarding = .init(&w_buffer);
+    try testing.expectEqual(4, discarding.writer.sendFileReadingAll(&file_reader, .unlimited));
 }
 
 test writeStruct {
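Both sendFile paths now read through file_reader.interface.readSliceShort and turn a zero-length read into error.EndOfStream, and sendFileReading additionally asserts a nonzero limit; the new tests pre-fill the file reader's buffer with fill(2) so the case where buffered data must be drained first is covered as well. A hedged sketch of the common caller pattern, not part of this commit; gpa stands in for any allocator and file_reader for a std.fs.File.Reader positioned at the start of the file:

const std = @import("std");

// Streams an entire file into an allocating writer via the sendFile path and
// returns the total number of bytes moved, as reported by sendFileAll (the
// test above additionally checks the contents via allocating.writer.buffered()).
fn countFileBytes(gpa: std.mem.Allocator, file_reader: *std.fs.File.Reader) !usize {
    var allocating: std.Io.Writer.Allocating = .init(gpa);
    defer allocating.deinit();
    return try allocating.writer.sendFileAll(file_reader, .unlimited);
}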
