@@ -74,14 +74,18 @@ pub const VTable = struct {
     ///
     /// `data` may not contain an alias to `Reader.buffer`.
     ///
+    /// `data` is mutable because the implementation may need to temporarily
+    /// modify the fields in order to handle partial reads. Implementations
+    /// must restore the original values before returning.
+    ///
     /// Implementations may ignore `data`, writing directly to `Reader.buffer`,
     /// modifying `seek` and `end` accordingly, and returning 0 from this
     /// function. Implementations are encouraged to take advantage of this if
     /// it simplifies the logic.
     ///
     /// The default implementation calls `stream` with either `data[0]` or
     /// `Reader.buffer`, whichever is bigger.
-    readVec: *const fn (r: *Reader, data: []const []u8) Error!usize = defaultReadVec,
+    readVec: *const fn (r: *Reader, data: [][]u8) Error!usize = defaultReadVec,
 
     /// Ensures `capacity` more data can be buffered without rebasing.
     ///
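The `[][]u8` mutable-slice signature above is also what callers see on the public `readVec`/`readVecAll`. A minimal usage sketch (illustrative only, not part of this diff; the test name and byte counts are assumptions) scattering one read across several caller buffers:

const std = @import("std");

test "readVecAll scatters into caller buffers (illustrative sketch)" {
    // `.fixed` wraps an in-memory byte slice as a Reader; all data is buffered.
    var r: std.Io.Reader = .fixed("hello world");
    var a: [5]u8 = undefined;
    var b: [6]u8 = undefined;
    // The vectors are mutable slices, matching the new `[][]u8` signature.
    var bufs: [2][]u8 = .{ &a, &b };
    try r.readVecAll(&bufs);
    try std.testing.expectEqualStrings("hello", &a);
    try std.testing.expectEqualStrings(" world", &b);
}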
@@ -262,8 +266,7 @@ pub fn streamRemaining(r: *Reader, w: *Writer) StreamRemainingError!usize {
 /// number of bytes discarded.
 pub fn discardRemaining(r: *Reader) ShortError!usize {
     var offset: usize = r.end - r.seek;
-    r.seek = 0;
-    r.end = 0;
+    r.seek = r.end;
     while (true) {
         offset += r.vtable.discard(r, .unlimited) catch |err| switch (err) {
             error.EndOfStream => return offset,
@@ -417,7 +420,7 @@ pub fn readVec(r: *Reader, data: [][]u8) Error!usize {
 }
 
 /// Writes to `Reader.buffer` or `data`, whichever has larger capacity.
-pub fn defaultReadVec(r: *Reader, data: []const []u8) Error!usize {
+pub fn defaultReadVec(r: *Reader, data: [][]u8) Error!usize {
     assert(r.seek == r.end);
     r.seek = 0;
     r.end = 0;
@@ -438,23 +441,6 @@ pub fn defaultReadVec(r: *Reader, data: []const []u8) Error!usize {
     return 0;
 }
 
-/// Always writes to `Reader.buffer` and returns 0.
-pub fn indirectReadVec(r: *Reader, data: []const []u8) Error!usize {
-    _ = data;
-    assert(r.seek == r.end);
-    var writer: Writer = .{
-        .buffer = r.buffer,
-        .end = r.end,
-        .vtable = &.{ .drain = Writer.fixedDrain },
-    };
-    const limit: Limit = .limited(writer.buffer.len - writer.end);
-    r.end += r.vtable.stream(r, &writer, limit) catch |err| switch (err) {
-        error.WriteFailed => unreachable,
-        else => |e| return e,
-    };
-    return 0;
-}
-
 pub fn buffered(r: *Reader) []u8 {
     return r.buffer[r.seek..r.end];
 }
@@ -463,8 +449,8 @@ pub fn bufferedLen(r: *const Reader) usize {
     return r.end - r.seek;
 }
 
-pub fn hashed(r: *Reader, hasher: anytype) Hashed(@TypeOf(hasher)) {
-    return .{ .in = r, .hasher = hasher };
+pub fn hashed(r: *Reader, hasher: anytype, buffer: []u8) Hashed(@TypeOf(hasher)) {
+    return .init(r, hasher, buffer);
 }
 
 pub fn readVecAll(r: *Reader, data: [][]u8) Error!void {
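A hedged usage sketch of the new three-argument `hashed` (not part of this diff; the Crc32 hasher, buffer size, and expected values are illustrative assumptions). The wrapper hashes every byte pulled from the inner reader:

const std = @import("std");

test "hashed reader wrapper (illustrative sketch)" {
    var src: std.Io.Reader = .fixed("abc");
    var hash_buf: [16]u8 = undefined; // buffer is now supplied by the caller
    var hashing = src.hashed(std.hash.Crc32.init(), &hash_buf);
    var out: [3]u8 = undefined;
    // Reads go through the `reader` field (renamed from `interface` below).
    const n = try hashing.reader.readSliceShort(&out);
    try std.testing.expectEqual(@as(usize, 3), n);
    try std.testing.expectEqual(std.hash.Crc32.hash("abc"), hashing.hasher.final());
}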
@@ -539,8 +525,7 @@ pub fn toss(r: *Reader, n: usize) void {
 
 /// Equivalent to `toss(r.bufferedLen())`.
 pub fn tossBuffered(r: *Reader) void {
-    r.seek = 0;
-    r.end = 0;
+    r.seek = r.end;
 }
 
 /// Equivalent to `peek` followed by `toss`.
@@ -627,8 +612,7 @@ pub fn discardShort(r: *Reader, n: usize) ShortError!usize {
         return n;
     }
     var remaining = n - (r.end - r.seek);
-    r.end = 0;
-    r.seek = 0;
+    r.seek = r.end;
     while (true) {
         const discard_len = r.vtable.discard(r, .limited(remaining)) catch |err| switch (err) {
             error.EndOfStream => return n - remaining,
@@ -1678,7 +1662,7 @@ fn endingStream(r: *Reader, w: *Writer, limit: Limit) StreamError!usize {
     return error.EndOfStream;
 }
 
-fn endingReadVec(r: *Reader, data: []const []u8) Error!usize {
+fn endingReadVec(r: *Reader, data: [][]u8) Error!usize {
     _ = r;
     _ = data;
     return error.EndOfStream;
@@ -1709,6 +1693,15 @@ fn failingDiscard(r: *Reader, limit: Limit) Error!usize {
     return error.ReadFailed;
 }
 
+pub fn adaptToOldInterface(r: *Reader) std.Io.AnyReader {
+    return .{ .context = r, .readFn = derpRead };
+}
+
+fn derpRead(context: *const anyopaque, buffer: []u8) anyerror!usize {
+    const r: *Reader = @constCast(@alignCast(@ptrCast(context)));
+    return r.readSliceShort(buffer);
+}
+
 test "readAlloc when the backing reader provides one byte at a time" {
     const str = "This is a test";
     var tiny_buffer: [1]u8 = undefined;
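A hedged sketch of how the new `adaptToOldInterface` might be used to hand a new-style `Reader` to code still written against `std.Io.AnyReader` (the test itself is illustrative, not part of this diff):

const std = @import("std");

test "bridging to the old AnyReader interface (illustrative sketch)" {
    var r: std.Io.Reader = .fixed("legacy data");
    const any: std.Io.AnyReader = r.adaptToOldInterface();
    // Old-style consumers keep calling `read`; it forwards to `readSliceShort`.
    var buf: [6]u8 = undefined;
    const n = try any.read(&buf);
    try std.testing.expectEqualStrings("legacy", buf[0..n]);
}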
@@ -1772,15 +1765,16 @@ pub fn Hashed(comptime Hasher: type) type {
     return struct {
         in: *Reader,
         hasher: Hasher,
-        interface: Reader,
+        reader: Reader,
 
         pub fn init(in: *Reader, hasher: Hasher, buffer: []u8) @This() {
             return .{
                 .in = in,
                 .hasher = hasher,
-                .interface = .{
+                .reader = .{
                     .vtable = &.{
-                        .read = @This().read,
+                        .stream = @This().stream,
+                        .readVec = @This().readVec,
                         .discard = @This().discard,
                     },
                     .buffer = buffer,
@@ -1790,33 +1784,39 @@ pub fn Hashed(comptime Hasher: type) type {
             };
         }
 
-        fn read(r: *Reader, w: *Writer, limit: Limit) StreamError!usize {
-            const this: *@This() = @alignCast(@fieldParentPtr("interface", r));
-            const data = w.writableVector(limit);
+        fn stream(r: *Reader, w: *Writer, limit: Limit) StreamError!usize {
+            const this: *@This() = @alignCast(@fieldParentPtr("reader", r));
+            const data = limit.slice(try w.writableSliceGreedy(1));
+            var vec: [1][]u8 = .{data};
+            const n = try this.in.readVec(&vec);
+            this.hasher.update(data[0..n]);
+            w.advance(n);
+            return n;
+        }
+
+        fn readVec(r: *Reader, data: [][]u8) Error!usize {
+            const this: *@This() = @alignCast(@fieldParentPtr("reader", r));
             const n = try this.in.readVec(data);
-            const result = w.advanceVector(n);
             var remaining: usize = n;
             for (data) |slice| {
                 if (remaining < slice.len) {
                     this.hasher.update(slice[0..remaining]);
-                    return result;
+                    return n;
                 } else {
                     remaining -= slice.len;
                     this.hasher.update(slice);
                 }
             }
             assert(remaining == 0);
-            return result;
+            return n;
         }
 
         fn discard(r: *Reader, limit: Limit) Error!usize {
-            const this: *@This() = @alignCast(@fieldParentPtr("interface", r));
-            var w = this.hasher.writer(&.{});
-            const n = this.in.stream(&w, limit) catch |err| switch (err) {
-                error.WriteFailed => unreachable,
-                else => |e| return e,
-            };
-            return n;
+            const this: *@This() = @alignCast(@fieldParentPtr("reader", r));
+            const peeked = limit.slice(try this.in.peekGreedy(1));
+            this.hasher.update(peeked);
+            this.in.toss(peeked.len);
+            return peeked.len;
         }
     };
 }
@@ -1874,3 +1874,7 @@ pub fn writableVectorWsa(
     }
     return .{ i, n };
 }
+
+test {
+    _ = Limited;
+}