mirror of
https://codeberg.org/ziglang/zig.git
synced 2025-12-06 05:44:20 +00:00
std.Io.Reader: fix appendRemaining
It calls `readVec`, which is a higher-level function than the previous implementation expected.
This commit is contained in:
parent
9222d201d7
commit
e7a639967e
1 changed file with 5 additions and 21 deletions
|
|
@ -246,34 +246,18 @@ pub fn appendRemaining(
|
|||
limit: Limit,
|
||||
) LimitedAllocError!void {
|
||||
assert(r.buffer.len != 0); // Needed to detect limit exceeded without losing data.
|
||||
const buffer = r.buffer;
|
||||
const buffer_contents = buffer[r.seek..r.end];
|
||||
const copy_len = limit.minInt(buffer_contents.len);
|
||||
try list.ensureUnusedCapacity(gpa, copy_len);
|
||||
@memcpy(list.unusedCapacitySlice()[0..copy_len], buffer[0..copy_len]);
|
||||
list.items.len += copy_len;
|
||||
r.seek += copy_len;
|
||||
if (copy_len == buffer_contents.len) {
|
||||
r.seek = 0;
|
||||
r.end = 0;
|
||||
}
|
||||
var remaining = limit.subtract(copy_len).?;
|
||||
while (true) {
|
||||
try list.ensureUnusedCapacity(gpa, 1);
|
||||
var remaining = limit;
|
||||
while (remaining.nonzero()) {
|
||||
try list.ensureUnusedCapacity(gpa, r.bufferedLen() + 1);
|
||||
const dest = remaining.slice(list.unusedCapacitySlice());
|
||||
const additional_buffer: []u8 = if (@intFromEnum(remaining) == dest.len) buffer else &.{};
|
||||
const n = readVec(r, &.{ dest, additional_buffer }) catch |err| switch (err) {
|
||||
const n = readVecLimit(r, &.{dest}, .unlimited) catch |err| switch (err) {
|
||||
error.EndOfStream => break,
|
||||
error.ReadFailed => return error.ReadFailed,
|
||||
};
|
||||
if (n > dest.len) {
|
||||
r.end = n - dest.len;
|
||||
list.items.len += dest.len;
|
||||
return error.StreamTooLong;
|
||||
}
|
||||
list.items.len += n;
|
||||
remaining = remaining.subtract(n).?;
|
||||
}
|
||||
if (r.bufferedLen() != 0) return error.StreamTooLong;
|
||||
}
|
||||
|
||||
/// Writes bytes from the internally tracked stream position to `data`.
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue